/**
* EVE Swagger Interface
* An OpenAPI for EVE Online
*/
package net.troja.eve.esi.auth;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.databind.ObjectMapper;
import java.io.BufferedReader;
import java.io.DataOutputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.UnsupportedEncodingException;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLEncoder;
import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.security.SecureRandom;
import java.util.Base64;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import javax.net.ssl.HttpsURLConnection;
import net.troja.eve.esi.ApiException;
import net.troja.eve.esi.Pair;
public class OAuth implements Authentication {
private static final String URI_OAUTH = "https://login.eveonline.com/v2/oauth";
private static final String URI_AUTHENTICATION = URI_OAUTH + "/authorize";
private static final String URI_ACCESS_TOKEN = URI_OAUTH + "/token";
private static final String AB = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz-._~";
private static final SecureRandom RND = new SecureRandom();
private static final int LEN = 128;
private String codeVerifier;
private AccountData account;
private static final Map<String, AccountData> ACCOUNTS = new ConcurrentHashMap<>();
@Override
public void applyToParams(final List<Pair> queryParams, final Map<String, String> headerParams) {
// Add auth
AccountData accountData = getAccountData();
if (accountData != null) {
headerParams.put("Authorization", "Bearer " + accountData.getAccessToken());
}
}
public void setAccessToken(final String accessToken) {
if (account == null) {
account = new AccountData("", "");
}
account.setAccessToken(accessToken);
}
public String getRefreshToken() {
if (account != null) {
return account.getRefreshToken();
} else {
return null;
}
}
public String getClientId() {
if (account != null) {
return account.getClientId();
} else {
return null;
}
}
public void setAuth(final String clientId, final String refreshToken) {
final String authKey = clientId + refreshToken;
synchronized (ACCOUNTS) { //Sync here to avoid multiple instances being made of this account - better safe than sorry
AccountData accountData = ACCOUNTS.get(authKey);
if (accountData == null) {
accountData = new AccountData(clientId, refreshToken);
ACCOUNTS.put(authKey, accountData);
}
if (account != null) {
accountData.setAccessToken(account.getAccessToken());
}
account = accountData;
}
}
public void setClientId(final String clientId) {
setAuth(clientId, null);
}
public String getAccessToken() {
AccountData accessTokenData = getAccountData();
if (accessTokenData != null) {
return accessTokenData.getAccessToken();
} else {
return null;
}
}
public JWT getJWT() {
AccountData accountData = getAccountData(); // Update access token if needed;
if (accountData == null) {
return null;
}
try {
String accessToken = accountData.getAccessToken();
if (accessToken == null) {
return null;
}
String[] parts = accessToken.split("\\.");
if (parts.length != 3) {
return null;
}
ObjectMapper objectMapper = new ObjectMapper();
JWT.Header header = objectMapper.readValue(new String(Base64.getUrlDecoder().decode(parts[0])),
JWT.Header.class);
JWT.Payload payload = objectMapper.readValue(new String(Base64.getUrlDecoder().decode(parts[1])),
JWT.Payload.class);
String signature = parts[2];
return new JWT(header, payload, signature);
} catch (IOException ex) {
return null;
}
}
private AccountData getAccountData() {
if (account == null) {
return null;
} else {
account.update();
return account;
}
}
public String getAuthorizationUri(final String redirectUri, final Set<String> scopes, final String state) {
if (account == null) throw new IllegalArgumentException("Auth is not set");
StringBuilder builder = new StringBuilder();
builder.append(URI_AUTHENTICATION);
builder.append("?");
builder.append("response_type=");
builder.append(encode("code"));
builder.append("&redirect_uri=");
builder.append(encode(redirectUri));
builder.append("&client_id=");
builder.append(encode(account.getClientId()));
builder.append("&scope=");
builder.append(encode(getScopesString(scopes)));
builder.append("&state=");
builder.append(encode(state));
builder.append("&code_challenge");
builder.append(getCodeChallenge()); // Already url encoded
builder.append("&code_challenge_method=");
builder.append(encode("S256"));
return builder.toString();
}
/**
* Finish the oauth flow after the user was redirected back.
*
* @param code
* Code returned by the Eve Online SSO
* @param state
* This should be some secret to prevent XSRF; see
* getAuthorizationUri
* @throws net.troja.eve.esi.ApiException
*/
public void finishFlow(final String code, final String state) throws ApiException {
if (account == null) throw new IllegalArgumentException("ClientID/Refresh Token is not set");
StringBuilder builder = new StringBuilder();
builder.append("grant_type=");
builder.append(encode("authorization_code"));
builder.append("&client_id=");
builder.append(encode(account.getClientId()));
builder.append("&code=");
builder.append(encode(code));
builder.append("&code_verifier=");
builder.append(encode(codeVerifier));
update(account, builder.toString());
}
private static void refreshToken(AccountData accountData) throws ApiException {
StringBuilder builder = new StringBuilder();
builder.append("grant_type=");
builder.append(encode("refresh_token"));
builder.append("&client_id=");
builder.append(encode(accountData.getClientId()));
builder.append("&refresh_token=");
builder.append(encode(accountData.getRefreshToken()));
update(accountData, builder.toString());
}
private static void update(AccountData accountData, String urlParameters) throws ApiException {
try {
URL obj = new URL(URI_ACCESS_TOKEN);
HttpsURLConnection con = (HttpsURLConnection) obj.openConnection();
// add request header
con.setRequestMethod("POST");
con.setRequestProperty("Content-Type", "application/x-www-form-urlencoded");
con.setRequestProperty("Host", "login.eveonline.com");
con.setConnectTimeout(10000);
con.setReadTimeout(10000);
// Send post request
con.setDoOutput(true);
try (DataOutputStream wr = new DataOutputStream(con.getOutputStream())) {
wr.writeBytes(urlParameters);
wr.flush();
}
StringBuilder response;
try (BufferedReader in = new BufferedReader(new InputStreamReader(con.getInputStream()))) {
String inputLine;
response = new StringBuilder();
while ((inputLine = in.readLine()) != null) {
response.append(inputLine);
}
}
// read json
ObjectMapper objectMapper = new ObjectMapper();
Result result = objectMapper.readValue(response.toString(), Result.class);
// set data
long validUntil = System.currentTimeMillis() + result.getExpiresIn() * 1000 - 5000; // treat the token as expired 5 seconds early
accountData.setAccessToken(result.getAccessToken());
accountData.setValidUntil(validUntil);
accountData.setRefreshToken(result.getRefreshToken());
ACCOUNTS.put(accountData.getKey(), accountData); //Update the map in case the Refresh Token (AKA Key) has changed
} catch (MalformedURLException ex) {
throw new ApiException(ex);
} catch (IOException ex) {
throw new ApiException(ex);
}
}
private String getScopesString(final Set<String> scopes) {
final StringBuilder scopesString = new StringBuilder();
if (scopes != null) {
for (final String scope : scopes) {
if (scopesString.length() > 0) {
scopesString.append(' ');
}
scopesString.append(scope);
}
}
return scopesString.toString();
}
private static class AccountData {
private final String clientId;
private String refreshToken;
private String accessToken;
private long validUntil;
public AccountData(String clientId, String refreshToken) {
this.clientId = clientId;
this.refreshToken = refreshToken;
}
public String getClientId() {
return clientId;
}
public String getRefreshToken() {
return refreshToken;
}
public String getAccessToken() {
return accessToken;
}
public long getValidUntil() {
return validUntil;
}
public void setRefreshToken(String refreshToken) {
this.refreshToken = refreshToken;
}
public void setAccessToken(String accessToken) {
this.accessToken = accessToken;
}
public void setValidUntil(long validUntil) {
this.validUntil = validUntil;
}
private synchronized void update() {
if (refreshToken != null && (accessToken == null || getValidUntil() < System.currentTimeMillis())) {
try {
OAuth.refreshToken(this);
} catch (final ApiException ex) {
// This error will be handled by ESI once the request is made
}
}
}
public String getKey() {
return clientId + refreshToken;
}
}
private String getCodeChallenge() {
try {
StringBuilder sb = new StringBuilder(LEN);
for (int i = 0; i < LEN; i++) {
sb.append(AB.charAt(RND.nextInt(AB.length())));
}
codeVerifier = sb.toString();
byte[] ascii = codeVerifier.getBytes(StandardCharsets.US_ASCII);
MessageDigest digest = MessageDigest.getInstance("SHA-256");
byte[] sha = digest.digest(ascii);
return Base64.getUrlEncoder().withoutPadding().encodeToString(sha); // PKCE (RFC 7636) uses unpadded base64url
} catch (NoSuchAlgorithmException ex) {
return null;
}
}
private static String encode(String parameter) {
try {
return URLEncoder.encode(parameter, "UTF-8");
} catch (UnsupportedEncodingException ex) {
return null;
}
}
private static class Result {
@JsonProperty("access_token")
private String accessToken;
@JsonProperty("expires_in")
private Long expiresIn;
@JsonProperty("token_type")
private String tokenType;
@JsonProperty("refresh_token")
private String refreshToken;
public String getAccessToken() {
return accessToken;
}
public void setAccessToken(String accessToken) {
this.accessToken = accessToken;
}
public Long getExpiresIn() {
return expiresIn;
}
public void setExpiresIn(Long expiresIn) {
this.expiresIn = expiresIn;
}
public String getTokenType() {
return tokenType;
}
public void setTokenType(String tokenType) {
this.tokenType = tokenType;
}
public String getRefreshToken() {
return refreshToken;
}
public void setRefreshToken(String refreshToken) {
this.refreshToken = refreshToken;
}
}
}
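/*
 * Usage sketch (illustrative, not part of the generated client): the client id,
 * redirect URI, scope and state below are placeholder values for a third-party
 * ESI application.
 */
class OAuthUsageSketch {
    public static void main(String[] args) {
        OAuth oauth = new OAuth();
        oauth.setClientId("your-client-id"); // placeholder application client id
        java.util.Set<String> scopes = java.util.Collections.singleton("esi-wallet.read_character_wallet.v1"); // example scope
        String uri = oauth.getAuthorizationUri("http://localhost:2221/", scopes, "some-random-state");
        System.out.println("Open in a browser and log in: " + uri);
        // After the SSO redirects back with ?code=...&state=..., finish the flow:
        // oauth.finishFlow(codeFromRedirect, stateFromRedirect);
        // String accessToken = oauth.getAccessToken();
        // String refreshToken = oauth.getRefreshToken(); // persist for later sessions
    }
}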
|
package cpw.mods.fml.common;
import java.io.File;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.net.MalformedURLException;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import org.apache.logging.log4j.Level;
import com.google.common.base.CharMatcher;
import com.google.common.base.Function;
import com.google.common.base.Joiner;
import com.google.common.base.Splitter;
import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.BiMap;
import com.google.common.collect.HashBiMap;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableMultiset;
import com.google.common.collect.LinkedHashMultimap;
import com.google.common.collect.ListMultimap;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Multimaps;
import com.google.common.collect.Multiset.Entry;
import com.google.common.collect.Multisets;
import com.google.common.collect.Ordering;
import com.google.common.collect.SetMultimap;
import com.google.common.collect.Sets;
import com.google.common.collect.TreeMultimap;
import cpw.mods.fml.common.LoaderState.ModState;
import cpw.mods.fml.common.ModContainer.Disableable;
import cpw.mods.fml.common.discovery.ModDiscoverer;
import cpw.mods.fml.common.event.FMLInterModComms;
import cpw.mods.fml.common.event.FMLLoadEvent;
import cpw.mods.fml.common.event.FMLMissingMappingsEvent;
import cpw.mods.fml.common.event.FMLMissingMappingsEvent.MissingMapping;
import cpw.mods.fml.common.event.FMLModIdMappingEvent;
import cpw.mods.fml.common.functions.ArtifactVersionNameFunction;
import cpw.mods.fml.common.functions.ModIdFunction;
import cpw.mods.fml.common.registry.GameData;
import cpw.mods.fml.common.registry.GameRegistry.Type;
import cpw.mods.fml.common.toposort.ModSorter;
import cpw.mods.fml.common.toposort.ModSortingException;
import cpw.mods.fml.common.toposort.ModSortingException.SortingExceptionData;
import cpw.mods.fml.common.toposort.TopologicalSort;
import cpw.mods.fml.common.versioning.ArtifactVersion;
import cpw.mods.fml.common.versioning.VersionParser;
import cpw.mods.fml.relauncher.Side;
/**
* The loader class performs the actual loading of the mod code from disk.
*
* <p>
* There are several {@link LoaderState}s to mod loading, triggered in two
* different stages from the FML handler code's hooks into the minecraft code.
* </p>
*
* <ol>
 * <li>LOADING. Scanning the filesystem for mod containers to load (zips, jars,
 * directories), adding them to the {@link #modClassLoader}, scanning the loaded
 * containers for mod classes to load, and registering them appropriately.</li>
* <li>PREINIT. The mod classes are configured, they are sorted into a load
* order, and instances of the mods are constructed.</li>
* <li>INIT. The mod instances are initialized. For BaseMod mods, this involves
* calling the load method.</li>
* <li>POSTINIT. The mod instances are post initialized. For BaseMod mods this
* involves calling the modsLoaded method.</li>
* <li>UP. The Loader is complete</li>
* <li>ERRORED. The loader encountered an error during the LOADING phase and
* dropped to this state instead. It will not complete loading from this state,
* but it attempts to continue loading before abandoning and giving a fatal
* error.</li>
* </ol>
*
* Phase 1 code triggers the LOADING and PREINIT states. Phase 2 code triggers
* the INIT and POSTINIT states.
*
* @author cpw
*
*/
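/*
 * Call-order sketch (illustrative; the actual hook wiring lives in the FML
 * handler code and is not shown here):
 *
 *   Loader loader = Loader.instance();
 *   loader.loadMods();            // LOADING -> CONSTRUCTING -> PREINITIALIZATION
 *   loader.preinitializeMods();   // PREINITIALIZATION -> INITIALIZATION
 *   loader.initializeMods();      // INITIALIZATION -> POSTINITIALIZATION -> AVAILABLE
 */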
public class Loader
{
private static final Splitter DEPENDENCYPARTSPLITTER = Splitter.on(":").omitEmptyStrings().trimResults();
private static final Splitter DEPENDENCYSPLITTER = Splitter.on(";").omitEmptyStrings().trimResults();
/**
* The singleton instance
*/
private static Loader instance;
/**
* Build information for tracking purposes.
*/
private static String major;
private static String minor;
private static String rev;
private static String build;
private static String mccversion;
private static String mcpversion;
/**
* The class loader we load the mods into.
*/
private ModClassLoader modClassLoader;
/**
* The sorted list of mods.
*/
private List<ModContainer> mods;
/**
 * A map of mods indexed by mod id
*/
private Map<String, ModContainer> namedMods;
/**
* A reverse dependency graph for mods
*/
private ListMultimap<String, String> reverseDependencies;
/**
* The canonical configuration directory
*/
private File canonicalConfigDir;
private File canonicalModsDir;
private LoadController modController;
private MinecraftDummyContainer minecraft;
private MCPDummyContainer mcp;
private static File minecraftDir;
private static List<String> injectedContainers;
private ImmutableMap<String, String> fmlBrandingProperties;
private File forcedModFile;
private ModDiscoverer discoverer;
public static Loader instance()
{
if (instance == null)
{
instance = new Loader();
}
return instance;
}
@SuppressWarnings("unchecked")
public static void injectData(Object... data)
{
major = (String) data[0];
minor = (String) data[1];
rev = (String) data[2];
build = (String) data[3];
mccversion = (String) data[4];
mcpversion = (String) data[5];
minecraftDir = (File) data[6];
injectedContainers = (List<String>)data[7];
}
private Loader()
{
modClassLoader = new ModClassLoader(getClass().getClassLoader());
String actualMCVersion = "1.7.2";
if (!mccversion.equals(actualMCVersion))
{
FMLLog.severe("This version of FML is built for Minecraft %s, we have detected Minecraft %s in your minecraft jar file", mccversion, actualMCVersion);
throw new LoaderException();
}
minecraft = new MinecraftDummyContainer(actualMCVersion);
mcp = new MCPDummyContainer(MetadataCollection.from(getClass().getResourceAsStream("/mcpmod.info"), "MCP").getMetadataForId("mcp", null));
}
/**
* Sort the mods into a sorted list, using dependency information from the
* containers. The sorting is performed using a {@link TopologicalSort}
* based on the pre- and post- dependency information provided by the mods.
*/
private void sortModList()
{
FMLLog.finer("Verifying mod requirements are satisfied");
try
{
BiMap<String, ArtifactVersion> modVersions = HashBiMap.create();
for (ModContainer mod : getActiveModList())
{
modVersions.put(mod.getModId(), mod.getProcessedVersion());
}
ArrayListMultimap<String, String> reqList = ArrayListMultimap.create();
for (ModContainer mod : getActiveModList())
{
if (!mod.acceptableMinecraftVersionRange().containsVersion(minecraft.getProcessedVersion()))
{
FMLLog.severe("The mod %s does not wish to run in Minecraft version %s. You will have to remove it to play.", mod.getModId(), getMCVersionString());
throw new WrongMinecraftVersionException(mod);
}
Map<String,ArtifactVersion> names = Maps.uniqueIndex(mod.getRequirements(), new ArtifactVersionNameFunction());
Set<ArtifactVersion> versionMissingMods = Sets.newHashSet();
Set<String> missingMods = Sets.difference(names.keySet(), modVersions.keySet());
if (!missingMods.isEmpty())
{
FMLLog.severe("The mod %s (%s) requires mods %s to be available", mod.getModId(), mod.getName(), missingMods);
for (String modid : missingMods)
{
versionMissingMods.add(names.get(modid));
}
throw new MissingModsException(versionMissingMods);
}
reqList.putAll(mod.getModId(), names.keySet());
ImmutableList<ArtifactVersion> allDeps = ImmutableList.<ArtifactVersion>builder().addAll(mod.getDependants()).addAll(mod.getDependencies()).build();
for (ArtifactVersion v : allDeps)
{
if (modVersions.containsKey(v.getLabel()))
{
if (!v.containsVersion(modVersions.get(v.getLabel())))
{
versionMissingMods.add(v);
}
}
}
if (!versionMissingMods.isEmpty())
{
FMLLog.severe("The mod %s (%s) requires mod versions %s to be available", mod.getModId(), mod.getName(), versionMissingMods);
throw new MissingModsException(versionMissingMods);
}
}
FMLLog.finer("All mod requirements are satisfied");
reverseDependencies = Multimaps.invertFrom(reqList, ArrayListMultimap.<String,String>create());
ModSorter sorter = new ModSorter(getActiveModList(), namedMods);
try
{
FMLLog.finer("Sorting mods into an ordered list");
List<ModContainer> sortedMods = sorter.sort();
// Reset active list to the sorted list
modController.getActiveModList().clear();
modController.getActiveModList().addAll(sortedMods);
// And inject the sorted list into the overall list
mods.removeAll(sortedMods);
sortedMods.addAll(mods);
mods = sortedMods;
FMLLog.finer("Mod sorting completed successfully");
}
catch (ModSortingException sortException)
{
FMLLog.severe("A dependency cycle was detected in the input mod set so an ordering cannot be determined");
SortingExceptionData<ModContainer> exceptionData = sortException.getExceptionData();
FMLLog.severe("The first mod in the cycle is %s", exceptionData.getFirstBadNode());
FMLLog.severe("The mod cycle involves");
for (ModContainer mc : exceptionData.getVisitedNodes())
{
FMLLog.severe("%s : before: %s, after: %s", mc.toString(), mc.getDependants(), mc.getDependencies());
}
FMLLog.log(Level.ERROR, sortException, "The full error");
throw sortException;
}
}
finally
{
FMLLog.fine("Mod sorting data");
int unprintedMods = mods.size();
for (ModContainer mod : getActiveModList())
{
if (!mod.isImmutable())
{
FMLLog.fine("\t%s(%s:%s): %s (%s)", mod.getModId(), mod.getName(), mod.getVersion(), mod.getSource().getName(), mod.getSortingRules());
unprintedMods--;
}
}
if (unprintedMods == mods.size())
{
FMLLog.fine("No user mods found to sort");
}
}
}
/**
* The primary loading code
*
*
* The found resources are first loaded into the {@link #modClassLoader}
* (always) then scanned for class resources matching the specification
* above.
*
* If they provide the {@link Mod} annotation, they will be loaded as
* "FML mods"
*
* Finally, if they are successfully loaded as classes, they are then added
* to the available mod list.
*/
private ModDiscoverer identifyMods()
{
FMLLog.fine("Building injected Mod Containers %s", injectedContainers);
// Add in the MCP mod container
mods.add(new InjectedModContainer(mcp,new File("minecraft.jar")));
for (String cont : injectedContainers)
{
ModContainer mc;
try
{
mc = (ModContainer) Class.forName(cont,true,modClassLoader).newInstance();
}
catch (Exception e)
{
FMLLog.log(Level.ERROR, e, "A problem occured instantiating the injected mod container %s", cont);
throw new LoaderException(e);
}
mods.add(new InjectedModContainer(mc,mc.getSource()));
}
ModDiscoverer discoverer = new ModDiscoverer();
FMLLog.fine("Attempting to load mods contained in the minecraft jar file and associated classes");
discoverer.findClasspathMods(modClassLoader);
FMLLog.fine("Minecraft jar mods loaded successfully");
FMLLog.info("Searching %s for mods", canonicalModsDir.getAbsolutePath());
discoverer.findModDirMods(canonicalModsDir);
File versionSpecificModsDir = new File(canonicalModsDir,mccversion);
if (versionSpecificModsDir.isDirectory())
{
FMLLog.info("Also searching %s for mods", versionSpecificModsDir);
discoverer.findModDirMods(versionSpecificModsDir);
}
mods.addAll(discoverer.identifyMods());
identifyDuplicates(mods);
namedMods = Maps.uniqueIndex(mods, new ModIdFunction());
FMLLog.info("Forge Mod Loader has identified %d mod%s to load", mods.size(), mods.size() != 1 ? "s" : "");
return discoverer;
}
private class ModIdComparator implements Comparator<ModContainer>
{
@Override
public int compare(ModContainer o1, ModContainer o2)
{
return o1.getModId().compareTo(o2.getModId());
}
}
private void identifyDuplicates(List<ModContainer> mods)
{
TreeMultimap<ModContainer, File> dupsearch = TreeMultimap.create(new ModIdComparator(), Ordering.arbitrary());
for (ModContainer mc : mods)
{
if (mc.getSource() != null)
{
dupsearch.put(mc, mc.getSource());
}
}
ImmutableMultiset<ModContainer> duplist = Multisets.copyHighestCountFirst(dupsearch.keys());
SetMultimap<ModContainer, File> dupes = LinkedHashMultimap.create();
for (Entry<ModContainer> e : duplist.entrySet())
{
if (e.getCount() > 1)
{
FMLLog.severe("Found a duplicate mod %s at %s", e.getElement().getModId(), dupsearch.get(e.getElement()));
dupes.putAll(e.getElement(),dupsearch.get(e.getElement()));
}
}
if (!dupes.isEmpty())
{
throw new DuplicateModsFoundException(dupes);
}
}
private void initializeLoader()
{
File modsDir = new File(minecraftDir, "mods");
File configDir = new File(minecraftDir, "config");
String canonicalModsPath;
String canonicalConfigPath;
try
{
canonicalModsPath = modsDir.getCanonicalPath();
canonicalConfigPath = configDir.getCanonicalPath();
canonicalConfigDir = configDir.getCanonicalFile();
canonicalModsDir = modsDir.getCanonicalFile();
}
catch (IOException ioe)
{
FMLLog.log(Level.ERROR, ioe, "Failed to resolve loader directories: mods : %s ; config %s", canonicalModsDir.getAbsolutePath(),
configDir.getAbsolutePath());
throw new LoaderException(ioe);
}
if (!canonicalModsDir.exists())
{
FMLLog.info("No mod directory found, creating one: %s", canonicalModsPath);
boolean dirMade = canonicalModsDir.mkdir();
if (!dirMade)
{
FMLLog.severe("Unable to create the mod directory %s", canonicalModsPath);
throw new LoaderException();
}
FMLLog.info("Mod directory created successfully");
}
if (!canonicalConfigDir.exists())
{
FMLLog.fine("No config directory found, creating one: %s", canonicalConfigPath);
boolean dirMade = canonicalConfigDir.mkdir();
if (!dirMade)
{
FMLLog.severe("Unable to create the config directory %s", canonicalConfigPath);
throw new LoaderException();
}
FMLLog.info("Config directory created successfully");
}
if (!canonicalModsDir.isDirectory())
{
FMLLog.severe("Attempting to load mods from %s, which is not a directory", canonicalModsPath);
throw new LoaderException();
}
if (!configDir.isDirectory())
{
FMLLog.severe("Attempting to load configuration from %s, which is not a directory", canonicalConfigPath);
throw new LoaderException();
}
}
public List<ModContainer> getModList()
{
return instance().mods != null ? ImmutableList.copyOf(instance().mods) : ImmutableList.<ModContainer>of();
}
/**
* Called from the hook to start mod loading. We trigger the
 * {@link #identifyMods()} and Constructing, Preinitialization, and Initialization phases here. Finally,
 * the mod list is frozen completely and is considered immutable from then on.
*/
public void loadMods()
{
initializeLoader();
mods = Lists.newArrayList();
namedMods = Maps.newHashMap();
modController = new LoadController(this);
modController.transition(LoaderState.LOADING, false);
discoverer = identifyMods();
ModAPIManager.INSTANCE.manageAPI(modClassLoader, discoverer);
disableRequestedMods();
modController.distributeStateMessage(FMLLoadEvent.class);
sortModList();
ModAPIManager.INSTANCE.cleanupAPIContainers(modController.getActiveModList());
ModAPIManager.INSTANCE.cleanupAPIContainers(mods);
mods = ImmutableList.copyOf(mods);
for (File nonMod : discoverer.getNonModLibs())
{
if (nonMod.isFile())
{
FMLLog.info("FML has found a non-mod file %s in your mods directory. It will now be injected into your classpath. This could severe stability issues, it should be removed if possible.", nonMod.getName());
try
{
modClassLoader.addFile(nonMod);
}
catch (MalformedURLException e)
{
FMLLog.log(Level.ERROR, e, "Encountered a weird problem with non-mod file injection : %s", nonMod.getName());
}
}
}
modController.transition(LoaderState.CONSTRUCTING, false);
modController.distributeStateMessage(LoaderState.CONSTRUCTING, modClassLoader, discoverer.getASMTable(), reverseDependencies);
FMLLog.fine("Mod signature data");
for (ModContainer mod : getActiveModList())
{
FMLLog.fine("\t%s(%s:%s): %s (%s)", mod.getModId(), mod.getName(), mod.getVersion(), mod.getSource().getName(), CertificateHelper.getFingerprint(mod.getSigningCertificate()));
}
if (getActiveModList().isEmpty())
{
FMLLog.fine("No user mod signature data found");
}
modController.transition(LoaderState.PREINITIALIZATION, false);
}
public void preinitializeMods()
{
if (!modController.isInState(LoaderState.PREINITIALIZATION))
{
FMLLog.warning("There were errors previously. Not beginning mod initialization phase");
return;
}
modController.distributeStateMessage(LoaderState.PREINITIALIZATION, discoverer.getASMTable(), canonicalConfigDir);
modController.transition(LoaderState.INITIALIZATION, false);
}
private void disableRequestedMods()
{
String forcedModList = System.getProperty("fml.modStates", "");
FMLLog.finer("Received a system property request \'%s\'",forcedModList);
Map<String, String> sysPropertyStateList = Splitter.on(CharMatcher.anyOf(";:"))
.omitEmptyStrings().trimResults().withKeyValueSeparator("=")
.split(forcedModList);
FMLLog.finer("System property request managing the state of %d mods", sysPropertyStateList.size());
Map<String, String> modStates = Maps.newHashMap();
forcedModFile = new File(canonicalConfigDir, "fmlModState.properties");
Properties forcedModListProperties = new Properties();
if (forcedModFile.exists() && forcedModFile.isFile())
{
FMLLog.finer("Found a mod state file %s", forcedModFile.getName());
try
{
forcedModListProperties.load(new FileReader(forcedModFile));
FMLLog.finer("Loaded states for %d mods from file", forcedModListProperties.size());
}
catch (Exception e)
{
FMLLog.log(Level.INFO, e, "An error occurred reading the fmlModState.properties file");
}
}
modStates.putAll(Maps.fromProperties(forcedModListProperties));
modStates.putAll(sysPropertyStateList);
FMLLog.fine("After merging, found state information for %d mods", modStates.size());
Map<String, Boolean> isEnabled = Maps.transformValues(modStates, new Function<String, Boolean>()
{
@Override
public Boolean apply(String input)
{
return Boolean.parseBoolean(input);
}
});
for (Map.Entry<String, Boolean> entry : isEnabled.entrySet())
{
if (namedMods.containsKey(entry.getKey()))
{
FMLLog.info("Setting mod %s to enabled state %b", entry.getKey(), entry.getValue());
namedMods.get(entry.getKey()).setEnabledState(entry.getValue());
}
}
}
/**
* Query if we know of a mod named modname
*
* @param modname
* @return If the mod is loaded
*/
public static boolean isModLoaded(String modname)
{
return instance().namedMods.containsKey(modname) && instance().modController.getModState(instance.namedMods.get(modname))!=ModState.DISABLED;
}
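// Usage sketch (illustrative; "examplemod" is a placeholder mod id):
//   if (Loader.isModLoaded("examplemod")) { /* enable the optional integration */ }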
public File getConfigDir()
{
return canonicalConfigDir;
}
public String getCrashInformation()
{
// Handle being called before we've begun setup
if (modController == null)
{
return "";
}
StringBuilder ret = new StringBuilder();
List<String> branding = FMLCommonHandler.instance().getBrandings(false);
Joiner.on(' ').skipNulls().appendTo(ret, branding);
if (modController != null)
{
modController.printModStates(ret);
}
return ret.toString();
}
public String getFMLVersionString()
{
return String.format("%s.%s.%s.%s", major, minor, rev, build);
}
public ClassLoader getModClassLoader()
{
return modClassLoader;
}
public void computeDependencies(String dependencyString, Set<ArtifactVersion> requirements, List<ArtifactVersion> dependencies, List<ArtifactVersion> dependants)
{
if (dependencyString == null || dependencyString.length() == 0)
{
return;
}
boolean parseFailure = false;
for (String dep : DEPENDENCYSPLITTER.split(dependencyString))
{
List<String> depparts = Lists.newArrayList(DEPENDENCYPARTSPLITTER.split(dep));
// Need two parts to the string
if (depparts.size() != 2)
{
parseFailure = true;
continue;
}
String instruction = depparts.get(0);
String target = depparts.get(1);
boolean targetIsAll = target.startsWith("*");
// Cannot have an "all" relationship with anything except pure *
if (targetIsAll && target.length() > 1)
{
parseFailure = true;
continue;
}
// If this is a required element, add it to the required list
if ("required-before".equals(instruction) || "required-after".equals(instruction))
{
// You can't require everything
if (!targetIsAll)
{
requirements.add(VersionParser.parseVersionReference(target));
}
else
{
parseFailure = true;
continue;
}
}
// You cannot have a versioned dependency on everything
if (targetIsAll && target.indexOf('@') > -1)
{
parseFailure = true;
continue;
}
// before elements are things we are loaded before (so they are our dependants)
if ("required-before".equals(instruction) || "before".equals(instruction))
{
dependants.add(VersionParser.parseVersionReference(target));
}
// after elements are things that load before we do (so they are our dependencies)
else if ("required-after".equals(instruction) || "after".equals(instruction))
{
dependencies.add(VersionParser.parseVersionReference(target));
}
else
{
parseFailure = true;
}
}
if (parseFailure)
{
FMLLog.log(Level.WARN, "Unable to parse dependency string %s", dependencyString);
throw new LoaderException();
}
}
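// Example dependency string accepted by computeDependencies (ids and versions are placeholders):
//   "required-after:Forge@[10.12,);after:somemod;before:othermod"
// Entries are separated by ';', instruction and target by ':', and an optional
// '@<version range>' restricts the acceptable versions of the target mod.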
public Map<String,ModContainer> getIndexedModList()
{
return ImmutableMap.copyOf(namedMods);
}
public void initializeMods()
{
// Mod controller should be in the initialization state here
modController.distributeStateMessage(LoaderState.INITIALIZATION);
modController.transition(LoaderState.POSTINITIALIZATION, false);
modController.distributeStateMessage(FMLInterModComms.IMCEvent.class);
modController.distributeStateMessage(LoaderState.POSTINITIALIZATION);
modController.transition(LoaderState.AVAILABLE, false);
modController.distributeStateMessage(LoaderState.AVAILABLE);
GameData.freezeData();
// Dump the custom registry data map, if necessary
GameData.dumpRegistry(minecraftDir);
FMLLog.info("Forge Mod Loader has successfully loaded %d mod%s", mods.size(), mods.size() == 1 ? "" : "s");
}
public ICrashCallable getCallableCrashInformation()
{
return new ICrashCallable() {
@Override
public String call() throws Exception
{
return getCrashInformation();
}
@Override
public String getLabel()
{
return "FML";
}
};
}
public List<ModContainer> getActiveModList()
{
return modController != null ? modController.getActiveModList() : ImmutableList.<ModContainer>of();
}
public ModState getModState(ModContainer selectedMod)
{
return modController.getModState(selectedMod);
}
public String getMCVersionString()
{
return "Minecraft " + mccversion;
}
public boolean serverStarting(Object server)
{
try
{
modController.distributeStateMessage(LoaderState.SERVER_STARTING, server);
modController.transition(LoaderState.SERVER_STARTING, false);
}
catch (Throwable t)
{
FMLLog.log(Level.ERROR, t, "A fatal exception occurred during the server starting event");
return false;
}
return true;
}
public void serverStarted()
{
modController.distributeStateMessage(LoaderState.SERVER_STARTED);
modController.transition(LoaderState.SERVER_STARTED, false);
}
public void serverStopping()
{
modController.distributeStateMessage(LoaderState.SERVER_STOPPING);
modController.transition(LoaderState.SERVER_STOPPING, false);
}
public BiMap<ModContainer, Object> getModObjectList()
{
return modController.getModObjectList();
}
public BiMap<Object, ModContainer> getReversedModObjectList()
{
return getModObjectList().inverse();
}
public ModContainer activeModContainer()
{
return modController != null ? modController.activeContainer() : null;
}
public boolean isInState(LoaderState state)
{
return modController.isInState(state);
}
public MinecraftDummyContainer getMinecraftModContainer()
{
return minecraft;
}
public boolean hasReachedState(LoaderState state)
{
return modController != null ? modController.hasReachedState(state) : false;
}
public String getMCPVersionString()
{
return String.format("MCP v%s", mcpversion);
}
public void serverStopped()
{
GameData.revertToFrozen();
modController.distributeStateMessage(LoaderState.SERVER_STOPPED);
modController.transition(LoaderState.SERVER_STOPPED, true);
modController.transition(LoaderState.AVAILABLE, true);
}
public boolean serverAboutToStart(Object server)
{
try
{
modController.distributeStateMessage(LoaderState.SERVER_ABOUT_TO_START, server);
modController.transition(LoaderState.SERVER_ABOUT_TO_START, false);
}
catch (Throwable t)
{
FMLLog.log(Level.ERROR, t, "A fatal exception occurred during the server about to start event");
return false;
}
return true;
}
public Map<String,String> getFMLBrandingProperties()
{
if (fmlBrandingProperties == null)
{
Properties loaded = new Properties();
try
{
loaded.load(getClass().getClassLoader().getResourceAsStream("fmlbranding.properties"));
}
catch (Exception e)
{
// File not found - ignore
}
fmlBrandingProperties = Maps.fromProperties(loaded);
}
return fmlBrandingProperties;
}
public Map<String,String> getCustomModProperties(String modId)
{
return getIndexedModList().get(modId).getCustomModProperties();
}
boolean checkRemoteModList(Map<String, String> modList, Side side)
{
Set<String> remoteModIds = modList.keySet();
Set<String> localModIds = namedMods.keySet();
Set<String> difference = Sets.newLinkedHashSet(Sets.difference(localModIds, remoteModIds));
for (Iterator<String> iterator = difference.iterator(); iterator.hasNext();)
{
String missingRemotely = iterator.next();
ModState modState = modController.getModState(namedMods.get(missingRemotely));
if (modState == ModState.DISABLED)
{
iterator.remove();
}
}
FMLLog.info("Attempting connection with missing mods %s at %s", difference, side);
return true;
}
/**
* Fire a FMLMissingMappingsEvent to let mods determine how blocks/items defined in the world
* save, but missing from the runtime, are to be handled.
*
* @param missing Map containing missing names with their associated id, blocks need to come before items for remapping.
* @param isLocalWorld Whether this is executing for a world load (local/server) or a client.
* @param gameData GameData instance where the new map's config is to be loaded into.
* @return List with the mapping results.
*/
public List<String> fireMissingMappingEvent(LinkedHashMap<String, Integer> missing, boolean isLocalWorld, GameData gameData, Map<String, Integer[]> remaps)
{
if (missing.isEmpty()) // nothing to do
{
return ImmutableList.of();
}
FMLLog.fine("There are %d mappings missing - attempting a mod remap", missing.size());
ArrayListMultimap<String, MissingMapping> missingMappings = ArrayListMultimap.create();
for (Map.Entry<String, Integer> mapping : missing.entrySet())
{
int id = mapping.getValue();
MissingMapping m = new MissingMapping(mapping.getKey(), id);
missingMappings.put(m.name.substring(0, m.name.indexOf(':')), m);
}
FMLMissingMappingsEvent missingEvent = new FMLMissingMappingsEvent(missingMappings);
modController.propogateStateMessage(missingEvent);
if (isLocalWorld) // local world, warn about entries still being set to the default action
{
boolean didWarn = false;
for (MissingMapping mapping : missingMappings.values())
{
if (mapping.getAction() == FMLMissingMappingsEvent.Action.DEFAULT)
{
if (!didWarn)
{
FMLLog.severe("There are unidentified mappings in this world - we are going to attempt to process anyway");
didWarn = true;
}
FMLLog.severe("Unidentified %s: %s, id %d", mapping.type == Type.BLOCK ? "block" : "item", mapping.name, mapping.id);
}
}
}
else // remote world, fail on entries with the default action
{
List<String> missedMapping = new ArrayList<String>();
for (MissingMapping mapping : missingMappings.values())
{
if (mapping.getAction() == FMLMissingMappingsEvent.Action.DEFAULT)
{
missedMapping.add(mapping.name);
}
}
if (!missedMapping.isEmpty())
{
return ImmutableList.copyOf(missedMapping);
}
}
return GameData.processIdRematches(missingMappings.values(), isLocalWorld, gameData, remaps);
}
public void fireRemapEvent(Map<String, Integer[]> remaps)
{
if (remaps.isEmpty())
{
FMLLog.finer("Skipping remap event - no remaps occured");
}
else
{
modController.propogateStateMessage(new FMLModIdMappingEvent(remaps));
}
}
public void runtimeDisableMod(String modId)
{
ModContainer mc = namedMods.get(modId);
Disableable disableable = mc.canBeDisabled();
if (disableable == Disableable.NEVER)
{
FMLLog.info("Cannot disable mod %s - it is never allowed to be disabled", modId);
return;
}
if (disableable == Disableable.DEPENDENCIES)
{
FMLLog.info("Cannot disable mod %s - there are dependent mods that require its presence", modId);
return;
}
if (disableable == Disableable.YES)
{
FMLLog.info("Runtime disabling mod %s", modId);
modController.disableMod(mc);
List<ModContainer> localmods = Lists.newArrayList(mods);
localmods.remove(mc);
mods = ImmutableList.copyOf(localmods);
}
try
{
Properties props = new Properties();
props.load(new FileReader(forcedModFile));
props.put(modId, "false");
props.store(new FileWriter(forcedModFile), null);
}
catch (Exception e)
{
FMLLog.log(Level.INFO, e, "An error occurred writing the fml mod states file, your disabled change won't persist");
}
}
}
|
package ru.atom;
import org.junit.Assert;
import org.junit.Test;
import ru.atom.geometry.Point;
import ru.atom.model.GameObject;
import ru.atom.model.GameSession;
import ru.atom.model.Movable;
import java.util.List;
public class GameModelTest {
@Test
public void gameIsCreated() {
GameSession gameSession = TestGameSessionCreator.createGameSession();
Assert.assertNotNull(gameSession);
}
@Test
public void gameObjectsAreInstantiated() {
GameSession gameSession = TestGameSessionCreator.createGameSession();
List<GameObject> gameObjects = gameSession.getGameObjects();
Assert.assertNotNull(gameObjects);
Assert.assertFalse(gameObjects.size() == 0);
}
/**
 * Checks that Movable GameObjects actually move.
 * Collisions are ignored.
*/
@Test
public void movement() {
GameSession gameSession = TestGameSessionCreator.createGameSession();
List<GameObject> gameObjects = gameSession.getGameObjects();
for (GameObject gameObject : gameObjects) {
if (gameObject instanceof Movable) {
Point firstPosition = ((Movable) gameObject).getPosition();
Point currentPosition = ((Movable) gameObject).move(Movable.Direction.UP, 1000);
Assert.assertTrue(currentPosition.getY() > firstPosition.getY());
currentPosition = ((Movable) gameObject).move(Movable.Direction.DOWN, 1000);
Assert.assertTrue(currentPosition.getY() == firstPosition.getY());
currentPosition = ((Movable) gameObject).move(Movable.Direction.RIGHT, 500);
Assert.assertTrue(currentPosition.getX() > firstPosition.getX());
currentPosition = ((Movable) gameObject).move(Movable.Direction.LEFT, 500);
Assert.assertTrue(currentPosition.getX() == firstPosition.getX());
currentPosition = ((Movable) gameObject).move(Movable.Direction.IDLE, 1000);
Assert.assertTrue(currentPosition.getX() == firstPosition.getX());
Assert.assertTrue(currentPosition.getY() == firstPosition.getY());
}
}
}
}
|
package openmods.liquids;
import java.util.*;
import net.minecraft.tileentity.TileEntity;
import net.minecraft.world.World;
import net.minecraftforge.common.ForgeDirection;
import net.minecraftforge.fluids.*;
import openmods.Log;
import openmods.OpenMods;
import openmods.integration.modules.BuildCraftPipes;
import openmods.sync.SyncableFlags;
import openmods.utils.BlockUtils;
import openmods.utils.Coord;
import com.google.common.collect.Lists;
public class GenericTank extends FluidTank {
private List<ForgeDirection> surroundingTanks = Lists.newArrayList();
private long lastUpdate = 0;
private final IFluidFilter filter;
public interface IFluidFilter {
public boolean canAcceptFluid(FluidStack stack);
}
private static final IFluidFilter NO_RESTRICTIONS = new IFluidFilter() {
@Override
public boolean canAcceptFluid(FluidStack stack) {
return true;
}
};
private static IFluidFilter filter(final FluidStack... acceptableFluids) {
if (acceptableFluids.length == 0) return NO_RESTRICTIONS;
return new IFluidFilter() {
@Override
public boolean canAcceptFluid(FluidStack stack) {
for (FluidStack acceptableFluid : acceptableFluids)
if (acceptableFluid.isFluidEqual(stack)) return true;
return false;
}
};
}
public GenericTank(int capacity, FluidStack... acceptableFluids) {
super(capacity);
this.filter = filter(acceptableFluids);
}
private static boolean isNeighbourTank(World world, Coord coord, ForgeDirection dir) {
TileEntity tile = BlockUtils.getTileInDirection(world, coord, dir);
return tile instanceof IFluidHandler;
}
private static Set<ForgeDirection> getSurroundingTanks(World world, Coord coord, SyncableFlags sides) {
EnumSet<ForgeDirection> result = EnumSet.noneOf(ForgeDirection.class);
if (sides == null) {
for (ForgeDirection dir : ForgeDirection.VALID_DIRECTIONS)
if (isNeighbourTank(world, coord, dir)) result.add(dir);
}
else
{
for (Integer s : sides.getActiveSlots()) {
ForgeDirection dir = ForgeDirection.getOrientation(s);
if (isNeighbourTank(world, coord, dir)) result.add(dir);
}
}
return result;
}
public FluidStack drain(FluidStack resource, boolean doDrain) {
if (resource == null || fluid == null || !fluid.isFluidEqual(resource)) return null; // only drain the fluid actually held
return drain(resource.amount, doDrain);
}
public int getSpace() {
return getCapacity() - getFluidAmount();
}
@Override
public int fill(FluidStack resource, boolean doFill) {
if (resource == null || !filter.canAcceptFluid(resource)) return 0;
return super.fill(resource, doFill);
}
private void periodicUpdateNeighbours(World world, Coord coord, SyncableFlags sides) {
long currentTime = OpenMods.proxy.getTicks(world);
if (currentTime - lastUpdate > 10) {
surroundingTanks = Lists.newArrayList(getSurroundingTanks(world, coord, sides));
lastUpdate = currentTime;
}
}
private static int tryFillNeighbour(FluidStack drainedFluid, ForgeDirection side, TileEntity otherTank) {
final FluidStack toFill = drainedFluid.copy();
final ForgeDirection fillSide = side.getOpposite();
if (otherTank instanceof IFluidHandler) return ((IFluidHandler)otherTank).fill(fillSide, toFill, true);
else return BuildCraftPipes.access().tryAcceptIntoPipe(otherTank, toFill, fillSide);
}
public void distributeToSides(int amountPerTick, World world, Coord coord, SyncableFlags sides) {
if (world == null) return;
if (getFluidAmount() <= 0) return;
periodicUpdateNeighbours(world, coord, sides);
if (surroundingTanks.isEmpty()) return;
FluidStack drainedFluid = drain(amountPerTick, false);
if (drainedFluid != null && drainedFluid.amount > 0) {
int startingAmount = drainedFluid.amount;
Collections.shuffle(surroundingTanks);
for (ForgeDirection side : surroundingTanks) {
if (drainedFluid.amount <= 0) break;
TileEntity otherTank = BlockUtils.getTileInDirection(world, coord, side);
if (otherTank != null) drainedFluid.amount -= tryFillNeighbour(drainedFluid, side, otherTank);
}
// return any remainder
int distributed = startingAmount - drainedFluid.amount;
if (distributed > 0) drain(distributed, true);
}
}
public void fillFromSides(int maxAmount, World world, Coord coord) {
fillFromSides(maxAmount, world, coord, null);
}
public void fillFromSides(int maxAmount, World world, Coord coord, SyncableFlags sides) {
if (world == null) return;
int toDrain = Math.min(maxAmount, getSpace());
if (toDrain <= 0) return;
periodicUpdateNeighbours(world, coord, sides);
if (surroundingTanks.isEmpty()) return;
Collections.shuffle(surroundingTanks);
MAIN: for (ForgeDirection side : surroundingTanks) {
if (toDrain <= 0) break;
TileEntity otherTank = BlockUtils.getTileInDirection(world, coord, side);
if (otherTank instanceof IFluidHandler) {
final ForgeDirection drainSide = side.getOpposite();
final IFluidHandler handler = (IFluidHandler)otherTank;
final FluidTankInfo[] infos = handler.getTankInfo(drainSide);
if (infos == null) {
Log.fine("Tank %s @ (%d,%d,%d) returned null tank info. Nasty.",
otherTank.getClass(), otherTank.xCoord, otherTank.yCoord, otherTank.zCoord);
continue;
}
for (FluidTankInfo info : infos) {
if (info.fluid != null && filter.canAcceptFluid(info.fluid)) {
FluidStack stack = info.fluid.copy();
stack.amount = toDrain;
FluidStack drained = handler.drain(drainSide, stack, true);
if (drained != null) {
fill(drained, true);
toDrain -= drained.amount;
}
if (toDrain <= 0) break MAIN;
}
}
}
}
}
}
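/*
 * Usage sketch (illustrative; the owning tile entity, its fields and the Coord
 * constructor used here are assumptions, not taken from this class):
 *
 *   private final GenericTank tank = new GenericTank(10000); // no fluid restrictions
 *
 *   // called from the tile entity's update tick:
 *   tank.fillFromSides(100, worldObj, new Coord(xCoord, yCoord, zCoord));
 *   tank.distributeToSides(100, worldObj, new Coord(xCoord, yCoord, zCoord), null);
 */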
|
package creator;
import java.lang.reflect.Constructor;
import java.lang.reflect.Field;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.Type;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import org.json.JSONObject;
import age.api.SkillFile;
import de.ust.skill.common.java.api.Access;
import de.ust.skill.common.java.internal.FieldDeclaration;
import de.ust.skill.common.java.internal.SkillObject;
public class SkillObjectCreator {
private static final String FIELD_DECLARATION_CLASS_NAME = "de.ust.skill.common.java.api.FieldDeclaration";
public static void main(String[] args) {
String className = "age.Age";
String fieldToSet = "age";
String fieldTypeToSet = "long";
Long valueToSet = 255L;
Map<String, Object> values = new HashMap<>();
Map<String, String> fieldTypes = new HashMap<>();
values.put(fieldToSet, valueToSet);
fieldTypes.put(fieldToSet, fieldTypeToSet);
try {
SkillObject obj = instantiateSkillObject(className, values, fieldTypes);
System.out.println(obj.prettyString());
} catch (ReflectiveOperationException | SecurityException | IllegalArgumentException e) {
// ReflectiveOperationException covers ClassNotFoundException, NoSuchMethodException,
// InstantiationException, IllegalAccessException and InvocationTargetException
e.printStackTrace();
}
}
/**
* Initialise a SkillObject based on a JSON representation of it and its data
* @param sf SKilL file from which type and field definitions are loaded
* @param obj JSON representation of the object which should be initialised
* @return the initialised SkillObject
*/
public static SkillObject initSkillObject(SkillFile sf, JSONObject obj) {
Map<String, Access<?>> types = new HashMap<>();
Map<String, HashMap<String, FieldDeclaration<?, ?>>> typeFieldMapping = new HashMap<>();
//Iterate over all SkilL types present in the given SKilL file
for (Access<?> currentType : sf.allTypes()) {
types.put(currentType.name(), currentType);
//Save all fields defined in this type into a map
HashMap<String, FieldDeclaration<?, ?>> fieldMapping = new HashMap<String, FieldDeclaration<?, ?>>();
Iterator<? extends de.ust.skill.common.java.api.FieldDeclaration<?>> iter = currentType.fields();
while (iter.hasNext()) {
FieldDeclaration<?, ?> fieldDeclaration = (FieldDeclaration<?, ?>) iter.next();
fieldMapping.put(fieldDeclaration.name(), fieldDeclaration);
}
typeFieldMapping.put(currentType.name(), fieldMapping);
}
SkillObject targetObj = types.get(obj.getString("type")).make();
targetObj.skillName();
//TODO Iterate over all fields in JSON & set those fields
//TODO Write object to file
sf.close();
return targetObj;
}
/**
* Create an empty SkillObject from a fully specified class name
*
* @param className
* fully qualified class name incl. package identifier
* @return empty SkillObject of the specified class
*/
public static SkillObject createSkillObject(String className)
throws ClassNotFoundException, NoSuchMethodException, SecurityException, InstantiationException,
IllegalAccessException, IllegalArgumentException, InvocationTargetException {
Class<?> refClass = Class.forName(className);
Constructor<?> refConstructor = refClass.getConstructor();
SkillObject refVar = (SkillObject) refConstructor.newInstance();
return refVar;
}
/**
* Create and fill a SkillObject with the provided values
*
* @param className
* fully specified class name incl. package identifier of the
* object to be created
* @param values
* mapping of field names to values
* @param fieldTypes
* mapping of field names to the corresponding types of values
* @return a SkillObject with the provided values as attributes
*/
public static SkillObject instantiateSkillObject(String className, Map<String, Object> values,
Map<String, String> fieldTypes) throws ClassNotFoundException, NoSuchMethodException, SecurityException,
InstantiationException, IllegalAccessException, IllegalArgumentException, InvocationTargetException {
SkillObject obj = createSkillObject(className);
Map<String, Type> fieldMapping = getFieldMapping(className);
for (String key : fieldMapping.keySet()) {
String type = fieldMapping.get(key).getTypeName();
System.out.println("Present field: " + key + "(Type: " + type + ")");
if (values.containsKey(key)) {
System.out.println("Setting field '" + key + "' to be " + values.get(key));
reflectiveSetValue(obj, values.get(key), key, fieldTypes.get(key));
}
}
return obj;
}
/**
* Set the specified field of a SkillObject to a given value
*
* @param obj
* the SkillObject for which the field is to be set
* @param value
* the value to set the field of the SkillObject to
* @param fieldName
* the name of the field to be set
* @param fieldType
* fully qualified class name of the field to be set
* @return the provided SkillObject with the new value set
*/
public static SkillObject reflectiveSetValue(SkillObject obj, Object value, String fieldName, String fieldType) {
try {
Method setterMethod = obj.getClass().getMethod(getSetterName(fieldName), getAptClass(fieldType));
System.out.println("Found method " + setterMethod.getName());
setterMethod.invoke(obj, value);
} catch (SecurityException e) {
e.printStackTrace();
} catch (NoSuchMethodException e) {
e.printStackTrace();
} catch (IllegalAccessException e) {
e.printStackTrace();
} catch (IllegalArgumentException e) {
e.printStackTrace();
} catch (InvocationTargetException e) {
e.printStackTrace();
} catch (ClassNotFoundException e) {
e.printStackTrace();
}
return obj;
}
/**
* Get a Class object from a fully qualified class name incl. package
* identifier
*
* @param type
* fully qualified class name
* @return Class object for the provided class name
* @throws ClassNotFoundException
*/
private static Class<?> getAptClass(String type) throws ClassNotFoundException {
if (!SKilLType.isPrimitive(type)) {
return Class.forName(type);
} else {
switch (type) {
case "byte":
return byte.class;
case "short":
return short.class;
case "int":
return int.class;
case "long":
return long.class;
case "float":
return float.class;
case "double":
return double.class;
case "boolean":
return boolean.class;
case "char":
return char.class;
default:
return null;
}
}
}
/**
* Parse a value encoded in a string to the specified type
*
* @param type
* @param valueString
* @return instance of the actual class of the provided value
*/
public static Object valueOf(String type, String valueString) {
switch (type) {
case "byte":
return Byte.valueOf(valueString);
case "short":
return Short.valueOf(valueString);
case "int":
return Integer.valueOf(valueString);
case "long":
return Long.valueOf(valueString);
case "float":
return Float.valueOf(valueString);
case "double":
return Double.valueOf(valueString);
case "boolean":
return Boolean.valueOf(valueString);
case "char":
return Character.valueOf(valueString.charAt(0));
case "java.lang.String":
return valueString;
default:
return null;
}
}
/**
* Return the method name of the setter method responsible for the field
* with the name 'fieldName'.
*
* @param fieldName
* the name of the field for which to get the name of the setter
* method
* @return the name of the setter method
*/
public static String getSetterName(String fieldName) {
return "set" + fieldName.substring(0, 1).toUpperCase() + fieldName.substring(1);
}
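// Worked examples (illustrative): getSetterName("age") returns "setAge",
// and valueOf("long", "255") returns the Long value 255.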
/**
* Create a mapping of field names and their corresponding types of the
* given class.
*
* @param className
* the name of the class for which to generate the mapping
* @return a mapping of field names to Type objects
*/
public static Map<String, Type> getFieldMapping(String className) {
Map<String, Type> fieldTypeMapping;
try {
Class<?> cls = Class.forName(className);
Field[] declaredFields = cls.getDeclaredFields();
fieldTypeMapping = new HashMap<>(declaredFields.length);
for (Field field : declaredFields) {
fieldTypeMapping.put(field.getName(), field.getGenericType());
}
return fieldTypeMapping;
} catch (ClassNotFoundException e) {
e.printStackTrace();
}
return null;
}
/**
* Create a mapping of field names and their corresponding types of the
* given class.
*
* @param cls
* the class for which to generate the mapping
* @return a mapping of field names to Type objects
*/
public static Map<String, Type> getFieldMapping(Class<?> cls) {
Map<String, Type> fieldTypeMapping;
Field[] declaredFields = cls.getDeclaredFields();
fieldTypeMapping = new HashMap<>(declaredFields.length);
for (Field field : declaredFields) {
fieldTypeMapping.put(field.getName(), field.getGenericType());
}
return fieldTypeMapping;
}
}
|
package org.I0Itec.zkclient;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.OutputStream;
import java.io.Serializable;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.Map.Entry;
import java.util.concurrent.Callable;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CopyOnWriteArraySet;
import java.util.concurrent.TimeUnit;
import org.I0Itec.zkclient.ZkEventThread.ZkEvent;
import org.I0Itec.zkclient.exception.ZkBadVersionException;
import org.I0Itec.zkclient.exception.ZkException;
import org.I0Itec.zkclient.exception.ZkInterruptedException;
import org.I0Itec.zkclient.exception.ZkMarshallingError;
import org.I0Itec.zkclient.exception.ZkNoNodeException;
import org.I0Itec.zkclient.exception.ZkTimeoutException;
import org.apache.log4j.Logger;
import org.apache.zookeeper.CreateMode;
import org.apache.zookeeper.KeeperException;
import org.apache.zookeeper.WatchedEvent;
import org.apache.zookeeper.Watcher;
import org.apache.zookeeper.KeeperException.ConnectionLossException;
import org.apache.zookeeper.KeeperException.SessionExpiredException;
import org.apache.zookeeper.Watcher.Event.EventType;
import org.apache.zookeeper.Watcher.Event.KeeperState;
import org.apache.zookeeper.ZooKeeper.States;
import org.apache.zookeeper.data.Stat;
/**
* Abstracts the interaction with zookeeper and allows permanent (not just one time) watches on nodes in ZooKeeper
*/
public class ZkClient implements Watcher {
private final static Logger LOG = Logger.getLogger(ZkClient.class);
private IZkConnection _connection;
private final Map<String, Set<IZkChildListener>> _childListener = new ConcurrentHashMap<String, Set<IZkChildListener>>();
private final ConcurrentHashMap<String, Set<IZkDataListener>> _dataListener = new ConcurrentHashMap<String, Set<IZkDataListener>>();
private final Set<IZkStateListener> _stateListener = new CopyOnWriteArraySet<IZkStateListener>();
private KeeperState _currentState;
private final ZkLock _zkEventLock = new ZkLock();
private boolean _shutdownTriggered;
private ZkEventThread _eventThread;
// TODO PVo remove this later
private Thread _zookeeperEventThread;
public ZkClient(IZkConnection connection) {
this(connection, Integer.MAX_VALUE);
}
public ZkClient(IZkConnection connection, int connectionTimeout) {
_connection = connection;
connect(connectionTimeout, this);
}
public ZkClient(String zkServers, int sessionTimeout, int connectionTimeout) {
this(new ZkConnection(zkServers, sessionTimeout), connectionTimeout);
}
public ZkClient(String zkServers, int connectionTimeout) {
this(new ZkConnection(zkServers), connectionTimeout);
}
public ZkClient(String serverstring) {
this(serverstring, Integer.MAX_VALUE);
}
public List<String> subscribeChildChanges(String path, IZkChildListener listener) {
synchronized (_childListener) {
Set<IZkChildListener> listeners = _childListener.get(path);
if (listeners == null) {
listeners = new CopyOnWriteArraySet<IZkChildListener>();
_childListener.put(path, listeners);
}
listeners.add(listener);
}
return watchForChilds(path);
}
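// Illustrative usage (not part of the original source): registering a permanent child watch
// as described in the class comment. The server address and path below are placeholders.
//
//     ZkClient client = new ZkClient("localhost:2181", 10000);
//     client.subscribeChildChanges("/brokers", new IZkChildListener() {
//         @Override
//         public void handleChildChange(String parentPath, List<String> currentChilds) throws Exception {
//             System.out.println(parentPath + " now has children: " + currentChilds);
//         }
//     });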
public void unsubscribeChildChanges(String path, IZkChildListener childListener) {
synchronized (_childListener) {
final Set<IZkChildListener> listeners = _childListener.get(path);
if (listeners != null) {
listeners.remove(childListener);
}
}
}
public void subscribeDataChanges(String path, IZkDataListener listener) {
Set<IZkDataListener> listeners;
synchronized (_dataListener) {
listeners = _dataListener.get(path);
if (listeners == null) {
listeners = new CopyOnWriteArraySet<IZkDataListener>();
_dataListener.put(path, listeners);
}
listeners.add(listener);
}
watchForData(path);
LOG.debug("Subscribed data changes for " + path);
}
public void unsubscribeDataChanges(String path, IZkDataListener dataListener) {
synchronized (_dataListener) {
final Set<IZkDataListener> listeners = _dataListener.get(path);
if (listeners != null) {
listeners.remove(dataListener);
}
}
}
public void subscribeStateChanges(final IZkStateListener listener) {
synchronized (_stateListener) {
_stateListener.add(listener);
}
}
public void unsubscribeStateChanges(IZkStateListener stateListener) {
synchronized (_stateListener) {
_stateListener.remove(stateListener);
}
}
public void unsubscribeAll() {
synchronized (_childListener) {
_childListener.clear();
}
synchronized (_dataListener) {
_dataListener.clear();
}
synchronized (_stateListener) {
_stateListener.clear();
}
}
// </listeners>
public void createPersistent(String path) throws ZkInterruptedException, IllegalArgumentException, ZkException, RuntimeException {
create(path, null, CreateMode.PERSISTENT);
}
public void createPersistent(String path, Serializable serializable) throws ZkInterruptedException, IllegalArgumentException, ZkException, RuntimeException {
create(path, serializable, CreateMode.PERSISTENT);
}
public String createPersistentSequential(String path, Serializable serializable) throws ZkInterruptedException, IllegalArgumentException, ZkException, RuntimeException {
return create(path, serializable, CreateMode.PERSISTENT_SEQUENTIAL);
}
public void createEphemeral(final String path) throws ZkInterruptedException, IllegalArgumentException, ZkException, RuntimeException {
create(path, null, CreateMode.EPHEMERAL);
}
public String create(final String path, Serializable serializable, final CreateMode mode) throws ZkInterruptedException, IllegalArgumentException, ZkException, RuntimeException {
if (path == null) {
throw new NullPointerException("path must not be null.");
}
final byte[] data = serializable == null ? null : toByteArray(serializable);
return retryUntilConnected(new Callable<String>() {
@Override
public String call() throws Exception {
return _connection.create(path, data, mode);
}
});
}
private byte[] toByteArray(Serializable serializable) {
try {
ByteArrayOutputStream byteArrayOS = new ByteArrayOutputStream();
ObjectOutputStream stream = new ObjectOutputStream(byteArrayOS);
stream.writeObject(serializable);
stream.close();
return byteArrayOS.toByteArray();
} catch (IOException e) {
throw new ZkMarshallingError(e);
}
}
public void createEphemeral(final String path, final Serializable serializable) throws ZkInterruptedException, IllegalArgumentException, ZkException, RuntimeException {
create(path, serializable, CreateMode.EPHEMERAL);
}
public String createEphemeralSequential(final String path, final Serializable serializable) throws ZkInterruptedException, IllegalArgumentException, ZkException, RuntimeException {
return create(path, serializable, CreateMode.EPHEMERAL_SEQUENTIAL);
}
public void process(WatchedEvent event) {
LOG.debug("Received event: " + event);
_zookeeperEventThread = Thread.currentThread();
boolean stateChanged = event.getPath() == null;
boolean znodeChanged = event.getPath() != null;
boolean dataChanged = event.getType() == EventType.NodeDataChanged || event.getType() == EventType.NodeDeleted || event.getType() == EventType.NodeCreated
|| event.getType() == EventType.NodeChildrenChanged;
getEventLock().lock();
try {
// We might have to install child change event listener if a new node was created
if (getShutdownTrigger()) {
LOG.debug("ignoring event '{" + event.getType() + " | " + event.getPath() + "}' since shutdown triggered");
return;
}
if (stateChanged) {
processStateChanged(event);
}
if (dataChanged) {
processDataOrChildChange(event);
}
} finally {
if (stateChanged) {
getEventLock().getStateChangedCondition().signalAll();
// If the session expired we have to signal all conditions, because watches might have been
// removed and there is no guarantee that those conditions will be signaled at all after an
// Expired event
// TODO PVo write a test for this
if (event.getState() == KeeperState.Expired) {
getEventLock().getZNodeEventCondition().signalAll();
getEventLock().getDataChangedCondition().signalAll();
// We also have to notify all listeners that something might have changed
fireAllEvents();
}
}
if (znodeChanged) {
getEventLock().getZNodeEventCondition().signalAll();
}
if (dataChanged) {
getEventLock().getDataChangedCondition().signalAll();
}
getEventLock().unlock();
LOG.debug("Leaving process event");
}
}
private void fireAllEvents() {
for (Entry<String, Set<IZkChildListener>> entry : _childListener.entrySet()) {
fireChildChangedEvents(entry.getKey(), entry.getValue());
}
for (Entry<String, Set<IZkDataListener>> entry : _dataListener.entrySet()) {
fireDataChangedEvents(entry.getKey(), entry.getValue());
}
}
public List<String> getChildren(String path) {
return getChildren(path, hasListeners(path));
}
private List<String> getChildren(final String path, final boolean watch) {
return retryUntilConnected(new Callable<List<String>>() {
@Override
public List<String> call() throws Exception {
return _connection.getChildren(path, watch);
}
});
}
/**
* Counts number of children for the given path.
*
* @param path
* @return number of children or 0 if path does not exist.
*/
public int countChildren(String path) {
try {
return getChildren(path).size();
} catch (ZkNoNodeException e) {
return 0;
}
}
private boolean exists(final String path, final boolean watch) {
return retryUntilConnected(new Callable<Boolean>() {
@Override
public Boolean call() throws Exception {
return _connection.exists(path, watch);
}
});
}
public boolean exists(final String path) {
return exists(path, hasListeners(path));
}
private void processStateChanged(WatchedEvent event) {
LOG.info("zookeeper state changed (" + event.getState() + ")");
setCurrentState(event.getState());
if (getShutdownTrigger()) {
return;
}
try {
fireStateChangedEvent(event.getState());
if (event.getState() == KeeperState.Expired) {
reconnect();
fireNewSessionEvents();
}
} catch (final Exception e) {
throw new RuntimeException("Exception while restarting zk client", e);
}
}
private void fireNewSessionEvents() {
for (final IZkStateListener stateListener : _stateListener) {
_eventThread.send(new ZkEvent("New session event sent to " + stateListener.getClass().getName()) {
@Override
public void run() throws Exception {
stateListener.handleNewSession();
}
});
}
}
private void fireStateChangedEvent(final KeeperState state) {
for (final IZkStateListener stateListener : _stateListener) {
_eventThread.send(new ZkEvent("State changed to " + state + " sent to " + stateListener.getClass().getName()) {
@Override
public void run() throws Exception {
stateListener.handleStateChanged(state);
}
});
}
}
private boolean hasListeners(String path) {
Set<IZkDataListener> dataListeners = _dataListener.get(path);
if (dataListeners != null && dataListeners.size() > 0) {
return true;
}
Set<IZkChildListener> childListeners = _childListener.get(path);
if (childListeners != null && childListeners.size() > 0) {
return true;
}
return false;
}
public boolean deleteRecursive(String path) {
List<String> children;
try {
children = getChildren(path, false);
} catch (ZkNoNodeException e) {
return true;
}
for (String subPath : children) {
if (!deleteRecursive(path + "/" + subPath)) {
return false;
}
}
return delete(path);
}
private void processDataOrChildChange(WatchedEvent event) {
// ZkEventType eventType = ZkEventType.getMappedType(event.getType());
final String path = event.getPath();
if (event.getType() == EventType.NodeChildrenChanged || event.getType() == EventType.NodeCreated || event.getType() == EventType.NodeDeleted) {
Set<IZkChildListener> childListeners = _childListener.get(path);
if (childListeners != null && !childListeners.isEmpty()) {
fireChildChangedEvents(path, childListeners);
}
}
if (event.getType() == EventType.NodeDataChanged || event.getType() == EventType.NodeDeleted || event.getType() == EventType.NodeCreated) {
Set<IZkDataListener> listeners = _dataListener.get(path);
if (listeners != null && !listeners.isEmpty()) {
fireDataChangedEvents(event.getPath(), listeners);
}
}
}
private void fireDataChangedEvents(final String path, Set<IZkDataListener> listeners) {
for (final IZkDataListener listener : listeners) {
_eventThread.send(new ZkEvent("Data of " + path + " changed sent to " + listener.getClass().getName()) {
@Override
public void run() throws Exception {
// reinstall watch
exists(path, true);
try {
Serializable data = readData(path, null, true);
listener.handleDataChange(path, data);
} catch (ZkNoNodeException e) {
listener.handleDataDeleted(path);
}
}
});
}
}
private void fireChildChangedEvents(final String path, Set<IZkChildListener> childListeners) {
try {
// reinstall the watch
for (final IZkChildListener listener : childListeners) {
_eventThread.send(new ZkEvent("Children of " + path + " changed sent to " + listener.getClass().getName()) {
@Override
public void run() throws Exception {
try {
// if the node doesn't exist we should listen for the root node to reappear
exists(path);
List<String> children = getChildren(path);
listener.handleChildChange(path, children);
} catch (ZkNoNodeException e) {
listener.handleChildChange(path, null);
}
}
});
}
} catch (Exception e) {
LOG.error("Failed to fire child changed event. Unable to getChildren. ", e);
}
}
public boolean waitUntilExists(String path, TimeUnit timeUnit, long time) throws ZkInterruptedException {
Date timeout = new Date(System.currentTimeMillis() + timeUnit.toMillis(time));
LOG.debug("Waiting until znode '" + path + "' becomes available.");
if (exists(path)) {
return true;
}
acquireEventLock();
try {
while (!exists(path, true)) {
boolean gotSignal = getEventLock().getZNodeEventCondition().awaitUntil(timeout);
if (!gotSignal) {
return false;
}
}
return true;
} catch (InterruptedException e) {
throw new ZkInterruptedException(e);
} finally {
getEventLock().unlock();
}
}
protected Set<IZkDataListener> getDataListener(String path) {
return _dataListener.get(path);
}
public void showFolders(OutputStream output) {
final int level = 1;
final StringBuilder builder = new StringBuilder();
final String startPath = "/";
addChildrenToStringBuilder(level, builder, startPath);
try {
output.write(builder.toString().getBytes());
} catch (final IOException e) {
e.printStackTrace();
}
}
private void addChildrenToStringBuilder(final int level, final StringBuilder builder, final String startPath) {
final List<String> children = getChildren(startPath);
for (final String node : children) {
builder.append(getSpaces(level - 1) + "'-+" + node + "\n");
String nestedPath;
if (startPath.endsWith("/")) {
nestedPath = startPath + node;
} else {
nestedPath = startPath + "/" + node;
}
addChildrenToStringBuilder(level + 1, builder, nestedPath);
}
}
private String getSpaces(final int level) {
final StringBuilder s = new StringBuilder();
for (int i = 0; i < level; i++) {
s.append("  ");
}
return s.toString();
}
public void waitUntilConnected() throws ZkInterruptedException {
waitUntilConnected(Integer.MAX_VALUE, TimeUnit.MILLISECONDS);
}
public boolean waitUntilConnected(long time, TimeUnit timeUnit) throws ZkInterruptedException {
return waitForKeeperState(KeeperState.SyncConnected, time, timeUnit);
}
public boolean waitForKeeperState(KeeperState keeperState, long time, TimeUnit timeUnit) throws ZkInterruptedException {
if (_zookeeperEventThread != null && Thread.currentThread() == _zookeeperEventThread) {
throw new IllegalArgumentException("Must not be done in the zookeeper event thread.");
}
Date timeout = new Date(System.currentTimeMillis() + timeUnit.toMillis(time));
LOG.debug("Waiting for keeper state " + keeperState);
acquireEventLock();
try {
boolean stillWaiting = true;
while (_currentState != keeperState) {
if (!stillWaiting) {
return false;
}
stillWaiting = getEventLock().getStateChangedCondition().awaitUntil(timeout);
}
LOG.debug("State is " + _currentState);
return true;
} catch (InterruptedException e) {
throw new ZkInterruptedException(e);
} finally {
getEventLock().unlock();
}
}
private void acquireEventLock() {
try {
getEventLock().lockInterruptibly();
} catch (InterruptedException e) {
throw new ZkInterruptedException(e);
}
}
public <T> T retryUntilConnected(Callable<T> callable) throws ZkInterruptedException, IllegalArgumentException, ZkException, RuntimeException {
if (_zookeeperEventThread != null && Thread.currentThread() == _zookeeperEventThread) {
throw new IllegalArgumentException("Must not be done in the zookeeper event thread.");
}
while (true) {
try {
return callable.call();
} catch (ConnectionLossException e) {
// we give the event thread some time to update the status to 'Disconnected'
Thread.yield();
waitUntilConnected();
} catch (SessionExpiredException e) {
// we give the event thread some time to update the status to 'Expired'
Thread.yield();
waitUntilConnected();
} catch (KeeperException e) {
throw ZkException.create(e);
} catch (InterruptedException e) {
throw new ZkInterruptedException(e);
} catch (Exception e) {
throw ExceptionUtil.convertToRuntimeException(e);
}
}
}
public void setCurrentState(KeeperState currentState) {
getEventLock().lock();
try {
_currentState = currentState;
} finally {
getEventLock().unlock();
}
}
/**
 * Returns the mutex that all ZooKeeper events are synchronized against. If you need to do
 * something without being interrupted by ZooKeeper events, synchronize against this mutex.
 * All threads waiting on this mutex object are notified on an event.
 *
 * @return the mutex.
 */
public ZkLock getEventLock() {
return _zkEventLock;
}
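// Illustrative usage (not part of the original source): performing work that must not be
// interleaved with ZooKeeper event processing by holding the event lock returned above
// (zkClient is assumed to be a connected ZkClient instance).
//
//     zkClient.getEventLock().lock();
//     try {
//         // touch client-side state that the event callbacks also read or modify
//     } finally {
//         zkClient.getEventLock().unlock();
//     }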
public boolean delete(final String path) {
try {
retryUntilConnected(new Callable<Object>() {
@Override
public Object call() throws Exception {
_connection.delete(path);
return null;
}
});
return true;
} catch (ZkNoNodeException e) {
return false;
}
}
public <T extends Serializable> T readData(String path) {
return (T) readData(path, null);
}
@SuppressWarnings("unchecked")
public <T extends Serializable> T readData(String path, Stat stat) {
return (T) readData(path, stat, hasListeners(path));
}
@SuppressWarnings("unchecked")
private <T extends Serializable> T readData(final String path, final Stat stat, final boolean watch) {
byte[] data = retryUntilConnected(new Callable<byte[]>() {
@Override
public byte[] call() throws Exception {
return _connection.readData(path, stat, watch);
}
});
return (T) readSerializable(data);
}
@SuppressWarnings("unchecked")
private <T extends Serializable> T readSerializable(byte[] data) {
if (data == null) {
return null;
}
try {
ObjectInputStream inputStream = new ObjectInputStream(new ByteArrayInputStream(data));
Object object = inputStream.readObject();
return (T) object;
} catch (ClassNotFoundException e) {
throw new ZkMarshallingError("Unable to find object class.", e);
} catch (IOException e) {
throw new ZkMarshallingError(e);
}
}
public void writeData(String path, Serializable serializable) {
writeData(path, serializable, -1);
}
/**
* Updates data of an existing znode. The current content of the znode is passed to the {@link DataUpdater} that is
* passed into this method, which returns the new content. The new content is only written back to ZooKeeper if
* nobody has modified the given znode in between. If a concurrent change has been detected, the new data of the
* znode is passed to the updater again until the new content can be successfully written back to ZooKeeper.
*
* @param <T>
* @param path
* The path of the znode.
* @param updater
* Updater that creates the new contents.
*/
public <T extends Serializable> void updateDataSerialized(String path, DataUpdater<T> updater) {
Stat stat = new Stat();
boolean retry;
do {
retry = false;
try {
T oldData = (T) readData(path, stat);
T newData = updater.update(oldData);
writeData(path, newData, stat.getVersion());
} catch (ZkBadVersionException e) {
retry = true;
}
} while (retry);
}
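// Illustrative usage (not part of the original source): an optimistic counter increment using
// the retry-on-version-conflict loop above. DataUpdater is the callback type referenced in the
// method signature; the znode is assumed to already exist and to hold an Integer, and zkClient
// is assumed to be a connected ZkClient instance.
//
//     zkClient.updateDataSerialized("/counters/visits", new DataUpdater<Integer>() {
//         @Override
//         public Integer update(Integer currentData) {
//             return currentData == null ? 1 : currentData + 1;
//         }
//     });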
public void writeData(final String path, Serializable serializable, final int expectedVersion) {
final byte[] data = toByteArray(serializable);
retryUntilConnected(new Callable<Object>() {
@Override
public Object call() throws Exception {
_connection.writeData(path, data, expectedVersion);
return null;
}
});
}
public void watchForData(final String path) {
retryUntilConnected(new Callable<Object>() {
@Override
public Object call() throws Exception {
_connection.exists(path, true);
return null;
}
});
}
/**
* Installs a child watch for the given path.
*
* @param path
* @return the current children of the path or null if the zk node with the given path doesn't exist.
*/
public List<String> watchForChilds(final String path) {
if (_zookeeperEventThread != null && Thread.currentThread() == _zookeeperEventThread) {
throw new IllegalArgumentException("Must not be done in the zookeeper event thread.");
}
return retryUntilConnected(new Callable<List<String>>() {
@Override
public List<String> call() throws Exception {
exists(path, true);
try {
return getChildren(path, true);
} catch (ZkNoNodeException e) {
// ignore, the "exists" watch will listen for the parent node to appear
}
return null;
}
});
}
public void connect(final long maxMsToWaitUntilConnected, Watcher watcher) throws ZkInterruptedException, ZkTimeoutException, IllegalStateException {
boolean started = false;
try {
getEventLock().lockInterruptibly();
setShutdownTrigger(false);
_eventThread = new ZkEventThread(_connection.getServers());
_eventThread.start();
_connection.connect(watcher);
LOG.debug("Awaiting connection to Zookeeper server");
if (!waitUntilConnected(maxMsToWaitUntilConnected, TimeUnit.MILLISECONDS)) {
throw new ZkTimeoutException("Unable to connect to zookeeper server within timeout: " + maxMsToWaitUntilConnected);
}
started = true;
} catch (InterruptedException e) {
States state = _connection.getZookeeperState();
throw new IllegalStateException("Not connected with zookeeper server yet. Current state is " + state);
} finally {
getEventLock().unlock();
// we should close the zookeeper instance, otherwise it would keep
// on trying to connect
if (!started) {
close();
}
}
}
public long getCreationTime(String path) {
try {
getEventLock().lockInterruptibly();
return _connection.getCreateTime(path);
} catch (KeeperException e) {
throw ZkException.create(e);
} catch (InterruptedException e) {
throw new ZkInterruptedException(e);
} finally {
getEventLock().unlock();
}
}
/**
* Close the client.
*
* @throws ZkInterruptedException
*/
public void close() throws ZkInterruptedException {
if (_connection == null) {
return;
}
LOG.debug("Closing ZkClient...");
getEventLock().lock();
try {
setShutdownTrigger(true);
_eventThread.interrupt();
_eventThread.join(2000);
_connection.close();
_connection = null;
} catch (InterruptedException e) {
throw new ZkInterruptedException(e);
} finally {
getEventLock().unlock();
}
LOG.debug("Closing ZkClient...done");
}
private void reconnect() {
getEventLock().lock();
try {
_connection.close();
_connection.connect(this);
} catch (InterruptedException e) {
throw new ZkInterruptedException(e);
} finally {
getEventLock().unlock();
}
}
public void setShutdownTrigger(boolean triggerState) {
_shutdownTriggered = triggerState;
}
public boolean getShutdownTrigger() {
return _shutdownTriggered;
}
public int numberOfListeners() {
int listeners = 0;
for (Set<IZkChildListener> childListeners : _childListener.values()) {
listeners += childListeners.size();
}
for (Set<IZkDataListener> dataListeners : _dataListener.values()) {
listeners += dataListeners.size();
}
listeners += _stateListener.size();
return listeners;
}
}
|
package org.hibernate.ogm.datastore.mongodb.test.performance;
import static org.fest.assertions.Assertions.assertThat;
import org.hibernate.Session;
import org.hibernate.Transaction;
import org.hibernate.ogm.backendtck.associations.collection.types.Child;
import org.hibernate.ogm.backendtck.associations.collection.types.GrandChild;
import org.hibernate.ogm.backendtck.associations.collection.types.GrandMother;
import org.hibernate.ogm.datastore.impl.DatastoreProviderType;
import org.hibernate.ogm.utils.BytemanHelper;
import org.hibernate.ogm.utils.BytemanHelperStateCleanup;
import org.hibernate.ogm.utils.OgmTestCase;
import org.hibernate.ogm.utils.SkipByDatastoreProvider;
import org.jboss.byteman.contrib.bmunit.BMRule;
import org.jboss.byteman.contrib.bmunit.BMRules;
import org.junit.Rule;
import org.junit.Test;
/**
* @author Guillaume Smet
*/
@SkipByDatastoreProvider({ DatastoreProviderType.FONGO })
public class MongoDBPerformanceTest extends OgmTestCase {
private static final String MONGO_COLLECTION = "com.mongodb.client.MongoCollection";
private static final String HELPER = "org.hibernate.ogm.utils.BytemanHelper";
private static final String BSON_DOCUMENT = "org.bson.conversions.Bson";
@Rule
public BytemanHelperStateCleanup bytemanState = new BytemanHelperStateCleanup();
@Test
@BMRules(rules = {
@BMRule(
targetClass = MONGO_COLLECTION,
isInterface = true,
targetMethod = "insertMany(java.util.List)",
helper = HELPER,
action = "countInvocation(\"update\")",
name = "countInsertMany"),
@BMRule(targetClass = MONGO_COLLECTION,
isInterface = true,
targetMethod = "updateOne(" + BSON_DOCUMENT + "," + BSON_DOCUMENT + ",com.mongodb.client.model.UpdateOptions)",
helper = HELPER,
action = "countInvocation(\"update\")",
name = "countUpdateOne"),
@BMRule(targetClass = MONGO_COLLECTION,
isInterface = true,
targetMethod = "find(" + BSON_DOCUMENT + ")",
helper = HELPER,
action = "countInvocation(\"load\")",
name = "countFind")
})
public void testNumberOfCallsToDatastore() throws Exception {
//insert entity with embedded collection
Session session = openSession();
Transaction tx = session.beginTransaction();
GrandChild luke = new GrandChild();
luke.setName( "Luke" );
GrandChild leia = new GrandChild();
leia.setName( "Leia" );
GrandMother grandMother = new GrandMother();
grandMother.getGrandChildren().add( luke );
grandMother.getGrandChildren().add( leia );
session.persist( grandMother );
tx.commit();
session.clear();
int loadInvocationCount = BytemanHelper.getAndResetInvocationCount( "load" );
int updateInvocationCount = BytemanHelper.getAndResetInvocationCount( "update" );
assertThat( loadInvocationCount ).isEqualTo( 0 );
assertThat( updateInvocationCount ).isEqualTo( 1 );
// Check that all the counters have been reset to 0
loadInvocationCount = BytemanHelper.getAndResetInvocationCount( "load" );
updateInvocationCount = BytemanHelper.getAndResetInvocationCount( "update" );
assertThat( loadInvocationCount ).isEqualTo( 0 );
assertThat( updateInvocationCount ).isEqualTo( 0 );
//remove one of the elements and add a new one
tx = session.beginTransaction();
grandMother = (GrandMother) session.get( GrandMother.class, grandMother.getId() );
grandMother.getGrandChildren().remove( 0 );
tx.commit();
session.clear();
loadInvocationCount = BytemanHelper.getAndResetInvocationCount( "load" );
updateInvocationCount = BytemanHelper.getAndResetInvocationCount( "update" );
assertThat( loadInvocationCount ).isEqualTo( 1 );
assertThat( updateInvocationCount ).isEqualTo( 1 );
//assert removal has been propagated
tx = session.beginTransaction();
grandMother = (GrandMother) session.get( GrandMother.class, grandMother.getId() );
assertThat( grandMother.getGrandChildren() ).onProperty( "name" ).containsExactly( "Leia" );
session.delete( grandMother );
tx.commit();
session.close();
}
@Override
protected Class<?>[] getAnnotatedClasses() {
return new Class<?>[] {
GrandMother.class,
Child.class
};
}
}
|
package cx.cad.nfsn.objects;
import cx.cad.nfsn.API;
import cx.cad.nfsn.net.APIResponse;
import java.util.ArrayList;
import java.util.List;
public class Member extends APIObject {
private static final String type = "member";
public Member(String username, API api) {
super(username, api, type);
}
//properties
public String getEmail() {
String path = "/email";
APIResponse res = executeGetFromPath(path);
return res.getString();
}
public String getPassword() {//this is probably not what it means
return null;
}
public String getStatus() {
String path = "/status";
APIResponse res = executeGetFromPath(path);
return res.getString();
}
public ArrayList<Account> getAccounts() {
String path = "/accounts";
APIResponse res = executeGetFromPath(path);
List<String> list = (List) res.getObject();
ArrayList<Account> output = new ArrayList<Account>(list.size());
for (String id : list) {
output.add(getAPI().getAccount(id));
}
return output;
}
public ArrayList<Site> getSites() {
String path = "/sites";
APIResponse res = executeGetFromPath(path);
List<String> list = (List) res.getObject();
ArrayList<Site> output = new ArrayList<Site>(list.size());
for (String id : list) {
output.add(getAPI().getSite(id));
}
return output;
}
//methods (the member actions below are unimplemented stubs that only record their API paths)
public void confirmNewEmail(String confirmationCode) {
String path = "/confirmNewEmail";
}
public String getAPIKey(String password) {
String path = "/getAPIKey";
return null;
}
public String getInfo() {
String path = "/getInfo";
return null;
}
public void requestNewEmail(String email) {
String path = "/requestNewEmail";
}
public String summarizeAccounts() {
String path = "/summarizeAccounts";
return null;
}
public String summarizeDatabases() {
String path = "/summarizeDatabases";
return null;
}
public String summarizeDomains() {
String path = "/summarizeDomains";
return null;
}
public String summarizeSites() {
String path = "/summarizeSites";
return null;
}
}
|
package org.amc.game.chess;
public class KingPiece extends SimplePiece {
public KingPiece(Colour colour) {
super(colour);
}
/**
 * A king moves at most one square in any direction.
 *
 * @see SimplePiece#validMovement(Move)
 */
@Override
boolean validMovement(Move move) {
return move.getAbsoluteDistanceX() <= 1 &&
move.getAbsoluteDistanceY() <= 1;
}
/**
* @see SimplePiece#canMakeMove(ChessBoard, Move)
*/
@Override
boolean canMakeMove(ChessBoard board, Move move) {
ChessPiece piece = board.getPieceFromBoardAt(move.getEnd().getLetter().getName(),
move.getEnd().getNumber());
if (piece == null) {
return true;
} else {
return endSquareNotOccupiedByPlayersOwnPiece(piece);
}
}
private boolean endSquareNotOccupiedByPlayersOwnPiece(ChessPiece piece) {
return !piece.getColour().equals(getColour());
}
}
|
package org.navalplanner.business.planner.entities;
import static org.navalplanner.business.workingday.EffortDuration.hours;
import static org.navalplanner.business.workingday.EffortDuration.seconds;
import static org.navalplanner.business.workingday.EffortDuration.zero;
import java.math.BigDecimal;
import java.math.RoundingMode;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import org.apache.commons.lang.ObjectUtils;
import org.apache.commons.lang.Validate;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.hibernate.validator.Min;
import org.hibernate.validator.NotNull;
import org.joda.time.LocalDate;
import org.navalplanner.business.calendars.entities.AvailabilityTimeLine;
import org.navalplanner.business.calendars.entities.BaseCalendar;
import org.navalplanner.business.calendars.entities.CombinedWorkHours;
import org.navalplanner.business.calendars.entities.ICalendar;
import org.navalplanner.business.calendars.entities.SameWorkHoursEveryDay;
import org.navalplanner.business.calendars.entities.ThereAreHoursOnWorkHoursCalculator;
import org.navalplanner.business.calendars.entities.ThereAreHoursOnWorkHoursCalculator.CapacityResult;
import org.navalplanner.business.common.BaseEntity;
import org.navalplanner.business.common.Registry;
import org.navalplanner.business.planner.entities.DerivedAllocationGenerator.IWorkerFinder;
import org.navalplanner.business.planner.entities.allocationalgorithms.AllocatorForTaskDurationAndSpecifiedResourcesPerDay;
import org.navalplanner.business.planner.entities.allocationalgorithms.HoursModification;
import org.navalplanner.business.planner.entities.allocationalgorithms.ResourcesPerDayModification;
import org.navalplanner.business.planner.entities.allocationalgorithms.UntilFillingHoursAllocator;
import org.navalplanner.business.planner.limiting.entities.LimitingResourceQueueElement;
import org.navalplanner.business.resources.daos.IResourcesSearcher;
import org.navalplanner.business.resources.entities.Criterion;
import org.navalplanner.business.resources.entities.Machine;
import org.navalplanner.business.resources.entities.MachineWorkersConfigurationUnit;
import org.navalplanner.business.resources.entities.Resource;
import org.navalplanner.business.scenarios.IScenarioManager;
import org.navalplanner.business.scenarios.entities.Scenario;
import org.navalplanner.business.util.deepcopy.OnCopy;
import org.navalplanner.business.util.deepcopy.Strategy;
import org.navalplanner.business.workingday.EffortDuration;
import org.navalplanner.business.workingday.IntraDayDate;
import org.navalplanner.business.workingday.IntraDayDate.PartialDay;
import org.navalplanner.business.workingday.ResourcesPerDay;
public abstract class ResourceAllocation<T extends DayAssignment> extends
BaseEntity {
private static final Log LOG = LogFactory.getLog(ResourceAllocation.class);
private static final NoneFunction NONE_FUNCTION = NoneFunction.create();
public static <T extends ResourceAllocation<?>> List<T> getSatisfied(
Collection<T> resourceAllocations) {
Validate.notNull(resourceAllocations);
Validate.noNullElements(resourceAllocations);
List<T> result = new ArrayList<T>();
for (T each : resourceAllocations) {
if (each.isSatisfied()) {
result.add(each);
}
}
return result;
}
public static <T extends ResourceAllocation<?>> List<T> getOfType(
Class<T> type,
Collection<? extends ResourceAllocation<?>> resourceAllocations) {
List<T> result = new ArrayList<T>();
for (ResourceAllocation<?> allocation : resourceAllocations) {
if (type.isInstance(allocation)) {
result.add(type.cast(allocation));
}
}
return result;
}
public static <R extends ResourceAllocation<?>> Map<Resource, List<R>> byResource(
Collection<? extends R> allocations) {
Map<Resource, List<R>> result = new HashMap<Resource, List<R>>();
for (R resourceAllocation : allocations) {
for (Resource resource : resourceAllocation
.getAssociatedResources()) {
if (!result.containsKey(resource)) {
result.put(resource, new ArrayList<R>());
}
result.get(resource).add(resourceAllocation);
}
}
return result;
}
public static <R extends ResourceAllocation<?>> List<R> sortedByStartDate(
Collection<R> allocations) {
List<R> result = new ArrayList<R>(allocations);
Collections.sort(result, byStartDateComparator());
return result;
}
public static <R extends ResourceAllocation<?>> Map<Task, List<R>> byTask(
List<? extends R> allocations) {
Map<Task, List<R>> result = new LinkedHashMap<Task, List<R>>();
for (R resourceAllocation : allocations) {
if (resourceAllocation.getTask() != null) {
Task task = resourceAllocation.getTask();
initializeIfNeeded(result, task);
result.get(task).add(resourceAllocation);
}
}
return result;
}
private static <E extends ResourceAllocation<?>> void initializeIfNeeded(
Map<Task, List<E>> result, Task task) {
if (!result.containsKey(task)) {
result.put(task, new ArrayList<E>());
}
}
private static Comparator<ResourceAllocation<?>> byStartDateComparator() {
return new Comparator<ResourceAllocation<?>>() {
@Override
public int compare(ResourceAllocation<?> o1,
ResourceAllocation<?> o2) {
if (o1.getIntraDayStartDate() == null) {
return -1;
}
if (o2.getIntraDayStartDate() == null) {
return 1;
}
return o1.getIntraDayStartDate().compareTo(
o2.getIntraDayStartDate());
}
};
}
public enum Direction {
FORWARD {
@Override
public IntraDayDate getDateFromWhichToAllocate(Task task) {
return IntraDayDate.max(task.getFirstDayNotConsolidated(),
task.getIntraDayStartDate());
}
@Override
void limitAvailabilityOn(AvailabilityTimeLine availability,
IntraDayDate dateFromWhichToAllocate) {
availability.invalidUntil(dateFromWhichToAllocate
.asExclusiveEnd());
}
},
BACKWARD {
@Override
public IntraDayDate getDateFromWhichToAllocate(Task task) {
return task.getIntraDayEndDate();
}
@Override
void limitAvailabilityOn(AvailabilityTimeLine availability,
IntraDayDate dateFromWhichToAllocate) {
availability.invalidFrom(dateFromWhichToAllocate.getDate());
}
};
public abstract IntraDayDate getDateFromWhichToAllocate(Task task);
abstract void limitAvailabilityOn(AvailabilityTimeLine availability,
IntraDayDate dateFromWhichToAllocate);
}
public static AllocationsSpecified allocating(
List<ResourcesPerDayModification> resourceAllocations) {
return new AllocationsSpecified(resourceAllocations);
}
/**
 * Needed for doing fluent interface calls (see the usage sketch after this class):
 * <ul>
 * <li>
 * {@link ResourceAllocation#allocating(List)}.
 * {@link AllocationsSpecified#untilAllocating(int) untilAllocating(int)}</li>
 * <li>
 * {@link ResourceAllocation#allocating(List)}.
 * {@link AllocationsSpecified#allocateOnTaskLength() allocateOnTaskLength()}</li>
 * <li>
 * {@link ResourceAllocation#allocating(List)}.
 * {@link AllocationsSpecified#allocateUntil(LocalDate)
 * allocateUntil(LocalDate)}</li>
 * </ul>
 *
 */
public static class AllocationsSpecified {
private final List<ResourcesPerDayModification> allocations;
private final Task task;
public AllocationsSpecified(
List<ResourcesPerDayModification> resourceAllocations) {
Validate.notNull(resourceAllocations);
Validate.notEmpty(resourceAllocations);
Validate.noNullElements(resourceAllocations);
checkNoOneHasNullTask(resourceAllocations);
checkAllHaveSameTask(resourceAllocations);
checkNoAllocationWithZeroResourcesPerDay(resourceAllocations);
this.allocations = resourceAllocations;
this.task = resourceAllocations.get(0).getBeingModified()
.getTask();
}
private static void checkNoAllocationWithZeroResourcesPerDay(
List<ResourcesPerDayModification> allocations) {
for (ResourcesPerDayModification r : allocations) {
if (isZero(r.getGoal().getAmount())) {
throw new IllegalArgumentException(
"all resources per day must be non-zero");
}
}
}
public static boolean isZero(BigDecimal amount) {
return amount.signum() == 0;
}
private static void checkNoOneHasNullTask(
List<ResourcesPerDayModification> allocations) {
for (ResourcesPerDayModification resourcesPerDayModification : allocations) {
if (resourcesPerDayModification
.getBeingModified().getTask() == null) {
throw new IllegalArgumentException(
"all allocations must have a task");
}
}
}
private static void checkAllHaveSameTask(
List<ResourcesPerDayModification> resourceAllocations) {
Task task = null;
for (ResourcesPerDayModification r : resourceAllocations) {
if (task == null) {
task = r.getBeingModified().getTask();
}
if (!task.equals(r.getBeingModified().getTask())) {
throw new IllegalArgumentException(
"all allocations must belong to the same task");
}
}
}
public interface INotFulfilledReceiver {
public void cantFulfill(
ResourcesPerDayModification allocationAttempt,
CapacityResult capacityResult);
}
public IntraDayDate untilAllocating(int hoursToAllocate) {
return untilAllocating(Direction.FORWARD, hoursToAllocate);
}
public IntraDayDate untilAllocating(Direction direction,
int hoursToAllocate) {
return untilAllocating(direction, hoursToAllocate, doNothing());
}
private static INotFulfilledReceiver doNothing() {
return new INotFulfilledReceiver() {
@Override
public void cantFulfill(
ResourcesPerDayModification allocationAttempt,
CapacityResult capacityResult) {
}
};
}
public IntraDayDate untilAllocating(int hoursToAllocate,
final INotFulfilledReceiver receiver) {
return untilAllocating(Direction.FORWARD, hoursToAllocate, receiver);
}
public IntraDayDate untilAllocating(Direction direction,
int hoursToAllocate, final INotFulfilledReceiver receiver) {
UntilFillingHoursAllocator allocator = new UntilFillingHoursAllocator(
direction,
task, allocations) {
@Override
protected <T extends DayAssignment> void setNewDataForAllocation(
ResourceAllocation<T> allocation,
IntraDayDate resultDate,
ResourcesPerDay resourcesPerDay, List<T> dayAssignments) {
Task task = AllocationsSpecified.this.task;
allocation.setIntendedResourcesPerDay(resourcesPerDay);
if (isForwardScheduling()) {
allocation.resetAllAllocationAssignmentsTo(
dayAssignments,
task.getIntraDayStartDate(), resultDate);
} else {
allocation.resetAllAllocationAssignmentsTo(
dayAssignments,
resultDate, task.getIntraDayEndDate());
}
allocation.updateResourcesPerDay();
}
@Override
protected CapacityResult thereAreAvailableHoursFrom(
IntraDayDate dateFromWhichToAllocate,
ResourcesPerDayModification resourcesPerDayModification,
EffortDuration effortToAllocate) {
ICalendar calendar = getCalendar(resourcesPerDayModification);
ResourcesPerDay resourcesPerDay = resourcesPerDayModification
.getGoal();
AvailabilityTimeLine availability = resourcesPerDayModification
.getAvailability();
getDirection().limitAvailabilityOn(availability,
dateFromWhichToAllocate);
return ThereAreHoursOnWorkHoursCalculator
.thereIsAvailableCapacityFor(calendar,
availability, resourcesPerDay,
effortToAllocate);
}
private CombinedWorkHours getCalendar(
ResourcesPerDayModification resourcesPerDayModification) {
return CombinedWorkHours.minOf(resourcesPerDayModification
.getBeingModified().getTaskCalendar(),
resourcesPerDayModification.getResourcesCalendar());
}
@Override
protected void markUnsatisfied(
ResourcesPerDayModification allocationAttempt,
CapacityResult capacityResult) {
allocationAttempt.getBeingModified().markAsUnsatisfied();
receiver.cantFulfill(allocationAttempt, capacityResult);
}
};
IntraDayDate result = allocator
.untilAllocating(hours(hoursToAllocate));
if (result == null) {
// allocation could not be done
return direction == Direction.FORWARD ? task
.getIntraDayEndDate() : task.getIntraDayStartDate();
}
return result;
}
public void allocateOnTaskLength() {
AllocatorForTaskDurationAndSpecifiedResourcesPerDay allocator = new AllocatorForTaskDurationAndSpecifiedResourcesPerDay(
allocations);
allocator.allocateOnTaskLength();
}
public void allocateUntil(LocalDate endExclusive) {
AllocatorForTaskDurationAndSpecifiedResourcesPerDay allocator = new AllocatorForTaskDurationAndSpecifiedResourcesPerDay(
allocations);
allocator.allocateUntil(endExclusive);
}
public void allocateFromEndUntil(LocalDate start) {
AllocatorForTaskDurationAndSpecifiedResourcesPerDay allocator = new AllocatorForTaskDurationAndSpecifiedResourcesPerDay(
allocations);
allocator.allocateFromEndUntil(start);
}
}
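// Illustrative usage (not part of the original source) of the fluent calls enumerated in the
// javadoc of AllocationsSpecified, assuming a pre-built list of ResourcesPerDayModification
// objects named "modifications":
//
//     IntraDayDate end = ResourceAllocation.allocating(modifications).untilAllocating(40);
//     ResourceAllocation.allocating(modifications).allocateOnTaskLength();
//     ResourceAllocation.allocating(modifications).allocateUntil(new LocalDate(2012, 1, 31));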
public static HoursAllocationSpecified allocatingHours(
List<HoursModification> hoursModifications) {
return new HoursAllocationSpecified(hoursModifications);
}
/**
 * Needed for doing fluent interface calls (see the usage sketch after this class):
 * <ul>
 * <li>
 * {@link ResourceAllocation#allocatingHours(List)}.
 * {@link HoursAllocationSpecified#allocateUntil(LocalDate)
 * allocateUntil(LocalDate)}</li>
 * <li>
 * {@link ResourceAllocation#allocatingHours(List)}.
 * {@link HoursAllocationSpecified#allocate() allocate()}</li>
 * </ul>
 *
 */
public static class HoursAllocationSpecified {
private final List<HoursModification> hoursModifications;
private Task task;
public HoursAllocationSpecified(List<HoursModification> hoursModifications) {
Validate.noNullElements(hoursModifications);
Validate.isTrue(!hoursModifications.isEmpty());
this.hoursModifications = hoursModifications;
this.task = hoursModifications.get(0).getBeingModified().getTask();
Validate.notNull(task);
}
public void allocate() {
allocateUntil(new LocalDate(task.getEndDate()));
}
public void allocateUntil(LocalDate end) {
Validate.notNull(end);
Validate.isTrue(!end.isBefore(new LocalDate(task.getStartDate())));
for (HoursModification each : hoursModifications) {
each.allocateUntil(end);
}
}
public void allocateFromEndUntil(LocalDate start) {
Validate.notNull(start);
Validate.isTrue(start.isBefore(task.getEndAsLocalDate()));
for (HoursModification each : hoursModifications) {
each.allocateFromEndUntil(start);
}
}
}
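// Illustrative usage (not part of the original source) of the hours-based counterpart
// enumerated in the javadoc of HoursAllocationSpecified, assuming a pre-built list of
// HoursModification objects named "hoursModifications":
//
//     ResourceAllocation.allocatingHours(hoursModifications).allocate();
//     ResourceAllocation.allocatingHours(hoursModifications).allocateUntil(new LocalDate(2012, 1, 31));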
private Task task;
private AssignmentFunction assignmentFunction;
@OnCopy(Strategy.SHARE)
private ResourcesPerDay resourcesPerDay;
@OnCopy(Strategy.SHARE)
private ResourcesPerDay intendedResourcesPerDay;
private Integer intendedTotalHours;
private Set<DerivedAllocation> derivedAllocations = new HashSet<DerivedAllocation>();
@OnCopy(Strategy.SHARE_COLLECTION_ELEMENTS)
private Set<LimitingResourceQueueElement> limitingResourceQueueElements = new HashSet<LimitingResourceQueueElement>();
private int originalTotalAssignment = 0;
private IOnDayAssignmentRemoval dayAssignmenteRemoval = new DoNothing();
public interface IOnDayAssignmentRemoval {
public void onRemoval(ResourceAllocation<?> allocation,
DayAssignment assignment);
}
public static class DoNothing implements IOnDayAssignmentRemoval {
@Override
public void onRemoval(
ResourceAllocation<?> allocation, DayAssignment assignment) {
}
}
public static class DetachDayAssignmentOnRemoval implements
IOnDayAssignmentRemoval {
@Override
public void onRemoval(ResourceAllocation<?> allocation,
DayAssignment assignment) {
assignment.detach();
}
}
public void setOnDayAssignmentRemoval(
IOnDayAssignmentRemoval dayAssignmentRemoval) {
Validate.notNull(dayAssignmentRemoval);
this.dayAssignmenteRemoval = dayAssignmentRemoval;
}
/**
* Constructor for hibernate. Do not use!
*/
public ResourceAllocation() {
this.assignmentsState = buildFromDBState();
}
/**
* Returns the associated resources from the day assignments of this
* {@link ResourceAllocation}.
* @return the associated resources with no repeated elements
*/
public abstract List<Resource> getAssociatedResources();
public void switchToScenario(Scenario scenario) {
Validate.notNull(scenario);
assignmentsState = assignmentsState.switchTo(scenario);
switchDerivedAllocationsTo(scenario);
}
private void switchDerivedAllocationsTo(Scenario scenario) {
for (DerivedAllocation each : derivedAllocations) {
each.useScenario(scenario);
}
}
protected void updateResourcesPerDay() {
if (!isSatisfied()) {
return;
}
ResourcesPerDay resourcesPerDay = calculateResourcesPerDayFromAssignments(getAssignments());
if (resourcesPerDay == null) {
this.resourcesPerDay = ResourcesPerDay.amount(0);
} else {
this.resourcesPerDay = resourcesPerDay;
}
}
protected void setResourcesPerDayToAmount(int amount) {
this.resourcesPerDay = ResourcesPerDay.amount(amount);
this.intendedResourcesPerDay = this.resourcesPerDay;
}
private void setIntendedResourcesPerDay(ResourcesPerDay resourcesPerDay) {
Validate.notNull(resourcesPerDay);
Validate.isTrue(!resourcesPerDay.isZero());
this.intendedResourcesPerDay = resourcesPerDay;
}
/**
 * Returns the last specified resources per day, or <code>null</code> if the task's
 * calculated value is RESOURCES_PER_DAY
 */
public ResourcesPerDay getIntendedResourcesPerDay() {
if (getTask().getCalculatedValue() == CalculatedValue.RESOURCES_PER_DAY) {
return null;
}
return intendedResourcesPerDay;
}
private ResourcesPerDay getReassignationResourcesPerDay() {
ResourcesPerDay intended = getIntendedResourcesPerDay();
if (intended != null) {
return intended;
}
return getResourcesPerDay();
}
public boolean areIntendedResourcesPerDaySatisfied() {
CalculatedValue calculatedValue = getTask().getCalculatedValue();
return calculatedValue == CalculatedValue.RESOURCES_PER_DAY
|| ObjectUtils.equals(getNonConsolidatedResourcePerDay(),
getIntendedResourcesPerDay());
}
public ResourceAllocation(Task task) {
this(task, null);
}
public ResourceAllocation(Task task, AssignmentFunction assignmentFunction) {
Validate.notNull(task);
this.task = task;
this.assignmentFunction = assignmentFunction;
this.assignmentsState = buildInitialTransientState();
}
protected ResourceAllocation(ResourcesPerDay resourcesPerDay, Task task) {
this(task);
Validate.notNull(resourcesPerDay);
this.resourcesPerDay = resourcesPerDay;
}
@NotNull
public Task getTask() {
return task;
}
private void updateOriginalTotalAssigment() {
if (!isSatisfied()) {
return;
}
if ((task.getConsolidation() == null)
|| (task.getConsolidation().getConsolidatedValues().isEmpty())) {
originalTotalAssignment = getNonConsolidatedHours();
} else {
BigDecimal lastConsolidation = task.getConsolidation()
.getConsolidatedValues().last().getValue();
BigDecimal unconsolidatedPercentage = BigDecimal.ONE
.subtract(lastConsolidation.setScale(2).divide(
new BigDecimal(100), RoundingMode.DOWN));
if (unconsolidatedPercentage.setScale(2).equals(
BigDecimal.ZERO.setScale(2))) {
originalTotalAssignment = getConsolidatedHours();
} else {
originalTotalAssignment = new BigDecimal(
getNonConsolidatedHours()).divide(
unconsolidatedPercentage, RoundingMode.DOWN).intValue();
}
}
}
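// Worked example (not part of the original source) for updateOriginalTotalAssigment() above:
// if the last consolidated value is 40 (40% of the work consolidated), the unconsolidated
// fraction is 1 - 40/100 = 0.60; with 60 non-consolidated hours remaining, the original total
// assignment is reconstructed as 60 / 0.60 = 100 hours.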
@Min(0)
public int getOriginalTotalAssigment() {
return originalTotalAssignment;
}
public interface IVisitor<T> {
T on(SpecificResourceAllocation specificAllocation);
T on(GenericResourceAllocation genericAllocation);
}
public static <T> T visit(ResourceAllocation<?> allocation,
IVisitor<T> visitor) {
Validate.notNull(allocation);
Validate.notNull(visitor);
if (allocation instanceof GenericResourceAllocation) {
GenericResourceAllocation generic = (GenericResourceAllocation) allocation;
return visitor.on(generic);
} else if (allocation instanceof SpecificResourceAllocation) {
SpecificResourceAllocation specific = (SpecificResourceAllocation) allocation;
return visitor.on(specific);
}
throw new RuntimeException("can't handle: " + allocation.getClass());
}
public abstract ResourcesPerDayModification withDesiredResourcesPerDay(
ResourcesPerDay resourcesPerDay);
public final ResourcesPerDayModification asResourcesPerDayModification() {
if (getReassignationResourcesPerDay().isZero()) {
return null;
}
return visit(this, new IVisitor<ResourcesPerDayModification>() {
@Override
public ResourcesPerDayModification on(
SpecificResourceAllocation specificAllocation) {
return ResourcesPerDayModification.create(specificAllocation,
getReassignationResourcesPerDay());
}
@Override
public ResourcesPerDayModification on(
GenericResourceAllocation genericAllocation) {
return ResourcesPerDayModification.create(genericAllocation,
getReassignationResourcesPerDay(),
getAssociatedResources());
}
});
}
public final HoursModification asHoursModification(){
return visit(this, new IVisitor<HoursModification>() {
@Override
public HoursModification on(GenericResourceAllocation genericAllocation) {
return HoursModification.create(genericAllocation,
getEffortForReassignation().roundToHours(),
getAssociatedResources());
}
@Override
public HoursModification on(SpecificResourceAllocation specificAllocation) {
return HoursModification.create(specificAllocation,
getEffortForReassignation().roundToHours());
}
});
}
public abstract IAllocatable withPreviousAssociatedResources();
public interface IEffortDistributor<T extends DayAssignment> {
/**
* It does not add the created assignments to the underlying allocation.
* It just distributes them.
*
*/
List<T> distributeForDay(PartialDay day, EffortDuration effort);
}
protected abstract class AssignmentsAllocator implements IAllocatable,
IEffortDistributor<T> {
@Override
public final void allocate(ResourcesPerDay resourcesPerDay) {
Task currentTask = getTask();
AllocateResourcesPerDayOnInterval allocator = new AllocateResourcesPerDayOnInterval(
currentTask.getIntraDayStartDate(),
currentTask.getIntraDayEndDate());
allocator.allocate(resourcesPerDay);
}
private List<T> createAssignments(ResourcesPerDay resourcesPerDay,
IntraDayDate startInclusive, IntraDayDate endExclusive) {
List<T> assignmentsCreated = new ArrayList<T>();
for (PartialDay day : getDays(startInclusive, endExclusive)) {
EffortDuration durationForDay = calculateTotalToDistribute(day,
resourcesPerDay);
assignmentsCreated
.addAll(distributeForDay(day, durationForDay));
}
return onlyNonZeroHours(assignmentsCreated);
}
@Override
public IAllocateResourcesPerDay resourcesPerDayUntil(final LocalDate endExclusive) {
IntraDayDate startInclusive = getStartSpecifiedByTask();
IntraDayDate end = IntraDayDate.startOfDay(endExclusive);
return new AllocateResourcesPerDayOnInterval(startInclusive, end);
}
@Override
public IAllocateResourcesPerDay resourcesPerDayFromEndUntil(
LocalDate start) {
IntraDayDate startInclusive = IntraDayDate.max(
IntraDayDate.startOfDay(start), getStartSpecifiedByTask());
IntraDayDate endDate = task.getIntraDayEndDate();
return new AllocateResourcesPerDayOnInterval(startInclusive,
endDate);
}
private Iterable<PartialDay> getDays(IntraDayDate startInclusive,
IntraDayDate endExclusive) {
Validate.isTrue(startInclusive.compareTo(endExclusive) <= 0,
"the end must be equal to or later than the start");
Iterable<PartialDay> daysUntil = startInclusive
.daysUntil(endExclusive);
return daysUntil;
}
private final class AllocateResourcesPerDayOnInterval implements
IAllocateResourcesPerDay {
private final IntraDayDate startInclusive;
private final IntraDayDate endExclusive;
private AllocateResourcesPerDayOnInterval(
IntraDayDate startInclusive, IntraDayDate endExclusive) {
this.startInclusive = startInclusive;
this.endExclusive = endExclusive;
}
@Override
public void allocate(ResourcesPerDay resourcesPerDay) {
setIntendedResourcesPerDay(resourcesPerDay);
List<T> assignmentsCreated = createAssignments(resourcesPerDay,
startInclusive, endExclusive);
resetAllAllocationAssignmentsTo(assignmentsCreated,
startInclusive,
endExclusive);
updateResourcesPerDay();
}
}
@Override
public IAllocateHoursOnInterval onIntervalWithinTask(
final LocalDate start, final LocalDate end) {
checkStartBeforeOrEqualEnd(start, end);
return new OnSubIntervalAllocator(
new AllocationIntervalInsideTask(start, end));
}
@Override
public IAllocateHoursOnInterval onIntervalWithinTask(
IntraDayDate start, IntraDayDate end) {
checkStartBeforeOrEqualEnd(start, end);
return new OnSubIntervalAllocator(new AllocationIntervalInsideTask(
start, end));
}
@Override
public IAllocateHoursOnInterval onInterval(
final LocalDate startInclusive, final LocalDate endExclusive) {
checkStartBeforeOrEqualEnd(startInclusive, endExclusive);
return new OnSubIntervalAllocator(new AllocationInterval(
startInclusive, endExclusive));
}
@Override
public IAllocateHoursOnInterval onInterval(IntraDayDate start,
IntraDayDate end) {
checkStartBeforeOrEqualEnd(start, end);
return new OnSubIntervalAllocator(
new AllocationInterval(start,
end));
}
private void checkStartBeforeOrEqualEnd(LocalDate start, LocalDate end) {
Validate.isTrue(start.compareTo(end) <= 0,
"the end must be equal to or later than the start");
}
private void checkStartBeforeOrEqualEnd(IntraDayDate start,
IntraDayDate end) {
Validate.isTrue(start.compareTo(end) <= 0,
"the end must be equal to or later than the start");
}
private class OnSubIntervalAllocator implements
IAllocateHoursOnInterval {
private final AllocationInterval allocationInterval;
private OnSubIntervalAllocator(
AllocationInterval allocationInterval) {
this.allocationInterval = allocationInterval;
}
@Override
public void allocateHours(int hours) {
allocateDuration(hours(hours));
}
private void allocateDuration(EffortDuration duration) {
List<T> assignmentsCreated = createAssignments(
allocationInterval, duration);
allocationInterval.resetAssignments(assignmentsCreated);
}
@Override
public void allocate(List<EffortDuration> durationsByDay) {
allocateDurationsByDay(allocationInterval, durationsByDay);
}
}
private void allocateDurationsByDay(AllocationInterval interval,
List<EffortDuration> durationsByDay) {
List<EffortDuration> rightSlice = interval
.getRightSlice(durationsByDay);
AvailabilityTimeLine availability = getAvailability();
List<T> assignments = createAssignments(interval, availability,
rightSlice.toArray(new EffortDuration[0]));
interval.resetAssignments(assignments);
}
@Override
public IAllocateHoursOnInterval fromStartUntil(final LocalDate end) {
final AllocationInterval interval = new AllocationInterval(
getStartSpecifiedByTask(), IntraDayDate.startOfDay(end));
return new IAllocateHoursOnInterval() {
@Override
public void allocateHours(int hours) {
allocateTheWholeAllocation(interval, hours(hours));
}
@Override
public void allocate(List<EffortDuration> durationsByDay) {
List<EffortDuration> rightSlice = interval
.getRightSlice(durationsByDay);
AvailabilityTimeLine availability = getAvailability();
createAssignments(interval, availability,
rightSlice.toArray(new EffortDuration[0]));
}
};
}
@Override
public IAllocateHoursOnInterval fromEndUntil(final LocalDate start) {
final AllocationInterval interval = new AllocationInterval(
IntraDayDate.startOfDay(start), task.getIntraDayEndDate());
return new IAllocateHoursOnInterval() {
@Override
public void allocateHours(int hours) {
allocateTheWholeAllocation(interval, hours(hours));
}
@Override
public void allocate(List<EffortDuration> durationsByDay) {
allocateDurationsByDay(interval, durationsByDay);
}
};
}
private void allocateTheWholeAllocation(AllocationInterval interval,
EffortDuration durationToAssign) {
List<T> assignmentsCreated = createAssignments(interval,
durationToAssign);
allocateTheWholeAllocation(interval, assignmentsCreated);
}
private void allocateTheWholeAllocation(AllocationInterval interval,
List<T> assignments) {
resetAllAllocationAssignmentsTo(assignments,
interval.getStartInclusive(), interval.getEndExclusive());
updateResourcesPerDay();
}
protected abstract AvailabilityTimeLine getResourcesAvailability();
private List<T> createAssignments(AllocationInterval interval,
EffortDuration durationToAssign) {
AvailabilityTimeLine availability = getAvailability();
Iterable<PartialDay> days = getDays(interval.getStartInclusive(),
interval.getEndExclusive());
EffortDuration[] durationsEachDay = secondsDistribution(
availability, days, durationToAssign);
return createAssignments(interval, availability, durationsEachDay);
}
private List<T> createAssignments(AllocationInterval interval,
AvailabilityTimeLine availability,
EffortDuration[] durationsEachDay) {
List<T> result = new ArrayList<T>();
int i = 0;
for (PartialDay day : getDays(interval.getStartInclusive(),
interval.getEndExclusive())) {
// skip days that are not available: without this check effort would
// be assigned to them anyway
if (availability.isValid(day.getDate())) {
result.addAll(distributeForDay(day, durationsEachDay[i]));
}
i++;
}
return onlyNonZeroHours(result);
}
private AvailabilityTimeLine getAvailability() {
AvailabilityTimeLine resourcesAvailability = getResourcesAvailability();
BaseCalendar taskCalendar = getTask().getCalendar();
if (taskCalendar != null) {
return taskCalendar.getAvailability()
.and(resourcesAvailability);
} else {
return resourcesAvailability;
}
}
private List<T> onlyNonZeroHours(List<T> assignmentsCreated) {
List<T> result = new ArrayList<T>();
for (T each : assignmentsCreated) {
if (!each.getDuration().isZero()) {
result.add(each);
}
}
return result;
}
private EffortDuration[] secondsDistribution(
AvailabilityTimeLine availability, Iterable<PartialDay> days,
EffortDuration duration) {
List<Share> shares = new ArrayList<Share>();
for (PartialDay each : days) {
shares.add(getShareAt(each, availability));
}
ShareDivision original = ShareDivision.create(shares);
ShareDivision newShare = original.plus(duration.getSeconds());
return fromSecondsToDurations(original.to(newShare));
}
private EffortDuration[] fromSecondsToDurations(int[] seconds) {
EffortDuration[] result = new EffortDuration[seconds.length];
for (int i = 0; i < result.length; i++) {
result[i] = seconds(seconds[i]);
}
return result;
}
private Share getShareAt(PartialDay day,
AvailabilityTimeLine availability) {
if (availability.isValid(day.getDate())) {
EffortDuration capacityAtDay = getAllocationCalendar()
.getCapacityOn(day);
return new Share(-capacityAtDay.getSeconds());
} else {
return new Share(Integer.MAX_VALUE);
}
}
}
public void markAsUnsatisfied() {
removingAssignments(getAssignments());
assert isUnsatisfied();
}
public boolean isLimiting() {
return getLimitingResourceQueueElement() != null;
}
public boolean isLimitingAndHasDayAssignments() {
return isLimiting() && hasAssignments();
}
public boolean isSatisfied() {
return hasAssignments();
}
public boolean isUnsatisfied() {
return !isSatisfied();
}
public void copyAssignmentsFromOneScenarioToAnother(Scenario from, Scenario to){
copyAssignments(from, to);
for (DerivedAllocation each : derivedAllocations) {
each.copyAssignments(from, to);
}
}
protected abstract void copyAssignments(Scenario from, Scenario to);
protected void resetAssignmentsTo(List<T> assignments) {
resetAllAllocationAssignmentsTo(assignments,
task.getIntraDayStartDate(),
task.getIntraDayEndDate());
}
protected void resetAllAllocationAssignmentsTo(List<T> assignments,
IntraDayDate intraDayStart,
IntraDayDate intraDayEnd) {
removingAssignments(withoutConsolidated(getAssignments()));
addingAssignments(assignments);
updateOriginalTotalAssigment();
getDayAssignmentsState().setIntraDayStart(intraDayStart);
getDayAssignmentsState().setIntraDayEnd(intraDayEnd);
}
class AllocationInterval {
private IntraDayDate originalStart;
private IntraDayDate originalEnd;
private final IntraDayDate start;
private final IntraDayDate end;
AllocationInterval(IntraDayDate originalStart,
IntraDayDate originalEnd, IntraDayDate start, IntraDayDate end) {
this.originalStart = originalStart;
this.originalEnd = originalEnd;
IntraDayDate startConsideringConsolidated = task
.hasConsolidations() ? IntraDayDate
.max(task.getFirstDayNotConsolidated(), start) : start;
this.start = IntraDayDate.min(startConsideringConsolidated, end);
this.end = IntraDayDate.max(this.start, end);
}
AllocationInterval(IntraDayDate start, IntraDayDate end) {
this(start, end, start, end);
}
AllocationInterval(LocalDate startInclusive, LocalDate endExclusive) {
this(IntraDayDate.startOfDay(startInclusive), IntraDayDate
.startOfDay(endExclusive));
}
public List<EffortDuration> getRightSlice(List<EffortDuration> original) {
List<EffortDuration> result = new ArrayList<EffortDuration>(
original);
final int numberOfDaysToFill = originalStart
.numberOfDaysUntil(originalEnd);
for (int i = 0; i < numberOfDaysToFill - original.size(); i++) {
result.add(zero());
}
return result.subList(originalStart.numberOfDaysUntil(start),
result.size() - end.numberOfDaysUntil(originalEnd));
}
public void resetAssignments(List<T> assignmentsCreated) {
resetAssigmentsFittingAllocationDatesToResultingAssignments(this,
assignmentsCreated);
}
public IntraDayDate getStartInclusive() {
return this.start;
}
public IntraDayDate getEndExclusive() {
return this.end;
}
public List<DayAssignment> getAssignmentsOnInterval() {
return getAssignments(this.start.getDate(),
this.end.asExclusiveEnd());
}
}
class AllocationIntervalInsideTask extends AllocationInterval {
AllocationIntervalInsideTask(LocalDate startInclusive,
LocalDate endExclusive) {
this(IntraDayDate.startOfDay(startInclusive), IntraDayDate
.startOfDay(endExclusive));
}
AllocationIntervalInsideTask(IntraDayDate startInclusive,
IntraDayDate endExclusive) {
super(startInclusive, endExclusive, IntraDayDate.max(
startInclusive, getTask()
.getFirstDayNotConsolidated()), IntraDayDate.min(
endExclusive, task.getIntraDayEndDate()));
}
@Override
public void resetAssignments(List<T> assignmentsCreated) {
resetAssigmentsForInterval(this, assignmentsCreated);
}
}
protected void resetAssigmentsForInterval(
AllocationIntervalInsideTask interval,
List<T> assignmentsCreated) {
IntraDayDate originalStart = getIntraDayStartDate();
IntraDayDate originalEnd = getIntraDayEndDate();
updateAssignments(interval, assignmentsCreated);
// The resource allocation cannot grow beyond the start of the task; this
// is guaranteed by AllocationIntervalInsideTask. It also cannot shrink
// below its original size; this is guaranteed by originalStart
getDayAssignmentsState().setIntraDayStart(
IntraDayDate.min(originalStart, interval.getStartInclusive()));
// The resource allocation cannot grow beyond the end of the task; this is
// guaranteed by AllocationIntervalInsideTask. It also cannot shrink below
// its original size; this is guaranteed by originalEnd
getDayAssignmentsState().setIntraDayEnd(
IntraDayDate.max(originalEnd, interval.getEndExclusive()));
}
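/*
 * Illustrative sketch (hypothetical dates, not part of the original class): the
 * min/max clamping above means a reallocation can only widen the allocation's
 * dates, never shrink them.
 *
 *   IntraDayDate originalStart = IntraDayDate.startOfDay(new LocalDate(2012, 1, 10));
 *   IntraDayDate intervalStart = IntraDayDate.startOfDay(new LocalDate(2012, 1, 12));
 *   // the earlier of the two wins, so the allocation keeps its original start
 *   assert IntraDayDate.min(originalStart, intervalStart).equals(originalStart);
 */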
private void updateAssignments(AllocationInterval interval,
List<T> assignmentsCreated) {
removingAssignments(withoutConsolidated(interval
.getAssignmentsOnInterval()));
addingAssignments(assignmentsCreated);
updateOriginalTotalAssigment();
updateResourcesPerDay();
}
private void resetAssigmentsFittingAllocationDatesToResultingAssignments(
AllocationInterval interval, List<T> assignmentsCreated) {
updateAssignments(interval, assignmentsCreated);
LocalDate startConsideringAssignments = getStartConsideringAssignments();
IntraDayDate start = IntraDayDate
.startOfDay(startConsideringAssignments);
if (interval.getStartInclusive()
.areSameDay(startConsideringAssignments)) {
start = interval.getStartInclusive();
}
getDayAssignmentsState().setIntraDayStart(start);
LocalDate endConsideringAssignments = getEndDateGiven(getAssignments());
IntraDayDate end = IntraDayDate.startOfDay(endConsideringAssignments);
if (interval.getEndExclusive().areSameDay(endConsideringAssignments)) {
end = interval.getEndExclusive();
}
getDayAssignmentsState().setIntraDayEnd(end);
}
private static <T extends DayAssignment> List<T> withoutConsolidated(
List<? extends T> assignments) {
List<T> result = new ArrayList<T>();
for (T each : assignments) {
if (!each.isConsolidated()) {
result.add(each);
}
}
return result;
}
protected final void addingAssignments(Collection<? extends T> assignments) {
getDayAssignmentsState().addingAssignments(assignments);
}
public void removeLimitingDayAssignments() {
allocateLimitingDayAssignments(Collections.<T>emptyList());
}
@SuppressWarnings("unchecked")
public void allocateLimitingDayAssignments(List<? extends DayAssignment> assignments) {
assert isLimiting();
resetAssignmentsTo((List<T>) assignments);
}
private void removingAssignments(
List<? extends DayAssignment> assignments) {
getDayAssignmentsState().removingAssignments(assignments);
}
public final EffortDuration calculateTotalToDistribute(PartialDay day,
ResourcesPerDay resourcesPerDay) {
return getAllocationCalendar().asDurationOn(day, resourcesPerDay);
}
public ResourcesPerDay calculateResourcesPerDayFromAssignments() {
return calculateResourcesPerDayFromAssignments(getAssignments());
}
private ResourcesPerDay calculateResourcesPerDayFromAssignments(
Collection<? extends T> assignments) {
if (assignments.isEmpty()) {
return ResourcesPerDay.amount(0);
}
Map<LocalDate, List<T>> byDay = DayAssignment.byDay(assignments);
LocalDate min = Collections.min(byDay.keySet());
LocalDate max = Collections.max(byDay.keySet());
Iterable<PartialDay> daysToIterate = startFor(min).daysUntil(
endFor(max));
EffortDuration sumTotalEffort = zero();
EffortDuration sumWorkableEffort = zero();
final ResourcesPerDay ONE_RESOURCE_PER_DAY = ResourcesPerDay.amount(1);
for (PartialDay day : daysToIterate) {
List<T> assignmentsAtDay = avoidNull(byDay.get(day.getDate()),
Collections.<T> emptyList());
EffortDuration incrementWorkable = getAllocationCalendar()
.asDurationOn(day, ONE_RESOURCE_PER_DAY);
sumWorkableEffort = sumWorkableEffort.plus(incrementWorkable);
sumTotalEffort = sumTotalEffort
.plus(getAssignedDuration(assignmentsAtDay));
}
if (sumWorkableEffort.equals(zero())) {
return ResourcesPerDay.amount(0);
}
return ResourcesPerDay.calculateFrom(sumTotalEffort, sumWorkableEffort);
}
private IntraDayDate startFor(LocalDate dayDate) {
IntraDayDate start = getIntraDayStartDate();
if (start.getDate().equals(dayDate)) {
return start;
} else {
return IntraDayDate.startOfDay(dayDate);
}
}
private IntraDayDate endFor(LocalDate assignmentDate) {
IntraDayDate end = getIntraDayEndDate();
if (end.getDate().equals(assignmentDate)) {
return end;
} else {
return IntraDayDate.startOfDay(assignmentDate).nextDayAtStart();
}
}
private static <T> T avoidNull(T value, T defaultValue) {
if (value != null) {
return value;
} else {
return defaultValue;
}
}
public ICalendar getAllocationCalendar() {
return getCalendarGivenTaskCalendar(getTaskCalendar());
}
private ICalendar getTaskCalendar() {
if (getTask().getCalendar() == null) {
return SameWorkHoursEveryDay.getDefaultWorkingDay();
} else {
return getTask().getCalendar();
}
}
protected abstract ICalendar getCalendarGivenTaskCalendar(
ICalendar taskCalendar);
protected abstract Class<T> getDayAssignmentType();
public ResourceAllocation<T> copy(Scenario scenario) {
Validate.notNull(scenario);
ResourceAllocation<T> copy = createCopy(scenario);
copy.assignmentsState = copy.toTransientStateWithInitial(
getUnorderedFor(scenario), getIntraDayStartDateFor(scenario),
getIntraDayEndFor(scenario));
copy.resourcesPerDay = resourcesPerDay;
copy.originalTotalAssignment = originalTotalAssignment;
copy.task = task;
copy.assignmentFunction = assignmentFunction;
copy.intendedResourcesPerDay = intendedResourcesPerDay;
return copy;
}
private DayAssignmentsState toTransientStateWithInitial(
Collection<? extends T> initialAssignments, IntraDayDate start,
IntraDayDate end) {
TransientState result = new TransientState(initialAssignments);
result.setIntraDayStart(start);
result.setIntraDayEnd(end);
return result;
}
private Set<T> getUnorderedFor(Scenario scenario) {
IDayAssignmentsContainer<T> container = retrieveContainerFor(scenario);
if (container == null) {
return new HashSet<T>();
}
return container.getDayAssignments();
}
private IntraDayDate getIntraDayStartDateFor(Scenario scenario) {
IDayAssignmentsContainer<T> container = retrieveContainerFor(scenario);
if (container == null) {
return null;
}
return container.getIntraDayStart();
}
private IntraDayDate getIntraDayEndFor(Scenario scenario) {
IDayAssignmentsContainer<T> container = retrieveContainerFor(scenario);
if (container == null) {
return null;
}
return container.getIntraDayEnd();
}
abstract ResourceAllocation<T> createCopy(Scenario scenario);
public AssignmentFunction getAssignmentFunction() {
return assignmentFunction;
}
public void setAssignmentFunction(AssignmentFunction assignmentFunction) {
// If the assignment function is empty, avoid creating an association
// between the resource allocation and the assignment function
if (assignmentFunction == null) {
NONE_FUNCTION.applyTo(this);
return;
}
this.assignmentFunction = assignmentFunction;
this.assignmentFunction.applyTo(this);
}
private void setWithoutApply(AssignmentFunction assignmentFunction) {
this.assignmentFunction = assignmentFunction;
}
public int getAssignedHours() {
return DayAssignment.sum(getAssignments()).roundToHours();
}
protected int getIntendedHours() {
return originalTotalAssignment;
}
@OnCopy(Strategy.IGNORE)
private DayAssignmentsState assignmentsState;
protected DayAssignmentsState getDayAssignmentsState() {
return assignmentsState;
}
private TransientState buildInitialTransientState() {
return new TransientState(new HashSet<T>());
}
private DayAssignmentsState buildFromDBState() {
return new NoExplicitlySpecifiedScenario();
}
abstract class DayAssignmentsState {
private List<T> dayAssignmentsOrdered = null;
protected List<T> getOrderedDayAssignments() {
if (dayAssignmentsOrdered == null) {
dayAssignmentsOrdered = DayAssignment
.orderedByDay(getUnorderedAssignments());
}
return dayAssignmentsOrdered;
}
/**
 * It can be <code>null</code>. A non-null value marks that the allocation
 * starts at a point within a day instead of at the start of the day.
 */
abstract IntraDayDate getIntraDayStart();
/**
 * Sets a new intraDayStart.
 *
 * @param intraDayStart
 *            it can be <code>null</code>
 * @see #getIntraDayStart()
 */
public abstract void setIntraDayStart(IntraDayDate intraDayStart);
/**
 * It can be <code>null</code>. A non-null value marks that the allocation
 * finishes at a point within a day instead of taking the whole day.
 */
abstract IntraDayDate getIntraDayEnd();
/**
 * Sets a new intraDayEnd.
 *
 * @param intraDayEnd
 *            it can be <code>null</code>
 * @see #getIntraDayEnd()
 */
public abstract void setIntraDayEnd(IntraDayDate intraDayEnd);
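/*
 * Illustrative sketch (hypothetical values; an IntraDayDate.create(LocalDate,
 * EffortDuration) factory is assumed here): a null boundary means the allocation
 * ends exactly at a day boundary, while a non-null value pins it inside a day.
 *
 *   state.setIntraDayEnd(null); // the allocation takes its whole last day
 *   state.setIntraDayEnd(IntraDayDate.create(day, EffortDuration.hours(4)));
 *   // ...ends after consuming 4h of effort within `day`
 */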
protected abstract Collection<T> getUnorderedAssignments();
protected void addingAssignments(Collection<? extends T> assignments) {
setParentFor(assignments);
addAssignments(assignments);
clearCachedData();
}
protected void clearCachedData() {
dayAssignmentsOrdered = null;
}
private void setParentFor(Collection<? extends T> assignments) {
for (T each : assignments) {
setItselfAsParentFor(each);
}
}
protected void removingAssignments(
List<? extends DayAssignment> assignments){
removeAssignments(assignments);
clearCachedData();
for (DayAssignment each : assignments) {
dayAssignmenteRemoval.onRemoval(ResourceAllocation.this, each);
}
}
protected abstract void removeAssignments(
List<? extends DayAssignment> assignments);
protected abstract void addAssignments(
Collection<? extends T> assignments);
@SuppressWarnings("unchecked")
public void mergeAssignments(ResourceAllocation<?> modification) {
detachAssignments();
resetTo(((ResourceAllocation<T>) modification).getAssignments());
clearCachedData();
}
protected abstract void resetTo(Collection<T> assignmentsCopied);
void detachAssignments() {
for (DayAssignment each : getUnorderedAssignments()) {
each.detach();
}
}
final protected DayAssignmentsState switchTo(Scenario scenario) {
DayAssignmentsState result = explicitlySpecifiedState(scenario);
copyTransientPropertiesIfAppropiateTo(result);
return result;
}
/**
* Override if necessary to do extra actions
*/
protected void copyTransientPropertiesIfAppropiateTo(
DayAssignmentsState newStateForScenario) {
}
}
protected abstract void setItselfAsParentFor(T dayAssignment);
private class TransientState extends DayAssignmentsState {
private final Set<T> assignments;
private IntraDayDate intraDayStart;
private IntraDayDate intraDayEnd;
TransientState(Collection<? extends T> assignments) {
this.assignments = new HashSet<T>(assignments);
}
@Override
final protected Collection<T> getUnorderedAssignments() {
return assignments;
}
@Override
final protected void removeAssignments(
List<? extends DayAssignment> assignments) {
this.assignments.removeAll(assignments);
}
@Override
final protected void addAssignments(Collection<? extends T> assignments) {
this.assignments.addAll(assignments);
}
@Override
final protected void resetTo(Collection<T> assignments) {
this.assignments.clear();
this.assignments.addAll(assignments);
}
@Override
public IntraDayDate getIntraDayStart() {
return intraDayStart;
}
@Override
public void setIntraDayStart(IntraDayDate intraDayStart) {
this.intraDayStart = intraDayStart;
}
@Override
final IntraDayDate getIntraDayEnd() {
return intraDayEnd;
}
@Override
public final void setIntraDayEnd(IntraDayDate intraDayEnd) {
this.intraDayEnd = intraDayEnd;
}
protected void copyTransientPropertiesIfAppropiateTo(
DayAssignmentsState newStateForScenario) {
newStateForScenario.resetTo(getUnorderedAssignments());
newStateForScenario.setIntraDayStart(getIntraDayStart());
newStateForScenario.setIntraDayEnd(getIntraDayEnd());
}
}
private DayAssignmentsState explicitlySpecifiedState(Scenario scenario) {
IDayAssignmentsContainer<T> container;
container = retrieveOrCreateContainerFor(scenario);
return new ExplicitlySpecifiedScenarioState(container);
}
protected abstract IDayAssignmentsContainer<T> retrieveContainerFor(
Scenario scenario);
protected abstract IDayAssignmentsContainer<T> retrieveOrCreateContainerFor(
Scenario scenario);
/**
 * It uses the current scenario retrieved from {@link IScenarioManager} in
 * order to return the assignments for that scenario. This state doesn't
 * allow updating the assignments for that scenario.<br />
 * Note that this implementation doesn't work well if the current scenario
 * is changed, since the assignments are cached and the assignments for the
 * previous scenario would be returned.
 */
private class NoExplicitlySpecifiedScenario extends
DayAssignmentsState {
@Override
protected final void removeAssignments(
List<? extends DayAssignment> assignments) {
modificationsNotAllowed();
}
@Override
protected final void addAssignments(Collection<? extends T> assignments) {
modificationsNotAllowed();
}
@Override
final void detachAssignments() {
modificationsNotAllowed();
}
@Override
protected final void resetTo(Collection<T> assignmentsCopied) {
modificationsNotAllowed();
}
private void modificationsNotAllowed() {
throw new IllegalStateException(
"modifications to assignments can't be done "
+ "if the scenario on which to work on is not explicitly specified");
}
@Override
protected Collection<T> getUnorderedAssignments() {
Scenario scenario = currentScenario();
return retrieveOrCreateContainerFor(scenario).getDayAssignments();
}
private Scenario currentScenario() {
return Registry.getScenarioManager().getCurrent();
}
@Override
IntraDayDate getIntraDayStart() {
return retrieveContainerFor(currentScenario()).getIntraDayStart();
}
@Override
IntraDayDate getIntraDayEnd() {
return retrieveOrCreateContainerFor(currentScenario())
.getIntraDayEnd();
}
@Override
public void setIntraDayEnd(IntraDayDate intraDayEnd) {
modificationsNotAllowed();
}
@Override
public void setIntraDayStart(IntraDayDate intraDayStart) {
modificationsNotAllowed();
}
}
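/*
 * Illustrative sketch (hypothetical variables): an allocation in this state can
 * be read but not modified; a scenario has to be made explicit first, e.g. via
 * switchToScenario(...):
 *
 *   allocation.getAssignments();           // ok: reads from the currently active scenario
 *   allocation.switchToScenario(scenario); // switches to an explicitly specified state
 *   // modifying calls before the switch end in an IllegalStateException
 */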
private class ExplicitlySpecifiedScenarioState extends
DayAssignmentsState {
private final IDayAssignmentsContainer<T> container;
ExplicitlySpecifiedScenarioState(IDayAssignmentsContainer<T> container) {
Validate.notNull(container);
this.container = container;
}
@Override
protected void addAssignments(Collection<? extends T> assignments) {
container.addAll(assignments);
}
@Override
protected Collection<T> getUnorderedAssignments() {
return container.getDayAssignments();
}
@Override
protected void removeAssignments(
List<? extends DayAssignment> assignments) {
container.removeAll(assignments);
}
@Override
protected void resetTo(Collection<T> assignmentsCopied) {
container.resetTo(assignmentsCopied);
}
@Override
IntraDayDate getIntraDayStart() {
return container.getIntraDayStart();
}
@Override
public void setIntraDayStart(IntraDayDate intraDayStart) {
container.setIntraDayStart(intraDayStart);
}
@Override
IntraDayDate getIntraDayEnd() {
return container.getIntraDayEnd();
}
@Override
public void setIntraDayEnd(IntraDayDate intraDayEnd) {
container.setIntraDayEnd(intraDayEnd);
}
}
public int getConsolidatedHours() {
return DayAssignment.sum(getConsolidatedAssignments()).roundToHours();
}
public int getNonConsolidatedHours() {
return getNonConsolidatedEffort().roundToHours();
}
public EffortDuration getEffortForReassignation() {
if (isSatisfied()) {
return getNonConsolidatedEffort();
} else {
return hours(getIntendedHours());
}
}
public EffortDuration getNonConsolidatedEffort() {
return DayAssignment.sum(getNonConsolidatedAssignments());
}
/**
* @return a list of {@link DayAssignment} ordered by date
*/
public final List<T> getAssignments() {
return getDayAssignmentsState().getOrderedDayAssignments();
}
public List<T> getNonConsolidatedAssignments() {
return getDayAssignmentsByConsolidated(false);
}
public List<T> getConsolidatedAssignments() {
return getDayAssignmentsByConsolidated(true);
}
private List<T> getDayAssignmentsByConsolidated(
boolean consolidated) {
List<T> result = new ArrayList<T>();
for (T day : getAssignments()) {
if (day.isConsolidated() == consolidated) {
result.add(day);
}
}
return result;
}
public ResourcesPerDay getNonConsolidatedResourcePerDay() {
return calculateResourcesPerDayFromAssignments(getNonConsolidatedAssignments());
}
public ResourcesPerDay getConsolidatedResourcePerDay() {
return calculateResourcesPerDayFromAssignments(getConsolidatedAssignments());
}
// Only called for validation purposes. It must be public; otherwise the
// call is not intercepted when this entity is a proxy.
@NotNull
public ResourcesPerDay getRawResourcesPerDay() {
return resourcesPerDay;
}
public ResourcesPerDay getResourcesPerDay() {
if (resourcesPerDay == null) {
return ResourcesPerDay.amount(0);
}
return resourcesPerDay;
}
public void createDerived(IWorkerFinder finder) {
final List<? extends DayAssignment> assignments = getAssignments();
List<DerivedAllocation> result = new ArrayList<DerivedAllocation>();
List<Machine> machines = Resource.machines(getAssociatedResources());
for (Machine machine : machines) {
for (MachineWorkersConfigurationUnit each : machine
.getConfigurationUnits()) {
result.add(DerivedAllocationGenerator.generate(this, finder,
each,
assignments));
}
}
resetDerivedAllocationsTo(result);
}
/**
* Resets the derived allocations
*/
private void resetDerivedAllocationsTo(
Collection<DerivedAllocation> derivedAllocations) {
// avoiding error: A collection with cascade="all-delete-orphan" was no
// longer referenced by the owning entity instance
this.derivedAllocations.clear();
this.derivedAllocations.addAll(derivedAllocations);
}
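/*
 * Illustrative sketch of the Hibernate pitfall avoided above (the "wrong" line is
 * hypothetical and intentionally left commented out):
 *
 *   // Wrong: replacing the managed collection instance triggers the
 *   // "collection with cascade=all-delete-orphan was no longer referenced" error
 *   // this.derivedAllocations = new HashSet<DerivedAllocation>(newDerivedAllocations);
 *
 *   // Right: mutate the managed collection in place, exactly as this method does
 *   this.derivedAllocations.clear();
 *   this.derivedAllocations.addAll(newDerivedAllocations);
 */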
public Set<DerivedAllocation> getDerivedAllocations() {
return Collections.unmodifiableSet(derivedAllocations);
}
public LocalDate getStartConsideringAssignments() {
List<? extends DayAssignment> assignments = getAssignments();
if (assignments.isEmpty()) {
return getStartDate();
}
return assignments.get(0).getDay();
}
public LocalDate getStartDate() {
IntraDayDate start = getIntraDayStartDate();
return start != null ? start.getDate() : null;
}
private IntraDayDate getStartSpecifiedByTask() {
IntraDayDate taskStart = task.getIntraDayStartDate();
IntraDayDate firstDayNotConsolidated = getTask()
.getFirstDayNotConsolidated();
return IntraDayDate.max(taskStart, firstDayNotConsolidated);
}
public IntraDayDate getIntraDayStartDate() {
IntraDayDate intraDayStart = getDayAssignmentsState()
.getIntraDayStart();
if (intraDayStart != null) {
return intraDayStart;
}
return task.getIntraDayStartDate();
}
public LocalDate getEndDate() {
IntraDayDate intraDayEndDate = getIntraDayEndDate();
return intraDayEndDate != null ? intraDayEndDate.asExclusiveEnd()
: null;
}
public IntraDayDate getIntraDayEndDate() {
IntraDayDate intraDayEnd = getDayAssignmentsState().getIntraDayEnd();
if (intraDayEnd != null) {
return intraDayEnd;
}
LocalDate l = getEndDateGiven(getAssignments());
if (l == null) {
return task.getIntraDayEndDate();
}
return IntraDayDate.startOfDay(l);
}
private LocalDate getEndDateGiven(
List<? extends DayAssignment> assignments) {
if (assignments.isEmpty()) {
return null;
}
DayAssignment lastAssignment = assignments.get(assignments.size() - 1);
return lastAssignment.getDay().plusDays(1);
}
public boolean isAlreadyFinishedBy(LocalDate date) {
if (getEndDate() == null) {
return false;
}
return getEndDate().compareTo(date) <= 0;
}
private interface PredicateOnDayAssignment {
boolean satisfiedBy(DayAssignment dayAssignment);
}
public int getAssignedHours(final Resource resource, LocalDate start,
LocalDate endExclusive) {
return getAssignedEffort(resource, start, endExclusive).roundToHours();
}
public EffortDuration getAssignedEffort(final Resource resource,
LocalDate start, LocalDate endExclusive) {
return getAssignedDuration(
filter(getAssignments(start, endExclusive),
new PredicateOnDayAssignment() {
@Override
public boolean satisfiedBy(
DayAssignment dayAssignment) {
return dayAssignment.isAssignedTo(resource);
}
}));
}
public List<DayAssignment> getAssignments(LocalDate start,
LocalDate endExclusive) {
return new ArrayList<DayAssignment>(DayAssignment.getAtInterval(
getAssignments(), start, endExclusive));
}
public int getAssignedHours(LocalDate start, LocalDate endExclusive) {
return getAssignedDuration(start, endExclusive).roundToHours();
}
public abstract EffortDuration getAssignedEffort(Criterion criterion,
LocalDate start,
LocalDate endExclusive);
private List<DayAssignment> filter(List<DayAssignment> assignments,
PredicateOnDayAssignment predicate) {
List<DayAssignment> result = new ArrayList<DayAssignment>();
for (DayAssignment dayAssignment : assignments) {
if (predicate.satisfiedBy(dayAssignment)) {
result.add(dayAssignment);
}
}
return result;
}
protected EffortDuration getAssignedDuration(LocalDate startInclusive,
LocalDate endExclusive) {
return getAssignedDuration(getAssignments(startInclusive, endExclusive));
}
private EffortDuration getAssignedDuration(
List<? extends DayAssignment> assignments) {
EffortDuration result = zero();
for (DayAssignment dayAssignment : assignments) {
result = result.plus(dayAssignment.getDuration());
}
return result;
}
public void mergeAssignmentsAndResourcesPerDay(Scenario scenario,
ResourceAllocation<?> modifications) {
if (modifications == this) {
return;
}
switchToScenario(scenario);
mergeAssignments(modifications);
this.intendedResourcesPerDay = modifications.intendedResourcesPerDay;
if (modifications.isSatisfied()) {
updateOriginalTotalAssigment();
updateResourcesPerDay();
}
setWithoutApply(modifications.getAssignmentFunction());
mergeDerivedAllocations(scenario, modifications.getDerivedAllocations());
}
private void mergeDerivedAllocations(Scenario scenario,
Set<DerivedAllocation> derivedAllocations) {
Map<MachineWorkersConfigurationUnit, DerivedAllocation> newMap = DerivedAllocation
.byConfigurationUnit(derivedAllocations);
Map<MachineWorkersConfigurationUnit, DerivedAllocation> currentMap = DerivedAllocation
.byConfigurationUnit(getDerivedAllocations());
for (Entry<MachineWorkersConfigurationUnit, DerivedAllocation> entry : newMap
.entrySet()) {
final MachineWorkersConfigurationUnit key = entry.getKey();
final DerivedAllocation modification = entry.getValue();
DerivedAllocation current = currentMap.get(key);
if (current == null) {
DerivedAllocation derived = modification.asDerivedFrom(this);
derived.useScenario(scenario);
currentMap.put(key, derived);
} else {
current.useScenario(scenario);
current.resetAssignmentsTo(modification.getAssignments());
}
}
resetDerivedAllocationsTo(currentMap.values());
}
final void mergeAssignments(ResourceAllocation<?> modifications) {
getDayAssignmentsState().mergeAssignments(modifications);
getDayAssignmentsState().setIntraDayStart(
modifications.getDayAssignmentsState().getIntraDayStart());
getDayAssignmentsState().setIntraDayEnd(
modifications.getDayAssignmentsState().getIntraDayEnd());
}
public void detach() {
getDayAssignmentsState().detachAssignments();
}
void associateAssignmentsToResource() {
for (DayAssignment dayAssignment : getAssignments()) {
dayAssignment.associateToResource();
}
}
public boolean hasAssignments() {
return !getAssignments().isEmpty();
}
public LimitingResourceQueueElement getLimitingResourceQueueElement() {
return (!limitingResourceQueueElements.isEmpty()) ? (LimitingResourceQueueElement) limitingResourceQueueElements.iterator().next() : null;
}
public void setLimitingResourceQueueElement(LimitingResourceQueueElement element) {
limitingResourceQueueElements.clear();
if (element != null) {
element.setResourceAllocation(this);
limitingResourceQueueElements.add(element);
}
}
public Integer getIntendedTotalHours() {
return intendedTotalHours;
}
public void setIntendedTotalHours(Integer intendedTotalHours) {
this.intendedTotalHours = intendedTotalHours;
}
/**
 * Does a query to recover the list of resources that are suitable for this
 * allocation. For a {@link SpecificResourceAllocation} it returns the current
 * resource. For a {@link GenericResourceAllocation} it returns the resources
 * that currently match this allocation's criteria.
 * @return a list of resources that are suitable for this allocation
 */
public abstract List<Resource> querySuitableResources(
IResourcesSearcher resourceSearcher);
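/*
 * Illustrative usage sketch (hypothetical variables): callers use the result to
 * know which resources could absorb this allocation.
 *
 *   List<Resource> candidates = allocation.querySuitableResources(resourcesSearcher);
 *   // SpecificResourceAllocation -> the resource it is bound to
 *   // GenericResourceAllocation  -> resources matching the allocation's criteria
 */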
public abstract void makeAssignmentsContainersDontPoseAsTransientAnyMore();
public void removePredecessorsDayAssignmentsFor(Scenario scenario) {
for (DerivedAllocation each : getDerivedAllocations()) {
each.removePredecessorContainersFor(scenario);
}
removePredecessorContainersFor(scenario);
}
protected abstract void removePredecessorContainersFor(Scenario scenario);
public void removeDayAssigmentsFor(Scenario scenario) {
for (DerivedAllocation each : getDerivedAllocations()) {
each.removeContainersFor(scenario);
}
removeContainersFor(scenario);
}
protected abstract void removeContainersFor(Scenario scenario);
/**
 * Returns the first non-consolidated day.
 */
public LocalDate getFirstNonConsolidatedDate() {
List<T> nonConsolidated = getNonConsolidatedAssignments();
return (!nonConsolidated.isEmpty()) ? nonConsolidated.get(0).getDay()
: null;
}
}
|
package de.hdm.wim;
import de.hdm.wim.events.MessageEvent;
import de.hdm.wim.patterns.SenderPattern;
import de.hdm.wim.patterns.TestPattern;
import de.hdm.wim.source.MessageEventSource;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
public class CEPSpeechTokens {
public static void main(String[] args) throws Exception {
final StreamExecutionEnvironment env = StreamExecutionEnvironment.createLocalEnvironment();
env.setParallelism(1); // set Parallelism to 1 Task, to get chat order right
MessageEventSource evtSrc = new MessageEventSource();
// create a DataStream of MessageEvent from our source
DataStream<MessageEvent> messageStream = env
.addSource(evtSrc);
TestPattern testPattern = new TestPattern();
testPattern.run(env,messageStream);
SenderPattern senderPattern = new SenderPattern();
senderPattern.run(env,messageStream);
/*
// Project pattern: a single MessageEvent of subtype ProjectEvent whose token
// list contains the word "concerning"
Pattern<MessageEvent, ?> projectPattern = Pattern.<MessageEvent>begin("first")
.subtype( ProjectEvent.class )
.where( evt -> evt.getTokens().contains("concerning") );
//.stream()
//.anyMatch( token -> token.equals("concerning") )
//);
// .anyMatch( token -> (token.equals("concerning") || token.equals("regarding") ) && token.equals("project") ));
// Create a pattern stream from our project pattern
PatternStream<MessageEvent> projectPatternStream = CEP.pattern(
messageStream.keyBy("_messageId"),
projectPattern);
// Generate ProjectEvents for each matched project pattern
DataStream<ProjectEvent> projectStream = projectPatternStream.select(
(Map<String, MessageEvent> pattern) -> {
ProjectEvent projectEvent = (ProjectEvent) pattern.get("first");
return projectEvent;
}
);
// print to stdout
//projectStream.print();
*/
// print message stream
messageStream.print();
env.execute("CEP chat stream job");
}
}
|
package org.apache.jmeter;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStream;
import java.lang.Thread.UncaughtExceptionHandler;
import java.security.Permission;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.xml.transform.TransformerException;
import org.apache.commons.io.IOUtils;
import org.apache.maven.plugin.AbstractMojo;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugin.MojoFailureException;
import org.apache.maven.project.MavenProject;
import org.apache.tools.ant.DirectoryScanner;
/**
* JMeter Maven plugin.
*
* @author Tim McCune
* @goal jmeter
* @requiresProject true
*/
public class JMeterMojo extends AbstractMojo {
/**
* Path to a JMeter test XML file.
* Relative to srcDir.
* May be declared instead of the parameter includes.
*
* @parameter
*/
private File jmeterTestFile;
/**
* Sets the list of include patterns to use in directory scan for JMeter Test XML files.
* Relative to srcDir.
* May be declared instead of a single jmeterTestFile.
* Ignored if parameter jmeterTestFile is given.
*
* @parameter
*/
private List<String> includes;
/**
* Sets the list of exclude patterns to use in directory scan for Test files.
* Relative to srcDir.
* Ignored if parameter jmeterTestFile file is given.
*
* @parameter
*/
private List<String> excludes;
/**
* Path under which JMeter test XML files are stored.
*
* @parameter expression="${jmeter.testfiles.basedir}"
* default-value="${basedir}/src/test/jmeter"
*/
private File srcDir;
/**
* Directory in which the reports are stored.
*
* @parameter expression="${jmeter.reports.dir}"
* default-value="${basedir}/target/jmeter-report"
*/
private File reportDir;
/**
* Whether or not to generate reports after measurement.
*
* @parameter default-value="true"
*/
private boolean enableReports;
/**
* Custom Xslt which is used to create the report.
*
* @parameter
*/
private File reportXslt;
/**
* Absolute path to JMeter default properties file.
* The default properties file is part of a JMeter installation and sets basic properties needed for running JMeter.
*
* @parameter expression="${jmeter.properties}"
* default-value="${basedir}/src/test/jmeter/jmeter.properties"
* @required
*/
private File jmeterDefaultPropertiesFile;
/**
* Absolute path to JMeter custom (test dependent) properties file.
*
* @parameter
*/
private File jmeterCustomPropertiesFile;
/**
* @parameter expression="${settings.localRepository}"
*/
private File repoDir;
/**
* JMeter Properties that override those given in jmeterProps
*
* @parameter
*/
@SuppressWarnings("rawtypes")
private Map jmeterUserProperties;
/**
* Use remote JMeter installation to run tests
*
* @parameter default-value=false
*/
private boolean remote;
/**
* Sets whether ErrorScanner should ignore failures in JMeter result file.
*
* @parameter expression="${jmeter.ignore.failure}" default-value=false
*/
private boolean jmeterIgnoreFailure;
/**
* Sets whether ErrorScanner should ignore errors in JMeter result file.
*
* @parameter expression="${jmeter.ignore.error}" default-value=false
*/
private boolean jmeterIgnoreError;
/**
* Absolute path to File to log results to.
*
* @parameter
*/
private String resultFileName;
/**
* @parameter expression="${project}"
* @required
*/
@SuppressWarnings("unused")
private MavenProject mavenProject;
/**
* Postfix to add to report file.
*
* @parameter default-value="-report.html"
*/
private String reportPostfix;
private File workDir;
private List<File> temporaryPropertyFiles = new ArrayList<File>();
private File jmeterLog;
private DateFormat fmt = new SimpleDateFormat("yyMMdd");
/**
* Run all JMeter tests.
*/
public void execute() throws MojoExecutionException, MojoFailureException {
initSystemProps();
try {
List<String> jmeterTestFiles = new ArrayList<String>();
List<String> results = new ArrayList<String>();
if(jmeterTestFile != null) {
jmeterTestFiles.add(jmeterTestFile.getName());
} else {
DirectoryScanner scanner = new DirectoryScanner();
scanner.setBasedir(srcDir);
scanner.setIncludes(includes == null ? new String[] { "**/*.jmx" } : includes.toArray(new String[] {}));
if (excludes != null) {
scanner.setExcludes(excludes.toArray(new String[] {}));
}
scanner.scan();
jmeterTestFiles.addAll(Arrays.asList(scanner.getIncludedFiles()));
}
for (String file : jmeterTestFiles) {
results.add(executeTest(new File(srcDir, file)));
}
if (this.enableReports) {
makeReport(results);
}
checkForErrors(results);
} finally {
for(File temporaryPropertyFile : temporaryPropertyFiles) {
temporaryPropertyFile.delete();
}
}
}
private void makeReport(List<String> results) throws MojoExecutionException {
try {
ReportTransformer transformer;
transformer = new ReportTransformer(getXslt());
getLog().info("Building JMeter Report.");
for (String resultFile : results) {
final String outputFile = toOutputFileName(resultFile);
getLog().info("transforming: " + resultFile + " to " + outputFile);
transformer.transform(resultFile, outputFile);
}
} catch (FileNotFoundException e) {
throw new MojoExecutionException("Error writing report file jmeter file.", e);
} catch (TransformerException e) {
throw new MojoExecutionException("Error transforming jmeter results", e);
} catch (IOException e) {
throw new MojoExecutionException("Error copying resources to jmeter results", e);
}
}
/**
* returns the fileName with the configured reportPostfix
*
* @param fileName the String to modify
* @return modified fileName
*/
private String toOutputFileName(String fileName) {
if (fileName.endsWith(".xml")) {
return fileName.replace(".xml", this.reportPostfix);
} else {
return fileName + this.reportPostfix;
}
}
private InputStream getXslt() throws IOException {
if (this.reportXslt == null) {
//if we are using the default report, also copy the images out.
IOUtils.copy(Thread.currentThread()
.getContextClassLoader()
.getResourceAsStream("reports/collapse.jpg"), new FileOutputStream(this.reportDir.getPath() + File.separator + "collapse.jpg"));
IOUtils.copy(Thread.currentThread()
.getContextClassLoader()
.getResourceAsStream("reports/expand.jpg"), new FileOutputStream(this.reportDir.getPath() + File.separator + "expand.jpg"));
return Thread.currentThread().getContextClassLoader().getResourceAsStream("reports/jmeter-results-detail-report_21.xsl");
} else {
return new FileInputStream(this.reportXslt);
}
}
private void checkForErrors(List<String> results) throws MojoExecutionException, MojoFailureException {
ErrorScanner scanner = new ErrorScanner(this.jmeterIgnoreError, this.jmeterIgnoreFailure);
try {
for (String file : results) {
if (scanner.scanForProblems(new File(file))) {
getLog().warn("There were test errors. See the jmeter logs for details");
}
}
} catch (IOException e) {
throw new MojoExecutionException("Can't read log file", e);
}
}
private void initSystemProps() throws MojoExecutionException {
workDir = new File("target" + File.separator + "jmeter");
workDir.mkdirs();
createTemporaryProperties();
resolveJmeterArtifact();
jmeterLog = new File(workDir, "jmeter.log");
try {
System.setProperty("log_file", jmeterLog.getCanonicalPath());
} catch (IOException e) {
throw new MojoExecutionException("Can't get canonical path for log file", e);
}
}
/**
* Resolve JMeter artifact, set necessary System Property.
*
* This mess is necessary because JMeter must load this info from a file.
* Loading resources from classpath won't work.
*/
private void resolveJmeterArtifact() {
//set search path for JMeter. JMeter loads function classes from this path.
System.setProperty("search_paths", repoDir.toString() + "/org/apache/jmeter/jmeter/2.4/jmeter-2.4.jar");
}
/**
* Create temporary property files and set necessary System Properties.
*
* This mess is necessary because JMeter must load this info from a file.
* Loading resources from classpath won't work.
*
* @throws org.apache.maven.plugin.MojoExecutionException
* Exception
*/
@SuppressWarnings("unchecked")
private void createTemporaryProperties() throws MojoExecutionException {
String jmeterTargetDir = File.separator + "target" + File.separator + "jmeter" + File.separator;
File saveServiceProps = new File(workDir, "saveservice.properties");
System.setProperty("saveservice_properties", jmeterTargetDir + saveServiceProps.getName());
temporaryPropertyFiles.add(saveServiceProps);
File upgradeProps = new File(workDir, "upgrade.properties");
System.setProperty("upgrade_properties", jmeterTargetDir + upgradeProps.getName());
temporaryPropertyFiles.add(upgradeProps);
for (File propertyFile : temporaryPropertyFiles) {
try {
FileWriter out = new FileWriter(propertyFile);
IOUtils.copy(Thread.currentThread().getContextClassLoader().getResourceAsStream(propertyFile.getName()), out);
out.flush();
out.close();
} catch (IOException e) {
throw new MojoExecutionException("Could not create temporary property file "+propertyFile.getName()+" in directory "+jmeterTargetDir, e);
}
}
}
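/*
 * Illustrative sketch of the general pattern used above (the resource name is just
 * an example): copy a classpath resource into a real file so a tool that only reads
 * from the file system can see it, then point a System property at that file.
 *
 *   File target = new File(workDir, "upgrade.properties");
 *   FileWriter out = new FileWriter(target);
 *   IOUtils.copy(Thread.currentThread().getContextClassLoader()
 *           .getResourceAsStream("upgrade.properties"), out);
 *   out.close();
 *   System.setProperty("upgrade_properties", target.getPath());
 */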
/**
* Executes a single JMeter test by building up a list of command line
* parameters to pass to JMeter.start().
*
* @param test JMeter test XML
* @return the report file names.
* @throws org.apache.maven.plugin.MojoExecutionException
* Exception
*/
private String executeTest(File test) throws MojoExecutionException {
try {
getLog().info("Executing test: " + test.getCanonicalPath());
if(resultFileName == null) {
resultFileName = reportDir.toString() + File.separator + test.getName().substring(0, test.getName().lastIndexOf(".")) + "-" + fmt.format(new Date()) + ".xml";
}
//delete file if it already exists
new File(resultFileName).delete();
List<String> argsTmp = Arrays.asList("-n", "-t",
test.getCanonicalPath(),
"-l", resultFileName,
"-p", jmeterDefaultPropertiesFile.toString(),
"-d", System.getProperty("user.dir"));
List<String> args = new ArrayList<String>();
args.addAll(argsTmp);
args.addAll(getUserProperties());
if(jmeterCustomPropertiesFile != null) {
args.add("-q");
args.add(jmeterCustomPropertiesFile.toString());
}
if (remote) {
args.add("-r");
}
// This mess is necessary because JMeter likes to use System.exit.
// We need to trap the exit call.
SecurityManager oldManager = System.getSecurityManager();
System.setSecurityManager(new SecurityManager() {
@Override
public void checkExit(int status) {
throw new ExitException(status);
}
@Override
public void checkPermission(Permission perm, Object context) {
}
@Override
public void checkPermission(Permission perm) {
}
});
UncaughtExceptionHandler oldHandler = Thread.getDefaultUncaughtExceptionHandler();
Thread.setDefaultUncaughtExceptionHandler(new UncaughtExceptionHandler() {
public void uncaughtException(Thread t, Throwable e) {
if (e instanceof ExitException && ((ExitException) e).getCode() == 0) {
return; // Ignore
}
getLog().error("Error in thread " + t.getName());
}
});
try {
// This mess is necessary because the only way to know when JMeter
// is done is to wait for its test end message in the log!
new JMeter().start(args.toArray(new String[] {}));
BufferedReader in = new BufferedReader(new FileReader(jmeterLog));
while (!checkForEndOfTest(in)) {
try {
Thread.sleep(1000);
} catch (InterruptedException e) {
break;
}
}
} catch (ExitException e) {
if (e.getCode() != 0) {
throw new MojoExecutionException("Test failed", e);
}
} finally {
System.setSecurityManager(oldManager);
Thread.setDefaultUncaughtExceptionHandler(oldHandler);
}
return resultFileName;
} catch (IOException e) {
throw new MojoExecutionException("Can't execute test", e);
}
}
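/*
 * Illustrative note (paths are hypothetical): the argument list built above mirrors
 * a plain non-GUI JMeter invocation, e.g.
 *
 *   jmeter -n -t src/test/jmeter/test.jmx -l target/jmeter-report/test-110101.xml
 *          -p src/test/jmeter/jmeter.properties -Jhost=localhost
 *
 * where -n runs without the GUI, -t names the test plan, -l the result file,
 * -p the default properties file, -d the JMeter home directory, -q an extra
 * properties file, -r enables remote runs and -J passes a single user property
 * (see getUserProperties()).
 */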
private boolean checkForEndOfTest(BufferedReader in) throws MojoExecutionException {
boolean testEnded = false;
try {
String line;
while ((line = in.readLine()) != null) {
if (line.contains("Test has ended")) {
testEnded = true;
break;
}
}
} catch (IOException e) {
throw new MojoExecutionException("Can't read log file", e);
}
return testEnded;
}
@SuppressWarnings("unchecked")
private ArrayList<String> getUserProperties() {
ArrayList<String> propsList = new ArrayList<String>();
if (jmeterUserProperties == null) {
return propsList;
}
Set<String> keySet = (Set<String>) jmeterUserProperties.keySet();
for (String key : keySet) {
propsList.add("-J");
propsList.add(key + "=" + jmeterUserProperties.get(key));
}
return propsList;
}
private static class ExitException extends SecurityException {
private static final long serialVersionUID = 5544099211927987521L;
private final int _rc;
public ExitException(int rc) {
super(Integer.toString(rc));
_rc = rc;
}
public int getCode() {
return _rc;
}
}
}
|
import hzarrabi_CSCI201_Assignment3.CantAddShipException;
import java.awt.BorderLayout;
import java.awt.Color;
import java.awt.Component;
import java.awt.Dimension;
import java.awt.Event;
import java.awt.FlowLayout;
import java.awt.Graphics;
import java.awt.GridLayout;
import java.awt.Image;
import java.awt.MenuBar;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.KeyEvent;
import java.awt.event.MouseEvent;
import java.awt.event.MouseListener;
import java.awt.image.BufferedImage;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.PrintWriter;
import java.net.ServerSocket;
import java.net.Socket;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Random;
import java.util.Scanner;
import javax.imageio.ImageIO;
import javax.sound.sampled.AudioInputStream;
import javax.sound.sampled.AudioSystem;
import javax.sound.sampled.Clip;
import javax.sound.sampled.LineUnavailableException;
import javax.sound.sampled.UnsupportedAudioFileException;
import javax.swing.BorderFactory;
import javax.swing.Box;
import javax.swing.BoxLayout;
import javax.swing.ButtonGroup;
import javax.swing.Icon;
import javax.swing.ImageIcon;
import javax.swing.JButton;
import javax.swing.JComboBox;
import javax.swing.JComponent;
import javax.swing.JDialog;
import javax.swing.JFileChooser;
import javax.swing.JFrame;
import javax.swing.JLabel;
import javax.swing.JMenu;
import javax.swing.JMenuBar;
import javax.swing.JMenuItem;
import javax.swing.JPanel;
import javax.swing.JRadioButton;
import javax.swing.JScrollPane;
import javax.swing.JTextArea;
import javax.swing.KeyStroke;
import javax.swing.filechooser.FileNameExtensionFilter;
import javax.swing.text.html.BlockView;
import javax.swing.Timer;
public class BattleShipServer extends JFrame
{
private JComponent leftGrid[][]=new JComponent[11][11];
private JComponent rightGrid[][]=new JComponent[11][11];
private char compGrid[][]=new char[10][10];//this is the one we click on
private char userGrid[][]=new char[10][10];//this is the one the computer guesses
JPanel left;
JPanel right;
String playersAim="N/A";
String computersAim="N/A";
JButton selectFileButton=new JButton("Select File...");
JLabel fileName=new JLabel("File: ");
JButton startButton = new JButton("START");
//for the ship placement
int carriers=0;
int battlships=0;
int cruisers=0;
int destroyers=0;
//ships hit
int playerCarriers=0;
int playerBattlships=0;
int playerCruisers=0;
int playerDestroyers=0;
int compCarriers=0;
int compBattlships=0;
int compCruisers=0;
int compDestroyers=0;
//bools for different modes of game
boolean selectedFile=false;
boolean editMode=true;
//images
private ImageIcon wave=new ImageIcon("wave.jpg");
private ImageIcon miss=new ImageIcon("x.jpg");
private ImageIcon hit=new ImageIcon("hit.jpg");
private ImageIcon aship=new ImageIcon("AShip.jpg");
private ImageIcon bship=new ImageIcon("BShip.jpg");
private ImageIcon cship=new ImageIcon("CShip.jpg");
private ImageIcon dship=new ImageIcon("DShip.jpg");
//int for how many hit each side had taken
int compHits=0;//so if this equals 16 that means that the USER won
int userHits=0;
//menus
JMenuBar menuBar = new JMenuBar();
JMenu fileMenu=new JMenu("Info");
JMenuItem howToMenu = new JMenuItem("How To");
JMenuItem aboutMenu=new JMenuItem("About");
//timer and log
JLabel timeLabel= new JLabel("0:15");
int seconds=15;
Timer time;
boolean playerShot=false;//the boolean that indicates if the player shot
boolean compShot=false;
int computerSeconds=12;//this will be the random time assigned to the computer's turn
JTextArea log =new JTextArea();
JScrollPane scroll = new JScrollPane (log, JScrollPane.VERTICAL_SCROLLBAR_ALWAYS, JScrollPane.HORIZONTAL_SCROLLBAR_NEVER);
JPanel south=new JPanel(new BorderLayout());//holds buttons to file and start
int round=1;
//animations
BufferedImage wave1;
BufferedImage wave2;
BufferedImage expl1;
BufferedImage expl2;
BufferedImage expl3;
BufferedImage expl4;
BufferedImage expl5;
BufferedImage[] expl = new BufferedImage[5];
BufferedImage splash1;
BufferedImage splash2;
BufferedImage splash3;
BufferedImage splash4;
BufferedImage splash5;
BufferedImage splash6;
BufferedImage splash7;
BufferedImage[] splash = new BufferedImage[7];
BufferedImage imageA;
BufferedImage imageB;
BufferedImage imageC;
BufferedImage imageD;
BufferedImage imageM;
BufferedImage imageQ;
BufferedImage imageX;
SoundLibrary sl = new SoundLibrary();
Socket s;
String myName;
BufferedReader br;
PrintWriter pw;
int opponentShips;
public BattleShipServer(Socket s, String name)
{
this.s=s;
myName=name;
opponentShips=0;
try
{
br=new BufferedReader(new InputStreamReader(this.s.getInputStream()));
pw = new PrintWriter(this.s.getOutputStream(), true);
}
catch (IOException e){System.out.println("something wrong with instantiating br");}
new Thread()
{
public void run()
{
while(true)
{
System.out.println("Press enter to continue...");
Scanner keyboard = new Scanner(System.in);
keyboard.nextLine();
pw.println("hello:sdfds");
}
}
}.start();
new Thread()
{
public void run()
{
while(true)
{
try
{
String hello=br.readLine();
opponentCommand(hello);
}
catch (IOException e){System.out.println("problem with br reading in");}
}
}
}.start();
load();
fillUserGrid();//this will instantiate userArray with X's
setTitle("BattleShip");
setLayout(new BorderLayout());
setSize(690,460);
setLocationRelativeTo(null);
setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
JPanel north=new JPanel(new FlowLayout(FlowLayout.CENTER));
north.setAlignmentX(100);
north.add(new JLabel(myName+" "));
north.add(timeLabel);
north.add(new JLabel(" COMPUTER"));
add(north,BorderLayout.NORTH);
JPanel center=new JPanel(new FlowLayout());//center holds the left and right grids
left=new JPanel(new GridLayout(11, 11));
left.setBorder(BorderFactory.createLineBorder(Color.BLACK));
setLeftGrid();
right=new JPanel(new GridLayout(11, 11));
right.setBorder(BorderFactory.createLineBorder(Color.BLACK));
setRightGrid();
gridLabelListener();
center.add(left);
center.add(right);
add(center,BorderLayout.CENTER);
//JPanel south=new JPanel(new BorderLayout());//holds buttons to file and start
JPanel southLeft=new JPanel(new FlowLayout(FlowLayout.LEFT));
JPanel southRight=new JPanel(new FlowLayout(FlowLayout.LEFT));
southRight.add(selectFileButton);
southRight.add(fileName);
southRight.add(startButton);
startButton.setEnabled(false);
south.add(southLeft,BorderLayout.WEST);
south.add(southRight,BorderLayout.EAST);
add(south,BorderLayout.SOUTH);
selectFileListener();
startButtonListener();
JMenuBar menuBar = new JMenuBar();
setJMenuBar(menuBar);
menuBar.add(fileMenu);
fileMenu.setMnemonic('I');
fileMenu.add(howToMenu);
howToMenu.setAccelerator(KeyStroke.getKeyStroke(KeyEvent.VK_H, Event.CTRL_MASK));
howToMenu.setMnemonic('h');
fileMenu.add(aboutMenu);
aboutMenu.setAccelerator(KeyStroke.getKeyStroke(KeyEvent.VK_A, Event.CTRL_MASK));
aboutMenu.setMnemonic('a');
menuListeners();
left.setBackground(Color.cyan);
right.setBackground(Color.cyan);
setResizable(false);
setVisible(true);
}
//takes in commands from the opponent through the socket and interprets them (protocol)
public void opponentCommand(String command)
{
String[] theCommand = command.split(":");
if(theCommand[0].equals("hello")) System.out.println("hello");
//receiving opponent's name
else if(theCommand[0].equals("name"));
//receiving opponents coordinates of where to place ships
else if(theCommand[0].equals("placeCoor"))
{
System.out.println(theCommand[1]);
System.out.println("The length of the string is: "+theCommand[1].length());
char tempArray[][]= new char[10][10];
//trying to turn string back into char array
for(int i=0;i<10;i++)
{
String upToNCharacters =theCommand[1].substring(i*10,(i*10)+10);
System.out.println("the row is:"+ upToNCharacters);
tempArray[i]=upToNCharacters.toCharArray();
}
System.out.println("pringing char array");
for(int i =0;i<10; i++){
for (int j = 0; j < 10; j++) {//Iterate rows
System.out.print(tempArray[i][j]);//Print colmns
}
System.out.println("");
}
//copying the opponents grid to compGrid
compGrid=tempArray.clone();
opponentShips++;//this counts how many ships the opponent has placed down
//if both players have placed all their ships
if(opponentShips==5 && carriers+battlships+cruisers+destroyers==5)
{
startButton.setEnabled(true);
}
}
//opponent removed coordinates
else if(theCommand[0].equals("removeCoor"))
{
System.out.println(theCommand[1]);
System.out.println("The length of the string is: "+theCommand[1].length());
char tempArray[][]= new char[10][10];
//trying to turn string back into char array
for(int i=0;i<10;i++)
{
String upToNCharacters =theCommand[1].substring(i*10,(i*10)+10);
System.out.println("the row is:"+ upToNCharacters);
tempArray[i]=upToNCharacters.toCharArray();
}
System.out.println("pringing char array");
for(int i =0;i<10; i++){
for (int j = 0; j < 10; j++) {//Iterate rows
System.out.print(tempArray[i][j]);//Print colmns
}
System.out.println("");
}
//copying the opponents grid to compGrid
compGrid=tempArray.clone();
opponentShips--;//the opponent removed a ship, so decrease the count
//disabling start button in case it was enabled
startButton.setEnabled(false);
}
//receiving opponents coordinates of where they attacked
else if(theCommand[0].equals("attackCoor"));
//resetting the time because both players guessed. TODO: this may be unnecessary because when the other person guesses we send attack coordinates (and reset)
else if(theCommand[0].equals("resetTime"));
//resetting game because you won
else if(theCommand[0].equals("reset"));
}
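/*
 * Illustrative sketch of the wire protocol parsed above (the sending side shown
 * here is hypothetical): every message is "command:payload", and a 10x10 grid is
 * flattened row by row into a single 100-character payload.
 *
 *   StringBuilder flat = new StringBuilder();
 *   for (char[] row : userGrid) flat.append(row); // 10 rows x 10 chars = 100 chars
 *   pw.println("placeCoor:" + flat);
 *   // the receiver rebuilds each row with substring(i * 10, i * 10 + 10), as above
 */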
//loads images and sounds etc
private void load()
{
try
{
wave1=ImageIO.read(new File("4Resources/animatedWater/water1.png"));
wave2=ImageIO.read(new File("4Resources/animatedWater/water2.png"));
for(int i=0;i<5;i++)
{
expl[i]=ImageIO.read(new File("4Resources/explosion/expl"+(i+1)+".png"));
}
for(int i=0;i<7;i++)
{
splash[i]=ImageIO.read(new File("4Resources/splash/splash"+(i+1)+".png"));
}
imageA=ImageIO.read(new File("4Resources/Tiles/A.png"));
imageB=ImageIO.read(new File("4Resources/Tiles/B.png"));
imageC=ImageIO.read(new File("4Resources/Tiles/C.png"));
imageD=ImageIO.read(new File("4Resources/Tiles/D.png"));
imageM=ImageIO.read(new File("4Resources/Tiles/M.png"));
imageQ=ImageIO.read(new File("4Resources/Tiles/Q.png"));
imageX=ImageIO.read(new File("4Resources/Tiles/X.png"));
}
catch(IOException ioe)
{
}
}
//timer action
private void timerAction()
{
timeLabel.setText("0:15");//reseting the label for when a players makes their decision
//this is the actionlistener for the timer
ActionListener timePerformer = new ActionListener() {
public void actionPerformed(ActionEvent evt) {
seconds--;
if(seconds<10)//if our time is 9 or less we have one digit seconds so we need to account for that
{
timeLabel.setText("0:0"+seconds);
if(seconds==3) log.append("Warning - 3 seconds left in the round!\n");
}
else timeLabel.setText("0:"+seconds);
if(seconds==0)
{
seconds=15;
timeLabel.setText("0:15");
if(playerShot==false)log.append("You ran out of time!\n");//player ran out of time
if(compShot==false) log.append("Computer ran out of time\n");//computer ran out of time
compShot=false;
playerShot=false;
round++;
log.append("Round "+ round + "\n");
int randomNum = new Random().nextInt((10 - 0) + 1) + 0;
//computer shoots with 8-14 seconds still left on the clock (~64% chance)
if(randomNum>=4)computerSeconds=new Random().nextInt((14 - 8) + 1) + 8;
//computer shoots with only 0-7 seconds left (~27% chance)
else if(randomNum<4 && randomNum>=1)computerSeconds=new Random().nextInt((7 - 0) + 1) + 0;
//computer runs out of time this round (~9% chance); -1 is never reached by the countdown
else computerSeconds=-1;
}
else
{
//when computer "decides" to make turn
if(compShot==false && computerSeconds==seconds)
{
compShooter();
}
}
}
};
//instantiating the timer to perform every 1000 milliseconds (or 1 sec)
time=new Timer(1000,timePerformer);
time.start();
}
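/*
 * Illustrative worked example (values follow directly from the code above):
 * randomNum is uniform over 0..10, so the three branches fire with probability
 * 7/11 (~64%), 3/11 (~27%) and 1/11 (~9%). With computerSeconds == 12 the computer
 * shoots once the countdown reaches 12, i.e. 15 - 12 = 3 seconds into the round;
 * computerSeconds == -1 is never reached, so the computer simply times out.
 */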
//action listeners for the menus
private void menuListeners()
{
howToMenu.addActionListener(new ActionListener()
{
@Override
public void actionPerformed(ActionEvent e)
{
new aboutWindow();
}
});
aboutMenu.addActionListener(new ActionListener()
{
@Override
public void actionPerformed(ActionEvent e)
{
new aboutWindow2();
}
});
}
//fills the userGrid array with x's
private void fillUserGrid()
{
for(int i=0;i<10;i++)
{
for(int j=0;j<10;j++)
{
userGrid[i][j]='X';
}
}
}
//makes the ?s for the left grid
private void setLeftGrid()
{
for (int j=0;j<11;j++)
{
for(int i=0;i<11;i++)
{
leftGrid[i][j]=new GridLabel(i,j+1);
if(i>0 && j<10)
{
//leftGrid[i][j].setIcon(wave);//initialize all question marks initially
}
}
}
((GridLabel)leftGrid[0][0]).add(new JLabel("A"));
((GridLabel)leftGrid[0][0]).press=false;
((GridLabel)leftGrid[0][1]).add(new JLabel("B"));
((GridLabel)leftGrid[0][1]).press=false;
((GridLabel)leftGrid[0][2]).add(new JLabel("C"));
((GridLabel)leftGrid[0][2]).press=false;
((GridLabel)leftGrid[0][3]).add(new JLabel("D"));
((GridLabel)leftGrid[0][3]).press=false;
((GridLabel)leftGrid[0][4]).add(new JLabel("E"));
((GridLabel)leftGrid[0][4]).press=false;
((GridLabel)leftGrid[0][5]).add(new JLabel("F"));
((GridLabel)leftGrid[0][5]).press=false;
((GridLabel)leftGrid[0][6]).add(new JLabel("G"));
((GridLabel)leftGrid[0][6]).press=false;
((GridLabel)leftGrid[0][7]).add(new JLabel("H"));
((GridLabel)leftGrid[0][7]).press=false;
((GridLabel)leftGrid[0][8]).add(new JLabel("I"));
((GridLabel)leftGrid[0][8]).press=false;
((GridLabel)leftGrid[0][9]).add(new JLabel("J"));
((GridLabel)leftGrid[0][9]).press=false;
((GridLabel)leftGrid[0][10]).add(new JLabel(" "));
((GridLabel)leftGrid[0][10]).press=false;
((GridLabel)leftGrid[1][10]).add(new JLabel("1"));
((GridLabel)leftGrid[1][10]).press=false;
((GridLabel)leftGrid[2][10]).add(new JLabel("2"));
((GridLabel)leftGrid[2][10]).press=false;
((GridLabel)leftGrid[3][10]).add(new JLabel("3"));
((GridLabel)leftGrid[3][10]).press=false;
((GridLabel)leftGrid[4][10]).add(new JLabel("4"));
((GridLabel)leftGrid[4][10]).press=false;
((GridLabel)leftGrid[5][10]).add(new JLabel("5"));
((GridLabel)leftGrid[5][10]).press=false;
((GridLabel)leftGrid[6][10]).add(new JLabel("6"));
((GridLabel)leftGrid[6][10]).press=false;
((GridLabel)leftGrid[7][10]).add(new JLabel("7"));
((GridLabel)leftGrid[7][10]).press=false;
((GridLabel)leftGrid[8][10]).add(new JLabel("8"));
((GridLabel)leftGrid[8][10]).press=false;
((GridLabel)leftGrid[9][10]).add(new JLabel("9"));
((GridLabel)leftGrid[9][10]).press=false;
((GridLabel)leftGrid[10][10]).add(new JLabel("10"));
((GridLabel)leftGrid[10][10]).press=false;
//adding the labels
for (int j=0;j<11;j++)
{
for(int i=0;i<11;i++)
{
left.add(leftGrid[i][j]);
}
}
}
//makes the ?s for the right grid
private void setRightGrid()
{
for (int j=0;j<11;j++)
{
for(int i=0;i<11;i++)
{
rightGrid[i][j]=new GridLabel(i,j+1);
if(i>0 && j<10)
{
//rightGrid[i][j].setIcon(wave);//initialize all question marks initially
}
}
}
((GridLabel)rightGrid[0][0]).add(new JLabel("A"));
((GridLabel)rightGrid[0][0]).press=false;
((GridLabel)rightGrid[0][1]).add(new JLabel("B"));
((GridLabel)rightGrid[0][1]).press=false;
((GridLabel)rightGrid[0][2]).add(new JLabel("C"));
((GridLabel)rightGrid[0][2]).press=false;
((GridLabel)rightGrid[0][3]).add(new JLabel("D"));
((GridLabel)rightGrid[0][3]).press=false;
((GridLabel)rightGrid[0][4]).add(new JLabel("E"));
((GridLabel)rightGrid[0][4]).press=false;
((GridLabel)rightGrid[0][5]).add(new JLabel("F"));
((GridLabel)rightGrid[0][5]).press=false;
((GridLabel)rightGrid[0][6]).add(new JLabel("G"));
((GridLabel)rightGrid[0][6]).press=false;
((GridLabel)rightGrid[0][7]).add(new JLabel("H"));
((GridLabel)rightGrid[0][7]).press=false;
((GridLabel)rightGrid[0][8]).add(new JLabel("I"));
((GridLabel)rightGrid[0][8]).press=false;
((GridLabel)rightGrid[0][9]).add(new JLabel("J"));
((GridLabel)rightGrid[0][9]).press=false;
((GridLabel)rightGrid[0][10]).add(new JLabel(" "));
((GridLabel)rightGrid[0][10]).press=false;
((GridLabel)rightGrid[1][10]).add(new JLabel("1"));
((GridLabel)rightGrid[1][10]).press=false;
((GridLabel)rightGrid[2][10]).add(new JLabel("2"));
((GridLabel)rightGrid[2][10]).press=false;
((GridLabel)rightGrid[3][10]).add(new JLabel("3"));
((GridLabel)rightGrid[3][10]).press=false;
((GridLabel)rightGrid[4][10]).add(new JLabel("4"));
((GridLabel)rightGrid[4][10]).press=false;
((GridLabel)rightGrid[5][10]).add(new JLabel("5"));
((GridLabel)rightGrid[5][10]).press=false;
((GridLabel)rightGrid[6][10]).add(new JLabel("6"));
((GridLabel)rightGrid[6][10]).press=false;
((GridLabel)rightGrid[7][10]).add(new JLabel("7"));
((GridLabel)rightGrid[7][10]).press=false;
((GridLabel)rightGrid[8][10]).add(new JLabel("8"));
((GridLabel)rightGrid[8][10]).press=false;
((GridLabel)rightGrid[9][10]).add(new JLabel("9"));
((GridLabel)rightGrid[9][10]).press=false;
((GridLabel)rightGrid[10][10]).add(new JLabel("10"));
((GridLabel)rightGrid[10][10]).press=false;
//adding the labels
for (int j=0;j<11;j++)
{
for(int i=0;i<11;i++)
{
right.add(rightGrid[i][j]);
}
}
}
private String getCharForNumber2(int i) {
return i > 0 && i < 27 ? String.valueOf((char)(i + 64)) : null;
}
//action listener for the gridlabels
private void gridLabelListener()
{
//listener for right grid
for(int i=0;i<11;i++)
{
for (int j=0;j <11; j++)
{
final int i1=i;
final int j1=j;
rightGrid[i][j].addMouseListener(new MouseListener()
{
public void mouseClicked(MouseEvent e)
{
if(((GridLabel)rightGrid[i1][j1]).press)
{
if(editMode==true || playerShot==true)
{
//do nothing to the right grid in edit mode or not the players turn!!
}
else//when we're in playing mode then we want to play!!!!
{
if(compGrid[((GridLabel)rightGrid[i1][j1]).x-1][((GridLabel)rightGrid[i1][j1]).y-1]!='X' && compGrid[((GridLabel)rightGrid[i1][j1]).x-1][((GridLabel)rightGrid[i1][j1]).y-1]!='O')//you hit a ship!!
{
String label=Character.toString(compGrid[((GridLabel)rightGrid[i1][j1]).x-1][((GridLabel)rightGrid[i1][j1]).y-1]);
((GridLabel)rightGrid[i1][j1]).explode('X', true);
char theChar=compGrid[((GridLabel)rightGrid[i1][j1]).x-1][((GridLabel)rightGrid[i1][j1]).y-1];
compGrid[((GridLabel)rightGrid[i1][j1]).x-1][((GridLabel)rightGrid[i1][j1]).y-1]='O';
compHits++;
Boolean append=true;
String theShip="";
if(theChar=='A')
{
playerCarriers++;
theShip="AirCraft";
if(playerCarriers==5)
{
log.append("Player sank an AircraftCarrier!\n");
playerCarriers++;
append=false;
}
}
else if(theChar=='B')
{
playerBattlships++;
theShip="BattleShip";
if(playerBattlships==4)
{
log.append("Player sank a BattleShip!\n");
playerBattlships++;
append=false;
}
}
else if(theChar=='C')
{
playerCruisers++;
theShip="Carrier";
if(playerCruisers==3)
{
log.append("Player sank a Cruiser!\n");
playerCruisers++;
append=false;
}
}
else if(theChar=='D')
{
playerDestroyers++;
theShip="Destroyer";
if(playerDestroyers==2)
{
log.append("Player sank a Carrier!\n");
playerDestroyers=0;
append=false;
}
}
String theSecond="0:";
if(seconds<10)theSecond="0:0";
else theSecond="0:";
if(append)log.append("Player hit "+getCharForNumber2(j1+1)+i1+" and hit a "+theShip+"!("+theSecond+seconds+")\n");
playersAim= getCharForNumber2(((GridLabel)rightGrid[i1][j1]).y)+((GridLabel)rightGrid[i1][j1]).x;
playerShot=true;
if(compHits>=16)
{
time.stop();
new winnerWindow("You");
}
else
{
playerShot=true;//making it the computer's turn now, player clicks disabled
if(compShot==true)//if computer has already aimed new round
{
seconds=15;//reseting the timer
timeLabel.setText("0:15");
round++;
log.append("Round "+round+"\n");
int randomNum = new Random().nextInt((10 - 0) + 1) + 0;
//within 10 seconds(60% chance)
if(randomNum>=4)computerSeconds=new Random().nextInt((14 - 8) + 1) + 8;//15-8 seconds
//11-25 seconds(30% chance)
else if(randomNum<4 && randomNum>=1)computerSeconds=new Random().nextInt((7 - 0) + 1) + 0;
//>25 seconds (20% chance)
else computerSeconds=-1;//the computer won't fire this round, so it will run out of time
//compShooter();//if we haven't won then the computer shoots
compShot=false;
playerShot=false;
}
}
}
else if(compGrid[((GridLabel)rightGrid[i1][j1]).x-1][((GridLabel)rightGrid[i1][j1]).y-1]=='X')//you did miss
{
((GridLabel)rightGrid[i1][j1]).explode('M', true);
compGrid[((GridLabel)rightGrid[i1][j1]).x-1][((GridLabel)rightGrid[i1][j1]).y-1]='O';
playersAim= getCharForNumber2(((GridLabel)rightGrid[i1][j1]).y)+((GridLabel)rightGrid[i1][j1]).x;
String theTime="0:";
if(seconds<10) theTime="0:0";
log.append("You missed!("+theTime+seconds+")\n");
playerShot=true;//player shot so make true
if(compShot==true)//if computer has already aimed new round
{
round++;
log.append("Round "+round+"\n");
seconds=15;//reseting the timer
timeLabel.setText("0:15");
int randomNum = new Random().nextInt((10 - 0) + 1) + 0;
//within 10 seconds(60% chance)
if(randomNum>=4)computerSeconds=new Random().nextInt((14 - 8) + 1) + 8;//15-8 seconds
//11-25 seconds(30% chance)
else if(randomNum<4 && randomNum>=1)computerSeconds=new Random().nextInt((7 - 0) + 1) + 0;
//>25 seconds (20% chance)
else computerSeconds=-1;//the computer won't fire this round, so it will run out of time
//compShooter();//if we haven't won then the computer shoots
compShot=false;
playerShot=false;
}
}
else log.append("You've already aimed here! Aim again!\n");
}
}
}
public void mouseEntered(MouseEvent e){}
public void mouseExited(MouseEvent e){}
public void mousePressed(MouseEvent e){}
public void mouseReleased(MouseEvent e){}
});
}
}
//listener for left grid
for(int i=0;i<11;i++)
{
for (int j=0;j <11; j++)
{
final int i1=i;
final int j1=j;
leftGrid[i][j].addMouseListener(new MouseListener()
{
public void mouseClicked(MouseEvent e)
{
if(((GridLabel)leftGrid[i1][j1]).press)
{
if(editMode==true)//we only want this functionality if we are in edit mode
{
if(userGrid[((GridLabel)leftGrid[i1][j1]).x-1][((GridLabel)leftGrid[i1][j1]).y-1]=='X')//if the coordinate has no ship placed
{
if(carriers+battlships+cruisers+destroyers<5)//if we still have ships to place open the window
new shipPlacerWindow(((GridLabel)leftGrid[i1][j1]).x,((GridLabel)leftGrid[i1][j1]).y);
}
else
{
shipDeleter(((GridLabel)leftGrid[i1][j1]).x-1, ((GridLabel)leftGrid[i1][j1]).y-1);
}
}
else//if we are in playing mode do this
{
//we shouldn't be able to do anything to the left grid, but I'll handle this branch just in case
}
}
}
public void mouseEntered(MouseEvent e){}
public void mouseExited(MouseEvent e){}
public void mousePressed(MouseEvent e){}
public void mouseReleased(MouseEvent e){}
});
}
}
}
//this is the function for the computer guessing coordinates
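//it picks a random cell; on a hit it updates the ship counters and the log, on a miss it marks the cell,
//and if the cell was already tried it simply recurses and picks again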
private void compShooter()
{
Random rand=new Random();
int x=rand.nextInt(9 - 0 + 1) + 0;
int y=rand.nextInt(9 - 0 + 1) + 0;
if(userGrid[x][y]!='X' && userGrid[x][y]!='O')//if comp hits a target
{
((GridLabel)leftGrid[x+1][y]).explode('X', true);
char theChar=userGrid[x][y];
userGrid[x][y]='O';//marking that computer shot here
userHits++;
computersAim= getCharForNumber2(y+1)+(x+1);
Boolean append=true;
String theShip="";
if(theChar=='A')
{
compCarriers++;
theShip="AirCraft";
if(compCarriers==5)
{
log.append("Computer sank an AircraftCarrier!\n");
compCarriers++;
append=false;
}
}
else if(theChar=='B')
{
compBattlships++;
theShip="BattleShip";
if(compBattlships==4)
{
log.append("Computer sank a BattleShip!\n");
compBattlships++;
append=false;
}
}
else if(theChar=='C')
{
compCruisers++;
theShip="Carrier";
if(compCruisers==3)
{
log.append("Computer sank a Cruiser!\n");
compCruisers++;
append=false;
}
}
else if(theChar=='D')
{
compDestroyers++;
theShip="Destroyer";
if(compDestroyers==2)
{
log.append("Computer sank a Carrier!\n");
compDestroyers=0;
append=false;
}
}
String theSecond="0:";
if(seconds<10)theSecond="0:0";
else theSecond="0:";
if(append)log.append("Computer hit "+getCharForNumber2(y+1)+(x+1)+" and hit a "+theShip+"!("+theSecond+seconds+")\n");
compShot=true;
if(userHits==16)//if the computer has hit all ships
{
time.stop();
new winnerWindow("Computer");
}
else
{
compShot=true;//player's turn otherwise
if(playerShot==true)//if the player has shot too
{
seconds=15;
timeLabel.setText("0:15");
int randomNum = new Random().nextInt((10 - 0) + 1) + 0;
//within 10 seconds(60% chance)
if(randomNum>=4)computerSeconds=new Random().nextInt((14 - 8) + 1) + 8;//15-8 seconds
//11-25 seconds(30% chance)
else if(randomNum<4 && randomNum>=1)computerSeconds=new Random().nextInt((7 - 0) + 1) + 0;
//>25 seconds (20% chance)
else computerSeconds=-1;//the computer won't fire this round, so it will run out of time
//compShooter();//if we haven't won then the computer shoots
compShot=false;
playerShot=false;
round++;
log.append("Round "+ round +"\n");
}
}
}
else if(userGrid[x][y]=='X')//if the computer misses
{
userGrid[x][y]='O';
((GridLabel)leftGrid[x+1][y]).explode('M',true);
computersAim= getCharForNumber2(y+1)+(x+1);
compShot=true;
String theTime="0:";
if(seconds<10) theTime="0:0";
log.append("Computer missed!("+theTime+seconds+")\n");
if(playerShot==true)//if the player has shot too
{
seconds=15;
timeLabel.setText("0:15");
int randomNum = new Random().nextInt((10 - 0) + 1) + 0;
//within 10 seconds(60% chance)
if(randomNum>=4)computerSeconds=new Random().nextInt((14 - 8) + 1) + 8;//15-8 seconds
//11-25 seconds(30% chance)
else if(randomNum<4 && randomNum>=1)computerSeconds=new Random().nextInt((7 - 0) + 1) + 0;
//>25 seconds (20% chance)
else computerSeconds=-1;//the computer won't fire this round, so it will run out of time
//compShooter();//if we haven't won then the computer shoots
compShot=false;
playerShot=false;
round++;
log.append("Round "+ round +"\n");
}
}
else//computer hit's something already hit
{
compShooter();
}
}
//ship deleter
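//determines which ship (and how long it is) from the clicked cell, clears every matching cell by walking
//outward in all four directions, then sends the updated grid to the opponent and disables the start button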
private void shipDeleter(int x, int y)
{
int range=0;
char shipWeWant='X';
if(userGrid[x][y]=='A')
{
range=5;
shipWeWant='A';
carriers--;
}
else if(userGrid[x][y]=='B')
{
range=4;
shipWeWant='B';
battlships--;
}
else if(userGrid[x][y]=='C')
{
range=3;
shipWeWant='C';
cruisers--;
}
else if(userGrid[x][y]=='D')
{
range=2;
shipWeWant='D';
destroyers--;
}
else if(userGrid[x][y]=='E')
{
range=2;
shipWeWant='E';
destroyers--;
}
//checking north
if(range<=y+1)//if there is room in the north for the full ship check for it
{
for(int i=0;i<range;i++)
{
if(userGrid[x][y-i]==shipWeWant)//if it is the ship we want
{
userGrid[x][y-i]='X';
leftGrid[x+1][y-i].removeAll();
((GridLabel)leftGrid[x+1][y-i]).add(new JLabel(new ImageIcon(imageQ)));
}
else break;//if it's not stop searching the north
}
}
else //if there isn't room we still want to check in case we pressed the middle of a ship, but we don't want to go out of bounds
{
for(int i=0;i<y+1;i++)
{
if(userGrid[x][y-i]==shipWeWant)//if it is the ship we want
{
userGrid[x][y-i]='X';
leftGrid[x+1][y-i].removeAll();
((GridLabel)leftGrid[x+1][y-i]).add(new JLabel(new ImageIcon(imageQ)));
}
else break;
}
}
//checking south
if(range+(y+1)<11)
{
for(int i=0;i<range;i++)
{
if(userGrid[x][y+i]==shipWeWant)//if it is the ship we want
{
userGrid[x][y+i]='X';
leftGrid[x+1][y+i].removeAll();
((GridLabel)leftGrid[x+1][y+i]).add(new JLabel(new ImageIcon(imageQ)));
}
}
}
else //if there isn't room we still want to check in case we pressed the middle of a ship, but we don't want to go out of bounds
{
for(int i=0;i<10-y;i++)
{
if(userGrid[x][y+i]==shipWeWant)//if it is the ship we want
{
userGrid[x][y+i]='X';
leftGrid[x+1][y+i].removeAll();
((GridLabel)leftGrid[x+1][y+i]).add(new JLabel(new ImageIcon(imageQ)));
}
}
}
//checking west
if(range<=x+1)
{
for(int i=0;i<range;i++)
{
if(userGrid[x-i][y]==shipWeWant)//if it is the ship we want
{
userGrid[x-i][y]='X';
leftGrid[x-i+1][y].removeAll();
((GridLabel)leftGrid[x-i+1][y]).add(new JLabel(new ImageIcon(imageQ)));
}
}
}
else //if there isn't room we still want to check in case we pressed the middle of a ship, but we don't want to go out of bounds
{
for(int i=0;i<x+1;i++)
{
if(userGrid[x-i][y]==shipWeWant)//if it is the ship we want
{
userGrid[x-i][y]='X';
leftGrid[x-i+1][y].removeAll();
((GridLabel)leftGrid[x-i+1][y]).add(new JLabel(new ImageIcon(imageQ)));
}
}
}
//checking east
if(range+x<11)
{
for(int i=0;i<range;i++)
{
if(userGrid[x+i][y]==shipWeWant)//if it is the ship we want
{
userGrid[x+i][y]='X';
leftGrid[x+i+1][y].removeAll();
((GridLabel)leftGrid[x+i+1][y]).add(new JLabel(new ImageIcon(imageQ)));
}
}
}
else //if there isn't room we still want to check in case we pressed the middle of a ship, but we don't want to go out of bounds
{
for(int i=0;i<10-x;i++)
{
if(userGrid[x+i][y]==shipWeWant)//if it is the ship we want
{
userGrid[x+i][y]='X';
leftGrid[x+i+1][y].removeAll();
((GridLabel)leftGrid[x+i+1][y]).add(new JLabel(new ImageIcon(imageQ)));
}
}
}
//sending new grid with removed ship to opponent
StringBuilder builder = new StringBuilder();
for(int i = 0; i < 10; i++)
{
for(int j = 0; j <10; j++)
{
builder.append(userGrid[i][j]);
}
}
System.out.println(builder.toString());
for(int i =0;i<10; i++){
for (int j = 0; j < 10; j++) {//Iterate rows
System.out.print(userGrid[i][j]);
}
System.out.println("");
}
pw.println("removeCoor:"+builder.toString());
startButton.setEnabled(false);//if you delete a ship you can't start so disabling the button
}
private void startButtonListener()
{
startButton.addActionListener(new ActionListener()
{
@Override
public void actionPerformed(ActionEvent e)
{
selectFileButton.setVisible(false);
startButton.setVisible(false);
fileName.setText("");//delete the text instead of setting invisible because then i only have to reset in new game
editMode=false;
timerAction();//timer starts working once we press start
setSize(690,600);//changing the size of the frame for the log
log.setLineWrap(true);
log.setWrapStyleWord(true);
scroll.setPreferredSize(new Dimension(690, 150));
south.setBorder(BorderFactory.createTitledBorder("Game Log"));
south.add(scroll);
south.setVisible(true);
log.append("Round 1\n");
}
});
}
//action listener for select file
private void selectFileListener()
{
selectFileButton.addActionListener(new ActionListener()
{
@Override
public void actionPerformed(ActionEvent e)
{
if(e.getSource()==selectFileButton)//if we click that button
{
JFileChooser fc= new JFileChooser();
FileNameExtensionFilter filter = new FileNameExtensionFilter("Battle Files (*.battle)", "battle"); //filter to only allow .battle files
fc.setFileFilter(filter);//making our chooser take that filter
fc.setAcceptAllFileFilterUsed(false);//will only allow battle files
int returnVal=fc.showOpenDialog(selectFileButton);//opens up fileSelector
if(returnVal==fc.APPROVE_OPTION)//if we selected a file
{
selectFileButton.setVisible(false);//removing the select file button
//getting the fileName without extension to change JLabel
String fileName=fc.getSelectedFile().getName();
int pos = fileName.lastIndexOf(".");
if (pos > 0) {
fileName = fileName.substring(0, pos);
}
BattleShipServer.this.fileName.setText("File:" + fileName+ " ");
//reading the file
try
{
FileReader fr = new FileReader(fc.getSelectedFile());//make a file object for reading
BufferedReader br = new BufferedReader(fr); //make a buffer to go line by line
//reading in from the buffer
for(int j=0;j<10;j++)
{
String buffer = br.readLine();//reading in line
char[] charArray = buffer.toCharArray();//making it into char array
for(int i=0;i<10;i++)
{
compGrid[i][j]=charArray[i];
}
}
}
catch (FileNotFoundException e1)
{
//we know the file is there so don't worry
}
catch (IOException ioe)
{}
selectedFile=true;
if(carriers+battlships+cruisers+destroyers==5 && selectedFile)//if all ships placed and file selected
{
startButton.setEnabled(true);
}
}
}
}
});
}
public class shipPlacerWindow extends JDialog
{
int x;//this will hold the x coordinate of what we're editing
int y;//this will hold y
private JComboBox<String> shipList = new JComboBox<String>();
private JRadioButton North= new JRadioButton("North");
private JRadioButton South= new JRadioButton("South");
private JRadioButton East= new JRadioButton("East");
private JRadioButton West= new JRadioButton("West");
private JButton placeShip=new JButton("Place Ship");
public shipPlacerWindow(int x,int y)
{
this.x=x-1;//making the coordinate into index value so u subtract by one
this.y=y-1;
setTitle("Select ship at "+ getCharForNumber(y)+x);
setSize(300,200);
setLocationRelativeTo(null);
setLayout(new BorderLayout());
setDefaultCloseOperation(JFrame.DISPOSE_ON_CLOSE);
//making the panel with the combo box
JPanel top = new JPanel();
top.add(new JLabel("Select Ship:"));
if(carriers==0)shipList.addItem("Aircraft Carrier");//if we haven't placed a carrier
if(battlships==0)shipList.addItem("Battleship");
if(cruisers==0)shipList.addItem("Cruiser");
if(destroyers<2)shipList.addItem("Destroyer");
top.add(shipList);
JPanel middle = new JPanel();
middle.setLayout(new GridLayout(2,2));
ButtonGroup directions = new ButtonGroup();
directions.add(North);//adding buttons to group
directions.add(South);
directions.add(East);
directions.add(West);
middle.add(North);
middle.add(South);
middle.add(East);
middle.add(West);
add(top,BorderLayout.NORTH);
add(middle,BorderLayout.CENTER);
add(placeShip,BorderLayout.SOUTH);
IsValid();//be careful not to use isValid() because that's another method inherited from JFrame (this initially disables the button because no radio buttons are selected)
everyThingListener();//this is the listener for everything
setModal(true);//this prevents us from accessing the board behind it
setVisible(true);
}
//this function converts numbers to chars
private String getCharForNumber(int i) {
return i > 0 && i < 27 ? String.valueOf((char)(i + 64)) : null;
}
//this function decides whether the button is valid or not
private void IsValid()
{
int range=0;//this is the range of the ship selected
placeShip.setEnabled(true);
try
{
String ship=(String) shipList.getSelectedItem();//returns what kind of ship is selected in combobox
//setting the range based on what kind of ship selected
if(ship.equals("Aircraft Carrier")) range=5;
else if(ship.equals("Battleship")) range=4;
else if(ship.equals("Cruiser")) range=3;
else if(ship.equals("Destroyer")) range=2;
if(North.isSelected())
{
if(range>y+1) throw new CantAddShipException();//checking to make sure we're not out of range
else
{
int yTest=y;
for(int i=0;i<range;i++)
{
if(userGrid[x][yTest]!='X')
{
throw new CantAddShipException();//throws an exception if there is a ship already in position
}
yTest--;
}
}
}
else if(South.isSelected())
{
if(range+y>10)
{
throw new CantAddShipException();
}
else
{
int yTest=y;
for(int i=0;i<range;i++)
{
if(userGrid[x][yTest]!='X') throw new CantAddShipException();//throws an exception if there is a ship already in position
yTest++;
}
}
}
else if(East.isSelected())
{
if(range+x>10)
{
throw new CantAddShipException();
}
else
{
int xTest=x;
for(int i=0;i<range;i++)
{
if(userGrid[xTest][y]!='X')
{
throw new CantAddShipException();//throws an exception if there is a ship already in position
}
xTest++;
}
}
}
else if(West.isSelected())
{
if(range>x+1)throw new CantAddShipException();//x+1 to make up for x being turned into index
else
{
int xTest=x;
for(int i=0;i<range;i++)
{
if(userGrid[xTest][y]!='X')
{
throw new CantAddShipException();//throws an exception if there is a ship already in position
}
xTest--;
}
}
}
else
{
throw new CantAddShipException();
}
}
catch (CantAddShipException e)
{
placeShip.setEnabled(false);//disabling the button if something is wrong
}
}
//this function adds the listeners that check whether the selection is valid and the button listener for placing the ship
private void everyThingListener()
{
//the listener for the JComboBox
shipList.addActionListener (new ActionListener () {
public void actionPerformed(ActionEvent e) {
IsValid();
}
});
North.addActionListener (new ActionListener () {
public void actionPerformed(ActionEvent e) {
IsValid();
}
});
South.addActionListener (new ActionListener () {
public void actionPerformed(ActionEvent e) {
IsValid();
}
});
West.addActionListener (new ActionListener () {
public void actionPerformed(ActionEvent e) {
IsValid();
}
});
East.addActionListener (new ActionListener () {
public void actionPerformed(ActionEvent e) {
IsValid();
}
});
placeShip.addActionListener(new ActionListener()
{
@Override
public void actionPerformed(ActionEvent e)
{
int range=0;//this is the range of the ship selected
char shipCharacter='X';
String ship=(String) shipList.getSelectedItem();//returns what kind of ship is selected in combobox
//setting the range based on what kind of ship selected
ImageIcon theShip=wave;//image icon that will either be a b c or d
if(ship.equals("Aircraft Carrier"))
{
shipCharacter='A';
theShip=aship;
range=5;
carriers++;
}
else if(ship.equals("Battleship"))
{
shipCharacter='B';
theShip=bship;
range=4;
battlships++;
}
else if(ship.equals("Cruiser"))
{
shipCharacter='C';
theShip=cship;
range=3;
cruisers++;
}
else if(ship.equals("Destroyer"))
{
if(destroyers==0)
{
shipCharacter='D';
}
else if(destroyers==1)
{
shipCharacter='E';//i make it 'E' to be able to distinguish between the two destroyer ships
}
theShip=dship;
range=2;
destroyers++;
}
String shipString=Character.toString(shipCharacter);
//changing the texts in the grid and on the actual layout
if(North.isSelected())
{
int yTest=y;
for(int i=0;i<range;i++)
{
userGrid[x][yTest]=shipCharacter;
((GridLabel)leftGrid[x+1][yTest]).explode(shipCharacter,false);
yTest--;
}
}
else if(South.isSelected())
{
int yTest=y;
for(int i=0;i<range;i++)
{
userGrid[x][yTest]=shipCharacter;
((GridLabel)leftGrid[x+1][yTest]).explode(shipCharacter,false);
yTest++;
}
}
else if(East.isSelected())
{
int xTest=x;
for(int i=0;i<range;i++)
{
userGrid[xTest][y]=shipCharacter;
((GridLabel)leftGrid[xTest+1][y]).explode(shipCharacter,false);
xTest++;
}
}
else if(West.isSelected())
{
int xTest=x;
for(int i=0;i<range;i++)
{
userGrid[xTest][y]=shipCharacter;
((GridLabel)leftGrid[xTest+1][y]).explode(shipCharacter,false);
xTest--;
}
}
if(carriers+battlships+cruisers+destroyers==5 && selectedFile)//if they chose all ships
{
startButton.setEnabled(true);
}
//TODO send new coordinates to the other side
StringBuilder builder = new StringBuilder();
for(int i = 0; i < 10; i++)
{
for(int j = 0; j <10; j++)
{
builder.append(userGrid[i][j]);
}
}
System.out.println(builder.toString());
for(int i =0;i<10; i++){
for (int j = 0; j < 10; j++) {//Iterate rows
System.out.print(userGrid[i][j]);
}
System.out.println("");
}
pw.println("placeCoor:"+builder.toString());
shipPlacerWindow.this.dispose();//closes the ship placer window after you place a ship
}
});
}
}
public class winnerWindow extends JDialog
{
private JButton okButton =new JButton("OK");
private ImageIcon wave1=new ImageIcon("wave.jpg");
public winnerWindow(String winner)
{
setTitle("Game Over");
setSize(300,150);
setLocationRelativeTo(null);
setLayout(new BoxLayout(getContentPane(),BoxLayout.Y_AXIS));
setDefaultCloseOperation(JFrame.DISPOSE_ON_CLOSE);
JLabel winner1=new JLabel(winner+ " won!");
winner1.setAlignmentX( Component.LEFT_ALIGNMENT);
add(winner1);
add(okButton);
okListener();
addWindowListener(new java.awt.event.WindowAdapter() {
@Override
public void windowClosing(java.awt.event.WindowEvent windowEvent) {
reset();
}
});
setModal(true);
setVisible(true);
}
public void okListener()
{
okButton.addActionListener(new ActionListener()
{
@Override
public void actionPerformed(ActionEvent arg0)
{
reset();
}
});
}
//resets the game
private void reset()
{
//reset right grid
for (int j=0;j<11;j++)
{
for(int i=0;i<11;i++)
{
if(i>0 && j<10)
{
rightGrid[i][j].removeAll();
rightGrid[i][j].add(new JLabel(new ImageIcon(imageQ)));
}
}
}
//reset left grid
for (int j=0;j<11;j++)
{
for(int i=0;i<11;i++)
{
if(i>0 && j<10)
{
leftGrid[i][j].removeAll();
leftGrid[i][j].add(new JLabel(new ImageIcon(imageQ)));
}
}
}
//reseting user and computer grid
for (int j=0;j<10;j++)
{
for(int i=0;i<10;i++)
{
userGrid[i][j]='X';
compGrid[i][j]='X';
}
}
playersAim="N/A";
computersAim="N/A";
selectFileButton.setVisible(true);
fileName.setText("File: ");
startButton.setEnabled(false);
startButton.setVisible(true);
//for the ship placement
carriers=0;
battlships=0;
cruisers=0;
destroyers=0;
//bools for different modes of game
selectedFile=false;
editMode=true;
compHits=0;//so if this equals 16 that means that the USER won
userHits=0;
selectFileButton.setVisible(true);
startButton.setVisible(true);
fileName.setText("File");//delete the text instead of setting invisible because then i only have to reset in new game
editMode=true;
log.setText("");
scroll.setVisible(false);
south.setBorder(null);
BattleShipServer.this.setSize(690,460);
timeLabel.setText("0:15");
seconds=15;
playerShot=false;//the boolean that indicates if the player shot
compShot=false;
computerSeconds=12;//this will be the random time assigned to the computer's turn
round=1;
winnerWindow.this.dispose();
}
}
private class aboutWindow extends JDialog
{
JTextArea infoText=new JTextArea();
public aboutWindow()
{
setTitle("Battleship Instructions");
setSize(300,200);
setLocationRelativeTo(null);
setDefaultCloseOperation(JFrame.DISPOSE_ON_CLOSE);
JScrollPane sp = new JScrollPane(infoText);
add(sp);
try
{
BufferedReader in = new BufferedReader(new FileReader(new File("howTo.txt")));
String line = in.readLine();
while(line != null){
infoText.append(line + "\n");
line = in.readLine();
}
}
catch (FileNotFoundException e)
{
} catch (IOException e)
{
// TODO Auto-generated catch block
e.printStackTrace();
}
infoText.setEditable(false);
setModal(true);
setVisible(true);
}
}
private class aboutWindow2 extends JDialog
{
public aboutWindow2()
{
setTitle("About");
setSize(300,200);
setLocationRelativeTo(null);
setDefaultCloseOperation(JFrame.DISPOSE_ON_CLOSE);
add(new JLabel("Made by Hooman Zarrabi - 3/01/15"),BorderLayout.NORTH);
add(new JLabel(new ImageIcon("hooman.png")),BorderLayout.CENTER);
add(new JLabel("CSCI201USC: Assignment 3"),BorderLayout.SOUTH);
setModal(true);
setVisible(true);
}
}
private class GridLabel extends JComponent implements Runnable
{
public int x;
public int y;
public boolean press;
BufferedImage currentWave;
int current=0;
//for the explosion thread
Boolean explode;
int counter=0;
ImageIcon blastIcon;
char c;
public GridLabel(int x, int y)
{
this.x=x;
this.y=y;
press=true;
setPreferredSize(new Dimension(30,30));
setLayout(new FlowLayout());
if(x>0 && y<11)
{
setBorder(BorderFactory.createLineBorder(Color.black));
add(new JLabel(new ImageIcon(imageQ)));
new Thread(this).start();
}
}
@Override
protected void paintComponent(Graphics g)
{
super.paintComponent(g);
g.drawImage(currentWave,0,0,null);
}
@Override
public void run()
{
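//simple animation loop: alternate between the two wave frames roughly every 150 ms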
while (true)
{
if(current == 0)
{
currentWave=wave1;
current++;
}
else
{
currentWave=wave2;
current--;
}
validate();
repaint();
try { Thread.sleep(150); }
catch (InterruptedException e) { }
}
}
public void explode(char c, Boolean b)
{
counter=0;
explode=b;
this.c=c;
if(c=='E')this.c='D';
if(this.c=='A') blastIcon=new ImageIcon(imageA);
else if(this.c=='B') blastIcon=new ImageIcon(imageB);
else if(this.c=='C') blastIcon=new ImageIcon(imageC);
else if(this.c=='D') blastIcon=new ImageIcon(imageD);
else if(this.c=='M') blastIcon=new ImageIcon(imageM);
else if(this.c=='Q') blastIcon=new ImageIcon(imageQ);
else if(this.c=='X') blastIcon=new ImageIcon(imageX);
new Explosion().start();
new Sound().start();
}
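//animation thread: if requested, steps through the explosion (or splash) frames and then leaves the final hit/miss icon in the cell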
public class Explosion extends Thread
{
public void run()
{
counter=0;
while(true)
{
if(c!='M')//we're not indicating a miss
{
if(explode)//if you want explode animation to happen
{
if(counter<5)
{
removeAll();//removes previous icons or labels
add(new JLabel(new ImageIcon(expl[counter])));
counter++;
}
else if(counter==5)
{
removeAll();
add(new JLabel(blastIcon));
counter++;
}
else
{
return;//stop thread
}
}
else
{
removeAll();
add(new JLabel(blastIcon));
counter=0;
return;//stop thread
}
}
else//we're indicating a miss
{
if(counter<7)
{
removeAll();
add(new JLabel(new ImageIcon(splash[counter])));
counter++;
}
else if(counter==7)
{
removeAll();
add(new JLabel(new ImageIcon(imageM)));
counter++;
}
else
{
return;
}
}
try
{sleep(300);}
catch (InterruptedException e)
{}
}
}
}
public class Sound extends Thread
{
String theString;
public Sound()
{
if(c=='M')
{
theString="splash";
}
else if(c=='X') theString="explode";
}
public void run()
{
if(explode)
{
sl.playSound("cannon");
sl.playSound(theString);
}
}
}
}
public static void main(String[] args)
{
System.setProperty("java.util.Arrays.useLegacyMergeSort", "true");
new BattleShip();
}
}
|
package org.navalplanner.business.workreports.entities;
import java.util.Date;
import java.util.HashSet;
import java.util.Set;
import org.apache.commons.lang.StringUtils;
import org.hibernate.validator.AssertTrue;
import org.hibernate.validator.NotNull;
import org.hibernate.validator.Valid;
import org.joda.time.LocalDate;
import org.joda.time.LocalTime;
import org.joda.time.Seconds;
import org.navalplanner.business.common.IntegrationEntity;
import org.navalplanner.business.common.Registry;
import org.navalplanner.business.common.exceptions.InstanceNotFoundException;
import org.navalplanner.business.costcategories.entities.TypeOfWorkHours;
import org.navalplanner.business.labels.entities.Label;
import org.navalplanner.business.labels.entities.LabelType;
import org.navalplanner.business.orders.entities.OrderElement;
import org.navalplanner.business.resources.entities.Resource;
import org.navalplanner.business.workingday.EffortDuration;
import org.navalplanner.business.workreports.daos.IWorkReportLineDAO;
import org.navalplanner.business.workreports.valueobjects.DescriptionField;
import org.navalplanner.business.workreports.valueobjects.DescriptionValue;
public class WorkReportLine extends IntegrationEntity implements Comparable {
public static WorkReportLine create(WorkReport workReport) {
return create(new WorkReportLine(workReport));
}
private EffortDuration effort;
private Date date;
private LocalTime clockStart;
private LocalTime clockFinish;
private Resource resource;
private OrderElement orderElement;
private Set<Label> labels = new HashSet<Label>();
private Set<DescriptionValue> descriptionValues = new HashSet<DescriptionValue>();
private WorkReport workReport;
private TypeOfWorkHours typeOfWorkHours;
/**
* Constructor for hibernate. Do not use!
*/
public WorkReportLine() {
}
public WorkReportLine(WorkReport workReport) {
this.setWorkReport(workReport);
}
@NotNull(message = "effort not specified")
public EffortDuration getEffort() {
return effort;
}
public void setEffort(EffortDuration effort) {
this.effort = effort;
if ((workReport != null)
&& (workReport.getWorkReportType() != null)
&& (workReport.getWorkReportType().getHoursManagement()
.equals(HoursManagementEnum.HOURS_CALCULATED_BY_CLOCK))) {
this.effort = getDiferenceBetweenTimeStartAndFinish();
}
}
public LocalTime getClockFinish() {
return clockFinish;
}
public void setClockFinish(Date clockFinish) {
if (clockFinish != null) {
setClockFinish(LocalTime.fromDateFields(clockFinish));
}
}
public void setClockFinish(LocalTime clockFinish) {
this.clockFinish = clockFinish;
updateEffort();
}
public LocalTime getClockStart() {
return clockStart;
}
public void setClockStart(Date clockStart) {
if (clockStart != null) {
setClockStart(LocalTime.fromDateFields(clockStart));
}
}
public void setClockStart(LocalTime clockStart) {
this.clockStart = clockStart;
updateEffort();
}
@NotNull(message = "date not specified")
public Date getDate() {
return date;
}
public LocalDate getLocalDate() {
if (getDate() == null) {
return null;
}
return LocalDate.fromDateFields(getDate());
}
public void setDate(Date date) {
this.date = date;
if ((workReport != null) && (workReport.getWorkReportType() != null)) {
if (workReport.getWorkReportType().getDateIsSharedByLines()) {
this.date = workReport.getDate();
}
}
}
@NotNull(message = "resource not specified")
public Resource getResource() {
return resource;
}
public void setResource(Resource resource) {
this.resource = resource;
if ((workReport != null) && (workReport.getWorkReportType() != null)) {
if (workReport.getWorkReportType().getResourceIsSharedInLines()) {
this.resource = workReport.getResource();
}
}
}
@NotNull(message = "order element not specified")
public OrderElement getOrderElement() {
return orderElement;
}
public void setOrderElement(OrderElement orderElement) {
this.orderElement = orderElement;
if ((workReport != null) && (workReport.getWorkReportType() != null)) {
if (workReport.getWorkReportType().getOrderElementIsSharedInLines()) {
this.orderElement = workReport.getOrderElement();
}
}
}
public Set<Label> getLabels() {
return labels;
}
public void setLabels(Set<Label> labels) {
this.labels = labels;
}
@NotNull(message = "work report not specified")
public WorkReport getWorkReport() {
return workReport;
}
private void setWorkReport(WorkReport workReport) {
this.workReport = workReport;
// update and copy the fields and label for each line
updateItsFieldsAndLabels();
// copy the required fields if these are shared by lines
updatesAllSharedDataByLines();
// update calculated effort
updateEffort();
}
@Valid
public Set<DescriptionValue> getDescriptionValues() {
return descriptionValues;
}
public void setDescriptionValues(Set<DescriptionValue> descriptionValues) {
this.descriptionValues = descriptionValues;
}
@NotNull(message = "type of work hours not specified")
public TypeOfWorkHours getTypeOfWorkHours() {
return typeOfWorkHours;
}
public void setTypeOfWorkHours(TypeOfWorkHours typeOfWorkHours) {
this.typeOfWorkHours = typeOfWorkHours;
}
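// Lines are ordered by date; a line without a date sorts before any other.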
@Override
public int compareTo(Object arg0) {
if (date != null) {
final WorkReportLine workReportLine = (WorkReportLine) arg0;
return date.compareTo(workReportLine.getDate());
}
return -1;
}
@AssertTrue(message = "closckStart:the clockStart must be not null if number of hours is calcultate by clock")
public boolean checkConstraintClockStartMustBeNotNullIfIsCalculatedByClock() {
if (!firstLevelValidationsPassed()) {
return true;
}
if (workReport.getWorkReportType().getHoursManagement().equals(
HoursManagementEnum.HOURS_CALCULATED_BY_CLOCK)) {
return (getClockStart() != null);
}
return true;
}
@AssertTrue(message = "clockFinish:the clockStart must be not null if number of hours is calcultate by clock")
public boolean checkConstraintClockFinishMustBeNotNullIfIsCalculatedByClock() {
if (!firstLevelValidationsPassed()) {
return true;
}
if (workReport.getWorkReportType().getHoursManagement().equals(
HoursManagementEnum.HOURS_CALCULATED_BY_CLOCK)) {
return (getClockFinish() != null);
}
return true;
}
@AssertTrue(message = "The start hour cannot be higher than finish hour")
public boolean checkCannotBeHigher() {
if (!firstLevelValidationsPassed()) {
return true;
}
if (workReport.getWorkReportType().getHoursManagement().equals(
HoursManagementEnum.HOURS_CALCULATED_BY_CLOCK)) {
return checkCannotBeHigher(this.clockStart, this.clockFinish);
}
return true;
}
public boolean checkCannotBeHigher(LocalTime starting, LocalTime ending) {
return !((ending != null) && (starting != null) && (starting
.compareTo(ending) > 0));
}
void updateItsFieldsAndLabels() {
if (workReport != null) {
assignItsLabels(workReport.getWorkReportType());
assignItsDescriptionValues(workReport.getWorkReportType());
}
}
private void assignItsLabels(WorkReportType workReportType) {
Set<Label> updatedLabels = new HashSet<Label>();
if (workReportType != null) {
for (WorkReportLabelTypeAssigment labelTypeAssigment : workReportType
.getLineLabels()) {
Label label = getLabelBy(labelTypeAssigment);
if (label != null) {
updatedLabels.add(label);
} else {
updatedLabels.add(labelTypeAssigment.getDefaultLabel());
}
}
this.labels = updatedLabels;
}
}
private Label getLabelBy(WorkReportLabelTypeAssigment labelTypeAssigment) {
LabelType type = labelTypeAssigment.getLabelType();
for (Label label : labels) {
if (label.getType().getId().equals(type.getId())) {
return label;
}
}
return null;
}
private void assignItsDescriptionValues(WorkReportType workReportType) {
Set<DescriptionValue> updatedDescriptionValues = new HashSet<DescriptionValue>();
if (workReportType != null) {
for (DescriptionField descriptionField : workReportType
.getLineFields()) {
DescriptionValue descriptionValue;
try {
descriptionValue = this
.getDescriptionValueByFieldName(descriptionField
.getFieldName());
} catch (InstanceNotFoundException e) {
descriptionValue = DescriptionValue.create(
descriptionField.getFieldName(), null);
}
updatedDescriptionValues.add(descriptionValue);
}
this.descriptionValues = updatedDescriptionValues;
}
}
void updatesAllSharedDataByLines() {
// copy the required fields if these are shared by lines
updateSharedDateByLines();
updateSharedResourceByLines();
updateSharedOrderElementByLines();
}
void updateSharedDateByLines() {
if ((workReport != null) && (workReport.getWorkReportType() != null)
&& (workReport.getWorkReportType().getDateIsSharedByLines())) {
setDate(workReport.getDate());
}
}
void updateSharedResourceByLines() {
if ((workReport != null)
&& (workReport.getWorkReportType() != null)
&& (workReport.getWorkReportType().getResourceIsSharedInLines())) {
setResource(workReport.getResource());
}
}
void updateSharedOrderElementByLines() {
if ((workReport != null)
&& (workReport.getWorkReportType() != null)
&& (workReport.getWorkReportType()
.getOrderElementIsSharedInLines())) {
setOrderElement(workReport.getOrderElement());
}
}
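// When the work report type calculates hours by clock, the effort is derived from the clock start/finish times.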
private void updateEffort() {
if ((workReport != null)
&& (workReport.getWorkReportType() != null)
&& workReport.getWorkReportType().getHoursManagement().equals(
HoursManagementEnum.HOURS_CALCULATED_BY_CLOCK)) {
setEffort(getDiferenceBetweenTimeStartAndFinish());
}
}
private EffortDuration getDiferenceBetweenTimeStartAndFinish() {
if ((clockStart != null) && (clockFinish != null)) {
return EffortDuration.seconds(Seconds.secondsBetween(clockStart,
clockFinish).getSeconds());
}
return null;
}
@Override
protected IWorkReportLineDAO getIntegrationEntityDAO() {
return Registry.getWorkReportLineDAO();
}
@AssertTrue(message = "fields should match with work report data if are shared by lines")
public boolean checkConstraintFieldsMatchWithWorkReportIfAreSharedByLines() {
if (!firstLevelValidationsPassed()) {
return true;
}
if (workReport.getWorkReportType().getDateIsSharedByLines()) {
if (!workReport.getDate().equals(date)) {
return false;
}
}
if (workReport.getWorkReportType().getOrderElementIsSharedInLines()) {
if (!workReport.getOrderElement().getId().equals(
orderElement.getId())) {
return false;
}
}
if (workReport.getWorkReportType().getResourceIsSharedInLines()) {
if (!workReport.getResource().getId().equals(resource.getId())) {
return false;
}
}
return true;
}
@AssertTrue(message = "number of hours is not properly calculated based on clock")
public boolean checkConstraintHoursCalculatedByClock() {
if (!firstLevelValidationsPassed()) {
return true;
}
if (workReport.getWorkReportType().getHoursManagement().equals(
HoursManagementEnum.HOURS_CALCULATED_BY_CLOCK)) {
if (getDiferenceBetweenTimeStartAndFinish().compareTo(effort) != 0) {
return false;
}
}
return true;
}
private boolean firstLevelValidationsPassed() {
return (workReport != null) && (typeOfWorkHours != null)
&& (effort != null) && (date != null) && (resource != null)
&& (orderElement != null);
}
@AssertTrue(message = "label type:the work report have not assigned this label type")
public boolean checkConstraintAssignedLabelTypes() {
if (this.workReport == null
|| this.workReport.getWorkReportType() == null) {
return true;
}
if (this.workReport.getWorkReportType().getLineLabels().size() != this.labels
.size()) {
return false;
}
for (WorkReportLabelTypeAssigment typeAssigment : this.workReport
.getWorkReportType().getLineLabels()) {
try {
getLabelByType(typeAssigment.getLabelType());
} catch (InstanceNotFoundException e) {
return false;
}
}
return true;
}
@AssertTrue(message = "description value:the work report have not assigned the description field")
public boolean checkConstraintAssignedDescriptionValues() {
if (this.workReport == null
|| this.workReport.getWorkReportType() == null) {
return true;
}
if (this.workReport.getWorkReportType().getLineFields().size() > this.descriptionValues
.size()) {
return false;
}
for (DescriptionField field : this.workReport.getWorkReportType()
.getLineFields()) {
try {
getDescriptionValueByFieldName(field.getFieldName());
} catch (InstanceNotFoundException e) {
return false;
}
}
return true;
}
@AssertTrue(message = "There are repeated description values in the work report line")
public boolean checkConstraintAssignedRepeatedDescriptionValues() {
Set<String> textFields = new HashSet<String>();
for (DescriptionValue v : this.descriptionValues) {
String name = v.getFieldName();
if (!StringUtils.isBlank(name)) {
if (textFields.contains(name.toLowerCase())) {
return false;
} else {
textFields.add(name.toLowerCase());
}
}
}
return true;
}
public DescriptionValue getDescriptionValueByFieldName(String fieldName)
throws InstanceNotFoundException {
if (StringUtils.isBlank(fieldName)) {
throw new InstanceNotFoundException(fieldName,
DescriptionValue.class.getName());
}
for (DescriptionValue v : this.descriptionValues) {
if (v.getFieldName().equalsIgnoreCase(StringUtils.trim(fieldName))) {
return v;
}
}
throw new InstanceNotFoundException(fieldName, DescriptionValue.class
.getName());
}
public Label getLabelByType(LabelType type)
throws InstanceNotFoundException {
if (type == null) {
throw new InstanceNotFoundException(type, LabelType.class.getName());
}
for (Label l : this.labels) {
if (l.getType().getId().equals(type.getId())) {
return l;
}
}
throw new InstanceNotFoundException(type, LabelType.class.getName());
}
@Override
public void setCodeAutogenerated(Boolean codeAutogenerated) {
// do nothing
}
@Override
public Boolean isCodeAutogenerated() {
return getWorkReport() != null ? getWorkReport().isCodeAutogenerated()
: false;
}
/**
* TODO remove this method in order to use
* {@link WorkReportLine#getEffort()}
*
* @deprecated Use {@link WorkReportLine#getEffort()} instead
*/
public Integer getNumHours() {
return (getEffort() == null) ? null : getEffort().getHours();
}
/**
* TODO remove this method in order to use
* {@link WorkReportLine#setEffort()}
*
* @deprecated Use {@link WorkReportLine#setEffort()} instead
*/
public void setNumHours(Integer hours) {
setEffort(EffortDuration.hours(hours));
}
}
|
/**
* <h2>YAML GOlr Configuration</h2>
*
* <p>
* This package supplies the tools necessary to parse, probe, and dump the YAML file configurations
* for BBOP-JS constrained Solr indexes. Specifically, it takes multiple YAML files and turns them
* into a Solr schema that is compatible with {@link org.bbop.schema.SolrSchemaXMLWriter}.
* </p>
*/
package org.bbop.schema;
|
package org.davidmoten.hilbert;
import java.util.Collections;
import java.util.List;
import com.github.davidmoten.guavamini.Lists;
public final class Range {
private final long low;
private final long high;
public Range(long low, long high) {
this.low = low;
this.high = high;
}
public static Range create(long low, long high) {
return new Range(low, high);
}
public long low() {
return low;
}
public long high() {
return high;
}
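// Splits this range into at most two sub-ranges at the boundary derived from
// Util.mostSignificantBetween(low + 1, high + 1); a single-value range is returned unchanged.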
public List<Range> split() {
if (low == high) {
return Collections.singletonList(this);
}
long x = Util.mostSignificantBetween(low + 1, high + 1) - 1;
if (x == low) {
return Lists.newArrayList(Range.create(low, low), Range.create(low + 1, high));
} else {
return Lists.newArrayList(Range.create(low, x), Range.create(x + 1, high));
}
}
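// Recursively splits up to n times, producing at most 2^n sub-ranges; n == 0 returns the range unchanged.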
public List<Range> split(int n) {
if (n == 0) {
return Collections.singletonList(this);
} else if (n == 1) {
return split();
}
List<Range> split = split();
if (split.size() == 1) {
return split;
} else {
List<Range> result = Lists.newArrayList();
for (Range range : split()) {
result.addAll(range.split(n - 1));
}
return result;
}
}
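// Sorts the ranges by their low value and merges any that overlap or are adjacent.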
static List<Range> simplify(List<Range> list) {
// mutates list!
Collections.sort(list, (a, b) -> Long.compare(a.low(), b.low()));
int i = 1;
while (i < list.size()) {
Range previous = list.get(i - 1);
Range current = list.get(i);
if (previous.high() >= current.low() - 1) {
list.set(i - 1, Range.create(previous.low(), current.high()));
list.remove(i);
} else {
i++;
}
}
return list;
}
@Override
public String toString() {
return "Range [low=" + low + ", high=" + high + "]";
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + (int) (high ^ (high >>> 32));
result = prime * result + (int) (low ^ (low >>> 32));
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
Range other = (Range) obj;
if (high != other.high)
return false;
if (low != other.low)
return false;
return true;
}
}
|
package org.eclipse.mylyn.internal.tasks.ui.views;
import java.util.regex.Pattern;
import org.eclipse.jface.resource.ImageDescriptor;
import org.eclipse.jface.viewers.IColorProvider;
import org.eclipse.jface.viewers.IFontProvider;
import org.eclipse.jface.viewers.LabelProvider;
import org.eclipse.mylyn.internal.tasks.core.OrphanedTasksContainer;
import org.eclipse.mylyn.internal.tasks.core.Person;
import org.eclipse.mylyn.internal.tasks.core.ScheduledTaskContainer;
import org.eclipse.mylyn.internal.tasks.core.TaskActivityManager;
import org.eclipse.mylyn.internal.tasks.core.TaskArchive;
import org.eclipse.mylyn.internal.tasks.core.TaskCategory;
import org.eclipse.mylyn.internal.tasks.core.TaskGroup;
import org.eclipse.mylyn.internal.tasks.core.UnfiledCategory;
import org.eclipse.mylyn.internal.tasks.ui.ITaskHighlighter;
import org.eclipse.mylyn.internal.tasks.ui.TaskListColorsAndFonts;
import org.eclipse.mylyn.internal.tasks.ui.TasksUiImages;
import org.eclipse.mylyn.tasks.core.AbstractRepositoryQuery;
import org.eclipse.mylyn.tasks.core.AbstractTask;
import org.eclipse.mylyn.tasks.core.AbstractTaskContainer;
import org.eclipse.mylyn.tasks.core.TaskRepository;
import org.eclipse.mylyn.tasks.core.AbstractTask.PriorityLevel;
import org.eclipse.mylyn.tasks.core.AbstractTask.RepositoryTaskSyncState;
import org.eclipse.mylyn.tasks.ui.AbstractRepositoryConnectorUi;
import org.eclipse.mylyn.tasks.ui.TasksUiPlugin;
import org.eclipse.swt.graphics.Color;
import org.eclipse.swt.graphics.Font;
import org.eclipse.swt.graphics.Image;
import org.eclipse.ui.PlatformUI;
import org.eclipse.ui.themes.IThemeManager;
/**
* @author Mik Kersten
*/
public class TaskElementLabelProvider extends LabelProvider implements IColorProvider, IFontProvider {
private static final String NO_SUMMARY_AVAILABLE = ": <no summary available>";
private IThemeManager themeManager = PlatformUI.getWorkbench().getThemeManager();
private static final Pattern pattern = Pattern.compile("\\d*: .*");
private boolean wideImages = false;
private class CompositeImageDescriptor {
ImageDescriptor icon;
ImageDescriptor overlayKind;
};
// public TaskElementLabelProvider() {
// super();
public TaskElementLabelProvider(boolean wideImages) {
super();
this.wideImages = wideImages;
}
@Override
public Image getImage(Object element) {
CompositeImageDescriptor compositeDescriptor = getImageDescriptor(element);
if (element instanceof AbstractTask) {
if (compositeDescriptor.overlayKind == null) {
compositeDescriptor.overlayKind = TasksUiImages.OVERLAY_BLANK;
}
return TasksUiImages.getCompositeTaskImage(compositeDescriptor.icon, compositeDescriptor.overlayKind,
wideImages);
} else if (element instanceof AbstractTaskContainer) {
return TasksUiImages.getCompositeTaskImage(compositeDescriptor.icon, TasksUiImages.OVERLAY_BLANK,
wideImages);
} else {
return TasksUiImages.getCompositeTaskImage(compositeDescriptor.icon, null, wideImages);
}
}
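// Chooses a base icon from the element type; for tasks and queries the repository connector UI may
// supply the icon and the kind overlay, with generic fallbacks otherwise.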
private CompositeImageDescriptor getImageDescriptor(Object object) {
CompositeImageDescriptor compositeDescriptor = new CompositeImageDescriptor();
if (object instanceof TaskArchive || object instanceof UnfiledCategory) {
compositeDescriptor.icon = TasksUiImages.CATEGORY_ARCHIVE;
return compositeDescriptor;
} else if (object instanceof TaskCategory) {
compositeDescriptor.icon = TasksUiImages.CATEGORY;
} else if (object instanceof TaskGroup) {
compositeDescriptor.icon = TasksUiImages.TASK_GROUPING;
}
if (object instanceof AbstractTaskContainer) {
AbstractTaskContainer element = (AbstractTaskContainer) object;
AbstractRepositoryConnectorUi connectorUi = null;
if (element instanceof AbstractTask) {
AbstractTask repositoryTask = (AbstractTask) element;
connectorUi = TasksUiPlugin.getConnectorUi(((AbstractTask) element).getConnectorKind());
if (connectorUi != null) {
compositeDescriptor.overlayKind = connectorUi.getTaskKindOverlay(repositoryTask);
}
} else if (element instanceof AbstractRepositoryQuery) {
connectorUi = TasksUiPlugin.getConnectorUi(((AbstractRepositoryQuery) element).getRepositoryKind());
}
if (connectorUi != null) {
compositeDescriptor.icon = connectorUi.getTaskListElementIcon(element);
return compositeDescriptor;
} else {
if (element instanceof OrphanedTasksContainer) {
compositeDescriptor.icon = TasksUiImages.QUERY_UNMATCHED;
} else if (element instanceof AbstractRepositoryQuery || object instanceof OrphanedTasksContainer) {
compositeDescriptor.icon = TasksUiImages.QUERY;
} else if (element instanceof AbstractTask) {
compositeDescriptor.icon = TasksUiImages.TASK;
} else if (element instanceof ScheduledTaskContainer) {
compositeDescriptor.icon = TasksUiImages.CALENDAR;
} else if (element instanceof Person) {
compositeDescriptor.icon = TasksUiImages.PERSON;
TaskRepository repository = TasksUiPlugin.getRepositoryManager().getRepository(
((Person) element).getRepositoryUrl());
// for (TaskRepository repository : TasksUiPlugin.getRepositoryManager().getAllRepositories()) {
if (repository != null
&& !repository.isAnonymous()
&& (repository.getUserName() != null && repository.getUserName().equalsIgnoreCase(
element.getHandleIdentifier()))) {
compositeDescriptor.icon = TasksUiImages.PERSON_ME;
// break;
}
}
return compositeDescriptor;
}
}
return compositeDescriptor;
}
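// Maps a task's synchronization state (incoming, outgoing, conflict, error) to the corresponding overlay icon.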
public static ImageDescriptor getSynchronizationImageDescriptor(Object element, boolean synchViewStyle) {
if (element instanceof AbstractTask) {
AbstractTask repositoryTask = (AbstractTask) element;
if (repositoryTask.getSynchronizationState() == RepositoryTaskSyncState.INCOMING
&& repositoryTask.getLastReadTimeStamp() == null) {
if (synchViewStyle) {
return TasksUiImages.OVERLAY_SYNCH_INCOMMING_NEW;
} else {
return TasksUiImages.OVERLAY_INCOMMING_NEW;
}
}
ImageDescriptor imageDescriptor = null;
if (repositoryTask.getSynchronizationState() == RepositoryTaskSyncState.OUTGOING) {
if (synchViewStyle) {
imageDescriptor = TasksUiImages.OVERLAY_SYNCH_OUTGOING;
} else {
imageDescriptor = TasksUiImages.OVERLAY_OUTGOING;
}
} else if (repositoryTask.getSynchronizationState() == RepositoryTaskSyncState.INCOMING) {
if (synchViewStyle) {
imageDescriptor = TasksUiImages.OVERLAY_SYNCH_INCOMMING;
} else {
imageDescriptor = TasksUiImages.OVERLAY_INCOMMING;
}
} else if (repositoryTask.getSynchronizationState() == RepositoryTaskSyncState.CONFLICT) {
imageDescriptor = TasksUiImages.OVERLAY_CONFLICT;
}
if (imageDescriptor == null && repositoryTask.getSynchronizationStatus() != null) {
return TasksUiImages.OVERLAY_WARNING;
} else if (imageDescriptor != null) {
return imageDescriptor;
}
} else if (element instanceof AbstractRepositoryQuery) {
AbstractRepositoryQuery query = (AbstractRepositoryQuery) element;
if (query.getSynchronizationStatus() != null) {
return TasksUiImages.OVERLAY_WARNING;
}
}
// HACK: need a proper blank image
return TasksUiImages.OVERLAY_BLANK;
}
public static ImageDescriptor getPriorityImageDescriptor(Object element) {
AbstractRepositoryConnectorUi connectorUi;
if (element instanceof AbstractTask) {
AbstractTask repositoryTask = (AbstractTask) element;
connectorUi = TasksUiPlugin.getConnectorUi(((AbstractTask) element).getConnectorKind());
if (connectorUi != null) {
return connectorUi.getTaskPriorityOverlay(repositoryTask);
}
}
if (element instanceof AbstractTask) {
AbstractTask task = TaskElementLabelProvider.getCorrespondingTask((AbstractTaskContainer) element);
if (task != null) {
return TasksUiImages.getImageDescriptorForPriority(PriorityLevel.fromString(task.getPriority()));
}
}
return null;
}
@Override
public String getText(Object object) {
if (object instanceof AbstractTask) {
AbstractTask task = (AbstractTask) object;
if (task.getSummary() == null) {
if (task.getTaskKey() != null) {
return task.getTaskKey() + NO_SUMMARY_AVAILABLE;
} else {
return task.getTaskId() + NO_SUMMARY_AVAILABLE;
}
} else if (!pattern.matcher(task.getSummary()).matches()) {
if (task.getTaskKey() != null) {
return task.getTaskKey() + ": " + task.getSummary();
} else {
return task.getSummary();
}
} else {
return task.getSummary();
}
} else if (object instanceof TaskGroup) {
TaskGroup element = (TaskGroup) object;
return element.getSummary();// + " / " + element.getChildren().size();
} else if (object instanceof OrphanedTasksContainer) {
OrphanedTasksContainer container = (OrphanedTasksContainer) object;
String result = container.getSummary();
TaskRepository repository = TasksUiPlugin.getRepositoryManager().getRepository(
container.getConnectorKind(), container.getRepositoryUrl());
if (repository != null) {
result = "Unmatched [" + repository.getRepositoryLabel() + "]";
}
return result;
} else if (object instanceof AbstractTaskContainer) {
AbstractTaskContainer element = (AbstractTaskContainer) object;
return element.getSummary();
} else {
return super.getText(object);
}
}
public Color getForeground(Object object) {
if (object instanceof AbstractTaskContainer && object instanceof AbstractTask) {
AbstractTask task = getCorrespondingTask((AbstractTaskContainer) object);
if (task != null) {
if (TaskActivityManager.getInstance().isCompletedToday(task)) {
return themeManager.getCurrentTheme().getColorRegistry().get(
TaskListColorsAndFonts.THEME_COLOR_TASK_TODAY_COMPLETED);
} else if (task.isCompleted()) {
return themeManager.getCurrentTheme().getColorRegistry().get(
TaskListColorsAndFonts.THEME_COLOR_COMPLETED);
} else if (task.isActive()) {
return TaskListColorsAndFonts.COLOR_TASK_ACTIVE;
} else if (TaskActivityManager.getInstance().isOverdue(task)) {
return themeManager.getCurrentTheme().getColorRegistry().get(
TaskListColorsAndFonts.THEME_COLOR_TASK_PAST_DUE);
} else if (task.isPastReminder()) {
return themeManager.getCurrentTheme().getColorRegistry().get(
TaskListColorsAndFonts.THEME_COLOR_TASK_PAST_SCHEDULED);
} else if (TaskActivityManager.getInstance().isScheduledForToday(task)) {
return themeManager.getCurrentTheme().getColorRegistry().get(
TaskListColorsAndFonts.THEME_COLOR_TASK_TODAY_SCHEDULED);
} else if (TaskActivityManager.getInstance().isScheduledForThisWeek(task)) {
return themeManager.getCurrentTheme().getColorRegistry().get(
TaskListColorsAndFonts.THEME_COLOR_TASK_THISWEEK_SCHEDULED);
}
}
} else if (object instanceof AbstractTaskContainer) {
for (AbstractTask child : ((AbstractTaskContainer) object).getChildren()) {
if (child.isActive() || showHasActiveChild(child)) {
return TaskListColorsAndFonts.COLOR_TASK_ACTIVE;
} else if (TaskActivityManager.getInstance().isOverdue(child)) {
// } else if ((child.isPastReminder() && !child.isCompleted()) || showHasChildrenPastDue(child)) {
return themeManager.getCurrentTheme().getColorRegistry().get(
TaskListColorsAndFonts.THEME_COLOR_TASK_PAST_DUE);
}
}
}
return null;
}
// private boolean showHasChildrenPastDueHelper(AbstractTaskContainer container) {
// for (AbstractTaskContainer child : container.getChildren()) {
// if (child instanceof AbstractTask && ((AbstractTask) child).isPastReminder()
// && !((AbstractTask) child).isCompleted()) {
// return true;
// } else {
// if (showHasChildrenPastDueHelper(child)) {
// return true;
// }
// }
// }
// return false;
// }
// private boolean showHasChildrenPastDue(AbstractTaskContainer container) {
// if (!TasksUiPlugin.getDefault().groupSubtasks(container)) {
// return false;
// }
// return showHasChildrenPastDueHelper(container);
// }
/**
* TODO: move
*/
public static AbstractTask getCorrespondingTask(AbstractTaskContainer element) {
if (element instanceof AbstractTask) {
return (AbstractTask) element;
} else {
return null;
}
}
public Color getBackground(Object element) {
if (element instanceof AbstractTask) {
AbstractTask task = (AbstractTask) element;
ITaskHighlighter highlighter = TasksUiPlugin.getDefault().getHighlighter();
if (highlighter != null) {
return highlighter.getHighlightColor(task);
}
}
return null;
}
public Font getFont(Object element) {
if (!(element instanceof AbstractTaskContainer)) {
return null;
}
AbstractTask task = getCorrespondingTask((AbstractTaskContainer) element);
if (task != null) {
AbstractTask repositoryTask = task;
if (repositoryTask.isSynchronizing()) {
return TaskListColorsAndFonts.ITALIC;
}
}
if (element instanceof AbstractTaskContainer) {
if (element instanceof AbstractRepositoryQuery) {
if (((AbstractRepositoryQuery) element).isSynchronizing()) {
return TaskListColorsAndFonts.ITALIC;
}
}
for (AbstractTask child : ((AbstractTaskContainer) element).getChildren()) {
if (child.isActive() || showHasActiveChild(child)) {
return TaskListColorsAndFonts.BOLD;
}
}
}
if (task != null) {
if (task.isActive()) {
return TaskListColorsAndFonts.BOLD;
} else if (task.isCompleted()) {
return TaskListColorsAndFonts.STRIKETHROUGH;
}
for (AbstractTask child : ((AbstractTaskContainer) element).getChildren()) {
if (child.isActive() || showHasActiveChild(child)) {
return TaskListColorsAndFonts.BOLD;
}
}
}
return null;
}
private boolean showHasActiveChild(AbstractTaskContainer container) {
if (!TasksUiPlugin.getDefault().groupSubtasks(container)) {
return false;
}
return showHasActiveChildHelper(container);
}
private boolean showHasActiveChildHelper(AbstractTaskContainer container) {
for (AbstractTaskContainer child : container.getChildren()) {
if (child instanceof AbstractTask && ((AbstractTask) child).isActive()) {
return true;
} else {
if (showHasActiveChildHelper(child)) {
return true;
}
}
}
return false;
}
}
|
package github.jcext;
abstract class EnqueuerStats {
/**
* For stats and monitoring only; never use it in your application logic.
*/
public int queueSize() {
return enq().queueSize();
}
/**
* The user-associated id.
*/
public Object id() {
return enq().id();
}
abstract Enqueuer<?> enq();
@Override
public String toString() {
Object id = id();
if (id != null) {
return getClass().getSimpleName() + "@" + id;
}
return super.toString();
}
}
|
package org.jtrfp.trcl.gpu;
import java.awt.Canvas;
import java.awt.Color;
import java.awt.Component;
import java.awt.Dimension;
import java.awt.Graphics;
import java.awt.Window;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
import java.beans.PropertyEditorManager;
import java.beans.PropertyEditorSupport;
import java.io.File;
import java.io.IOException;
import java.nio.Buffer;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import java.nio.IntBuffer;
import java.util.concurrent.Callable;
import javax.media.opengl.GL3;
import javax.swing.JFileChooser;
import javax.swing.JMenuItem;
import javax.swing.JPanel;
import javax.swing.JPopupMenu;
import javax.swing.SwingUtilities;
import javax.swing.filechooser.FileFilter;
import org.jtrfp.trcl.core.RootWindow;
import org.jtrfp.trcl.core.TRFuture;
import org.jtrfp.trcl.core.ThreadManager;
import org.jtrfp.trcl.mem.MemoryManager;
public final class GLTexture {
private final GPU gpu;
private final TRFuture<Integer> textureID;
private int rawSideLength;
private final GL3 gl;
private int bindingTarget = GL3.GL_TEXTURE_2D;
private int internalColorFormat = GL3.GL_RGBA4;
private boolean deleted=false;
private static GLProgram textureRenderProgram;
private String debugName="UNNAMED";
private final double [] expectedMaxValue = new double[]{1,1,1,1};
private final double [] expectedMinValue = new double[]{0,0,0,0};
private int preferredUpdateIntervalMillis = 500;
private int width,height,numComponents;
public GLTexture(final GPU gpu) {
System.out.println("Creating GL Texture...");
this.gpu = gpu;
textureID = gpu.getTr().getThreadManager().submitToGL(new Callable<Integer>(){
@Override
public Integer call() throws Exception {
return gpu.newTextureID();
}});
gl = gpu.getGl();
// Setup the empty rows
System.out.println("...Done.");
}// end constructor
private int numComponentsFromEnum(int glEnum){
switch(glEnum){
case GL3.GL_RGBA:
return 4;
case GL3.GL_RGB:
return 3;
case GL3.GL_RG:
return 2;
case GL3.GL_RED:
return 1;
default:
return 0;
}//end switch(glEnum)
}
public GLTexture setImage(int internalOrder, int width, int height, int colorOrder, int numericalFormat, Buffer pixels){
this.width=width; this.height=height;
setNumComponents(numComponentsFromEnum(colorOrder));
if(pixels==null && width*height*16 < MemoryManager.ZEROES.capacity()){
pixels=MemoryManager.ZEROES;
synchronized(pixels){
pixels.clear();gl.glTexImage2D(bindingTarget, 0, internalOrder, width, height, 0, colorOrder, numericalFormat, pixels);}
}//end if(null)
else gl.glTexImage2D(bindingTarget, 0, internalOrder, width, height, 0, colorOrder, numericalFormat, pixels);
return this;
}
public GLTexture setParameteri(int parameterName, int value){
gl.glTexParameteri(bindingTarget, parameterName, value);
return this;
}
/**
* Takes a square texture in RGBA 8888 format. Automatically determines
* dimensions from buffer size.
*
* @param buf
* Directly-allocated buffer containing the image data.
* @since Dec 11, 2013
*/
public void setTextureImageRGBA(final ByteBuffer buf) {
gpu.getTr().getThreadManager().submitToGL(new Callable<Void>(){
@Override
public Void call() throws Exception {
rawSideLength = (int) Math.sqrt(buf.capacity() / 4);
buf.rewind();
GL3 gl = gpu.getGl();
gl.glBindTexture(bindingTarget, textureID.get());
/*FloatBuffer isoSize = FloatBuffer.wrap(new float[] { 0 });
gl.glGetFloatv(GL3.GL_MAX_TEXTURE_MAX_ANISOTROPY_EXT, isoSize);*/
System.out.println("Uploading texture...");
GLTexture.this.width=rawSideLength; GLTexture.this.height=rawSideLength;
setNumComponents(numComponentsFromEnum(GL3.GL_RGBA));
gl.glTexImage2D(bindingTarget, 0, internalColorFormat, rawSideLength,
rawSideLength, 0, GL3.GL_RGBA, GL3.GL_UNSIGNED_BYTE, buf);
gl.glGenerateMipmap(bindingTarget);
System.out.println("\t...Done.");
return null;
}}).get();
}//end setTextureImageRGBA
public void getTextureImageRGBA(final ByteBuffer buf) {
gpu.getTr().getThreadManager().submitToGL(new Callable<Void>(){
@Override
public Void call() throws Exception {
rawSideLength = (int) Math.sqrt(buf.capacity() / 4);
buf.rewind();
GL3 gl = gpu.getGl();
gl.glBindTexture(bindingTarget, textureID.get());
System.out.println("Downloading texture...");
gl.glGetTexImage(bindingTarget, 0, GL3.GL_RGBA, GL3.GL_UNSIGNED_BYTE, buf);
System.out.println("\t...Done.");
return null;
}}).get();
}//end getTextureImageRGBA
public GLTexture configure(int [] sideLengthsInTexels, int numLevels ){
switch(sideLengthsInTexels.length){
case 3:{
gl.glTexStorage3D(bindingTarget, numLevels, internalColorFormat, sideLengthsInTexels[0], sideLengthsInTexels[1], sideLengthsInTexels[2]);
break;
}case 2:{
gl.glTexStorage2D(bindingTarget, numLevels, internalColorFormat, sideLengthsInTexels[0], sideLengthsInTexels[1]);
break;
}case 1:{
gl.glTexStorage1D(bindingTarget, numLevels, internalColorFormat, sideLengthsInTexels[0]);
break;
}
default:{
throw new RuntimeException("Invalid number of dimensions in specified sideLength: "+sideLengthsInTexels.length);
}}
return this;
}//end configureEmpty(...)
public GLTexture subImage(int [] texelCoordinates, int [] sideLengthsInTexels, int format, int level, ByteBuffer texels){
if(texelCoordinates.length!=sideLengthsInTexels.length)
throw new RuntimeException("Texel coordinate dims ("+texelCoordinates.length+") must match sideLength dims ("+sideLengthsInTexels.length+").");
switch(texelCoordinates.length){
case 1:{
gl.glTexSubImage1D(bindingTarget, level, texelCoordinates[0], sideLengthsInTexels[0], GL3.GL_RGBA, GL3.GL_UNSIGNED_BYTE, texels);
break;
}case 2:{
gl.glTexSubImage2D(bindingTarget, level, texelCoordinates[0],texelCoordinates[1], sideLengthsInTexels[0], sideLengthsInTexels[1], GL3.GL_RGBA, GL3.GL_UNSIGNED_BYTE, texels);
break;
}case 3:{
if(level<0)throw new RuntimeException("Level is intolerably negative: "+level);
gl.glTexSubImage3D(bindingTarget, level, texelCoordinates[0],texelCoordinates[1],texelCoordinates[2], sideLengthsInTexels[0], sideLengthsInTexels[1],sideLengthsInTexels[2], GL3.GL_RGBA, GL3.GL_UNSIGNED_BYTE, texels);
break;
}
default:{
throw new RuntimeException("Invalid number of dimensions in specified coordinates: "+texelCoordinates.length);
}}
return this;
}
public void delete() {
if(isDeleted())
return;
gl.glBindTexture(bindingTarget, textureID.get());
gl.glDeleteTextures(1, IntBuffer.wrap(new int[] { textureID.get() }));
deleted=true;
}
int getTextureID() {
return textureID.get();
}
public static void specifyTextureUnit(GL3 gl, int unitNumber) {
gl.glActiveTexture(GL3.GL_TEXTURE0 + unitNumber);
}
public GLTexture bind(){
return bind(gl);
}
public GLTexture bind(GL3 gl) {
gl.glBindTexture(bindingTarget, getTextureID());
return this;
}
public GLTexture bindToTextureUnit(int unitNumber, GL3 gl){
GLTexture.specifyTextureUnit(gl, unitNumber);
bind(gl);
return this;
}
public int getCurrentSideLength() {
return rawSideLength;
}
public GLTexture setMagFilter(int mode) {
gl.glTexParameteri(bindingTarget, GL3.GL_TEXTURE_MAG_FILTER, mode);
return this;
}
public GLTexture setMinFilter(int mode){
gl.glTexParameteri(bindingTarget, GL3.GL_TEXTURE_MIN_FILTER, mode);
return this;
}
public GLTexture setWrapS(int val) {
gl.glTexParameteri(bindingTarget, GL3.GL_TEXTURE_WRAP_S, val);
return this;
}
public GLTexture setWrapT(int val) {
gl.glTexParameteri(bindingTarget, GL3.GL_TEXTURE_WRAP_T, val);
return this;
}
/**
* @return the bindingTarget
*/
public int getBindingTarget() {
return bindingTarget;
}
/**
* @param bindingTarget the bindingTarget to set
*/
public GLTexture setBindingTarget(int bindingTarget) {
this.bindingTarget = bindingTarget;
return this;
}
/**
* @return the internalColorFormat
*/
public int getInternalColorFormat() {
return internalColorFormat;
}
/**
* @param internalColorFormat the internalColorFormat to set
*/
public GLTexture setInternalColorFormat(int internalColorFormat) {
this.internalColorFormat = internalColorFormat;
return this;
}
public GLTexture setImage2DMultisample(int samples,
int internalFormat, int width, int height, boolean fixedSampleLocations) {
this.width=width; this.height=height;
//TODO: Num components
gl.glTexImage2DMultisample(bindingTarget, samples, internalFormat, width, height, fixedSampleLocations);
return this;
}
public GLTexture setImage1D(int internalFormat, int width, int internalOrder, int numericalFormat,
FloatBuffer pixels) {
this.width=width; this.height=1;
setNumComponents(numComponentsFromEnum(internalOrder));
gl.glTexImage1D(bindingTarget, 0, internalFormat, width, 0, internalOrder, numericalFormat, pixels);
return this;
}
public GLTexture readPixels(int pixelFormat, int pixelDataType, ByteBuffer buffer) {
gl.glGetTexImage(bindingTarget, 0, pixelFormat, pixelDataType, buffer);
return this;
}
@Override
public void finalize() throws Throwable{
gpu.getTr().getThreadManager().submitToGL(new Callable<Void>(){
@Override
public Void call() throws Exception {
delete();
return null;
}}).get();
super.finalize();
}
/**
* @return the deleted
*/
public boolean isDeleted() {
return deleted;
}
public GPU getGPU() {
return gpu;
}
private static GLProgram getTextureRenderProgram(GPU gpu){
if(textureRenderProgram!=null)
return textureRenderProgram;
try{
GLShader vs = gpu.newVertexShader().setSourceFromResource("/shader/fullScreenQuadVertexShader.glsl");
GLShader fs = gpu.newFragmentShader().setSourceFromResource("/shader/fullScreenTextureFragShader.glsl");
textureRenderProgram = gpu.newProgram().attachShader(vs).attachShader(fs).link();
textureRenderProgram.validate();
textureRenderProgram.use();
textureRenderProgram.getUniform("textureToUse").set((int)0);
}catch(IOException e){gpu.getTr().showStopper(e);}
return textureRenderProgram;
}//end getTextureRenderProgram(...)
public static final class PropertyEditor extends PropertyEditorSupport{
@Override
public Component getCustomEditor(){
final GLTexture source = (GLTexture)getSource();
final JPanel result = new JPanel();
if(source.getBindingTarget()==GL3.GL_TEXTURE_2D){
result.add(new TextureViewingPanel(source, source.getGPU().getTr().getRootWindow()));
}//TODO: Texture 1D
return result;
}//end getCustomEditor()
}//end PropertyEditor
static{
PropertyEditorManager.registerEditor(GLTexture.class, GLTexture.PropertyEditor.class);
}//end static{}
private static class TextureViewingPanel extends JPanel{
private static final long serialVersionUID = 4580039742312228700L;
private final RootWindow frame;
private GLTexture colorTexture,targetTexture;
private GLFrameBuffer frameBuffer;
private static final Dimension PANEL_SIZE = new Dimension(200,100);
private final ByteBuffer rgbaBytes = ByteBuffer.allocate((int)((PANEL_SIZE.getWidth()*PANEL_SIZE.getHeight()*4*4)))
.order(ByteOrder.nativeOrder());
private final FloatBuffer rgbaFloats = rgbaBytes.asFloatBuffer();
private final Thread updateThread;
private final JPopupMenu popupMenu = new JPopupMenu();
private final JMenuItem dumpToCSV = new JMenuItem("Dump To CSV");
private final ThreadManager threadManager;
public TextureViewingPanel(final GLTexture parent, RootWindow root){
super();
this.targetTexture=parent;
this.setSize(PANEL_SIZE);
this.setPreferredSize(PANEL_SIZE);
this.setMinimumSize(PANEL_SIZE);
this.setAlignmentX(Component.LEFT_ALIGNMENT);
frame = parent.getGPU().getTr().getRootWindow();
threadManager = parent.getGPU().getTr().getThreadManager();
final GPU gpu = parent.getGPU();
final Canvas canvas = frame.getCanvas();
popupMenu.add(dumpToCSV);
updateThread = new Thread(){
@Override
public void run(){
while(true){
try{Thread.sleep(parent.getPreferredUpdateIntervalMillis());}
catch(InterruptedException e){e.printStackTrace();}
Window ancestor = SwingUtilities.getWindowAncestor(TextureViewingPanel.this);
if(ancestor!=null)
if(ancestor.isVisible()){
threadManager.submitToGL(new Callable<Void>(){
@Override
public Void call() throws Exception {
GL3 gl = gpu.getGl();
gl.glDepthMask(false);
gl.glViewport(0, 0, getWidth(), getHeight());
gl.glDepthFunc(GL3.GL_ALWAYS);
final double [] min = parent.getExpectedMinValue();
final double [] max = parent.getExpectedMaxValue();
final GLProgram prg = getTextureRenderProgram(gpu);
prg.use();
prg.getUniform("scalar").set(
1f/(float)(max[0]-min[0]),
1f/(float)(max[1]-min[1]),
1f/(float)(max[2]-min[2]),
1f/(float)(max[3]-min[3]));
prg.getUniform("offset").set(
(float)min[0]/(float)(max[0]-min[0]),
(float)min[1]/(float)(max[1]-min[1]),
(float)min[2]/(float)(max[2]-min[2]),
(float)min[3]/(float)(max[3]-min[3]));
colorTexture.bind().readPixels(GL3.GL_RGBA, GL3.GL_FLOAT, rgbaBytes);
frameBuffer.bindToDraw();
parent.bindToTextureUnit(0, gpu.getGl());
gl.glDrawArrays(GL3.GL_TRIANGLES, 0, 6);
rgbaBytes.clear();
//Cleanup
gl.glViewport(0, 0, canvas.getWidth(), canvas.getHeight());
gl.glBindFramebuffer(GL3.GL_FRAMEBUFFER, 0);
TextureViewingPanel.this.repaint();
return null;
}//end call()
}).get();}
}//end while(true)
}//end run()
};
threadManager.submitToGL(new Callable<Void>(){
@Override
public Void call() throws Exception {
colorTexture = gpu
.newTexture()
.bind()
.setMinFilter(GL3.GL_NEAREST)
.setMagFilter(GL3.GL_NEAREST)
.setWrapS(GL3.GL_CLAMP_TO_EDGE)
.setWrapT(GL3.GL_CLAMP_TO_EDGE)
.setImage(GL3.GL_RGBA32F,
(int)PANEL_SIZE.getWidth(),
(int)PANEL_SIZE.getHeight(),
GL3.GL_RGBA,
GL3.GL_FLOAT, null);
frameBuffer = gpu
.newFrameBuffer()
.bindToDraw()
.attachDrawTexture(colorTexture, GL3.GL_COLOR_ATTACHMENT0)
.setDrawBufferList(GL3.GL_COLOR_ATTACHMENT0);
if(gpu.getGl().glCheckFramebufferStatus(GL3.GL_FRAMEBUFFER) != GL3.GL_FRAMEBUFFER_COMPLETE){
throw new RuntimeException("Texture display frame buffer setup failure. OpenGL code "+gpu.getGl().glCheckFramebufferStatus(GL3.GL_FRAMEBUFFER));
}
updateThread.start();
return null;
}});
this.addMouseListener(new MouseAdapter(){
@Override
public void mouseClicked(MouseEvent evt) {
if(evt.getButton()==MouseEvent.BUTTON3)
popupMenu.show(evt.getComponent(),evt.getX(),evt.getY());
else popupMenu.setVisible(false);
}//end mouseClicked(...)
});
dumpToCSV.addActionListener(new ActionListener(){
@Override
public void actionPerformed(ActionEvent evt) {
final JFileChooser fc = new JFileChooser();
fc.setFileFilter(new FileFilter(){
@Override
public boolean accept(File f) {
return f.getAbsolutePath().toUpperCase().endsWith(".CSV");
}
@Override
public String getDescription() {
return "Comma-Separated Values (.CSV)";
}});
final int result = fc.showSaveDialog(TextureViewingPanel.this);
if(result==JFileChooser.APPROVE_OPTION)
writeTextureToCSV(fc.getSelectedFile());
}});
}//end constructor
final float [] val = new float[4];
private void writeTextureToCSV(File destFile){
threadManager.submitToThreadPool(new Callable<Void>(){
@Override
public Void call() throws Exception {
final ByteBuffer dest = ByteBuffer.allocateDirect(
4*targetTexture.getNumComponents()*
targetTexture.getWidth()*targetTexture.getHeight());
threadManager.submitToGL(new Callable<Void>(){
@Override
public Void call() throws Exception {
//TODO
return null;//REMOVE
//targetTexture.readPixels(targetTexture.getPixelFormat(), targetTexture.getPixelDataType(), dest);
}}).get();
return null;
}});
}//end writeTextureToCSV(...)
@Override
public void paint(Graphics g){
super.paint(g);
rgbaFloats.clear();
for(int y=getHeight()-1; y>=0; y--)
for(int x=0; x<getWidth();x++){
rgbaFloats.get(val);
g.setColor(new Color(val[0],val[1],val[2],1));
g.fillRect(x, y, 1, 1);}
}//end paint(...)
}//end TextureViewingCanvas
/**
* @return the debugName
*/
public String getDebugName() {
return debugName;
}
/**
* @param debugName the debugName to set
*/
public GLTexture setDebugName(String debugName) {
this.debugName = debugName;
return this;
}
/**
* @return the expectedMaxValue
*/
public double[] getExpectedMaxValue() {
return expectedMaxValue;
}
/**
* @return the expectedMinValue
*/
public double[] getExpectedMinValue() {
return expectedMinValue;
}
/**
* @param expectedMaxValue the expectedMaxValue to set
*/
public GLTexture setExpectedMaxValue(double r, double g, double b, double a) {
this.expectedMaxValue[0]=r;
this.expectedMaxValue[1]=g;
this.expectedMaxValue[2]=b;
this.expectedMaxValue[3]=a;
return this;
}
/**
* @param expectedMinValue the expectedMinValue to set
*/
public GLTexture setExpectedMinValue(double r, double g, double b, double a) {
this.expectedMinValue[0]=r;
this.expectedMinValue[1]=g;
this.expectedMinValue[2]=b;
this.expectedMinValue[3]=a;
return this;
}
/**
* @return the preferredUpdateIntervalMillis
*/
public int getPreferredUpdateIntervalMillis() {
return preferredUpdateIntervalMillis;
}
/**
* @param preferredUpdateIntervalMillis the preferredUpdateIntervalMillis to set
*/
public GLTexture setPreferredUpdateIntervalMillis(int preferredUpdateIntervalMillis) {
this.preferredUpdateIntervalMillis = preferredUpdateIntervalMillis;
return this;
}
/**
* @return the width
*/
public int getWidth() {
return width;
}
/**
* @return the height
*/
public int getHeight() {
return height;
}
/**
* @return the numComponents
*/
public int getNumComponents() {
return numComponents;
}
/**
* @param numComponents the numComponents to set
*/
private void setNumComponents(int numComponents) {
this.numComponents = numComponents;
}
public static class Format extends InternalFormat{//enums can't be extended. ):
public static final Format
RED = new Format(GL3.GL_RED,R,8),
RG = new Format(GL3.GL_RG,R,8,G,8),
RGB = new Format(GL3.GL_RGB,R,8,G,8,B,8),
BGR = new Format(GL3.GL_BGR,B,8,G,8,R,8),
RGBA = new Format(GL3.GL_RGBA,R,8,G,8,B,8,A,8),
BGRA = new Format(GL3.GL_BGRA,B,8,G,8,R,8,A,8);
public Format(int glEnum, int ... order) {
super(glEnum, order);
}
}//end Format()
public static class InternalFormat {
protected static final int R=0,G=1,B=2,A=3,N=-1;
public static final InternalFormat
R8=new InternalFormat(GL3.GL_R8,R,8),
R8_SNORM=new InternalFormat(GL3.GL_R8_SNORM,R,8),
R16=new InternalFormat(GL3.GL_R16,R,16),
R16_SNORM=new InternalFormat(GL3.GL_R16_SNORM,R,16),
RG8=new InternalFormat(GL3.GL_RG8,R,8,G,8),
RG8_SNORM=new InternalFormat(GL3.GL_RG8_SNORM,R,8,G,8),
RG16=new InternalFormat(GL3.GL_RG16,R,16,G,16),
RG16_SNORM=new InternalFormat(GL3.GL_RG16_SNORM,R,16,G,16),
R3_G3_B2=new InternalFormat(GL3.GL_R3_G3_B2,R,3,G,3,B,2),
RGB4=new InternalFormat(GL3.GL_RGB4,R,4,G,4,B,4),
RGB5=new InternalFormat(GL3.GL_RGB5,R,5,G,5,B,5),
RGB8=new InternalFormat(GL3.GL_RGB8,R,8,G,8,B,8),
RGB8_SNORM=new InternalFormat(GL3.GL_RGB8_SNORM,R,8,G,8,B,8),
RGB10=new InternalFormat(GL3.GL_RGB10,R,10,G,10,B,10),
RGB12=new InternalFormat(GL3.GL_RGB12,R,12,G,12,B,12),
RGB16=new InternalFormat(GL3.GL_RGB16,R,16,G,16,B,16),
RGB16_SNORM=new InternalFormat(GL3.GL_RGB16_SNORM,R,16,G,16,B,16),
RGBA2=new InternalFormat(GL3.GL_RGBA2,R,2,G,2,B,2,A,2),
RGBA4=new InternalFormat(GL3.GL_RGBA4,R,4,G,4,B,4,A,4),
RGB5_A1=new InternalFormat(GL3.GL_RGB5_A1,R,5,G,5,B,5,A,1),
RGBA8=new InternalFormat(GL3.GL_RGBA8,R,8,G,8,B,8,A,8),
RGBA8_SNORM=new InternalFormat(GL3.GL_RGBA8_SNORM,R,8,G,8,B,8,A,8),
RGB10_A2UI=new InternalFormat(GL3.GL_RGB10_A2UI,R,10,G,10,B,10,A,2),
RGBA12=new InternalFormat(GL3.GL_RGBA12,R,12,G,12,B,12,A,12),
RGBA16=new InternalFormat(GL3.GL_RGBA16,R,16,G,16,B,16,A,16),
RGBA16_SNORM=new InternalFormat(GL3.GL_RGBA16_SNORM,R,16,G,16,B,16,A,16),
SRGB8=new InternalFormat(GL3.GL_SRGB8,R,8,G,8,B,8),
SRGB8_ALPHA8=new InternalFormat(GL3.GL_SRGB8_ALPHA8,R,8,G,8,B,8,A,8),
R16F=new InternalFormat(GL3.GL_R16F,R,16),
RG16F=new InternalFormat(GL3.GL_RG16F,R,16,G,16),
RGB16F=new InternalFormat(GL3.GL_RGB16F,R,16,G,16,B,16),
R32F=new InternalFormat(GL3.GL_R32F,R,32),
RG32F=new InternalFormat(GL3.GL_RG32F,R,32,G,32),
RGB32F=new InternalFormat(GL3.GL_RGB32F,R,32,G,32,B,32),
RGBA32F=new InternalFormat(GL3.GL_RGBA32F,R,32,G,32,B,32,A,32),
R11F_G11F_B10F=new InternalFormat(GL3.GL_R11F_G11F_B10F,R,11,G,11,B,10),
RGB9_E5=new InternalFormat(GL3.GL_RGB9_E5,R,9,G,9,B,9),//TODO
R8I=new InternalFormat(GL3.GL_R8I,R,8),
R8UI=new InternalFormat(GL3.GL_R8UI,R,8),
R16I=new InternalFormat(GL3.GL_R16I,R,16),
R16UI=new InternalFormat(GL3.GL_R16UI,R,16),
R32I=new InternalFormat(GL3.GL_R32I,R,32),
R32UI=new InternalFormat(GL3.GL_R32UI,R,32),
RG8I=new InternalFormat(GL3.GL_RG8I,R,8,G,8),
RG8UI=new InternalFormat(GL3.GL_RG8UI,R,8,G,8),
RG16I=new InternalFormat(GL3.GL_RG16I,R,16,G,16),
RG16UI=new InternalFormat(GL3.GL_RG16UI,R,16,G,16),
RG32I=new InternalFormat(GL3.GL_RG32I,R,32,G,32),
RG32UI=new InternalFormat(GL3.GL_RG32UI,R,32,G,32),
RGB8I=new InternalFormat(GL3.GL_RGB8I,R,8,G,8,B,8),
RGB8UI=new InternalFormat(GL3.GL_RGB8UI,R,8,G,8,B,8),
RGB16I=new InternalFormat(GL3.GL_RGB16I,R,16,G,16,B,16),
RGB16UI=new InternalFormat(GL3.GL_RGB16UI,R,16,G,16,B,16),
RGB32I=new InternalFormat(GL3.GL_RGB32I,R,32,G,32,B,32),
RGB32UI=new InternalFormat(GL3.GL_RGB32UI,R,32,G,32,B,32),
RGBA8I=new InternalFormat(GL3.GL_RGBA8I,R,8,G,8,B,8,A,8),
RGBA8UI=new InternalFormat(GL3.GL_RGBA8UI,R,8,G,8,B,8,A,8),
RGBA16I=new InternalFormat(GL3.GL_RGBA16I,R,16,G,16,B,16,A,16),
RGBA16UI=new InternalFormat(GL3.GL_RGBA16UI,R,16,G,16,B,16,A,16),
RGBA32I=new InternalFormat(GL3.GL_RGBA32I,R,32,G,32,B,32,A,32),
RGBA32UI=new InternalFormat(GL3.GL_RGBA32UI,R,32,G,32,B,32,A,32);
private final int glEnum;
private final int [] order;
public InternalFormat(int glEnum, int ... order){
this.glEnum=glEnum;
this.order = order;
}//end constructor
public int getDestComponent(int index){
return order[index*2];
}
/**
* @return the glEnum
*/
public int getGlEnum() {
return glEnum;
}
/**
* @return the order
*/
public int[] getOrder() {
return order;
}
};
}// end GLTexture
|
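package org.jtrfp.trcl.gpu;
// Usage sketch (not part of the original sources): shows how the fluent GLTexture setters
// above are typically chained. It assumes a live GL context on the GL thread and an already
// initialized GPU instance; the debug name "colorTarget" and the sizes are illustrative only.
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import javax.media.opengl.GL3;
final class GLTextureUsageSketch {
    private GLTextureUsageSketch() {}
    /** Creates a nearest-filtered, edge-clamped RGBA32F color target of the given size. */
    static GLTexture newColorTarget(GPU gpu, int width, int height) {
        return gpu.newTexture()
                .bind()
                .setDebugName("colorTarget")
                .setMinFilter(GL3.GL_NEAREST)
                .setMagFilter(GL3.GL_NEAREST)
                .setWrapS(GL3.GL_CLAMP_TO_EDGE)
                .setWrapT(GL3.GL_CLAMP_TO_EDGE)
                .setImage(GL3.GL_RGBA32F, width, height, GL3.GL_RGBA, GL3.GL_FLOAT, null);
    }
    /** Uploads a square RGBA8888 image; the side length is derived from the buffer size. */
    static void uploadSquareRGBA(GLTexture texture, byte[] rgba8888) {
        ByteBuffer buf = ByteBuffer.allocateDirect(rgba8888.length).order(ByteOrder.nativeOrder());
        buf.put(rgba8888);
        buf.flip();
        texture.setTextureImageRGBA(buf);
    }
}
|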
package org.osgi.impl.service.application;
import java.io.*;
import java.security.AccessController;
import java.security.PrivilegedActionException;
import java.security.PrivilegedExceptionAction;
import java.util.*;
import org.osgi.service.application.*;
import org.osgi.service.application.ApplicationDescriptor.Delegate;
public class ApplicationDescriptorImpl implements Delegate {
private ApplicationDescriptor descriptor;
private static Properties locks;
private String pid;
public synchronized void setApplicationDescriptor(ApplicationDescriptor d, String pid ) {
descriptor = d;
this.pid = pid;
}
public boolean isLocked() {
return doLock(true, false );
}
synchronized boolean doLock(final boolean query, final boolean newState) {
try {
return ((Boolean)AccessController.doPrivileged(new PrivilegedExceptionAction() {
public Object run() throws Exception {
File f = Activator.bc.getDataFile("locks");
if ( locks == null ) {
locks = new Properties();
if ( f.exists() )
locks.load( new FileInputStream(f));
}
boolean current = locks.containsKey( pid );
if ( query || newState == current )
return new Boolean( current );
if ( current )
locks.remove( pid );
else
locks.put( pid, "locked");
locks.store(new FileOutputStream(f), "Saved " + new Date());
return new Boolean( newState );
}
})).booleanValue();
} catch( PrivilegedActionException pe ) {
pe.printStackTrace();
}
return false;
}
public void lock() {
SecurityManager sm = System.getSecurityManager();
if( sm != null )
sm.checkPermission( new ApplicationAdminPermission( descriptor, ApplicationAdminPermission.LOCK_ACTION ) );
doLock(false, true);
}
public void unlock() {
SecurityManager sm = System.getSecurityManager();
if( sm != null )
sm.checkPermission( new ApplicationAdminPermission( descriptor, ApplicationAdminPermission.LOCK_ACTION ) );
doLock(false, false);
}
public ScheduledApplication schedule(Map args, String topic, String filter, boolean recurs) {
return Activator.scheduler.addScheduledApplication( descriptor, args, topic, filter, recurs );
}
public void launch(Map arguments) throws Exception {
SecurityManager sm = System.getSecurityManager();
if( sm != null )
sm.checkPermission( new ApplicationAdminPermission( descriptor, ApplicationAdminPermission.LIFECYCLE_ACTION ) );
if ( isLocked() )
throw new Exception("Application is locked, can't launch!");
}
}
|
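package org.osgi.impl.service.application;
// Minimal sketch (not part of the original sources) of the lock-file pattern used by
// doLock(...) above: a java.util.Properties file keyed by application PID is loaded lazily,
// queried or toggled, and written back. The file location and PID values are illustrative only.
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.Date;
import java.util.Properties;
final class LockFileSketch {
    private final File lockFile;
    private Properties locks;
    LockFileSketch(File lockFile) {
        this.lockFile = lockFile;
    }
    /** Queries or toggles the lock flag for a PID, mirroring doLock(query, newState). */
    synchronized boolean doLock(String pid, boolean query, boolean newState) throws IOException {
        if (locks == null) {
            locks = new Properties();
            if (lockFile.exists()) {
                try (FileInputStream in = new FileInputStream(lockFile)) {
                    locks.load(in);
                }
            }
        }
        boolean current = locks.containsKey(pid);
        if (query || newState == current) {
            return current;
        }
        if (current) {
            locks.remove(pid);
        } else {
            locks.put(pid, "locked");
        }
        try (FileOutputStream out = new FileOutputStream(lockFile)) {
            locks.store(out, "Saved " + new Date());
        }
        return newState;
    }
}
|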
package hangman.core.secret;
import hangman.core.guess.Guess;
import java.io.Serializable;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;
import org.apache.commons.lang3.Validate;
public final class Secret implements Serializable {
private static final long serialVersionUID = 7431377069243119939L;
/** Space separator between words */
public static final char SPACE_SEPARATOR = ' ';
/** Unmodifiable set of allowed characters */
public static final Set<Character> ALLOWED_CHARACTERS = Collections.unmodifiableSet(new HashSet<Character>() {
private static final long serialVersionUID = 5939962907815431449L;
{
// an enum would be slight over-engineering here
for(char allowed='a'; allowed<='z'; allowed++) {
add(allowed);
}
for(char allowed='A'; allowed<='Z'; allowed++) {
add(allowed);
}
}
});
private final String value;
private final Category category;
public String getValue() {
return value;
}
public Category getCategory() {
return category;
}
/**
*
* @return how many distinct guesses it takes to know the secret
*/
public int getGuessesToKnowMeNo() {
final Set<Guess> uniqueGuesses = new HashSet<>();
for(char c : getValue().toCharArray()) {
if (Guess.isValidGuessCharacter(c)) { // be careful about space separators
uniqueGuesses.add(Guess.newFor(c));
}
}
return uniqueGuesses.size();
}
public static boolean isValidSecretCharacter(char value) {
return value == SPACE_SEPARATOR || ALLOWED_CHARACTERS.contains(value);
}
// for convenience
public static Secret newSecret(String value, Category category){
return new Secret(value, category);
}
// private constructor
private Secret(String value, Category category) {
Validate.notBlank(value);
Validate.notNull(category);
this.value = value;
this.category = category;
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((category == null) ? 0 : category.hashCode());
result = prime * result + ((value == null) ? 0 : value.toLowerCase().hashCode()); // toLowerCase is important here
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj) { return true; }
if (obj == null) { return false; }
if (obj.getClass() != getClass()) {
return false;
}
Secret other = (Secret) obj;
if (category != other.category) {
return false;
}
if (value == null) {
if (other.value != null) { return false; }
} else if (!value.equals(other.value)) {
return false;
}
return true;
}
/**
* Available categories
*/
public static enum Category {
ANIMALS, FRUITS, VEGETABLES;
public static Category findByNameIgnoreCase(String value) {
Validate.notNull(value);
for(Category category: Category.values()) {
if(category.name().equalsIgnoreCase(value)) {
return category;
}
}
return null;
}
}
@Override
public String toString() {
return "Secret [value=" + value + ", category=" + category + "]";
}
}
|
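package hangman.core.secret;
// Usage sketch (not part of the original sources) for the Secret class above. The secret text
// and category are illustrative; the expected count of 7 assumes Guess treats only letters as
// guessable, as the space-separator comment in getGuessesToKnowMeNo() suggests.
final class SecretUsageSketch {
    public static void main(String[] args) {
        Secret secret = Secret.newSecret("green apple", Secret.Category.FRUITS);
        // "green apple" contains the distinct letters g, r, e, n, a, p, l; repeated letters
        // and the space separator do not add guesses.
        System.out.println(secret.getGuessesToKnowMeNo()); // expected: 7
        System.out.println(Secret.isValidSecretCharacter(' ')); // true: spaces may appear in secrets
        System.out.println(Secret.Category.findByNameIgnoreCase("fruits")); // FRUITS
    }
}
|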
package org.lightmare.jpa;
import java.io.IOException;
import java.net.URL;
import java.util.Collection;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import javax.naming.Context;
import javax.naming.NamingException;
import javax.persistence.EntityManagerFactory;
import javax.persistence.Persistence;
import org.apache.log4j.Logger;
import org.lightmare.cache.ConnectionSemaphore;
import org.lightmare.jndi.NamingUtils;
import org.lightmare.jpa.jta.HibernateConfig;
import org.lightmare.utils.ObjectUtils;
/**
* Creates and caches {@link EntityManagerFactory} for each ejb bean
* {@link Class}'s appropriate field (annotated by @PersistenceContext)
*
* @author Levan
*
*/
public class JPAManager {
// Keeps unique EntityManagerFactory instances built per unit name
private static final ConcurrentMap<String, ConnectionSemaphore> CONNECTIONS = new ConcurrentHashMap<String, ConnectionSemaphore>();
private List<String> classes;
private String path;
private URL url;
private Map<Object, Object> properties;
private boolean swapDataSource;
private boolean scanArchives;
public static boolean pooledDataSource;
private static final Logger LOG = Logger.getLogger(JPAManager.class);
private JPAManager() {
}
public static boolean checkForEmf(String unitName) {
boolean check = ObjectUtils.available(unitName);
if (check) {
check = CONNECTIONS.containsKey(unitName);
}
return check;
}
public static ConnectionSemaphore getSemaphore(String unitName) {
return CONNECTIONS.get(unitName);
}
private static ConnectionSemaphore createSemaphore(String unitName) {
ConnectionSemaphore semaphore = CONNECTIONS.get(unitName);
ConnectionSemaphore current = null;
if (semaphore == null) {
semaphore = new ConnectionSemaphore();
semaphore.setUnitName(unitName);
semaphore.setInProgress(Boolean.TRUE);
semaphore.setCached(Boolean.TRUE);
current = CONNECTIONS.putIfAbsent(unitName, semaphore);
}
if (current == null) {
current = semaphore;
}
current.incrementUser();
return current;
}
public static ConnectionSemaphore setSemaphore(String unitName,
String jndiName) {
ConnectionSemaphore semaphore = null;
if (ObjectUtils.available(unitName)) {
semaphore = createSemaphore(unitName);
if (ObjectUtils.available(jndiName)) {
ConnectionSemaphore existent = CONNECTIONS.putIfAbsent(
jndiName, semaphore);
if (existent == null) {
semaphore.setJndiName(jndiName);
}
}
}
return semaphore;
}
private static void awaitConnection(ConnectionSemaphore semaphore) {
synchronized (semaphore) {
boolean inProgress = semaphore.isInProgress()
&& !semaphore.isBound();
while (inProgress) {
try {
semaphore.wait();
inProgress = semaphore.isInProgress()
&& !semaphore.isBound();
} catch (InterruptedException ex) {
inProgress = Boolean.FALSE;
LOG.error(ex.getMessage(), ex);
}
}
}
}
public static boolean isInProgress(String jndiName) {
ConnectionSemaphore semaphore = CONNECTIONS.get(jndiName);
boolean inProgress = ObjectUtils.notNull(semaphore);
if (inProgress) {
inProgress = semaphore.isInProgress() && !semaphore.isBound();
if (inProgress) {
awaitConnection(semaphore);
}
}
return inProgress;
}
private void addTransactionManager() {
if (properties == null) {
properties = new HashMap<Object, Object>();
}
properties.put(HibernateConfig.FACTORY_KEY,
HibernateConfig.FACTORY_VALUE);
properties.put(HibernateConfig.PLATFORM_KEY,
HibernateConfig.PLATFORM_VALUE);
}
/**
* Creates {@link EntityManagerFactory} by hibernate or by extended builder
* {@link Ejb3ConfigurationImpl} if entity classes or persistence.xml file
* path are provided
*
* @see Ejb3ConfigurationImpl#configure(String, Map) and
* Ejb3ConfigurationImpl#createEntityManagerFactory()
*
* @param unitName
* @return {@link EntityManagerFactory}
*/
@SuppressWarnings("deprecation")
private EntityManagerFactory buildEntityManagerFactory(String unitName)
throws IOException {
EntityManagerFactory emf;
Ejb3ConfigurationImpl cfg;
boolean checkForPath = ObjectUtils.available(path);
boolean checkForURL = checkForURL();
Ejb3ConfigurationImpl.Builder builder = new Ejb3ConfigurationImpl.Builder();
if (ObjectUtils.available(classes)) {
builder.setClasses(classes);
}
if (checkForPath || checkForURL) {
Enumeration<URL> xmls;
ConfigLoader configLoader = new ConfigLoader();
if (checkForPath) {
xmls = configLoader.readFile(path);
} else {
xmls = configLoader.readURL(url);
}
builder.setXmls(xmls);
String shortPath = configLoader.getShortPath();
builder.setShortPath(shortPath);
}
builder.setSwapDataSource(swapDataSource);
builder.setScanArchives(scanArchives);
cfg = builder.build();
if (!swapDataSource) {
addTransactionManager();
}
Ejb3ConfigurationImpl configured = cfg.configure(unitName, properties);
emf = ObjectUtils.notNull(configured) ? configured
.buildEntityManagerFactory() : null;
return emf;
}
/**
* Checks if entity persistence.xml {@link URL} is provided
*
* @return boolean
*/
private boolean checkForURL() {
return ObjectUtils.notNull(url)
&& ObjectUtils.available(url.toString());
}
/**
* Checks if entity classes or persistence.xml path are provided
*
* @param classes
* @return boolean
*/
private boolean checkForBuild() {
return ObjectUtils.available(classes) || ObjectUtils.available(path)
|| checkForURL() || swapDataSource || scanArchives;
}
/**
* Checks if entity classes or persistence.xml file path are provided to
* create {@link EntityManagerFactory}
*
* @see #buildEntityManagerFactory(String)
*
* @param unitName
* @param properties
* @param path
* @param classes
* @return {@link EntityManagerFactory}
* @throws IOException
*/
private EntityManagerFactory createEntityManagerFactory(String unitName)
throws IOException {
EntityManagerFactory emf;
if (checkForBuild()) {
emf = buildEntityManagerFactory(unitName);
} else if (properties == null) {
emf = Persistence.createEntityManagerFactory(unitName);
} else {
emf = Persistence.createEntityManagerFactory(unitName, properties);
}
return emf;
}
/**
* Binds {@link EntityManagerFactory} to {@link javax.naming.InitialContext}
*
* @param jndiName
* @param unitName
* @param emf
* @throws IOException
*/
private void bindJndiName(ConnectionSemaphore semaphore) throws IOException {
boolean bound = semaphore.isBound();
if (!bound) {
String jndiName = semaphore.getJndiName();
if (ObjectUtils.available(jndiName)) {
NamingUtils namingUtils = new NamingUtils();
try {
Context context = namingUtils.getContext();
String fullJndiName = NamingUtils
.createJpaJndiName(jndiName);
if (context.lookup(fullJndiName) == null) {
namingUtils.getContext().rebind(fullJndiName,
semaphore.getEmf());
}
semaphore.setBound(Boolean.TRUE);
} catch (NamingException ex) {
throw new IOException(String.format(
"could not bind connection %s",
semaphore.getUnitName()), ex);
}
} else {
semaphore.setBound(Boolean.TRUE);
}
}
}
public void setConnection(String unitName) throws IOException {
ConnectionSemaphore semaphore = CONNECTIONS.get(unitName);
if (semaphore.isInProgress()) {
EntityManagerFactory emf = createEntityManagerFactory(unitName);
semaphore.setEmf(emf);
semaphore.setInProgress(Boolean.FALSE);
bindJndiName(semaphore);
} else if (semaphore.getEmf() == null) {
throw new IOException(String.format(
"Connection %s was not in progress", unitName));
} else {
bindJndiName(semaphore);
}
}
/**
* Gets {@link ConnectionSemaphore} from cache, awaits if connection
* instantiation is in progress
*
* @param unitName
* @return {@link ConnectionSemaphore}
* @throws IOException
*/
public static ConnectionSemaphore getConnection(String unitName)
throws IOException {
ConnectionSemaphore semaphore = CONNECTIONS.get(unitName);
if (ObjectUtils.notNull(semaphore)) {
awaitConnection(semaphore);
}
return semaphore;
}
/**
* Gets {@link EntityManagerFactory} from {@link ConnectionSemaphore},
* awaits if connection instantiation is in progress
*
* @param unitName
* @return {@link EntityManagerFactory}
* @throws IOException
*/
public static EntityManagerFactory getEntityManagerFactory(String unitName)
throws IOException {
EntityManagerFactory emf = null;
ConnectionSemaphore semaphore = CONNECTIONS.get(unitName);
if (ObjectUtils.notNull(semaphore)) {
awaitConnection(semaphore);
emf = semaphore.getEmf();
}
return emf;
}
/**
* Unbinds connection from {@link javax.naming.Context}
*
* @param semaphore
*/
private static void unbindConnection(ConnectionSemaphore semaphore) {
String jndiName = semaphore.getJndiName();
if (ObjectUtils.notNull(jndiName) && semaphore.isBound()) {
NamingUtils namingUtils = new NamingUtils();
try {
Context context = namingUtils.getContext();
String fullJndiName = NamingUtils.createJpaJndiName(jndiName);
if (ObjectUtils.notNull(context.lookup(fullJndiName))) {
context.unbind(fullJndiName);
}
} catch (NamingException ex) {
LOG.error(String.format(
"Could not unbind jndi name %s cause %s", jndiName,
ex.getMessage()), ex);
} catch (IOException ex) {
LOG.error(String.format(
"Could not unbind jndi name %s cause %s", jndiName,
ex.getMessage()), ex);
}
}
}
/**
* Closes connection ({@link EntityManagerFactory}) in passed
* {@link ConnectionSemaphore}
*
* @param semaphore
*/
private static void closeConnection(ConnectionSemaphore semaphore) {
int users = semaphore.decrementUser();
if (users <= 0) {
EntityManagerFactory emf = semaphore.getEmf();
closeEntityManagerFactory(emf);
unbindConnection(semaphore);
CONNECTIONS.remove(semaphore.getUnitName());
String jndiName = semaphore.getJndiName();
if (ObjectUtils.available(jndiName)) {
CONNECTIONS.remove(jndiName);
semaphore.setBound(Boolean.FALSE);
semaphore.setCached(Boolean.FALSE);
}
}
}
/**
* Removes {@link ConnectionSemaphore} from cache and unbinds name from
* {@link javax.naming.Context}
*
* @param unitName
*/
public static void removeConnection(String unitName) {
ConnectionSemaphore semaphore = CONNECTIONS.get(unitName);
if (ObjectUtils.notNull(semaphore)) {
awaitConnection(semaphore);
unbindConnection(semaphore);
closeConnection(semaphore);
}
}
/**
* Closes passed {@link EntityManagerFactory}
*
* @param emf
*/
private static void closeEntityManagerFactory(EntityManagerFactory emf) {
if (ObjectUtils.notNull(emf) && emf.isOpen()) {
emf.close();
}
}
/**
* Closes all existing {@link EntityManagerFactory} instances kept in cache
*/
public static void closeEntityManagerFactories() {
Collection<ConnectionSemaphore> semaphores = CONNECTIONS.values();
EntityManagerFactory emf;
for (ConnectionSemaphore semaphore : semaphores) {
emf = semaphore.getEmf();
closeEntityManagerFactory(emf);
}
CONNECTIONS.clear();
}
/**
* Builder class to create {@link JPAManager} class object
*
* @author Levan
*
*/
public static class Builder {
private JPAManager manager;
public Builder() {
manager = new JPAManager();
manager.scanArchives = Boolean.TRUE;
}
public Builder setClasses(List<String> classes) {
manager.classes = classes;
return this;
}
public Builder setURL(URL url) {
manager.url = url;
return this;
}
public Builder setPath(String path) {
manager.path = path;
return this;
}
public Builder setProperties(Map<Object, Object> properties) {
manager.properties = properties;
return this;
}
public Builder setSwapDataSource(boolean swapDataSource) {
manager.swapDataSource = swapDataSource;
return this;
}
public Builder setScanArchives(boolean scanArchives) {
manager.scanArchives = scanArchives;
return this;
}
public Builder setDataSourcePooledType(boolean dsPooledType) {
JPAManager.pooledDataSource = dsPooledType;
return this;
}
public JPAManager build() {
return manager;
}
}
}
|
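package org.lightmare.jpa;
// Usage sketch (not part of the original sources): builds a JPAManager with the Builder above
// and resolves the cached EntityManagerFactory by unit name. The unit name, JNDI name and
// persistence.xml path are illustrative; setSemaphore(...) must run first so the unit is
// registered as "in progress" before setConnection(...) builds and binds the factory.
import java.io.IOException;
import javax.persistence.EntityManagerFactory;
final class JPAManagerUsageSketch {
    static EntityManagerFactory connect() throws IOException {
        JPAManager.setSemaphore("exampleUnit", "exampleJndi");
        JPAManager manager = new JPAManager.Builder()
                .setPath("META-INF/persistence.xml")
                .setSwapDataSource(true)
                .build();
        manager.setConnection("exampleUnit");
        return JPAManager.getEntityManagerFactory("exampleUnit");
    }
}
|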
package org.osgi.impl.service.application;
import java.io.*;
import java.util.*;
import java.security.*;
import org.osgi.framework.*;
import org.osgi.service.application.*;
import org.osgi.service.application.ApplicationDescriptor.Delegate;
public class ApplicationDescriptorImpl implements Delegate {
private ApplicationDescriptor descriptor;
private boolean locked;
private static Properties locks;
private BundleContext bc;
private Scheduler scheduler;
public ApplicationDescriptorImpl( BundleContext bc, Scheduler scheduler ) {
this.bc = bc;
this.scheduler = scheduler;
locks = null;
}
public synchronized void setApplicationDescriptor(ApplicationDescriptor d) {
descriptor = d;
}
public boolean isLocked() {
return doLock(true, false );
}
synchronized boolean doLock(boolean query, boolean newState) {
try {
File f = bc.getDataFile("locks");
if ( locks == null ) {
locks = new Properties();
if ( f.exists() )
locks.load( new FileInputStream(f));
}
boolean current = locks.containsKey(descriptor.getPID());
if ( query || newState == current )
return current;
if ( current )
locks.remove(descriptor.getPID());
else
locks.put(descriptor.getPID(), "locked");
locks.store(new FileOutputStream(f), "Saved " + new Date());
return newState;
} catch( IOException ioe ) {
ioe.printStackTrace();
// TODO log this
}
return false;
}
public void lock() {
doLock(false, true);
}
public void unlock() {
doLock(false, false);
}
public ServiceReference schedule(Map args, String topic, String filter, boolean recurs) {
return scheduler.addScheduledApplication( descriptor.getPID(), args, topic, filter, recurs );
}
public void launch(Map arguments) throws Exception {
AccessController.checkPermission( new ApplicationAdminPermission(
descriptor.getPID(), ApplicationAdminPermission.LAUNCH ) );
Map props = descriptor.getProperties("en");
String isLocked = (String)props.get("application.locked");
if (isLocked != null && isLocked.equalsIgnoreCase("true"))
throw new Exception("Application is locked, can't launch!");
String isLaunchable = (String)props.get("application.launchable");
if (isLaunchable == null || !isLaunchable.equalsIgnoreCase("true"))
throw new Exception("Cannot launch the application!");
String isSingleton = (String)props.get("application.singleton");
if (isSingleton == null || isSingleton.equalsIgnoreCase("true")) {
ServiceReference[] appHandles = bc.getServiceReferences(
"org.osgi.service.application.ApplicationHandle", null);
if (appHandles != null)
for (int k = 0; k != appHandles.length; k++) {
ApplicationHandle handle = (ApplicationHandle) bc
.getService(appHandles[k]);
ApplicationDescriptor appDesc = handle.getApplicationDescriptor();
bc.ungetService(appHandles[k]);
if ( appDesc == descriptor )
throw new Exception("Singleton Exception!");
}
}
}
}
|
package io.yancey.menufetcher;
import io.yancey.menufetcher.data.*;
import io.yancey.menufetcher.fetchers.*;
import java.io.*;
import java.nio.file.*;
import java.time.*;
import java.util.*;
import joptsimple.*;
public class Main {
public static void main(String[] stringArgs) throws IOException {
OptionParser parser = new OptionParser();
OptionSpec<File> basedirOpt = parser.acceptsAll(
Arrays.asList("basedir", "f"),
"The base directory for the webpages and `api` folder")
.withRequiredArg().ofType(File.class)
.defaultsTo(new File("."));
OptionSpec<LocalDate> dateListOpt = parser.acceptsAll(
Arrays.asList("dates", "date", "d"),
"A list of dates to generate")
.withRequiredArg()
.withValuesSeparatedBy(',')
.withValuesConvertedBy(new LocalDateValueConverter());
OptionSpec<LocalDate> startDateOpt = parser.acceptsAll(
Arrays.asList("from", "startDate", "s"),
"The starting date to generate")
.availableUnless(dateListOpt)
.withRequiredArg()
.withValuesConvertedBy(new LocalDateValueConverter())
.defaultsTo(LocalDate.now());
OptionSpec<LocalDate> endDateOpt = parser.acceptsAll(
Arrays.asList("to", "endDate", "e"),
"The ending date to generate")
.availableUnless(dateListOpt)
.withRequiredArg()
.withValuesConvertedBy(new LocalDateValueConverter());
OptionSpec<Integer> numDaysOpt = parser.acceptsAll(
Arrays.asList("numDays", "n"),
"How many days to generate")
.availableUnless(dateListOpt, endDateOpt)
.withRequiredArg()
.ofType(Integer.class)
.defaultsTo(1);
OptionSpec<Void> webOpt = parser.acceptsAll(
Arrays.asList("web", "w"),
"Generate the webpage");
OptionSpec<LocalDate> indexOpt = parser.acceptsAll(
Arrays.asList("index", "i"),
"Generate the index, optionally at a given date")
.availableIf(webOpt)
.withOptionalArg()
.withValuesConvertedBy(new LocalDateValueConverter())
.defaultsTo(LocalDate.now());
OptionSpec<Void> apiOpt = parser.acceptsAll(
Arrays.asList("api", "a"),
"Generate the api");
OptionSpec<Void> helpOpt = parser.acceptsAll(
Arrays.asList("help", "h", "?"),
"Get help")
.forHelp();
OptionSet args;
try {
args = parser.parse(stringArgs);
} catch(OptionException e) {
System.err.println("Error parsing arguments: " + e);
System.err.println();
parser.printHelpOn(System.err);
return;
}
if(args.has(helpOpt)) {
try {
parser.printHelpOn(System.out);
} catch (IOException e) {
throw new RuntimeException(e);
}
return;
}
List<LocalDate> dates = getDates(args, dateListOpt, startDateOpt, endDateOpt, numDaysOpt);
String baseDir = args.valueOf(basedirOpt).getAbsolutePath();
if(args.has(basedirOpt)) Files.createDirectories(args.valueOf(basedirOpt).toPath());
generateStuff(args, dates, baseDir, webOpt, indexOpt, apiOpt);
}
private static void generateStuff(OptionSet args, List<LocalDate> dates, String baseDir,
OptionSpec<Void> webOpt, OptionSpec<LocalDate> indexOpt, OptionSpec<Void> apiOpt) {
List<MenuFetcher> menuFetchers = MenuFetcher.getAllMenuFetchers();
for(LocalDate day: dates) {
List<Menu> menus = MenuFetcher.fetchAllMenus(menuFetchers, day);
if(args.has(webOpt)) {
WebpageCreator.createAndSaveWebpage(baseDir, day, menus);
if(args.has(indexOpt)) {
try {
WebpageCreator.createIndex(baseDir, args.valueOf(indexOpt));
} catch (IOException e) {
System.err.println("error creating index:");
e.printStackTrace();
}
}
}
if(args.has(apiOpt)) {
try {
ApiCreator.createAPI(baseDir, day, menus);
} catch (IOException e) {
System.err.println("error creating api:");
e.printStackTrace();
}
}
}
}
private static List<LocalDate> getDates(OptionSet args,
OptionSpec<LocalDate> dateListOpt,
OptionSpec<LocalDate> startDateOpt, OptionSpec<LocalDate> endDateOpt,
OptionSpec<Integer> numDaysOpt) {
if(args.has(dateListOpt)) {
return args.valuesOf(dateListOpt);
} else {
List<LocalDate> dates = new ArrayList<>();
LocalDate startDate;
if(args.has(startDateOpt)) {
startDate = args.valueOf(startDateOpt);
} else {
startDate = LocalDate.now();
}
if(args.has(endDateOpt)) {
for(LocalDate day = startDate;
!day.isAfter(args.valueOf(endDateOpt));
day = day.plusDays(1)) {
dates.add(day);
}
} else {
LocalDate day = startDate;
for(int i = 0; i < args.valueOf(numDaysOpt); i++, day = day.plusDays(1)) {
dates.add(day);
}
}
return dates;
}
}
}
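// Example invocations (not part of the original sources; paths and dates are illustrative,
// and the date format assumed below is ISO yyyy-MM-dd as accepted by LocalDateValueConverter):
//
//   java io.yancey.menufetcher.Main --web --index --api --basedir /var/www/menus
//       generate today's webpage, index and api output under /var/www/menus
//
//   java io.yancey.menufetcher.Main -w -a -s 2016-01-04 -n 7
//       generate a week of pages starting 2016-01-04
//
//   java io.yancey.menufetcher.Main --api --dates 2016-01-04,2016-01-05
//       generate api output for the two listed dates only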
|
package org.lightmare.jpa;
import java.io.IOException;
import java.net.URL;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.persistence.EntityManager;
import javax.persistence.EntityManagerFactory;
import javax.persistence.Persistence;
import org.apache.log4j.Logger;
import org.lightmare.cache.ConnectionContainer;
import org.lightmare.cache.ConnectionSemaphore;
import org.lightmare.config.Configuration;
import org.lightmare.jndi.JndiManager;
import org.lightmare.jpa.jta.HibernateConfig;
import org.lightmare.libraries.LibraryLoader;
import org.lightmare.utils.CollectionUtils;
import org.lightmare.utils.NamingUtils;
import org.lightmare.utils.ObjectUtils;
import org.lightmare.utils.StringUtils;
/**
* Creates and caches {@link EntityManagerFactory} for each EJB bean
* {@link Class}'s appropriate field (annotated by @PersistenceContext)
*
* @author Levan
*
*/
public class JpaManager {
// Entity classes
private List<String> classes;
private String path;
private URL url;
// Overridden properties
private Map<Object, Object> properties;
private boolean swapDataSource;
private boolean scanArchives;
private ClassLoader loader;
private static final String COULD_NOT_BIND_JNDI_ERROR = "could not bind connection";
private static final Logger LOG = Logger.getLogger(JpaManager.class);
private JpaManager() {
}
/**
* Checks if entity persistence.xml {@link URL} is provided
*
* @return boolean
*/
private boolean checkForURL() {
return ObjectUtils.notNull(url) && StringUtils.valid(url.toString());
}
/**
* Checks if entity classes or persistence.xml path are provided
*
* @param classes
* @return boolean
*/
private boolean checkForBuild() {
return CollectionUtils.valid(classes) || StringUtils.valid(path)
|| checkForURL() || swapDataSource || scanArchives;
}
/**
* Adds transaction properties for JTA data sources
*/
private void addTransactionManager() {
if (properties == null) {
properties = new HashMap<Object, Object>();
}
HibernateConfig[] hibernateConfigs = HibernateConfig.values();
for (HibernateConfig hibernateConfig : hibernateConfigs) {
properties.put(hibernateConfig.key, hibernateConfig.value);
}
}
/**
* Creates {@link EntityManagerFactory} by hibernate or by extended builder
* {@link Ejb3ConfigurationImpl} if entity classes or persistence.xml file
* path are provided
*
* @see Ejb3ConfigurationImpl#configure(String, Map) and
* Ejb3ConfigurationImpl#createEntityManagerFactory()
*
* @param unitName
* @return {@link EntityManagerFactory}
*/
@SuppressWarnings("deprecation")
private EntityManagerFactory buildEntityManagerFactory(String unitName)
throws IOException {
EntityManagerFactory emf;
Ejb3ConfigurationImpl cfg;
boolean pathCheck = StringUtils.valid(path);
boolean urlCheck = checkForURL();
Ejb3ConfigurationImpl.Builder builder = new Ejb3ConfigurationImpl.Builder();
if (loader == null) {
loader = LibraryLoader.getContextClassLoader();
}
if (CollectionUtils.valid(classes)) {
builder.setClasses(classes);
// Loads entity classes to current ClassLoader instance
LibraryLoader.loadClasses(classes, loader);
}
if (pathCheck || urlCheck) {
Enumeration<URL> xmls;
ConfigLoader configLoader = new ConfigLoader();
if (pathCheck) {
xmls = configLoader.readFile(path);
} else {
xmls = configLoader.readURL(url);
}
builder.setXmls(xmls);
String shortPath = configLoader.getShortPath();
builder.setShortPath(shortPath);
}
builder.setSwapDataSource(swapDataSource);
builder.setScanArchives(scanArchives);
builder.setOverridenClassLoader(loader);
cfg = builder.build();
if (ObjectUtils.notTrue(swapDataSource)) {
addTransactionManager();
}
Ejb3ConfigurationImpl configured = cfg.configure(unitName, properties);
if (ObjectUtils.notNull(configured)) {
emf = configured.buildEntityManagerFactory();
} else {
emf = null;
}
return emf;
}
/**
* Checks if entity classes or persistence.xml file path are provided to
* create {@link EntityManagerFactory}
*
* @see #buildEntityManagerFactory(String)
*
* @param unitName
* @param properties
* @param path
* @param classes
* @return {@link EntityManagerFactory}
* @throws IOException
*/
private EntityManagerFactory createEntityManagerFactory(String unitName)
throws IOException {
EntityManagerFactory emf;
if (checkForBuild()) {
emf = buildEntityManagerFactory(unitName);
} else if (properties == null) {
emf = Persistence.createEntityManagerFactory(unitName);
} else {
emf = Persistence.createEntityManagerFactory(unitName, properties);
}
return emf;
}
/**
* Binds {@link EntityManagerFactory} to {@link javax.naming.InitialContext}
*
* @param jndiName
* @param unitName
* @param emf
* @throws IOException
*/
private void bindJndiName(ConnectionSemaphore semaphore) throws IOException {
boolean bound = semaphore.isBound();
if (ObjectUtils.notTrue(bound)) {
String jndiName = semaphore.getJndiName();
if (StringUtils.valid(jndiName)) {
JndiManager jndiManager = new JndiManager();
try {
String fullJndiName = NamingUtils
.createJpaJndiName(jndiName);
if (jndiManager.lookup(fullJndiName) == null) {
jndiManager.rebind(fullJndiName, semaphore.getEmf());
}
} catch (IOException ex) {
LOG.error(ex.getMessage(), ex);
String errorMessage = StringUtils.concat(
COULD_NOT_BIND_JNDI_ERROR, semaphore.getUnitName());
throw new IOException(errorMessage, ex);
}
}
}
semaphore.setBound(Boolean.TRUE);
}
/**
* Builds a connection, wraps it in a {@link ConnectionSemaphore} lock and
* caches the appropriate instance
*
* @param unitName
* @throws IOException
*/
public void create(String unitName) throws IOException {
ConnectionSemaphore semaphore = ConnectionContainer
.getSemaphore(unitName);
if (semaphore.isInProgress()) {
EntityManagerFactory emf = createEntityManagerFactory(unitName);
semaphore.setEmf(emf);
semaphore.setInProgress(Boolean.FALSE);
bindJndiName(semaphore);
} else if (semaphore.getEmf() == null) {
String errorMessage = String.format(
"Connection %s was not in progress", unitName);
throw new IOException(errorMessage);
} else {
bindJndiName(semaphore);
}
}
/**
* Closes passed {@link EntityManagerFactory}
*
* @param emf
*/
public static void closeEntityManagerFactory(EntityManagerFactory emf) {
if (ObjectUtils.notNull(emf) && emf.isOpen()) {
emf.close();
}
}
public static void closeEntityManager(EntityManager em) {
if (ObjectUtils.notNull(em) && em.isOpen()) {
em.close();
}
}
/**
* Builder class to create {@link JpaManager} class object
*
* @author Levan
*
*/
public static class Builder {
private JpaManager manager;
public Builder() {
manager = new JpaManager();
manager.scanArchives = Boolean.TRUE;
}
/**
* Sets {@link javax.persistence.Entity} class names to initialize
*
* @param classes
* @return {@link Builder}
*/
public Builder setClasses(List<String> classes) {
manager.classes = classes;
return this;
}
/**
* Sets {@link URL} for persistence.xml file
*
* @param url
* @return {@link Builder}
*/
public Builder setURL(URL url) {
manager.url = url;
return this;
}
/**
* Sets path for persistence.xml file
*
* @param path
* @return {@link Builder}
*/
public Builder setPath(String path) {
manager.path = path;
return this;
}
/**
* Sets additional persistence properties
*
* @param properties
* @return {@link Builder}
*/
public Builder setProperties(Map<Object, Object> properties) {
manager.properties = properties;
return this;
}
/**
* Sets flag indicating whether the JTA data source value should be swapped
* with the non-JTA data source value
*
* @param swapDataSource
* @return {@link Builder}
*/
public Builder setSwapDataSource(boolean swapDataSource) {
manager.swapDataSource = swapDataSource;
return this;
}
/**
* Sets boolean check to scan deployed archive files for
* {@link javax.persistence.Entity} annotated classes
*
* @param scanArchives
* @return {@link Builder}
*/
public Builder setScanArchives(boolean scanArchives) {
manager.scanArchives = scanArchives;
return this;
}
/**
* Sets {@link ClassLoader} for persistence classes
*
* @param loader
* @return {@link Builder}
*/
public Builder setClassLoader(ClassLoader loader) {
manager.loader = loader;
return this;
}
/**
* Sets all parameters from passed {@link Configuration} instance
*
* @param configuration
* @return {@link Builder}
*/
public Builder configure(Configuration configuration) {
setPath(configuration.getPersXmlPath())
.setProperties(configuration.getPersistenceProperties())
.setSwapDataSource(configuration.isSwapDataSource())
.setScanArchives(configuration.isScanArchives());
return this;
}
public JpaManager build() {
return manager;
}
}
}
|
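package org.lightmare.jpa;
// Usage sketch (not part of the original sources): wires a JpaManager from a Configuration
// instance via Builder.configure(...) above and creates the connection for a unit that has
// already been registered in ConnectionContainer. The unit name and the class-loader choice
// are illustrative.
import java.io.IOException;
import org.lightmare.config.Configuration;
import org.lightmare.libraries.LibraryLoader;
final class JpaManagerUsageSketch {
    static void connect(Configuration configuration, String unitName) throws IOException {
        new JpaManager.Builder()
                .configure(configuration)
                .setClassLoader(LibraryLoader.getContextClassLoader())
                .build()
                .create(unitName);
    }
}
|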
package org.ontoware.rdf2go.impl.jena24;
import org.ontoware.rdf2go.exception.ModelRuntimeException;
import org.ontoware.rdf2go.model.Statement;
import org.ontoware.rdf2go.model.impl.AbstractStatement;
import org.ontoware.rdf2go.model.node.Resource;
import org.ontoware.rdf2go.model.node.URI;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.hp.hpl.jena.graph.Node;
import com.hp.hpl.jena.graph.Triple;
import com.hp.hpl.jena.rdf.model.Model;
public class StatementJena24Impl extends AbstractStatement implements Statement {
private static Logger log = LoggerFactory.getLogger(StatementJena24Impl.class);
private Node s, p, o;
private org.ontoware.rdf2go.model.Model model;
public StatementJena24Impl(org.ontoware.rdf2go.model.Model model, Node s, Node p, Node o) {
this.model = model;
assert s != null;
assert p != null;
assert o != null;
this.s = s;
this.p = p;
this.o = o;
}
public com.hp.hpl.jena.rdf.model.Statement toJenaStatement(Model jenaModel) {
Triple t = new Triple(s, p, o);
return jenaModel.asStatement(t);
}
public Resource getSubject() {
try {
return (Resource) TypeConversion.toRDF2Go(s);
} catch (ModelRuntimeException e) {
throw new ModelRuntimeException(e);
}
}
public URI getPredicate() {
try {
return (URI) TypeConversion.toRDF2Go(p);
} catch (ModelRuntimeException e) {
throw new ModelRuntimeException(e);
}
}
public org.ontoware.rdf2go.model.node.Node getObject() {
try {
return (org.ontoware.rdf2go.model.node.Node) TypeConversion.toRDF2Go(o);
} catch (ModelRuntimeException e) {
throw new ModelRuntimeException(e);
}
}
public String toString() {
return getSubject()+"--"+getPredicate()+"--"+getObject();
}
public org.ontoware.rdf2go.model.Model getModel() {
return this.model;
}
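// Equality below first delegates to the superclass comparison of subject, predicate and object;
// the context URI is additionally compared only when both this model and the other statement
// actually carry a context.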
@Override
public boolean equals(Object o)
{
if (o instanceof Statement)
{
Statement stmt = (Statement) o;
boolean e = super.equals(stmt);
log.debug("statements are equal? "+e+" now the context");
if (e && stmt.getContext() != null && this.getModel().getContextURI() != null)
return stmt.getContext().equals(this.getModel().getContextURI());
else return e;
}
return false;
}
public int hashCode() {
return super.hashCode();
}
public URI getContext() {
return this.model.getContextURI();
}
}
|
package javax.jmdns.impl;
import java.io.IOException;
import java.net.DatagramPacket;
import java.net.InetAddress;
import java.net.MulticastSocket;
import java.net.SocketException;
import java.util.AbstractMap;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Random;
import java.util.Set;
import java.util.Timer;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.locks.ReentrantLock;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.jmdns.JmDNS;
import javax.jmdns.ServiceEvent;
import javax.jmdns.ServiceInfo;
import javax.jmdns.ServiceInfo.Fields;
import javax.jmdns.ServiceListener;
import javax.jmdns.ServiceTypeListener;
import javax.jmdns.impl.ListenerStatus.ServiceListenerStatus;
import javax.jmdns.impl.ListenerStatus.ServiceTypeListenerStatus;
import javax.jmdns.impl.constants.DNSConstants;
import javax.jmdns.impl.constants.DNSRecordClass;
import javax.jmdns.impl.constants.DNSRecordType;
import javax.jmdns.impl.constants.DNSState;
import javax.jmdns.impl.tasks.DNSTask;
import javax.jmdns.impl.tasks.RecordReaper;
import javax.jmdns.impl.tasks.Responder;
import javax.jmdns.impl.tasks.resolver.ServiceInfoResolver;
import javax.jmdns.impl.tasks.resolver.ServiceResolver;
import javax.jmdns.impl.tasks.resolver.TypeResolver;
import javax.jmdns.impl.tasks.state.Announcer;
import javax.jmdns.impl.tasks.state.Canceler;
import javax.jmdns.impl.tasks.state.Prober;
import javax.jmdns.impl.tasks.state.Renewer;
// REMIND: multiple IP addresses
/**
* mDNS implementation in Java.
*
* @author Arthur van Hoff, Rick Blair, Jeff Sonstein, Werner Randelshofer, Pierre Frisch, Scott Lewis
*/
public class JmDNSImpl extends JmDNS implements DNSStatefulObject {
private static Logger logger = Logger.getLogger(JmDNSImpl.class.getName());
public enum Operation {
Remove, Update, Add, RegisterServiceType, Noop
}
/**
* This is the multicast group, we are listening to for multicast DNS messages.
*/
private volatile InetAddress _group;
/**
* This is our multicast socket.
*/
private volatile MulticastSocket _socket;
/**
* Used to fix live lock problem on unregister.
*/
private volatile boolean _closed = false;
/**
* Holds instances of JmDNS.DNSListener. Must be a synchronized collection, because it is updated from concurrent threads.
*/
private final List<DNSListener> _listeners;
/**
* Holds instances of ServiceListener's. Keys are Strings holding a fully qualified service type. Values are LinkedList's of ServiceListener's.
*/
private final ConcurrentMap<String, List<ServiceListenerStatus>> _serviceListeners;
/**
* Holds instances of ServiceTypeListener's.
*/
private final Set<ServiceTypeListenerStatus> _typeListeners;
/**
* Cache for DNSEntry's.
*/
private final DNSCache _cache;
/**
* This hashtable holds the services that have been registered. Keys are instances of String which hold an all lower-case version of the fully qualified service name. Values are instances of ServiceInfo.
*/
private final ConcurrentMap<String, ServiceInfo> _services;
/**
* This hashtable holds the service types that have been registered or that have been received in an incoming datagram.<br/>
* Keys are instances of String which hold an all lower-case version of the fully qualified service type.<br/>
* Values hold the fully qualified service type.
*/
private final ConcurrentMap<String, ServiceTypeEntry> _serviceTypes;
/**
* This is used to store type entries. The type is stored as a class variable and the map supports the subtypes.<br/>
* The key is the lowercase version and the value is the case-preserved version.
*/
public static class ServiceTypeEntry extends AbstractMap<String, String> implements Cloneable {
private final Set<Map.Entry<String, String>> _entrySet;
private final String _type;
private static class SubTypeEntry extends SimpleImmutableEntry<String, String> {
private static final long serialVersionUID = 9188503522395855322L;
public SubTypeEntry(String subtype) {
super(subtype.toLowerCase(), subtype);
}
}
public ServiceTypeEntry(String type) {
super();
this._type = type;
this._entrySet = new HashSet<Map.Entry<String, String>>();
}
/**
* The type associated with this entry.
*
* @return the type
*/
public String getType() {
return _type;
}
/*
* (non-Javadoc)
* @see java.util.AbstractMap#entrySet()
*/
@Override
public Set<Map.Entry<String, String>> entrySet() {
return _entrySet;
}
/**
* Returns <code>true</code> if this set contains the specified element. More formally, returns <code>true</code> if and only if this set contains an element <code>e</code> such that
* <code>(o==null ? e==null : o.equals(e))</code>.
*
* @param subtype
* element whose presence in this set is to be tested
* @return <code>true</code> if this set contains the specified element
*/
public boolean contains(String subtype) {
return subtype != null && this.containsKey(subtype.toLowerCase());
}
/**
* Adds the specified element to this set if it is not already present. More formally, adds the specified element <code>e</code> to this set if this set contains no element <code>e2</code> such that
* <code>(e==null ? e2==null : e.equals(e2))</code>. If this set already contains the element, the call leaves the set unchanged and returns <code>false</code>.
*
* @param subtype
* element to be added to this set
* @return <code>true</code> if this set did not already contain the specified element
*/
public boolean add(String subtype) {
if (subtype == null || this.contains(subtype)) {
return false;
}
_entrySet.add(new SubTypeEntry(subtype));
return true;
}
/**
* Returns an iterator over the elements in this set. The elements are returned in no particular order (unless this set is an instance of some class that provides a guarantee).
*
* @return an iterator over the elements in this set
*/
public Iterator<String> iterator() {
return this.keySet().iterator();
}
/*
* (non-Javadoc)
* @see java.util.AbstractMap#toString()
*/
@Override
public String toString() {
final StringBuilder aLog = new StringBuilder(200);
if (this.isEmpty()) {
aLog.append("empty");
} else {
for (String value : this.values()) {
aLog.append(value);
aLog.append(", ");
}
aLog.setLength(aLog.length() - 2);
}
return aLog.toString();
}
}
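// Illustrative sketch (not part of the original class): shows how ServiceTypeEntry keys subtypes
// by their lowercase form while preserving the original casing as the value; the type and subtype
// strings below are hypothetical example values.
@SuppressWarnings("unused")
private static void serviceTypeEntryExample() {
ServiceTypeEntry entry = new ServiceTypeEntry("_http._tcp.local.");
entry.add("Printer"); // stored under the key "printer" with the value "Printer"
boolean found = entry.contains("PRINTER"); // true, because lookups are case-insensitive
entry.add("printer"); // returns false, the subtype is already present
}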
/**
* This is the shutdown hook, we registered with the java runtime.
*/
protected Thread _shutdown;
/**
* Handle on the local host
*/
private HostInfo _localHost;
private Thread _incomingListener;
/**
* Throttle count. This is used to count the overall number of probes sent by JmDNS.
*/
private int _throttle;
/**
* Last throttle increment.
*/
private long _lastThrottleIncrement;
private final ExecutorService _executor = Executors.newSingleThreadExecutor();
// 2009-09-16 ldeck: adding docbug patch with slight amendments
// 'Fixes two deadlock conditions involving JmDNS.close() - ID: 1473279'
/**
* The timer that triggers our announcements. We can't use the main timer object, because that could cause a deadlock where Prober waits on JmDNS.this lock held by close(), close() waits for us to finish, and we wait for Prober to give us back
* the timer thread so we can announce. (Patch from docbug in 2006-04-19 still wasn't patched .. so I'm doing it!)
*/
// private final Timer _cancelerTimer;
/**
* The timer is used to dispatch all outgoing messages of JmDNS. It is also used to dispatch maintenance tasks for the DNS cache.
*/
private final Timer _timer;
/**
* The timer is used to dispatch maintenance tasks for the DNS cache.
*/
private final Timer _stateTimer;
/**
* The source for random values. This is used to introduce random delays in responses. This reduces the potential for collisions on the network.
*/
private final static Random _random = new Random();
/**
* This lock is used to coordinate processing of incoming and outgoing messages. This is needed, because the Rendezvous Conformance Test does not forgive race conditions.
*/
private final ReentrantLock _ioLock = new ReentrantLock();
/**
* If an incoming packet which needs an answer is truncated, we store it here. We add more incoming DNSRecords to it, until the JmDNS.Responder timer picks it up.<br/>
* FIXME [PJYF June 8 2010]: This does not work well with multiple planned answers for packets that came in from different clients.
*/
private DNSIncoming _plannedAnswer;
// State machine
/**
* This hashtable is used to maintain a list of service types being collected by this JmDNS instance. The key of the hashtable is a service type name, the value is an instance of JmDNS.ServiceCollector.
*
* @see #list
*/
private final ConcurrentMap<String, ServiceCollector> _serviceCollectors;
private final String _name;
/**
* Main method to display API information if run from java -jar
* <p>
*
* @param argv
* the command line arguments
*/
public static void main(String[] argv) {
String version = null;
try {
final Properties pomProperties = new Properties();
pomProperties.load(JmDNSImpl.class.getResourceAsStream("/META-INF/maven/javax.jmdns/jmdns/pom.properties"));
version = pomProperties.getProperty("version");
} catch (Exception e) {
version = "RUNNING.IN.IDE.FULL";
}
System.out.println("JmDNS version \"" + version + "\"");
System.out.println(" ");
System.out.println("Running on java version \"" + System.getProperty("java.version") + "\"" + " (build " + System.getProperty("java.runtime.version") + ")" + " from " + System.getProperty("java.vendor"));
System.out.println("Operating environment \"" + System.getProperty("os.name") + "\"" + " version " + System.getProperty("os.version") + " on " + System.getProperty("os.arch"));
System.out.println("For more information on JmDNS please visit https://sourceforge.net/projects/jmdns/");
}
/**
* Create an instance of JmDNS and bind it to a specific network interface given its IP-address.
*
* @param address
* IP address to bind to.
* @param name
* name of the newly created JmDNS
* @throws IOException
*/
public JmDNSImpl(InetAddress address, String name) throws IOException {
super();
if (logger.isLoggable(Level.FINER)) {
logger.finer("JmDNS instance created");
}
_cache = new DNSCache(100);
_listeners = Collections.synchronizedList(new ArrayList<DNSListener>());
_serviceListeners = new ConcurrentHashMap<String, List<ServiceListenerStatus>>();
_typeListeners = Collections.synchronizedSet(new HashSet<ServiceTypeListenerStatus>());
_serviceCollectors = new ConcurrentHashMap<String, ServiceCollector>();
_services = new ConcurrentHashMap<String, ServiceInfo>(20);
_serviceTypes = new ConcurrentHashMap<String, ServiceTypeEntry>(20);
_localHost = HostInfo.newHostInfo(address, this, name);
_name = (name != null ? name : _localHost.getName());
_timer = new Timer("JmDNS(" + _name + ").Timer", true);
_stateTimer = new Timer("JmDNS(" + _name + ").State.Timer", false);
// _cancelerTimer = new Timer("JmDNS.cancelerTimer");
// (ldeck 2.1.1) preventing shutdown blocking thread
// _shutdown = new Thread(new Shutdown(), "JmDNS.Shutdown");
// Runtime.getRuntime().addShutdownHook(_shutdown);
// Bind to multicast socket
this.openMulticastSocket(this.getLocalHost());
this.start(this.getServices().values());
new RecordReaper(this).start(_timer);
}
private void start(Collection<? extends ServiceInfo> serviceInfos) {
if (_incomingListener == null) {
_incomingListener = new SocketListener(this);
_incomingListener.start();
}
this.startProber();
for (ServiceInfo info : serviceInfos) {
try {
this.registerService(new ServiceInfoImpl(info));
} catch (final Exception exception) {
logger.log(Level.WARNING, "start() Registration exception ", exception);
}
}
}
private void openMulticastSocket(HostInfo hostInfo) throws IOException {
if (_group == null) {
_group = InetAddress.getByName(DNSConstants.MDNS_GROUP);
}
if (_socket != null) {
this.closeMulticastSocket();
}
_socket = new MulticastSocket(DNSConstants.MDNS_PORT);
if ((hostInfo != null) && (hostInfo.getInterface() != null)) {
try {
_socket.setNetworkInterface(hostInfo.getInterface());
} catch (SocketException e) {
if (logger.isLoggable(Level.FINE)) {
logger.fine("openMulticastSocket() Set network interface exception: " + e.getMessage());
}
}
}
_socket.setTimeToLive(255);
_socket.joinGroup(_group);
}
private void closeMulticastSocket() {
// jP: 2010-01-18. See below. We'll need this monitor...
// assert (Thread.holdsLock(this));
if (logger.isLoggable(Level.FINER)) {
logger.finer("closeMulticastSocket()");
}
if (_socket != null) {
// close socket
try {
try {
_socket.leaveGroup(_group);
} catch (SocketException exception) {
}
_socket.close();
// jP: 2010-01-18. It isn't safe to join() on the listener
// thread - it attempts to lock the IoLock object, and deadlock
// ensues. Per issue #2933183, changed this to wait on the JmDNS
// monitor, checking on each notify (or timeout) that the
// listener thread has stopped.
while (_incomingListener != null && _incomingListener.isAlive()) {
synchronized (this) {
try {
if (_incomingListener != null && _incomingListener.isAlive()) {
// wait time is arbitrary, we're really expecting notification.
if (logger.isLoggable(Level.FINER)) {
logger.finer("closeMulticastSocket(): waiting for jmDNS monitor");
}
this.wait(1000);
}
} catch (InterruptedException ignored) {
// Ignored
}
}
}
_incomingListener = null;
} catch (final Exception exception) {
logger.log(Level.WARNING, "closeMulticastSocket() Close socket exception ", exception);
}
_socket = null;
}
}
// State machine
/**
* {@inheritDoc}
*/
@Override
public boolean advanceState(DNSTask task) {
return this._localHost.advanceState(task);
}
/**
* {@inheritDoc}
*/
@Override
public boolean revertState() {
return this._localHost.revertState();
}
/**
* {@inheritDoc}
*/
@Override
public boolean cancelState() {
return this._localHost.cancelState();
}
/**
* {@inheritDoc}
*/
@Override
public boolean recoverState() {
return this._localHost.recoverState();
}
/**
* {@inheritDoc}
*/
@Override
public JmDNSImpl getDns() {
return this;
}
/**
* {@inheritDoc}
*/
@Override
public void associateWithTask(DNSTask task, DNSState state) {
this._localHost.associateWithTask(task, state);
}
/**
* {@inheritDoc}
*/
@Override
public void removeAssociationWithTask(DNSTask task) {
this._localHost.removeAssociationWithTask(task);
}
/**
* {@inheritDoc}
*/
@Override
public boolean isAssociatedWithTask(DNSTask task, DNSState state) {
return this._localHost.isAssociatedWithTask(task, state);
}
/**
* {@inheritDoc}
*/
@Override
public boolean isProbing() {
return this._localHost.isProbing();
}
/**
* {@inheritDoc}
*/
@Override
public boolean isAnnouncing() {
return this._localHost.isAnnouncing();
}
/**
* {@inheritDoc}
*/
@Override
public boolean isAnnounced() {
return this._localHost.isAnnounced();
}
/**
* {@inheritDoc}
*/
@Override
public boolean isCanceling() {
return this._localHost.isCanceling();
}
/**
* {@inheritDoc}
*/
@Override
public boolean isCanceled() {
return this._localHost.isCanceled();
}
/**
* {@inheritDoc}
*/
@Override
public boolean waitForAnnounced(long timeout) {
return this._localHost.waitForAnnounced(timeout);
}
/**
* {@inheritDoc}
*/
@Override
public boolean waitForCanceled(long timeout) {
return this._localHost.waitForCanceled(timeout);
}
/**
* Return the DNSCache associated with the cache variable
*
* @return DNS cache
*/
public DNSCache getCache() {
return _cache;
}
/**
* {@inheritDoc}
*/
@Override
public String getName() {
return _name;
}
/**
* {@inheritDoc}
*/
@Override
public String getHostName() {
return _localHost.getName();
}
/**
* Returns the local host info
*
* @return local host info
*/
public HostInfo getLocalHost() {
return _localHost;
}
/**
* {@inheritDoc}
*/
@Override
public InetAddress getInterface() throws IOException {
return _socket.getInterface();
}
/**
* {@inheritDoc}
*/
@Override
public ServiceInfo getServiceInfo(String type, String name) {
return this.getServiceInfo(type, name, false, DNSConstants.SERVICE_INFO_TIMEOUT);
}
/**
* {@inheritDoc}
*/
@Override
public ServiceInfo getServiceInfo(String type, String name, long timeout) {
return this.getServiceInfo(type, name, false, timeout);
}
/**
* {@inheritDoc}
*/
@Override
public ServiceInfo getServiceInfo(String type, String name, boolean persistent) {
return this.getServiceInfo(type, name, persistent, DNSConstants.SERVICE_INFO_TIMEOUT);
}
/**
* {@inheritDoc}
*/
@Override
public ServiceInfo getServiceInfo(String type, String name, boolean persistent, long timeout) {
final ServiceInfoImpl info = this.resolveServiceInfo(type, name, "", persistent);
this.waitForInfoData(info, timeout);
return (info.hasData() ? info : null);
}
ServiceInfoImpl resolveServiceInfo(String type, String name, String subtype, boolean persistent) {
this.cleanCache();
String lotype = type.toLowerCase();
this.registerServiceType(lotype);
if (_serviceCollectors.putIfAbsent(lotype, new ServiceCollector(type)) == null) {
this.addServiceListener(lotype, _serviceCollectors.get(lotype), ListenerStatus.SYNCHONEOUS);
}
// Check if the answer is in the cache.
final ServiceInfoImpl info = this.getServiceInfoFromCache(type, name, subtype, persistent);
// We still run the resolver to do the dispatch but if the info is already there it will quit immediately
new ServiceInfoResolver(this, info).start(_timer);
return info;
}
ServiceInfoImpl getServiceInfoFromCache(String type, String name, String subtype, boolean persistent) {
// Check if the answer is in the cache.
ServiceInfoImpl info = new ServiceInfoImpl(type, name, subtype, 0, 0, 0, persistent, (byte[]) null);
DNSEntry pointerEntry = this.getCache().getDNSEntry(new DNSRecord.Pointer(type, DNSRecordClass.CLASS_ANY, false, 0, info.getQualifiedName()));
if (pointerEntry instanceof DNSRecord) {
ServiceInfoImpl cachedInfo = (ServiceInfoImpl) ((DNSRecord) pointerEntry).getServiceInfo(persistent);
if (cachedInfo != null) {
// To get a complete info record we need to retrieve the service, address and the text bytes.
Map<Fields, String> map = cachedInfo.getQualifiedNameMap();
byte[] srvBytes = null;
String server = "";
DNSEntry serviceEntry = this.getCache().getDNSEntry(info.getQualifiedName(), DNSRecordType.TYPE_SRV, DNSRecordClass.CLASS_ANY);
if (serviceEntry instanceof DNSRecord) {
ServiceInfo cachedServiceEntryInfo = ((DNSRecord) serviceEntry).getServiceInfo(persistent);
if (cachedServiceEntryInfo != null) {
cachedInfo = new ServiceInfoImpl(map, cachedServiceEntryInfo.getPort(), cachedServiceEntryInfo.getWeight(), cachedServiceEntryInfo.getPriority(), persistent, (byte[]) null);
srvBytes = cachedServiceEntryInfo.getTextBytes();
server = cachedServiceEntryInfo.getServer();
}
}
DNSEntry addressEntry = this.getCache().getDNSEntry(server, DNSRecordType.TYPE_A, DNSRecordClass.CLASS_ANY);
if (addressEntry instanceof DNSRecord) {
ServiceInfo cachedAddressInfo = ((DNSRecord) addressEntry).getServiceInfo(persistent);
if (cachedAddressInfo != null) {
cachedInfo.setAddress(cachedAddressInfo.getInet4Address());
cachedInfo._setText(cachedAddressInfo.getTextBytes());
}
}
addressEntry = this.getCache().getDNSEntry(server, DNSRecordType.TYPE_AAAA, DNSRecordClass.CLASS_ANY);
if (addressEntry instanceof DNSRecord) {
ServiceInfo cachedAddressInfo = ((DNSRecord) addressEntry).getServiceInfo(persistent);
if (cachedAddressInfo != null) {
cachedInfo.setAddress(cachedAddressInfo.getInet6Address());
cachedInfo._setText(cachedAddressInfo.getTextBytes());
}
}
DNSEntry textEntry = this.getCache().getDNSEntry(cachedInfo.getQualifiedName(), DNSRecordType.TYPE_TXT, DNSRecordClass.CLASS_ANY);
if (textEntry instanceof DNSRecord) {
ServiceInfo cachedTextInfo = ((DNSRecord) textEntry).getServiceInfo(persistent);
if (cachedTextInfo != null) {
cachedInfo._setText(cachedTextInfo.getTextBytes());
}
}
if (cachedInfo.getTextBytes().length == 0) {
cachedInfo._setText(srvBytes);
}
if (cachedInfo.hasData()) {
info = cachedInfo;
}
}
}
return info;
}
private void waitForInfoData(ServiceInfo info, long timeout) {
synchronized (info) {
long loops = (timeout / 200L);
if (loops < 1) {
loops = 1;
}
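// Poll in 200 ms slices (at least one) until the info reports data or the timeout elapses;
// info.wait(200) releases the monitor so the resolver thread can populate the info in the meantime.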
for (int i = 0; i < loops; i++) {
if (info.hasData()) {
break;
}
try {
info.wait(200);
} catch (final InterruptedException e) {
/* Stub */
}
}
}
}
/**
* {@inheritDoc}
*/
@Override
public void requestServiceInfo(String type, String name) {
this.requestServiceInfo(type, name, false, DNSConstants.SERVICE_INFO_TIMEOUT);
}
/**
* {@inheritDoc}
*/
@Override
public void requestServiceInfo(String type, String name, boolean persistent) {
this.requestServiceInfo(type, name, persistent, DNSConstants.SERVICE_INFO_TIMEOUT);
}
/**
* {@inheritDoc}
*/
@Override
public void requestServiceInfo(String type, String name, long timeout) {
this.requestServiceInfo(type, name, false, timeout);
}
/**
* {@inheritDoc}
*/
@Override
public void requestServiceInfo(String type, String name, boolean persistent, long timeout) {
final ServiceInfoImpl info = this.resolveServiceInfo(type, name, "", persistent);
this.waitForInfoData(info, timeout);
}
void handleServiceResolved(ServiceEvent event) {
List<ServiceListenerStatus> list = _serviceListeners.get(event.getType().toLowerCase());
final List<ServiceListenerStatus> listCopy;
if ((list != null) && (!list.isEmpty())) {
if ((event.getInfo() != null) && event.getInfo().hasData()) {
final ServiceEvent localEvent = event;
synchronized (list) {
listCopy = new ArrayList<ServiceListenerStatus>(list);
}
for (final ServiceListenerStatus listener : listCopy) {
_executor.submit(new Runnable() {
/** {@inheritDoc} */
@Override
public void run() {
listener.serviceResolved(localEvent);
}
});
}
}
}
}
/**
* {@inheritDoc}
*/
@Override
public void addServiceTypeListener(ServiceTypeListener listener) throws IOException {
ServiceTypeListenerStatus status = new ServiceTypeListenerStatus(listener, ListenerStatus.ASYNCHONEOUS);
_typeListeners.add(status);
// report cached service types
for (String type : _serviceTypes.keySet()) {
status.serviceTypeAdded(new ServiceEventImpl(this, type, "", null));
}
new TypeResolver(this).start(_timer);
}
/**
* {@inheritDoc}
*/
@Override
public void removeServiceTypeListener(ServiceTypeListener listener) {
ServiceTypeListenerStatus status = new ServiceTypeListenerStatus(listener, ListenerStatus.ASYNCHONEOUS);
_typeListeners.remove(status);
}
/**
* {@inheritDoc}
*/
@Override
public void addServiceListener(String type, ServiceListener listener) {
this.addServiceListener(type, listener, ListenerStatus.ASYNCHONEOUS);
}
private void addServiceListener(String type, ServiceListener listener, boolean synch) {
ServiceListenerStatus status = new ServiceListenerStatus(listener, synch);
final String lotype = type.toLowerCase();
List<ServiceListenerStatus> list = _serviceListeners.get(lotype);
if (list == null) {
if (_serviceListeners.putIfAbsent(lotype, new LinkedList<ServiceListenerStatus>()) == null) {
if (_serviceCollectors.putIfAbsent(lotype, new ServiceCollector(type)) == null) {
// We have a problem here. The service collectors must be called synchronously so that their cache is cleaned up immediately; otherwise stale results may be reported.
this.addServiceListener(lotype, _serviceCollectors.get(lotype), ListenerStatus.SYNCHONEOUS);
}
}
list = _serviceListeners.get(lotype);
}
if (list != null) {
synchronized (list) {
if (!list.contains(listener)) {
list.add(status);
}
}
}
// report cached service types
final List<ServiceEvent> serviceEvents = new ArrayList<ServiceEvent>();
Collection<DNSEntry> dnsEntryLits = this.getCache().allValues();
for (DNSEntry entry : dnsEntryLits) {
final DNSRecord record = (DNSRecord) entry;
if (record.getRecordType() == DNSRecordType.TYPE_SRV) {
if (record.getKey().endsWith(lotype)) {
// Do not use the record's embedded method for generating the event; this will not work.
// serviceEvents.add(record.getServiceEvent(this));
serviceEvents.add(new ServiceEventImpl(this, type, toUnqualifiedName(type, record.getName()), record.getServiceInfo()));
}
}
}
// Actually call listener with all service events added above
for (ServiceEvent serviceEvent : serviceEvents) {
status.serviceAdded(serviceEvent);
}
// Create/start ServiceResolver
new ServiceResolver(this, type).start(_timer);
}
/**
* {@inheritDoc}
*/
@Override
public void removeServiceListener(String type, ServiceListener listener) {
String lotype = type.toLowerCase();
List<ServiceListenerStatus> list = _serviceListeners.get(lotype);
if (list != null) {
synchronized (list) {
ServiceListenerStatus status = new ServiceListenerStatus(listener, ListenerStatus.ASYNCHONEOUS);
list.remove(status);
if (list.isEmpty()) {
_serviceListeners.remove(lotype, list);
}
}
}
}
/**
* {@inheritDoc}
*/
@Override
public void registerService(ServiceInfo infoAbstract) throws IOException {
final ServiceInfoImpl info = (ServiceInfoImpl) infoAbstract;
if ((info.getDns() != null) && (info.getDns() != this)) {
throw new IllegalStateException("This service information is already registered with another DNS.");
}
info.setDns(this);
this.registerServiceType(info.getTypeWithSubtype());
// bind the service to this address
info.setServer(_localHost.getName());
info.setAddress(_localHost.getInet4Address());
info.setAddress(_localHost.getInet6Address());
this.waitForAnnounced(0);
this.makeServiceNameUnique(info);
while (_services.putIfAbsent(info.getKey(), info) != null) {
this.makeServiceNameUnique(info);
}
this.startProber();
info.waitForAnnounced(0);
if (logger.isLoggable(Level.FINE)) {
logger.fine("registerService() JmDNS registered service as " + info);
}
}
/**
* {@inheritDoc}
*/
@Override
public void unregisterService(ServiceInfo infoAbstract) {
final ServiceInfoImpl info = (ServiceInfoImpl) _services.get(infoAbstract.getKey());
if (info != null) {
info.cancelState();
this.startCanceler();
// Remind: We get a deadlock here, if the Canceler does not run!
info.waitForCanceled(0);
_services.remove(info.getKey(), info);
if (logger.isLoggable(Level.FINE)) {
logger.fine("unregisterService() JmDNS unregistered service as " + info);
}
} else {
logger.warning("Removing unregistered service info: " + infoAbstract.getKey());
}
}
/**
* {@inheritDoc}
*/
@Override
public void unregisterAllServices() {
if (logger.isLoggable(Level.FINER)) {
logger.finer("unregisterAllServices()");
}
for (String name : _services.keySet()) {
ServiceInfoImpl info = (ServiceInfoImpl) _services.get(name);
if (info != null) {
if (logger.isLoggable(Level.FINER)) {
logger.finer("Cancelling service info: " + info);
}
info.cancelState();
}
}
this.startCanceler();
for (String name : _services.keySet()) {
ServiceInfoImpl info = (ServiceInfoImpl) _services.get(name);
if (info != null) {
if (logger.isLoggable(Level.FINER)) {
logger.finer("Wait for service info cancel: " + info);
}
info.waitForCanceled(DNSConstants.CLOSE_TIMEOUT);
_services.remove(name, info);
}
}
}
/**
* {@inheritDoc}
*/
@Override
public boolean registerServiceType(String type) {
boolean typeAdded = false;
Map<Fields, String> map = ServiceInfoImpl.decodeQualifiedNameMapForType(type);
String domain = map.get(Fields.Domain);
String protocol = map.get(Fields.Protocol);
String application = map.get(Fields.Application);
String subtype = map.get(Fields.Subtype);
final String name = (application.length() > 0 ? "_" + application + "." : "") + (protocol.length() > 0 ? "_" + protocol + "." : "") + domain + ".";
final String loname = name.toLowerCase();
if (logger.isLoggable(Level.FINE)) {
logger.fine(this.getName() + ".registering service type: " + type + " as: " + name + (subtype.length() > 0 ? " subtype: " + subtype : ""));
}
if (!_serviceTypes.containsKey(loname) && !application.equals("dns-sd") && !domain.endsWith("in-addr.arpa") && !domain.endsWith("ip6.arpa")) {
typeAdded = _serviceTypes.putIfAbsent(loname, new ServiceTypeEntry(name)) == null;
if (typeAdded) {
final ServiceTypeListenerStatus[] list = _typeListeners.toArray(new ServiceTypeListenerStatus[_typeListeners.size()]);
final ServiceEvent event = new ServiceEventImpl(this, name, "", null);
for (final ServiceTypeListenerStatus status : list) {
_executor.submit(new Runnable() {
/** {@inheritDoc} */
@Override
public void run() {
status.serviceTypeAdded(event);
}
});
}
}
}
if (subtype.length() > 0) {
ServiceTypeEntry subtypes = _serviceTypes.get(loname);
if ((subtypes != null) && (!subtypes.contains(subtype))) {
synchronized (subtypes) {
if (!subtypes.contains(subtype)) {
typeAdded = true;
subtypes.add(subtype);
final ServiceTypeListenerStatus[] list = _typeListeners.toArray(new ServiceTypeListenerStatus[_typeListeners.size()]);
final ServiceEvent event = new ServiceEventImpl(this, "_" + subtype + "._sub." + name, "", null);
for (final ServiceTypeListenerStatus status : list) {
_executor.submit(new Runnable() {
/** {@inheritDoc} */
@Override
public void run() {
status.subTypeForServiceTypeAdded(event);
}
});
}
}
}
}
}
return typeAdded;
}
/**
* Generate a possibly unique name for a service using the information we have in the cache.
*
* @return returns true, if the name of the service info had to be changed.
*/
private boolean makeServiceNameUnique(ServiceInfoImpl info) {
final String originalQualifiedName = info.getKey();
final long now = System.currentTimeMillis();
boolean collision;
do {
collision = false;
// Check for collision in cache
Collection<? extends DNSEntry> entryList = this.getCache().getDNSEntryList(info.getKey());
if (entryList != null) {
for (DNSEntry dnsEntry : entryList) {
if (DNSRecordType.TYPE_SRV.equals(dnsEntry.getRecordType()) && !dnsEntry.isExpired(now)) {
final DNSRecord.Service s = (DNSRecord.Service) dnsEntry;
if (s.getPort() != info.getPort() || !s.getServer().equals(_localHost.getName())) {
if (logger.isLoggable(Level.FINER)) {
logger.finer("makeServiceNameUnique() JmDNS.makeServiceNameUnique srv collision:" + dnsEntry + " s.server=" + s.getServer() + " " + _localHost.getName() + " equals:" + (s.getServer().equals(_localHost.getName())));
}
info.setName(incrementName(info.getName()));
collision = true;
break;
}
}
}
}
// Check for collision with other service infos published by JmDNS
final ServiceInfo selfService = _services.get(info.getKey());
if (selfService != null && selfService != info) {
info.setName(incrementName(info.getName()));
collision = true;
}
}
while (collision);
return !(originalQualifiedName.equals(info.getKey()));
}
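/**
* Illustrative examples (hypothetical names): incrementName("My Service") returns "My Service (2)"
* and incrementName("My Service (2)") returns "My Service (3)", so repeated collisions keep
* incrementing the counter in parentheses.
*/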
String incrementName(String name) {
String aName = name;
try {
final int l = aName.lastIndexOf('(');
final int r = aName.lastIndexOf(')');
if ((l >= 0) && (l < r)) {
aName = aName.substring(0, l) + "(" + (Integer.parseInt(aName.substring(l + 1, r)) + 1) + ")";
} else {
aName += " (2)";
}
} catch (final NumberFormatException e) {
aName += " (2)";
}
return aName;
}
/**
* Add a listener for a question. The listener will receive updates of answers to the question as they arrive, or from the cache if they are already available.
*
* @param listener
* DNS listener
* @param question
* DNS query
*/
public void addListener(DNSListener listener, DNSQuestion question) {
final long now = System.currentTimeMillis();
// add the new listener
_listeners.add(listener);
// report existing matched records
if (question != null) {
Collection<? extends DNSEntry> entryList = this.getCache().getDNSEntryList(question.getName().toLowerCase());
if (entryList != null) {
synchronized (entryList) {
for (DNSEntry dnsEntry : entryList) {
if (question.answeredBy(dnsEntry) && !dnsEntry.isExpired(now)) {
listener.updateRecord(this.getCache(), now, dnsEntry);
}
}
}
}
}
}
/**
* Remove a listener from all outstanding questions. The listener will no longer receive any updates.
*
* @param listener
* DNS listener
*/
public void removeListener(DNSListener listener) {
_listeners.remove(listener);
}
/**
* Renew a service when the record becomes stale. If there is no service collector for the type, this method does nothing.
*
* @param record
* DNS record
*/
public void renewServiceCollector(DNSRecord record) {
ServiceInfo info = record.getServiceInfo();
if (_serviceCollectors.containsKey(info.getType().toLowerCase())) {
// Create/start ServiceResolver
new ServiceResolver(this, info.getType()).start(_timer);
}
}
// Remind: Method updateRecord should receive a better name.
/**
* Notify all listeners that a record was updated.
*
* @param now
* update date
* @param rec
* DNS record
* @param operation
* DNS cache operation
*/
public void updateRecord(long now, DNSRecord rec, Operation operation) {
// We do not want to block the entire DNS while we are updating the record for each listener (service info)
{
List<DNSListener> listenerList = null;
synchronized (_listeners) {
listenerList = new ArrayList<DNSListener>(_listeners);
}
for (DNSListener listener : listenerList) {
listener.updateRecord(this.getCache(), now, rec);
}
}
if (DNSRecordType.TYPE_PTR.equals(rec.getRecordType()))
// if (DNSRecordType.TYPE_PTR.equals(rec.getRecordType()) || DNSRecordType.TYPE_SRV.equals(rec.getRecordType()))
{
ServiceEvent event = rec.getServiceEvent(this);
if ((event.getInfo() == null) || !event.getInfo().hasData()) {
// We do not care about the subtype because the info is only used if complete and the subtype will then be included.
ServiceInfo info = this.getServiceInfoFromCache(event.getType(), event.getName(), "", false);
if (info.hasData()) {
event = new ServiceEventImpl(this, event.getType(), event.getName(), info);
}
}
List<ServiceListenerStatus> list = _serviceListeners.get(event.getType().toLowerCase());
final List<ServiceListenerStatus> serviceListenerList;
if (list != null) {
synchronized (list) {
serviceListenerList = new ArrayList<ServiceListenerStatus>(list);
}
} else {
serviceListenerList = Collections.emptyList();
}
if (logger.isLoggable(Level.FINEST)) {
logger.finest(this.getName() + ".updating record for event: " + event + " list " + serviceListenerList + " operation: " + operation);
}
if (!serviceListenerList.isEmpty()) {
final ServiceEvent localEvent = event;
switch (operation) {
case Add:
for (final ServiceListenerStatus listener : serviceListenerList) {
if (listener.isSynchronous()) {
listener.serviceAdded(localEvent);
} else {
_executor.submit(new Runnable() {
/** {@inheritDoc} */
@Override
public void run() {
listener.serviceAdded(localEvent);
}
});
}
}
break;
case Remove:
for (final ServiceListenerStatus listener : serviceListenerList) {
if (listener.isSynchronous()) {
listener.serviceRemoved(localEvent);
} else {
_executor.submit(new Runnable() {
/** {@inheritDoc} */
@Override
public void run() {
listener.serviceRemoved(localEvent);
}
});
}
}
break;
default:
break;
}
}
}
}
void handleRecord(DNSRecord record, long now) {
DNSRecord newRecord = record;
Operation cacheOperation = Operation.Noop;
final boolean expired = newRecord.isExpired(now);
if (logger.isLoggable(Level.FINE)) {
logger.fine(this.getName() + " handle response: " + newRecord);
}
// update the cache
if (!newRecord.isServicesDiscoveryMetaQuery() && !newRecord.isDomainDiscoveryQuery()) {
final boolean unique = newRecord.isUnique();
final DNSRecord cachedRecord = (DNSRecord) this.getCache().getDNSEntry(newRecord);
if (logger.isLoggable(Level.FINE)) {
logger.fine(this.getName() + " handle response cached record: " + cachedRecord);
}
if (unique) {
Collection<? extends DNSEntry> entries = this.getCache().getDNSEntryList(newRecord.getKey());
if (entries != null) {
for (DNSEntry entry : entries) {
if (newRecord.getRecordType().equals(entry.getRecordType()) && newRecord.getRecordClass().equals(entry.getRecordClass()) && (entry != cachedRecord)) {
((DNSRecord) entry).setWillExpireSoon(now);
}
}
}
}
if (cachedRecord != null) {
if (expired) {
// if the record has a TTL of 0 it is a cancel record, so we delay the removal by 1 s
if (newRecord.getTTL() == 0) {
cacheOperation = Operation.Noop;
cachedRecord.setWillExpireSoon(now);
// the actual record will be disposed of by the record reaper.
} else {
cacheOperation = Operation.Remove;
this.getCache().removeDNSEntry(cachedRecord);
}
} else {
// If the record content has changed we need to inform our listeners.
if (!newRecord.sameValue(cachedRecord) || (!newRecord.sameSubtype(cachedRecord) && (newRecord.getSubtype().length() > 0))) {
cacheOperation = Operation.Update;
this.getCache().replaceDNSEntry(newRecord, cachedRecord);
} else {
cachedRecord.resetTTL(newRecord);
newRecord = cachedRecord;
}
}
} else {
if (!expired) {
cacheOperation = Operation.Add;
this.getCache().addDNSEntry(newRecord);
}
}
}
// Register new service types
if (newRecord.getRecordType() == DNSRecordType.TYPE_PTR) {
// handle DNSConstants.DNS_META_QUERY records
boolean typeAdded = false;
if (newRecord.isServicesDiscoveryMetaQuery()) {
// The service names are in the alias.
if (!expired) {
typeAdded = this.registerServiceType(((DNSRecord.Pointer) newRecord).getAlias());
}
return;
}
typeAdded |= this.registerServiceType(newRecord.getName());
if (typeAdded && (cacheOperation == Operation.Noop)) {
cacheOperation = Operation.RegisterServiceType;
}
}
// notify the listeners
if (cacheOperation != Operation.Noop) {
this.updateRecord(now, newRecord, cacheOperation);
}
}
/**
* Handle an incoming response. Cache answers, and pass them on to the appropriate questions.
*
* @throws IOException
*/
void handleResponse(DNSIncoming msg) throws IOException {
final long now = System.currentTimeMillis();
boolean hostConflictDetected = false;
boolean serviceConflictDetected = false;
for (DNSRecord newRecord : msg.getAllAnswers()) {
this.handleRecord(newRecord, now);
if (DNSRecordType.TYPE_A.equals(newRecord.getRecordType()) || DNSRecordType.TYPE_AAAA.equals(newRecord.getRecordType())) {
hostConflictDetected |= newRecord.handleResponse(this);
} else {
serviceConflictDetected |= newRecord.handleResponse(this);
}
}
if (hostConflictDetected || serviceConflictDetected) {
this.startProber();
}
}
/**
* Handle an incoming query. See if we can answer any part of it given our service infos.
*
* @param in
* @param addr
* @param port
* @throws IOException
*/
void handleQuery(DNSIncoming in, InetAddress addr, int port) throws IOException {
if (logger.isLoggable(Level.FINE)) {
logger.fine(this.getName() + ".handle query: " + in);
}
// Track known answers
boolean conflictDetected = false;
final long expirationTime = System.currentTimeMillis() + DNSConstants.KNOWN_ANSWER_TTL;
for (DNSRecord answer : in.getAllAnswers()) {
conflictDetected |= answer.handleQuery(this, expirationTime);
}
_ioLock.lock();
try {
if (_plannedAnswer != null) {
_plannedAnswer.append(in);
} else {
if (in.isTruncated()) {
_plannedAnswer = in;
}
new Responder(this, in, port).start(_timer);
}
} finally {
_ioLock.unlock();
}
final long now = System.currentTimeMillis();
for (DNSRecord answer : in.getAnswers()) {
this.handleRecord(answer, now);
}
if (conflictDetected) {
this.startProber();
}
}
public void respondToQuery(DNSIncoming in) {
_ioLock.lock();
try {
if (_plannedAnswer == in) {
_plannedAnswer = null;
}
} finally {
_ioLock.unlock();
}
}
/**
* Add an answer to a question. Deal with the case when the outgoing packet overflows
*
* @param in
* @param addr
* @param port
* @param out
* @param rec
* @return outgoing answer
* @throws IOException
*/
public DNSOutgoing addAnswer(DNSIncoming in, InetAddress addr, int port, DNSOutgoing out, DNSRecord rec) throws IOException {
DNSOutgoing newOut = out;
if (newOut == null) {
newOut = new DNSOutgoing(DNSConstants.FLAGS_QR_RESPONSE | DNSConstants.FLAGS_AA, false, in.getSenderUDPPayload());
}
try {
newOut.addAnswer(in, rec);
} catch (final IOException e) {
newOut.setFlags(newOut.getFlags() | DNSConstants.FLAGS_TC);
newOut.setId(in.getId());
send(newOut);
newOut = new DNSOutgoing(DNSConstants.FLAGS_QR_RESPONSE | DNSConstants.FLAGS_AA, false, in.getSenderUDPPayload());
newOut.addAnswer(in, rec);
}
return newOut;
}
/**
* Send an outgoing multicast DNS message.
*
* @param out
* @throws IOException
*/
public void send(DNSOutgoing out) throws IOException {
if (!out.isEmpty()) {
byte[] message = out.data();
final DatagramPacket packet = new DatagramPacket(message, message.length, _group, DNSConstants.MDNS_PORT);
if (logger.isLoggable(Level.FINEST)) {
try {
final DNSIncoming msg = new DNSIncoming(packet);
if (logger.isLoggable(Level.FINEST)) {
logger.finest("send(" + this.getName() + ") JmDNS out:" + msg.print(true));
}
} catch (final IOException e) {
logger.throwing(getClass().toString(), "send(" + this.getName() + ") - JmDNS can not parse what it sends!!!", e);
}
}
final MulticastSocket ms = _socket;
if (ms != null && !ms.isClosed()) {
ms.send(packet);
}
}
}
public void startProber() {
new Prober(this).start(_stateTimer);
}
public void startAnnouncer() {
new Announcer(this).start(_stateTimer);
}
public void startRenewer() {
new Renewer(this).start(_stateTimer);
}
public void startCanceler() {
new Canceler(this).start(_stateTimer);
}
// REMIND: Why is this not an anonymous inner class?
/**
* Shutdown operations.
*/
protected class Shutdown implements Runnable {
/** {@inheritDoc} */
@Override
public void run() {
try {
_shutdown = null;
close();
} catch (Throwable exception) {
System.err.println("Error while shutting down. " + exception);
}
}
}
/**
* Recover jmdns when there is an error.
*/
public void recover() {
logger.finer("recover()");
// We have an IO error, so let's try to recover; if anything happens, let's close it.
// This should cover the case of the IP address changing under our feet
if (this.isCanceling() || this.isCanceled()) {
return;
}
// Stop JmDNS
// This protects against recursive calls
if (this.cancelState()) {
// Synchronize only if we are not already in process to prevent dead locks
if (logger.isLoggable(Level.FINER)) {
logger.finer("recover() Cleaning up");
}
// Purge the timer
_timer.purge();
// We need to keep a copy for reregistration
final Collection<ServiceInfo> oldServiceInfos = new ArrayList<ServiceInfo>(getServices().values());
// Cancel all services
this.unregisterAllServices();
this.disposeServiceCollectors();
this.waitForCanceled(0);
// Purge the canceler timer
_stateTimer.purge();
// close multicast socket
this.closeMulticastSocket();
this.getCache().clear();
if (logger.isLoggable(Level.FINER)) {
logger.finer("recover() All is clean");
}
// All is clear now start the services
for (ServiceInfo info : oldServiceInfos) {
((ServiceInfoImpl) info).recoverState();
}
this.recoverState();
try {
this.openMulticastSocket(this.getLocalHost());
this.start(oldServiceInfos);
} catch (final Exception exception) {
logger.log(Level.WARNING, "recover() Start services exception ", exception);
}
logger.log(Level.WARNING, "recover() We are back!");
}
}
public void cleanCache() {
long now = System.currentTimeMillis();
for (DNSEntry entry : this.getCache().allValues()) {
try {
DNSRecord record = (DNSRecord) entry;
if (record.isExpired(now)) {
this.updateRecord(now, record, Operation.Remove);
this.getCache().removeDNSEntry(record);
} else if (record.isStale(now)) {
// we should query for the record we care about i.e. those in the service collectors
this.renewServiceCollector(record);
}
} catch (Exception exception) {
logger.log(Level.SEVERE, this.getName() + ".Error while reaping records: " + entry, exception);
logger.severe(this.toString());
}
}
}
/**
* {@inheritDoc}
*/
@Override
public void close() {
if (this.isCanceling() || this.isCanceled()) {
return;
}
if (logger.isLoggable(Level.FINER)) {
logger.finer("Cancelling JmDNS: " + this);
}
// Stop JmDNS
// This protects against recursive calls
if (this.cancelState()) {
// We got the tie break now clean up
// Stop the timer
_timer.cancel();
// Cancel all services
this.unregisterAllServices();
this.disposeServiceCollectors();
if (logger.isLoggable(Level.FINER)) {
logger.finer("Wait for JmDNS cancel: " + this);
}
this.waitForCanceled(DNSConstants.CLOSE_TIMEOUT);
// Stop the canceler timer
_stateTimer.cancel();
// Stop the executor
_executor.shutdown();
// close socket
this.closeMulticastSocket();
// remove the shutdown hook
if (_shutdown != null) {
Runtime.getRuntime().removeShutdownHook(_shutdown);
}
if (logger.isLoggable(Level.FINER)) {
logger.finer("JmDNS closed.");
}
}
}
/**
* {@inheritDoc}
*/
@Override
@Deprecated
public void printServices() {
System.err.println(toString());
}
/**
* {@inheritDoc}
*/
@Override
public String toString() {
final StringBuilder aLog = new StringBuilder(2048);
aLog.append("\t---- Local Host -----");
aLog.append("\n\t");
aLog.append(_localHost);
aLog.append("\n\t---- Services -----");
for (String key : _services.keySet()) {
aLog.append("\n\t\tService: ");
aLog.append(key);
aLog.append(": ");
aLog.append(_services.get(key));
}
aLog.append("\n");
aLog.append("\t---- Types ----");
for (String key : _serviceTypes.keySet()) {
ServiceTypeEntry subtypes = _serviceTypes.get(key);
aLog.append("\n\t\tType: ");
aLog.append(subtypes.getType());
aLog.append(": ");
aLog.append(subtypes.isEmpty() ? "no subtypes" : subtypes);
}
aLog.append("\n");
aLog.append(_cache.toString());
aLog.append("\n");
aLog.append("\t---- Service Collectors ----");
for (String key : _serviceCollectors.keySet()) {
aLog.append("\n\t\tService Collector: ");
aLog.append(key);
aLog.append(": ");
aLog.append(_serviceCollectors.get(key));
}
aLog.append("\n");
aLog.append("\t---- Service Listeners ----");
for (String key : _serviceListeners.keySet()) {
aLog.append("\n\t\tService Listener: ");
aLog.append(key);
aLog.append(": ");
aLog.append(_serviceListeners.get(key));
}
return aLog.toString();
}
/**
* {@inheritDoc}
*/
@Override
public ServiceInfo[] list(String type) {
return this.list(type, DNSConstants.SERVICE_INFO_TIMEOUT);
}
/**
* {@inheritDoc}
*/
@Override
public ServiceInfo[] list(String type, long timeout) {
this.cleanCache();
// Implementation note: The first time a list for a given type is
// requested, a ServiceCollector is created which collects service
// infos. This greatly speeds up the performance of subsequent calls
// to this method. The caveats are, that 1) the first call to this
// method for a given type is slow, and 2) we spawn a ServiceCollector
// instance for each service type which increases network traffic a
// little.
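// Usage sketch (hypothetical service type): list("_http._tcp.local.", 6000) may block for up to
// roughly the timeout on the first call while the freshly created ServiceCollector gathers
// answers; later calls for the same type are served from the collector almost immediately.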
String aType = type.toLowerCase();
boolean newCollectorCreated = false;
if (this.isCanceling() || this.isCanceled()) {
return new ServiceInfo[0];
}
ServiceCollector collector = _serviceCollectors.get(aType);
if (collector == null) {
newCollectorCreated = _serviceCollectors.putIfAbsent(aType, new ServiceCollector(type)) == null;
collector = _serviceCollectors.get(aType);
if (newCollectorCreated) {
this.addServiceListener(aType, collector, ListenerStatus.SYNCHONEOUS);
}
}
if (logger.isLoggable(Level.FINER)) {
logger.finer(this.getName() + ".collector: " + collector);
}
// At this stage the collector should never be null but it keeps findbugs happy.
return (collector != null ? collector.list(timeout) : new ServiceInfo[0]);
}
/**
* {@inheritDoc}
*/
@Override
public Map<String, ServiceInfo[]> listBySubtype(String type) {
return this.listBySubtype(type, DNSConstants.SERVICE_INFO_TIMEOUT);
}
/**
* {@inheritDoc}
*/
@Override
public Map<String, ServiceInfo[]> listBySubtype(String type, long timeout) {
Map<String, List<ServiceInfo>> map = new HashMap<String, List<ServiceInfo>>(5);
for (ServiceInfo info : this.list(type, timeout)) {
String subtype = info.getSubtype().toLowerCase();
if (!map.containsKey(subtype)) {
map.put(subtype, new ArrayList<ServiceInfo>(10));
}
map.get(subtype).add(info);
}
Map<String, ServiceInfo[]> result = new HashMap<String, ServiceInfo[]>(map.size());
for (String subtype : map.keySet()) {
List<ServiceInfo> infoForSubType = map.get(subtype);
result.put(subtype, infoForSubType.toArray(new ServiceInfo[infoForSubType.size()]));
}
return result;
}
private void disposeServiceCollectors() {
if (logger.isLoggable(Level.FINER)) {
logger.finer("disposeServiceCollectors()");
}
for (String type : _serviceCollectors.keySet()) {
ServiceCollector collector = _serviceCollectors.get(type);
if (collector != null) {
this.removeServiceListener(type, collector);
_serviceCollectors.remove(type, collector);
}
}
}
/**
* Instances of ServiceCollector are used internally to speed up the performance of method <code>list(type)</code>.
*
* @see #list
*/
private static class ServiceCollector implements ServiceListener {
// private static Logger logger = Logger.getLogger(ServiceCollector.class.getName());
/**
* A set of collected service instance names.
*/
private final ConcurrentMap<String, ServiceInfo> _infos;
/**
* A set of collected service event waiting to be resolved.
*/
private final ConcurrentMap<String, ServiceEvent> _events;
/**
* This is the type we are listening for (only used for debugging).
*/
private final String _type;
/**
* This is used to force a wait on the first invocation of list.
*/
private volatile boolean _needToWaitForInfos;
public ServiceCollector(String type) {
super();
_infos = new ConcurrentHashMap<String, ServiceInfo>();
_events = new ConcurrentHashMap<String, ServiceEvent>();
_type = type;
_needToWaitForInfos = true;
}
/**
* A service has been added.
*
* @param event
* service event
*/
@Override
public void serviceAdded(ServiceEvent event) {
synchronized (this) {
ServiceInfo info = event.getInfo();
if ((info != null) && (info.hasData())) {
_infos.put(event.getName(), info);
} else {
String subtype = (info != null ? info.getSubtype() : "");
info = ((JmDNSImpl) event.getDNS()).resolveServiceInfo(event.getType(), event.getName(), subtype, true);
if (info != null) {
_infos.put(event.getName(), info);
} else {
_events.put(event.getName(), event);
}
}
}
}
/**
* A service has been removed.
*
* @param event
* service event
*/
@Override
public void serviceRemoved(ServiceEvent event) {
synchronized (this) {
_infos.remove(event.getName());
_events.remove(event.getName());
}
}
/**
* A service has been resolved. Its details are now available in the ServiceInfo record.
*
* @param event
* service event
*/
@Override
public void serviceResolved(ServiceEvent event) {
synchronized (this) {
_infos.put(event.getName(), event.getInfo());
_events.remove(event.getName());
}
}
/**
* Returns an array of all service infos which have been collected by this ServiceCollector.
*
* @param timeout
* timeout if the info list is empty.
* @return Service Info array
*/
public ServiceInfo[] list(long timeout) {
if (_infos.isEmpty() || !_events.isEmpty() || _needToWaitForInfos) {
long loops = (timeout / 200L);
if (loops < 1) {
loops = 1;
}
for (int i = 0; i < loops; i++) {
try {
Thread.sleep(200);
} catch (final InterruptedException e) {
/* Stub */
}
if (_events.isEmpty() && !_infos.isEmpty() && !_needToWaitForInfos) {
break;
}
}
}
_needToWaitForInfos = false;
return _infos.values().toArray(new ServiceInfo[_infos.size()]);
}
/**
* {@inheritDoc}
*/
@Override
public String toString() {
final StringBuffer aLog = new StringBuffer();
aLog.append("\n\tType: ");
aLog.append(_type);
if (_infos.isEmpty()) {
aLog.append("\n\tNo services collected.");
} else {
aLog.append("\n\tServices");
for (String key : _infos.keySet()) {
aLog.append("\n\t\tService: ");
aLog.append(key);
aLog.append(": ");
aLog.append(_infos.get(key));
}
}
if (_events.isEmpty()) {
aLog.append("\n\tNo event queued.");
} else {
aLog.append("\n\tEvents");
for (String key : _events.keySet()) {
aLog.append("\n\t\tEvent: ");
aLog.append(key);
aLog.append(": ");
aLog.append(_events.get(key));
}
}
return aLog.toString();
}
}
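/**
* Strips the service type suffix from a fully qualified name. Illustrative example (hypothetical
* values): toUnqualifiedName("_http._tcp.local.", "My Printer._http._tcp.local.") returns
* "My Printer"; names that do not end with the given type are returned unchanged.
*/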
static String toUnqualifiedName(String type, String qualifiedName) {
if (qualifiedName.endsWith(type) && !(qualifiedName.equals(type))) {
return qualifiedName.substring(0, qualifiedName.length() - type.length() - 1);
}
return qualifiedName;
}
public Map<String, ServiceInfo> getServices() {
return _services;
}
public void setLastThrottleIncrement(long lastThrottleIncrement) {
this._lastThrottleIncrement = lastThrottleIncrement;
}
public long getLastThrottleIncrement() {
return _lastThrottleIncrement;
}
public void setThrottle(int throttle) {
this._throttle = throttle;
}
public int getThrottle() {
return _throttle;
}
public static Random getRandom() {
return _random;
}
public void ioLock() {
_ioLock.lock();
}
public void ioUnlock() {
_ioLock.unlock();
}
public void setPlannedAnswer(DNSIncoming plannedAnswer) {
this._plannedAnswer = plannedAnswer;
}
public DNSIncoming getPlannedAnswer() {
return _plannedAnswer;
}
void setLocalHost(HostInfo localHost) {
this._localHost = localHost;
}
public Map<String, ServiceTypeEntry> getServiceTypes() {
return _serviceTypes;
}
public void setClosed(boolean closed) {
this._closed = closed;
}
public boolean isClosed() {
return _closed;
}
public MulticastSocket getSocket() {
return _socket;
}
public InetAddress getGroup() {
return _group;
}
}
|
package com.intellij.ide.actions.searcheverywhere;
import com.intellij.codeInsight.navigation.NavigationUtil;
import com.intellij.ide.IdeBundle;
import com.intellij.ide.actions.CopyReferenceAction;
import com.intellij.ide.actions.GotoClassPresentationUpdater;
import com.intellij.ide.structureView.StructureView;
import com.intellij.ide.structureView.StructureViewBuilder;
import com.intellij.ide.structureView.StructureViewTreeElement;
import com.intellij.ide.util.gotoByName.FilteringGotoByModel;
import com.intellij.ide.util.gotoByName.GotoClassModel2;
import com.intellij.ide.util.gotoByName.GotoClassSymbolConfiguration;
import com.intellij.ide.util.gotoByName.LanguageRef;
import com.intellij.ide.util.treeView.smartTree.TreeElement;
import com.intellij.lang.LanguageStructureViewBuilder;
import com.intellij.lang.PsiStructureViewFactory;
import com.intellij.navigation.AnonymousElementProvider;
import com.intellij.openapi.actionSystem.*;
import com.intellij.openapi.fileEditor.FileEditor;
import com.intellij.openapi.fileEditor.FileEditorManager;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.NlsContexts;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.pom.Navigatable;
import com.intellij.psi.PsiElement;
import com.intellij.psi.codeStyle.MinusculeMatcher;
import com.intellij.psi.codeStyle.NameUtil;
import com.intellij.psi.util.PsiUtilCore;
import com.intellij.ui.IdeUICustomization;
import org.jetbrains.annotations.Nls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.ArrayList;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* @author Konstantin Bulenkov
*/
public class ClassSearchEverywhereContributor extends AbstractGotoSEContributor {
private static final Pattern ourPatternToDetectAnonymousClasses = Pattern.compile("([.\\w]+)((\\$[\\d]+)*(\\$)?)");
private static final Pattern ourPatternToDetectMembers = Pattern.compile("(.+)(#)(.*)");
private final PersistentSearchEverywhereContributorFilter<LanguageRef> myFilter;
public ClassSearchEverywhereContributor(@NotNull AnActionEvent event) {
super(event);
myFilter = createLanguageFilter(event.getRequiredData(CommonDataKeys.PROJECT));
}
@NotNull
@Override
@Nls
public String getGroupName() {
return GotoClassPresentationUpdater.getTabTitlePluralized();
}
@NotNull
@Override
public String getFullGroupName() {
return String.join("/", GotoClassPresentationUpdater.getActionTitlePluralized());
}
@NotNull
@NlsContexts.Checkbox
public String includeNonProjectItemsText() {
return IdeUICustomization.getInstance().projectMessage("checkbox.include.non.project.items");
}
@Override
public int getSortWeight() {
return 100;
}
@NotNull
@Override
protected FilteringGotoByModel<LanguageRef> createModel(@NotNull Project project) {
GotoClassModel2 model = new GotoClassModel2(project);
if (myFilter != null) {
model.setFilterItems(myFilter.getSelectedElements());
}
return model;
}
@Override
protected @Nullable SearchEverywhereCommandInfo getFilterCommand() {
return new SearchEverywhereCommandInfo("c", IdeBundle.message("search.everywhere.filter.classes.description"), this);
}
@NotNull
@Override
public List<AnAction> getActions(@NotNull Runnable onChanged) {
return doGetActions(includeNonProjectItemsText(), myFilter, onChanged);
}
@NotNull
@Override
public String filterControlSymbols(@NotNull String pattern) {
if (pattern.indexOf('#') != -1) {
pattern = applyPatternFilter(pattern, ourPatternToDetectMembers);
}
if (pattern.indexOf('$') != -1) {
pattern = applyPatternFilter(pattern, ourPatternToDetectAnonymousClasses);
}
return super.filterControlSymbols(pattern);
}
@Override
public int getElementPriority(@NotNull Object element, @NotNull String searchPattern) {
return super.getElementPriority(element, searchPattern) + 5;
}
@Override
protected PsiElement preparePsi(PsiElement psiElement, int modifiers, String searchText) {
String path = pathToAnonymousClass(searchText);
if (path != null) {
psiElement = getElement(psiElement, path);
}
return super.preparePsi(psiElement, modifiers, searchText);
}
@Nullable
@Override
protected Navigatable createExtendedNavigatable(PsiElement psi, String searchText, int modifiers) {
Navigatable res = super.createExtendedNavigatable(psi, searchText, modifiers);
if (res != null) {
return res;
}
VirtualFile file = PsiUtilCore.getVirtualFile(psi);
String memberName = getMemberName(searchText);
if (file != null && memberName != null) {
Navigatable delegate = findMember(memberName, searchText, psi, file);
if (delegate != null) {
return new Navigatable() {
@Override
public void navigate(boolean requestFocus) {
NavigationUtil.activateFileWithPsiElement(psi, openInCurrentWindow(modifiers));
delegate.navigate(true);
}
@Override
public boolean canNavigate() {
return delegate.canNavigate();
}
@Override
public boolean canNavigateToSource() {
return delegate.canNavigateToSource();
}
};
}
}
return null;
}
private static String pathToAnonymousClass(String searchedText) {
return pathToAnonymousClass(ourPatternToDetectAnonymousClasses.matcher(searchedText));
}
@Nullable
public static String pathToAnonymousClass(Matcher matcher) {
if (matcher.matches()) {
String path = matcher.group(2);
if (path != null) {
path = path.trim();
if (path.endsWith("$") && path.length() >= 2) {
path = path.substring(0, path.length() - 2);
}
if (!path.isEmpty()) return path;
}
}
return null;
}
private static String getMemberName(String searchedText) {
final int index = searchedText.lastIndexOf('#');
if (index == -1) {
return null;
}
String name = searchedText.substring(index + 1).trim();
return StringUtil.isEmpty(name) ? null : name;
}
@Nullable
public static Navigatable findMember(String memberPattern, String fullPattern, PsiElement psiElement, VirtualFile file) {
final PsiStructureViewFactory factory = LanguageStructureViewBuilder.INSTANCE.forLanguage(psiElement.getLanguage());
final StructureViewBuilder builder = factory == null ? null : factory.getStructureViewBuilder(psiElement.getContainingFile());
final FileEditor[] editors = FileEditorManager.getInstance(psiElement.getProject()).getEditors(file);
if (builder == null || editors.length == 0) {
return null;
}
final StructureView view = builder.createStructureView(editors[0], psiElement.getProject());
try {
final StructureViewTreeElement element = findElement(view.getTreeModel().getRoot(), psiElement, 4);
if (element == null) {
return null;
}
MinusculeMatcher matcher = NameUtil.buildMatcher(memberPattern).build();
int max = Integer.MIN_VALUE;
Object target = null;
for (TreeElement treeElement : element.getChildren()) {
if (treeElement instanceof StructureViewTreeElement) {
Object value = ((StructureViewTreeElement)treeElement).getValue();
if (value instanceof PsiElement && value instanceof Navigatable &&
fullPattern.equals(CopyReferenceAction.elementToFqn((PsiElement)value))) {
return (Navigatable)value;
}
String presentableText = treeElement.getPresentation().getPresentableText();
if (presentableText != null) {
final int degree = matcher.matchingDegree(presentableText);
if (degree > max) {
max = degree;
target = ((StructureViewTreeElement)treeElement).getValue();
}
}
}
}
return target instanceof Navigatable ? (Navigatable)target : null;
}
finally {
Disposer.dispose(view);
}
}
@Nullable
private static StructureViewTreeElement findElement(StructureViewTreeElement node, PsiElement element, int hopes) {
final Object value = node.getValue();
if (value instanceof PsiElement) {
if (((PsiElement)value).isEquivalentTo(element)) return node;
if (hopes != 0) {
for (TreeElement child : node.getChildren()) {
if (child instanceof StructureViewTreeElement) {
final StructureViewTreeElement e = findElement((StructureViewTreeElement)child, element, hopes - 1);
if (e != null) {
return e;
}
}
}
}
}
return null;
}
@NotNull
public static PsiElement getElement(@NotNull PsiElement element, @NotNull String path) {
final String[] classes = path.split("\\$");
List<Integer> indexes = new ArrayList<>();
for (String cls : classes) {
if (cls.isEmpty()) continue;
try {
indexes.add(Integer.parseInt(cls) - 1);
}
catch (Exception e) {
return element;
}
}
PsiElement current = element;
for (int index : indexes) {
final PsiElement[] anonymousClasses = getAnonymousClasses(current);
if (index >= 0 && index < anonymousClasses.length) {
current = anonymousClasses[index];
}
else {
return current;
}
}
return current;
}
private static PsiElement @NotNull [] getAnonymousClasses(@NotNull PsiElement element) {
for (AnonymousElementProvider provider : AnonymousElementProvider.EP_NAME.getExtensionList()) {
final PsiElement[] elements = provider.getAnonymousElements(element);
if (elements.length > 0) {
return elements;
}
}
return PsiElement.EMPTY_ARRAY;
}
public static class Factory implements SearchEverywhereContributorFactory<Object> {
@NotNull
@Override
public SearchEverywhereContributor<Object> createContributor(@NotNull AnActionEvent initEvent) {
return new ClassSearchEverywhereContributor(initEvent);
}
}
@NotNull
static PersistentSearchEverywhereContributorFilter<LanguageRef> createLanguageFilter(@NotNull Project project) {
List<LanguageRef> items = LanguageRef.forAllLanguages();
GotoClassSymbolConfiguration persistentConfig = GotoClassSymbolConfiguration.getInstance(project);
return new PersistentSearchEverywhereContributorFilter<>(items, persistentConfig, LanguageRef::getDisplayName, LanguageRef::getIcon);
}
}
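// Hedged usage sketch for the anonymous-class and member patterns above (inputs are hypothetical,
// for illustration only):
//   pathToAnonymousClass("Foo$1$2")   -> "$1$2"
//   getElement(fooClassPsi, "$1$2")   -> descends into the first anonymous class of Foo, then into
//                                        the second anonymous class nested inside it
//   getMemberName("Foo#bar")          -> "bar", which findMember(...) then matches against the
//                                        containing file's structure view entries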
|
package javax.jmdns.impl;
import java.io.IOException;
import java.net.DatagramPacket;
import java.net.Inet4Address;
import java.net.Inet6Address;
import java.net.InetAddress;
import java.net.MulticastSocket;
import java.net.SocketException;
import java.util.AbstractMap;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Random;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.locks.ReentrantLock;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.jmdns.JmDNS;
import javax.jmdns.ServiceEvent;
import javax.jmdns.ServiceInfo;
import javax.jmdns.ServiceInfo.Fields;
import javax.jmdns.ServiceListener;
import javax.jmdns.ServiceTypeListener;
import javax.jmdns.impl.ListenerStatus.ServiceListenerStatus;
import javax.jmdns.impl.ListenerStatus.ServiceTypeListenerStatus;
import javax.jmdns.impl.constants.DNSConstants;
import javax.jmdns.impl.constants.DNSRecordClass;
import javax.jmdns.impl.constants.DNSRecordType;
import javax.jmdns.impl.constants.DNSState;
import javax.jmdns.impl.tasks.DNSTask;
// REMIND: multiple IP addresses
/**
* mDNS implementation in Java.
*
* @author Arthur van Hoff, Rick Blair, Jeff Sonstein, Werner Randelshofer, Pierre Frisch, Scott Lewis
*/
public class JmDNSImpl extends JmDNS implements DNSStatefulObject, DNSTaskStarter {
private static Logger logger = Logger.getLogger(JmDNSImpl.class.getName());
public enum Operation {
Remove, Update, Add, RegisterServiceType, Noop
}
/**
* This is the multicast group we are listening to for multicast DNS messages.
*/
private volatile InetAddress _group;
/**
* This is our multicast socket.
*/
private volatile MulticastSocket _socket;
/**
* Holds instances of JmDNS.DNSListener. Must be a synchronized collection, because it is updated from concurrent threads.
*/
private final List<DNSListener> _listeners;
/**
* Holds instances of ServiceListener's. Keys are Strings holding a fully qualified service type. Values are LinkedList's of ServiceListener's.
*/
private final ConcurrentMap<String, List<ServiceListenerStatus>> _serviceListeners;
/**
* Holds instances of ServiceTypeListener's.
*/
private final Set<ServiceTypeListenerStatus> _typeListeners;
/**
* Cache for DNSEntry's.
*/
private final DNSCache _cache;
/**
* This hashtable holds the services that have been registered. Keys are instances of String which hold an all lower-case version of the fully qualified service name. Values are instances of ServiceInfo.
*/
private final ConcurrentMap<String, ServiceInfo> _services;
/**
* This hashtable holds the service types that have been registered or that have been received in an incoming datagram.<br/>
* Keys are instances of String which hold an all lower-case version of the fully qualified service type.<br/>
* Values hold the fully qualified service type.
*/
private final ConcurrentMap<String, ServiceTypeEntry> _serviceTypes;
private volatile Delegate _delegate;
/**
* This is used to store type entries. The type is stored as an instance variable and the map supports the subtypes.
* <p>
* The key is the lowercase version; the value is the case-preserved version.
* </p>
*/
public static class ServiceTypeEntry extends AbstractMap<String, String> implements Cloneable {
private final Set<Map.Entry<String, String>> _entrySet;
private final String _type;
private static class SubTypeEntry implements Entry<String, String>, java.io.Serializable, Cloneable {
private static final long serialVersionUID = 9188503522395855322L;
private final String _key;
private final String _value;
public SubTypeEntry(String subtype) {
super();
_value = (subtype != null ? subtype : "");
_key = _value.toLowerCase();
}
/**
* {@inheritDoc}
*/
@Override
public String getKey() {
return _key;
}
/**
* {@inheritDoc}
*/
@Override
public String getValue() {
return _value;
}
/**
* Replaces the value corresponding to this entry with the specified value (optional operation). This implementation simply throws <tt>UnsupportedOperationException</tt>, as this class implements an <i>immutable</i> map entry.
*
* @param value
* new value to be stored in this entry
* @return (Does not return)
* @exception UnsupportedOperationException
* always
*/
@Override
public String setValue(String value) {
throw new UnsupportedOperationException();
}
/**
* {@inheritDoc}
*/
@Override
public boolean equals(Object entry) {
if (!(entry instanceof Map.Entry)) {
return false;
}
return this.getKey().equals(((Map.Entry<?, ?>) entry).getKey()) && this.getValue().equals(((Map.Entry<?, ?>) entry).getValue());
}
/**
* {@inheritDoc}
*/
@Override
public int hashCode() {
return (_key == null ? 0 : _key.hashCode()) ^ (_value == null ? 0 : _value.hashCode());
}
/*
* (non-Javadoc)
* @see java.lang.Object#clone()
*/
@Override
public SubTypeEntry clone() {
// Immutable object
return this;
}
/**
* {@inheritDoc}
*/
@Override
public String toString() {
return _key + "=" + _value;
}
}
public ServiceTypeEntry(String type) {
super();
this._type = type;
this._entrySet = new HashSet<Map.Entry<String, String>>();
}
/**
* The type associated with this entry.
*
* @return the type
*/
public String getType() {
return _type;
}
/*
* (non-Javadoc)
* @see java.util.AbstractMap#entrySet()
*/
@Override
public Set<Map.Entry<String, String>> entrySet() {
return _entrySet;
}
/**
* Returns <code>true</code> if this set contains the specified element. More formally, returns <code>true</code> if and only if this set contains an element <code>e</code> such that
* <code>(o==null ? e==null : o.equals(e))</code>.
*
* @param subtype
* element whose presence in this set is to be tested
* @return <code>true</code> if this set contains the specified element
*/
public boolean contains(String subtype) {
return subtype != null && this.containsKey(subtype.toLowerCase());
}
/**
* Adds the specified element to this set if it is not already present. More formally, adds the specified element <code>e</code> to this set if this set contains no element <code>e2</code> such that
* <code>(e==null ? e2==null : e.equals(e2))</code>. If this set already contains the element, the call leaves the set unchanged and returns <code>false</code>.
*
* @param subtype
* element to be added to this set
* @return <code>true</code> if this set did not already contain the specified element
*/
public boolean add(String subtype) {
if (subtype == null || this.contains(subtype)) {
return false;
}
_entrySet.add(new SubTypeEntry(subtype));
return true;
}
/**
* Returns an iterator over the elements in this set. The elements are returned in no particular order (unless this set is an instance of some class that provides a guarantee).
*
* @return an iterator over the elements in this set
*/
public Iterator<String> iterator() {
return this.keySet().iterator();
}
/*
* (non-Javadoc)
* @see java.util.AbstractMap#clone()
*/
@Override
public ServiceTypeEntry clone() {
ServiceTypeEntry entry = new ServiceTypeEntry(this.getType());
for (Map.Entry<String, String> subTypeEntry : this.entrySet()) {
entry.add(subTypeEntry.getValue());
}
return entry;
}
/*
* (non-Javadoc)
* @see java.util.AbstractMap#toString()
*/
@Override
public String toString() {
final StringBuilder aLog = new StringBuilder(200);
if (this.isEmpty()) {
aLog.append("empty");
} else {
for (String value : this.values()) {
aLog.append(value);
aLog.append(", ");
}
aLog.setLength(aLog.length() - 2);
}
return aLog.toString();
}
}
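// Hedged usage sketch for ServiceTypeEntry (illustrative values only):
//   ServiceTypeEntry entry = new ServiceTypeEntry("_http._tcp.local.");
//   entry.add("Printer");        // stored with key "printer" and value "Printer"
//   entry.contains("PRINTER");   // -> true, lookups are case-insensitive
//   entry.add("printer");        // -> false, the subtype is already present
//   entry.values();              // -> the case-preserved form, "Printer"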
/**
* This is the shutdown hook we registered with the Java runtime.
*/
protected Thread _shutdown;
/**
* Handle on the local host
*/
private HostInfo _localHost;
private Thread _incomingListener;
/**
* Throttle count. This is used to count the overall number of probes sent by JmDNS since the last throttle increment.
*/
private int _throttle;
/**
* Last throttle increment.
*/
private long _lastThrottleIncrement;
private final ExecutorService _executor = Executors.newSingleThreadExecutor();
// 2009-09-16 ldeck: adding docbug patch with slight amendments
// 'Fixes two deadlock conditions involving JmDNS.close() - ID: 1473279'
/**
* The timer that triggers our announcements. We can't use the main timer object, because that could cause a deadlock where Prober waits on JmDNS.this lock held by close(), close() waits for us to finish, and we wait for Prober to give us back
* the timer thread so we can announce. (Patch from docbug in 2006-04-19 still wasn't patched .. so I'm doing it!)
*/
// private final Timer _cancelerTimer;
/**
* The source for random values. This is used to introduce random delays in responses. This reduces the potential for collisions on the network.
*/
private final static Random _random = new Random();
/**
* This lock is used to coordinate processing of incoming and outgoing messages. This is needed, because the Rendezvous Conformance Test does not forgive race conditions.
*/
private final ReentrantLock _ioLock = new ReentrantLock();
/**
* If an incoming packet which needs an answer is truncated, we store it here. We add more incoming DNSRecords to it, until the JmDNS.Responder timer picks it up.<br/>
* FIXME [PJYF June 8 2010]: This does not work well with multiple planned answers for packets that came in from different clients.
*/
private DNSIncoming _plannedAnswer;
// State machine
/**
* This hashtable is used to maintain a list of service types being collected by this JmDNS instance. The key of the hashtable is a service type name; the value is an instance of JmDNS.ServiceCollector.
*
* @see #list
*/
private final ConcurrentMap<String, ServiceCollector> _serviceCollectors;
private final String _name;
/**
* Main method to display API information if run from java -jar
*
* @param argv
* the command line arguments
*/
public static void main(String[] argv) {
String version = null;
try {
final Properties pomProperties = new Properties();
pomProperties.load(JmDNSImpl.class.getResourceAsStream("/META-INF/maven/javax.jmdns/jmdns/pom.properties"));
version = pomProperties.getProperty("version");
} catch (Exception e) {
version = "RUNNING.IN.IDE.FULL";
}
System.out.println("JmDNS version \"" + version + "\"");
System.out.println(" ");
System.out.println("Running on java version \"" + System.getProperty("java.version") + "\"" + " (build " + System.getProperty("java.runtime.version") + ")" + " from " + System.getProperty("java.vendor"));
System.out.println("Operating environment \"" + System.getProperty("os.name") + "\"" + " version " + System.getProperty("os.version") + " on " + System.getProperty("os.arch"));
System.out.println("For more information on JmDNS please visit https://sourceforge.net/projects/jmdns/");
}
/**
* Create an instance of JmDNS and bind it to a specific network interface given its IP-address.
*
* @param address
* IP address to bind to.
* @param name
* name of the newly created JmDNS
* @exception IOException
*/
public JmDNSImpl(InetAddress address, String name) throws IOException {
super();
if (logger.isLoggable(Level.FINER)) {
logger.finer("JmDNS instance created");
}
_cache = new DNSCache(100);
_listeners = Collections.synchronizedList(new ArrayList<DNSListener>());
_serviceListeners = new ConcurrentHashMap<String, List<ServiceListenerStatus>>();
_typeListeners = Collections.synchronizedSet(new HashSet<ServiceTypeListenerStatus>());
_serviceCollectors = new ConcurrentHashMap<String, ServiceCollector>();
_services = new ConcurrentHashMap<String, ServiceInfo>(20);
_serviceTypes = new ConcurrentHashMap<String, ServiceTypeEntry>(20);
_localHost = HostInfo.newHostInfo(address, this, name);
_name = (name != null ? name : _localHost.getName());
// _cancelerTimer = new Timer("JmDNS.cancelerTimer");
// (ldeck 2.1.1) preventing shutdown blocking thread
// _shutdown = new Thread(new Shutdown(), "JmDNS.Shutdown");
// Runtime.getRuntime().addShutdownHook(_shutdown);
// Bind to multicast socket
this.openMulticastSocket(this.getLocalHost());
this.start(this.getServices().values());
this.startReaper();
}
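// Hedged usage sketch: applications normally reach this implementation through the public
// javax.jmdns.JmDNS factory methods rather than constructing JmDNSImpl directly
// (the host name below is hypothetical):
//   JmDNS jmdns = JmDNS.create(InetAddress.getLocalHost(), "my-host");
//   // ... register services, add listeners, call list(...) ...
//   jmdns.close();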
private void start(Collection<? extends ServiceInfo> serviceInfos) {
if (_incomingListener == null) {
_incomingListener = new SocketListener(this);
_incomingListener.start();
}
this.startProber();
for (ServiceInfo info : serviceInfos) {
try {
this.registerService(new ServiceInfoImpl(info));
} catch (final Exception exception) {
logger.log(Level.WARNING, "start() Registration exception ", exception);
}
}
}
private void openMulticastSocket(HostInfo hostInfo) throws IOException {
if (_group == null) {
if (hostInfo.getInetAddress() instanceof Inet6Address) {
_group = InetAddress.getByName(DNSConstants.MDNS_GROUP_IPV6);
} else {
_group = InetAddress.getByName(DNSConstants.MDNS_GROUP);
}
}
if (_socket != null) {
this.closeMulticastSocket();
}
// SocketAddress address = new InetSocketAddress((hostInfo != null ? hostInfo.getInetAddress() : null), DNSConstants.MDNS_PORT);
// System.out.println("Socket Address: " + address);
// try {
// _socket = new MulticastSocket(address);
// } catch (Exception exception) {
// logger.log(Level.WARNING, "openMulticastSocket() Open socket exception Address: " + address + ", ", exception);
// // The most likely cause is a duplicate address lets open without specifying the address
// _socket = new MulticastSocket(DNSConstants.MDNS_PORT);
_socket = new MulticastSocket(DNSConstants.MDNS_PORT);
if ((hostInfo != null) && (hostInfo.getInterface() != null)) {
try {
_socket.setNetworkInterface(hostInfo.getInterface());
} catch (SocketException e) {
if (logger.isLoggable(Level.FINE)) {
logger.fine("openMulticastSocket() Set network interface exception: " + e.getMessage());
}
}
}
_socket.setTimeToLive(255);
_socket.joinGroup(_group);
}
private void closeMulticastSocket() {
// jP: 2010-01-18. See below. We'll need this monitor...
// assert (Thread.holdsLock(this));
if (logger.isLoggable(Level.FINER)) {
logger.finer("closeMulticastSocket()");
}
if (_socket != null) {
// close socket
try {
try {
_socket.leaveGroup(_group);
} catch (SocketException exception) {
}
_socket.close();
// jP: 2010-01-18. It isn't safe to join() on the listener
// thread - it attempts to lock the IoLock object, and deadlock
// ensues. Per issue #2933183, changed this to wait on the JmDNS
// monitor, checking on each notify (or timeout) that the
// listener thread has stopped.
while (_incomingListener != null && _incomingListener.isAlive()) {
synchronized (this) {
try {
if (_incomingListener != null && _incomingListener.isAlive()) {
// wait time is arbitrary, we're really expecting notification.
if (logger.isLoggable(Level.FINER)) {
logger.finer("closeMulticastSocket(): waiting for jmDNS monitor");
}
this.wait(1000);
}
} catch (InterruptedException ignored) {
// Ignored
}
}
}
_incomingListener = null;
} catch (final Exception exception) {
logger.log(Level.WARNING, "closeMulticastSocket() Close socket exception ", exception);
}
_socket = null;
}
}
// State machine
/**
* {@inheritDoc}
*/
@Override
public boolean advanceState(DNSTask task) {
return this._localHost.advanceState(task);
}
/**
* {@inheritDoc}
*/
@Override
public boolean revertState() {
return this._localHost.revertState();
}
/**
* {@inheritDoc}
*/
@Override
public boolean cancelState() {
return this._localHost.cancelState();
}
/**
* {@inheritDoc}
*/
@Override
public boolean closeState() {
return this._localHost.closeState();
}
/**
* {@inheritDoc}
*/
@Override
public boolean recoverState() {
return this._localHost.recoverState();
}
/**
* {@inheritDoc}
*/
@Override
public JmDNSImpl getDns() {
return this;
}
/**
* {@inheritDoc}
*/
@Override
public void associateWithTask(DNSTask task, DNSState state) {
this._localHost.associateWithTask(task, state);
}
/**
* {@inheritDoc}
*/
@Override
public void removeAssociationWithTask(DNSTask task) {
this._localHost.removeAssociationWithTask(task);
}
/**
* {@inheritDoc}
*/
@Override
public boolean isAssociatedWithTask(DNSTask task, DNSState state) {
return this._localHost.isAssociatedWithTask(task, state);
}
/**
* {@inheritDoc}
*/
@Override
public boolean isProbing() {
return this._localHost.isProbing();
}
/**
* {@inheritDoc}
*/
@Override
public boolean isAnnouncing() {
return this._localHost.isAnnouncing();
}
/**
* {@inheritDoc}
*/
@Override
public boolean isAnnounced() {
return this._localHost.isAnnounced();
}
/**
* {@inheritDoc}
*/
@Override
public boolean isCanceling() {
return this._localHost.isCanceling();
}
/**
* {@inheritDoc}
*/
@Override
public boolean isCanceled() {
return this._localHost.isCanceled();
}
/**
* {@inheritDoc}
*/
@Override
public boolean isClosing() {
return this._localHost.isClosing();
}
/**
* {@inheritDoc}
*/
@Override
public boolean isClosed() {
return this._localHost.isClosed();
}
/**
* {@inheritDoc}
*/
@Override
public boolean waitForAnnounced(long timeout) {
return this._localHost.waitForAnnounced(timeout);
}
/**
* {@inheritDoc}
*/
@Override
public boolean waitForCanceled(long timeout) {
return this._localHost.waitForCanceled(timeout);
}
/**
* Return the DNSCache associated with the cache variable
*
* @return DNS cache
*/
public DNSCache getCache() {
return _cache;
}
/**
* {@inheritDoc}
*/
@Override
public String getName() {
return _name;
}
/**
* {@inheritDoc}
*/
@Override
public String getHostName() {
return _localHost.getName();
}
/**
* Returns the local host info
*
* @return local host info
*/
public HostInfo getLocalHost() {
return _localHost;
}
/**
* {@inheritDoc}
*/
@Override
public InetAddress getInetAddress() throws IOException {
return _localHost.getInetAddress();
}
/**
* {@inheritDoc}
*/
@Override
@Deprecated
public InetAddress getInterface() throws IOException {
return _socket.getInterface();
}
/**
* {@inheritDoc}
*/
@Override
public ServiceInfo getServiceInfo(String type, String name) {
return this.getServiceInfo(type, name, false, DNSConstants.SERVICE_INFO_TIMEOUT);
}
/**
* {@inheritDoc}
*/
@Override
public ServiceInfo getServiceInfo(String type, String name, long timeout) {
return this.getServiceInfo(type, name, false, timeout);
}
/**
* {@inheritDoc}
*/
@Override
public ServiceInfo getServiceInfo(String type, String name, boolean persistent) {
return this.getServiceInfo(type, name, persistent, DNSConstants.SERVICE_INFO_TIMEOUT);
}
/**
* {@inheritDoc}
*/
@Override
public ServiceInfo getServiceInfo(String type, String name, boolean persistent, long timeout) {
final ServiceInfoImpl info = this.resolveServiceInfo(type, name, "", persistent);
this.waitForInfoData(info, timeout);
return (info.hasData() ? info : null);
}
ServiceInfoImpl resolveServiceInfo(String type, String name, String subtype, boolean persistent) {
this.cleanCache();
String loType = type.toLowerCase();
this.registerServiceType(type);
if (_serviceCollectors.putIfAbsent(loType, new ServiceCollector(type)) == null) {
this.addServiceListener(loType, _serviceCollectors.get(loType), ListenerStatus.SYNCHONEOUS);
}
// Check if the answer is in the cache.
final ServiceInfoImpl info = this.getServiceInfoFromCache(type, name, subtype, persistent);
// We still run the resolver to do the dispatch but if the info is already there it will quit immediately
this.startServiceInfoResolver(info);
return info;
}
ServiceInfoImpl getServiceInfoFromCache(String type, String name, String subtype, boolean persistent) {
// Check if the answer is in the cache.
ServiceInfoImpl info = new ServiceInfoImpl(type, name, subtype, 0, 0, 0, persistent, (byte[]) null);
DNSEntry pointerEntry = this.getCache().getDNSEntry(new DNSRecord.Pointer(type, DNSRecordClass.CLASS_ANY, false, 0, info.getQualifiedName()));
if (pointerEntry instanceof DNSRecord) {
ServiceInfoImpl cachedInfo = (ServiceInfoImpl) ((DNSRecord) pointerEntry).getServiceInfo(persistent);
if (cachedInfo != null) {
// To get a complete info record we need to retrieve the service, address and the text bytes.
Map<Fields, String> map = cachedInfo.getQualifiedNameMap();
byte[] srvBytes = null;
String server = "";
DNSEntry serviceEntry = this.getCache().getDNSEntry(info.getQualifiedName(), DNSRecordType.TYPE_SRV, DNSRecordClass.CLASS_ANY);
if (serviceEntry instanceof DNSRecord) {
ServiceInfo cachedServiceEntryInfo = ((DNSRecord) serviceEntry).getServiceInfo(persistent);
if (cachedServiceEntryInfo != null) {
cachedInfo = new ServiceInfoImpl(map, cachedServiceEntryInfo.getPort(), cachedServiceEntryInfo.getWeight(), cachedServiceEntryInfo.getPriority(), persistent, (byte[]) null);
srvBytes = cachedServiceEntryInfo.getTextBytes();
server = cachedServiceEntryInfo.getServer();
}
}
DNSEntry addressEntry = this.getCache().getDNSEntry(server, DNSRecordType.TYPE_A, DNSRecordClass.CLASS_ANY);
if (addressEntry instanceof DNSRecord) {
ServiceInfo cachedAddressInfo = ((DNSRecord) addressEntry).getServiceInfo(persistent);
if (cachedAddressInfo != null) {
for (Inet4Address address : cachedAddressInfo.getInet4Addresses()) {
cachedInfo.addAddress(address);
}
cachedInfo._setText(cachedAddressInfo.getTextBytes());
}
}
addressEntry = this.getCache().getDNSEntry(server, DNSRecordType.TYPE_AAAA, DNSRecordClass.CLASS_ANY);
if (addressEntry instanceof DNSRecord) {
ServiceInfo cachedAddressInfo = ((DNSRecord) addressEntry).getServiceInfo(persistent);
if (cachedAddressInfo != null) {
for (Inet6Address address : cachedAddressInfo.getInet6Addresses()) {
cachedInfo.addAddress(address);
}
cachedInfo._setText(cachedAddressInfo.getTextBytes());
}
}
DNSEntry textEntry = this.getCache().getDNSEntry(cachedInfo.getQualifiedName(), DNSRecordType.TYPE_TXT, DNSRecordClass.CLASS_ANY);
if (textEntry instanceof DNSRecord) {
ServiceInfo cachedTextInfo = ((DNSRecord) textEntry).getServiceInfo(persistent);
if (cachedTextInfo != null) {
cachedInfo._setText(cachedTextInfo.getTextBytes());
}
}
if (cachedInfo.getTextBytes().length == 0) {
cachedInfo._setText(srvBytes);
}
if (cachedInfo.hasData()) {
info = cachedInfo;
}
}
}
return info;
}
private void waitForInfoData(ServiceInfo info, long timeout) {
synchronized (info) {
long loops = (timeout / 200L);
if (loops < 1) {
loops = 1;
}
for (int i = 0; i < loops; i++) {
if (info.hasData()) {
break;
}
try {
info.wait(200);
} catch (final InterruptedException e) {
/* Stub */
}
}
}
}
/**
* {@inheritDoc}
*/
@Override
public void requestServiceInfo(String type, String name) {
this.requestServiceInfo(type, name, false, DNSConstants.SERVICE_INFO_TIMEOUT);
}
/**
* {@inheritDoc}
*/
@Override
public void requestServiceInfo(String type, String name, boolean persistent) {
this.requestServiceInfo(type, name, persistent, DNSConstants.SERVICE_INFO_TIMEOUT);
}
/**
* {@inheritDoc}
*/
@Override
public void requestServiceInfo(String type, String name, long timeout) {
this.requestServiceInfo(type, name, false, timeout);
}
/**
* {@inheritDoc}
*/
@Override
public void requestServiceInfo(String type, String name, boolean persistent, long timeout) {
final ServiceInfoImpl info = this.resolveServiceInfo(type, name, "", persistent);
this.waitForInfoData(info, timeout);
}
void handleServiceResolved(ServiceEvent event) {
List<ServiceListenerStatus> list = _serviceListeners.get(event.getType().toLowerCase());
final List<ServiceListenerStatus> listCopy;
if ((list != null) && (!list.isEmpty())) {
if ((event.getInfo() != null) && event.getInfo().hasData()) {
final ServiceEvent localEvent = event;
synchronized (list) {
listCopy = new ArrayList<ServiceListenerStatus>(list);
}
for (final ServiceListenerStatus listener : listCopy) {
_executor.submit(new Runnable() {
/** {@inheritDoc} */
@Override
public void run() {
listener.serviceResolved(localEvent);
}
});
}
}
}
}
/**
* {@inheritDoc}
*/
@Override
public void addServiceTypeListener(ServiceTypeListener listener) throws IOException {
ServiceTypeListenerStatus status = new ServiceTypeListenerStatus(listener, ListenerStatus.ASYNCHONEOUS);
_typeListeners.add(status);
// report cached service types
for (String type : _serviceTypes.keySet()) {
status.serviceTypeAdded(new ServiceEventImpl(this, type, "", null));
}
this.startTypeResolver();
}
/**
* {@inheritDoc}
*/
@Override
public void removeServiceTypeListener(ServiceTypeListener listener) {
ServiceTypeListenerStatus status = new ServiceTypeListenerStatus(listener, ListenerStatus.ASYNCHONEOUS);
_typeListeners.remove(status);
}
/**
* {@inheritDoc}
*/
@Override
public void addServiceListener(String type, ServiceListener listener) {
this.addServiceListener(type, listener, ListenerStatus.ASYNCHONEOUS);
}
private void addServiceListener(String type, ServiceListener listener, boolean synch) {
ServiceListenerStatus status = new ServiceListenerStatus(listener, synch);
final String loType = type.toLowerCase();
List<ServiceListenerStatus> list = _serviceListeners.get(loType);
if (list == null) {
if (_serviceListeners.putIfAbsent(loType, new LinkedList<ServiceListenerStatus>()) == null) {
if (_serviceCollectors.putIfAbsent(loType, new ServiceCollector(type)) == null) {
// We have a problem here. The service collectors must be called synchronously so that their caches get cleaned up immediately, or we risk reporting stale service events.
this.addServiceListener(loType, _serviceCollectors.get(loType), ListenerStatus.SYNCHONEOUS);
}
}
list = _serviceListeners.get(loType);
}
if (list != null) {
synchronized (list) {
if (!list.contains(listener)) {
list.add(status);
}
}
}
// report cached service types
final List<ServiceEvent> serviceEvents = new ArrayList<ServiceEvent>();
Collection<DNSEntry> dnsEntryLits = this.getCache().allValues();
for (DNSEntry entry : dnsEntryLits) {
final DNSRecord record = (DNSRecord) entry;
if (record.getRecordType() == DNSRecordType.TYPE_SRV) {
if (record.getKey().endsWith(loType)) {
// Do not use the record's embedded method for generating the event; this will not work.
// serviceEvents.add(record.getServiceEvent(this));
serviceEvents.add(new ServiceEventImpl(this, record.getType(), toUnqualifiedName(record.getType(), record.getName()), record.getServiceInfo()));
}
}
}
// Actually call listener with all service events added above
for (ServiceEvent serviceEvent : serviceEvents) {
status.serviceAdded(serviceEvent);
}
// Create/start ServiceResolver
this.startServiceResolver(type);
}
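// Hedged usage sketch: registering a listener through the public API (the type string and the
// listener body are hypothetical):
//   jmdns.addServiceListener("_http._tcp.local.", new ServiceListener() {
//       @Override public void serviceAdded(ServiceEvent event)    { /* optionally request info */ }
//       @Override public void serviceRemoved(ServiceEvent event)  { /* drop cached state */ }
//       @Override public void serviceResolved(ServiceEvent event) { /* use event.getInfo() */ }
//   });
// Matching SRV records already in the cache are replayed to the listener right away, after which
// the ServiceResolver started above keeps it up to date.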
/**
* {@inheritDoc}
*/
@Override
public void removeServiceListener(String type, ServiceListener listener) {
String loType = type.toLowerCase();
List<ServiceListenerStatus> list = _serviceListeners.get(loType);
if (list != null) {
synchronized (list) {
ServiceListenerStatus status = new ServiceListenerStatus(listener, ListenerStatus.ASYNCHONEOUS);
list.remove(status);
if (list.isEmpty()) {
_serviceListeners.remove(loType, list);
}
}
}
}
/**
* {@inheritDoc}
*/
@Override
public void registerService(ServiceInfo infoAbstract) throws IOException {
if (this.isClosing() || this.isClosed()) {
throw new IllegalStateException("This DNS is closed.");
}
final ServiceInfoImpl info = (ServiceInfoImpl) infoAbstract;
if (info.getDns() != null) {
if (info.getDns() != this) {
throw new IllegalStateException("A service information can only be registered with a single instamce of JmDNS.");
} else if (_services.get(info.getKey()) != null) {
throw new IllegalStateException("A service information can only be registered once.");
}
}
info.setDns(this);
this.registerServiceType(info.getTypeWithSubtype());
// bind the service to this address
info.recoverState();
info.setServer(_localHost.getName());
info.addAddress(_localHost.getInet4Address());
info.addAddress(_localHost.getInet6Address());
this.waitForAnnounced(DNSConstants.SERVICE_INFO_TIMEOUT);
this.makeServiceNameUnique(info);
while (_services.putIfAbsent(info.getKey(), info) != null) {
this.makeServiceNameUnique(info);
}
this.startProber();
info.waitForAnnounced(DNSConstants.SERVICE_INFO_TIMEOUT);
if (logger.isLoggable(Level.FINE)) {
logger.fine("registerService() JmDNS registered service as " + info);
}
}
/**
* {@inheritDoc}
*/
@Override
public void unregisterService(ServiceInfo infoAbstract) {
final ServiceInfoImpl info = (ServiceInfoImpl) _services.get(infoAbstract.getKey());
if (info != null) {
info.cancelState();
this.startCanceler();
info.waitForCanceled(DNSConstants.CLOSE_TIMEOUT);
_services.remove(info.getKey(), info);
if (logger.isLoggable(Level.FINE)) {
logger.fine("unregisterService() JmDNS " + this.getName() + " unregistered service as " + info);
}
} else {
logger.warning(this.getName() + " removing unregistered service info: " + infoAbstract.getKey());
}
}
/**
* {@inheritDoc}
*/
@Override
public void unregisterAllServices() {
if (logger.isLoggable(Level.FINER)) {
logger.finer("unregisterAllServices()");
}
for (String name : _services.keySet()) {
ServiceInfoImpl info = (ServiceInfoImpl) _services.get(name);
if (info != null) {
if (logger.isLoggable(Level.FINER)) {
logger.finer("Cancelling service info: " + info);
}
info.cancelState();
}
}
this.startCanceler();
for (String name : _services.keySet()) {
ServiceInfoImpl info = (ServiceInfoImpl) _services.get(name);
if (info != null) {
if (logger.isLoggable(Level.FINER)) {
logger.finer("Wait for service info cancel: " + info);
}
info.waitForCanceled(DNSConstants.CLOSE_TIMEOUT);
_services.remove(name, info);
}
}
}
/**
* {@inheritDoc}
*/
@Override
public boolean registerServiceType(String type) {
boolean typeAdded = false;
Map<Fields, String> map = ServiceInfoImpl.decodeQualifiedNameMapForType(type);
String domain = map.get(Fields.Domain);
String protocol = map.get(Fields.Protocol);
String application = map.get(Fields.Application);
String subtype = map.get(Fields.Subtype);
final String name = (application.length() > 0 ? "_" + application + "." : "") + (protocol.length() > 0 ? "_" + protocol + "." : "") + domain + ".";
final String loname = name.toLowerCase();
if (logger.isLoggable(Level.FINE)) {
logger.fine(this.getName() + ".registering service type: " + type + " as: " + name + (subtype.length() > 0 ? " subtype: " + subtype : ""));
}
if (!_serviceTypes.containsKey(loname) && !application.toLowerCase().equals("dns-sd") && !domain.toLowerCase().endsWith("in-addr.arpa") && !domain.toLowerCase().endsWith("ip6.arpa")) {
typeAdded = _serviceTypes.putIfAbsent(loname, new ServiceTypeEntry(name)) == null;
if (typeAdded) {
final ServiceTypeListenerStatus[] list = _typeListeners.toArray(new ServiceTypeListenerStatus[_typeListeners.size()]);
final ServiceEvent event = new ServiceEventImpl(this, name, "", null);
for (final ServiceTypeListenerStatus status : list) {
_executor.submit(new Runnable() {
/** {@inheritDoc} */
@Override
public void run() {
status.serviceTypeAdded(event);
}
});
}
}
}
if (subtype.length() > 0) {
ServiceTypeEntry subtypes = _serviceTypes.get(loname);
if ((subtypes != null) && (!subtypes.contains(subtype))) {
synchronized (subtypes) {
if (!subtypes.contains(subtype)) {
typeAdded = true;
subtypes.add(subtype);
final ServiceTypeListenerStatus[] list = _typeListeners.toArray(new ServiceTypeListenerStatus[_typeListeners.size()]);
final ServiceEvent event = new ServiceEventImpl(this, "_" + subtype + "._sub." + name, "", null);
for (final ServiceTypeListenerStatus status : list) {
_executor.submit(new Runnable() {
/** {@inheritDoc} */
@Override
public void run() {
status.subTypeForServiceTypeAdded(event);
}
});
}
}
}
}
}
return typeAdded;
}
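// Hedged behaviour sketch (the type strings are illustrative and assume the decoding helper splits
// names following the usual DNS-SD convention used elsewhere in this class):
//   registerServiceType("_http._tcp.local.")
//       -> registers the type "_http._tcp.local." and notifies the type listeners
//   registerServiceType("_printer._sub._http._tcp.local.")
//       -> additionally records the subtype "printer" under that type and fires
//          subTypeForServiceTypeAdded(...)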
/**
* Generate a possibly unique name for a service using the information we have in the cache.
*
* @return returns true, if the name of the service info had to be changed.
*/
private boolean makeServiceNameUnique(ServiceInfoImpl info) {
final String originalQualifiedName = info.getKey();
final long now = System.currentTimeMillis();
boolean collision;
do {
collision = false;
// Check for collision in cache
for (DNSEntry dnsEntry : this.getCache().getDNSEntryList(info.getKey())) {
if (DNSRecordType.TYPE_SRV.equals(dnsEntry.getRecordType()) && !dnsEntry.isExpired(now)) {
final DNSRecord.Service s = (DNSRecord.Service) dnsEntry;
if (s.getPort() != info.getPort() || !s.getServer().equals(_localHost.getName())) {
if (logger.isLoggable(Level.FINER)) {
logger.finer("makeServiceNameUnique() JmDNS.makeServiceNameUnique srv collision:" + dnsEntry + " s.server=" + s.getServer() + " " + _localHost.getName() + " equals:" + (s.getServer().equals(_localHost.getName())));
}
info.setName(incrementName(info.getName()));
collision = true;
break;
}
}
}
// Check for collision with other service infos published by JmDNS
final ServiceInfo selfService = _services.get(info.getKey());
if (selfService != null && selfService != info) {
info.setName(incrementName(info.getName()));
collision = true;
}
}
while (collision);
return !(originalQualifiedName.equals(info.getKey()));
}
String incrementName(String name) {
String aName = name;
try {
final int l = aName.lastIndexOf('(');
final int r = aName.lastIndexOf(')');
if ((l >= 0) && (l < r)) {
aName = aName.substring(0, l) + "(" + (Integer.parseInt(aName.substring(l + 1, r)) + 1) + ")";
} else {
aName += " (2)";
}
} catch (final NumberFormatException e) {
aName += " (2)";
}
return aName;
}
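// Hedged behaviour sketch for incrementName (illustrative inputs):
//   incrementName("My Service")       -> "My Service (2)"
//   incrementName("My Service (2)")   -> "My Service (3)"
//   incrementName("Oddly (named)")    -> "Oddly (named) (2)"   // NumberFormatException path
// makeServiceNameUnique(...) applies this repeatedly until the name no longer collides.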
/**
* Add a listener for a question. The listener will receive updates of answers to the question as they arrive, or from the cache if they are already available.
*
* @param listener
* DNS listener
* @param question
* DNS query
*/
public void addListener(DNSListener listener, DNSQuestion question) {
final long now = System.currentTimeMillis();
// add the new listener
_listeners.add(listener);
// report existing matched records
if (question != null) {
for (DNSEntry dnsEntry : this.getCache().getDNSEntryList(question.getName().toLowerCase())) {
if (question.answeredBy(dnsEntry) && !dnsEntry.isExpired(now)) {
listener.updateRecord(this.getCache(), now, dnsEntry);
}
}
}
}
/**
* Remove a listener from all outstanding questions. The listener will no longer receive any updates.
*
* @param listener
* DNS listener
*/
public void removeListener(DNSListener listener) {
_listeners.remove(listener);
}
/**
* Renew a service when the record becomes stale. If there is no service collector for the type, this method does nothing.
*
* @param record
* DNS record
*/
public void renewServiceCollector(DNSRecord record) {
ServiceInfo info = record.getServiceInfo();
if (_serviceCollectors.containsKey(info.getType().toLowerCase())) {
// Create/start ServiceResolver
this.startServiceResolver(info.getType());
}
}
// Remind: Method updateRecord should receive a better name.
/**
* Notify all listeners that a record was updated.
*
* @param now
* update date
* @param rec
* DNS record
* @param operation
* DNS cache operation
*/
public void updateRecord(long now, DNSRecord rec, Operation operation) {
// We do not want to block the entire DNS while we are updating the record for each listener (service info)
{
List<DNSListener> listenerList = null;
synchronized (_listeners) {
listenerList = new ArrayList<DNSListener>(_listeners);
}
for (DNSListener listener : listenerList) {
listener.updateRecord(this.getCache(), now, rec);
}
}
if (DNSRecordType.TYPE_PTR.equals(rec.getRecordType()))
// if (DNSRecordType.TYPE_PTR.equals(rec.getRecordType()) || DNSRecordType.TYPE_SRV.equals(rec.getRecordType()))
{
ServiceEvent event = rec.getServiceEvent(this);
if ((event.getInfo() == null) || !event.getInfo().hasData()) {
// We do not care about the subtype because the info is only used if complete and the subtype will then be included.
ServiceInfo info = this.getServiceInfoFromCache(event.getType(), event.getName(), "", false);
if (info.hasData()) {
event = new ServiceEventImpl(this, event.getType(), event.getName(), info);
}
}
List<ServiceListenerStatus> list = _serviceListeners.get(event.getType().toLowerCase());
final List<ServiceListenerStatus> serviceListenerList;
if (list != null) {
synchronized (list) {
serviceListenerList = new ArrayList<ServiceListenerStatus>(list);
}
} else {
serviceListenerList = Collections.emptyList();
}
if (logger.isLoggable(Level.FINEST)) {
logger.finest(this.getName() + ".updating record for event: " + event + " list " + serviceListenerList + " operation: " + operation);
}
if (!serviceListenerList.isEmpty()) {
final ServiceEvent localEvent = event;
switch (operation) {
case Add:
for (final ServiceListenerStatus listener : serviceListenerList) {
if (listener.isSynchronous()) {
listener.serviceAdded(localEvent);
} else {
_executor.submit(new Runnable() {
/** {@inheritDoc} */
@Override
public void run() {
listener.serviceAdded(localEvent);
}
});
}
}
break;
case Remove:
for (final ServiceListenerStatus listener : serviceListenerList) {
if (listener.isSynchronous()) {
listener.serviceRemoved(localEvent);
} else {
_executor.submit(new Runnable() {
/** {@inheritDoc} */
@Override
public void run() {
listener.serviceRemoved(localEvent);
}
});
}
}
break;
default:
break;
}
}
}
}
void handleRecord(DNSRecord record, long now) {
DNSRecord newRecord = record;
Operation cacheOperation = Operation.Noop;
final boolean expired = newRecord.isExpired(now);
if (logger.isLoggable(Level.FINE)) {
logger.fine(this.getName() + " handle response: " + newRecord);
}
// update the cache
if (!newRecord.isServicesDiscoveryMetaQuery() && !newRecord.isDomainDiscoveryQuery()) {
final boolean unique = newRecord.isUnique();
final DNSRecord cachedRecord = (DNSRecord) this.getCache().getDNSEntry(newRecord);
if (logger.isLoggable(Level.FINE)) {
logger.fine(this.getName() + " handle response cached record: " + cachedRecord);
}
if (unique) {
for (DNSEntry entry : this.getCache().getDNSEntryList(newRecord.getKey())) {
if (newRecord.getRecordType().equals(entry.getRecordType()) && newRecord.getRecordClass().equals(entry.getRecordClass()) && (entry != cachedRecord)) {
((DNSRecord) entry).setWillExpireSoon(now);
}
}
}
if (cachedRecord != null) {
if (expired) {
// if the record has a 0 TTL, that means we have a cancel record; we need to delay the removal by 1s
if (newRecord.getTTL() == 0) {
cacheOperation = Operation.Noop;
cachedRecord.setWillExpireSoon(now);
// the actual record will be disposed of by the record reaper.
} else {
cacheOperation = Operation.Remove;
this.getCache().removeDNSEntry(cachedRecord);
}
} else {
// If the record content has changed we need to inform our listeners.
if (!newRecord.sameValue(cachedRecord) || (!newRecord.sameSubtype(cachedRecord) && (newRecord.getSubtype().length() > 0))) {
if (newRecord.isSingleValued()) {
cacheOperation = Operation.Update;
this.getCache().replaceDNSEntry(newRecord, cachedRecord);
} else {
// Address record can have more than one value on multi-homed machines
cacheOperation = Operation.Add;
this.getCache().addDNSEntry(newRecord);
}
} else {
cachedRecord.resetTTL(newRecord);
newRecord = cachedRecord;
}
}
} else {
if (!expired) {
cacheOperation = Operation.Add;
this.getCache().addDNSEntry(newRecord);
}
}
}
// Register new service types
if (newRecord.getRecordType() == DNSRecordType.TYPE_PTR) {
// handle DNSConstants.DNS_META_QUERY records
boolean typeAdded = false;
if (newRecord.isServicesDiscoveryMetaQuery()) {
// The service names are in the alias.
if (!expired) {
typeAdded = this.registerServiceType(((DNSRecord.Pointer) newRecord).getAlias());
}
return;
}
typeAdded |= this.registerServiceType(newRecord.getName());
if (typeAdded && (cacheOperation == Operation.Noop)) {
cacheOperation = Operation.RegisterServiceType;
}
}
// notify the listeners
if (cacheOperation != Operation.Noop) {
this.updateRecord(now, newRecord, cacheOperation);
}
}
/**
* Handle an incoming response. Cache answers, and pass them on to the appropriate questions.
*
* @exception IOException
*/
void handleResponse(DNSIncoming msg) throws IOException {
final long now = System.currentTimeMillis();
boolean hostConflictDetected = false;
boolean serviceConflictDetected = false;
for (DNSRecord newRecord : msg.getAllAnswers()) {
this.handleRecord(newRecord, now);
if (DNSRecordType.TYPE_A.equals(newRecord.getRecordType()) || DNSRecordType.TYPE_AAAA.equals(newRecord.getRecordType())) {
hostConflictDetected |= newRecord.handleResponse(this);
} else {
serviceConflictDetected |= newRecord.handleResponse(this);
}
}
if (hostConflictDetected || serviceConflictDetected) {
this.startProber();
}
}
/**
* Handle an incoming query. See if we can answer any part of it given our service infos.
*
* @param in
* @param addr
* @param port
* @exception IOException
*/
void handleQuery(DNSIncoming in, InetAddress addr, int port) throws IOException {
if (logger.isLoggable(Level.FINE)) {
logger.fine(this.getName() + ".handle query: " + in);
}
// Track known answers
boolean conflictDetected = false;
final long expirationTime = System.currentTimeMillis() + DNSConstants.KNOWN_ANSWER_TTL;
for (DNSRecord answer : in.getAllAnswers()) {
conflictDetected |= answer.handleQuery(this, expirationTime);
}
this.ioLock();
try {
if (_plannedAnswer != null) {
_plannedAnswer.append(in);
} else {
DNSIncoming plannedAnswer = in.clone();
if (in.isTruncated()) {
_plannedAnswer = plannedAnswer;
}
this.startResponder(plannedAnswer, port);
}
} finally {
this.ioUnlock();
}
final long now = System.currentTimeMillis();
for (DNSRecord answer : in.getAnswers()) {
this.handleRecord(answer, now);
}
if (conflictDetected) {
this.startProber();
}
}
public void respondToQuery(DNSIncoming in) {
this.ioLock();
try {
if (_plannedAnswer == in) {
_plannedAnswer = null;
}
} finally {
this.ioUnlock();
}
}
/**
* Add an answer to a question. Deal with the case when the outgoing packet overflows
*
* @param in
* @param addr
* @param port
* @param out
* @param rec
* @return outgoing answer
* @exception IOException
*/
public DNSOutgoing addAnswer(DNSIncoming in, InetAddress addr, int port, DNSOutgoing out, DNSRecord rec) throws IOException {
DNSOutgoing newOut = out;
if (newOut == null) {
newOut = new DNSOutgoing(DNSConstants.FLAGS_QR_RESPONSE | DNSConstants.FLAGS_AA, false, in.getSenderUDPPayload());
}
try {
newOut.addAnswer(in, rec);
} catch (final IOException e) {
newOut.setFlags(newOut.getFlags() | DNSConstants.FLAGS_TC);
newOut.setId(in.getId());
send(newOut);
newOut = new DNSOutgoing(DNSConstants.FLAGS_QR_RESPONSE | DNSConstants.FLAGS_AA, false, in.getSenderUDPPayload());
newOut.addAnswer(in, rec);
}
return newOut;
}
/**
* Send an outgoing multicast DNS message.
*
* @param out
* @exception IOException
*/
public void send(DNSOutgoing out) throws IOException {
if (!out.isEmpty()) {
byte[] message = out.data();
final DatagramPacket packet = new DatagramPacket(message, message.length, _group, DNSConstants.MDNS_PORT);
if (logger.isLoggable(Level.FINEST)) {
try {
final DNSIncoming msg = new DNSIncoming(packet);
if (logger.isLoggable(Level.FINEST)) {
logger.finest("send(" + this.getName() + ") JmDNS out:" + msg.print(true));
}
} catch (final IOException e) {
logger.throwing(getClass().toString(), "send(" + this.getName() + ") - JmDNS can not parse what it sends!!!", e);
}
}
final MulticastSocket ms = _socket;
if (ms != null && !ms.isClosed()) {
ms.send(packet);
}
}
}
/*
* (non-Javadoc)
* @see javax.jmdns.impl.DNSTaskStarter#purgeTimer()
*/
@Override
public void purgeTimer() {
DNSTaskStarter.Factory.getInstance().getStarter(this.getDns()).purgeTimer();
}
/*
* (non-Javadoc)
* @see javax.jmdns.impl.DNSTaskStarter#purgeStateTimer()
*/
@Override
public void purgeStateTimer() {
DNSTaskStarter.Factory.getInstance().getStarter(this.getDns()).purgeStateTimer();
}
/*
* (non-Javadoc)
* @see javax.jmdns.impl.DNSTaskStarter#cancelTimer()
*/
@Override
public void cancelTimer() {
DNSTaskStarter.Factory.getInstance().getStarter(this.getDns()).cancelTimer();
}
/*
* (non-Javadoc)
* @see javax.jmdns.impl.DNSTaskStarter#cancelStateTimer()
*/
@Override
public void cancelStateTimer() {
DNSTaskStarter.Factory.getInstance().getStarter(this.getDns()).cancelStateTimer();
}
/*
* (non-Javadoc)
* @see javax.jmdns.impl.DNSTaskStarter#startProber()
*/
@Override
public void startProber() {
DNSTaskStarter.Factory.getInstance().getStarter(this.getDns()).startProber();
}
/*
* (non-Javadoc)
* @see javax.jmdns.impl.DNSTaskStarter#startAnnouncer()
*/
@Override
public void startAnnouncer() {
DNSTaskStarter.Factory.getInstance().getStarter(this.getDns()).startAnnouncer();
}
/*
* (non-Javadoc)
* @see javax.jmdns.impl.DNSTaskStarter#startRenewer()
*/
@Override
public void startRenewer() {
DNSTaskStarter.Factory.getInstance().getStarter(this.getDns()).startRenewer();
}
/*
* (non-Javadoc)
* @see javax.jmdns.impl.DNSTaskStarter#startCanceler()
*/
@Override
public void startCanceler() {
DNSTaskStarter.Factory.getInstance().getStarter(this.getDns()).startCanceler();
}
/*
* (non-Javadoc)
* @see javax.jmdns.impl.DNSTaskStarter#startReaper()
*/
@Override
public void startReaper() {
DNSTaskStarter.Factory.getInstance().getStarter(this.getDns()).startReaper();
}
/*
* (non-Javadoc)
* @see javax.jmdns.impl.DNSTaskStarter#startServiceInfoResolver(javax.jmdns.impl.ServiceInfoImpl)
*/
@Override
public void startServiceInfoResolver(ServiceInfoImpl info) {
DNSTaskStarter.Factory.getInstance().getStarter(this.getDns()).startServiceInfoResolver(info);
}
/*
* (non-Javadoc)
* @see javax.jmdns.impl.DNSTaskStarter#startTypeResolver()
*/
@Override
public void startTypeResolver() {
DNSTaskStarter.Factory.getInstance().getStarter(this.getDns()).startTypeResolver();
}
/*
* (non-Javadoc)
* @see javax.jmdns.impl.DNSTaskStarter#startServiceResolver(java.lang.String)
*/
@Override
public void startServiceResolver(String type) {
DNSTaskStarter.Factory.getInstance().getStarter(this.getDns()).startServiceResolver(type);
}
/*
* (non-Javadoc)
* @see javax.jmdns.impl.DNSTaskStarter#startResponder(javax.jmdns.impl.DNSIncoming, int)
*/
@Override
public void startResponder(DNSIncoming in, int port) {
DNSTaskStarter.Factory.getInstance().getStarter(this.getDns()).startResponder(in, port);
}
// REMIND: Why is this not an anonymous inner class?
/**
* Shutdown operations.
*/
protected class Shutdown implements Runnable {
/** {@inheritDoc} */
@Override
public void run() {
try {
_shutdown = null;
close();
} catch (Throwable exception) {
System.err.println("Error while shuting down. " + exception);
}
}
}
private final Object _recoverLock = new Object();
/**
* Recover jmdns when there is an error.
*/
public void recover() {
logger.finer(this.getName() + "recover()");
// We have an IO error, so let's try to recover; if anything happens, let's close it.
// This should cover the case of the IP address changing under our feet
if (this.isClosing() || this.isClosed() || this.isCanceling() || this.isCanceled()) {
return;
}
// We need some definite lock here as we may have multiple timers running in the same thread that will not be stopped by the reentrant lock
// in the state object. This is only a problem in this case as we are going to execute in a separate thread so that the timer can clear.
synchronized (_recoverLock) {
// Stop JmDNS
// This protects against recursive calls
if (this.cancelState()) {
logger.finer(this.getName() + "recover() thread " + Thread.currentThread().getName());
Thread recover = new Thread(this.getName() + ".recover()") {
/**
* {@inheritDoc}
*/
@Override
public void run() {
__recover();
}
};
recover.start();
}
}
}
void __recover() {
// Synchronize only if we are not already in process to prevent dead locks
if (logger.isLoggable(Level.FINER)) {
logger.finer(this.getName() + "recover() Cleanning up");
}
logger.warning("RECOVERING");
// Purge the timer
this.purgeTimer();
// We need to keep a copy for reregistration
final Collection<ServiceInfo> oldServiceInfos = new ArrayList<ServiceInfo>(getServices().values());
// Cancel all services
this.unregisterAllServices();
this.disposeServiceCollectors();
this.waitForCanceled(DNSConstants.CLOSE_TIMEOUT);
// Purge the canceler timer
this.purgeStateTimer();
// close multicast socket
this.closeMulticastSocket();
this.getCache().clear();
if (logger.isLoggable(Level.FINER)) {
logger.finer(this.getName() + "recover() All is clean");
}
if (this.isCanceled()) {
// All is clear now start the services
for (ServiceInfo info : oldServiceInfos) {
((ServiceInfoImpl) info).recoverState();
}
this.recoverState();
try {
this.openMulticastSocket(this.getLocalHost());
this.start(oldServiceInfos);
} catch (final Exception exception) {
logger.log(Level.WARNING, this.getName() + "recover() Start services exception ", exception);
}
logger.log(Level.WARNING, this.getName() + "recover() We are back!");
} else {
// We have a problem. We could not clear the state.
logger.log(Level.WARNING, this.getName() + "recover() Could not recover we are Down!");
if (this.getDelegate() != null) {
this.getDelegate().cannotRecoverFromIOError(this.getDns(), oldServiceInfos);
}
}
}
public void cleanCache() {
long now = System.currentTimeMillis();
for (DNSEntry entry : this.getCache().allValues()) {
try {
DNSRecord record = (DNSRecord) entry;
if (record.isExpired(now)) {
this.updateRecord(now, record, Operation.Remove);
this.getCache().removeDNSEntry(record);
} else if (record.isStale(now)) {
// we should query for the records we care about, i.e. those in the service collectors
this.renewServiceCollector(record);
}
} catch (Exception exception) {
logger.log(Level.SEVERE, this.getName() + ".Error while reaping records: " + entry, exception);
logger.severe(this.toString());
}
}
}
/**
* {@inheritDoc}
*/
@Override
public void close() {
if (this.isClosing()) {
return;
}
if (logger.isLoggable(Level.FINER)) {
logger.finer("Cancelling JmDNS: " + this);
}
// Stop JmDNS
// This protects against recursive calls
if (this.closeState()) {
// We got the tie break now clean up
// Stop the timer
logger.finer("Canceling the timer");
this.cancelTimer();
// Cancel all services
this.unregisterAllServices();
this.disposeServiceCollectors();
if (logger.isLoggable(Level.FINER)) {
logger.finer("Wait for JmDNS cancel: " + this);
}
this.waitForCanceled(DNSConstants.CLOSE_TIMEOUT);
// Stop the canceler timer
logger.finer("Canceling the state timer");
this.cancelStateTimer();
// Stop the executor
_executor.shutdown();
// close socket
this.closeMulticastSocket();
// remove the shutdown hook
if (_shutdown != null) {
Runtime.getRuntime().removeShutdownHook(_shutdown);
}
if (logger.isLoggable(Level.FINER)) {
logger.finer("JmDNS closed.");
}
}
advanceState(null);
}
/**
* {@inheritDoc}
*/
@Override
@Deprecated
public void printServices() {
System.err.println(toString());
}
/**
* {@inheritDoc}
*/
@Override
public String toString() {
final StringBuilder aLog = new StringBuilder(2048);
aLog.append("\n");
aLog.append("\t
aLog.append("\n\t");
aLog.append(_localHost);
aLog.append("\n\t
for (String key : _services.keySet()) {
aLog.append("\n\t\tService: ");
aLog.append(key);
aLog.append(": ");
aLog.append(_services.get(key));
}
aLog.append("\n");
aLog.append("\t
for (String key : _serviceTypes.keySet()) {
ServiceTypeEntry subtypes = _serviceTypes.get(key);
aLog.append("\n\t\tType: ");
aLog.append(subtypes.getType());
aLog.append(": ");
aLog.append(subtypes.isEmpty() ? "no subtypes" : subtypes);
}
aLog.append("\n");
aLog.append(_cache.toString());
aLog.append("\n");
aLog.append("\t
for (String key : _serviceCollectors.keySet()) {
aLog.append("\n\t\tService Collector: ");
aLog.append(key);
aLog.append(": ");
aLog.append(_serviceCollectors.get(key));
}
aLog.append("\n");
aLog.append("\t
for (String key : _serviceListeners.keySet()) {
aLog.append("\n\t\tService Listener: ");
aLog.append(key);
aLog.append(": ");
aLog.append(_serviceListeners.get(key));
}
return aLog.toString();
}
/**
* {@inheritDoc}
*/
@Override
public ServiceInfo[] list(String type) {
return this.list(type, DNSConstants.SERVICE_INFO_TIMEOUT);
}
/**
* {@inheritDoc}
*/
@Override
public ServiceInfo[] list(String type, long timeout) {
this.cleanCache();
// Implementation note: The first time a list for a given type is
// requested, a ServiceCollector is created which collects service
// infos. This greatly speeds up the performance of subsequent calls
// to this method. The caveats are, that 1) the first call to this
// method for a given type is slow, and 2) we spawn a ServiceCollector
// instance for each service type which increases network traffic a
// little.
String loType = type.toLowerCase();
boolean newCollectorCreated = false;
if (this.isCanceling() || this.isCanceled()) {
logger.fine("JmDNS Cancelling.");
return new ServiceInfo[0];
}
ServiceCollector collector = _serviceCollectors.get(loType);
if (collector == null) {
newCollectorCreated = _serviceCollectors.putIfAbsent(loType, new ServiceCollector(type)) == null;
collector = _serviceCollectors.get(loType);
if (newCollectorCreated) {
this.addServiceListener(type, collector, ListenerStatus.SYNCHONEOUS);
}
}
if (logger.isLoggable(Level.FINER)) {
logger.finer(this.getName() + "-collector: " + collector);
}
// At this stage the collector should never be null but it keeps findbugs happy.
return (collector != null ? collector.list(timeout) : new ServiceInfo[0]);
}
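/*
 * Illustrative usage sketch of the list() API above; the service type string and timeout
 * are example values only, not taken from this source.
 *
 *   JmDNS jmdns = JmDNS.create(InetAddress.getLocalHost());
 *   ServiceInfo[] infos = jmdns.list("_http._tcp.local.", 6000);
 *   for (ServiceInfo info : infos) {
 *       System.out.println(info.getQualifiedName() + " on port " + info.getPort());
 *   }
 *   jmdns.close();
 */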
/**
* {@inheritDoc}
*/
@Override
public Map<String, ServiceInfo[]> listBySubtype(String type) {
return this.listBySubtype(type, DNSConstants.SERVICE_INFO_TIMEOUT);
}
/**
* {@inheritDoc}
*/
@Override
public Map<String, ServiceInfo[]> listBySubtype(String type, long timeout) {
Map<String, List<ServiceInfo>> map = new HashMap<String, List<ServiceInfo>>(5);
for (ServiceInfo info : this.list(type, timeout)) {
String subtype = info.getSubtype().toLowerCase();
if (!map.containsKey(subtype)) {
map.put(subtype, new ArrayList<ServiceInfo>(10));
}
map.get(subtype).add(info);
}
Map<String, ServiceInfo[]> result = new HashMap<String, ServiceInfo[]>(map.size());
for (String subtype : map.keySet()) {
List<ServiceInfo> infoForSubType = map.get(subtype);
result.put(subtype, infoForSubType.toArray(new ServiceInfo[infoForSubType.size()]));
}
return result;
}
private void disposeServiceCollectors() {
if (logger.isLoggable(Level.FINER)) {
logger.finer("disposeServiceCollectors()");
}
for (String type : _serviceCollectors.keySet()) {
ServiceCollector collector = _serviceCollectors.get(type);
if (collector != null) {
this.removeServiceListener(type, collector);
_serviceCollectors.remove(type, collector);
}
}
}
/**
* Instances of ServiceCollector are used internally to speed up the performance of method <code>list(type)</code>.
*
* @see #list
*/
private static class ServiceCollector implements ServiceListener {
// private static Logger logger = Logger.getLogger(ServiceCollector.class.getName());
/**
* A set of collected service instance names.
*/
private final ConcurrentMap<String, ServiceInfo> _infos;
/**
* A set of collected service event waiting to be resolved.
*/
private final ConcurrentMap<String, ServiceEvent> _events;
/**
* This is the type we are listening for (only used for debugging).
*/
private final String _type;
/**
* This is used to force a wait on the first invocation of list.
*/
private volatile boolean _needToWaitForInfos;
public ServiceCollector(String type) {
super();
_infos = new ConcurrentHashMap<String, ServiceInfo>();
_events = new ConcurrentHashMap<String, ServiceEvent>();
_type = type;
_needToWaitForInfos = true;
}
/**
* A service has been added.
*
* @param event
* service event
*/
@Override
public void serviceAdded(ServiceEvent event) {
synchronized (this) {
ServiceInfo info = event.getInfo();
if ((info != null) && (info.hasData())) {
_infos.put(event.getName(), info);
} else {
String subtype = (info != null ? info.getSubtype() : "");
info = ((JmDNSImpl) event.getDNS()).resolveServiceInfo(event.getType(), event.getName(), subtype, true);
if (info != null) {
_infos.put(event.getName(), info);
} else {
_events.put(event.getName(), event);
}
}
}
}
/**
* A service has been removed.
*
* @param event
* service event
*/
@Override
public void serviceRemoved(ServiceEvent event) {
synchronized (this) {
_infos.remove(event.getName());
_events.remove(event.getName());
}
}
/**
* A service has been resolved. Its details are now available in the ServiceInfo record.
*
* @param event
* service event
*/
@Override
public void serviceResolved(ServiceEvent event) {
synchronized (this) {
_infos.put(event.getName(), event.getInfo());
_events.remove(event.getName());
}
}
/**
* Returns an array of all service infos which have been collected by this ServiceCollector.
*
* @param timeout
* timeout if the info list is empty.
* @return Service Info array
*/
public ServiceInfo[] list(long timeout) {
if (_infos.isEmpty() || !_events.isEmpty() || _needToWaitForInfos) {
long loops = (timeout / 200L);
if (loops < 1) {
loops = 1;
}
for (int i = 0; i < loops; i++) {
try {
Thread.sleep(200);
} catch (final InterruptedException e) {
/* Stub */
}
if (_events.isEmpty() && !_infos.isEmpty() && !_needToWaitForInfos) {
break;
}
}
}
_needToWaitForInfos = false;
return _infos.values().toArray(new ServiceInfo[_infos.size()]);
}
/**
* {@inheritDoc}
*/
@Override
public String toString() {
final StringBuffer aLog = new StringBuffer();
aLog.append("\n\tType: ");
aLog.append(_type);
if (_infos.isEmpty()) {
aLog.append("\n\tNo services collected.");
} else {
aLog.append("\n\tServices");
for (String key : _infos.keySet()) {
aLog.append("\n\t\tService: ");
aLog.append(key);
aLog.append(": ");
aLog.append(_infos.get(key));
}
}
if (_events.isEmpty()) {
aLog.append("\n\tNo event queued.");
} else {
aLog.append("\n\tEvents");
for (String key : _events.keySet()) {
aLog.append("\n\t\tEvent: ");
aLog.append(key);
aLog.append(": ");
aLog.append(_events.get(key));
}
}
return aLog.toString();
}
}
static String toUnqualifiedName(String type, String qualifiedName) {
String loType = type.toLowerCase();
String loQualifiedName = qualifiedName.toLowerCase();
if (loQualifiedName.endsWith(loType) && !(loQualifiedName.equals(loType))) {
return qualifiedName.substring(0, qualifiedName.length() - type.length() - 1);
}
return qualifiedName;
}
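/*
 * Worked example for toUnqualifiedName(): the type suffix is stripped from a fully
 * qualified service name (values are illustrative only).
 *
 *   toUnqualifiedName("_http._tcp.local.", "My Web Server._http._tcp.local.")  // -> "My Web Server"
 *   toUnqualifiedName("_http._tcp.local.", "_http._tcp.local.")                // unchanged (equals the type)
 */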
public Map<String, ServiceInfo> getServices() {
return _services;
}
public void setLastThrottleIncrement(long lastThrottleIncrement) {
this._lastThrottleIncrement = lastThrottleIncrement;
}
public long getLastThrottleIncrement() {
return _lastThrottleIncrement;
}
public void setThrottle(int throttle) {
this._throttle = throttle;
}
public int getThrottle() {
return _throttle;
}
public static Random getRandom() {
return _random;
}
public void ioLock() {
_ioLock.lock();
}
public void ioUnlock() {
_ioLock.unlock();
}
public void setPlannedAnswer(DNSIncoming plannedAnswer) {
this._plannedAnswer = plannedAnswer;
}
public DNSIncoming getPlannedAnswer() {
return _plannedAnswer;
}
void setLocalHost(HostInfo localHost) {
this._localHost = localHost;
}
public Map<String, ServiceTypeEntry> getServiceTypes() {
return _serviceTypes;
}
public MulticastSocket getSocket() {
return _socket;
}
public InetAddress getGroup() {
return _group;
}
@Override
public Delegate getDelegate() {
return this._delegate;
}
@Override
public Delegate setDelegate(Delegate delegate) {
Delegate previous = this._delegate;
this._delegate = delegate;
return previous;
}
}
|
package org.minimalj.backend;
import java.util.List;
import java.util.Objects;
import org.minimalj.application.Application;
import org.minimalj.application.Configuration;
import org.minimalj.backend.repository.CountTransaction;
import org.minimalj.backend.repository.DeleteEntityTransaction;
import org.minimalj.backend.repository.DeleteTransaction;
import org.minimalj.backend.repository.EntityTransaction;
import org.minimalj.backend.repository.InsertTransaction;
import org.minimalj.backend.repository.ReadCriteriaTransaction;
import org.minimalj.backend.repository.ReadEntityTransaction;
import org.minimalj.backend.repository.SaveTransaction;
import org.minimalj.backend.repository.UpdateTransaction;
import org.minimalj.backend.repository.WriteTransaction;
import org.minimalj.repository.Repository;
import org.minimalj.repository.TransactionalRepository;
import org.minimalj.repository.query.Criteria;
import org.minimalj.repository.query.Query;
import org.minimalj.security.Authentication;
import org.minimalj.security.Subject;
import org.minimalj.transaction.Isolation;
import org.minimalj.transaction.Transaction;
import org.minimalj.util.Codes;
/**
* A Backend is responsible for executing the transactions.
* It can do this by keeping a database (SqlRepository) or by
* delegating everything to another Backend (SocketBackend).<p>
*
* Every Frontend needs a Backend. But a Backend can serve more
* than one Frontend.<p>
*
* The Backend keeps a repository that may only be accessed within
* a transaction. See EntityTransaction.<p>
*
* The Backend configuration must be done with system properties.
* These are handled in the initBackend method. The configuration
* cannot be changed during the lifetime of an application VM.<p>
*
* The configuration properties:
* <UL>
* <LI><code>MjBackendAddress</code> and <code>MjBackendPort</code>: if
* these two are set the transactions are delegated to a remote
* SocketBackendServer.</LI>
* <LI><code>MjBackend</code>: if this property is set it specifies
* the classname of the Backend.</LI>
* <LI>If the Backend should run in the same JVM as the Frontend you
* don't need to set any property</LI>
* </UL>
*/
public class Backend {
private static Backend instance;
public static Backend create() {
String backendAddress = Configuration.get("MjBackendAddress");
String backendPort = Configuration.get("MjBackendPort", "8020");
if (backendAddress != null) {
return new SocketBackend(backendAddress, Integer.valueOf(backendPort));
}
if (Configuration.available("MjBackend")) {
return Configuration.getClazz("MjBackend", Backend.class);
}
return new Backend();
}
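/*
 * Configuration sketch matching the class javadoc above, which states that the Backend is
 * configured with system properties; host, port, and class name below are placeholder values.
 *
 *   // delegate all transactions to a remote SocketBackendServer
 *   System.setProperty("MjBackendAddress", "backend.example.com");
 *   System.setProperty("MjBackendPort", "8020");
 *
 *   // or plug in a custom Backend implementation by class name
 *   // System.setProperty("MjBackend", "com.example.MyBackend");
 *
 *   Backend backend = Backend.getInstance();
 */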
private Repository repository = null;
private Boolean authenticationActive = null;
private Authentication authentication = null;
private InheritableThreadLocal<Transaction<?>> currentTransaction = new InheritableThreadLocal<>();
public static void setInstance(Backend instance) {
Objects.requireNonNull(instance);
if (Backend.instance != null) {
throw new IllegalStateException("Not allowed to change instance of " + Backend.class.getSimpleName());
}
Backend.instance = instance;
instance.init();
}
public static Backend getInstance() {
if (instance == null) {
setInstance(create());
}
return instance;
}
protected void setRepository(Repository repository) {
this.repository = repository;
}
public Repository getRepository() {
if (!isInTransaction()) {
throw new IllegalStateException("Repository may only be accessed from within a " + Transaction.class.getSimpleName());
}
if (repository == null) {
repository = Repository.create(Application.getInstance());
}
return repository;
}
protected Authentication createAuthentication() {
return Authentication.create();
}
public final Authentication getAuthentication() {
if (authentication == null) {
if (authenticationActive == null) {
authentication = createAuthentication();
authenticationActive = authentication != null;
}
}
return authentication;
}
public boolean isAuthenticationActive() {
return getAuthentication() != null;
}
public boolean isInTransaction() {
return currentTransaction.get() != null;
}
// These methods are shortcuts for CRUD - Transactions.
// note: if they are called within a transaction a nested
// transaction is created.
public static <T> T read(Class<T> clazz, Object id) {
return execute(new ReadEntityTransaction<>(clazz, id));
}
public static <T> List<T> find(Class<T> clazz, Query query) {
return execute(new ReadCriteriaTransaction<>(clazz, query));
}
public static <T> long count(Class<T> clazz, Criteria criteria) {
return execute(new CountTransaction<>(clazz, criteria));
}
public static <T> Object insert(T object) {
return execute(new InsertTransaction<>(object));
}
public static <T> void update(T object) {
execute(new UpdateTransaction<>(object));
}
public static <T> T save(T object) {
return execute(new SaveTransaction<>(object));
}
public static <T> int delete(Class<T> clazz, Criteria criteria) {
return execute(new DeleteTransaction<>(clazz, criteria));
}
public static <T> void delete(T object) {
execute(new DeleteEntityTransaction<>(object));
}
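/*
 * Usage sketch for the CRUD shortcuts above; Person is a hypothetical entity class.
 *
 *   Object id = Backend.insert(new Person("Ada"));
 *   Person person = Backend.read(Person.class, id);
 *   person.name = "Ada Lovelace";
 *   Backend.update(person);
 *   Backend.delete(person);
 */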
public static <T> T execute(Transaction<T> transaction) {
return getInstance().doExecute(transaction);
}
public <T> T doExecute(Transaction<T> transaction) {
if (isAuthenticationActive()) {
if (!transaction.hasAccess(Subject.getCurrent())) {
throw new IllegalStateException(transaction + " forbidden");
}
}
Transaction<?> outerTransaction = currentTransaction.get();
try {
currentTransaction.set(transaction);
if (getRepository() instanceof TransactionalRepository) {
TransactionalRepository transactionalRepository = (TransactionalRepository) getRepository();
return doExecute(transaction, transactionalRepository);
} else {
return transaction.execute();
}
} finally {
currentTransaction.set(outerTransaction);
handleCodeCache(transaction);
}
}
protected <T> void handleCodeCache(Transaction<T> transaction) {
if (transaction instanceof WriteTransaction || transaction instanceof DeleteEntityTransaction) {
// We could check whether the transaction concerns a code class, but the
// removeFromCache method is probably faster than calling 'isCode'.
Codes.removeFromCache(((EntityTransaction<?, ?>) transaction).getEntityClazz());
}
}
private <T> T doExecute(Transaction<T> transaction, TransactionalRepository transactionalRepository) {
Isolation.Level isolationLevel = transaction.getIsolation();
T result;
boolean commit = false;
try {
transactionalRepository.startTransaction(isolationLevel.getLevel());
result = transaction.execute();
commit = true;
} finally {
transactionalRepository.endTransaction(commit);
}
return result;
}
private void init() {
if (Configuration.available("MjInit")) {
Transaction<?> init = Configuration.getClazz("MjInit", Transaction.class);
init.execute();
}
}
}
|
package com.redhat.ceylon.eclipse.code.refactor;
import static com.redhat.ceylon.eclipse.util.EditorUtil.getDocument;
import static com.redhat.ceylon.eclipse.util.Indents.getDefaultLineDelimiter;
import static com.redhat.ceylon.eclipse.util.Nodes.getNodeLength;
import static com.redhat.ceylon.eclipse.util.Nodes.getNodeStartOffset;
import static org.eclipse.ltk.core.refactoring.RefactoringStatus.createErrorStatus;
import java.util.ArrayList;
import java.util.List;
import org.eclipse.core.runtime.CoreException;
import org.eclipse.core.runtime.IProgressMonitor;
import org.eclipse.core.runtime.OperationCanceledException;
import org.eclipse.jface.text.IDocument;
import org.eclipse.ltk.core.refactoring.CompositeChange;
import org.eclipse.ltk.core.refactoring.DocumentChange;
import org.eclipse.ltk.core.refactoring.RefactoringStatus;
import org.eclipse.ltk.core.refactoring.TextChange;
import org.eclipse.ltk.core.refactoring.TextFileChange;
import org.eclipse.text.edits.InsertEdit;
import org.eclipse.text.edits.MultiTextEdit;
import org.eclipse.text.edits.ReplaceEdit;
import org.eclipse.ui.IEditorPart;
import com.redhat.ceylon.compiler.typechecker.context.PhasedUnit;
import com.redhat.ceylon.compiler.typechecker.tree.Node;
import com.redhat.ceylon.compiler.typechecker.tree.Tree;
import com.redhat.ceylon.compiler.typechecker.tree.Visitor;
import com.redhat.ceylon.eclipse.util.Escaping;
import com.redhat.ceylon.eclipse.util.Nodes;
import com.redhat.ceylon.model.typechecker.model.Class;
import com.redhat.ceylon.model.typechecker.model.Interface;
import com.redhat.ceylon.model.typechecker.model.Package;
import com.redhat.ceylon.model.typechecker.model.Parameter;
import com.redhat.ceylon.model.typechecker.model.ParameterList;
import com.redhat.ceylon.model.typechecker.model.Type;
import com.redhat.ceylon.model.typechecker.model.TypeDeclaration;
import com.redhat.ceylon.model.typechecker.model.Unit;
public class AliasRefactoring extends AbstractRefactoring {
private static class FindAliasedTypeVisitor
extends Visitor {
private Type type;
private List<Node> nodes = new ArrayList<Node>();
private FindAliasedTypeVisitor(Type type) {
this.type = type;
}
public List<Node> getNodes() {
return nodes;
}
@Override
public void visit(Tree.Type that) {
super.visit(that);
Type t = that.getTypeModel();
if (t!=null && type.isExactly(t)) {
nodes.add(that);
}
}
@Override
public void visit(Tree.BaseTypeExpression that) {
super.visit(that);
TypeDeclaration td = type.getDeclaration();
if (isClassWithParameters(td)) {
Type t = that.getTarget().getType();
if (t!=null && type.isExactly(t)) {
nodes.add(that);
}
}
}
}
private static boolean isClassWithParameters(
TypeDeclaration td) {
return td instanceof Class &&
((Class) td).getParameterList()!=null;
}
private String newName;
private String typeString;
private final Type type;
// private boolean renameValuesAndFunctions;
public Node getNode() {
return node;
}
public AliasRefactoring(IEditorPart editor) {
super(editor);
if (rootNode!=null) {
if (node instanceof Tree.Type) {
Tree.Type t = (Tree.Type) node;
type = t.getTypeModel();
newName = null;
typeString = Nodes.toString(t, tokens);
}
else {
type = null;
}
}
else {
type = null;
}
}
@Override
public boolean isEnabled() {
return type!=null &&
project != null;
}
public int getCount() {
return type==null ?
0 : countDeclarationOccurrences();
}
@Override
int countReferences(Tree.CompilationUnit cu) {
FindAliasedTypeVisitor frv =
new FindAliasedTypeVisitor(type);
cu.visit(frv);
return frv.getNodes().size();
}
public String getName() {
return "Introduce Type Alias";
}
public RefactoringStatus checkInitialConditions(
IProgressMonitor pm)
throws CoreException,
OperationCanceledException {
// Check parameters retrieved from editor context
return new RefactoringStatus();
}
public RefactoringStatus checkFinalConditions(
IProgressMonitor pm)
throws CoreException,
OperationCanceledException {
if (newName==null || !newName.matches("^[a-zA-Z_]\\w*$")) {
return createErrorStatus(
"Not a legal Ceylon identifier");
}
else if (Escaping.KEYWORDS.contains(newName)) {
return createErrorStatus(
"'" + newName + "' is a Ceylon keyword");
}
else {
int ch = newName.codePointAt(0);
if (!Character.isUpperCase(ch)) {
return createErrorStatus(
"Not an initial uppercase identifier");
}
}
/*Declaration existing = declaration.getContainer()
.getMemberOrParameter(declaration.getUnit(),
newName, null, false);
if (null!=existing && !existing.equals(declaration)) {
return createWarningStatus("An existing declaration named '" +
newName + "' already exists in the same scope");
}*/
return new RefactoringStatus();
}
public CompositeChange createChange(
IProgressMonitor pm)
throws CoreException,
OperationCanceledException {
List<PhasedUnit> units = getAllUnits();
pm.beginTask(getName(), units.size());
CompositeChange cc = new CompositeChange(getName());
int i=0;
for (PhasedUnit pu: units) {
Package editorPackage =
editor.getParseController()
.getRootNode()
.getUnit()
.getPackage();
boolean inSamePackage =
pu.getPackage()
.equals(editorPackage);
if (inSamePackage && searchInFile(pu)) {
TextFileChange tfc = newTextFileChange(pu);
renameInFile(tfc, cc,
pu.getCompilationUnit());
pm.worked(i++);
}
}
if (searchInEditor()) {
DocumentChange dc = newDocumentChange();
renameInFile(dc, cc,
editor.getParseController()
.getRootNode());
pm.worked(i++);
}
pm.done();
return cc;
}
private int aliasOffset;
int getAliasOffset() {
return aliasOffset;
}
int getAliasLength() {
return typeString.length();
}
void renameInFile(TextChange tfc,
CompositeChange cc,
Tree.CompilationUnit root) {
tfc.setEdit(new MultiTextEdit());
if (type!=null) {
Unit editorUnit =
editor.getParseController()
.getRootNode()
.getUnit();
Unit unit = root.getUnit();
if (editorUnit.getPackage()
.equals(unit.getPackage())) {
IDocument doc = getDocument(tfc);
String delim =
getDefaultLineDelimiter(document);
if (newName!=null) {
for (Node node: getNodesToRename(root)) {
renameNode(tfc, node, root);
}
}
if (unit.getFilename()
.equals(editorUnit.getFilename())) {
Type t = getType();
TypeDeclaration td = t.getDeclaration();
StringBuffer header = new StringBuffer();
aliasOffset =
doc.getLength() +
delim.length()*2;
if (td.isShared()) {
header.append("shared ");
aliasOffset += 7;
}
StringBuffer args = new StringBuffer();
String initialName = getInitialName();
if (isClassWithParameters(td)) {
Class c = (Class) td;
aliasOffset += 6;
header.append("class ")
.append(initialName)
.append("(");
args.append("(");
boolean first = true;
ParameterList pl =
c.getParameterList();
for (Parameter p:
pl.getParameters()) {
if (first) {
first = false;
}
else {
header.append(", ");
args.append(", ");
}
String ptype =
t.getTypedParameter(p)
.getFullType()
.asString(unit);
String pname = p.getName();
header.append(ptype)
.append(" ")
.append(pname);
args.append(pname);
}
header.append(")");
args.append(")");
}
else if (td instanceof Interface) {
aliasOffset += 10;
header.append("interface ")
.append(initialName);
}
else {
aliasOffset += 6;
header.append("alias ")
.append(initialName);
}
tfc.addEdit(new InsertEdit(
doc.getLength(),
delim + delim +
header + " => " +
t.asString(unit) +
args + ";"));
}
}
// if (renameValuesAndFunctions) {
// for (Tree.Identifier id: getIdentifiersToRename(root)) {
// renameIdentifier(tfc, id, root);
// }
// }
}
if (cc!=null && tfc.getEdit().hasChildren()) {
cc.add(tfc);
}
}
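/*
 * Sketch of the declaration this refactoring appends at the end of the unit. The shapes
 * follow the header building above; the concrete types and names are illustrative only.
 *
 *   alias Number => Integer|Float;                         // plain type alias
 *   interface Strings => List<String>;                     // aliased interface
 *   shared class Labelled(String label) => Box(label);     // aliased class with parameters
 */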
String getInitialName() {
return newName==null ?
typeString : newName;
}
public List<Node> getNodesToRename(
Tree.CompilationUnit root) {
FindAliasedTypeVisitor frv =
new FindAliasedTypeVisitor(type);
root.visit(frv);
return frv.getNodes();
}
protected void renameNode(TextChange tfc, Node node,
Tree.CompilationUnit root) {
tfc.addEdit(new ReplaceEdit(
getNodeStartOffset(node),
getNodeLength(node),
newName));
}
/*public boolean isRenameValuesAndFunctions() {
return renameValuesAndFunctions;
}
public void setRenameValuesAndFunctions(boolean renameLocals) {
this.renameValuesAndFunctions = renameLocals;
}*/
public void setNewName(String text) {
newName = text;
}
public Type getType() {
return type;
}
public String getNewName() {
return newName;
}
}
|
package org.nuxeo.common;
import java.io.File;
import java.util.Properties;
/**
* @author <a href="mailto:bs@nuxeo.com">Bogdan Stefanescu</a>
*/
public class Environment {
/**
* Constants that identifies possible hosts for the framework.
*/
public static final String JBOSS_HOST = "JBoss";
public static final String NXSERVER_HOST = "NXServer"; // Jetty or GF3 embedded
public static final String TOMCAT_HOST = "Tomcat";
// the home directory
public static final String HOME_DIR = "org.nuxeo.app.home";
// the web root
public static final String WEB_DIR = "org.nuxeo.app.web";
// the config dir
public static final String CONFIG_DIR = "org.nuxeo.app.config";
// the data dir
public static final String DATA_DIR = "org.nuxeo.app.data";
// the log dir
public static final String LOG_DIR = "org.nuxeo.app.log";
// the application layout (optional)
// directory containing nuxeo runtime osgi bundles
public static final String BUNDLES_DIR = "nuxeo.osgi.app.bundles";
public static final String BUNDLES = "nuxeo.osgi.bundles";
private static Environment DEFAULT;
public static void setDefault(Environment env) {
DEFAULT = env;
}
public static Environment getDefault() {
return DEFAULT;
}
protected final File home;
protected File data;
protected File log;
protected File config;
protected File web;
protected File temp;
protected final Properties properties;
protected String[] args;
protected boolean isAppServer;
protected String hostAppName;
protected String hostAppVersion;
public Environment(File home) {
this(home, null);
}
public Environment(File home, Properties properties) {
this.home = home;
this.properties = new Properties();
if (properties != null) {
loadProperties(properties);
}
this.properties.put(HOME_DIR, this.home.getAbsolutePath());
}
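/*
 * Usage sketch; the path and host name are placeholder values.
 *
 *   Environment env = new Environment(new File("/opt/nuxeo"));
 *   env.setHostApplicationName(Environment.TOMCAT_HOST);
 *   Environment.setDefault(env);
 *
 *   File config = Environment.getDefault().getConfig();  // -> /opt/nuxeo/config
 */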
public File getHome() {
return home;
}
public boolean isApplicationServer() {
return isAppServer;
}
public void setIsApplicationServer(boolean isAppServer) {
this.isAppServer = isAppServer;
}
public String getHostApplicationName() {
return hostAppName;
}
public String getHostApplicationVersion() {
return hostAppVersion;
}
public void setHostApplicationName(String name) {
hostAppName = name;
}
public void setHostApplicationVersion(String version) {
hostAppVersion = version;
}
public File getTemp() {
if (temp == null) {
temp = new File(home, "tmp");
}
return temp;
}
public void setTemp(File temp) {
this.temp = temp;
}
public File getConfig() {
if (config == null) {
config = new File(home, "config");
}
return config;
}
public void setConfig(File config) {
this.config = config;
}
public File getLog() {
if (log == null) {
log = new File(home, "log");
}
return log;
}
public void setLog(File log) {
this.log = log;
}
public File getData() {
if (data == null) {
data = new File(home, "data");
}
return data;
}
public void setData(File data) {
this.data = data;
}
public File getWeb() {
if (web == null) {
web = new File(home, "web");
}
return web;
}
public void setWeb(File web) {
this.web = web;
}
public String[] getCommandLineArguments() {
return args;
}
public void setCommandLineArguments(String[] args) {
this.args = args;
}
public String getProperty(String key) {
return properties.getProperty(key);
}
public String getProperty(String key, String defaultValue) {
String val = properties.getProperty(key);
return val == null ? defaultValue : val;
}
public void setProperty(String key, String value) {
properties.put(key, value);
}
public Properties getProperties() {
return properties;
}
public void loadProperties(Properties properties) {
this.properties.putAll(properties);
this.properties.put(HOME_DIR, home.getAbsolutePath());
}
public boolean isJBoss() {
return JBOSS_HOST.equals(hostAppName);
}
public boolean isJetty() {
return NXSERVER_HOST.equals(hostAppName);
}
public boolean isTomcat() {
return TOMCAT_HOST.equals(hostAppName);
}
}
|
package me.coley.recaf.ui;
import javafx.scene.Node;
import javafx.scene.control.*;
import javafx.stage.FileChooser;
import javafx.stage.FileChooser.ExtensionFilter;
import javafx.stage.Stage;
import me.coley.recaf.Recaf;
import me.coley.recaf.command.impl.Export;
import me.coley.recaf.config.ConfBackend;
import me.coley.recaf.control.gui.GuiController;
import me.coley.recaf.mapping.MappingImpl;
import me.coley.recaf.plugin.PluginsManager;
import me.coley.recaf.plugin.api.MenuProviderPlugin;
import me.coley.recaf.search.QueryType;
import me.coley.recaf.ui.controls.*;
import me.coley.recaf.util.ClasspathUtil;
import me.coley.recaf.util.IOUtil;
import me.coley.recaf.util.Log;
import me.coley.recaf.util.self.SelfUpdater;
import me.coley.recaf.workspace.*;
import org.apache.commons.io.FileUtils;
import java.awt.Desktop;
import java.io.File;
import java.io.IOException;
import java.net.URL;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.List;
import static me.coley.recaf.util.LangUtil.translate;
import static me.coley.recaf.util.Log.*;
import static me.coley.recaf.util.UiUtil.*;
import static java.nio.charset.StandardCharsets.UTF_8;
/**
* Primary menu.
*
* @author Matt
*/
public class MainMenu extends MenuBar {
private final FileChooser fcLoadApp = new FileChooser();
private final FileChooser fcLoadMap = new FileChooser();
private final FileChooser fcSaveApp = new FileChooser();
private final FileChooser fcSaveWorkspace = new FileChooser();
private final GuiController controller;
private final Menu mFile;
private final Menu mFileRecent;
private final Menu mMapping;
private final Menu mConfig;
private final Menu mThemeEditor;
private final Menu mSearch;
private final Menu mHistory;
private final Menu mAttach;
private final Menu mPlugins;
private final Menu mHelp;
/**
* @param controller
* Controller context.
*/
public MainMenu(GuiController controller) {
// TODO: Properly manage disabled state of menu items
this.controller = controller;
mFile = new Menu(translate("ui.menubar.file"));
mFileRecent = new Menu(translate("ui.menubar.file.recent"));
mMapping = new Menu(translate("ui.menubar.mapping"));
updateRecent();
if (InstrumentationResource.isActive()) {
// Agent file menu
mFile.getItems().addAll(
new ActionMenuItem(translate("ui.menubar.file.addlib"), this::addLibrary),
new ActionMenuItem(translate("ui.menubar.file.saveapp"), this::saveApplication),
new ActionMenuItem(translate("ui.menubar.file.agentexport"), this::saveAgent));
} else {
// Normal file menu
mFile.getItems().addAll(
new ActionMenuItem(translate("ui.menubar.file.load"), this::load),
mFileRecent,
new ActionMenuItem(translate("ui.menubar.file.addlib"), this::addLibrary),
new ActionMenuItem(translate("ui.menubar.file.saveapp"), this::saveApplication),
new ActionMenuItem(translate("ui.menubar.file.saveworkspace"), this::saveWorkspace));
// Mapping menu
Menu mApply = new Menu(translate("ui.menubar.mapping.apply"));
for (MappingImpl impl : MappingImpl.values())
mApply.getItems().add(new ActionMenuItem(impl.getDisplay(), () -> applyMap(impl)));
mMapping.getItems().add(mApply);
}
mConfig = new ActionMenu(translate("ui.menubar.config"), this::showConfig);
mThemeEditor = new ActionMenu(translate("ui.menubar.themeeditor"), this::showThemeEditor);
mSearch = new Menu(translate("ui.menubar.search"));
mSearch.getItems().addAll(
new ActionMenuItem(translate("ui.menubar.search.string"), this::searchString),
new ActionMenuItem(translate("ui.menubar.search.value"), this::searchValue),
new ActionMenuItem(translate("ui.menubar.search.cls_reference"), this::searchClassReference),
new ActionMenuItem(translate("ui.menubar.search.mem_reference"), this::searchMemberReference),
new ActionMenuItem(translate("ui.menubar.search.declare"), this::searchDeclaration),
new ActionMenuItem(translate("ui.menubar.search.insn"), this::searchInsn));
mAttach = new ActionMenu(translate("ui.menubar.attach"), this::attach);
mHistory = new ActionMenu(translate("ui.menubar.history"), this::showHistory);
mHelp = new Menu(translate("ui.menubar.help"));
if (SelfUpdater.hasUpdate()) {
mHelp.getItems().add(0,
new ActionMenuItem(translate("ui.menubar.help.update") + SelfUpdater.getLatestVersion(),
this::showUpdatePrompt));
}
mHelp.getItems().addAll(
new ActionMenuItem(translate("ui.menubar.help.documentation"), this::showDocumentation),
new ActionMenuItem(translate("ui.menubar.help.info"), this::showInformation),
new ActionMenuItem(translate("ui.menubar.help.contact"), this::showContact)
);
mPlugins = new Menu(translate("ui.menubar.plugins"));
if (PluginsManager.getInstance().hasPlugins())
mPlugins.getItems()
.add(new ActionMenuItem(translate("ui.menubar.plugins.manage"), this::openPluginManager));
mPlugins.getItems()
.add(new ActionMenuItem(translate("ui.menubar.plugins.opendir"), this::openPluginDirectory));
if (!PluginsManager.getInstance().ofType(MenuProviderPlugin.class).isEmpty()) {
mPlugins.getItems().add(new SeparatorMenuItem());
PluginsManager.getInstance().ofType(MenuProviderPlugin.class).forEach(plugin -> {
mPlugins.getItems().add(plugin.createMenu());
});
}
getMenus().addAll(mFile, mConfig, /* mThemeEditor, */ mSearch, mHistory);
if (!InstrumentationResource.isActive()) {
if (ClasspathUtil.classExists("com.sun.tools.attach.VirtualMachine")) {
getMenus().add(mAttach);
}
getMenus().add(mMapping);
}
getMenus().addAll(mPlugins, mHelp);
// Setup file-choosers
ExtensionFilter filter = new ExtensionFilter(translate("ui.fileprompt.open.extensions"),
"*.jar", "*.war", "*.class", "*.json");
fcLoadApp.setTitle(translate("ui.fileprompt.open"));
fcLoadApp.getExtensionFilters().add(filter);
fcLoadApp.setSelectedExtensionFilter(filter);
fcSaveApp.setTitle(translate("ui.fileprompt.export"));
fcSaveApp.getExtensionFilters().add(filter);
fcSaveApp.setSelectedExtensionFilter(filter);
filter = new ExtensionFilter(translate("ui.fileprompt.open.extensions"),
"*.txt", "*.map", "*.mapping", "*.enigma", "*.pro", "*.srg", "*.tiny", "*.tinyv2");
fcLoadMap.setTitle(translate("ui.fileprompt.open"));
fcLoadMap.getExtensionFilters().add(filter);
fcLoadMap.setSelectedExtensionFilter(filter);
filter = new ExtensionFilter(translate("ui.fileprompt.open.extensions"), "*.json");
fcSaveWorkspace.setTitle(translate("ui.fileprompt.export"));
fcSaveWorkspace.getExtensionFilters().add(filter);
fcSaveWorkspace.setSelectedExtensionFilter(filter);
}
/**
* Open string search window.
*
* @return Search window.
*/
public SearchPane searchString() {
return search(QueryType.STRING, "string");
}
/**
* Open value search window.
*
* @return Search window.
*/
public SearchPane searchValue() {
return search(QueryType.VALUE, "value");
}
/**
* Open class reference search window.
*
* @return Search window.
*/
public SearchPane searchClassReference() {
return search(QueryType.CLASS_REFERENCE, "cls_reference");
}
/**
* Open member reference search window.
*
* @return Search window.
*/
public SearchPane searchMemberReference() {
return search(QueryType.MEMBER_REFERENCE, "mem_reference");
}
/**
* Open declaration search window.
*
* @return Search window.
*/
public SearchPane searchDeclaration() {
return search(QueryType.MEMBER_DEFINITION, "declare");
}
/**
* Open instruction text search window.
*
* @return Search window.
*/
public SearchPane searchInsn() {
return search(QueryType.INSTRUCTION_TEXT, "insn");
}
private SearchPane search(QueryType type, String key) {
SearchPane pane = new SearchPane(controller, type);
Stage stage = controller.windows().window(
translate("ui.menubar.search") + ":" + translate("ui.menubar.search." + key),
pane, 600, 400);
stage.show();
stage.toFront();
return pane;
}
/**
* Prompt a file open prompt to load an application.
*/
private void load() {
fcLoadApp.setInitialDirectory(config().getRecentLoadDir());
File file = fcLoadApp.showOpenDialog(null);
if(file != null) {
controller.loadWorkspace(IOUtil.toPath(file), null);
}
}
/**
* Adds a selected resource to the current workspace.
*/
private void addLibrary() {
fcLoadApp.setInitialDirectory(config().getRecentLoadDir());
List<File> files = fcLoadApp.showOpenMultipleDialog(null);
if (files != null) {
for (File file : files) {
try {
JavaResource resource = FileSystemResource.of(file.toPath());
controller.getWorkspace().getLibraries().add(resource);
controller.windows().getMainWindow().getNavigator().refresh();
} catch(Exception ex) {
error(ex, "Failed to add library: {}", file.getName());
ExceptionAlert.show(ex, "Failed to add library: " + file.getName());
}
}
}
}
/**
* Save the current application to a file.
*/
public void saveApplication() {
if (controller.getWorkspace() == null) {
return;
}
fcSaveApp.setInitialDirectory(config().getRecentSaveAppDir());
File file = fcSaveApp.showSaveDialog(null);
if (file != null) {
Export exporter = new Export();
exporter.setController(controller);
exporter.output = file;
try {
exporter.call();
config().recentSaveApp = file.getAbsolutePath();
} catch(Exception ex) {
error(ex, "Failed to save application to file: {}", file.getName());
ExceptionAlert.show(ex, "Failed to save application to file: " + file.getName());
}
}
}
/**
* Load a file and apply mappings of the given type.
*
* @param impl Mapping implementation type.
*/
private void applyMap(MappingImpl impl) {
fcLoadMap.setInitialDirectory(config().getRecentLoadDir());
File file = fcLoadMap.showOpenDialog(null);
if (file != null) {
try {
impl.create(file.toPath(), controller.getWorkspace())
.accept(controller.getWorkspace().getPrimary());
} catch (Exception ex) {
error(ex, "Failed to apply mappings: {}", file.getName());
ExceptionAlert.show(ex, "Failed to apply mappings: " + file.getName());
}
}
}
/**
* Show update prompt.
*/
public void showUpdatePrompt() {
Stage stage = controller.windows()
.window(translate("ui.menubar.help.update") + SelfUpdater.getLatestVersion(),
new UpdatePane(controller));
stage.show();
stage.toFront();
}
/**
* Display history window.
*/
private void showHistory() {
Stage stage = controller.windows().getHistoryWindow();
if(stage == null) {
stage = controller.windows().window(translate("ui.menubar.history"), new HistoryPane(controller), 800, 600);
controller.windows().setHistoryWindow(stage);
}
stage.show();
stage.toFront();
}
/**
* Display contact information window.
*/
private void showContact() {
Stage stage = controller.windows().getContactWindow();
if(stage == null) {
stage = controller.windows().window(translate("ui.menubar.help.contact"), new ContactInfoPane());
controller.windows().setContactWindow(stage);
}
stage.show();
stage.toFront();
}
/**
* Open documentation page in browser.
*/
private void showDocumentation() {
try {
Desktop.getDesktop().browse(new URL(Recaf.DOC_URL).toURI());
} catch(Exception ex) {
Log.error(ex, "Failed to open documentation url");
}
}
/**
* Display system information window.
*/
private void showInformation() {
Stage stage = controller.windows().getInformationWindow();
if(stage == null) {
stage = controller.windows().window(translate("ui.menubar.help.info"), new SysInfoPane());
controller.windows().setInformationWindow(stage);
}
stage.show();
stage.toFront();
}
/**
* Display plugin manager window.
*/
private void openPluginManager() {
Stage stage = controller.windows().getPluginsWindow();
if(stage == null) {
stage = controller.windows().window(translate("ui.menubar.plugins"), new PluginManagerPane(), 600, 233);
controller.windows().setPluginsWindow(stage);
}
stage.show();
stage.toFront();
}
/**
* Open plugin directory.
*/
private void openPluginDirectory() {
try {
Desktop.getDesktop().browse(Recaf.getDirectory("plugins").toUri());
} catch(IOException ex) {
Log.error(ex, "Failed to open plugins directory");
}
}
/**
* Display attach window.
*/
private void attach() {
Stage stage = controller.windows().getAttachWindow();
if(stage == null) {
stage = controller.windows().window(translate("ui.menubar.attach"), new AttachPane(controller), 800, 600);
controller.windows().setAttachWindow(stage);
}
stage.show();
stage.toFront();
}
/**
* Save the current application via instrumentation.
*/
public void saveAgent() {
try {
InstrumentationResource.getInstance().save();
} catch(Throwable t) {
error(t, "Failed to save agent changes");
ExceptionAlert.show(t, "Failed to save agent changes");
}
}
/**
* Save the current workspace to a file.
*/
private void saveWorkspace() {
if (controller.getWorkspace() == null) {
return;
}
fcSaveWorkspace.setInitialDirectory(config().getRecentSaveWorkspaceDir());
File file = fcSaveWorkspace.showSaveDialog(null);
if (file != null) {
String json = WorkspaceIO.toJson(controller.getWorkspace());
try {
FileUtils.write(file, json, UTF_8);
config().recentSaveWorkspace = file.getAbsolutePath();
} catch(IOException ex) {
error(ex, "Failed to save workspace to file: {}", file.getName());
ExceptionAlert.show(ex, "Failed to save workspace to file: " + file.getName());
}
}
}
/**
* Update the recent files menu.
*/
public void updateRecent() {
mFileRecent.getItems().clear();
config().getRecentFiles().forEach(this::addRecentItem);
}
/**
* @param path
* Path to add to recent files menu.
*/
private void addRecentItem(String path) {
Path fspath = Paths.get(path);
if(Files.exists(fspath)) {
String name = fspath.getFileName().toString();
Node graphic = new IconView(getFileIcon(name));
mFileRecent.getItems().add(new ActionMenuItem(name, graphic, () -> controller.loadWorkspace(fspath, null)));
} else {
// Not a valid file, so we remove it from the files list
config().recentFiles.remove(path);
}
}
/**
* Open config window.
*/
private void showConfig() {
Stage stage = controller.windows().getConfigWindow();
if(stage == null) {
stage = controller.windows().window(translate("ui.menubar.config"), new ConfigTabs(controller));
controller.windows().setConfigWindow(stage);
}
stage.show();
stage.toFront();
}
/**
* Open theme editor window.
*/
private void showThemeEditor() {
Themes.showThemeEditor(controller);
}
/**
* @return Private config.
*/
private ConfBackend config() {
return controller.config().backend();
}
}
|
package org.nybatis.core.model;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.Spliterator;
import java.util.function.Consumer;
import com.fasterxml.jackson.databind.annotation.JsonSerialize;
import org.nybatis.core.reflection.Reflector;
import org.nybatis.core.reflection.mapper.NListSerializer;
import org.nybatis.core.util.StringUtil;
import org.nybatis.core.validation.Assertion;
import org.nybatis.core.validation.Validator;
/**
* Multiple Data aggregated by NMap
*
* @author nayasis
*/
@JsonSerialize( using = NListSerializer.class )
public class NList implements Serializable, Cloneable, Iterable<NMap> {
private static final long serialVersionUID = -3169472792493027837L;
private static final String NO_KEY = NList.class.getName() + ".NO_KEY";
protected Map<Object, Integer> header = new LinkedHashMap<>();
protected Map<Object, String> alias = new LinkedHashMap<>();
protected List<NMap> dataBody = new ArrayList<>();
public NList() {}
public NList( String json ) {
fromJson( json );
}
public NList( NList initialData ) {
if( initialData == null || initialData.size() == 0 ) return;
dataBody.addAll( initialData.dataBody );
header.putAll( initialData.header );
alias.putAll( initialData.alias );
}
public NList( List<?> initialData ) {
addRow( initialData );
}
public NList( List<NMap> data, Set<?> header ) {
if( data == null || header == null ) return;
for( NMap nmap : data )
dataBody.add( nmap );
for( Object key : header )
this.header.put( key, data.size() );
}
public NList fromJson( String json ) {
if( StringUtil.isEmpty(json) ) return this;
List<Map<String, Object>> maps = new Reflector().toListFromJson( json );
for( Map map : maps ) {
addRow( map );
}
return refreshKey();
}
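/*
 * Usage sketch for json construction; the json literal is an example value.
 *
 *   NList list = new NList( "[{\"key\":\"1\",\"val\":\"AAA\"},{\"key\":\"2\",\"val\":\"BBB\"}]" );
 *   list.size();     // -> 2 rows
 *   list.keySize();  // -> 2 keys ("key" and "val")
 */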
/**
 * Add aliases for the header keys in order, starting after the last key that already has an alias.
 *
 * @param alias alias texts to assign to the remaining keys
 * @return self-instance
 */
public NList addAliases( Object... alias ) {
int startIndex = this.alias.size();
Iterator<Object> iterator = header.keySet().iterator();
for( int i = 0; i < startIndex; i++ ) {
if( ! iterator.hasNext() ) return this;
iterator.next();
}
for( Object text : alias ) {
if( ! iterator.hasNext() ) break;
this.alias.put( iterator.next(), StringUtil.nvl( text ) );
}
return this;
}
/**
 * Set an alias for the given key.
 *
 * @param key       key to alias
 * @param alias     alias text
 * @param overwrite if false, an existing alias for the key is kept
 * @return self-instance
 */
public NList setAlias( Object key, Object alias, boolean overwrite ) {
if( containsKey(key) ) {
if( overwrite || ! this.alias.containsKey( key ) ) {
this.alias.put( key, StringUtil.nvl( alias ) );
}
}
return this;
}
/**
 * Set an alias for the given key, overwriting any existing alias.
 *
 * @param key   key to alias
 * @param alias alias text
 * @return self-instance
 */
public NList setAlias( Object key, String alias ) {
setAlias( key, alias, true );
return this;
}
/**
 * Get the alias assigned to the given key.
 *
 * @param key key
 * @return alias text, or null if the key is not contained in the header
 */
public String getAlias( Object key ) {
return containsKey( key ) ? alias.get( key ) : null;
}
/**
 * Get the aliases of all header keys; a key without an alias falls back to the key itself.
 *
 * @return list of alias texts in header order
 */
public List<String> getAliases() {
refreshKey();
List<String> aliases = new ArrayList<>();
for( Object key : header.keySet() ) {
aliases.add( StringUtil.nvl( getAlias(key), StringUtil.nvl(key)) );
}
return aliases;
}
/**
 * Add keys to the header if they are not already present.
 *
 * @param key keys to add
 * @return self-instance
 */
public NList addKey( Object... key ) {
for( Object val : key ) {
if( ! containsKey( val ) ) {
header.put( val, 0 );
}
}
return this;
}
/**
* Refresh Header and Key information
*/
public NList refreshKey() {
Map<Object, Integer> newHeader = new HashMap<>();
for( int i = dataBody.size() - 1; i >= 0; i-- ) {
for( Object key : dataBody.get( i ).keySet() ) {
if( newHeader.containsKey(key) ) continue;
newHeader.put( key, i + 1 );
}
}
Map<Object, Integer> buffer = new LinkedHashMap<>();
Set<Object> bufferKeyset = new LinkedHashSet<>();
bufferKeyset.addAll( header.keySet() );
for( Object key : header.keySet() ) {
if( newHeader.containsKey(key) ) {
buffer.put( key, newHeader.get( key ) );
bufferKeyset.remove( key );
}
}
for( Object key : bufferKeyset ) {
buffer.put( key, newHeader.get( key ) );
}
header.clear();
header.putAll( buffer );
return this;
}
/**
 * Add a single value for the given key, appending a new row when the key's column is already full.
 *
 * @param key   key (column)
 * @param value value to add
 * @return self-instance
 */
public NList addRow( Object key, Object value ) {
int dataSize = size( key );
int totalSize = size();
if( totalSize == dataSize ) {
NMap row = new NMap();
row.put( key, value );
dataBody.add( row );
} else {
dataBody.get( dataSize ).put( key, value );
}
header.put( key, ++dataSize );
return this;
}
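/*
 * Sketch of column-wise building with addRow(key, value): each call appends the value to
 * the column's next free row (example data only).
 *
 *   NList grid = new NList();
 *   grid.addRow( "name", "Ada"  ).addRow( "name", "Alan" );
 *   grid.addRow( "year", 1815   ).addRow( "year", 1912  );
 *   grid.size();                 // -> 2
 *   grid.getString( "name", 1 ); // -> "Alan"
 */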
/**
* add row with json text
*
* <pre>
 * {@link NList} data = new {@link NList}();
 *
 * data.addRow( "{key:'1', val:'AAA'}" );
 *
 * </pre>
 *
 * @param jsonString json text representing a single row
*/
public NList addRow( String jsonString ) {
addRow( new NMap( jsonString ) );
return this;
}
public NList addRow( Map<?, ?> data ) {
addRow( new NMap( data ) ) ;
return this;
}
public NList addRow( NMap data ) {
if( data == null ) data = new NMap();
dataBody.add( data );
int size = dataBody.size();
for( Object key : data.keySet() ) {
header.put( key, size );
}
return this;
}
public NList addRow( NList data ) {
if( data != null ) {
dataBody.addAll( data.dataBody );
refreshKey();
}
return this;
}
/**
 * Add multiple rows; each element may be an NMap, a Map, a json String, or any bean convertible to an NMap.
 *
 * @param data rows to add
 * @return self-instance
 */
@SuppressWarnings( "rawtypes" )
public NList addRow( List<?> data ) {
if( data != null ) {
for( Object e : data ) {
if( e instanceof NMap ) {
addRow( (NMap) e );
} else if( e instanceof Map ) {
addRow( new NMap( (Map) e ) );
} else if( e instanceof String ) { // json string
addRow( new NMap( (String) e ) );
} else {
addRow( new NMap( e ) );
}
}
}
return this;
}
public int size( Object key ) {
return Validator.nvl( header.get( key ), 0 );
}
public int size() {
return dataBody.size();
}
/**
 * Get all values stored under the given key as a List.
 *
 * @param key key
 * @return list of the key's values (empty if the key is not contained)
 */
@SuppressWarnings( { "unchecked", "rawtypes" } )
public List toList( String key ) {
List result = new ArrayList<>();
if( ! containsKey( key ) ) return result;
for( NMap row : dataBody ) {
result.add(row.get( key ) );
}
return result;
}
/**
 * Convert rows to a list of beans.
 *
 * @param <T>   bean type
 * @param klass bean class
 * @return list of beans, one per row
 */
public <T> List<T> toList( Class<T> klass ) {
List<T> result = new ArrayList<T>();
for( NMap row : dataBody ) {
result.add( row.toBean( klass ) );
}
return result;
}
/**
* Get List consisted with NMap
*
* @return Data List
*/
public List<NMap> toList() {
return dataBody;
}
/**
 * Remove the row at the given index and decrement the size counters of its keys.
 *
 * @param index row index
 * @return self-instance
 */
public NList removeRow( int index ) {
if( index < 0 ) return this;
for( Object key : getRow(index).keySet() ) {
subtractKeySize( key );
}
dataBody.remove( index );
return this;
}
private void subtractKeySize( Object key ) {
if( header.containsKey( key ) ) {
header.put( key, Math.max(header.get( key ) - 1, 0) );
}
}
/**
 * Remove a key (column) from the header and from every row.
 *
 * @param key name of column
 * @return self-instance
 */
public NList removeKey( Object key ) {
header.remove( key );
for( NMap row : dataBody ) {
row.remove( key );
}
return this;
}
/**
 * Remove the key (column) at the given header index.
 *
 * @param keyIndex index of the key in the header
 * @return self-instance
 */
public NList removeKeyBy( int keyIndex ) {
Object key = getKey( keyIndex );
if( key != null ) removeKey( key );
return this;
}
/**
 * Set a value in the given row for the given key.
 *
 * @param key      key (column)
 * @param rowIndex row index
 * @param value    value to set
 * @return self-instance
 */
public NList set( Object key, int rowIndex, Object value ) {
NMap data = dataBody.get( rowIndex );
data.put( key, value );
if( ! containsKey( key ) ) {
header.put( key, ++rowIndex );
}
return this;
}
/**
 * Set a value in the given row for the key at the given header index.
 *
 * @param keyIndex index of the key in the header
 * @param rowIndex row index
 * @param value    value to set
 * @return self-instance
 */
public NList setBy( int keyIndex, int rowIndex, Object value ) {
Object key = getKey( keyIndex );
if( key != null )
set( key, rowIndex, value );
return this;
}
/**
* Set row data
*
* @param rowIndex index
* @param map map data
*/
public NList setRow( int rowIndex, NMap map ) {
dataBody.set( rowIndex, map );
int size = rowIndex + 1;
for( Object key : map.keySet() ) {
if( containsKey( key ) ) continue;
header.put( key, size );
}
return this;
}
/**
* Get Row Data
* @param rowIndex index
* @return row map data
*/
public NMap getRow( int rowIndex ) {
return dataBody.get( rowIndex );
}
public NList setBy( int index, Map<Object, Object> data ) {
setRow( index, new NMap( data ) );
return this;
}
public Object get( Object key, int index ) {
NMap data = dataBody.get( index );
return data == null ? null : data.get( key );
}
public Object getBy( int keyIndex, int index ) {
return get( getKey(keyIndex), index );
}
public String getString( Object key, int index ) {
return dataBody.get( index ).getString(key);
}
public String getStringBy( int keyIndex, int index ) {
return dataBody.get( index ).getString( getKey( keyIndex) );
}
public int getInt( Object key, int index ) {
return dataBody.get( index).getInt( key );
}
public int getIntBy( int keyIndex, int index ) {
return dataBody.get( index ).getInt( getKey( keyIndex) );
}
public long getLong( Object key, int index ) {
return dataBody.get( index ).getLong( key );
}
public long getLongBy( int keyIndex, int index ) {
return dataBody.get( index ).getLong( getKey(keyIndex) );
}
public float getFloat( Object key, int index ) {
return dataBody.get( index ).getFloat( key );
}
public float getFloatBy( int keyIndex, int index ) {
return dataBody.get( index ).getFloat( getKey( keyIndex) );
}
public double getDouble( Object key, int index ) {
return dataBody.get( index ).getDouble( key );
}
public double getDoubleBy( int keyIndex, int index ) {
return dataBody.get( index ).getDouble( getKey( keyIndex) );
}
public boolean containsKey( Object key ) {
return header.containsKey( key );
}
public boolean contains( NMap row ) {
return dataBody.contains( row );
}
public int keySize() {
return header.size();
}
public Set<Object> keySet() {
return header.keySet();
}
public Set<Object> keySetCloned() {
Set<Object> result = new LinkedHashSet<>();
result.addAll( keySet() );
return result;
}
private Object getKey( int keyIndex ) {
Assertion.isTrue( 0 <= keyIndex && keyIndex < keySize(), new IndexOutOfBoundsException( String.format( "Index[%d] is out of bounds from 0 to %d", keyIndex, keySize() - 1 ) ) );
Iterator<Object> iterator = header.keySet().iterator();
for( int i = 0; i < keyIndex; i++ )
iterator.next();
return iterator.next();
}
/**
 * Remove duplicated rows.
 *
 * @return new NList containing only the distinct rows
 */
public NList deduplicate() {
NList result = new NList();
for( NMap row : dataBody ) {
if( result.contains( row ) ) continue;
result.addRow( row );
}
return result;
}
/**
* Remove data include header information
*/
public NList clear() {
header.clear();
alias.clear();
dataBody.clear();
return this;
}
/**
* Remove data only
*/
public NList clearData() {
dataBody.clear();
return this;
}
/* (non-Javadoc)
* @see java.lang.Object#equals(java.lang.Object)
*/
public boolean equals( Object object ) {
if( object == null ) return false;
if( object == this ) return true;
if( ! (object instanceof NList) ) return false;
NList table = (NList) object;
refreshKey();
table.refreshKey();
if( ! header.equals( table.header ) ) return false;
if( ! alias.equals( table.alias ) ) return false;
if( size() != table.size() ) return false;
for( int i = 0, iCnt = size(); i < iCnt; i++ ) {
NMap rowThis = getRow( i );
NMap rowOther = table.getRow( i );
if( rowThis == null ) {
if( rowOther != null ) return false;
} else {
if( ! rowThis.equals( rowOther ) ) return false;
}
}
return true;
}
/**
* print data only first 1000 rows
*
* @return grid data
*/
public String toString() {
return new NListPrinter(this).toString(true, false);
}
/**
* print all row data
*
* @return grid data
*/
public String toDebugString() {
return toDebugString( true, true );
}
/**
* Print data
*
* @param printHeader if true, print header.
* @param printAllRow if true, print all row.
* @return grid data
*/
public String toDebugString( boolean printHeader, boolean printAllRow ) {
return new NListPrinter(this).toString(printHeader, printAllRow);
}
/* (non-Javadoc)
* @see java.lang.Object#clone()
*/
public NList clone() {
return new Reflector().clone(NList.this);
}
/**
* Sort data
*
* @param comparator comparator to determine the order of the list.
* A {@code null} value indicates that the elements' <i>natural ordering</i> should be used.
*/
public NList sort( Comparator<NMap> comparator ) {
Collections.sort( dataBody, comparator );
return this;
}
@Override
public Iterator<NMap> iterator() {
final int size = size();
return new Iterator<NMap>() {
int index = 0;
public boolean hasNext() {
return index < size;
}
public NMap next() {
NMap row = getRow( index );
index++;
return row;
}
public void remove() {
throw new UnsupportedOperationException();
}
@Override
public void forEachRemaining( Consumer<? super NMap> action ) {
throw new UnsupportedOperationException();
}
};
}
@Override
public void forEach( Consumer<? super NMap> action ) {
Objects.requireNonNull( action );
for( NMap row : this ) {
action.accept( row );
}
}
@Override
public Spliterator<NMap> spliterator() {
throw new UnsupportedOperationException();
}
}
|
package nbrenwald;
/* Solutions to Chapter 1, Arrays and Strings */
public class ArraysAndStrings {
/* Exercise 1-3 */
public static boolean isPermutation(String a, String b) {
if (a != null && b != null && (a.length() == b.length())) {
// Option 1: count the chars of each string, e.g. in a hash map, and compare the counts.
// Option 2: sort the chars of both strings, then walk along comparing. O(n log n).
// If the chars are ASCII we can do a counting sort easily with an array of 127 elements:
// one loop to increment counts for a, one loop to decrement for b,
// and one loop to check that all counts are 0.
// Open questions: are 'a' and 'A' considered the same? Is the input restricted to ASCII?
int[] asciiArray = new int[127];
char[] aArray = a.toCharArray();
char[] bArray = b.toCharArray();
for (char ca : aArray) {
asciiArray[ca]++;
}
for (char cb : bArray) {
asciiArray[cb]--;
}
for (int i : asciiArray) {
if (i != 0)
return false;
}
return true;
}
return false;
}
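/*
 * Example behaviour, assuming case-sensitive ASCII input:
 *
 *   isPermutation("listen", "silent");  // true
 *   isPermutation("abc", "abd");        // false
 *   isPermutation("abc", "abcd");       // false (lengths differ)
 */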
/* Exercise 1-4 */
public static String replaceSubstring(String a, String b, int trueLength) {
char[] aArray = a.toCharArray();
char[] bArray = b.toCharArray();
// Told we can assume there is enough space at the end, so we won't check this for the moment.
// Idea: work backwards from the true length, shifting characters towards the real end of the buffer,
// with one pointer at the end of the initial string and one at the end of the full buffer.
int i = trueLength - 1;
int j = aArray.length - 1;
while (i >= 0) {
if (aArray[i] == ' ') {
// copy the replacement string backwards into the buffer
for (int x = bArray.length - 1; x >= 0; x--) {
aArray[j] = bArray[x];
j--;
}
i--;
} else {
aArray[j] = aArray[i];
i--;
j--;
}
}
return String.valueOf(aArray);
}
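/*
 * Example in the spirit of the classic "URLify" exercise: the char buffer already contains
 * enough trailing space and trueLength counts only the real text (values illustrative).
 *
 *   replaceSubstring("Mr John Smith    ", "%20", 13);  // -> "Mr%20John%20Smith"
 */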
/* Exercise 1-5 */
public static String compress(String uncompressed){
// Only upper and lower case ascii characters. need to keep the original ordering.
if(uncompressed.length()>2){//a string with fewer than 3 chars could not be improved
char[] uncompressedArray = uncompressed.toCharArray();
StringBuilder sb = new StringBuilder(uncompressedArray.length * 2); // worst case: every run has length 1, so at most two output chars per input char
char previousChar=uncompressedArray[0];
int previousCount=1;
for(int i =1; i< uncompressedArray.length; i++){
if(uncompressedArray[i]==previousChar){
previousCount++;
}
else{
//do the write
sb.append(previousChar);
sb.append(previousCount);
previousChar = uncompressedArray[i];
previousCount=1;
}
}
sb.append(previousChar);
sb.append(previousCount);
if(sb.length()<uncompressed.length())return sb.toString();
}
return uncompressed;
}
/* Exercise 1-6 */
public static int[][] rotateSquareMatrix(int[][] squareMatrix){
return null;
}
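/*
* Illustrative usage sketch; the sample inputs below are arbitrary and the expected
* outputs follow from the methods above (rotateSquareMatrix is still unimplemented,
* so it is not exercised here).
*/
public static void main(String[] args) {
System.out.println(isPermutation("listen", "silent")); // true: same character counts
System.out.println(isPermutation("apple", "pale")); // false: lengths differ
// "Mr John Smith" has a true length of 13; the four trailing spaces leave room for two "%20" replacements.
System.out.println(replaceSubstring("Mr John Smith    ", "%20", 13)); // "Mr%20John%20Smith"
System.out.println(compress("aabcccccaaa")); // "a2b1c5a3"
System.out.println(compress("abc")); // "abc" (compressed form "a1b1c1" would be longer)
}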
}
|
package org.pignat.app.qr2gerber;
import java.awt.Graphics;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.image.BufferedImage;
import javax.swing.JButton;
import javax.swing.JFileChooser;
import javax.swing.JFrame;
import javax.swing.JLabel;
import javax.swing.JPanel;
import javax.swing.JTextField;
import javax.swing.SwingUtilities;
import net.miginfocom.swing.MigLayout;
import java.awt.Dimension;
import java.io.FileNotFoundException;
import java.io.PrintWriter;
import java.util.Hashtable;
import java.util.Map;
import com.google.zxing.EncodeHintType;
import com.google.zxing.WriterException;
import com.google.zxing.client.j2se.MatrixToImageWriter;
import com.google.zxing.qrcode.decoder.ErrorCorrectionLevel;
public class App extends JFrame implements ActionListener {
class CustomDraw extends JPanel {
int lx;
int ly;
BufferedImage img = null;
CustomDraw(int x, int y) {
lx = x;
ly = y;
}
protected void paintComponent(Graphics g) {
super.paintComponent(g);
if (img != null) {
g.drawImage(img, 0, 0, this);
}
}
public Dimension getPreferredSize() {
return new Dimension(lx, ly);
}
public Dimension getMinimumSize() {
return new Dimension(lx, ly);
}
public void setImage(BufferedImage i)
{
img = i;
repaint();
}
}
private JButton gobutton;
private JButton browseButton;
private JTextField stringField;
private JTextField sizeField;
private JTextField destField;
private CustomDraw preview;
public App() {
setLayout(new MigLayout());
stringField = new JTextField("https://github.com/RandomReaper/qr2gerber", 35);
sizeField = new JTextField("10.0", 35);
gobutton = new JButton("Generate");
gobutton.addActionListener(this);
browseButton = new JButton("browse");
browseButton.addActionListener(this);
destField = new JTextField("/tmp/toto.txt");
preview = new CustomDraw(177, 177);
setDefaultCloseOperation(EXIT_ON_CLOSE);
setTitle("qr2gerber");
setSize(400, 300);
setLocationRelativeTo(null);
add(new JLabel("String"));
add(stringField , "wrap");
add(new JLabel("Size (mm): ") , "gap unrelated");
add(sizeField , "wrap");
add(new JLabel("Preview"));
add(preview , "wrap, grow");
add(new JLabel("Save to"));
add(destField , "grow");
add(browseButton , "wrap");
add(gobutton , "wrap");
pack();
}
public void actionPerformed(ActionEvent e) {
if (e.getSource() == gobutton)
{
try {
double size = Double.parseDouble(sizeField.getText());
QRPlusInfo qrcode = null;
Map<EncodeHintType, Object> encodingOptions = new Hashtable<EncodeHintType, Object>();
encodingOptions.put(EncodeHintType.ERROR_CORRECTION, ErrorCorrectionLevel.H);
qrcode = QRPlusInfo.encode(stringField.getText(), encodingOptions).invert();
QRPlusInfo.encode(stringField.getText());
preview.setImage(MatrixToImageWriter.toBufferedImage(QRPlusInfo.encode(stringField.getText()).m_qrcode));
preview.setSize(qrcode.size(), qrcode.size());
QRtoGerber q2g = new QRtoGerber(qrcode, size);
PrintWriter out = new PrintWriter(destField.getText());
out.println(q2g.toGerber());
out.close();
} catch (FileNotFoundException e1) {
// TODO Auto-generated catch block
e1.printStackTrace();
} catch (WriterException e1) {
// TODO Auto-generated catch block
e1.printStackTrace();
}
}
if (e.getSource() == browseButton)
{
final JFileChooser fc = new JFileChooser();
int returnVal = fc.showOpenDialog(this);
if (returnVal == JFileChooser.APPROVE_OPTION)
{
destField.setText(fc.getSelectedFile().getAbsolutePath());
}
}
}
public static void main(String args[]) {
SwingUtilities.invokeLater(new Runnable() {
public void run() {
App ex = new App();
ex.setVisible(true);
}
});
}
}
|
package quantisan.qte_lmax;
import com.lmax.api.Callback;
import com.lmax.api.FailureResponse;
import com.lmax.api.LmaxApi;
import com.lmax.api.Session;
import com.lmax.api.account.LoginCallback;
import com.lmax.api.account.LoginRequest;
import com.lmax.api.orderbook.OrderBookEvent;
import com.lmax.api.orderbook.OrderBookEventListener;
import com.lmax.api.orderbook.OrderBookSubscriptionRequest;
public class SimpleBot implements LoginCallback, OrderBookEventListener {
private final static long GBP_USD_INSTRUMENT_ID = 4001;
private Session session;
public static void main(String[] args) {
String demoUrl = "https://testapi.lmaxtrader.com";
LmaxApi lmaxApi = new LmaxApi(demoUrl);
SimpleBot simpleBot = new SimpleBot();
lmaxApi.login(new LoginRequest("quantisan2", "J63VFqmXBaQStdAxKnD7", LoginRequest.ProductType.CFD_DEMO), simpleBot);
}
@Override
public void onLoginSuccess(Session session) {
System.out.printf("Logged in, account details: %s%n", session.getAccountDetails());
this.session = session;
session.registerOrderBookEventListener(this);
session.subscribe(new OrderBookSubscriptionRequest(GBP_USD_INSTRUMENT_ID), new Callback()
{
public void onSuccess()
{
System.out.println("Successful subscription");
}
public void onFailure(FailureResponse failureResponse)
{
System.err.printf("Failed to subscribe: %s%n", failureResponse);
}
});
session.start();
}
@Override
public void onLoginFailure(FailureResponse failureResponse) {
System.err.printf("Failed to login, reason: %s%n", failureResponse);
}
@Override
public void notify(OrderBookEvent orderBookEvent) {
System.out.printf("Market data: %s%n", orderBookEvent);
}
}
|
package org.yakindu.sct.ui.editor.preferences;
import org.eclipse.gmf.runtime.diagram.ui.figures.DiagramColorConstants;
import org.eclipse.gmf.runtime.diagram.ui.preferences.DiagramPreferenceInitializer;
import org.eclipse.gmf.runtime.diagram.ui.preferences.IPreferenceConstants;
import org.eclipse.jface.preference.IPreferenceStore;
import org.eclipse.jface.preference.PreferenceConverter;
import org.eclipse.swt.SWT;
import org.eclipse.swt.graphics.Color;
import org.eclipse.swt.graphics.FontData;
import org.yakindu.sct.ui.editor.DiagramActivator;
/**
*
* @author andreas muelder
*
*/
public class PreferenceInitializer extends DiagramPreferenceInitializer
implements StatechartColorConstants {
@Override
public void initializeDefaultPreferences() {
super.initializeDefaultPreferences();
// Line colors
Color lineColor = DiagramColorConstants.darkGray;
PreferenceConverter.setDefault(getPreferenceStore(),
IPreferenceConstants.PREF_LINE_COLOR, lineColor.getRGB());
// rulers and grid defaults
getPreferenceStore().setDefault(
IPreferenceConstants.PREF_SNAP_TO_GEOMETRY, true);
// set default font
FontData defaultFont = new FontData("Arial", 10 , SWT.NONE);
PreferenceConverter.setDefault(getPreferenceStore(),
IPreferenceConstants.PREF_DEFAULT_FONT, defaultFont);
}
@Override
protected IPreferenceStore getPreferenceStore() {
return DiagramActivator.getDefault().getPreferenceStore();
}
}
|
package org.oasis_eu.portal.core.model.catalog;
import com.fasterxml.jackson.annotation.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.*;
public class CatalogEntry {
private static final Logger logger = LoggerFactory.getLogger(CatalogEntry.class);
private String id;
private CatalogEntryType type;
@JsonProperty("name")
private String defaultName;
@JsonProperty("description")
private String defaultDescription;
private Map<String, String> localizedNames = new HashMap<>();
private Map<String, String> localizedDescriptions = new HashMap<>();
private Map<String, String> localizedIcons = new HashMap<>();
@JsonProperty("icon")
private String defaultIcon;
@JsonProperty("service_uri")
private String url;
@JsonProperty("notification_uri")
private String notificationUrl;
@JsonIgnore
private List<AppstoreCategory> categories = new ArrayList<>();
@JsonProperty("category_ids")
private List<String> categoryIds = new ArrayList<>();
@JsonProperty("payment_option")
private PaymentOption paymentOption;
@JsonProperty("target_audience")
private Audience targetAudience;
@JsonProperty("territory_id")
private String territoryId;
@JsonProperty("provider_id")
private String providerId;
/**
* App store publication status
*/
@JsonProperty("visible")
private boolean visible = false;
@JsonProperty("redirect_uris")
private List<String> redirectUris;
@JsonProperty("post_logout_redirect_uris")
private List<String> postLogoutRedirectUris;
public String getId() {
return id;
}
public void setId(String id) {
this.id = id;
}
public CatalogEntryType getType() {
return type;
}
public void setType(CatalogEntryType type) {
this.type = type;
}
public String getDefaultName() {
return defaultName;
}
public void setDefaultName(String defaultName) {
this.defaultName = defaultName;
}
@JsonIgnore
public void setLocalizedNames(Map<String, String> localizedNames) {
this.localizedNames = localizedNames;
}
public String getDefaultDescription() {
return defaultDescription;
}
public void setDefaultDescription(String defaultDescription) {
this.defaultDescription = defaultDescription;
}
@JsonIgnore
public void setLocalizedDescriptions(Map<String, String> localizedDescriptions) {
this.localizedDescriptions = localizedDescriptions;
}
@JsonIgnore
public void setLocalizedIcons(Map<String, String> localizedIcons) {
this.localizedIcons = localizedIcons;
}
@JsonAnySetter
public void setTranslation(String key, String value) {
if (key.startsWith("name#")) {
localizedNames.put(key.substring("name#".length()), value);
} else if (key.startsWith("description#")) {
localizedDescriptions.put(key.substring("description#".length()), value);
} else if (key.startsWith("icon#")) {
localizedIcons.put(key.substring("icon#".length()), value);
} else {
logger.info("Discarding unknown property {}", key);
}
}
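// Example: an incoming JSON property "name#fr" with value "Mon service" is stored in
// localizedNames under the key "fr"; getTranslations() below re-emits it as "name#fr"
// when the entry is serialized, so the round trip preserves the localized fields.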
@JsonAnyGetter
public Map<String, String> getTranslations() {
Map<String, String> result = new HashMap<>();
localizedNames.entrySet().forEach(e -> result.put("name#" + e.getKey(), e.getValue()));
localizedDescriptions.entrySet().forEach(e -> result.put("description#" + e.getKey(), e.getValue()));
localizedIcons.entrySet().forEach(e -> result.put("icon#" + e.getKey(), e.getValue()));
return result;
}
public String getDefaultIcon() {
return defaultIcon;
}
public void setDefaultIcon(String defaultIcon) {
this.defaultIcon = defaultIcon;
}
public List<AppstoreCategory> getCategories() {
return categories;
}
public void setCategories(List<AppstoreCategory> categories) {
this.categories = categories;
}
public List<String> getCategoryIds() {
return categoryIds;
}
public void setCategoryIds(List<String> categoryIds) {
this.categoryIds = categoryIds;
}
public String getUrl() {
return url;
}
public void setUrl(String url) {
this.url = url;
}
public String getNotificationUrl() {
return notificationUrl;
}
public void setNotificationUrl(String notificationUrl) {
this.notificationUrl = notificationUrl;
}
public PaymentOption getPaymentOption() {
return paymentOption;
}
public void setPaymentOption(PaymentOption paymentOption) {
this.paymentOption = paymentOption;
}
public Audience getTargetAudience() {
return targetAudience;
}
public void setTargetAudience(Audience targetAudience) {
this.targetAudience = targetAudience;
}
public String getTerritoryId() {
return territoryId;
}
public void setTerritoryId(String territoryId) {
this.territoryId = territoryId;
}
public boolean isVisible() {
return visible;
}
public void setVisible(boolean visible) {
this.visible = visible;
}
public String getProviderId() {
return providerId;
}
public void setProviderId(String providerId) {
this.providerId = providerId;
}
public String getName(Locale locale) {
if (localizedNames.containsKey(locale.getLanguage())) {
return localizedNames.get(locale.getLanguage());
} else {
return defaultName;
}
}
public String getDescription(Locale locale) {
if (localizedDescriptions.containsKey(locale.getLanguage())) {
return localizedDescriptions.get(locale.getLanguage());
} else {
return defaultDescription;
}
}
public String getIcon(Locale locale) {
if (localizedIcons.containsKey(locale.getLanguage())) {
return localizedIcons.get(locale.getLanguage());
} else {
return defaultIcon;
}
}
public void setRedirectUris(List<String> redirectUris) {
this.redirectUris = redirectUris;
}
public void setPostLogoutRedirectUris(List<String> postLogoutRedirectUris) {
this.postLogoutRedirectUris = postLogoutRedirectUris;
}
@Override
public String toString() {
return "CatalogEntry{" +
"id='" + id + '\'' +
", type=" + type +
", defaultName='" + defaultName + '\'' +
", defaultDescription='" + defaultDescription + '\'' +
", localizedNames=" + localizedNames +
", localizedDescriptions=" + localizedDescriptions +
", localizedIcons=" + localizedIcons +
", defaultIcon='" + defaultIcon + '\'' +
", url='" + url + '\'' +
", notificationUrl='" + notificationUrl + '\'' +
", categories=" + categories +
", categoryIds=" + categoryIds +
", paymentOption=" + paymentOption +
", targetAudience=" + targetAudience +
", territoryId='" + territoryId + '\'' +
", providerId='" + providerId + '\'' +
", visible=" + visible +
", redirectUris=" + redirectUris +
", postLogoutRedirectUris=" + postLogoutRedirectUris +
'}';
}
}
|
package se.kits.gakusei.dto;
public class EventDTO {
private long timestamp;
private String gamemode;
private String type;
private String data;
private String nuggetid;
private String nuggetcategory;
private String username;
private String lesson;
public EventDTO() {}
public long getTimestamp() {
return timestamp;
}
public void setTimestamp(long timestamp) {
this.timestamp = timestamp;
}
public String getGamemode() {
return gamemode;
}
public void setGamemode(String gamemode) {
this.gamemode = gamemode;
}
public String getType() {
return type;
}
public void setType(String type) {
this.type = type;
}
public String getData() {
return data;
}
public void setData(String data) {
this.data = data;
}
public String getUsername() {
return username;
}
public void setUsername(String username) {
this.username = username;
}
public String getNuggetid() {
return nuggetid;
}
public void setNuggetid(String nuggetid) {
this.nuggetid = nuggetid;
}
public String getLesson() {
return lesson;
}
public void setLesson(String lesson) {
this.lesson = lesson;
}
public String getNuggetcategory() {
return nuggetcategory;
}
public void setNuggetcategory(String nuggetcategory) {
this.nuggetcategory = nuggetcategory;
}
public int getNuggetcategoryAsInt(){
switch (getNuggetcategory()){
case "guess" : return 2;//Database "vocab"
case "kanji" : return 3;
case "quiz" : return 4;
case "flashcards" : return 2;//Also Vocab, just different training method
case "grammar" : return 6;
case "translate" : return 7;
default:return 1;
}
}
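// Example: an event recorded in the "flashcards" game mode is stored with category 2,
// the same database category as "guess", since both train vocabulary nuggets.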
}
|
package nl.ovapi.rid.model;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.List;
import java.util.Map;
import java.util.TimeZone;
import lombok.Getter;
import lombok.Setter;
import lombok.ToString;
import nl.ovapi.bison.model.KV17cvlinfo;
import nl.ovapi.bison.model.KV17cvlinfo.Mutation;
import nl.ovapi.bison.model.KV17cvlinfo.Mutation.MutationType;
import nl.ovapi.bison.model.KV6posinfo;
import nl.ovapi.bison.model.KV6posinfo.Type;
import nl.ovapi.exceptions.StopNotFoundException;
import nl.ovapi.exceptions.TooEarlyException;
import nl.ovapi.exceptions.TooOldException;
import nl.ovapi.exceptions.UnknownKV6PosinfoType;
import nl.ovapi.rid.gtfsrt.Utils;
import nl.ovapi.rid.model.JourneyPattern.JourneyPatternPoint;
import nl.ovapi.rid.model.TimeDemandGroup.TimeDemandGroupPoint;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.collect.Maps;
import com.google.transit.realtime.GtfsRealtime.TripDescriptor;
import com.google.transit.realtime.GtfsRealtime.TripDescriptor.ScheduleRelationship;
import com.google.transit.realtime.GtfsRealtime.TripUpdate;
import com.google.transit.realtime.GtfsRealtime.TripUpdate.StopTimeEvent;
import com.google.transit.realtime.GtfsRealtime.TripUpdate.StopTimeUpdate;
import com.google.transit.realtime.GtfsRealtime.VehicleDescriptor;
import com.google.transit.realtime.GtfsRealtimeOVapi;
import com.google.transit.realtime.GtfsRealtimeOVapi.OVapiStopTimeUpdate;
@ToString()
public class Journey {
@Getter
@Setter
private Long id;
@Getter
@Setter
private JourneyPattern journeypattern;
@Getter
@Setter
private TimeDemandGroup timedemandgroup;
@Getter
@Setter
private Map<Integer, Long> realizedArrivals;
@Getter
@Setter
private Map<Integer, Long> realizedDepartures;
@Getter
@Setter
private Integer departuretime;
@Getter
@Setter
private Boolean wheelchairaccessible;
@Getter
@Setter
private String agencyId;
@Getter
@Setter
private String operatingDay;
@Getter
private KV6posinfo posinfo;
@Getter
@Setter
private boolean isCanceled;
private Map<Integer, ArrayList<Mutation>> mutations;
@Getter
@Setter
private Map<Integer, KV6posinfo> reinforcements;
public Journey() {
mutations = Maps.newHashMap();
reinforcements = Maps.newHashMap();
realizedArrivals = Maps.newHashMap();
realizedDepartures = Maps.newHashMap();
}
public void clearKV6(){
posinfo = null;
}
private static final boolean RECORD_TIMES = true;
private static final int PUNCTUALITY_FLOOR = 15; // seconds
private static final int DEFAULT_SPEED = (int) (75 / 3.6); // meters per second
private static final int LONGHAUL_SPEED = (int) (95 / 3.6); // meters per second
private static final int SHORTHAUL_SPEED = (int) (45 / 3.6); // meters per second
private static final int MIN_PUNCTUALITY = -300; // Minimum allowed punctuality in seconds.
private static final Logger _log = LoggerFactory.getLogger(Journey.class);
public TripDescriptor.Builder tripDescriptor(){
TripDescriptor.Builder tripDescriptor = TripDescriptor.newBuilder();
tripDescriptor.setStartDate(operatingDay.replace("-", ""));
tripDescriptor.setTripId(id.toString());
tripDescriptor.setScheduleRelationship(isCanceled ? ScheduleRelationship.CANCELED : ScheduleRelationship.SCHEDULED);
return tripDescriptor;
}
public StopTimeEvent.Builder stopTimeEventArrival(TimeDemandGroupPoint tpt,JourneyPatternPoint pt, int punctuality){
StopTimeEvent.Builder stopTimeEvent = StopTimeEvent.newBuilder();
stopTimeEvent.setDelay(punctuality);
stopTimeEvent.setTime(getDepartureEpoch()+tpt.getTotaldrivetime()+punctuality);
return stopTimeEvent;
}
public StopTimeEvent.Builder stopTimeEventArrivalRecorded(TimeDemandGroupPoint tpt, long time){
StopTimeEvent.Builder stopTimeEvent = StopTimeEvent.newBuilder();
long targettime = getDepartureEpoch()+tpt.getTotaldrivetime();
int delay = (int)(time-targettime);
if (Math.abs(delay) < PUNCTUALITY_FLOOR){
stopTimeEvent.setDelay(0);
stopTimeEvent.setTime(targettime);
}else{
stopTimeEvent.setDelay(delay);
stopTimeEvent.setTime(time);
}
return stopTimeEvent;
}
public boolean hasMutations(){
return mutations.size() > 0 || isCanceled;
}
public StopTimeEvent.Builder stopTimeEventDeparture(TimeDemandGroupPoint tpt,JourneyPatternPoint pt, int punctuality){
StopTimeEvent.Builder stopTimeEvent = StopTimeEvent.newBuilder();
if (mutations.containsKey(pt.getPointorder())){
for (Mutation m : mutations.get(pt.getPointorder())){
if (m.getMutationtype() == MutationType.LAG){
punctuality = Math.max(punctuality, m.getLagtime());
}
}
}
stopTimeEvent.setDelay(punctuality);
stopTimeEvent.setTime(getDepartureEpoch()+tpt.getTotaldrivetime()+tpt.getStopwaittime()+punctuality);
return stopTimeEvent;
}
public StopTimeEvent.Builder stopTimeEventDepartureRecorded(TimeDemandGroupPoint tpt, long time){
StopTimeEvent.Builder stopTimeEvent = StopTimeEvent.newBuilder();
long targettime = getDepartureEpoch()+tpt.getTotaldrivetime()+tpt.getStopwaittime();
stopTimeEvent.setDelay((int)(time-targettime));
int delay = (int)(time-targettime);
if (Math.abs(delay) < PUNCTUALITY_FLOOR){
stopTimeEvent.setDelay(0);
stopTimeEvent.setTime(targettime);
}else{
stopTimeEvent.setDelay(delay);
stopTimeEvent.setTime(time);
}
return stopTimeEvent;
}
/**
* @return POSIX time when the journey ends, in seconds since January 1st 1970 00:00:00 UTC
*/
public long getEndEpoch(){
try {
Calendar c = Calendar.getInstance(TimeZone.getDefault());
c.setTime(new SimpleDateFormat("yyyy-MM-dd").parse(getOperatingDay()));
c.set(Calendar.HOUR_OF_DAY, 0); // midnight of the operating day
c.set(Calendar.MINUTE, 0);
c.set(Calendar.SECOND, 0);
c.set(Calendar.MILLISECOND, 0);
c.add(Calendar.SECOND, getDeparturetime());
c.add(Calendar.SECOND, timedemandgroup.getPoints().get(timedemandgroup.getPoints().size()-1).getTotaldrivetime());
if (posinfo != null && posinfo.getPunctuality() != null){
c.add(Calendar.SECOND, Math.abs(posinfo.getPunctuality()));
}
return c.getTimeInMillis()/1000;
} catch (ParseException e) {
return -1;
}
}
/**
* @return POSIX time when the journey is scheduled to start, in seconds since January 1st 1970 00:00:00 UTC
*/
public long getDepartureEpoch(){
try {
Calendar c = Calendar.getInstance(TimeZone.getDefault());
c.setTime(new SimpleDateFormat("yyyy-MM-dd").parse(getOperatingDay()));
c.set(Calendar.HOUR_OF_DAY, 0); // midnight of the operating day
c.set(Calendar.MINUTE, 0);
c.set(Calendar.SECOND, 0);
c.set(Calendar.MILLISECOND, 0);
c.add(Calendar.SECOND, getDeparturetime());
return c.getTimeInMillis()/1000;
} catch (ParseException e) {
return -1;
}
}
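/*
* Worked example: with operatingDay "2014-03-07" and a departuretime of 23400 seconds
* (6.5 hours past midnight), this method returns the POSIX timestamp of 06:30 local time
* (TimeZone.getDefault()) on 2014-03-07. The dates and values here are only illustrative.
*/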
private StopTimeUpdate.Builder recordedTimes(TimeDemandGroupPoint tpt, JourneyPatternPoint pt){
if (!RECORD_TIMES)
return null;
StopTimeUpdate.Builder stopTimeUpdate = StopTimeUpdate.newBuilder();
boolean stopcanceled = isCanceled;
boolean destChanged = false;
if (mutations.containsKey(pt.getPointorder())){ // Check if mutation exists with cancel
for (Mutation m : mutations.get(pt.getPointorder())){
if (m.getMutationtype() == MutationType.SHORTEN){
stopcanceled = true;
}
if (m.getMutationtype() == MutationType.CHANGEDESTINATION && !destChanged){
destChanged = true;
String destination = m.getDestinationname50();
if (destination == null){
destination = m.getDestinationname16();
}
if (destination == null){
destination = m.getDestinationdisplay16();
}
if (destination != null){
OVapiStopTimeUpdate.Builder ovapiStopTimeUpdate = OVapiStopTimeUpdate.newBuilder();
ovapiStopTimeUpdate.setStopHeadsign(destination);
stopTimeUpdate.setExtension(GtfsRealtimeOVapi.ovapiStopTimeUpdate, ovapiStopTimeUpdate.build());
}
}
}
}
stopTimeUpdate.setStopSequence(pt.getPointorder());
stopTimeUpdate.setStopId(pt.getPointref().toString());
if (!pt.isScheduled())
return null; // Dummy point
if (stopcanceled){
stopTimeUpdate.setScheduleRelationship(StopTimeUpdate.ScheduleRelationship.SKIPPED);
return stopTimeUpdate;
}
if (realizedArrivals.containsKey(pt.getPointorder()))
stopTimeUpdate.setArrival(stopTimeEventArrivalRecorded(tpt,realizedArrivals.get(pt.getPointorder())));
if (realizedDepartures.containsKey(pt.getPointorder()))
stopTimeUpdate.setDeparture(stopTimeEventDepartureRecorded(tpt,realizedDepartures.get(pt.getPointorder())));
if ((pt.isWaitpoint() ||pt.getPointorder() <= 1) && stopTimeUpdate.hasArrival() && stopTimeUpdate.getArrival().getDelay() < 0 && !stopTimeUpdate.hasDeparture()){
StopTimeEvent.Builder stopTimeEvent = StopTimeEvent.newBuilder();
stopTimeEvent.setTime(getDepartureTime(tpt.getPointorder()));
stopTimeEvent.setDelay(0);
stopTimeUpdate.setDeparture(stopTimeEvent);
}
if (stopTimeUpdate.hasDeparture() && !stopTimeUpdate.hasArrival()){
StopTimeEvent.Builder stopTimeEvent = StopTimeEvent.newBuilder();
long time = stopTimeUpdate.getDeparture().getTime();
int delay = stopTimeUpdate.getDeparture().getDelay();
if (delay > MIN_PUNCTUALITY){
stopTimeEvent.setTime(time);
stopTimeEvent.setDelay(delay);
stopTimeUpdate.setArrival(stopTimeEvent);
}
}
if (!stopTimeUpdate.hasDeparture() && stopTimeUpdate.hasArrival()){
StopTimeEvent.Builder stopTimeEvent = StopTimeEvent.newBuilder();
long time = stopTimeUpdate.getArrival().getTime();
int delay = stopTimeUpdate.getArrival().getDelay();
if (delay > MIN_PUNCTUALITY){
stopTimeEvent.setTime(time);
stopTimeEvent.setDelay(delay);
stopTimeUpdate.setDeparture(stopTimeEvent);
}
}
if (!stopTimeUpdate.hasArrival() && !stopTimeUpdate.hasDeparture()){
StopTimeEvent.Builder stopTimeEvent = StopTimeEvent.newBuilder();
long targettime = getDepartureEpoch()+tpt.getTotaldrivetime();
stopTimeEvent.setTime(targettime);
stopTimeEvent.setDelay(0);
stopTimeUpdate.setArrival(stopTimeEvent);
targettime += tpt.getStopwaittime();
stopTimeEvent.setTime(targettime);
stopTimeEvent.setDelay(0);
stopTimeUpdate.setDeparture(stopTimeEvent);
}
return stopTimeUpdate;
}
public long getDepartureTime(int pointorder){
for (TimeDemandGroupPoint tpt : getTimedemandgroup().getPoints()){
if (tpt.getPointorder().equals(pointorder)){
return getDepartureEpoch()+tpt.getTotaldrivetime()+tpt.getStopwaittime();
}
}
throw new IllegalArgumentException("Pointorder "+pointorder+" does not exist");
}
public long getArrivalTime(int pointorder){
for (TimeDemandGroupPoint tpt : getTimedemandgroup().getPoints()){
if (tpt.getPointorder().equals(pointorder)){
return getDepartureEpoch()+tpt.getTotaldrivetime();
}
}
throw new IllegalArgumentException("Pointorder "+pointorder+" does not exist");
}
public TripUpdate.Builder filter(TripUpdate.Builder tripUpdate){
if (tripUpdate.getStopTimeUpdateCount() == 0)
return tripUpdate;
tripUpdate.getStopTimeUpdateOrBuilderList();
long lastTime = Long.MAX_VALUE;
for (int i = tripUpdate.getStopTimeUpdateCount()-1; i >= 0; i--){ //Filter negative dwells and stoptimes
StopTimeUpdate.Builder update = tripUpdate.getStopTimeUpdateBuilder(i);
if (update.getScheduleRelationship() == StopTimeUpdate.ScheduleRelationship.NO_DATA ||
update.getScheduleRelationship() == StopTimeUpdate.ScheduleRelationship.SKIPPED ||
update.hasExtension(GtfsRealtimeOVapi.ovapiStopTimeUpdate)){
continue;
}
if (!update.hasDeparture() || !update.hasArrival()){
System.out.println(tripUpdate.build());
_log.error("Departure or arrival is missing");
}
if (update.getDeparture().getTime() > lastTime){
int offset = (int) (lastTime - update.getDeparture().getTime());
update.getDepartureBuilder().setTime(update.getDeparture().getTime()+offset);
update.getDepartureBuilder().setDelay((int)(update.getDepartureBuilder().getTime()-getDepartureTime(update.getStopSequence())));
}
lastTime = update.getDeparture().getTime();
if (update.getArrival().getTime() > lastTime){
int offset = (int) (lastTime - update.getArrival().getTime());
update.getArrivalBuilder().setTime(update.getArrival().getTime()+offset);
update.getArrivalBuilder().setDelay((int)(update.getArrivalBuilder().getTime()-getArrivalTime(update.getStopSequence())));
}
lastTime = update.getArrival().getTime();
}
ArrayList<StopTimeUpdate.Builder> updates = new ArrayList<StopTimeUpdate.Builder>();
int lastDelay = Integer.MIN_VALUE;
StopTimeUpdate.ScheduleRelationship lastSchedule = StopTimeUpdate.ScheduleRelationship.SCHEDULED;
boolean hadStopTimeExtension = false;
List<StopTimeUpdate.Builder> unfilteredUpdates = tripUpdate.getStopTimeUpdateBuilderList();
for (int i = 0; i < unfilteredUpdates.size(); i++){
StopTimeUpdate.Builder update = unfilteredUpdates.get(i);
if (update.getScheduleRelationship() == StopTimeUpdate.ScheduleRelationship.NO_DATA ||
update.getScheduleRelationship() == StopTimeUpdate.ScheduleRelationship.SKIPPED ||
update.hasExtension(GtfsRealtimeOVapi.ovapiStopTimeUpdate)){
if (update.hasArrival()){
update.clearArrival();
}
if (update.hasDeparture()){
update.clearDeparture();
}
updates.add(update); //No data
lastDelay = Integer.MIN_VALUE;
lastSchedule = update.hasScheduleRelationship() ? update.getScheduleRelationship() :
StopTimeUpdate.ScheduleRelationship.SCHEDULED;
hadStopTimeExtension = update.hasExtension(GtfsRealtimeOVapi.ovapiStopTimeUpdate);
continue;
}
boolean override = lastSchedule != update.getScheduleRelationship() ||
hadStopTimeExtension != update.hasExtension(GtfsRealtimeOVapi.ovapiStopTimeUpdate);
if (update.hasArrival()){
if (update.getArrival().getDelay() == lastDelay && (lastDelay == 0 || i != unfilteredUpdates.size()-1) && !override){
update.clearArrival();
}else{
lastDelay = update.getArrival().getDelay();
}
}
if (update.hasDeparture()){
if (update.getDeparture().getDelay() == lastDelay && (i != 0) && !override){
update.clearDeparture();
}else{
lastDelay = update.getDeparture().getDelay();
}
}
if (update.hasArrival() || update.hasDeparture()){
updates.add(update);
}
lastSchedule = update.hasScheduleRelationship() ? update.getScheduleRelationship() :
StopTimeUpdate.ScheduleRelationship.SCHEDULED;
hadStopTimeExtension = update.hasExtension(GtfsRealtimeOVapi.ovapiStopTimeUpdate);
}
tripUpdate.clearStopTimeUpdate();
for (StopTimeUpdate.Builder update: updates){
tripUpdate.addStopTimeUpdate(update);
}
return tripUpdate;
}
public TripUpdate.Builder updateTimes(KV6posinfo posinfo) {
boolean passed = posinfo.getMessagetype() != KV6posinfo.Type.DELAY;
int punctuality = Math
.max(MIN_PUNCTUALITY, posinfo.getPunctuality() == null ? 0
: posinfo.getPunctuality());
TripUpdate.Builder tripUpdate = TripUpdate.newBuilder();
tripUpdate.setTrip(tripDescriptor());
tripUpdate.setTimestamp(posinfo.getTimestamp());
if (isCanceled){
return tripUpdate;
}
VehicleDescriptor.Builder vehicleDesc = posinfo.getVehicleDescription();
if (vehicleDesc != null)
tripUpdate.setVehicle(vehicleDesc);
switch (posinfo.getMessagetype()) { // These message types do not contain information regarding punctuality
case INIT:
realizedArrivals = Maps.newHashMap();
realizedDepartures = Maps.newHashMap();
case END:
case OFFROUTE:
if (getPosinfo() != null && !hasMutations() && getPosinfo().getMessagetype() != Type.OFFROUTE)
return null; //We've already sent out NO_DATA
for (int i = 0; i < timedemandgroup.getPoints().size(); i++) {
TimeDemandGroupPoint tpt = timedemandgroup.getPoints().get(i);
JourneyPatternPoint pt = journeypattern.getPoint(tpt.pointorder);
if (!pt.isScheduled())
continue;
StopTimeUpdate.Builder recordedTimes = recordedTimes(tpt,pt);
if (recordedTimes != null){
tripUpdate.addStopTimeUpdate(recordedTimes);
}else if (posinfo.getMessagetype() == Type.OFFROUTE){
StopTimeUpdate.Builder noData = StopTimeUpdate.newBuilder();
noData.setStopSequence(pt.getPointorder());
noData.setStopId(pt.getPointref().toString());
noData.setScheduleRelationship(StopTimeUpdate.ScheduleRelationship.NO_DATA);
tripUpdate.addStopTimeUpdate(noData);
if (!hasMutations())
break;
}
}
this.posinfo = posinfo;
tripUpdate = filter(tripUpdate);
if (tripUpdate.getStopTimeUpdateCount() > 0)
return tripUpdate;
default:
break;
}
int passageseq = 0;
int elapsedtime = 0;
for (int i = 0; i < timedemandgroup.getPoints().size(); i++) {
TimeDemandGroupPoint tpt = timedemandgroup.getPoints().get(i);
JourneyPatternPoint pt = journeypattern.getPoint(tpt.pointorder);
if (pt.getOperatorpointref().equals(posinfo.getUserstopcode())) {
if (posinfo.getPassagesequencenumber() - passageseq > 0) {
passageseq++; // Userstop equal but posinfo relates to n-th
// passing
} else {
elapsedtime = tpt.getTotaldrivetime()+tpt.getStopwaittime();
passed = false;
switch (posinfo.getMessagetype()) {
case ARRIVAL:
if (RECORD_TIMES)
realizedArrivals.put(pt.getPointorder(), posinfo.getTimestamp());
long targetArrivalTime = getDepartureEpoch()+tpt.getTotaldrivetime();
int newPunctuality = (int)(posinfo.getTimestamp()-targetArrivalTime);
if (newPunctuality > -60 && newPunctuality - punctuality < 600){
punctuality = newPunctuality;
}
if ((pt.isWaitpoint() || i == 0) && punctuality < 0)
punctuality = 0;
break;
case DEPARTURE:
long targetDepartureTime = getDepartureEpoch()+tpt.getTotaldrivetime()+tpt.getStopwaittime();
newPunctuality = (int)(posinfo.getTimestamp()-targetDepartureTime);
if (RECORD_TIMES && (!pt.isWaitpoint() || newPunctuality > 0))
realizedDepartures.put(pt.getPointorder(), posinfo.getTimestamp());
if ((pt.isWaitpoint() || i == 0) && punctuality < 0)
punctuality = 0;
break;
case ONSTOP:
if ((pt.isWaitpoint() || i == 0) && punctuality < 0)
punctuality = 0;
targetDepartureTime = getDepartureEpoch()+tpt.getTotaldrivetime()+tpt.getStopwaittime();
newPunctuality = (int)(posinfo.getTimestamp()-targetDepartureTime);
if (newPunctuality > -30 && newPunctuality - punctuality < 600){
punctuality = newPunctuality;
}
break;
default:
break;
}
}
StopTimeUpdate.Builder recorded = recordedTimes(tpt,pt);
if (recorded != null){
tripUpdate.addStopTimeUpdate(recorded);
}
} else if (!passed) { //Stops not yet visited by the vehicle
StopTimeUpdate.Builder stopTimeUpdate = StopTimeUpdate.newBuilder();
stopTimeUpdate.setStopSequence(tpt.getPointorder());
stopTimeUpdate.setStopId(pt.getPointref().toString());
stopTimeUpdate.setArrival(stopTimeEventArrival(tpt,pt,punctuality));
boolean stopcanceled = isCanceled;
boolean destChanged = false;
if (mutations.containsKey(tpt.getPointorder())){ // Check if mutation exists with cancel
for (Mutation m : mutations.get(tpt.getPointorder())){
if (m.getMutationtype() == MutationType.SHORTEN){
stopcanceled = true;
}
if (m.getMutationtype() == MutationType.CHANGEDESTINATION && !destChanged){
destChanged = true;
String destination = m.getDestinationname50();
if (destination == null){
destination = m.getDestinationname16();
}
if (destination == null){
destination = m.getDestinationdisplay16();
}
if (destination != null){
OVapiStopTimeUpdate.Builder ovapiStopTimeUpdate = OVapiStopTimeUpdate.newBuilder();
ovapiStopTimeUpdate.setStopHeadsign(destination);
stopTimeUpdate.setExtension(GtfsRealtimeOVapi.ovapiStopTimeUpdate, ovapiStopTimeUpdate.build());
}
}
}
}
stopTimeUpdate.setScheduleRelationship(
stopcanceled ? StopTimeUpdate.ScheduleRelationship.SKIPPED
: StopTimeUpdate.ScheduleRelationship.SCHEDULED);
if (pt.isWaitpoint() && punctuality < 0)
punctuality = 0;
if (tpt.getStopwaittime() != 0 && punctuality > 0) { //Minimize delay by cutting into dwells
int stopwaittime = tpt.getStopwaittime();
if (stopwaittime > 20 ) {
punctuality -= Math.max(0, stopwaittime - 20);
punctuality = Math.max(0, punctuality);
}
}
stopTimeUpdate.setDeparture(stopTimeEventDeparture(tpt,pt,punctuality));
if (pt.isScheduled()){
tripUpdate.addStopTimeUpdate(stopTimeUpdate);
}
punctuality = stopTimeUpdate.getDeparture().getDelay();
if (i+1 < timedemandgroup.getPoints().size()){
TimeDemandGroupPoint ntpt = timedemandgroup.getPoints().get(i+1);
JourneyPatternPoint npt = journeypattern.getPoint(ntpt.pointorder);
int distanceToNext = npt.getDistancefromstartroute() - pt.getDistancefromstartroute();
int secondsToNext = ntpt.getTotaldrivetime() - (tpt.getTotaldrivetime()+tpt.getStopwaittime());
int speed = DEFAULT_SPEED;
if (distanceToNext > 10000) {
speed = LONGHAUL_SPEED;
} else if (distanceToNext < 1000) {
speed = SHORTHAUL_SPEED;
}
int fastest = distanceToNext / speed;
if ((punctuality > 0 || secondsToNext < fastest) && i != timedemandgroup.getPoints().size() - 1) {
punctuality -= (secondsToNext - fastest);
if (punctuality < 0) {
punctuality = 0;
}
} else if (punctuality < 0 && i != timedemandgroup.getPoints().size() - 1) {
punctuality = decayeddelay(punctuality,
tpt.getTotaldrivetime() - elapsedtime);
}
if (Math.abs(punctuality) < PUNCTUALITY_FLOOR) {
punctuality = 0;
}
}
}else{ //JourneyPatternPoint has been passed.
StopTimeUpdate.Builder recorded = recordedTimes(tpt,pt);
if (recorded != null){
tripUpdate.addStopTimeUpdate(recorded);
}
}
}
this.posinfo = posinfo;
tripUpdate = filter(tripUpdate);
if (tripUpdate.getStopTimeUpdateCount() > 0)
return tripUpdate;
else
return null;
}
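/*
* Illustrative numbers for the exponential decay in decayeddelay below (lambda = 1/500 per second):
* a delay of -120 seconds decays to (int) (Math.exp(-120 / 500.0) * -120) = -94 seconds
* after a further 120 seconds of scheduled drive time.
*/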
public int decayeddelay(int delay, int elapsedtime) {
if (delay == 0)
return 0;
double vlamba = 1.0 / 500.0;
double decay = Math.exp(-vlamba * elapsedtime);
int decayeddelay = (int) (decay * delay);
return decayeddelay;
}
private void parseMutateJourney(Long timestamp, Mutation m) {
switch (m.getMutationtype()) {
case CANCEL:
isCanceled = true;
break;
case RECOVER:
isCanceled = false;
break;
default:
break;
}
}
private JourneyPatternPoint getJourneyStop (String userstopcode,int passageSequencenumber){
for (int i = 0; i < timedemandgroup.getPoints().size(); i++) {
TimeDemandGroupPoint tpt = timedemandgroup.getPoints().get(i);
JourneyPatternPoint pt = journeypattern.getPoint(tpt.pointorder);
if (pt.getOperatorpointref().equals(userstopcode)){
if (passageSequencenumber > 0){
passageSequencenumber--;
}else{
return pt;
}
}
}
return null;
}
private void parseMutateJourneyStop(Long timestamp, Mutation m)
throws StopNotFoundException {
JourneyPatternPoint pst = getJourneyStop(m.getUserstopcode(),m.getPassagesequencenumber());
if (pst == null) {
throw new StopNotFoundException(m.toString());
}
if (!mutations.containsKey(pst.getPointorder()))
mutations.put(pst.getPointorder(), new ArrayList<Mutation>());
switch (m.getMutationtype()) {
case CHANGEDESTINATION:
case CHANGEPASSTIMES:
case LAG:
case RECOVER:
case CANCEL:
case SHORTEN:
mutations.get(pst.getPointorder()).add(m);
break;
default:
_log.info("Unknown mutationtype {}",m);
break;
}
}
public TripUpdate.Builder update(ArrayList<KV17cvlinfo> cvlinfos) {
long timestamp = 0;
if (cvlinfos.size() == 0){
return null;
}
mutations.clear();
for (KV17cvlinfo cvlinfo : cvlinfos) {
for (Mutation mut : cvlinfo.getMutations()) {
try {
timestamp = Math.max(timestamp, cvlinfo.getTimestamp());
switch (mut.getMessagetype()) {
case KV17MUTATEJOURNEY:
parseMutateJourney(cvlinfo.getTimestamp(), mut);
continue;
case KV17MUTATEJOURNEYSTOP:
parseMutateJourneyStop(cvlinfo.getTimestamp(), mut);
continue;
}
} catch (Exception e) {
_log.error("Error applying KV17",e);
}
}
}
int posinfoAge = (posinfo == null) ? Integer.MAX_VALUE :
(int)(Utils.currentTimeSecs()-posinfo.getTimestamp());
if (timestamp == 0)
timestamp = Utils.currentTimeSecs();
if (posinfo != null && posinfoAge < 120){
TripUpdate.Builder timeUpdate = updateTimes(posinfo);
timeUpdate.setTimestamp(timestamp);
return timeUpdate;
}else{
KV6posinfo posinfo = new KV6posinfo();
posinfo.setMessagetype(Type.DELAY); //Fake KV6posinfo to get things moving
posinfo.setPunctuality(0);
posinfo.setTimestamp(timestamp);
return updateTimes(posinfo);
}
}
public TripUpdate.Builder update(KV6posinfo posinfo) throws StopNotFoundException,UnknownKV6PosinfoType, TooEarlyException, TooOldException {
long currentTime = Utils.currentTimeSecs();
if (posinfo.getTimestamp()<currentTime-120){
throw new TooOldException(posinfo.toString());
}
long departureTime = getDepartureEpoch();
if (currentTime < departureTime){
int timeDeltaSeconds = (int)(departureTime-Utils.currentTimeSecs());
if (timeDeltaSeconds>=3600){
switch(posinfo.getMessagetype()){
case INIT:
break;
default:
throw new TooEarlyException(posinfo.toString());
}
}
}
if (posinfo.getUserstopcode() != null
&& !journeypattern.contains(posinfo.getUserstopcode())) {
throw new StopNotFoundException(posinfo.toString());
}
return updateTimes(posinfo);
}
}
|
package selling.sunshine.model;
import java.util.List;
public class Order extends Entity {
private String orderId;
private Agent agent;
private double price;
private OrderStatus status;
private List<OrderItem> orderItems;
public Order() {
super();
this.status = OrderStatus.SUBMITTED;
}
public String getOrderId() {
return orderId;
}
public void setOrderId(String orderId) {
this.orderId = orderId;
}
public List<OrderItem> getOrderItems() {
return orderItems;
}
public void setOrderItems(List<OrderItem> orderItems) {
this.orderItems = orderItems;
}
public Agent getAgent() {
return agent;
}
public void setAgent(Agent agent) {
this.agent = agent;
}
public OrderStatus getStatus() {
return status;
}
public void setStatus(OrderStatus status) {
this.status = status;
}
public double getPrice() {
return price;
}
public void setPrice(double price) {
this.price = price;
}
}
|
package com.opengamma.masterdb.historicaltimeseries;
import java.sql.Date;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Timestamp;
import java.sql.Types;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import javax.time.Duration;
import javax.time.Instant;
import javax.time.calendar.LocalDate;
import javax.time.calendar.OffsetDateTime;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.dao.DataAccessException;
import org.springframework.dao.DataIntegrityViolationException;
import org.springframework.jdbc.core.ResultSetExtractor;
import org.springframework.jdbc.core.namedparam.NamedParameterJdbcOperations;
import org.springframework.transaction.TransactionStatus;
import org.springframework.transaction.support.TransactionCallback;
import com.opengamma.DataNotFoundException;
import com.opengamma.core.change.ChangeType;
import com.opengamma.id.ExternalId;
import com.opengamma.id.ExternalIdBundleWithDates;
import com.opengamma.id.ExternalIdSearch;
import com.opengamma.id.ExternalIdWithDates;
import com.opengamma.id.ObjectId;
import com.opengamma.id.ObjectIdentifiable;
import com.opengamma.id.UniqueId;
import com.opengamma.id.VersionCorrection;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesInfoDocument;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesInfoHistoryRequest;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesInfoHistoryResult;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesInfoMetaDataRequest;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesInfoMetaDataResult;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesInfoSearchRequest;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesInfoSearchResult;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesMaster;
import com.opengamma.master.historicaltimeseries.ManageableHistoricalTimeSeries;
import com.opengamma.master.historicaltimeseries.ManageableHistoricalTimeSeriesInfo;
import com.opengamma.masterdb.AbstractDocumentDbMaster;
import com.opengamma.util.ArgumentChecker;
import com.opengamma.util.Paging;
import com.opengamma.util.db.DbDateUtils;
import com.opengamma.util.db.DbMapSqlParameterSource;
import com.opengamma.util.db.DbSource;
import com.opengamma.util.db.PostgreSQLDbHelper;
import com.opengamma.util.timeseries.localdate.ArrayLocalDateDoubleTimeSeries;
import com.opengamma.util.timeseries.localdate.LocalDateDoubleTimeSeries;
/**
* A time-series master implementation using a database for persistence.
* <p>
* This is a full implementation of the time-series master using an SQL database.
* Full details of the API are in {@link HistoricalTimeSeriesMaster}.
* <p>
* This implementation uses two linked unique identifiers, one for the document
* and one for the time-series. They share the same scheme, but have different values
* and versions. All the methods accept both formats although where possible they
* should be treated separately.
* <p>
* This class uses SQL via JDBC. The SQL may be changed by subclassing the relevant methods.
* <p>
* This class is mutable but must be treated as immutable after configuration.
*/
public class DbHistoricalTimeSeriesMaster extends AbstractDocumentDbMaster<HistoricalTimeSeriesInfoDocument> implements HistoricalTimeSeriesMaster {
/** Logger. */
private static final Logger s_logger = LoggerFactory.getLogger(DbHistoricalTimeSeriesMaster.class);
/**
* The default scheme for unique identifiers.
*/
public static final String IDENTIFIER_SCHEME_DEFAULT = "DbHts";
/**
* The prefix used for data point unique identifiers.
*/
protected static final String DATA_POINT_PREFIX = "DP";
/**
* SQL select.
*/
protected static final String SELECT =
"SELECT " +
"main.id AS doc_id, " +
"main.oid AS doc_oid, " +
"main.ver_from_instant AS ver_from_instant, " +
"main.ver_to_instant AS ver_to_instant, " +
"main.corr_from_instant AS corr_from_instant, " +
"main.corr_to_instant AS corr_to_instant, " +
"nm.name AS name, " +
"df.name AS data_field, " +
"ds.name AS data_source, " +
"dp.name AS data_provider, " +
"ot.name AS observation_time, " +
"i.key_scheme AS key_scheme, " +
"i.key_value AS key_value, " +
"di.valid_from AS key_valid_from, " +
"di.valid_to AS key_valid_to ";
protected static final String FROM_PREFIX = "FROM hts_document main ";
protected static final String FROM_POSTFIX = " INNER JOIN hts_name nm ON main.name_id = nm.id "
+ "INNER JOIN hts_data_field df ON main.data_field_id = df.id "
+ "INNER JOIN hts_data_source ds ON main.data_source_id = ds.id "
+ "INNER JOIN hts_data_provider dp ON main.data_provider_id = dp.id "
+ "INNER JOIN hts_observation_time ot ON main.observation_time_id = ot.id "
+ "LEFT JOIN hts_doc2idkey di ON (di.doc_id = main.id) " + "LEFT JOIN hts_idkey i ON (di.idkey_id = i.id) ";
/**
* SQL from.
*/
protected static final String FROM = FROM_PREFIX + FROM_POSTFIX;
/**
* Dimension table.
*/
private final NamedDimensionDbTable _nameTable;
/**
* Dimension table.
*/
private final NamedDimensionDbTable _dataFieldTable;
/**
* Dimension table.
*/
private final NamedDimensionDbTable _dataSourceTable;
/**
* Dimension table.
*/
private final NamedDimensionDbTable _dataProviderTable;
/**
* Dimension table.
*/
private final NamedDimensionDbTable _observationTimeTable;
/**
* Creates an instance.
*
* @param dbSource the database source combining all configuration, not null
*/
public DbHistoricalTimeSeriesMaster(final DbSource dbSource) {
super(dbSource, IDENTIFIER_SCHEME_DEFAULT);
_nameTable = new NamedDimensionDbTable(dbSource, "name", "hts_name", "hts_dimension_seq");
_dataFieldTable = new NamedDimensionDbTable(dbSource, "data_field", "hts_data_field", "hts_dimension_seq");
_dataSourceTable = new NamedDimensionDbTable(dbSource, "data_source", "hts_data_source", "hts_dimension_seq");
_dataProviderTable = new NamedDimensionDbTable(dbSource, "data_provider", "hts_data_provider", "hts_dimension_seq");
_observationTimeTable = new NamedDimensionDbTable(dbSource, "observation_time", "hts_observation_time", "hts_dimension_seq");
}
/**
* Gets the dimension table helper.
*
* @return the table, not null
*/
protected NamedDimensionDbTable getNameTable() {
return _nameTable;
}
/**
* Gets the dimension table helper.
*
* @return the table, not null
*/
protected NamedDimensionDbTable getDataFieldTable() {
return _dataFieldTable;
}
/**
* Gets the dimension table helper.
*
* @return the table, not null
*/
protected NamedDimensionDbTable getDataSourceTable() {
return _dataSourceTable;
}
/**
* Gets the dimension table helper.
*
* @return the table, not null
*/
protected NamedDimensionDbTable getDataProviderTable() {
return _dataProviderTable;
}
/**
* Gets the dimension table helper.
*
* @return the table, not null
*/
protected NamedDimensionDbTable getObservationTimeTable() {
return _observationTimeTable;
}
@Override
public HistoricalTimeSeriesInfoMetaDataResult metaData(HistoricalTimeSeriesInfoMetaDataRequest request) {
ArgumentChecker.notNull(request, "request");
HistoricalTimeSeriesInfoMetaDataResult result = new HistoricalTimeSeriesInfoMetaDataResult();
if (request.isDataFields()) {
result.setDataFields(getDataFieldTable().names());
}
if (request.isDataSources()) {
result.setDataSources(getDataSourceTable().names());
}
if (request.isDataProviders()) {
result.setDataProviders(getDataProviderTable().names());
}
if (request.isObservationTimes()) {
result.setObservationTimes(getObservationTimeTable().names());
}
return result;
}
@Override
public HistoricalTimeSeriesInfoSearchResult search(final HistoricalTimeSeriesInfoSearchRequest request) {
ArgumentChecker.notNull(request, "request");
ArgumentChecker.notNull(request.getPagingRequest(), "request.pagingRequest");
ArgumentChecker.notNull(request.getVersionCorrection(), "request.versionCorrection");
s_logger.debug("search {}", request);
final HistoricalTimeSeriesInfoSearchResult result = new HistoricalTimeSeriesInfoSearchResult();
if ((request.getObjectIds() != null && request.getObjectIds().size() == 0) ||
(ExternalIdSearch.canMatch(request.getExternalIdSearch()) == false)) {
result.setPaging(Paging.of(request.getPagingRequest(), 0));
return result;
}
final VersionCorrection vc = request.getVersionCorrection().withLatestFixed(now());
final DbMapSqlParameterSource args = new DbMapSqlParameterSource()
.addTimestamp("version_as_of_instant", vc.getVersionAsOf())
.addTimestamp("corrected_to_instant", vc.getCorrectedTo())
.addValueNullIgnored("name", getDbHelper().sqlWildcardAdjustValue(request.getName()))
.addValueNullIgnored("data_field", getDbHelper().sqlWildcardAdjustValue(request.getDataField()))
.addValueNullIgnored("data_source", getDbHelper().sqlWildcardAdjustValue(request.getDataSource()))
.addValueNullIgnored("data_provider", getDbHelper().sqlWildcardAdjustValue(request.getDataProvider()))
.addValueNullIgnored("observation_time", getDbHelper().sqlWildcardAdjustValue(request.getObservationTime()))
.addDateNullIgnored("id_validity_date", request.getValidityDate())
.addValueNullIgnored("key_value", getDbHelper().sqlWildcardAdjustValue(request.getExternalIdValue()));
if (request.getExternalIdSearch() != null) {
int i = 0;
for (ExternalId id : request.getExternalIdSearch()) {
args.addValue("key_scheme" + i, id.getScheme().getName());
args.addValue("key_value" + i, id.getValue());
i++;
}
}
searchWithPaging(request.getPagingRequest(), sqlSearchHistoricalTimeSeries(request), args, new HistoricalTimeSeriesDocumentExtractor(), result);
return result;
}
/**
* Gets the SQL to search for documents.
*
* @param request the request, not null
* @return the SQL search and count, not null
*/
protected String[] sqlSearchHistoricalTimeSeries(final HistoricalTimeSeriesInfoSearchRequest request) {
String where = "WHERE ver_from_instant <= :version_as_of_instant AND ver_to_instant > :version_as_of_instant " +
"AND corr_from_instant <= :corrected_to_instant AND corr_to_instant > :corrected_to_instant ";
if (request.getName() != null) {
where += "AND name_id IN (" + getNameTable().sqlSelectSearch(request.getName()) + ") ";
}
if (request.getDataField() != null) {
where += "AND data_field_id IN (" + getDataFieldTable().sqlSelectSearch(request.getDataField()) + ") ";
}
if (request.getDataSource() != null) {
where += "AND data_source_id IN (" + getDataSourceTable().sqlSelectSearch(request.getDataSource()) + ") ";
}
if (request.getDataProvider() != null) {
where += "AND data_provider_id IN (" + getDataProviderTable().sqlSelectSearch(request.getDataProvider()) + ") ";
}
if (request.getObservationTime() != null) {
where += "AND observation_time_id IN (" + getObservationTimeTable().sqlSelectSearch(request.getObservationTime()) + ") ";
}
if (request.getObjectIds() != null) {
StringBuilder buf = new StringBuilder(request.getObjectIds().size() * 10);
for (ObjectId objectId : request.getObjectIds()) {
checkScheme(objectId);
buf.append(extractOid(objectId)).append(", ");
}
buf.setLength(buf.length() - 2);
where += "AND oid IN (" + buf + ") ";
}
if (request.getExternalIdSearch() != null && request.getExternalIdSearch().size() > 0) {
where += sqlSelectMatchingHistoricalTimeSeriesKeys(request.getExternalIdSearch(), request.getValidityDate());
}
if (request.getExternalIdValue() != null) {
where += sqlSelectExternalIdValue(request.getExternalIdValue(), request.getValidityDate());
}
where += sqlAdditionalWhere();
String selectFromWhereInner = "SELECT id FROM hts_document " + where;
String inner = getDbHelper().sqlApplyPaging(selectFromWhereInner, "ORDER BY id ", request.getPagingRequest());
boolean isPostgres = this.getDbSource().getDialect() instanceof PostgreSQLDbHelper;
if (isPostgres) {
//TODO: this is a hack. Query optimizer gives up on the mahoosive query. If we split it manually it goes a fair bit faster
String tempTable = "CREATE LOCAL TEMP TABLE ids ON COMMIT DROP as " + inner + ";\n";
String search = tempTable + SELECT + FROM_PREFIX + " INNER JOIN ids ON ids.id = main.id" + FROM_POSTFIX
+ " ORDER BY main.id" + sqlAdditionalOrderBy(false);
String count = "SELECT COUNT(*) FROM hts_document " + where;
return new String[] {search, count};
} else {
String search = sqlSelectFrom() + "WHERE main.id IN (" + inner + ") ORDER BY main.id"
+ sqlAdditionalOrderBy(false);
String count = "SELECT COUNT(*) FROM hts_document " + where;
return new String[] {search, count};
}
}
/**
* Gets the SQL to match identifier value.
*
* @param identifierValue the identifier value, not null
* @param validityDate the validity date, not null
* @return the SQL, not null
*/
protected String sqlSelectExternalIdValue(final String identifierValue, final LocalDate validityDate) {
String select = "SELECT DISTINCT doc_id " +
"FROM hts_doc2idkey di, hts_document main " +
"WHERE doc_id = main.id " +
"AND main.ver_from_instant <= :version_as_of_instant AND main.ver_to_instant > :version_as_of_instant " +
"AND main.corr_from_instant <= :corrected_to_instant AND main.corr_to_instant > :corrected_to_instant " +
(validityDate != null ? "AND di.valid_from <= :id_validity_date AND di.valid_to >= :id_validity_date " : "") +
"AND idkey_id IN ( SELECT id FROM hts_idkey WHERE " + getDbHelper().sqlWildcardQuery("UPPER(key_value) ", "UPPER(:key_value)", identifierValue) + ") ";
return "AND id IN (" + select + ") ";
}
/**
* Gets the SQL to match the {@code ExternalIdSearch}.
*
* @param idSearch the identifier search, not null
* @param validityDate the validity date, not null
* @return the SQL, not null
*/
protected String sqlSelectMatchingHistoricalTimeSeriesKeys(final ExternalIdSearch idSearch, final LocalDate validityDate) {
switch (idSearch.getSearchType()) {
case EXACT:
return "AND id IN (" + sqlSelectMatchingHistoricalTimeSeriesKeysExact(idSearch, validityDate) + ") ";
case ALL:
return "AND id IN (" + sqlSelectMatchingHistoricalTimeSeriesKeysAll(idSearch, validityDate) + ") ";
case ANY:
return "AND id IN (" + sqlSelectMatchingHistoricalTimeSeriesKeysAny(idSearch, validityDate) + ") ";
case NONE:
return "AND id NOT IN (" + sqlSelectMatchingHistoricalTimeSeriesKeysAny(idSearch, validityDate) + ") ";
}
throw new UnsupportedOperationException("Search type is not supported: " + idSearch.getSearchType());
}
/**
* Gets the SQL to find all the series matching.
*
* @param idSearch the identifier search, not null
* @param validityDate the validity date, not null
* @return the SQL, not null
*/
protected String sqlSelectMatchingHistoricalTimeSeriesKeysExact(final ExternalIdSearch idSearch, final LocalDate validityDate) {
// compare size of all matched to size in total
// filter by dates to reduce search set
String a = "SELECT doc_id AS matched_doc_id, COUNT(doc_id) AS matched_count " +
"FROM hts_doc2idkey di, hts_document main " +
"WHERE di.doc_id = main.id " +
"AND main.ver_from_instant <= :version_as_of_instant AND main.ver_to_instant > :version_as_of_instant " +
"AND main.corr_from_instant <= :corrected_to_instant AND main.corr_to_instant > :corrected_to_instant " +
(validityDate != null ? "AND di.valid_from <= :id_validity_date AND di.valid_to >= :id_validity_date " : "") +
"AND idkey_id IN (" + sqlSelectMatchingHistoricalTimeSeriesKeysOr(idSearch) + ") " +
"GROUP BY doc_id " +
"HAVING COUNT(doc_id) >= " + idSearch.size() + " ";
String b = "SELECT doc_id AS total_doc_id, COUNT(doc_id) AS total_count " +
"FROM hts_doc2idkey di, hts_document main " +
"WHERE di.doc_id = main.id " +
"AND main.ver_from_instant <= :version_as_of_instant AND main.ver_to_instant > :version_as_of_instant " +
"AND main.corr_from_instant <= :corrected_to_instant AND main.corr_to_instant > :corrected_to_instant " +
(validityDate != null ? "AND di.valid_from <= :id_validity_date AND di.valid_to >= :id_validity_date " : "") +
"GROUP BY doc_id ";
String select = "SELECT matched_doc_id AS doc_id " +
"FROM (" + a + ") AS a, (" + b + ") AS b " +
"WHERE matched_doc_id = total_doc_id " +
"AND matched_count = total_count ";
return select;
}
/**
* Gets the SQL to find all the series matching.
*
* @param idSearch the identifier search, not null
* @param validityDate the validity date, not null
* @return the SQL, not null
*/
protected String sqlSelectMatchingHistoricalTimeSeriesKeysAll(final ExternalIdSearch idSearch, final LocalDate validityDate) {
// only return doc_id when all requested ids match (having count >= size)
// filter by dates to reduce search set
String select = "SELECT doc_id " +
"FROM hts_doc2idkey di, hts_document main " +
"WHERE di.doc_id = main.id " +
"AND main.ver_from_instant <= :version_as_of_instant AND main.ver_to_instant > :version_as_of_instant " +
"AND main.corr_from_instant <= :corrected_to_instant AND main.corr_to_instant > :corrected_to_instant " +
(validityDate != null ? "AND di.valid_from <= :id_validity_date AND di.valid_to >= :id_validity_date " : "") +
"AND idkey_id IN (" + sqlSelectMatchingHistoricalTimeSeriesKeysOr(idSearch) + ") " +
"GROUP BY doc_id " +
"HAVING COUNT(doc_id) >= " + idSearch.size() + " ";
return select;
}
/**
* Gets the SQL to find all the series matching any identifier.
*
* @param idSearch the identifier search, not null
* @param validityDate the validity date, not null
* @return the SQL, not null
*/
protected String sqlSelectMatchingHistoricalTimeSeriesKeysAny(final ExternalIdSearch idSearch, final LocalDate validityDate) {
// optimized search for the common case of individual ORs
// filter by dates to reduce search set
String select = "SELECT DISTINCT doc_id " +
"FROM hts_doc2idkey di, hts_document main " +
"WHERE di.doc_id = main.id " +
"AND main.ver_from_instant <= :version_as_of_instant AND main.ver_to_instant > :version_as_of_instant " +
"AND main.corr_from_instant <= :corrected_to_instant AND main.corr_to_instant > :corrected_to_instant " +
(validityDate != null ? "AND di.valid_from <= :id_validity_date AND di.valid_to >= :id_validity_date " : "") +
"AND idkey_id IN (" + sqlSelectMatchingHistoricalTimeSeriesKeysOr(idSearch) + ") ";
return select;
}
/**
* Gets the SQL to find the idkey ids matching any of the search identifiers.
*
* @param idSearch the identifier search, not null
* @return the SQL, not null
*/
protected String sqlSelectMatchingHistoricalTimeSeriesKeysOr(final ExternalIdSearch idSearch) {
String select = "SELECT id FROM hts_idkey ";
for (int i = 0; i < idSearch.size(); i++) {
select += (i == 0 ? "WHERE " : "OR ");
select += "(key_scheme = :key_scheme" + i + " AND key_value = :key_value" + i + ") ";
}
return select;
}
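// Illustrative example of the SQL produced by the loop above for a two-identifier search (the
// :key_scheme0/:key_value0 and :key_scheme1/:key_value1 parameters are assumed to be bound by the
// caller that builds the argument source):
//   SELECT id FROM hts_idkey
//   WHERE (key_scheme = :key_scheme0 AND key_value = :key_value0)
//   OR (key_scheme = :key_scheme1 AND key_value = :key_value1)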
@Override
public HistoricalTimeSeriesInfoDocument get(UniqueId uniqueId) {
ArgumentChecker.notNull(uniqueId, "uniqueId");
if (uniqueId.getVersion() != null && uniqueId.getVersion().contains("P")) {
VersionCorrection vc = extractTimeSeriesInstants(uniqueId);
return get(uniqueId.getObjectId(), vc);
}
return doGet(uniqueId, new HistoricalTimeSeriesDocumentExtractor(), "HistoricalTimeSeries");
}
@Override
public HistoricalTimeSeriesInfoDocument get(final ObjectIdentifiable objectId, final VersionCorrection versionCorrection) {
return doGetByOidInstants(objectId, versionCorrection, new HistoricalTimeSeriesDocumentExtractor(), "HistoricalTimeSeries");
}
@Override
public HistoricalTimeSeriesInfoHistoryResult history(final HistoricalTimeSeriesInfoHistoryRequest request) {
return doHistory(request, new HistoricalTimeSeriesInfoHistoryResult(), new HistoricalTimeSeriesDocumentExtractor());
}
/**
* Inserts a new document.
*
* @param document the document, not null
* @return the new document, not null
*/
@Override
protected HistoricalTimeSeriesInfoDocument insert(final HistoricalTimeSeriesInfoDocument document) {
ArgumentChecker.notNull(document.getInfo(), "document.info");
ArgumentChecker.notNull(document.getInfo().getName(), "document.info.name");
ArgumentChecker.notNull(document.getInfo().getDataField(), "document.info.dataField");
ArgumentChecker.notNull(document.getInfo().getDataSource(), "document.info.dataSource");
ArgumentChecker.notNull(document.getInfo().getDataProvider(), "document.info.dataProvider");
ArgumentChecker.notNull(document.getInfo().getObservationTime(), "document.info.observationTime");
final long docId = nextId("hts_master_seq");
final long docOid = (document.getUniqueId() != null ? extractOid(document.getUniqueId()) : docId);
// the arguments for inserting into the table
final ManageableHistoricalTimeSeriesInfo info = document.getInfo();
final DbMapSqlParameterSource seriesArgs = new DbMapSqlParameterSource()
.addValue("doc_id", docId)
.addValue("doc_oid", docOid)
.addTimestamp("ver_from_instant", document.getVersionFromInstant())
.addTimestampNullFuture("ver_to_instant", document.getVersionToInstant())
.addTimestamp("corr_from_instant", document.getCorrectionFromInstant())
.addTimestampNullFuture("corr_to_instant", document.getCorrectionToInstant())
.addValue("name_id", getNameTable().ensure(info.getName()))
.addValue("data_field_id", getDataFieldTable().ensure(info.getDataField()))
.addValue("data_source_id", getDataSourceTable().ensure(info.getDataSource()))
.addValue("data_provider_id", getDataProviderTable().ensure(info.getDataProvider()))
.addValue("observation_time_id", getObservationTimeTable().ensure(info.getObservationTime()));
// the arguments for inserting into the idkey tables
final List<DbMapSqlParameterSource> assocList = new ArrayList<DbMapSqlParameterSource>();
final List<DbMapSqlParameterSource> idKeyList = new ArrayList<DbMapSqlParameterSource>();
for (ExternalIdWithDates id : info.getExternalIdBundle()) {
final DbMapSqlParameterSource assocArgs = new DbMapSqlParameterSource()
.addValue("doc_id", docId)
.addValue("key_scheme", id.getExternalId().getScheme().getName())
.addValue("key_value", id.getExternalId().getValue())
.addValue("valid_from", DbDateUtils.toSqlDateNullFarPast(id.getValidFrom()))
.addValue("valid_to", DbDateUtils.toSqlDateNullFarFuture(id.getValidTo()));
assocList.add(assocArgs);
if (getJdbcTemplate().queryForList(sqlSelectIdKey(), assocArgs).isEmpty()) {
// select avoids creating an unnecessary id, but the id may still not be used
final long idKeyId = nextId("hts_idkey_seq");
final DbMapSqlParameterSource idkeyArgs = new DbMapSqlParameterSource()
.addValue("idkey_id", idKeyId)
.addValue("key_scheme", id.getExternalId().getScheme().getName())
.addValue("key_value", id.getExternalId().getValue());
idKeyList.add(idkeyArgs);
}
}
getJdbcTemplate().update(sqlInsertHistoricalTimeSeries(), seriesArgs);
getJdbcTemplate().batchUpdate(sqlInsertIdKey(), idKeyList.toArray(new DbMapSqlParameterSource[idKeyList.size()]));
getJdbcTemplate().batchUpdate(sqlInsertHtsIdKey(), assocList.toArray(new DbMapSqlParameterSource[assocList.size()]));
// set the uniqueId
final UniqueId uniqueId = createUniqueId(docOid, docId);
info.setUniqueId(uniqueId);
document.setUniqueId(uniqueId);
document.getInfo().setTimeSeriesObjectId(uniqueId.getObjectId().withValue(DATA_POINT_PREFIX + uniqueId.getValue()));
return document;
}
/**
* Gets the SQL for inserting a document.
*
* @return the SQL, not null
*/
protected String sqlInsertHistoricalTimeSeries() {
return "INSERT INTO hts_document " +
"(id, oid, ver_from_instant, ver_to_instant, corr_from_instant, corr_to_instant, name_id, " +
"data_field_id, data_source_id, data_provider_id, observation_time_id) " +
"VALUES " +
"(:doc_id, :doc_oid, :ver_from_instant, :ver_to_instant, :corr_from_instant, :corr_to_instant, :name_id, " +
":data_field_id, :data_source_id, :data_provider_id, :observation_time_id)";
}
/**
* Gets the SQL for inserting an hts-idkey association.
*
* @return the SQL, not null
*/
protected String sqlInsertHtsIdKey() {
return "INSERT INTO hts_doc2idkey " +
"(doc_id, idkey_id, valid_from, valid_to) " +
"VALUES " +
"(:doc_id, (" + sqlSelectIdKey() + "), :valid_from, :valid_to)";
}
/**
* Gets the SQL for selecting an idkey.
*
* @return the SQL, not null
*/
protected String sqlSelectIdKey() {
return "SELECT id FROM hts_idkey WHERE key_scheme = :key_scheme AND key_value = :key_value";
}
/**
* Gets the SQL for inserting an idkey.
*
* @return the SQL, not null
*/
protected String sqlInsertIdKey() {
return "INSERT INTO hts_idkey (id, key_scheme, key_value) " +
"VALUES (:idkey_id, :key_scheme, :key_value)";
}
@Override
public ManageableHistoricalTimeSeries getTimeSeries(
UniqueId uniqueId, LocalDate fromDateInclusive, LocalDate toDateInclusive) {
ArgumentChecker.notNull(uniqueId, "uniqueId");
checkScheme(uniqueId);
final VersionCorrection vc;
if (uniqueId.isVersioned() && uniqueId.getValue().startsWith(DATA_POINT_PREFIX)) {
vc = extractTimeSeriesInstants(uniqueId);
} else {
vc = VersionCorrection.LATEST;
}
return getTimeSeries(uniqueId, vc, fromDateInclusive, toDateInclusive);
}
@Override
public ManageableHistoricalTimeSeries getTimeSeries(
ObjectIdentifiable objectId, VersionCorrection versionCorrection, LocalDate fromDateInclusive, LocalDate toDateInclusive) {
final long oid = extractOid(objectId);
final VersionCorrection vc = versionCorrection.withLatestFixed(now());
final DbMapSqlParameterSource args = new DbMapSqlParameterSource()
.addValue("doc_oid", oid)
.addTimestamp("version_as_of_instant", vc.getVersionAsOf())
.addTimestamp("corrected_to_instant", vc.getCorrectedTo())
.addValue("start_date", DbDateUtils.toSqlDateNullFarPast(fromDateInclusive))
.addValue("end_date", DbDateUtils.toSqlDateNullFarFuture(toDateInclusive));
final NamedParameterJdbcOperations namedJdbc = getDbSource().getJdbcTemplate().getNamedParameterJdbcOperations();
ManageableHistoricalTimeSeries result = namedJdbc.query(sqlSelectDataPointsCommon(), args, new ManageableHTSExtractor(oid));
if (result == null) {
throw new DataNotFoundException("Unable to find time-series: " + objectId);
}
LocalDateDoubleTimeSeries series = namedJdbc.query(sqlSelectDataPoints(), args, new DataPointsExtractor());
result.setTimeSeries(series);
return result;
}
/**
* Gets the SQL to load the data point common info.
*
* @return the SQL, not null
*/
protected String sqlSelectDataPointsCommon() {
// find latest version-correction before query instants and min/max dates
String selectCommon =
"SELECT doc_oid, MAX(ver_instant) AS max_ver_instant, MAX(corr_instant) AS max_corr_instant, " +
"MAX(point_date) AS max_point_date, MIN(point_date) AS min_point_date " +
"FROM hts_point " +
"WHERE doc_oid = :doc_oid " +
"AND ver_instant <= :version_as_of_instant " +
"AND corr_instant <= :corrected_to_instant " +
"GROUP BY doc_oid ";
// select document table to handle empty set of points and to handle removal
String selectMain = "SELECT main.ver_from_instant AS ver_from_instant, main.corr_from_instant AS corr_from_instant, common.* " +
"FROM hts_document main " +
"LEFT JOIN (" + selectCommon + ") common ON main.oid = common.doc_oid " +
"WHERE main.oid = :doc_oid " +
"AND main.ver_from_instant <= :version_as_of_instant AND main.ver_to_instant > :version_as_of_instant " +
"AND main.corr_from_instant <= :corrected_to_instant AND main.corr_to_instant > :corrected_to_instant ";
return selectMain;
}
/**
* Gets the SQL to load the data points.
*
* @return the SQL, not null
*/
protected String sqlSelectDataPoints() {
String selectPoints =
"SELECT point_date, point_value " +
"FROM hts_point " +
"WHERE doc_oid = :doc_oid " +
"AND ver_instant <= :version_as_of_instant " +
"AND corr_instant <= :corrected_to_instant " +
"AND point_date >= :start_date " +
"AND point_date <= :end_date " +
"ORDER BY point_date, corr_instant DESC ";
return selectPoints;
}
@Override
public UniqueId updateTimeSeriesDataPoints(final ObjectIdentifiable objectId, final LocalDateDoubleTimeSeries series) {
ArgumentChecker.notNull(objectId, "objectId");
ArgumentChecker.notNull(series, "series");
s_logger.debug("add time-series data points to {}", objectId);
// retry to handle concurrent conflicts
for (int retry = 0; true; retry++) {
final UniqueId uniqueId = resolveObjectId(objectId, VersionCorrection.LATEST);
if (series.isEmpty()) {
return uniqueId;
}
try {
final Instant now = now();
UniqueId resultId = getTransactionTemplate().execute(new TransactionCallback<UniqueId>() {
@Override
public UniqueId doInTransaction(final TransactionStatus status) {
insertDataPointsCheckMaxDate(uniqueId, series);
return insertDataPoints(uniqueId, series, now);
}
});
changeManager().entityChanged(ChangeType.UPDATED, uniqueId, resultId, now);
return resultId;
} catch (DataIntegrityViolationException ex) {
if (retry == getMaxRetries()) {
throw ex;
}
} catch (DataAccessException ex) {
throw fixSQLExceptionCause(ex);
}
}
}
/**
* Checks that the data points can be inserted (the series must not start before the latest stored point).
*
* @param uniqueId the unique identifier, not null
* @param series the time-series data points, not empty, not null
*/
protected void insertDataPointsCheckMaxDate(final UniqueId uniqueId, final LocalDateDoubleTimeSeries series) {
final Long docOid = extractOid(uniqueId);
final VersionCorrection vc = extractTimeSeriesInstants(uniqueId);
final DbMapSqlParameterSource queryArgs = new DbMapSqlParameterSource()
.addValue("doc_oid", docOid)
.addTimestamp("ver_instant", vc.getVersionAsOf())
.addTimestamp("corr_instant", vc.getCorrectedTo());
Date result = getDbSource().getJdbcTemplate().queryForObject(sqlSelectMaxPointDate(), Date.class, queryArgs);
if (result != null) {
LocalDate maxDate = DbDateUtils.fromSqlDateAllowNull(result);
if (series.getTimeAt(0).isBefore(maxDate)) {
throw new IllegalArgumentException("Unable to add time-series as it starts before the latest in the database");
}
}
}
/**
* Inserts the data points.
*
* @param uniqueId the unique identifier, not null
* @param series the time-series data points, not empty, not null
* @param now the current instant, not null
* @return the unique identifier, not null
*/
protected UniqueId insertDataPoints(final UniqueId uniqueId, final LocalDateDoubleTimeSeries series, final Instant now) {
final Long docOid = extractOid(uniqueId);
final Timestamp nowTS = DbDateUtils.toSqlTimestamp(now);
final List<DbMapSqlParameterSource> argsList = new ArrayList<DbMapSqlParameterSource>();
for (Entry<LocalDate, Double> entry : series) {
LocalDate date = entry.getKey();
Double value = entry.getValue();
if (date == null || value == null) {
throw new IllegalArgumentException("Time-series must not contain a null value");
}
final DbMapSqlParameterSource args = new DbMapSqlParameterSource()
.addValue("doc_oid", docOid)
.addDate("point_date", date)
.addValue("ver_instant", nowTS)
.addValue("corr_instant", nowTS)
.addValue("point_value", value);
argsList.add(args);
}
getJdbcTemplate().batchUpdate(sqlInsertDataPoint(), argsList.toArray(new DbMapSqlParameterSource[argsList.size()]));
return createTimeSeriesUniqueId(docOid, now, now);
}
/**
* Gets the SQL for selecting the maximum data point date.
*
* @return the SQL, not null
*/
protected String sqlSelectMaxPointDate() {
return
"SELECT MAX(point_date) AS max_point_date " +
"FROM hts_point " +
"WHERE doc_oid = :doc_oid " +
"AND ver_instant <= :ver_instant " +
"AND corr_instant <= :corr_instant ";
}
/**
* Gets the SQL for inserting a data point.
*
* @return the SQL, not null
*/
protected String sqlInsertDataPoint() {
return
"INSERT INTO hts_point " +
"(doc_oid, point_date, ver_instant, corr_instant, point_value) " +
"VALUES " +
"(:doc_oid, :point_date, :ver_instant, :corr_instant, :point_value)";
}
@Override
public UniqueId correctTimeSeriesDataPoints(final ObjectIdentifiable objectId, final LocalDateDoubleTimeSeries series) {
ArgumentChecker.notNull(objectId, "objectId");
ArgumentChecker.notNull(series, "series");
s_logger.debug("add time-series data points to {}", objectId);
// retry to handle concurrent conflicts
for (int retry = 0; true; retry++) {
final UniqueId uniqueId = resolveObjectId(objectId, VersionCorrection.LATEST);
if (series.isEmpty()) {
return uniqueId;
}
try {
final Instant now = now();
UniqueId resultId = getTransactionTemplate().execute(new TransactionCallback<UniqueId>() {
@Override
public UniqueId doInTransaction(final TransactionStatus status) {
return correctDataPoints(uniqueId, series, now);
}
});
changeManager().entityChanged(ChangeType.CORRECTED, uniqueId, resultId, now);
return resultId;
} catch (DataIntegrityViolationException ex) {
if (retry == getMaxRetries()) {
throw ex;
}
} catch (DataAccessException ex) {
throw fixSQLExceptionCause(ex);
}
}
}
/**
* Corrects the data points.
*
* @param uniqueId the unique identifier, not null
* @param series the time-series data points, not empty, not null
* @param now the current instant, not null
* @return the unique identifier, not null
*/
protected UniqueId correctDataPoints(UniqueId uniqueId, LocalDateDoubleTimeSeries series, Instant now) {
final Long docOid = extractOid(uniqueId);
final Timestamp nowTS = DbDateUtils.toSqlTimestamp(now);
final List<DbMapSqlParameterSource> argsList = new ArrayList<DbMapSqlParameterSource>();
for (Entry<LocalDate, Double> entry : series) {
LocalDate date = entry.getKey();
Double value = entry.getValue();
if (date == null || value == null) {
throw new IllegalArgumentException("Time-series must not contain a null value");
}
final DbMapSqlParameterSource args = new DbMapSqlParameterSource()
.addValue("doc_oid", docOid)
.addDate("point_date", date)
.addValue("corr_instant", nowTS)
.addValue("point_value", value);
argsList.add(args);
}
getJdbcTemplate().batchUpdate(sqlInsertCorrectDataPoints(), argsList.toArray(new DbMapSqlParameterSource[argsList.size()]));
return resolveObjectId(uniqueId, VersionCorrection.of(now, now));
}
/**
* Gets the SQL for inserting corrected data points.
*
* @return the SQL, not null
*/
protected String sqlInsertCorrectDataPoints() {
return
"INSERT INTO hts_point " +
"(doc_oid, point_date, ver_instant, corr_instant, point_value) " +
"VALUES " +
"(:doc_oid, :point_date, " +
getDbHelper().sqlNullDefault("(SELECT ver_instant FROM hts_point " +
"WHERE doc_oid = :doc_oid AND point_date = :point_date AND ver_instant = corr_instant)", ":corr_instant") + ", " +
":corr_instant, :point_value)";
}
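// Note (illustrative): corrections are appended as new rows rather than updates. The nested select
// keeps the ver_instant of the original point for that date (the row where ver_instant equals
// corr_instant) when one exists, otherwise it falls back to the correction instant. Removal reuses
// this SQL with a null point_value (see removeDataPoints below).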
@Override
public UniqueId removeTimeSeriesDataPoints(final ObjectIdentifiable objectId, final LocalDate fromDateInclusive, final LocalDate toDateInclusive) {
ArgumentChecker.notNull(objectId, "objectId");
if (fromDateInclusive != null && toDateInclusive != null) {
ArgumentChecker.inOrderOrEqual(fromDateInclusive, toDateInclusive, "fromDateInclusive", "toDateInclusive");
}
s_logger.debug("removing time-series data points from {}", objectId);
// retry to handle concurrent conflicts
for (int retry = 0; true; retry++) {
final UniqueId uniqueId = resolveObjectId(objectId, VersionCorrection.LATEST);
try {
final Instant now = now();
UniqueId resultId = getTransactionTemplate().execute(new TransactionCallback<UniqueId>() {
@Override
public UniqueId doInTransaction(final TransactionStatus status) {
return removeDataPoints(uniqueId, fromDateInclusive, toDateInclusive, now);
}
});
changeManager().entityChanged(ChangeType.UPDATED, uniqueId, resultId, now);
return resultId;
} catch (DataIntegrityViolationException ex) {
if (retry == getMaxRetries()) {
throw ex;
}
} catch (DataAccessException ex) {
throw fixSQLExceptionCause(ex);
}
}
}
/**
* Removes data points.
*
* @param uniqueId the unique identifier, not null
* @param fromDateInclusive the start date to remove from, null for far past
* @param toDateInclusive the end date to remove to, null for far future
* @param now the current instant, not null
* @return the unique identifier, not null
*/
protected UniqueId removeDataPoints(UniqueId uniqueId, LocalDate fromDateInclusive, LocalDate toDateInclusive, Instant now) {
final Long docOid = extractOid(uniqueId);
// query dates to remove
final DbMapSqlParameterSource queryArgs = new DbMapSqlParameterSource()
.addValue("doc_oid", docOid)
.addValue("start_date", DbDateUtils.toSqlDateNullFarPast(fromDateInclusive))
.addValue("end_date", DbDateUtils.toSqlDateNullFarFuture(toDateInclusive));
List<Map<String, Object>> dates = getJdbcTemplate().queryForList(sqlSelectRemoveDataPoints(), queryArgs);
// insert new rows to remove them
final Timestamp nowTS = DbDateUtils.toSqlTimestamp(now);
final List<DbMapSqlParameterSource> argsList = new ArrayList<DbMapSqlParameterSource>();
for (Map<String, Object> date : dates) {
final DbMapSqlParameterSource args = new DbMapSqlParameterSource()
.addValue("doc_oid", docOid)
.addValue("point_date", date.get("POINT_DATE"))
.addValue("corr_instant", nowTS);
args.addValue("point_value", null, Types.DOUBLE);
argsList.add(args);
}
getJdbcTemplate().batchUpdate(sqlInsertCorrectDataPoints(), argsList.toArray(new DbMapSqlParameterSource[argsList.size()]));
return resolveObjectId(uniqueId, VersionCorrection.of(now, now));
}
/**
* Gets the SQL for selecting data points to be removed.
*
* @return the SQL, not null
*/
protected String sqlSelectRemoveDataPoints() {
String select =
"SELECT DISTINCT point_date " +
"FROM hts_point " +
"WHERE doc_oid = :doc_oid " +
"AND point_date >= :start_date " +
"AND point_date <= :end_date ";
return select;
}
/**
* Creates a unique identifier.
*
* @param oid the object identifier
* @param verInstant the version instant, not null
* @param corrInstant the correction instant, not null
* @return the unique identifier
*/
protected UniqueId createTimeSeriesUniqueId(long oid, Instant verInstant, Instant corrInstant) {
String oidStr = DATA_POINT_PREFIX + oid;
Duration dur = Duration.between(verInstant, corrInstant);
String verStr = verInstant.toString() + dur.toString();
return UniqueId.of(getUniqueIdScheme(), oidStr, verStr);
}
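// Example (illustrative; the exact rendering depends on the time library in use): for a version
// instant of 2013-01-01T00:00:00Z and a correction instant one hour later, the version string is
// "2013-01-01T00:00:00ZPT1H". extractTimeSeriesInstants() below splits at the first 'P' and adds
// the parsed Duration back onto the parsed version instant to recover both values.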
/**
* Extracts the object row id from the object identifier.
*
* @param objectId the object identifier, not null
* @return the object row id
*/
@Override
protected long extractOid(ObjectIdentifiable objectId) {
String value = objectId.getObjectId().getValue();
if (value.startsWith(DATA_POINT_PREFIX)) {
value = value.substring(DATA_POINT_PREFIX.length());
}
try {
return Long.parseLong(value);
} catch (RuntimeException ex) {
throw new IllegalArgumentException("UniqueId is not from this master (non-numeric object id): " + objectId, ex);
}
}
/**
* Extracts the instants from the unique identifier.
*
* @param uniqueId the unique identifier, not null
* @return the version-correction instants, not null
*/
protected VersionCorrection extractTimeSeriesInstants(UniqueId uniqueId) {
try {
int pos = uniqueId.getVersion().indexOf('P');
String verStr = uniqueId.getVersion().substring(0, pos);
String corrStr = uniqueId.getVersion().substring(pos);
Instant ver = OffsetDateTime.parse(verStr).toInstant();
Instant corr = ver.plus(Duration.parse(corrStr));
return VersionCorrection.of(ver, corr);
} catch (RuntimeException ex) {
throw new IllegalArgumentException("UniqueId is not from this master (invalid version): " + uniqueId, ex);
}
}
@Override
protected long extractRowId(UniqueId uniqueId) {
int pos = uniqueId.getVersion().indexOf('P');
if (pos < 0) {
return super.extractRowId(uniqueId);
}
VersionCorrection vc = extractTimeSeriesInstants(uniqueId);
HistoricalTimeSeriesInfoDocument doc = get(uniqueId.getObjectId(), vc); // not very efficient, but works
return super.extractRowId(doc.getUniqueId());
}
/**
* Resolves an object identifier to a unique identifier.
*
* @param objectId the time-series object identifier, not null
* @param versionCorrection the version-correction locator to search at, not null
* @return the unique identifier of the time-series, not null
*/
protected UniqueId resolveObjectId(ObjectIdentifiable objectId, VersionCorrection versionCorrection) {
ArgumentChecker.notNull(objectId, "objectId");
ArgumentChecker.notNull(versionCorrection, "versionCorrection");
checkScheme(objectId);
final long oid = extractOid(objectId);
versionCorrection = versionCorrection.withLatestFixed(now());
final DbMapSqlParameterSource args = new DbMapSqlParameterSource()
.addValue("doc_oid", oid)
.addTimestamp("version_as_of_instant", versionCorrection.getVersionAsOf())
.addTimestamp("corrected_to_instant", versionCorrection.getCorrectedTo());
final NamedParameterJdbcOperations namedJdbc = getDbSource().getJdbcTemplate().getNamedParameterJdbcOperations();
final UniqueIdExtractor extractor = new UniqueIdExtractor(oid);
UniqueId uniqueId = namedJdbc.query(sqlSelectUniqueIdByVersionCorrection(), args, extractor);
if (uniqueId == null) {
throw new DataNotFoundException("Unable to find time-series: " + objectId.getObjectId());
}
return uniqueId;
}
/**
* Gets the SQL to select the unique identifier instants for a version-correction.
*
* @return the SQL, not null
*/
protected String sqlSelectUniqueIdByVersionCorrection() {
// find latest version-correction before query instants
String selectInstants =
"SELECT doc_oid, MAX(ver_instant) AS max_ver_instant, MAX(corr_instant) AS max_corr_instant " +
"FROM hts_point " +
"WHERE doc_oid = :doc_oid " +
"AND ver_instant <= :version_as_of_instant " +
"AND corr_instant <= :corrected_to_instant " +
"GROUP BY doc_oid ";
// select document to handle empty series and to check/use first doc instants
String select =
"SELECT main.ver_from_instant AS ver_from_instant, main.corr_from_instant AS corr_from_instant, instants.* " +
"FROM hts_document main " +
"LEFT JOIN (" + selectInstants + ") instants ON main.oid = instants.doc_oid " +
"WHERE main.oid = :doc_oid " +
"AND main.ver_from_instant <= :version_as_of_instant AND main.ver_to_instant > :version_as_of_instant " +
"AND main.corr_from_instant <= :corrected_to_instant AND main.corr_to_instant > :corrected_to_instant ";
return select;
}
@Override
protected String sqlSelectFrom() {
return SELECT + FROM;
}
@Override
protected String mainTableName() {
return "hts_document";
}
/**
* Mapper from SQL rows to a HistoricalTimeSeriesInfoDocument.
*/
protected final class HistoricalTimeSeriesDocumentExtractor implements ResultSetExtractor<List<HistoricalTimeSeriesInfoDocument>> {
private long _lastDocId = -1;
private ManageableHistoricalTimeSeriesInfo _info;
private List<HistoricalTimeSeriesInfoDocument> _documents = new ArrayList<HistoricalTimeSeriesInfoDocument>();
@Override
public List<HistoricalTimeSeriesInfoDocument> extractData(final ResultSet rs) throws SQLException, DataAccessException {
while (rs.next()) {
final long docId = rs.getLong("DOC_ID");
if (_lastDocId != docId) {
_lastDocId = docId;
buildHistoricalTimeSeries(rs, docId);
}
final String idScheme = rs.getString("KEY_SCHEME");
final String idValue = rs.getString("KEY_VALUE");
final LocalDate validFrom = DbDateUtils.fromSqlDateNullFarPast(rs.getDate("KEY_VALID_FROM"));
final LocalDate validTo = DbDateUtils.fromSqlDateNullFarFuture(rs.getDate("KEY_VALID_TO"));
if (idScheme != null && idValue != null) {
ExternalIdWithDates id = ExternalIdWithDates.of(ExternalId.of(idScheme, idValue), validFrom, validTo);
_info.setExternalIdBundle(_info.getExternalIdBundle().withExternalId(id));
}
}
return _documents;
}
private void buildHistoricalTimeSeries(final ResultSet rs, final long docId) throws SQLException {
final long docOid = rs.getLong("DOC_OID");
final Timestamp versionFrom = rs.getTimestamp("VER_FROM_INSTANT");
final Timestamp versionTo = rs.getTimestamp("VER_TO_INSTANT");
final Timestamp correctionFrom = rs.getTimestamp("CORR_FROM_INSTANT");
final Timestamp correctionTo = rs.getTimestamp("CORR_TO_INSTANT");
final String name = rs.getString("NAME");
final String dataField = rs.getString("DATA_FIELD");
final String dataSource = rs.getString("DATA_SOURCE");
final String dataProvider = rs.getString("DATA_PROVIDER");
final String observationTime = rs.getString("OBSERVATION_TIME");
UniqueId uniqueId = createUniqueId(docOid, docId);
_info = new ManageableHistoricalTimeSeriesInfo();
_info.setUniqueId(uniqueId);
_info.setName(name);
_info.setDataField(dataField);
_info.setDataSource(dataSource);
_info.setDataProvider(dataProvider);
_info.setObservationTime(observationTime);
_info.setExternalIdBundle(ExternalIdBundleWithDates.EMPTY);
_info.setTimeSeriesObjectId(uniqueId.getObjectId().withValue(DATA_POINT_PREFIX + uniqueId.getValue()));
HistoricalTimeSeriesInfoDocument doc = new HistoricalTimeSeriesInfoDocument(_info);
doc.setVersionFromInstant(DbDateUtils.fromSqlTimestamp(versionFrom));
doc.setVersionToInstant(DbDateUtils.fromSqlTimestampNullFarFuture(versionTo));
doc.setCorrectionFromInstant(DbDateUtils.fromSqlTimestamp(correctionFrom));
doc.setCorrectionToInstant(DbDateUtils.fromSqlTimestampNullFarFuture(correctionTo));
_documents.add(doc);
}
}
/**
* Mapper from SQL rows to a LocalDateDoubleTimeSeries.
*/
protected final class DataPointsExtractor implements ResultSetExtractor<LocalDateDoubleTimeSeries> {
@Override
public LocalDateDoubleTimeSeries extractData(final ResultSet rs) throws SQLException, DataAccessException {
final List<LocalDate> dates = new ArrayList<LocalDate>(256);
final List<Double> values = new ArrayList<Double>(256);
LocalDate last = null;
while (rs.next()) {
LocalDate date = DbDateUtils.fromSqlDateAllowNull(rs.getDate("POINT_DATE"));
if (!date.equals(last)) {
last = date;
Double value = (Double) rs.getObject("POINT_VALUE");
if (value != null) {
dates.add(date);
values.add(value);
}
}
}
return new ArrayLocalDateDoubleTimeSeries(dates, values);
}
}
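// Note (illustrative): sqlSelectDataPoints() orders by point_date and corr_instant DESC, so the
// first row seen for each date is the most recent correction; older rows for the same date are
// skipped via the 'last' check, and a null POINT_VALUE (a removed point) produces no entry.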
/**
* Mapper from SQL rows to a UniqueId.
*/
protected final class UniqueIdExtractor implements ResultSetExtractor<UniqueId> {
private final long _objectId;
public UniqueIdExtractor(final long objectId) {
_objectId = objectId;
}
@Override
public UniqueId extractData(final ResultSet rs) throws SQLException, DataAccessException {
while (rs.next()) {
Timestamp ver = rs.getTimestamp("max_ver_instant");
Timestamp corr = rs.getTimestamp("max_corr_instant");
if (ver == null) {
ver = rs.getTimestamp("ver_from_instant");
corr = rs.getTimestamp("corr_from_instant");
}
Instant verInstant = DbDateUtils.fromSqlTimestamp(ver);
Instant corrInstant = (corr != null ? DbDateUtils.fromSqlTimestamp(corr) : verInstant);
return createTimeSeriesUniqueId(_objectId, verInstant, corrInstant);
}
return null;
}
}
/**
* Mapper from SQL rows to a ManageableHistoricalTimeSeries.
*/
protected final class ManageableHTSExtractor implements ResultSetExtractor<ManageableHistoricalTimeSeries> {
private final long _objectId;
public ManageableHTSExtractor(final long objectId) {
_objectId = objectId;
}
@Override
public ManageableHistoricalTimeSeries extractData(final ResultSet rs) throws SQLException, DataAccessException {
while (rs.next()) {
Timestamp ver = rs.getTimestamp("max_ver_instant");
Timestamp corr = rs.getTimestamp("max_corr_instant");
if (ver == null) {
ver = rs.getTimestamp("ver_from_instant");
corr = rs.getTimestamp("corr_from_instant");
}
Instant verInstant = DbDateUtils.fromSqlTimestamp(ver);
Instant corrInstant = (corr != null ? DbDateUtils.fromSqlTimestamp(corr) : verInstant);
ManageableHistoricalTimeSeries hts = new ManageableHistoricalTimeSeries();
hts.setUniqueId(createTimeSeriesUniqueId(_objectId, verInstant, corrInstant));
hts.setVersionInstant(verInstant);
hts.setCorrectionInstant(corrInstant);
hts.setEarliest(DbDateUtils.fromSqlDateAllowNull(rs.getDate("min_point_date")));
hts.setLatest(DbDateUtils.fromSqlDateAllowNull(rs.getDate("max_point_date")));
return hts;
}
return null;
}
}
}
|
package no.javazone.ems;
import net.hamnaberg.json.Collection;
import net.hamnaberg.json.Item;
import net.hamnaberg.json.Link;
import net.hamnaberg.json.parser.CollectionParser;
import javax.ws.rs.client.Client;
import javax.ws.rs.client.ClientBuilder;
import javax.ws.rs.client.WebTarget;
import java.io.IOException;
import java.util.*;
import java.util.stream.Collectors;
public class EmsAdapter {
private final WebTarget emsWebTarget;
public EmsAdapter(String emsHost) {
Client client = ClientBuilder.newClient();
emsWebTarget = client.target("http://" + emsHost);
}
public List<Session> getSessions(String eventId) {
WebTarget webTarget = emsWebTarget
.path("/ems/server/events")
.path(eventId)
.path("sessions");
String response = webTarget.request().buildGet().invoke(String.class);
try {
Collection collection = new CollectionParser().parse(response);
return mapToForedragsliste(collection);
} catch (IOException e) {
return Collections.emptyList();
}
}
private List<Session> mapToForedragsliste(Collection collection) throws IOException {
return collection
.getItems()
.stream()
.map(EmsAdapter::mapItemTilForedrag)
.collect(Collectors.toList());
}
private static Session mapItemTilForedrag(Item item) {
return new Session(
mapItemProperty(item, "title"),
getForedragsholdere(item.linkByRel("speaker collection")));
}
private static List<Foredragsholder> getForedragsholdere(Optional<Link> link) {
if (link.isPresent()) {
WebTarget webTarget = ClientBuilder.newClient()
.target(link.get().getHref());
String response = webTarget.request().buildGet().invoke(String.class);
try {
Collection collection = new CollectionParser().parse(response);
return collection
.getItems()
.stream()
.map(EmsAdapter::mapItemTilForedragsholder)
.collect(Collectors.toList());
} catch (IOException e) {
throw new RuntimeException("Finner ikke speakers");
}
} else {
throw new RuntimeException("Speakerlink finnes ikke");
}
}
private static Foredragsholder mapItemTilForedragsholder(Item item) {
return new Foredragsholder(
mapItemProperty(item, "name"),
mapItemProperty(item, "bio"));
}
private static String mapItemProperty(Item item, String property) {
return item.propertyByName(property).get().getValue().get().asString();
}
}
|
package tld.testmod.client.gui;
import net.minecraft.client.Minecraft;
import net.minecraft.client.gui.GuiButton;
import net.minecraft.client.gui.inventory.GuiContainer;
import net.minecraft.client.renderer.Tessellator;
import net.minecraft.inventory.Container;
import org.lwjgl.input.Keyboard;
import org.lwjgl.input.Mouse;
import tld.testmod.client.gui.util.GuiScrollingListOf;
import tld.testmod.common.storage.models.User;
import javax.annotation.Nonnull;
import java.io.IOException;
import java.util.UUID;
public class GuiDB extends GuiContainer
{
private final GuiScrollingListOf<User> userGuiScrollingList;
public GuiDB(Container container)
{
super(container);
this.mc = Minecraft.getMinecraft();
this.fontRenderer = mc.fontRenderer;
xSize = width;
ySize = height;
Keyboard.enableRepeatEvents(true);
userGuiScrollingList = new GuiScrollingListOf<User>(this)
{
@Override
protected void drawSlot(int slotIdx, int entryRight, int slotTop, int slotBuffer, Tessellator tess)
{
if (!isEmpty() && slotIdx >= 0 && slotIdx < size() && get(slotIdx) != null)
{
User user = get(slotIdx);
String trimmedName = fontRenderer.trimStringToWidth(user.userName, listWidth - 10);
fontRenderer.drawStringWithShadow(trimmedName, (float) left + 3, slotTop, 0xADD8E6);
}
}
@Override
protected void selectedClickedCallback(int selectedIndex)
{
}
@Override
protected void selectedDoubleClickedCallback(int selectedIndex)
{
}
};
}
@Override
public void initGui()
{
super.initGui();
buttonList.clear();
this.guiLeft = 0;
this.guiTop = 0;
int guiListWidth = (width - 15) * 3 / 4;
// User list layout
int entryHeight = mc.fontRenderer.FONT_HEIGHT + 2;
int left = 5;
int titleTop = 20;
int listTop = titleTop + 25;
int listHeight = height - titleTop - entryHeight - 2 - 10 - 25 - 25;
int listBottom = listTop + listHeight;
int statusTop = listBottom + 4;
int userListWidth = (width - 15) / 4;
userGuiScrollingList.setLayout(entryHeight, userListWidth, listHeight, listTop, listBottom, left);
User user01 = new User();
user01.userName = "OneWill";
user01.uid = UUID.randomUUID();
User user02 = new User();
user02.userName = "TwoCan";
user02.uid = UUID.randomUUID();
userGuiScrollingList.add(user01);
userGuiScrollingList.add(user02);
}
@Override
protected void actionPerformed(GuiButton button) throws IOException
{
if (button.enabled)
{
switch (button.id)
{
case 0:
case 1:
case 2:
break;
default:
}
}
// updateState();
super.actionPerformed(button);
}
@Override
public void updateScreen()
{
super.updateScreen();
}
@Override
public boolean doesGuiPauseGame()
{
return false;
}
@Override
public void drawScreen(int mouseX, int mouseY, float partialTicks)
{
drawDefaultBackground();
userGuiScrollingList.drawScreen(mouseX, mouseY, partialTicks);
super.drawScreen(mouseX, mouseY, partialTicks);
}
@Override
protected void drawGuiContainerForegroundLayer(int mouseX, int mouseY)
{
super.drawGuiContainerForegroundLayer(mouseX, mouseY);
}
@Override
protected void drawGuiContainerBackgroundLayer(float v, int i, int i1)
{
}
@Override
protected void keyTyped(char typedChar, int keyCode) throws IOException
{
userGuiScrollingList.keyTyped(typedChar, keyCode);
// updateState();
super.keyTyped(typedChar, keyCode);
}
@Override
public void onResize(@Nonnull Minecraft mcIn, int w, int h)
{
//updateState();
super.onResize(mcIn, w, h);
}
@Override
public void handleMouseInput() throws IOException
{
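// Convert the raw LWJGL event coordinates (display pixels, Y origin at the bottom) to scaled GUI
// coordinates (Y origin at the top) before forwarding them to the scrolling list.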
int mouseX = Mouse.getEventX() * width / mc.displayWidth;
int mouseY = height - Mouse.getEventY() * height / mc.displayHeight - 1;
userGuiScrollingList.handleMouseInput(mouseX, mouseY);
super.handleMouseInput();
}
@Override
protected void mouseClicked(int mouseX, int mouseY, int mouseButton) throws IOException
{
//search.mouseClicked(mouseX, mouseY, mouseButton);
//clearOnMouseLeftClicked(search, mouseX, mouseY, mouseButton);
super.mouseClicked(mouseX, mouseY, mouseButton);
//updateState();
}
}
|
package org.basex.index.ft;
import static org.basex.core.Text.*;
import static org.basex.data.DataText.*;
import static org.basex.util.Token.*;
import static org.basex.util.ft.FTFlag.*;
import java.io.IOException;
import org.basex.core.Prop;
import org.basex.data.Data;
import org.basex.data.DataText;
import org.basex.index.IndexIterator;
import org.basex.index.IndexStats;
import org.basex.index.IndexToken;
import org.basex.io.random.DataAccess;
import org.basex.util.Levenshtein;
import org.basex.util.Num;
import org.basex.util.Performance;
import org.basex.util.TokenBuilder;
import org.basex.util.Util;
import org.basex.util.ft.FTLexer;
import org.basex.util.hash.TokenIntMap;
final class FTFuzzy extends FTIndex {
/** Entry size. */
private static final int ENTRY = 9;
/** Token positions. */
private final int[] tp;
/** Levenshtein reference. */
private final Levenshtein ls = new Levenshtein();
/** Index storing each unique token length and a pointer
* to the first token of that length. */
private final DataAccess inX;
/** Index storing each token, its data size and pointer on the data. */
private final DataAccess inY;
/** Storing pre and pos values for each token. */
private final DataAccess inZ;
/**
* Constructor, initializing the index structure.
* @param d data reference
* @throws IOException I/O Exception
*/
protected FTFuzzy(final Data d) throws IOException {
super(d);
// cache token length index
inY = new DataAccess(d.meta.dbfile(DATAFTX + 'y'));
inZ = new DataAccess(d.meta.dbfile(DATAFTX + 'z'));
inX = new DataAccess(d.meta.dbfile(DATAFTX + 'x'));
tp = new int[d.meta.maxlen + 3];
for(int i = 0; i < tp.length; ++i) tp[i] = -1;
int is = inX.readNum();
while(--is >= 0) {
int p = inX.readNum();
final int r;
// legacy issue (7.0.2 -> 7.1)
if(p >= 4096) {
r = p << 24 | (inX.read1() & 0xFF) << 16 |
(inX.read1() & 0xFF) << 8 | inX.read1() & 0xFF;
p = p >> 8 | 0x40;
} else {
r = inX.read4();
}
tp[p] = r;
}
tp[tp.length - 1] = (int) inY.length();
}
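// Layout note (illustrative, inferred from the reads in this class): entries in inY are grouped by
// token length s, and each entry occupies s + ENTRY bytes: the token bytes, a 5-byte pointer into
// inZ and a 4-byte size. tp[s] holds the offset of the first entry of length s (-1 if absent) and
// the final tp slot holds the total length of inY, which is why scans advance with "p += s + ENTRY".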
@Override
public synchronized int count(final IndexToken ind) {
if(ind.get().length > data.meta.maxlen) return Integer.MAX_VALUE;
// estimate costs for queries which stretch over multiple index entries
final FTLexer lex = (FTLexer) ind;
if(lex.ftOpt().is(FZ)) return Math.max(1, data.meta.size / 10);
final byte[] tok = lex.get();
final int id = cache.id(tok);
if(id > 0) return cache.size(id);
int s = 0;
long poi = 0;
final long p = token(tok);
if(p > -1) {
s = size(p, tok.length);
poi = pointer(p, tok.length);
}
cache.add(tok, s, poi);
return s;
}
@Override
public synchronized IndexIterator iter(final IndexToken ind) {
final byte[] tok = ind.get();
// support fuzzy search
if(((FTLexer) ind).ftOpt().is(FZ)) {
int k = data.meta.prop.num(Prop.LSERROR);
if(k == 0) k = tok.length >> 2;
return fuzzy(tok, k, false);
}
// return cached or new result
final int id = cache.id(tok);
if(id == 0) {
final int p = token(tok);
return p > -1 ? iter(pointer(p, tok.length),
size(p, tok.length), inZ, false) : FTIndexIterator.FTEMPTY;
}
return iter(cache.pointer(id), cache.size(id), inZ, false);
}
@Override
public TokenIntMap entries(final byte[] prefix) {
final TokenIntMap tim = new TokenIntMap();
for(int s = prefix.length; s < tp.length - 1; s++) {
int p = tp[s];
if(p == -1) continue;
int i = s + 1;
int r = -1;
do r = tp[i++]; while(r == -1);
inY.cursor(p);
boolean f = false;
while(p < r) {
final byte[] tok = inY.readBytes(s);
final long poi = inY.read5();
final int size = inY.read4();
cache.add(tok, size, poi);
if(startsWith(tok, prefix)) {
tim.add(tok, size);
f = true;
} else if(f) {
break;
}
p += s + ENTRY;
}
}
return tim;
}
@Override
public synchronized byte[] info() {
final TokenBuilder tb = new TokenBuilder();
tb.add(INDEXSTRUC + FUZZYSTRUC + NL);
tb.addExt("- %: %" + NL, CREATEST, Util.flag(data.meta.stemming));
tb.addExt("- %: %" + NL, CREATECS, Util.flag(data.meta.casesens));
tb.addExt("- %: %" + NL, CREATEDC, Util.flag(data.meta.diacritics));
if(data.meta.language != null)
tb.addExt("- %: %" + NL, CREATELN, data.meta.language);
final long l = inX.length() + inY.length() + inZ.length();
tb.add(SIZEDISK + Performance.format(l, true) + NL);
final IndexStats stats = new IndexStats(data);
addOccs(stats);
stats.print(tb);
return tb.finish();
}
@Override
public synchronized void close() throws IOException {
inX.close();
inY.close();
inZ.close();
}
/**
* Determines the pointer to a token.
* @param tok the token to look for
* @return int pointer or {@code -1} if token was not found
*/
private int token(final byte[] tok) {
final int tl = tok.length;
// left limit
int l = tp[tl];
if(l == -1) return -1;
int i = 1;
int r;
// find right limit
do r = tp[tl + i++]; while(r == -1);
final int x = r;
// binary search
final int o = tl + ENTRY;
while(l < r) {
final int m = l + (r - l >> 1) / o * o;
final int c = diff(inY.readBytes(m, tl), tok);
if(c == 0) return m;
if(c < 0) l = m + o;
else r = m - o;
}
// accept entry if pointer is inside relevant tokens
return r != x && l == r && eq(inY.readBytes(l, tl), tok) ? l : -1;
}
/**
* Collects all tokens and their sizes found in the index structure.
* @param stats statistics
*/
private void addOccs(final IndexStats stats) {
int i = 0;
while(i < tp.length && tp[i] == -1) ++i;
int p = tp[i];
int j = i + 1;
while(j < tp.length && tp[j] == -1) ++j;
while(p < tp[tp.length - 1]) {
if(stats.adding(size(p, i))) stats.add(inY.readBytes(p, i));
p += i + ENTRY;
if(p == tp[j]) {
i = j;
while(j + 1 < tp.length && tp[++j] == -1);
}
}
}
/**
* Gets the pointer to the ftdata for a token.
* @param pt pointer to the token
* @param lt length of the token
* @return int pointer on ftdata
*/
private long pointer(final long pt, final int lt) {
return inY.read5(pt + lt);
}
/**
* Reads the size of ftdata from disk.
* @param pt pointer to the token
* @param lt length of the token
* @return size of the ftdata
*/
private int size(final long pt, final int lt) {
return inY.read4(pt + lt + 5);
}
/**
* Performs a fuzzy search for a token, allowing a maximal
* number of errors {@code k}.
* @param tok the token to look for
* @param k number of errors allowed
* @param f fast evaluation
* @return iterator
*/
private IndexIterator fuzzy(final byte[] tok, final int k, final boolean f) {
FTIndexIterator it = FTIndexIterator.FTEMPTY;
final int tl = tok.length;
final int e = Math.min(tp.length - 1, tl + k);
int s = Math.max(1, tl - k) - 1;
while(++s <= e) {
int p = tp[s];
if(p == -1) continue;
int i = s + 1;
int r = -1;
while(i < tp.length && r == -1) r = tp[i++];
while(p < r) {
if(ls.similar(inY.readBytes(p, s), tok, k)) {
it = FTIndexIterator.union(
iter(pointer(p, s), size(p, s), inZ, f), it);
}
p += s + ENTRY;
}
}
return it;
}
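// Example (illustrative): for a 5-byte token and k = 1, the loop scans the entries of lengths 4, 5
// and 6, checks each stored token with the Levenshtein similarity test, and unions the iterators of
// all similar tokens into a single result.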
}
|
package trinity.visitors;
import com.google.common.collect.ImmutableList;
import org.antlr.v4.runtime.ParserRuleContext;
import trinity.customExceptions.SymbolAlreadyDefinedException;
import trinity.customExceptions.SymbolNotFoundException;
import trinity.*;
import trinity.types.*;
import java.util.ArrayList;
import java.util.List;
public class TypeVisitor extends TrinityBaseVisitor<Type> implements TrinityVisitor<Type> {
private ErrorReporter errorReporter;
private SymbolTable symbolTable;
private final Type scalar = new PrimitiveType(EnumType.SCALAR);
private final Type bool = new PrimitiveType(EnumType.BOOLEAN);
public TypeVisitor(ErrorReporter errorReporter, SymbolTable symbolTable) {
this.errorReporter = errorReporter;
this.symbolTable = symbolTable;
addstdlib();
}
private void addstdlib() {
Type numFunc = new FunctionType(scalar, ImmutableList.of(scalar));
List<String> funcs = ImmutableList.of("abs", "round", "floor", "ceil", "sin", "cos", "tan", "asin", "acos", "atan", "log", "log10", "sqrt");
try {
for (String func : funcs) {
symbolTable.enterSymbol(func, numFunc);
}
} catch (SymbolAlreadyDefinedException e) {
errorReporter.reportError("error adding standard library to symbol table");
}
}
private boolean expect(Type expected, Type actual, ParserRuleContext ctx) {
if (expected.equals(actual)) {
return true;
} else {
errorReporter.reportError("Expected type " + expected + " but got " + actual, ctx);
return false;
}
}
@Override
public Type visitConstDecl(TrinityParser.ConstDeclContext ctx) {
// Declared (expected) type:
Type LHS = ctx.type().accept(this);
// Type found in expr (RHS of declaration)
Type RHS = ctx.semiExpr().accept(this);
// Check if the two achieved types matches each other and react accordingly:
if (expect(LHS, RHS, ctx.semiExpr())) {
try {
symbolTable.enterSymbol(ctx.ID().getText(), LHS);
} catch (SymbolAlreadyDefinedException e) {
errorReporter.reportError("Symbol was already defined!", ctx.ID().getSymbol());
}
}
return null;
}
@Override
public Type visitFunctionDecl(TrinityParser.FunctionDeclContext ctx) {
Type funcType = ctx.type().accept(this);
List<String> formalParameterIds = new ArrayList<String>();
List<Type> formalParameterTypes = new ArrayList<Type>();
if (ctx.formalParameters() != null) {
for (TrinityParser.FormalParameterContext formalParameter : ctx.formalParameters().formalParameter()) {
formalParameterTypes.add(formalParameter.accept(this));
formalParameterIds.add(formalParameter.ID().getText());
}
}
FunctionType functionDecl = new FunctionType(funcType, formalParameterTypes);
try {
symbolTable.enterSymbol(ctx.ID().getText(), functionDecl);
} catch (SymbolAlreadyDefinedException e) {
errorReporter.reportError("Symbol was already defined!", ctx.ID().getSymbol());
}
symbolTable.openScope();
symbolTable.setCurrentFunction(functionDecl);
assert formalParameterIds.size() == formalParameterTypes.size();
for (int i = 0; i < formalParameterTypes.size(); i++) {
try {
symbolTable.enterSymbol(formalParameterIds.get(i), formalParameterTypes.get(i));
} catch (SymbolAlreadyDefinedException e) {
errorReporter.reportError("Formal parameter Symbol was already defined!", ctx);
}
}
ctx.block().accept(this);
symbolTable.closeScope();
return null;
}
@Override
public Type visitFunctionCall(TrinityParser.FunctionCallContext ctx) {
Type type;
try {
type = symbolTable.retrieveSymbol(ctx.ID().getText());
} catch (SymbolNotFoundException e) {
errorReporter.reportError("Symbol not defined!", ctx);
return null;
}
if (type instanceof FunctionType) {
FunctionType funcType = (FunctionType) type;
if (ctx.exprList() != null) {
List<TrinityParser.ExprContext> actualParams = ctx.exprList().expr();
if (actualParams.size() != funcType.getParameterTypes().size()) {
errorReporter.reportError(ctx.ID().getText() + " called with wrong number of parameters", ctx);
return null;
}
for (int i = 0; i < actualParams.size(); i++) {
expect(funcType.getParameterTypes().get(i), actualParams.get(i).accept(this), actualParams.get(i));
}
}
return ctx.t = funcType.getType();
} else {
errorReporter.reportError(ctx.ID().getText() + " is not a function", ctx.getStart());
return null;
}
}
@Override
public Type visitFormalParameters(TrinityParser.FormalParametersContext ctx) {
errorReporter.reportError("internal compiler error. visitFormalParameters should never be called", ctx);
return null;
}
@Override
public Type visitFormalParameter(TrinityParser.FormalParameterContext ctx) {
return ctx.type().accept(this);
}
@Override
public Type visitBlock(TrinityParser.BlockContext ctx) {
for (int i = 0; i < ctx.stmt().size(); i++) {
ctx.stmt(i).accept(this);
}
if (ctx.returnStmt() != null) {
return ctx.returnStmt().accept(this);
}
return null;
}
@Override
public Type visitReturnStmt(TrinityParser.ReturnStmtContext ctx) {
Type returnType = ctx.semiExpr().accept(this);
try {
if (!returnType.equals(symbolTable.getCurrentFunction().getType())) {
errorReporter.reportError("Incorrect return type for function", ctx.semiExpr());
}
} catch (SymbolNotFoundException e) {
errorReporter.reportError("No function to return from", ctx.semiExpr());
}
return returnType;
}
@Override
public Type visitSemiExpr(TrinityParser.SemiExprContext ctx) {
return ctx.expr().accept(this);
}
@Override
public Type visitForLoop(TrinityParser.ForLoopContext ctx) {
Type type = ctx.expr().accept(this);
Type contextType = ctx.type().accept(this);
symbolTable.openScope();
if (type instanceof MatrixType) {
if (((MatrixType) type).getRows() == 1) {
expect(scalar, contextType, ctx.type());
} else {
expect(new MatrixType(1, ((MatrixType) type).getCols()), contextType, ctx.type());
}
} else {
errorReporter.reportError("Hmm, expected a Matrix or Vector.", ctx.expr().getStart());
}
try {
symbolTable.enterSymbol(ctx.ID().getText(), contextType);
} catch (SymbolAlreadyDefinedException e) {
errorReporter.reportError("ID already exists: " + ctx.ID().getText(), ctx.ID().getSymbol());
}
ctx.block().accept(this);
symbolTable.closeScope();
return null;
}
@Override
public Type visitIfStatement(TrinityParser.IfStatementContext ctx) {
for (TrinityParser.ExprContext expCtx : ctx.expr()) {
expect(bool, expCtx.accept(this), expCtx);
}
for (TrinityParser.BlockContext blockCtx : ctx.block()) {
symbolTable.openScope();
blockCtx.accept(this);
symbolTable.closeScope();
}
return null;
}
@Override
public Type visitBlockStatement(TrinityParser.BlockStatementContext ctx) {
symbolTable.openScope();
ctx.block().accept(this);
symbolTable.closeScope();
return null;
}
@Override
public Type visitVector(TrinityParser.VectorContext ctx) {
if (ctx.range() != null) {
return ctx.range().accept(this);
} else {
List<TrinityParser.ExprContext> exprs = ctx.exprList().expr();
for (TrinityParser.ExprContext expr : exprs) {
expect(scalar, expr.accept(this), expr);
}
return new MatrixType(1, exprs.size()); // Vector
}
}
@Override
public Type visitRange(TrinityParser.RangeContext ctx) {
int from = new Integer(ctx.NUMBER(0).getText());
int to = new Integer(ctx.NUMBER(1).getText());
if (from > to) {
errorReporter.reportError("Range error, " + from + " is larger than " + to + ".", ctx.NUMBER(0).getSymbol());
}
return new MatrixType(1, to - from + 1); // vector
}
@Override
public Type visitExprList(TrinityParser.ExprListContext ctx) {
errorReporter.reportError("internal error, visitExprList", ctx);
return null;
}
@Override
public Type visitMatrix(TrinityParser.MatrixContext ctx) {
List<TrinityParser.VectorContext> vectors = ctx.vector();
int rows = vectors.size();
int cols = -1;
for (int i = 0; i < rows; i++) {
Type type = vectors.get(i).accept(this);
if (type instanceof MatrixType) {
MatrixType vectorType = (MatrixType) type;
if (cols == -1) {
cols = vectorType.getCols();
} else if (cols != vectorType.getCols()) {
errorReporter.reportError("All rows in a Matrix must be of same size.", vectors.get(i));
}
} else {
errorReporter.reportError("hmm error", ctx);
}
}
return new MatrixType(rows, cols);
}
@Override
public Type visitSingleIndexing(TrinityParser.SingleIndexingContext ctx) {
expect(scalar, ctx.expr().accept(this), ctx.expr());
Type symbol;
Type out;
try {
symbol = symbolTable.retrieveSymbol(ctx.ID().getText());
} catch (SymbolNotFoundException e) {
errorReporter.reportError("Symbol not defined!", ctx.ID().getSymbol());
return null;
}
if (symbol instanceof MatrixType) {
MatrixType matrix = (MatrixType) symbol;
if (matrix.getRows() == 1) {
out = scalar;
} else {
out = new MatrixType(1, matrix.getCols()); // vector
}
} else {
errorReporter.reportError("hmm error", ctx);
out = null;
}
ctx.t = out;
return out;
}
@Override
public Type visitDoubleIndexing(TrinityParser.DoubleIndexingContext ctx) {
Type out;
expect(scalar, ctx.expr(0).accept(this), ctx.expr(0));
expect(scalar, ctx.expr(1).accept(this), ctx.expr(1));
Type symbol = null;
try {
symbol = symbolTable.retrieveSymbol(ctx.ID().getText());
} catch (SymbolNotFoundException e) {
errorReporter.reportError("Symbol not found", ctx.ID().getSymbol());
return null;
}
if (symbol instanceof MatrixType) {
out = scalar;
} else {
errorReporter.reportError("hmm error", ctx);
return null;
}
ctx.t = out;
return out;
}
@Override
public Type visitParens(TrinityParser.ParensContext ctx) {
return ctx.t = ctx.expr().accept(this);
}
@Override
public Type visitMatrixLiteral(TrinityParser.MatrixLiteralContext ctx) {
return ctx.t = ctx.matrix().accept(this);
}
@Override
public Type visitVectorLiteral(TrinityParser.VectorLiteralContext ctx) {
return ctx.t = ctx.vector().accept(this);
}
@Override
public Type visitNumber(TrinityParser.NumberContext ctx) {
return ctx.t = scalar;
}
@Override
public Type visitTranspose(TrinityParser.TransposeContext ctx) {
Type exprT = ctx.expr().accept(this);
Type out;
if (exprT instanceof MatrixType) {
MatrixType matrixT = (MatrixType) exprT;
out = new MatrixType(matrixT.getCols(), matrixT.getRows());
} else {
errorReporter.reportError("Only Matrix and Vectors can be transposed.", ctx);
out = null;
}
ctx.t = out;
return out;
}
@Override
public Type visitRelation(TrinityParser.RelationContext ctx) {
Type op1 = ctx.expr(0).accept(this);
Type op2 = ctx.expr(1).accept(this);
expect(op1, op2, ctx);
if (op1.equals(bool) && op2.equals(bool)) {
errorReporter.reportError("Cannot compare booleans", ctx);
}
return ctx.t = bool;
}
@Override
public Type visitEquality(TrinityParser.EqualityContext ctx) {
Type op1 = ctx.expr(0).accept(this);
Type op2 = ctx.expr(1).accept(this);
expect(op1, op2, ctx);
return ctx.t = bool;
}
@Override
public Type visitBoolean(TrinityParser.BooleanContext ctx) {
return ctx.t = bool;
}
@Override
public Type visitNot(TrinityParser.NotContext ctx) {
Type exprT = ctx.expr().accept(this);
expect(bool, exprT, ctx.expr());
return ctx.t = exprT;
}
@Override
public Type visitOr(TrinityParser.OrContext ctx) {
Type op1 = ctx.expr(0).accept(this);
Type op2 = ctx.expr(1).accept(this);
expect(bool, op1, ctx.expr(0));
expect(bool, op2, ctx.expr(1));
return ctx.t = op1;
}
@Override
public Type visitAnd(TrinityParser.AndContext ctx) {
Type op1 = ctx.expr(0).accept(this);
Type op2 = ctx.expr(1).accept(this);
expect(bool, op1, ctx.expr(0));
expect(bool, op2, ctx.expr(1));
return ctx.t = op1;
}
@Override
public Type visitExponent(TrinityParser.ExponentContext ctx) {
Type op1 = ctx.expr(0).accept(this);
Type op2 = ctx.expr(1).accept(this);
if (op1.equals(bool)) {
errorReporter.reportError("Can only use exponent operator scalars and square matrices.", ctx);
} else if (op1 instanceof MatrixType) {
MatrixType matrix = (MatrixType) op1;
if (matrix.getRows() != matrix.getCols()) {
errorReporter.reportError("Can only use exponent operator scalars and square matrices.", ctx);
}
}
expect(scalar, op2, ctx.expr(1));
return ctx.t = op1;
}
@Override
public Type visitAddSubtract(TrinityParser.AddSubtractContext ctx) {
Type op1 = ctx.expr(0).accept(this);
Type op2 = ctx.expr(1).accept(this);
expect(op1, op2, ctx);
if (op1.equals(bool)) {
errorReporter.reportError("Cannot use operator +/- on boolean values.", ctx);
} else {
return ctx.t = op1;
}
return null;
}
@Override
public Type visitMultiplyDivide(TrinityParser.MultiplyDivideContext ctx) {
Type op1 = ctx.expr(0).accept(this);
Type op2 = ctx.expr(1).accept(this);
String operator = ctx.op.getText();
Type out = null;
if (operator.equals("*")) {
if (op1.equals(bool) || op2.equals(bool)) {
errorReporter.reportError("Cannot mult or div boolean", ctx);
out = null;
} else if (op1.equals(scalar)) {
out = op2;
}
// op1 is a Matrix or Vector
else if (op1 instanceof MatrixType) {
if (op2.equals(scalar)) {
out = op1;
} else if (op2 instanceof MatrixType) {
MatrixType matrix1 = (MatrixType) op1;
MatrixType matrix2 = (MatrixType) op2;
// Vector dot product
if (matrix1.getRows() == 1 && matrix2.getRows() == 1 && matrix1.getCols() == matrix2.getCols()) {
out = scalar;
} else
// Matrix multiplication
if (matrix1.getCols() == matrix2.getRows()) {
out = new MatrixType(matrix1.getRows(), matrix2.getCols());
} else {
errorReporter.reportError("Size mismatch", ctx);
out = null;
}
} else {
errorReporter.reportError("Cannot multiply matrix with " + op2, ctx);
out = null;
}
}
} else if (operator.equals("/")) {
expect(scalar, op1, ctx.expr(0));
expect(scalar, op2, ctx.expr(1));
out = scalar;
} else {
errorReporter.reportError("what?", ctx);
out = null;
}
ctx.t = out;
return out;
}
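// Examples (illustrative) of the typing rules above: Scalar * Matrix[2,3] -> Matrix[2,3],
// Vector[1,4] * Vector[1,4] -> Scalar (dot product), Matrix[2,3] * Matrix[3,4] -> Matrix[2,4],
// while "/" accepts only Scalar operands and yields Scalar.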
@Override
public Type visitIdentifier(TrinityParser.IdentifierContext ctx) {
try {
return ctx.t = symbolTable.retrieveSymbol(ctx.ID().getText());
} catch (SymbolNotFoundException e) {
errorReporter.reportError("Symbol not found", ctx.ID().getSymbol());
return null;
}
}
@Override
public Type visitNegate(TrinityParser.NegateContext ctx) {
Type out;
Type op = ctx.expr().accept(this);
if (op.equals(bool)) {
errorReporter.reportError("Cannot negate bool", ctx);
out = bool;
} else {
out = op;
}
ctx.t = out;
return out;
}
@Override
public Type visitPrimitiveType(TrinityParser.PrimitiveTypeContext ctx) {
String prim = ctx.getChild(0).getText();
if (prim.contentEquals("Boolean")) {
return bool;
} else if (prim.contentEquals("Scalar")) {
return scalar;
} else {
errorReporter.reportError("hmm", ctx);
return null;
}
}
@Override
public Type visitVectorType(TrinityParser.VectorTypeContext ctx) {
Type out = null;
if (ctx.ID() != null) {
errorReporter.reportError("IDs not supported ... yet", ctx.ID().getSymbol());
} else {
try {
int size = new Integer(ctx.NUMBER().getText());
out = new MatrixType(1, size);
} catch (NumberFormatException ex) {
errorReporter.reportError("Unsupported dimension", ctx.NUMBER().getSymbol());
}
}
return out;
}
@Override
public Type visitMatrixType(TrinityParser.MatrixTypeContext ctx) {
Type out = null;
if (ctx.ID(0) != null || ctx.ID(1) != null) {
errorReporter.reportError("IDs not supported ... yet", (ctx.ID(0) != null ? ctx.ID(0) : ctx.ID(1)).getSymbol());
} else {
int rows = 0;
int cols = 0;
try {
rows = new Integer(ctx.NUMBER(0).getText());
} catch (NumberFormatException ex) {
errorReporter.reportError("Unsupported dimension", ctx.NUMBER(0).getSymbol());
}
try {
cols = new Integer(ctx.NUMBER(1).getText());
} catch (NumberFormatException ex) {
errorReporter.reportError("Unsupported dimension", ctx.NUMBER(0).getSymbol());
}
out = new MatrixType(rows, cols);
}
return out;
}
}
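/*
 * Illustrative sketch only (not part of the type checker): the sizing rules enforced by
 * visitMultiplyDivide, written out with plain integers. A scalar is treated as a 1x1 value,
 * vectors are 1xN matrices, and an m x k matrix times a k x n matrix yields an m x n matrix.
 * The class and method names below are hypothetical and exist purely to demonstrate the rule.
 */
class MultiplicationSizeRuleSketch {
    /** Returns {rows, cols} of the product, or null if the sizes are incompatible. */
    static int[] productDims(int rows1, int cols1, int rows2, int cols2) {
        if (rows1 == 1 && rows2 == 1 && cols1 == cols2) {
            return new int[]{1, 1};            // vector dot product yields a scalar
        }
        if (cols1 == rows2) {
            return new int[]{rows1, cols2};    // ordinary matrix multiplication
        }
        return null;                           // size mismatch, reported as an error
    }
    public static void main(String[] args) {
        System.out.println(java.util.Arrays.toString(productDims(2, 3, 3, 4))); // [2, 4]
        System.out.println(java.util.Arrays.toString(productDims(1, 3, 1, 3))); // [1, 1]
        System.out.println(productDims(2, 3, 2, 3));                            // null
    }
}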
|
package org.geoscript.js;
import java.math.BigDecimal;
import java.net.URI;
import java.sql.Date;
import java.sql.Time;
import java.sql.Timestamp;
import org.geotools.feature.FeatureCollection;
import org.geotools.geometry.jts.ReferencedEnvelope;
import org.mozilla.javascript.Context;
import org.mozilla.javascript.NativeArray;
import org.mozilla.javascript.NativeJSON;
import org.mozilla.javascript.ScriptRuntime;
import org.mozilla.javascript.Scriptable;
import org.mozilla.javascript.ScriptableObject;
import org.mozilla.javascript.Wrapper;
import org.mozilla.javascript.annotations.JSFunction;
import org.mozilla.javascript.annotations.JSGetter;
import org.opengis.referencing.crs.CoordinateReferenceSystem;
public class GeoObject extends ScriptableObject implements Wrapper {
/** serialVersionUID */
private static final long serialVersionUID = 5069578216502688712L;
protected enum Type {
String(String.class),
Integer(Integer.class),
Short(Short.class),
Float(Float.class),
Long(Long.class),
Double(Double.class),
Boolean(Boolean.class),
Geometry(com.vividsolutions.jts.geom.Geometry.class),
Point(com.vividsolutions.jts.geom.Point.class),
LineString(com.vividsolutions.jts.geom.LineString.class),
Polygon(com.vividsolutions.jts.geom.Polygon.class),
GeometryCollection(com.vividsolutions.jts.geom.GeometryCollection.class),
MultiPoint(com.vividsolutions.jts.geom.MultiPoint.class),
MultiLineString(com.vividsolutions.jts.geom.MultiLineString.class),
MultiPolygon(com.vividsolutions.jts.geom.MultiPolygon.class),
Bounds(ReferencedEnvelope.class),
FeatureCollection(FeatureCollection.class),
Filter(org.opengis.filter.Filter.class),
Projection(CoordinateReferenceSystem.class),
Date(Date.class),
Time(Time.class),
Datetime(java.util.Date.class),
Timestamp(Timestamp.class),
BigDecimal(BigDecimal.class),
URI(URI.class);
private Class<?> binding;
Type(Class<?> binding) {
this.binding = binding;
}
public static String getName(Class<?> binding) {
String name = null;
if (binding.isEnum()) {
binding = String.class;
}
for (Type type : Type.values()) {
if (!binding.isPrimitive()) {
if (!binding.isInterface()) {
if (type.getBinding().equals(binding)) {
name = type.name();
break;
}
} else if (type.getBinding().isAssignableFrom(binding)) {
name = type.name();
break;
}
} else {
try {
Class<?> cls = (Class<?>) type.getBinding().getField("TYPE").get(null);
if (cls.equals(binding)) {
name = type.name();
break;
}
} catch (Exception e) {
// no type field on binding, keep looking
}
}
}
return name;
}
/**
* @return the binding
*/
public Class<?> getBinding() {
return binding;
}
}
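/*
 * Illustrative usage sketch (not a member of this class): how Type.getName resolves bindings.
 * 'SomeEnum' stands for any enum type and is hypothetical.
 *
 *   Type.getName(Integer.class)        -> "Integer"   (exact class match)
 *   Type.getName(int.class)            -> "Integer"   (primitive resolved via the wrapper's TYPE field)
 *   Type.getName(java.util.Date.class) -> "Datetime"
 *   Type.getName(SomeEnum.class)       -> "String"    (enum bindings are treated as strings)
 */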
@JSGetter
public Scriptable getConfig() {
Scriptable scope = getParentScope();
Context cx = getCurrentContext();
Scriptable obj = cx.newObject(scope);
obj.put("type", obj, getClass().getSimpleName());
return obj;
}
@JSGetter
public Object getJson() {
Scriptable config = getConfig();
Scriptable scope = getParentScope();
Context cx = getCurrentContext();
Object json = NativeJSON.stringify(cx, scope, config, null, null);
return json;
}
public Object unwrap() {
return null;
}
@Override
public String getClassName() {
return getClass().getName();
}
/**
* String representation of an array.
* @param array the array to represent
* @return a string representation of the array, nested arrays included
*/
protected String arrayRepr(NativeArray array) {
String repr = "[";
int length = array.size();
for (int i=0; i<length; ++i) {
Object item = array.get(i);
if (item instanceof NativeArray) {
repr += arrayRepr((NativeArray) item);
} else if (item instanceof String) {
repr += '"' + (String) item + '"';
} else {
repr += Context.toString(item);
}
if (i < length -1) {
repr += ", ";
}
}
return repr + "]";
}
@JSFunction
public String toString() {
String full = toFullString();
if (full.length() > 0) {
full = " " + full;
}
if (full.length() > 60) {
full = full.substring(0, 61) + "...";
}
return "<" + getClass().getSimpleName() + full + ">";
}
/**
* Descriptive string representation of this object.
* @return a descriptive string, or an empty string if there is nothing to add
*/
public String toFullString() {
return "";
}
/**
* Get the context associated with the current thread.
* @return The current context.
*/
protected static Context getCurrentContext() {
Context cx = Context.getCurrentContext();
if (cx == null) {
throw new RuntimeException("No context associated with current thread.");
}
return cx;
}
/**
* Get an optional member. If the member is present and the value is not
* null, the value must be an instance of the provided class.
*
* @param obj the object to read the member from
* @param name the member name
* @param cls the member value, if present, must be an instance of this
* class
* @return the member value, or null if the member is not present
*/
protected static Object getOptionalMember(Scriptable obj, String name, Class<?> cls) {
return getOptionalMember(obj, name, cls, cls.getSimpleName());
}
/**
* Get an optional member. If the member is present and the value is not
* null, the value must be an instance of the provided class.
*
* @param obj the object to read the member from
* @param name the member name
* @param cls the member value, if present, must be an instance of this
* class
* @param clsName The constructor name displayed to the user in the case
* of an error.
* @return the member value, or null if the member is not present
*/
protected static Object getOptionalMember(Scriptable obj, String name,
Class<?> cls, String clsName) {
Object result = getMember(obj, name);
if (result != null && !cls.isInstance(result)) {
throw ScriptRuntime.constructError("Error",
"The optional " + name + " member must be a " + clsName);
}
return result;
}
/**
* Get a required member. The member must be present, the value must be
* non-null, and it must be an instance of the provided class.
*
* @param obj the object to read the member from
* @param name the member name
* @param cls The member value must be an instance of this class.
* @return the member value (never null)
*/
protected static Object getRequiredMember(Scriptable obj, String name, Class<?> cls) {
return getRequiredMember(obj, name, cls, cls.getSimpleName());
}
/**
* Get a required member. The member must be present, the value must be
* non-null, and it must be an instance of the provided class.
*
* @param obj the object to read the member from
* @param name the member name
* @param cls The member value must be an instance of this class.
* @param clsName The constructor name displayed to the user in the case
* of an error.
* @return the member value (never null)
*/
protected static Object getRequiredMember(Scriptable obj, String name,
Class<?> cls, String clsName) {
Object result = getMember(obj, name);
if (result == null) {
throw ScriptRuntime.constructError("Error",
"The required " + name + " member must be non-null");
}
if (!cls.isInstance(result)) {
throw ScriptRuntime.constructError("Error",
"Expected the '" + name + "' member to be a " +
cls.getSimpleName() + ". Got: " + Context.toString(result));
}
return result;
}
/**
* Get an object member. Returns null if the member is not present or if
* the value is null.
* @param obj the object to read the member from
* @param name the member name
* @return the member value, or null
*/
private static Object getMember(Scriptable obj, String name) {
Object result = null;
if (obj.has(name, obj)) {
result = obj.get(name, obj);
}
return result;
}
/**
* Convert a JavaScript object into the appropriate Java type.
* @param value the JavaScript value
* @return the equivalent Java object
*/
public static Object jsToJava(Object value) {
if (value instanceof Wrapper) {
value = ((Wrapper) value).unwrap();
} else if (value instanceof Scriptable) {
if (((Scriptable) value).getClassName().equals("Date")) {
value = Context.jsToJava(value, java.util.Date.class);
}
}
return value;
}
/**
* Convert a Java object into the appropriate JavaScript type.
* @param value the Java value
* @param scope the scope used to construct JavaScript objects
* @return the equivalent JavaScript object
*/
public static Object javaToJS(Object value, Scriptable scope) {
if (value instanceof java.util.Date) {
java.util.Date date = (java.util.Date) value;
Object[] args = { Long.valueOf(date.getTime()) };
Context cx = GeoObject.getCurrentContext();
value = cx.newObject(scope, "Date", args);
}
return Context.javaToJS(value, scope);
}
}
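/*
 * Illustrative sketch only (not part of the library): the date round trip performed by
 * GeoObject.javaToJS and GeoObject.jsToJava. A java.util.Date becomes a native JavaScript
 * Date on the way in and is converted back on the way out. The class name below is
 * hypothetical; it assumes a plain Rhino context entered on the current thread.
 */
class GeoObjectDateRoundTripSketch {
    public static void main(String[] args) {
        Context cx = Context.enter();
        try {
            Scriptable scope = cx.initStandardObjects();
            java.util.Date now = new java.util.Date();
            Object jsDate = GeoObject.javaToJS(now, scope);       // wrapped as a JS Date object
            Object back = GeoObject.jsToJava(jsDate);             // converted back to java.util.Date
            System.out.println(back instanceof java.util.Date);   // true
        } finally {
            Context.exit();
        }
    }
}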
|
package ui.issuecolumn;
import backend.interfaces.IModel;
import javafx.geometry.Insets;
import javafx.scene.Node;
import javafx.scene.input.KeyEvent;
import javafx.scene.layout.HBox;
import prefs.Preferences;
import ui.GUIController;
import ui.UI;
import ui.components.KeyboardShortcuts;
import ui.issuepanel.IssuePanel;
import util.events.ColumnClickedEvent;
import util.events.ColumnClickedEventHandler;
import util.events.IssueSelectedEventHandler;
import util.events.ModelUpdatedEventHandler;
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
import java.util.function.Consumer;
public class ColumnControl extends HBox {
private final UI ui;
private final Preferences prefs;
private IModel model;
private GUIController guiController;
private Optional<Integer> currentlySelectedColumn = Optional.empty();
public ColumnControl(UI ui, Preferences prefs) {
this.ui = ui;
this.prefs = prefs;
setSpacing(10);
setPadding(new Insets(0, 10, 0, 10));
ui.registerEvent((IssueSelectedEventHandler) e ->
setCurrentlySelectedColumn(Optional.of(e.columnIndex)));
ui.registerEvent((ColumnClickedEventHandler) e ->
setCurrentlySelectedColumn(Optional.of(e.columnIndex)));
setupKeyEvents();
}
/**
* Called on login.
*/
public void init(GUIController guiController) {
this.guiController = guiController;
restoreColumns();
}
public void updateModel(IModel newModel) {
model = newModel;
}
public void saveSession() {
List<String> sessionFilters = new ArrayList<>();
getChildren().forEach(child -> {
if (child instanceof IssueColumn) {
String filter = ((IssueColumn) child).getCurrentFilterString();
sessionFilters.add(filter);
}
});
prefs.setLastOpenFilters(sessionFilters);
}
public void restoreColumns() {
getChildren().clear();
List<String> filters = prefs.getLastOpenFilters();
if (filters.isEmpty()) {
addColumn();
return;
}
for (String filter : filters) {
addColumn().filterByString(filter);
}
}
public void forEach(Consumer<Column> callback) {
getChildren().forEach(child -> callback.accept((Column) child));
}
/**
* For a quick refresh (without requesting updates)
*/
public void refresh() {
forEach(child -> child.refreshItems(true));
}
private IssueColumn addColumn() {
return addColumnAt(getChildren().size());
}
public IssueColumn addColumnAt(int index) {
IssueColumn panel = new IssuePanel(ui, model, this, index);
getChildren().add(index, panel);
// Populates the panel with the default repo issues.
guiController.columnFilterExpressionChanged(panel);
updateColumnIndices();
setCurrentlySelectedColumn(Optional.of(index));
return panel;
}
private void setCurrentlySelectedColumn(Optional<Integer> selectedColumn) {
currentlySelectedColumn = selectedColumn;
updateCSSforColumns();
}
private void updateCSSforColumns() {
if (currentlySelectedColumn.isPresent()) {
for (int index = 0; index < getChildren().size(); index++) {
getColumn(index).getStyleClass().remove("panel-focused");
}
getColumn(currentlySelectedColumn.get()).getStyleClass().add("panel-focused");
}
}
public Column getColumn(int index) {
return (Column) getChildren().get(index);
}
public void closeAllColumns() {
getChildren().clear();
// There aren't any children left, so we don't need to update indices
}
public void openColumnsWithFilters(List<String> filters) {
for (String filter : filters) {
IssueColumn column = addColumn();
column.filterByString(filter);
}
}
public void closeColumn(int index) {
Node child = getChildren().remove(index);
updateColumnIndices();
((Column) child).close();
}
private void updateColumnIndices() {
int i = 0;
for (Node c : getChildren()) {
((Column) c).updateIndex(i++);
}
}
public void createNewPanelAtStart() {
addColumnAt(0);
}
public void createNewPanelAtEnd() {
addColumn();
}
public void swapColumns(int columnIndex, int columnIndex2) {
Column one = getColumn(columnIndex);
Column two = getColumn(columnIndex2);
one.updateIndex(columnIndex2);
two.updateIndex(columnIndex);
// Swapping is done via placeholder nodes because Collections.swap
// would assign one child while it is still attached elsewhere,
// causing a duplicate-children exception. Empty HBoxes are used as
// placeholders because setting a child to null also throws.
getChildren().set(columnIndex, new HBox());
getChildren().set(columnIndex2, new HBox());
getChildren().set(columnIndex, two);
getChildren().set(columnIndex2, one);
}
public Optional<Integer> getCurrentlySelectedColumn() {
return currentlySelectedColumn;
}
// For dragging purposes
private int currentlyDraggedColumnIndex = -1;
public int getCurrentlyDraggedColumnIndex() {
return currentlyDraggedColumnIndex;
}
public void setCurrentlyDraggedColumnIndex(int i) {
currentlyDraggedColumnIndex = i;
}
public void closeCurrentColumn() {
if (currentlySelectedColumn.isPresent()) {
int columnIndex = currentlySelectedColumn.get();
closeColumn(columnIndex);
if (getChildren().size() == 0) {
setCurrentlySelectedColumn(Optional.empty());
} else {
int newColumnIndex = (columnIndex > getChildren().size() - 1)
? columnIndex - 1
: columnIndex;
setCurrentlySelectedColumn(Optional.of(newColumnIndex));
getColumn(currentlySelectedColumn.get()).requestFocus();
}
}
}
public double getPanelWidth() {
// COLUMN_WIDTH is used instead of
// ((Column) getChildren().get(0)).getWidth();
// because when this function is called, columns may not have been sized yet.
// In any case the actual column width is at least COLUMN_WIDTH,
// so it is safe to assume that size here.
return 40 + Column.COLUMN_WIDTH;
}
private void setupKeyEvents() {
addEventHandler(KeyEvent.KEY_RELEASED, event -> {
if (event.getCode() == KeyboardShortcuts.RIGHT_PANEL || event.getCode() == KeyboardShortcuts.LEFT_PANEL) {
handleKeys(event.getCode() == KeyboardShortcuts.RIGHT_PANEL);
assert currentlySelectedColumn.isPresent() : "handleKeys doesn't set selectedIndex!";
}
});
}
private void handleKeys(boolean isForwardKey) {
if (!currentlySelectedColumn.isPresent()) {
return;
}
if (getChildren().size() == 0) {
return;
}
Column selectedColumn = getColumn(currentlySelectedColumn.get());
if (selectedColumn instanceof IssueColumn){
if (((IssueColumn) selectedColumn).filterTextField.isFocused()){
return;
} else {
int newIndex = currentlySelectedColumn.get() + (isForwardKey ? 1 : -1);
if (newIndex < 0) {
newIndex = getChildren().size() - 1;
} else if (newIndex > getChildren().size() - 1) {
newIndex = 0;
}
setCurrentlySelectedColumn(Optional.of(newIndex));
selectedColumn = getColumn(currentlySelectedColumn.get());
selectedColumn.requestFocus();
}
}
ui.triggerEvent(new ColumnClickedEvent(currentlySelectedColumn.get()));
scrollAndShowColumn(currentlySelectedColumn.get(), getChildren().size());
}
private void scrollAndShowColumn(int selectedColumnIndex, int numOfColumns) {
ui.getMenuControl().scrollTo(selectedColumnIndex, numOfColumns);
}
public GUIController getGUIController() {
return guiController;
}
public int getNumberOfColumns() {
return getChildren().size();
}
public int getNumberOfSavedBoards() {
return prefs.getAllBoards().size();
}
}
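/*
 * Illustrative sketch only (not part of the application): the circular panel navigation used by
 * ColumnControl.handleKeys. Moving past the last panel wraps to the first and vice versa. The
 * class and method names below are hypothetical.
 */
class PanelNavigationSketch {
    static int nextIndex(int current, boolean forward, int panelCount) {
        int next = current + (forward ? 1 : -1);
        if (next < 0) {
            next = panelCount - 1;      // wrap from the first panel to the last
        } else if (next > panelCount - 1) {
            next = 0;                   // wrap from the last panel back to the first
        }
        return next;
    }
    public static void main(String[] args) {
        System.out.println(nextIndex(2, true, 3));  // 0: wraps forward
        System.out.println(nextIndex(0, false, 3)); // 2: wraps backward
    }
}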
|
package denominator.ultradns;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.base.Preconditions.checkState;
import static com.google.common.collect.Iterators.concat;
import static com.google.common.collect.Iterators.filter;
import static com.google.common.collect.Iterators.transform;
import static denominator.model.ResourceRecordSets.typeEqualTo;
import static denominator.ultradns.UltraDNSPredicates.isGeolocationPool;
import static org.jclouds.ultradns.ws.domain.DirectionalPool.RecordType.IPV4;
import static org.jclouds.ultradns.ws.domain.DirectionalPool.RecordType.IPV6;
import java.util.EnumSet;
import java.util.Iterator;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import javax.inject.Inject;
import org.jclouds.ultradns.ws.UltraDNSWSApi;
import org.jclouds.ultradns.ws.domain.DirectionalGroup;
import org.jclouds.ultradns.ws.domain.DirectionalGroupCoordinates;
import org.jclouds.ultradns.ws.domain.DirectionalPool;
import org.jclouds.ultradns.ws.domain.DirectionalPool.RecordType;
import org.jclouds.ultradns.ws.domain.DirectionalPoolRecord;
import org.jclouds.ultradns.ws.domain.DirectionalPoolRecordDetail;
import org.jclouds.ultradns.ws.domain.IdAndName;
import org.jclouds.ultradns.ws.features.DirectionalGroupApi;
import org.jclouds.ultradns.ws.features.DirectionalPoolApi;
import com.google.common.base.Function;
import com.google.common.base.Optional;
import com.google.common.base.Predicate;
import com.google.common.base.Supplier;
import com.google.common.collect.ComparisonChain;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableMap.Builder;
import com.google.common.collect.Multimap;
import com.google.common.collect.Ordering;
import dagger.Lazy;
import denominator.ResourceTypeToValue;
import denominator.model.ResourceRecordSet;
import denominator.profile.GeoResourceRecordSetApi;
public final class UltraDNSGeoResourceRecordSetApi implements GeoResourceRecordSetApi {
private final Set<String> types;
private final Multimap<String, String> regions;
private final DirectionalGroupApi groupApi;
private final DirectionalPoolApi poolApi;
private final GroupGeoRecordByNameTypeIterator.Factory iteratorFactory;
private final String zoneName;
UltraDNSGeoResourceRecordSetApi(Set<String> types, Multimap<String, String> regions, DirectionalGroupApi groupApi,
DirectionalPoolApi poolApi, GroupGeoRecordByNameTypeIterator.Factory iteratorFactory, String zoneName) {
this.types = types;
this.regions = regions;
this.groupApi = groupApi;
this.poolApi = poolApi;
this.iteratorFactory = iteratorFactory;
this.zoneName = zoneName;
}
@Override
public Set<String> getSupportedTypes() {
return types;
}
@Override
public Multimap<String, String> getSupportedRegions() {
return regions;
}
@Override
public Iterator<ResourceRecordSet<?>> list() {
return concat(poolApi.list().filter(isGeolocationPool())
.transform(new Function<DirectionalPool, Iterator<ResourceRecordSet<?>>>() {
@Override
public Iterator<ResourceRecordSet<?>> apply(DirectionalPool pool) {
return allByDName(pool.getDName());
}
}).iterator());
}
private Iterator<ResourceRecordSet<?>> allByDName(final String dname) {
// TODO: remove when type "0" can be specified (UMP-5738 Dodgers release of UltraDNS)
return concat(transform(EnumSet.allOf(RecordType.class).iterator(),
new Function<RecordType, Iterator<ResourceRecordSet<?>>>() {
@Override
public Iterator<ResourceRecordSet<?>> apply(RecordType input) {
return iteratorForDNameAndDirectionalType(dname, input);
}
}));
}
@Override
public Iterator<ResourceRecordSet<?>> listByName(String name) {
return allByDName(checkNotNull(name, "name"));
}
@Override
public Iterator<ResourceRecordSet<?>> listByNameAndType(String name, final String type) {
checkNotNull(name, "name");
checkNotNull(type, "type");
if ("CNAME".equals(type)) {
// retain original type (this will filter out A, AAAA)
return filter(
concat(iteratorForDNameAndDirectionalType(name, IPV4),
iteratorForDNameAndDirectionalType(name, IPV6)),
typeEqualTo(type));
} else if ("A".equals(type) || "AAAA".equals(type)) {
RecordType dirType = "AAAA".equals(type) ? IPV6 : IPV4;
Iterator<ResourceRecordSet<?>> iterator = iteratorForDNameAndDirectionalType(name, dirType);
// retain original type (this will filter out CNAMEs)
return filter(iterator, typeEqualTo(type));
} else {
return iteratorForDNameAndDirectionalType(name, RecordType.valueOf(type));
}
}
@Override
public Optional<ResourceRecordSet<?>> getByNameTypeAndGroup(String name, String type,
String group) {
Iterator<DirectionalPoolRecordDetail> records = recordsByNameTypeAndGroupName(name, type, group);
Iterator<ResourceRecordSet<?>> iterator = iteratorFactory.create(records);
if (iterator.hasNext())
return Optional.<ResourceRecordSet<?>> of(iterator.next());
return Optional.absent();
}
private Iterator<DirectionalPoolRecordDetail> recordsByNameTypeAndGroupName(String name, String type,
String group) {
checkNotNull(name, "name");
checkNotNull(type, "type");
checkNotNull(group, "group");
Iterator<DirectionalPoolRecordDetail> records;
if ("CNAME".equals(type)) {
records = filter(
concat(recordsForNameTypeAndGroup(name, "A", group),
recordsForNameTypeAndGroup(name, "AAAA", group)), isCNAME);
} else {
records = recordsForNameTypeAndGroup(name, type, group);
}
return records;
}
private Iterator<DirectionalPoolRecordDetail> recordsForNameTypeAndGroup(String name, String type, String group) {
int typeValue = checkNotNull(new ResourceTypeToValue().get(type), "typeValue for %s", type);
DirectionalGroupCoordinates coord = DirectionalGroupCoordinates.builder()
.zoneName(zoneName)
.recordName(name)
.recordType(typeValue)
.groupName(group).build();
return groupApi.listRecordsByGroupCoordinates(coord).iterator();
}
@Override
public void applyRegionsToNameTypeAndGroup(Multimap<String, String> regions, String name, String type, String group) {
Iterator<DirectionalPoolRecordDetail> iterator = recordsByNameTypeAndGroupName(name, type, group);
Map<DirectionalPoolRecordDetail, DirectionalGroup> updates = groupsToUpdate(iterator, regions);
if (updates.isEmpty())
return;
for (Entry<DirectionalPoolRecordDetail, DirectionalGroup> update : updates.entrySet()) {
DirectionalPoolRecordDetail detail = update.getKey();
// TODO: ensure forceOverlapTransfer (Dodgers release of UltraDNS)
poolApi.updateRecordAndGroup(detail.getId(), detail.getRecord(), update.getValue());
}
}
private Map<DirectionalPoolRecordDetail, DirectionalGroup> groupsToUpdate(
Iterator<DirectionalPoolRecordDetail> iterator, Multimap<String, String> regions) {
Builder<DirectionalPoolRecordDetail, DirectionalGroup> toUpdate = ImmutableMap.builder();
for (Iterator<DirectionalPoolRecordDetail> i = iterator; i.hasNext();) {
DirectionalPoolRecordDetail detail = i.next();
DirectionalGroup directionalGroup = groupApi.get(detail.getGeolocationGroup().get().getId());
if (!regions.equals(directionalGroup.getRegionToTerritories())) {
toUpdate.put(detail, directionalGroup.toBuilder().regionToTerritories(regions).build());
}
}
return toUpdate.build();
}
@Override
public void applyTTLToNameTypeAndGroup(int ttl, String name, String type, String group) {
for (Iterator<DirectionalPoolRecordDetail> i = recordsByNameTypeAndGroupName(name, type, group); i.hasNext();) {
DirectionalPoolRecordDetail detail = i.next();
DirectionalPoolRecord record = detail.getRecord();
if (record.getTTL() != ttl)
poolApi.updateRecord(detail.getId(), record.toBuilder().ttl(ttl).build());
}
}
private Iterator<ResourceRecordSet<?>> iteratorForDNameAndDirectionalType(String name, RecordType dirType) {
return iteratorFactory.create(poolApi.listRecordsByDNameAndType(name, dirType.getCode())
.toSortedList(byTypeAndGeoGroup).iterator());
}
static Optional<IdAndName> group(DirectionalPoolRecordDetail in) {
return in.getGeolocationGroup().or(in.getGroup());
}
private static final Ordering<DirectionalPoolRecordDetail> byTypeAndGeoGroup = new Ordering<DirectionalPoolRecordDetail>() {
@Override
public int compare(DirectionalPoolRecordDetail left, DirectionalPoolRecordDetail right) {
checkState(group(left).isPresent(), "expected record to be in a geolocation group: %s", left);
checkState(group(right).isPresent(), "expected record to be in a geolocation group: %s", right);
return ComparisonChain.start()
.compare(left.getRecord().getType(), right.getRecord().getType())
.compare(group(left).get().getName(), group(right).get().getName()).result();
}
};
static final class Factory implements GeoResourceRecordSetApi.Factory {
private final Set<String> types;
private final Lazy<Multimap<String, String>> regions;
private final UltraDNSWSApi api;
private final Supplier<IdAndName> account;
private final GroupGeoRecordByNameTypeIterator.Factory iteratorFactory;
@Inject
Factory(@denominator.config.profile.Geo Set<String> types,
@denominator.config.profile.Geo Lazy<Multimap<String, String>> regions, UltraDNSWSApi api,
Supplier<IdAndName> account, GroupGeoRecordByNameTypeIterator.Factory iteratorFactory) {
this.types = types;
this.regions = regions;
this.api = api;
this.account = account;
this.iteratorFactory = iteratorFactory;
}
@Override
public Optional<GeoResourceRecordSetApi> create(String zoneName) {
checkNotNull(zoneName, "zoneName was null");
return Optional.<GeoResourceRecordSetApi> of(
new UltraDNSGeoResourceRecordSetApi(types, regions.get(),
api.getDirectionalGroupApiForAccount(account.get().getId()),
api.getDirectionalPoolApiForZone(zoneName), iteratorFactory, zoneName));
}
}
private final Predicate<DirectionalPoolRecordDetail> isCNAME = new Predicate<DirectionalPoolRecordDetail>() {
@Override
public boolean apply(DirectionalPoolRecordDetail input) {
return "CNAME".equals(input.getRecord().getType());
}
};
}
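/*
 * Illustrative sketch only (not part of the provider): the "only update what changed" check used by
 * groupsToUpdate and applyTTLToNameTypeAndGroup above. A record or group is rewritten only when the
 * desired value differs from the stored one, which keeps the number of UltraDNS update calls minimal.
 * All names below are hypothetical.
 */
class MinimalUpdateSketch {
    static java.util.Map<String, Integer> ttlUpdates(java.util.Map<String, Integer> current, int desiredTtl) {
        java.util.Map<String, Integer> updates = new java.util.LinkedHashMap<>();
        for (java.util.Map.Entry<String, Integer> entry : current.entrySet()) {
            if (entry.getValue() != desiredTtl) {   // skip records already at the desired TTL
                updates.put(entry.getKey(), desiredTtl);
            }
        }
        return updates;
    }
    public static void main(String[] args) {
        java.util.Map<String, Integer> current = new java.util.LinkedHashMap<>();
        current.put("www.example.com.", 300);
        current.put("eu.example.com.", 60);
        System.out.println(ttlUpdates(current, 300)); // only eu.example.com. needs an update call
    }
}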
|
package org.htmlcleaner;
import java.io.*;
import java.util.*;
/**
* <p>Basic abstract serializer - contains common logic for descendants (methods <code>writeXXX()</code>.</p>
*/
public abstract class Serializer {
/**
* Used to implement serialization without the envelope - omitting the open and close tags and
* serializing only the children.
*/
private class HeadlessTagNode extends TagNode {
private HeadlessTagNode(TagNode wrappedNode) {
super("");
getAttributes().putAll(wrappedNode.getAttributes());
addChildren(wrappedNode.getAllChildren());
setDocType(wrappedNode.getDocType());
Map<String, String> nsDecls = getNamespaceDeclarations();
if (nsDecls != null) {
Map<String, String> wrappedNSDecls = wrappedNode.getNamespaceDeclarations();
if (wrappedNSDecls != null) {
nsDecls.putAll(wrappedNSDecls);
}
}
}
}
protected CleanerProperties props;
protected Serializer(CleanerProperties props) {
this.props = props;
}
/**
* Writes specified TagNode to the output stream, using specified charset and optionally omits node envelope
* (skips open and close tags of the node).
* @param tagNode Node to be written
* @param out Output stream
* @param charset Charset of the output
* @param omitEnvelope Tells whether to skip open and close tag of the node.
* @throws IOException
*/
public void writeToStream(TagNode tagNode, OutputStream out, String charset, boolean omitEnvelope) throws IOException {
write( tagNode, new OutputStreamWriter(out, charset), charset, omitEnvelope );
}
/**
* Writes specified TagNode to the output stream, using specified charset.
* @param tagNode Node to be written
* @param out Output stream
* @param charset Charset of the output
* @throws IOException
*/
public void writeToStream(TagNode tagNode, OutputStream out, String charset) throws IOException {
writeToStream(tagNode, out, charset, false);
}
/**
* Writes specified TagNode to the output stream, using the charset from the cleaner properties, and optionally omits node envelope
* (skips open and close tags of the node).
* @param tagNode Node to be written
* @param out Output stream
* @param omitEnvelope Tells whether to skip open and close tag of the node.
* @throws IOException
*/
public void writeToStream(TagNode tagNode, OutputStream out, boolean omitEnvelope) throws IOException {
writeToStream( tagNode, out, props.getCharset(), omitEnvelope );
}
/**
* Writes specified TagNode to the output stream, using the charset from the cleaner properties.
* @param tagNode Node to be written
* @param out Output stream
* @throws IOException
*/
public void writeToStream(TagNode tagNode, OutputStream out) throws IOException {
writeToStream(tagNode, out, false);
}
/**
* Writes specified TagNode to the file, using specified charset and optionally omits node envelope
* (skips open and close tags of the node).
* @param tagNode Node to be written
* @param fileName Output file name
* @param charset Charset of the output
* @param omitEnvelope Tells whether to skip open and close tag of the node.
* @throws IOException
*/
public void writeToFile(TagNode tagNode, String fileName, String charset, boolean omitEnvelope) throws IOException {
writeToStream(tagNode, new FileOutputStream(fileName), charset, omitEnvelope );
}
/**
* Writes specified TagNode to the file, using specified charset.
* @param tagNode Node to be written
* @param fileName Output file name
* @param charset Charset of the output
* @throws IOException
*/
public void writeToFile(TagNode tagNode, String fileName, String charset) throws IOException {
writeToFile(tagNode, fileName, charset, false);
}
/**
* Writes specified TagNode to the file, using the charset from the cleaner properties, and optionally omits node envelope
* (skips open and close tags of the node).
* @param tagNode Node to be written
* @param fileName Output file name
* @param omitEnvelope Tells whether to skip open and close tag of the node.
* @throws IOException
*/
public void writeToFile(TagNode tagNode, String fileName, boolean omitEnvelope) throws IOException {
writeToFile(tagNode,fileName, props.getCharset(), omitEnvelope);
}
/**
* Writes specified TagNode to the file, using the charset from the cleaner properties.
* @param tagNode Node to be written
* @param fileName Output file name
* @throws IOException
*/
public void writeToFile(TagNode tagNode, String fileName) throws IOException {
writeToFile(tagNode, fileName, false);
}
/**
* @param tagNode Node to serialize to string
* @param charset Charset of the output - stands in xml declaration part
* @param omitEnvelope Tells whether to skip open and close tag of the node.
* @return Output as string
*/
public String getAsString(TagNode tagNode, String charset, boolean omitEnvelope) {
StringWriter writer = new StringWriter();
try {
write(tagNode, writer, charset, omitEnvelope);
} catch (IOException e) {
// not writing to the file system, so any IO errors should be really rare (and unexpected)
throw new HtmlCleanerException(e);
}
return writer.getBuffer().toString();
}
/**
* @param tagNode Node to serialize to string
* @param charset Charset of the output - stands in xml declaration part
* @return Output as string
*/
public String getAsString(TagNode tagNode, String charset) {
return getAsString(tagNode, charset, false);
}
/**
* @param tagNode Node to serialize to string
* @param omitEnvelope Tells whether to skip open and close tag of the node.
* @return Output as string
*/
public String getAsString(TagNode tagNode, boolean omitEnvelope) {
return getAsString(tagNode, props.getCharset(), omitEnvelope);
}
/**
* @param tagNode Node to serialize to string
* @return Output as string
*/
public String getAsString(TagNode tagNode) {
return getAsString(tagNode, false);
}
public String getAsString(String htmlContent) {
HtmlCleaner htmlCleaner = new HtmlCleaner(this.props);
TagNode tagNode = htmlCleaner.clean(htmlContent);
return getAsString(tagNode, props.getCharset());
}
/**
* Writes specified node using specified writer.
* @param tagNode Node to serialize.
* @param writer Writer instance
* @param charset Charset of the output
* @throws IOException
*/
public void write(TagNode tagNode, Writer writer, String charset) throws IOException {
write(tagNode, writer, charset, false);
}
/**
* Writes specified node using specified writer.
* @param tagNode Node to serialize.
* @param writer Writer instance
* @param charset Charset of the output
* @param omitEnvelope Tells whether to skip open and close tag of the node.
* @throws IOException
*/
public void write(TagNode tagNode, Writer writer, String charset, boolean omitEnvelope) throws IOException {
if (omitEnvelope) {
tagNode = new HeadlessTagNode(tagNode);
}
writer = new BufferedWriter(writer);
if ( !props.isOmitXmlDeclaration() ) {
String declaration = "<?xml version=\"1.0\"";
if (charset != null) {
declaration += " encoding=\"" + charset + "\"";
}
declaration += "?>";
writer.write(declaration + "\n");
}
if ( !props.isOmitDoctypeDeclaration() ) {
DoctypeToken doctypeToken = tagNode.getDocType();
if ( doctypeToken != null ) {
doctypeToken.serialize(this, writer);
}
}
serialize(tagNode, writer);
writer.flush();
writer.close();
}
protected boolean isScriptOrStyle(TagNode tagNode) {
String tagName = tagNode.getName();
return "script".equalsIgnoreCase(tagName) || "style".equalsIgnoreCase(tagName);
}
protected abstract void serialize(TagNode tagNode, Writer writer) throws IOException;
}
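/*
 * Illustrative sketch only (not shipped with the library): cleaning a fragment and serializing it
 * back to a string through one of the concrete Serializer subclasses. It assumes that
 * SimpleHtmlSerializer and the setOmitXmlDeclaration property are available in this package
 * version; the input markup is arbitrary.
 */
class SerializerUsageSketch {
    public static void main(String[] args) {
        CleanerProperties props = new CleanerProperties();
        props.setOmitXmlDeclaration(true);                 // skip the <?xml ...?> prolog
        Serializer serializer = new SimpleHtmlSerializer(props);
        // getAsString(String) cleans the markup first, then serializes the resulting tree
        System.out.println(serializer.getAsString("<p>unclosed paragraph"));
    }
}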
|
package weixin.popular.api;
import java.awt.image.BufferedImage;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.nio.charset.Charset;
import javax.imageio.ImageIO;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpUriRequest;
import org.apache.http.client.methods.RequestBuilder;
import org.apache.http.entity.StringEntity;
import org.apache.http.util.EntityUtils;
import weixin.popular.bean.qrcode.QrcodeTicket;
import weixin.popular.bean.qrcode.Wxaqrcode;
import weixin.popular.client.LocalHttpClient;
import weixin.popular.util.JsonUtil;
/**
* QR code API.
* @author LiYi
*
*/
public class QrcodeAPI extends BaseAPI{
/**
* Create a QR code ticket.
* @param access_token access_token
* @param qrcodeJson request body as JSON
* @return QrcodeTicket
*/
private static QrcodeTicket qrcodeCreate(String access_token,String qrcodeJson){
HttpUriRequest httpUriRequest = RequestBuilder.post()
.setHeader(jsonHeader)
.setUri(BASE_URI+"/cgi-bin/qrcode/create")
.addParameter(PARAM_ACCESS_TOKEN, API.accessToken(access_token))
.setEntity(new StringEntity(qrcodeJson,Charset.forName("utf-8")))
.build();
return LocalHttpClient.executeJsonResult(httpUriRequest,QrcodeTicket.class);
}
/**
* Create a temporary QR code ticket (integer scene ID).
* @param access_token access_token
* @param expire_seconds expiration time of the QR code, in seconds
* @param scene_id scene ID
* @return QrcodeTicket
*/
public static QrcodeTicket qrcodeCreateTemp(String access_token,int expire_seconds,long scene_id){
String json = String.format("{\"expire_seconds\": %d, \"action_name\": \"QR_SCENE\", \"action_info\": {\"scene\": {\"scene_id\": %d}}}",expire_seconds,scene_id);
return qrcodeCreate(access_token,json);
}
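/**
 * Create a temporary QR code ticket (string scene value).
 * @param access_token access_token
 * @param expire_seconds expiration time of the QR code, in seconds
 * @param scene_str scene string
 * @return QrcodeTicket
 */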
public static QrcodeTicket qrcodeCreateTemp(String access_token,int expire_seconds,String scene_str){
String json = String.format("{\"expire_seconds\": %d, \"action_name\": \"QR_STR_SCENE\", \"action_info\": {\"scene\": {\"scene_str\": %s}}}", expire_seconds, scene_str);
return qrcodeCreate(access_token,json);
}
/**
* Create a permanent QR code ticket (integer scene ID).
* @param access_token access_token
* @param scene_id scene ID (1-100000)
* @return QrcodeTicket
*/
public static QrcodeTicket qrcodeCreateFinal(String access_token,int scene_id){
String json = String.format("{\"action_name\": \"QR_LIMIT_SCENE\", \"action_info\": {\"scene\": {\"scene_id\":%d}}}", scene_id);
return qrcodeCreate(access_token,json);
}
/**
* Create a permanent QR code ticket (string scene value).
* @param access_token access_token
* @param scene_str scene string (1-64 characters)
* @return QrcodeTicket
*/
public static QrcodeTicket qrcodeCreateFinal(String access_token,String scene_str){
String json = String.format("{\"action_name\": \"QR_LIMIT_STR_SCENE\", \"action_info\": {\"scene\": {\"scene_str\": \"%s\"}}}", scene_str);
return qrcodeCreate(access_token,json);
}
/**
* Exchange a ticket for the QR code image.
* @param ticket ticket obtained from one of the create calls (URL-encoded when sent)
* @return BufferedImage
*/
public static BufferedImage showqrcode(String ticket){
HttpUriRequest httpUriRequest = RequestBuilder.get()
.setUri(MP_URI + "/cgi-bin/showqrcode")
.addParameter("ticket", ticket)
.build();
CloseableHttpResponse httpResponse = LocalHttpClient.execute(httpUriRequest);
return getImage(httpResponse);
}
/**
* <br>
* WxaAPI.getwxacodeWxaAPI.getwxacodeunlimit
* @since 2.8.8
* @param access_token access_token
* @param wxaqrcode wxaqrcode
* @return BufferedImage
*/
public static BufferedImage wxaappCreatewxaqrcode(String access_token,Wxaqrcode wxaqrcode){
String json = JsonUtil.toJSONString(wxaqrcode);
HttpUriRequest httpUriRequest = RequestBuilder.post()
.setHeader(jsonHeader)
.setUri(BASE_URI + "/cgi-bin/wxaapp/createwxaqrcode")
.addParameter(PARAM_ACCESS_TOKEN, API.accessToken(access_token))
.setEntity(new StringEntity(json,Charset.forName("utf-8")))
.build();
CloseableHttpResponse httpResponse = LocalHttpClient.execute(httpUriRequest);
return getImage(httpResponse);
}
private static BufferedImage getImage(CloseableHttpResponse httpResponse) {
try {
int status = httpResponse.getStatusLine().getStatusCode();
if (status == 200) {
byte[] bytes = EntityUtils.toByteArray(httpResponse.getEntity());
return ImageIO.read(new ByteArrayInputStream(bytes));
}
} catch (IOException e) {
e.printStackTrace();
} finally {
try {
httpResponse.close();
} catch (IOException e) {
e.printStackTrace();
}
}
return null;
}
}
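/*
 * Illustrative sketch only (not part of the API): creating a temporary QR code ticket and fetching
 * the image. ACCESS_TOKEN is a placeholder, and getTicket() is the assumed accessor on the
 * QrcodeTicket bean; the call requires a valid token and network access.
 */
class QrcodeApiUsageSketch {
    public static void main(String[] args) throws IOException {
        String accessToken = "ACCESS_TOKEN";                              // hypothetical placeholder
        QrcodeTicket ticket = QrcodeAPI.qrcodeCreateTemp(accessToken, 604800, 1001L);
        BufferedImage image = QrcodeAPI.showqrcode(ticket.getTicket());   // getTicket() assumed
        ImageIO.write(image, "png", new java.io.File("qrcode.png"));
    }
}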
|
package org.jacis.store;
import org.jacis.container.JacisContainer;
import org.jacis.container.JacisContainer.StoreIdentifier;
import org.jacis.container.JacisObjectTypeSpec;
import org.jacis.container.JacisTransactionHandle;
import org.jacis.exception.JacisStaleObjectException;
import org.jacis.exception.JacisTransactionAlreadyPreparedForCommitException;
import org.jacis.plugin.JacisModificationListener;
import org.jacis.plugin.objectadapter.JacisObjectAdapter;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import java.util.function.BiConsumer;
import java.util.function.Function;
import java.util.function.Predicate;
import java.util.function.Supplier;
import java.util.stream.Collectors;
import java.util.stream.Stream;
/**
* A store for objects of a single type.
*
* All operations checking or returning entries of the store operate on the committed values merged with the
* current transactional view (obtained with the currently active transaction handle from the map {@link #txViewMap}).
* This means that first the transactional view is checked if it contains an entry for the desired key.
* If so this entry is returned, otherwise the committed value from the core store (see {@link #store}) is returned.
* Note that if an object is deleted in a transaction an entry with the value 'null' remains in the transactional view.
* Therefore deletions are also handled properly with respect to isolation.
*
* @param <K> Key type of the store entry
* @param <TV> Type of the objects in the transaction view. This is the type visible from the outside.
* @param <CV> Type of the objects as they are stored in the internal map of committed values. This type is not visible from the outside.
* @author Jan Wiemer
*/
@SuppressWarnings({"unused", "WeakerAccess"})
public class JacisStore<K, TV, CV> extends JacisContainer.JacisStoreTransactionAdapter {
/** Reference to the JACIS container this store belongs to */
private final JacisContainer container;
/** The store identifier uniquely identifying this store inside the container */
private final StoreIdentifier storeIdentifier;
/** The object type specification for the objects stored in this store*/
private final JacisObjectTypeSpec<K, TV, CV> spec;
/** The map containing the committed values of the objects (the core store) */
private final ConcurrentHashMap<K, StoreEntry<K, TV, CV>> store = new ConcurrentHashMap<>();
/** A Map assigning each active transaction handle the transactional view on this store */
private final Map<JacisTransactionHandle, JacisStoreTxView<K, TV, CV>> txViewMap = Collections.synchronizedMap(new WeakHashMap<JacisTransactionHandle, JacisStoreTxView<K, TV, CV>>());
/** Mutex / Lock to synchronize changes on the committed entries of the store (specially during internalCommit) */
private final ReadWriteLock storeAccessLock = new ReentrantReadWriteLock(true);
/** The object adapter defining how to copy objects from the committed view to a transactional view and back */
private final JacisObjectAdapter<TV, CV> objectAdapter;
/** The registry of tracked views for this store that are kept up to date on each commit automatically */
private final TrackedViewRegistry<K, TV, CV> trackedViewRegistry;
/** List of listeners notified on each modification on the committed values in the store */
private final List<JacisModificationListener<K, TV>> modificationListeners = new ArrayList<>();
public JacisStore(JacisContainer container, StoreIdentifier storeIdentifier, JacisObjectTypeSpec<K, TV, CV> spec) {
this.container = container;
this.storeIdentifier = storeIdentifier;
this.spec = spec;
this.objectAdapter = spec.getObjectAdapter();
this.trackedViewRegistry = new TrackedViewRegistry<>(this, spec.isCheckViewsOnCommit());
registerModificationListener(trackedViewRegistry);
}
/** @return the store identifier uniquely identifying this store inside the container */
public StoreIdentifier getStoreIdentifier() {
return storeIdentifier;
}
/** @return the reference to the JACIS container this store belongs to */
public JacisContainer getContainer() {
return container;
}
/** @return the object type specification for the objects stored in this store */
public JacisObjectTypeSpec<K, TV, CV> getObjectTypeSpec() {
return spec;
}
/** @return the list of listeners notified on each modification on the committed values in the store */
public List<JacisModificationListener<K, TV>> getModificationListeners() {
return modificationListeners;
}
/**
* Add the passed listener (implementing the interface {@link JacisModificationListener}).
* The listener will be notified on each modification on the committed values in the store.
*
* @param listener the listener to notify
* @return this store for method chaining
*/
public JacisStore<K, TV, CV> registerModificationListener(JacisModificationListener<K, TV> listener) {
if (!getObjectTypeSpec().isTrackOriginalValueEnabled()) {
throw new IllegalStateException("Registering modification listeners is only supported if original values are tracked, but they are not tracked for " + this + "! Trying to register listener: " + listener);
}
modificationListeners.add(listener);
return this;
}
/** @return the object adapter defining how to copy objects from the committed view to a transactional view and back */
public JacisObjectAdapter<TV, CV> getObjectAdapter() {
return objectAdapter;
}
/** @return the registry of tracked views for this store that are kept up to date on each commit automatically */
public TrackedViewRegistry<K, TV, CV> getTrackedViewRegistry() {
return trackedViewRegistry;
}
/**
* Create a read only view of the current transaction context that can be used (read only) in a different thread.
* This can be used to share one single transaction view in several threads.
* Before accessing the object store the other thread should set the returned context
* with the method {@link #startReadOnlyTransactionWithContext(JacisReadOnlyTransactionContext)}.
*
* @param withTxName transaction name used for the read only view.
* @return a read only view of the current transaction context.
*/
public JacisReadOnlyTransactionContext createReadOnlyTransactionView(String withTxName) {
JacisStoreTxView<K, TV, CV> originalTxView = getTxView(true);
return new JacisStoreTxView<>(withTxName, originalTxView);
}
/**
* Starts a new (read only) transaction with the passed transaction context.
* The new transaction will work on a read only snapshot of the original transaction (where the context is obtained from).
*
* @param readOnlyTxContext the transaction context of the original transaction.
*/
public void startReadOnlyTransactionWithContext(JacisReadOnlyTransactionContext readOnlyTxContext) {
if (!(readOnlyTxContext instanceof JacisStoreTxView)) {
throw new IllegalArgumentException("Passed illegal transactional context: " + readOnlyTxContext);
}
JacisStoreTxView<K, TV, CV> oldTxView = getTxView(false);
if (oldTxView != null) {
throw new IllegalStateException("Failed to start a read only transaction while another transaction is active! Active transaction: " + oldTxView.getTransaction() + ", passed transaction context: " + readOnlyTxContext + ", Thread: " + Thread.currentThread().getName());
}
@SuppressWarnings("unchecked") JacisStoreTxView<K, TV, CV> newTx = (JacisStoreTxView<K, TV, CV>) readOnlyTxContext;
setTransactionContext(newTx);
}
/**
* Returns if the store contains an entry for the passed key.
* Note that the method operates on the committed values merged with the current transactional view (see class description).
*
* @param key The key of the entry to check.
* @return if the store contains an entry for the passed key.
*/
public boolean containsKey(K key) {
JacisStoreTxView<K, TV, CV> txView = getTxView();
StoreEntryTxView<K, TV, CV> entryTxView = txView == null ? null : txView.getEntryTxView(key);
if (entryTxView != null) {
return entryTxView.isNotNull();
}
StoreEntry<K, TV, CV> coreEntry = store.get(key);
return coreEntry != null && coreEntry.isNotNull();
}
/**
* Returns if the object for the passed key has been updated in the current transaction.
* Note that an update has to be explicitly called for an object (by calling {@link #update(Object, Object)}).
* The check returns true if there exists a transactional view
* and the updated flag of this entry (see {@link StoreEntryTxView#updated}) is set (set by the 'update' method).
* Note that this method does not cause the referred object to be copied to the transactional view.
*
* @param key The key of the entry to check.
* @return if the object for the passed key has been updated in the current transaction.
*/
public boolean isUpdated(K key) {
JacisStoreTxView<K, TV, CV> txView = getTxView();
StoreEntryTxView<K, TV, CV> entryTxView = txView == null ? null : txView.getEntryTxView(key);
return entryTxView != null && entryTxView.isUpdated();
}
/**
* Returns if the object for the passed key is stale.
* An object is considered to be stale if after first reading it in the current transaction,
* an updated version of the same object has been committed by another transaction.
* Note that this method does not cause the referred object to be copied to the transactional view.
* @param key The key of the entry to check.
* @return if the object for the passed key is stale.
*/
public boolean isStale(K key) {
JacisStoreTxView<K, TV, CV> txView = getTxView();
StoreEntryTxView<K, TV, CV> entryTxView = txView == null ? null : txView.getEntryTxView(key);
return entryTxView != null && entryTxView.isStale(txView);
}
/**
* Checks if the object for the passed key is stale and throws a {@link JacisStaleObjectException} if so.
* An object is considered to be stale if after first reading it in the current transaction,
* an updated version of the same object has been committed by another transaction.
* Note that this method does not cause the referred object to be copied to the transactional view.
*
* @param key The key of the entry to check.
* @throws JacisStaleObjectException thrown if the object for the passed key is stale.
*/
public void checkStale(K key) throws JacisStaleObjectException {
JacisStoreTxView<K, TV, CV> txView = getTxView();
StoreEntryTxView<K, TV, CV> entryTxView = txView == null ? null : txView.getEntryTxView(key);
if (entryTxView != null) {
entryTxView.assertNotStale(txView);
}
}
/**
* Returns the value for the passed key.
* Note that the method operates on the committed values merged with the current transactional view (see class description).
* If the transactional view did not already contain the entry for the key it is copied to the transactional view now.
*
* @param key The key of the desired entry.
* @return the value for the passed key.
*/
public TV get(K key) {
return getOrCreateEntryTxView(getOrCreateTxView(), key).getValue();
}
/**
* Returns the value for the passed key.
* If the object is already stored in the transactional view of the current transaction this value is returned.
* Otherwise the behaviour depends on the object type:
* If the object adapter for the store supports a read only mode, then a read only view on the committed value is returned.
* Otherwise the committed entry for the key is copied to the transactional view now.
*
* @param key The key of the desired entry.
* @return the value for the passed key.
*/
public TV getReadOnly(K key) {
JacisStoreTxView<K, TV, CV> txView = getTxView();
StoreEntryTxView<K, TV, CV> entryTxView = txView == null ? null : txView.getEntryTxView(key);
if (entryTxView != null) {
return entryTxView.getValue();
} else {
StoreEntry<K, TV, CV> committedEntry = getCommittedEntry(key);
return committedEntry == null ? null : objectAdapter.cloneCommitted2ReadOnlyTxView(committedEntry.getValue());
}
}
/**
* Returns a read only projection of the object for the passed value.
* First a read only view (if supported) of the object is obtained by the {@link #getReadOnly(Object)} method.
* The projection is computed from the object by applying the passed projection function.
*
* @param key The key of the desired entry.
* @param projection The projection function computing the desired return value (of the passed type 'P') from the object.
* @param <P> The result type of the projection
* @return a read only projection of the object for the passed value.
*/
public <P> P getProjectionReadOnly(K key, Function<TV, P> projection) {
return projection.apply(getReadOnly(key));
}
/** @return a stream of all keys currently stored in the store. Note that the keys added by any pending transactions are contained (with null values if not yet committed). */
private Stream<K> keyStream() {
return store.keySet().stream(); // the store also contains new entries (with a null value), so iterating the keys is usually enough
}
/**
* Returns a stream of all objects (not 'null') currently stored in the store.
* Note that the method operates on the committed values merged with the current transactional view (see class description).
* If the transactional view did not already contain an entry it is copied to the transactional view now.
*
* @return a stream of all objects (not 'null') currently stored in the store.
*/
public Stream<TV> stream() { // Note this method will clone all objects into the TX view!
return keyStream().map(this::get).filter(v -> v != null);
}
/**
* Returns a stream of read only views for all objects (not 'null') currently stored in the store.
* Note that the method operates on the committed values merged with the current transactional view (see class description).
* Further note that the behavior of the method is equivalent to the behavior of the {@link #getReadOnly} method for a single object.
*
* @return a stream of all objects (not 'null') currently stored in the store.
*/
public Stream<TV> streamReadOnly() {
return keyStream().map(this::getReadOnly).filter(v -> v != null);
}
/**
* Returns a stream of all objects (not 'null') currently stored in the store filtered by the passed filter.
* Note that the method operates on the committed values merged with the current transactional view (see class description).
* If supported the filter predicate is checked on a read only view of the object (without cloning it).
* Only the objects passing the filter are copied to the transactional view (if they are not yet contained there).
*
* @param filter a filter predicate deciding if an object should be contained in the resulting stream ('null' means all objects should be contained)
* @return a stream of all objects (not 'null') currently stored in the store filtered by the passed filter.
*/
public Stream<TV> stream(Predicate<TV> filter) {
if (filter != null) {
return keyStream().map(k -> pair(k, getReadOnly(k))).filter(e -> e.val != null && filter.test(e.val)).map(e -> get(e.key));
} else {
return stream();
}
}
/**
* Returns a stream of read only views for all objects (not 'null') currently stored in the store filtered by the passed filter.
* Note that the method operates on the committed values merged with the current transactional view (see class description).
* If supported the filter predicate is checked on a read only view of the object (without cloning it).
* Further note that the behavior of the method is equivalent to the behavior of the {@link #getReadOnly} method for a single object
* (only the objects passing the filter may be copied to the transactional view if no read only view is supported (and they are not yet contained there)).
*
* @param filter a filter predicate deciding if an object should be contained in the resulting stream ('null' means all objects should be contained)
* @return a stream of all objects (not 'null') currently stored in the store filtered by the passed filter.
*/
public Stream<TV> streamReadOnly(Predicate<TV> filter) {
if (filter != null) {
return keyStream().map(this::getReadOnly).filter(v -> v != null && filter.test(v));
} else {
return streamReadOnly();
}
}
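/*
 * Illustrative usage sketch (not a member of this class): the difference between the filtering
 * variants. 'isActive' is a hypothetical predicate on TV used only for this example.
 *
 *   long activeCount = store.streamReadOnly(v -> isActive(v)).count(); // nothing is cloned into the TX view
 *   List<TV> editable = store.getAll(v -> isActive(v));                // matching objects are cloned and may be updated
 */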
/**
* Returns a list of all objects (not 'null') currently stored in the store.
*
* @return a list of all objects (not 'null') currently stored in the store.
*/
public List<TV> getAll() {
return getAll(null);
}
/**
* Returns a list of all objects (not 'null') currently stored in the store filtered by the passed filter.
* The method uses the {@link #stream(Predicate)} method and collects the results to a list.
*
* @param filter a filter predicate deciding if an object should be contained in the resulting stream ('null' means all objects should be contained)
* @return a list of all objects (not 'null') currently stored in the store filtered by the passed filter.
*/
public List<TV> getAll(Predicate<TV> filter) {
return stream(filter).collect(Collectors.toList());
}
/**
* Returns a list of read-only views for all objects (not 'null') currently stored in the store.
*
* @return a list of read-only views for all objects (not 'null') currently stored in the store.
*/
public List<TV> getAllReadOnly() {
return getAllReadOnly(null);
}
/**
* Returns a list of read-only views for all objects (not 'null') currently stored in the store filtered by the passed filter.
* The method uses the {@link #streamReadOnly(Predicate)} method and collects the results to a list.
*
* @param filter a filter predicate deciding if an object should be contained in the resulting stream ('null' means all objects should be contained)
* @return a list of read-only views for all objects (not 'null') currently stored in the store filtered by the passed filter.
*/
public List<TV> getAllReadOnly(Predicate<TV> filter) {
return streamReadOnly(filter).collect(Collectors.toList());
}
/**
* Returns a list of all objects (not 'null') currently stored in the store filtered by the passed filter.
* The method executes the {@link #getAll(Predicate)} method as an atomic operation.
* To achieve this the method is passed as a functional parameter to the {@link #computeAtomic(Supplier)} method.
* The execution of atomic operations cannot overlap with the execution of other atomic operations (but normal operations may overlap).
*
* @param filter a filter predicate deciding if an object should be contained in the resulting stream ('null' means all objects should be contained)
* @return a list of all objects (not 'null') currently stored in the store filtered by the passed filter.
*/
public List<TV> getAllAtomic(Predicate<TV> filter) {
return computeAtomic(() -> getAll(filter));
}
/**
* Returns a list of read-only views for all objects (not 'null') currently stored in the store filtered by the passed filter.
* The method executes the {@link #getAllReadOnly(Predicate)} method as an atomic operation.
* To achieve this the method is passed as a functional parameter to the {@link #computeAtomic(Supplier)} method.
* The execution of atomic operations cannot overlap with the execution of other atomic operations (but normal operations may overlap).
*
* @param filter a filter predicate deciding if an object should be contained in the resulting stream ('null' means all objects should be contained)
* @return a list of read-only views for all objects (not 'null') currently stored in the store filtered by the passed filter.
*/
public List<TV> getAllReadOnlyAtomic(Predicate<TV> filter) {
return computeAtomic(() -> getAllReadOnly(filter));
}
/**
* Update the object for the passed key with the passed object value.
* Note that the passed object instance may be the same (modified) instance obtained from the store before,
* but also can be another instance.
* Internally the value of the transactional view (see {@link StoreEntryTxView#txValue}) for this object is replaced with the passed value
* and the transactional view is marked as updated (see {@link StoreEntryTxView#updated}).
*
* @param key The key of the object to update.
* @param value The updated object instance.
* @throws JacisTransactionAlreadyPreparedForCommitException if the current transaction has already been prepared for commit
*/
public void update(K key, TV value) throws JacisTransactionAlreadyPreparedForCommitException {
JacisStoreTxView<K, TV, CV> txView = getOrCreateTxView().assertWritable();
if (txView.isCommitPending()) {
throw new JacisTransactionAlreadyPreparedForCommitException("Failed to update " + key + " because transaction is already prepared for commit: " + txView);
}
StoreEntryTxView<K, TV, CV> entryTxView = getOrCreateEntryTxView(txView, key);
entryTxView.updateValue(value);
}
/**
* Remove the object for the passed key from the store (first only in the transactional view of course).
* The method is equivalent to simply calling the {@link #update(Object, Object)} method with a 'null' value.
*
* @param key The key of the object to remove.
*/
public void remove(K key) {
update(key, null);
}
/**
* Refresh the object for the passed key from the committed values. Note that all earlier modifications in the current transaction are lost.
* First the current transactional view (if updated or not) is discarded.
* Afterwards a fresh copy of the current committed value is stored in the transactional view by calling the {@link #get(Object)} method.
*
* @param key The key of the object to refresh.
* @return the object for the passed key refreshed from the committed values. Note that all earlier modifications in the current transaction are lost.
*/
public TV refresh(K key) { // refresh with committed version -> discard all changes made by the current TX
JacisStoreTxView<K, TV, CV> txView = getTxView();
txView.removeTxViewEntry(key, true);
return get(key);
}
/**
* Refresh the object for the passed key from the committed values if the object is not marked as updated.
* Note that all earlier modifications in the current transaction are lost if the object is not marked as updated.
* First the current transactional view is discarded, but only if it is not marked as updated.
* Afterwards, a fresh copy of the current committed value is stored in the transactional view by calling the {@link #get(Object)} method.
*
* @param key The key of the object to refresh.
* @return the object for the passed key refreshed from the committed values if the object is not marked as updated.
*/
public TV refreshIfNotUpdated(K key) { // if not updated: refresh with committed version -> discard all changes made by the current TX
JacisStoreTxView<K, TV, CV> txView = getTxView();
txView.removeTxViewEntry(key, false);
return get(key);
}
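// Illustrative usage sketch (same hypothetical store as above): discarding local, not-yet-committed
// changes and re-reading the committed state:
//
//   Account modified = store.get("acc-1");
//   modified.setBalance(0);                      // changed only in the TX working copy
//   Account committed = store.refresh("acc-1");  // drops the TX view entry and re-reads the committed value
//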
/**
* Returns the current size of the store.
* Note that the size is only a heuristic because all entries in the committed values are counted.
* Since objects created or deleted in a pending transaction also have an entry (with a 'null' value) in the committed values,
* these objects are counted as well.
* @return The current size of the store.
*/
public int size() { // heuristic (due to concurrent access)
return store.size();
}
/**
* Execute the passed operation (without return value) as an atomic operation.
* The execution of atomic operations cannot overlap with the execution of other atomic operations
* (but normal operations may overlap).
* @param atomicOperation The operation to execute atomically
*/
public void executeAtomic(Runnable atomicOperation) { // Execute an atomic operation. No internalCommit of any other TX and no other atomic action will interleave.
withReadLock(runnableWrapper(atomicOperation));
}
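// Illustrative usage sketch (same hypothetical store as above): grouping several store operations so
// that no commit of another transaction interleaves between them:
//
//   store.executeAtomic(() -> {
//     Account from = store.get("acc-1");
//     Account to = store.get("acc-2");
//     from.setBalance(from.getBalance() - 50);
//     to.setBalance(to.getBalance() + 50);
//     store.update("acc-1", from);
//     store.update("acc-2", to);
//   });
//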
/**
* Execute the passed operation (with return value) as an atomic operation.
* @param atomicOperation The operation to execute atomically
* @param <R> The return type of the operation
* @return The return value of the operation
*/
public <R> R computeAtomic(Supplier<R> atomicOperation) { // Execute an atomic operation. No internalCommit of any other TX and no other atomic action will interleave.
return withReadLock(atomicOperation);
}
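/**
 * Accumulates a value over all objects (read-only views) currently visible in the store using the passed accumulator function.
 * Note that this is a plain (non-atomic) accumulation; see {@link #accumulateAtomic(Object, BiConsumer)} for the atomic variant.
 *
 * @param target The initial value for the accumulation target
 * @param accumulator The accumulator method getting the current value of the accumulation target (type 'C') and an object (type 'TV').
 * @param <C> The type of the accumulation target.
 * @return The accumulation result.
 */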
public <C> C accumulate(C target, BiConsumer<C, TV> accumulator) {
for (TV entryTxView : getAllReadOnly(null)) {
accumulator.accept(target, entryTxView);
}
return target;
}
/**
* Accumulate a value from all objects with the passed accumulator function as an atomic operation.
* The method executes the {@link #accumulate(Object, BiConsumer)} method as an atomic operation
* by passing it as a functional parameter to the {@link #computeAtomic(Supplier)} method.
* The execution of atomic operations cannot overlap with the execution of other atomic operations (but normal operations may overlap).
*
* @param target The initial value for the target
* @param accumulator The accumulator method getting the current value of the accumulation target (type 'C') and an object (type 'TV').
* @param <C> The type of the accumulation target.
* @return The accumulation result (computed as an atomic operation).
*/
public <C> C accumulateAtomic(C target, BiConsumer<C, TV> accumulator) {
return computeAtomic(() -> accumulate(target, accumulator));
}
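// Illustrative usage sketch (same hypothetical store as above, assuming Account#getBalance() returns a long):
// summing a value over a consistent snapshot of all objects, using a mutable accumulation target:
//
//   java.util.concurrent.atomic.AtomicLong total = new java.util.concurrent.atomic.AtomicLong();
//   store.accumulateAtomic(total, (sum, account) -> sum.addAndGet(account.getBalance()));
//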
/**
* Returns the value that was valid when the object was first accessed by the current TX ('null' if untouched).
*
* @param key The key of the desired object.
* @return the value that was valid when the object was first accessed by the current TX ('null' if untouched).
*/
public TV getTransactionStartValue(K key) {
assertTrackOriginalValue();
JacisStoreTxView<K, TV, CV> txView = getTxView();
StoreEntryTxView<K, TV, CV> entryTxView = txView == null ? null : txView.getEntryTxView(key);
return entryTxView == null ? null : entryTxView.getOrigValue(); // if TX never touched the object we return null
}
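// Illustrative usage sketch (same hypothetical store as above; requires original value tracking to be
// enabled in the store specification): comparing the value seen at first access with the current working copy:
//
//   Account current = store.get("acc-1");
//   Account atTxStart = store.getTransactionStartValue("acc-1"); // 'null' if the TX never touched the object
//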
/**
* Returns an info object (of type {@link StoreEntryInfo}) containing information regarding the current state of the object
* (regarding the committed values and the current transactional view).
*
* @param key The key of the desired object.
* @return an info object (of type {@link StoreEntryInfo}) containing information regarding the current state of the object.
*/
public StoreEntryInfo<K, TV, CV> getObjectInfo(K key) {
JacisStoreTxView<K, TV, CV> txView = getTxView();
StoreEntry<K, TV, CV> committedEntry = getCommittedEntry(key);
StoreEntryTxView<K, TV, CV> entryTxView = txView == null ? null : txView.getEntryTxView(key);
return new StoreEntryInfo<>(key, committedEntry, entryTxView, txView);
}
/**
* Clear the complete store, remove all committed values and invalidate all pending transactions.
*/
public synchronized void clear() {
storeAccessLock.writeLock().lock();
try {
for (JacisStoreTxView<K, TV, CV> txCtx : txViewMap.values()) {
txCtx.invalidate("store cleared");
}
store.clear();
trackedViewRegistry.clearViews();
} finally {
storeAccessLock.writeLock().unlock();
}
}
// transaction demarcation methods
@Override
protected void internalPrepare(JacisTransactionHandle transaction) {
withWriteLock(runnableWrapper(() -> new StoreTxDemarcationExecutor().executePrepare(this, transaction)));
}
@Override
protected void internalCommit(JacisTransactionHandle transaction) {
withWriteLock(runnableWrapper(() -> new StoreTxDemarcationExecutor().executeCommit(this, transaction)));
}
@Override
protected void internalRollback(JacisTransactionHandle transaction) {
withWriteLock(runnableWrapper(() -> new StoreTxDemarcationExecutor().executeRollback(this, transaction)));
}
// helper methods to deal with transaction views of entries
private StoreEntryTxView<K, TV, CV> getOrCreateEntryTxView(JacisStoreTxView<K, TV, CV> txView, K key) {
StoreEntryTxView<K, TV, CV> entryTxView = txView.getEntryTxView(key);
if (entryTxView == null) {
entryTxView = withWriteLock(() -> txView.createTxViewEntry(getOrCreateCommittedEntry(key)));
}
return entryTxView;
}
// helper methods to deal with committed entries
private StoreEntry<K, TV, CV> createCommittedEntry(K key) {
StoreEntry<K, TV, CV> newCommittedEntry = new StoreEntry<>(this, key);
StoreEntry<K, TV, CV> oldCommittedEntry = store.putIfAbsent(key, newCommittedEntry);
return oldCommittedEntry != null ? oldCommittedEntry : newCommittedEntry;
}
private StoreEntry<K, TV, CV> getCommittedEntry(K key) {
return store.get(key);
}
private StoreEntry<K, TV, CV> getOrCreateCommittedEntry(K key) {
StoreEntry<K, TV, CV> committedEntry = store.get(key);
if (committedEntry == null) {
committedEntry = createCommittedEntry(key);
}
return committedEntry;
}
void checkRemoveCommittedEntry(StoreEntry<K, TV, CV> entryCommitted, JacisStoreTxView<K, TV, CV> currTxView) {
if (entryCommitted.getValue() != null || entryCommitted.isLocked()) {
return; // cannot remove
}
K key = entryCommitted.getKey();
Collection<JacisStoreTxView<K, TV, CV>> txs;
synchronized (txViewMap) {
txs = new ArrayList<>(txViewMap.values());
}
for (JacisStoreTxView<K, TV, CV> txCtx : txs) {
if (txCtx.isReadOnly()) {
continue;
} else if (currTxView.getTransaction().equals(txCtx.getTransaction())) {
continue; // a reference from the current transaction to the committed entry can be ignored
}
if (txCtx.containsTxView(key)) {
return; // still referred by transaction
}
}
store.remove(key);
}
// synchronized execution
private <R> R withWriteLock(Supplier<R> task) {
storeAccessLock.writeLock().lock();
try {
return task.get();
} finally {
storeAccessLock.writeLock().unlock();
}
}
private <R> R withReadLock(Supplier<R> task) {
storeAccessLock.readLock().lock();
try {
return task.get();
} finally {
storeAccessLock.readLock().unlock();
}
}
private Supplier<Object> runnableWrapper(Runnable r) {
return () -> {
r.run();
return null;
};
}
// private methods to maintain the TX view
JacisStoreTxView<K, TV, CV> getTxView() {
return getTxView(false);
}
private JacisStoreTxView<K, TV, CV> getOrCreateTxView() {
return getTxView(true);
}
private JacisStoreTxView<K, TV, CV> getTxView(boolean createIfAbsent) {
JacisTransactionHandle transaction = container.getCurrentTransaction(createIfAbsent);
return getTxView(transaction, createIfAbsent);
}
JacisStoreTxView<K, TV, CV> getTxView(JacisTransactionHandle transaction, boolean createIfAbsent) {
JacisStoreTxView<K, TV, CV> txView = txViewMap.get(transaction);
if (txView == null && createIfAbsent) {
txView = new JacisStoreTxView<>(this, transaction);
txViewMap.put(transaction, txView);
}
return txView;
}
private void setTransactionContext(JacisStoreTxView<K, TV, CV> newTxContext) {
JacisTransactionHandle transaction = container.getCurrentTransaction(true);
txViewMap.put(transaction, newTxContext);
}
void notifyTxViewDestroyed(JacisStoreTxView<K, TV, CV> txView) {
txViewMap.remove(txView.getTransaction());
}
// other private helper methods
private void assertTrackOriginalValue() {
if (!spec.isTrackOriginalValueEnabled()) {
throw new UnsupportedOperationException("Track original values not supported by " + spec + "!");
}
}
private KeyValuePair<K, TV> pair(K key, TV val) {
return new KeyValuePair<>(key, val);
}
@Override
public String toString() {
return getClass().getSimpleName() + "-(" + spec + ": #" + store.size() + " entries)";
}
private static class KeyValuePair<PK, PV> {
PK key;
PV val;
KeyValuePair(PK key, PV val) {
this.key = key;
this.val = val;
}
}
}
|
package mondrian.olap.fun;
import mondrian.olap.*;
import mondrian.olap.type.Type;
import mondrian.olap.type.TupleType;
import mondrian.olap.type.SetType;
import mondrian.resource.MondrianResource;
import mondrian.calc.*;
import mondrian.calc.impl.AbstractListCalc;
import mondrian.mdx.ResolvedFunCall;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
/**
* Definition of the <code>CrossJoin</code> MDX function.
*
* @author jhyde
* @version $Id$
* @since Mar 23, 2006
*/
class CrossJoinFunDef extends FunDefBase {
static final ReflectiveMultiResolver Resolver = new ReflectiveMultiResolver(
"Crossjoin",
"Crossjoin(<Set1>, <Set2>)",
"Returns the cross product of two sets.",
new String[]{"fxxx"},
CrossJoinFunDef.class);
static final StarCrossJoinResolver StarResolver = new StarCrossJoinResolver();
public CrossJoinFunDef(FunDef dummyFunDef) {
super(dummyFunDef);
}
public Type getResultType(Validator validator, Exp[] args) {
// CROSSJOIN(<Set1>,<Set2>) has type [Hie1] x [Hie2].
List list = new ArrayList();
for (int i = 0; i < args.length; i++) {
Exp arg = args[i];
final Type type = arg.getType();
if (type instanceof SetType) {
addTypes(type, list);
} else if (getName().equals("*")) {
// The "*" form of CrossJoin is lenient: args can be either
// members/tuples or sets.
addTypes(type, list);
} else {
throw Util.newInternal("arg to crossjoin must be a set");
}
}
final Type[] types = (Type[]) list.toArray(new Type[list.size()]);
final TupleType tupleType = new TupleType(types);
return new SetType(tupleType);
}
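// Illustrative example of the type derivation above (hypothetical hierarchies, not part of the original
// source): for Crossjoin(<set of [Gender] members>, <set of [Time] members>) the element types of both set
// arguments are flattened into one list, so the derived result type is
//   SetType(TupleType {member type of [Gender], member type of [Time]})
// i.e. a set whose elements are ([Gender] member, [Time] member) tuples.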
/**
* Adds a type to a list of types. If type is a {@link TupleType}, does so
* recursively.
*/
private static void addTypes(final Type type, List list) {
if (type instanceof SetType) {
SetType setType = (SetType) type;
addTypes(setType.getElementType(), list);
} else if (type instanceof TupleType) {
TupleType tupleType = (TupleType) type;
for (int i = 0; i < tupleType.elementTypes.length; i++) {
addTypes(tupleType.elementTypes[i], list);
}
} else {
list.add(type);
}
}
public Calc compileCall(final ResolvedFunCall call, ExpCompiler compiler) {
final ListCalc listCalc1 = toList(compiler, call.getArg(0));
final ListCalc listCalc2 = toList(compiler, call.getArg(1));
return new AbstractListCalc(call, new Calc[] {listCalc1, listCalc2}) {
public List evaluateList(Evaluator evaluator) {
SchemaReader schemaReader = evaluator.getSchemaReader();
NativeEvaluator nativeEvaluator =
schemaReader.getNativeSetEvaluator(
call.getFunDef(), call.getArgs(), evaluator, this);
if (nativeEvaluator != null) {
return (List) nativeEvaluator.execute();
}
Evaluator oldEval = null;
assert (oldEval = evaluator.push()) != null;
final List list1 = listCalc1.evaluateList(evaluator);
assert oldEval.equals(evaluator) : "listCalc1 changed context";
if (list1.isEmpty()) {
return Collections.EMPTY_LIST;
}
final List list2 = listCalc2.evaluateList(evaluator.push());
assert oldEval.equals(evaluator) : "listCalc2 changed context";
return crossJoin(list1, list2, evaluator);
}
};
}
private ListCalc toList(ExpCompiler compiler, final Exp exp) {
final Type type = exp.getType();
if (type instanceof SetType) {
return compiler.compileList(exp);
} else {
return new SetFunDef.SetCalc(
new DummyExp(new SetType(type)),
new Exp[] {exp},
compiler);
}
}
List crossJoin(List list1, List list2, Evaluator evaluator) {
if (list1.isEmpty() || list2.isEmpty()) {
return Collections.EMPTY_LIST;
}
// Optimize nonempty(crossjoin(a, b)) ==
// nonempty(crossjoin(nonempty(a), nonempty(b)))
long size = (long)list1.size() * (long)list2.size();
int resultLimit = MondrianProperties.instance().ResultLimit.get();
//if (resultLimit > 0 && size > resultLimit && evaluator.isNonEmpty()) {
if (size > 1000 && evaluator.isNonEmpty()) {
// Instead of an overflow exception, try to further
// optimize nonempty(crossjoin(a, b)) ==
// nonempty(crossjoin(nonempty(a), nonempty(b)))
final int missCount = evaluator.getMissCount();
list1 = nonEmptyList(evaluator, list1);
list2 = nonEmptyList(evaluator, list2);
size = (long)list1.size() * (long)list2.size();
// both list1 and list2 may be empty after nonEmpty optimization
if (size == 0)
return Collections.EMPTY_LIST;
final int missCount2 = evaluator.getMissCount();
if (missCount2 > missCount && size > 1000) {
// We've hit some cells which are not in the cache. They
// registered as non-empty, but we won't really know until
// we've populated the cache. The cartesian product is still
// huge, so let's quit now, and try again after the cache has
// been loaded.
return Collections.EMPTY_LIST;
}
}
// throw an exception if the crossjoin gets too large
if (resultLimit > 0 && resultLimit < size) {
// result limit exceeded, throw an exception
throw MondrianResource.instance().LimitExceededDuringCrossjoin.ex(
new Long(size), new Long(resultLimit));
}
boolean neitherSideIsTuple = true;
int arity0 = 1;
int arity1 = 1;
if (list1.get(0) instanceof Member[]) {
arity0 = ((Member[]) list1.get(0)).length;
neitherSideIsTuple = false;
}
if (list2.get(0) instanceof Member[]) {
arity1 = ((Member[]) list2.get(0)).length;
neitherSideIsTuple = false;
}
if (size > Integer.MAX_VALUE) {
// If the long "size" value is greater than Integer.MAX_VALUE, then
// it can not be used as the size for an array allocation.
String msg = "Union size \"" +
size +
"\" too big (greater than Integer.MAX_VALUE)";
throw Util.newInternal(msg);
}
List result = new ArrayList((int) size);
if (neitherSideIsTuple) {
// Simpler routine if we know neither side contains tuples.
for (int i = 0, m = list1.size(); i < m; i++) {
Member o0 = (Member) list1.get(i);
for (int j = 0, n = list2.size(); j < n; j++) {
Member o1 = (Member) list2.get(j);
result.add(new Member[]{o0, o1});
}
}
} else {
// More complex routine if one or both sides are arrays
// (probably the product of nested CrossJoins).
Member[] row = new Member[arity0 + arity1];
for (int i = 0, m = list1.size(); i < m; i++) {
int x = 0;
Object o0 = list1.get(i);
if (o0 instanceof Member) {
row[x++] = (Member) o0;
} else {
assertTrue(o0 instanceof Member[]);
final Member[] members = (Member[]) o0;
for (int k = 0; k < members.length; k++) {
row[x++] = members[k];
}
}
for (int j = 0, n = list2.size(); j < n; j++) {
Object o1 = list2.get(j);
if (o1 instanceof Member) {
row[x++] = (Member) o1;
} else {
assertTrue(o1 instanceof Member[]);
final Member[] members = (Member[]) o1;
for (int k = 0; k < members.length; k++) {
row[x++] = members[k];
}
}
result.add(row.clone());
x = arity0;
}
}
}
return result;
}
protected static List nonEmptyList(Evaluator evaluator, List list) {
if (list.isEmpty()) {
return list;
}
List result = new ArrayList();
evaluator = evaluator.push();
if (list.get(0) instanceof Member[]) {
for (Iterator it = list.iterator(); it.hasNext();) {
Member[] m = (Member[]) it.next();
evaluator.setContext(m);
Object value = evaluator.evaluateCurrent();
if (value != null && !(value instanceof Throwable)) {
result.add(m);
}
}
} else {
for (Iterator it = list.iterator(); it.hasNext();) {
Member m = (Member) it.next();
evaluator.setContext(m);
Object value = evaluator.evaluateCurrent();
if (value != null && !(value instanceof Throwable)) {
result.add(m);
}
}
}
return result;
}
private static class StarCrossJoinResolver extends MultiResolver {
public StarCrossJoinResolver() {
super(
"*",
"<Set1> * <Set2>",
"Returns the cross product of two sets.",
new String[]{"ixxx", "ixmx", "ixxm", "ixmm"});
}
public FunDef resolve(
Exp[] args, Validator validator, int[] conversionCount) {
// This function only applies in contexts which require a set.
// Elsewhere, "*" is the multiplication operator.
// This means that [Measures].[Unit Sales] * [Gender].[M] is
// well-defined.
if (validator.requiresExpression()) {
return null;
}
return super.resolve(args, validator, conversionCount);
}
protected FunDef createFunDef(Exp[] args, FunDef dummyFunDef) {
return new CrossJoinFunDef(dummyFunDef);
}
}
}
// End CrossJoinFunDef.java
|
package org.javafunk.funk;
import com.google.common.collect.Multiset;
import org.javafunk.funk.builders.*;
import org.javafunk.funk.datastructures.tuples.*;
import java.util.*;
import static java.util.Arrays.asList;
import static org.javafunk.funk.Classes.uncheckedInstantiate;
public class Literals {
private Literals() {}
/**
* Returns an empty immutable {@code Iterable} instance.
* <p/>
* <p>This form of literal is most suited to direct assignment to a variable
* since in this case, the type {@code E} is inferred from the variable
* declaration. For example:
* <blockquote>
* <pre>
* Iterable<String> strings = iterable();
* </pre>
* </blockquote>
* </p>
*
* @param <E> The type of the elements contained in the {@code Iterable}.
* @return An {@code Iterable} instance over the type {@code E} containing no elements.
*/
public static <E> Iterable<E> iterable() {
return new IterableBuilder<E>().build();
}
@SuppressWarnings("unchecked")
public static <E> Iterable<E> iterable(Class<? extends Iterable> iterableClass) {
return uncheckedInstantiate(iterableClass);
}
/**
* Returns an empty immutable {@code Iterable} instance over the type
* of the supplied {@code Class}.
* <p/>
* <p>This form of literal is most suited to inline usage such as when passing an
* empty iterable as a parameter in a method call since it reads more clearly than
* {@link #iterable()}. For example, compare the following:
* <blockquote>
* <pre>
* public class Account {
* public Account(Money balance, List<Money> transactions) {
* ...
* }
*
* ...
* }
*
* Account account1 = new Account(new Money(0), Literals.<Money>iterable());
* Account account2 = new Account(new Money(0), iterableOf(Money.class));
* </pre>
* </blockquote>
* </p>
*
* @param elementClass A {@code Class} representing the type of elements
* contained in this {@code Iterable}
* @param <E> The type of the elements contained in the {@code Iterable}.
* @return An {@code Iterable} instance over the type {@code E} containing no elements.
*/
public static <E> Iterable<E> iterableOf(Class<E> elementClass) {
return new IterableBuilder<E>().build();
}
/**
* Returns an immutable {@code Iterable} instance over the type {@code E}
* containing all elements from the supplied {@code Iterable}. The order of
* the elements in the resulting {@code Iterable} is determined by the order in
* which they are yielded from the supplied {@code Iterable}.
* <p/>
* <p>This form of literal is useful when an immutable copy of an {@code Iterable}
* is required. For example:
* <blockquote>
* <pre>
* public class Product {
* public Iterable<Integer> getSizes() {
* return iterableFrom(sizes);
* }
*
* ...
* }
* </pre>
* </blockquote>
* </p>
*
* @param elements An {@code Iterable} of elements from which an {@code Iterable} should
* be constructed.
* @param <E> The type of the elements to be contained in the returned
* {@code Iterable}.
* @return An {@code Iterable} over the type {@code E} containing all elements from the
* supplied {@code Iterable} in the order they are yielded.
*/
public static <E> Iterable<E> iterableFrom(Iterable<? extends E> elements) {
return new IterableBuilder<E>().with(elements).build();
}
/**
* Returns an immutable {@code Iterable} instance over the type {@code E}
* containing all elements from the supplied array. The order of the elements
* in the resulting {@code Iterable} is the same as the order of the elements
* in the array.
* <p/>
* <p>For example, the following:
* <blockquote>
* <pre>
* String[] strings = new String[]{"one", "two", "three"};
* Iterable<String> iterableOfStrings = Literals.iterableFrom(strings);
* </pre>
* </blockquote>
* is equivalent to:
* <blockquote>
* <pre>
* Iterable<String> iterableOfStrings = Literals.iterableWith("one", "two", "three");
* </pre>
* </blockquote>
* </p>
*
* @param elementArray An array of elements from which an {@code Iterable} should be
* constructed.
* @param <E> The type of the elements to be contained in the returned
* {@code Iterable}.
* @return An {@code Iterable} over the type {@code E} containing all elements from the
* supplied array in the same order as the supplied array.
*/
public static <E> Iterable<E> iterableFrom(E[] elementArray) {
return new IterableBuilder<E>().with(elementArray).build();
}
/**
* Returns an immutable {@code Iterable} instance over the type {@code E} containing the
* supplied element.
*
* @param e An element from which to construct an {@code Iterable}.
* @param <E> The type of the element contained in the returned {@code Iterable}.
* @return An {@code Iterable} instance over type {@code E} containing the supplied element.
*/
@SuppressWarnings("unchecked") public static <E> Iterable<E> iterableWith(E e) {
return iterableFrom(asList(e));
}
/**
* Returns an immutable {@code Iterable} instance over the type {@code E} containing the
* supplied elements. The order of the resultant {@code Iterable} is the same as the order
* of the elements in the argument list.
*
* @param e1 The first element from which to construct an {@code Iterable}.
* @param e2 The second element from which to construct an {@code Iterable}.
* @param <E> The type of the elements contained in the returned {@code Iterable}.
* @return An {@code Iterable} instance over type {@code E} containing the supplied elements.
*/
@SuppressWarnings("unchecked") public static <E> Iterable<E> iterableWith(E e1, E e2) {
return iterableFrom(asList(e1, e2));
}
/**
* Returns an immutable {@code Iterable} instance over the type {@code E} containing the
* supplied elements. The order of the resultant {@code Iterable} is the same as the order
* of the elements in the argument list.
*
* @param e1 The first element from which to construct an {@code Iterable}.
* @param e2 The second element from which to construct an {@code Iterable}.
* @param e3 The third element from which to construct an {@code Iterable}.
* @param <E> The type of the elements contained in the returned {@code Iterable}.
* @return An {@code Iterable} instance over type {@code E} containing the supplied elements.
*/
@SuppressWarnings("unchecked") public static <E> Iterable<E> iterableWith(E e1, E e2, E e3) {
return iterableFrom(asList(e1, e2, e3));
}
/**
* Returns an immutable {@code Iterable} instance over the type {@code E} containing the
* supplied elements. The order of the resultant {@code Iterable} is the same as the order
* of the elements in the argument list.
*
* @param e1 The first element from which to construct an {@code Iterable}.
* @param e2 The second element from which to construct an {@code Iterable}.
* @param e3 The third element from which to construct an {@code Iterable}.
* @param e4 The fourth element from which to construct an {@code Iterable}.
* @param <E> The type of the elements contained in the returned {@code Iterable}.
* @return An {@code Iterable} instance over type {@code E} containing the supplied elements.
*/
@SuppressWarnings("unchecked") public static <E> Iterable<E> iterableWith(E e1, E e2, E e3, E e4) {
return iterableFrom(asList(e1, e2, e3, e4));
}
/**
* Returns an immutable {@code Iterable} instance over the type {@code E} containing the
* supplied elements. The order of the resultant {@code Iterable} is the same as the order
* of the elements in the argument list.
*
* @param e1 The first element from which to construct an {@code Iterable}.
* @param e2 The second element from which to construct an {@code Iterable}.
* @param e3 The third element from which to construct an {@code Iterable}.
* @param e4 The fourth element from which to construct an {@code Iterable}.
* @param e5 The fifth element from which to construct an {@code Iterable}.
* @param <E> The type of the elements contained in the returned {@code Iterable}.
* @return An {@code Iterable} instance over type {@code E} containing the supplied elements.
*/
@SuppressWarnings("unchecked") public static <E> Iterable<E> iterableWith(E e1, E e2, E e3, E e4, E e5) {
return iterableFrom(asList(e1, e2, e3, e4, e5));
}
/**
* Returns an immutable {@code Iterable} instance over the type {@code E} containing the
* supplied elements. The order of the resultant {@code Iterable} is the same as the order
* of the elements in the argument list.
*
* @param e1 The first element from which to construct an {@code Iterable}.
* @param e2 The second element from which to construct an {@code Iterable}.
* @param e3 The third element from which to construct an {@code Iterable}.
* @param e4 The fourth element from which to construct an {@code Iterable}.
* @param e5 The fifth element from which to construct an {@code Iterable}.
* @param e6 The sixth element from which to construct an {@code Iterable}.
* @param <E> The type of the elements contained in the returned {@code Iterable}.
* @return An {@code Iterable} instance over type {@code E} containing the supplied elements.
*/
@SuppressWarnings("unchecked") public static <E> Iterable<E> iterableWith(E e1, E e2, E e3, E e4, E e5, E e6) {
return iterableFrom(asList(e1, e2, e3, e4, e5, e6));
}
/**
* Returns an immutable {@code Iterable} instance over the type {@code E} containing the
* supplied elements. The order of the resultant {@code Iterable} is the same as the order
* of the elements in the argument list.
*
* @param e1 The first element from which to construct an {@code Iterable}.
* @param e2 The second element from which to construct an {@code Iterable}.
* @param e3 The third element from which to construct an {@code Iterable}.
* @param e4 The fourth element from which to construct an {@code Iterable}.
* @param e5 The fifth element from which to construct an {@code Iterable}.
* @param e6 The sixth element from which to construct an {@code Iterable}.
* @param e7 The seventh element from which to construct an {@code Iterable}.
* @param <E> The type of the elements contained in the returned {@code Iterable}.
* @return An {@code Iterable} instance over type {@code E} containing the supplied elements.
*/
@SuppressWarnings("unchecked") public static <E> Iterable<E> iterableWith(E e1, E e2, E e3, E e4, E e5, E e6, E e7) {
return iterableFrom(asList(e1, e2, e3, e4, e5, e6, e7));
}
/**
* Returns an immutable {@code Iterable} instance over the type {@code E} containing the
* supplied elements. The order of the resultant {@code Iterable} is the same as the order
* of the elements in the argument list.
*
* @param e1 The first element from which to construct an {@code Iterable}.
* @param e2 The second element from which to construct an {@code Iterable}.
* @param e3 The third element from which to construct an {@code Iterable}.
* @param e4 The fourth element from which to construct an {@code Iterable}.
* @param e5 The fifth element from which to construct an {@code Iterable}.
* @param e6 The sixth element from which to construct an {@code Iterable}.
* @param e7 The seventh element from which to construct an {@code Iterable}.
* @param e8 The eighth element from which to construct an {@code Iterable}.
* @param <E> The type of the elements contained in the returned {@code Iterable}.
* @return An {@code Iterable} instance over type {@code E} containing the supplied elements.
*/
@SuppressWarnings("unchecked") public static <E> Iterable<E> iterableWith(E e1, E e2, E e3, E e4, E e5, E e6, E e7, E e8) {
return iterableFrom(asList(e1, e2, e3, e4, e5, e6, e7, e8));
}
/**
* Returns an immutable {@code Iterable} instance over the type {@code E} containing the
* supplied elements. The order of the resultant {@code Iterable} is the same as the order
* of the elements in the argument list.
*
* @param e1 The first element from which to construct an {@code Iterable}.
* @param e2 The second element from which to construct an {@code Iterable}.
* @param e3 The third element from which to construct an {@code Iterable}.
* @param e4 The fourth element from which to construct an {@code Iterable}.
* @param e5 The fifth element from which to construct an {@code Iterable}.
* @param e6 The sixth element from which to construct an {@code Iterable}.
* @param e7 The seventh element from which to construct an {@code Iterable}.
* @param e8 The eighth element from which to construct an {@code Iterable}.
* @param e9 The ninth element from which to construct an {@code Iterable}.
* @param <E> The type of the elements contained in the returned {@code Iterable}.
* @return An {@code Iterable} instance over type {@code E} containing the supplied elements.
*/
@SuppressWarnings("unchecked") public static <E> Iterable<E> iterableWith(E e1, E e2, E e3, E e4, E e5, E e6, E e7, E e8, E e9) {
return iterableFrom(asList(e1, e2, e3, e4, e5, e6, e7, e8, e9));
}
/**
* Returns an immutable {@code Iterable} instance over the type {@code E} containing the
* supplied elements. The order of the resultant {@code Iterable} is the same as the order
* of the elements in the argument list.
*
* @param e1 The first element from which to construct an {@code Iterable}.
* @param e2 The second element from which to construct an {@code Iterable}.
* @param e3 The third element from which to construct an {@code Iterable}.
* @param e4 The fourth element from which to construct an {@code Iterable}.
* @param e5 The fifth element from which to construct an {@code Iterable}.
* @param e6 The sixth element from which to construct an {@code Iterable}.
* @param e7 The seventh element from which to construct an {@code Iterable}.
* @param e8 The eighth element from which to construct an {@code Iterable}.
* @param e9 The ninth element from which to construct an {@code Iterable}.
* @param e10 The tenth element from which to construct an {@code Iterable}.
* @param <E> The type of the elements contained in the returned {@code Iterable}.
* @return An {@code Iterable} instance over type {@code E} containing the supplied elements.
*/
@SuppressWarnings("unchecked") public static <E> Iterable<E> iterableWith(E e1, E e2, E e3, E e4, E e5, E e6, E e7, E e8, E e9, E e10) {
return iterableFrom(asList(e1, e2, e3, e4, e5, e6, e7, e8, e9, e10));
}
/**
* Returns an immutable {@code Iterable} instance over the type {@code E} containing the
* supplied elements. The order of the resultant {@code Iterable} is the same as the order
* of the elements in the argument list.
* <p/>
* <p>Note that this literal uses a generic varargs parameter as the last argument in the
* argument list and as such will cause unchecked cast warnings. Explicit argument
* lists for up to ten arguments have been provided for convenience. In order to avoid
* the unchecked cast warnings, an {@link IterableBuilder} can be used instead.</p>
*
* @param e1 The first element from which to construct an {@code Iterable}.
* @param e2 The second element from which to construct an {@code Iterable}.
* @param e3 The third element from which to construct an {@code Iterable}.
* @param e4 The fourth element from which to construct an {@code Iterable}.
* @param e5 The fifth element from which to construct an {@code Iterable}.
* @param e6 The sixth element from which to construct an {@code Iterable}.
* @param e7 The seventh element from which to construct an {@code Iterable}.
* @param e8 The eighth element from which to construct an {@code Iterable}.
* @param e9 The ninth element from which to construct an {@code Iterable}.
* @param e10 The tenth element from which to construct an {@code Iterable}.
* @param e11on The remaining elements from which to construct an {@code Iterable}.
* @param <E> The type of the elements contained in the returned {@code Iterable}.
* @return an {@code Iterable} instance over type {@code E} containing the supplied elements.
*/
@SuppressWarnings("unchecked") public static <E> Iterable<E> iterableWith(E e1, E e2, E e3, E e4, E e5, E e6, E e7, E e8, E e9, E e10, E... e11on) {
return iterableBuilderFrom(asList(e1, e2, e3, e4, e5, e6, e7, e8, e9, e10)).with(asList(e11on)).build();
}
/**
* Returns an {@code IterableBuilder} containing no elements.
* <p/>
* <h4>Example Usage:</h4>
* An {@code IterableBuilder} can be used to assemble an {@code Iterable} as follows:
* <blockquote>
* <pre>
* Iterable<Integer> iterable = Literals.<Integer>iterableBuilder()
* .with(1, 2, 3)
* .and(4, 5, 6)
* .build()
* </pre>
* </blockquote>
* This is equivalent to the following:
* <blockquote>
* <pre>
* Iterable<Integer> iterable = Literals.iterableWith(1, 2, 3, 4, 5, 6);
* </pre>
* </blockquote>
* The advantage of the {@code IterableBuilder} is that the iterable can be built up from
* individual objects, arrays or existing iterables. See {@link IterableBuilder} for
* further details.
*
* @param <E> The type of the elements contained in the {@code IterableBuilder}.
* @return An {@code IterableBuilder} instance over the type {@code E} containing no elements.
*/
public static <E> IterableBuilder<E> iterableBuilder() {
return new IterableBuilder<E>();
}
/**
* Returns an {@code IterableBuilder} over the type of the supplied {@code Class}
* containing no elements.
* <p/>
* <h4>Example Usage:</h4>
* An {@code IterableBuilder} can be used to assemble an {@code Iterable} as follows:
* <blockquote>
* <pre>
* Iterable<String> iterable = iterableBuilderOf(String.class)
* .with("first", "second", "third")
* .and(new String[]{"fourth", "fifth"})
* .build()
* </pre>
* </blockquote>
* This is equivalent to the following:
* <blockquote>
* <pre>
* Iterable<String> iterable = Literals.iterableWith("first", "second", "third", "fourth", "fifth");
* </pre>
* </blockquote>
* The advantage of the {@code IterableBuilder} is that the iterable can be built
* up from individual objects, arrays or existing iterables. See {@link IterableBuilder}
* for further details.
*
* @param elementClass A {@code Class} representing the type of elements
* contained in this {@code IterableBuilder}
* @param <E> The type of the elements contained in the {@code IterableBuilder}.
* @return An {@code IterableBuilder} instance over the type {@code E} containing no
* elements.
*/
public static <E> IterableBuilder<E> iterableBuilderOf(Class<E> elementClass) {
return new IterableBuilder<E>();
}
/**
* Returns an {@code IterableBuilder} over type {@code E} initialised with the elements
* contained in the supplied {@code Iterable}.
* <p/>
* <h4>Example Usage:</h4>
* An {@code IterableBuilder} can be used to assemble an {@code Iterable} from two existing
* {@code Collection} instances as follows:
* <blockquote>
* <pre>
* Collection<Character> firstCollection = Literals.collectionWith('a', 'b', 'c');
* Collection<Character> secondCollection = Literals.collectionWith('d', 'e', 'f');
* Iterable<Character> iterable = iterableBuilderFrom(firstCollection)
* .with(secondCollection)
* .build()
* </pre>
* </blockquote>
* This is equivalent to the following:
* <blockquote>
* <pre>
* Iterable<Character> iterable = Literals.iterableWith('a', 'b', 'c', 'd', 'e', 'f');
* </pre>
* </blockquote>
* The advantage of the {@code IterableBuilder} is that the iterable can be built up from
* individual objects, arrays or existing iterables. See {@link IterableBuilder} for
* further details.
*
* @param elements An {@code Iterable} containing elements with which the
* {@code IterableBuilder} should be initialised.
* @param <E> The type of the elements contained in the {@code IterableBuilder}.
* @return An {@code IterableBuilder} instance over the type {@code E} containing
* the elements from the supplied {@code Iterable}.
*/
public static <E> IterableBuilder<E> iterableBuilderFrom(Iterable<? extends E> elements) {
return new IterableBuilder<E>().with(elements);
}
/**
* Returns an {@code IterableBuilder} over type {@code E} initialised with the elements
* contained in the supplied array.
* <p/>
* <h4>Example Usage:</h4>
* An {@code IterableBuilder} can be used to assemble an {@code Iterable} from two existing
* arrays as follows:
* <blockquote>
* <pre>
* Long[] firstArray = new Long[]{1L, 2L, 3L};
* Long[] secondArray = new Long[]{3L, 4L, 5L};
* Iterable<Long> iterable = iterableBuilderFrom(firstArray)
* .with(secondArray)
* .build()
* </pre>
* </blockquote>
* This is equivalent to the following:
* <blockquote>
* <pre>
* Iterable<Long> iterable = Literals.iterableWith(1L, 2L, 3L, 3L, 4L, 5L);
* </pre>
* </blockquote>
* The advantage of the {@code IterableBuilder} is that the iterable can be built up from
* individual objects, arrays or existing iterables. See {@link IterableBuilder} for
* further details.
*
* @param elementArray An array containing elements with which the
* {@code IterableBuilder} should be initialised.
* @param <E> The type of the elements contained in the {@code IterableBuilder}.
* @return An {@code IterableBuilder} instance over the type {@code E} containing
* the elements from the supplied array.
*/
public static <E> IterableBuilder<E> iterableBuilderFrom(E[] elementArray) {
return new IterableBuilder<E>().with(elementArray);
}
/**
* Returns an {@code IterableBuilder} instance over the type {@code E} containing
* the supplied element.
*
* @param e The element to be added to the {@code IterableBuilder}.
* @param <E> The type of the elements contained in the returned {@code IterableBuilder}.
* @return An {@code IterableBuilder} instance over type {@code E} containing the supplied
* element.
*/
public static <E> IterableBuilder<E> iterableBuilderWith(E e) {
return iterableBuilderFrom(iterableWith(e));
}
/**
* Returns an {@code IterableBuilder} instance over the type {@code E} containing the
* supplied elements. The supplied elements are added to the {@code IterableBuilder}
* instance in the same order as they are defined in the argument list.
*
* @param e1 The first element to be added to the {@code IterableBuilder}.
* @param e2 The second element to be added to the {@code IterableBuilder}.
* @param <E> The type of the elements contained in the returned {@code IterableBuilder}.
* @return An {@code IterableBuilder} instance over type {@code E} containing the supplied
* elements.
*/
public static <E> IterableBuilder<E> iterableBuilderWith(E e1, E e2) {
return iterableBuilderFrom(iterableWith(e1, e2));
}
/**
* Returns an {@code IterableBuilder} instance over the type {@code E} containing the
* supplied elements. The supplied elements are added to the {@code IterableBuilder}
* instance in the same order as they are defined in the argument list.
*
* @param e1 The first element to be added to the {@code IterableBuilder}.
* @param e2 The second element to be added to the {@code IterableBuilder}.
* @param e3 The third element to be added to the {@code IterableBuilder}.
* @param <E> The type of the elements contained in the returned {@code IterableBuilder}.
* @return An {@code IterableBuilder} instance over type {@code E} containing the supplied
* elements.
*/
public static <E> IterableBuilder<E> iterableBuilderWith(E e1, E e2, E e3) {
return iterableBuilderFrom(iterableWith(e1, e2, e3));
}
/**
* Returns an {@code IterableBuilder} instance over the type {@code E} containing the
* supplied elements. The supplied elements are added to the {@code IterableBuilder}
* instance in the same order as they are defined in the argument list.
*
* @param e1 The first element to be added to the {@code IterableBuilder}.
* @param e2 The second element to be added to the {@code IterableBuilder}.
* @param e3 The third element to be added to the {@code IterableBuilder}.
* @param e4 The fourth element to be added to the {@code IterableBuilder}.
* @param <E> The type of the elements contained in the returned {@code IterableBuilder}.
* @return An {@code IterableBuilder} instance over type {@code E} containing the supplied
* elements.
*/
public static <E> IterableBuilder<E> iterableBuilderWith(E e1, E e2, E e3, E e4) {
return iterableBuilderFrom(iterableWith(e1, e2, e3, e4));
}
/**
* Returns an {@code IterableBuilder} instance over the type {@code E} containing the
* supplied elements. The supplied elements are added to the {@code IterableBuilder}
* instance in the same order as they are defined in the argument list.
*
* @param e1 The first element to be added to the {@code IterableBuilder}.
* @param e2 The second element to be added to the {@code IterableBuilder}.
* @param e3 The third element to be added to the {@code IterableBuilder}.
* @param e4 The fourth element to be added to the {@code IterableBuilder}.
* @param e5 The fifth element to be added to the {@code IterableBuilder}.
* @param <E> The type of the elements contained in the returned {@code IterableBuilder}.
* @return An {@code IterableBuilder} instance over type {@code E} containing the supplied
* elements.
*/
public static <E> IterableBuilder<E> iterableBuilderWith(E e1, E e2, E e3, E e4, E e5) {
return iterableBuilderFrom(iterableWith(e1, e2, e3, e4, e5));
}
/**
* Returns an {@code IterableBuilder} instance over the type {@code E} containing the
* supplied elements. The supplied elements are added to the {@code IterableBuilder}
* instance in the same order as they are defined in the argument list.
*
* @param e1 The first element to be added to the {@code IterableBuilder}.
* @param e2 The second element to be added to the {@code IterableBuilder}.
* @param e3 The third element to be added to the {@code IterableBuilder}.
* @param e4 The fourth element to be added to the {@code IterableBuilder}.
* @param e5 The fifth element to be added to the {@code IterableBuilder}.
* @param e6 The sixth element to be added to the {@code IterableBuilder}.
* @param <E> The type of the elements contained in the returned {@code IterableBuilder}.
* @return An {@code IterableBuilder} instance over type {@code E} containing the supplied
* elements.
*/
public static <E> IterableBuilder<E> iterableBuilderWith(E e1, E e2, E e3, E e4, E e5, E e6) {
return iterableBuilderFrom(iterableWith(e1, e2, e3, e4, e5, e6));
}
/**
* Returns an {@code IterableBuilder} instance over the type {@code E} containing the
* supplied elements. The supplied elements are added to the {@code IterableBuilder}
* instance in the same order as they are defined in the argument list.
*
* @param e1 The first element to be added to the {@code IterableBuilder}.
* @param e2 The second element to be added to the {@code IterableBuilder}.
* @param e3 The third element to be added to the {@code IterableBuilder}.
* @param e4 The fourth element to be added to the {@code IterableBuilder}.
* @param e5 The fifth element to be added to the {@code IterableBuilder}.
* @param e6 The sixth element to be added to the {@code IterableBuilder}.
* @param e7 The seventh element to be added to the {@code IterableBuilder}.
* @param <E> The type of the elements contained in the returned {@code IterableBuilder}.
* @return An {@code IterableBuilder} instance over type {@code E} containing the supplied
* elements.
*/
public static <E> IterableBuilder<E> iterableBuilderWith(E e1, E e2, E e3, E e4, E e5, E e6, E e7) {
return iterableBuilderFrom(iterableWith(e1, e2, e3, e4, e5, e6, e7));
}
/**
* Returns an {@code IterableBuilder} instance over the type {@code E} containing the
* supplied elements. The supplied elements are added to the {@code IterableBuilder}
* instance in the same order as they are defined in the argument list.
*
* @param e1 The first element to be added to the {@code IterableBuilder}.
* @param e2 The second element to be added to the {@code IterableBuilder}.
* @param e3 The third element to be added to the {@code IterableBuilder}.
* @param e4 The fourth element to be added to the {@code IterableBuilder}.
* @param e5 The fifth element to be added to the {@code IterableBuilder}.
* @param e6 The sixth element to be added to the {@code IterableBuilder}.
* @param e7 The seventh element to be added to the {@code IterableBuilder}.
* @param e8 The eighth element to be added to the {@code IterableBuilder}.
* @param <E> The type of the elements contained in the returned {@code IterableBuilder}.
* @return An {@code IterableBuilder} instance over type {@code E} containing the supplied
* elements.
*/
public static <E> IterableBuilder<E> iterableBuilderWith(E e1, E e2, E e3, E e4, E e5, E e6, E e7, E e8) {
return iterableBuilderFrom(iterableWith(e1, e2, e3, e4, e5, e6, e7, e8));
}
/**
* Returns an {@code IterableBuilder} instance over the type {@code E} containing the
* supplied elements. The supplied elements are added to the {@code IterableBuilder}
* instance in the same order as they are defined in the argument list.
*
* @param e1 The first element to be added to the {@code IterableBuilder}.
* @param e2 The second element to be added to the {@code IterableBuilder}.
* @param e3 The third element to be added to the {@code IterableBuilder}.
* @param e4 The fourth element to be added to the {@code IterableBuilder}.
* @param e5 The fifth element to be added to the {@code IterableBuilder}.
* @param e6 The sixth element to be added to the {@code IterableBuilder}.
* @param e7 The seventh element to be added to the {@code IterableBuilder}.
* @param e8 The eighth element to be added to the {@code IterableBuilder}.
* @param e9 The ninth element to be added to the {@code IterableBuilder}.
* @param <E> The type of the elements contained in the returned {@code IterableBuilder}.
* @return An {@code IterableBuilder} instance over type {@code E} containing the supplied
* elements.
*/
public static <E> IterableBuilder<E> iterableBuilderWith(E e1, E e2, E e3, E e4, E e5, E e6, E e7, E e8, E e9) {
return iterableBuilderFrom(iterableWith(e1, e2, e3, e4, e5, e6, e7, e8, e9));
}
/**
* Returns an {@code IterableBuilder} instance over the type {@code E} containing the
* supplied elements. The supplied elements are added to the {@code IterableBuilder}
* instance in the same order as they are defined in the argument list.
*
* @param e1 The first element to be added to the {@code IterableBuilder}.
* @param e2 The second element to be added to the {@code IterableBuilder}.
* @param e3 The third element to be added to the {@code IterableBuilder}.
* @param e4 The fourth element to be added to the {@code IterableBuilder}.
* @param e5 The fifth element to be added to the {@code IterableBuilder}.
* @param e6 The sixth element to be added to the {@code IterableBuilder}.
* @param e7 The seventh element to be added to the {@code IterableBuilder}.
* @param e8 The eighth element to be added to the {@code IterableBuilder}.
* @param e9 The ninth element to be added to the {@code IterableBuilder}.
* @param e10 The tenth element to be added to the {@code IterableBuilder}.
* @param <E> The type of the elements contained in the returned {@code IterableBuilder}.
* @return An {@code IterableBuilder} instance over type {@code E} containing the supplied
* elements.
*/
public static <E> IterableBuilder<E> iterableBuilderWith(E e1, E e2, E e3, E e4, E e5, E e6, E e7, E e8, E e9, E e10) {
return iterableBuilderFrom(iterableWith(e1, e2, e3, e4, e5, e6, e7, e8, e9, e10));
}
/**
* Returns an {@code IterableBuilder} instance over the type {@code E} containing the
* supplied elements. The supplied elements are added to the {@code IterableBuilder}
* instance in the same order as they are defined in the argument list.
* <p/>
* <p>Note that this literal uses a generic varargs parameter as the last argument in the
* argument list and as such will cause unchecked cast warnings. Explicit argument
* lists for up to ten arguments have been provided for convenience. In order to avoid
* the unchecked cast warnings, an {@link IterableBuilder} instance can be used directly with
* multiple method calls accumulating the builder contents.</p>
*
* @param e1 The first element to be added to the {@code IterableBuilder}.
* @param e2 The second element to be added to the {@code IterableBuilder}.
* @param e3 The third element to be added to the {@code IterableBuilder}.
* @param e4 The fourth element to be added to the {@code IterableBuilder}.
* @param e5 The fifth element to be added to the {@code IterableBuilder}.
* @param e6 The sixth element to be added to the {@code IterableBuilder}.
* @param e7 The seventh element to be added to the {@code IterableBuilder}.
* @param e8 The eighth element to be added to the {@code IterableBuilder}.
* @param e9 The ninth element to be added to the {@code IterableBuilder}.
* @param e10 The tenth element to be added to the {@code IterableBuilder}.
* @param e11on The remaining elements to be added to the {@code IterableBuilder}. The elements
* will be added to the {@code IterableBuilder} in the order they are defined in the
*              variadic argument.
* @param <E> The type of the elements contained in the returned {@code IterableBuilder}.
* @return An {@code IterableBuilder} instance over type {@code E} containing the supplied
* elements.
*/
public static <E> IterableBuilder<E> iterableBuilderWith(E e1, E e2, E e3, E e4, E e5, E e6, E e7, E e8, E e9, E e10, E... e11on) {
return iterableBuilderFrom(iterableWith(e1, e2, e3, e4, e5, e6, e7, e8, e9, e10)).with(e11on);
}
/**
* Returns an empty immutable {@code Iterator} instance.
* <p/>
* <p>This form of literal is most suited to direct assignment to a variable
* since in this case, the type {@code E} is inferred from the variable
* declaration. For example:
* <blockquote>
* <pre>
* Iterator<String> strings = iterator();
* </pre>
* </blockquote>
* </p>
*
* @param <E> The type of the elements contained in the {@code Iterator}.
* @return An {@code Iterator} instance over the type {@code E} containing no elements.
*/
public static <E> Iterator<E> iterator() {
return new IteratorBuilder<E>().build();
}
@SuppressWarnings("unchecked") public static <E> Iterator<E> iterator(Class<? extends Iterator> iteratorClass) {
return uncheckedInstantiate(iteratorClass);
}
/**
* Returns an empty immutable {@code Iterator} instance over the type
* of the supplied {@code Class}.
* <p/>
* <p>This form of literal is most suited to inline usage such as when passing an
* empty iterator as a parameter in a method call since it reads more clearly than
* {@link #iterator()}. For example, compare the following:
* <blockquote>
* <pre>
* public class Iterators {
* public static <T> Iterator<T> buffer(Iterator<T> stream) {
* ...
* }
*
* ...
* }
*
* Iterator<Character> bufferedIterator1 = Iterators.buffer(Literals.<Character>iterator());
* Iterator<Character> bufferedIterator2 = Iterators.buffer(iteratorOf(Character.class));
* </pre>
* </blockquote>
* </p>
*
* @param elementClass A {@code Class} representing the type of elements
* contained in this {@code Iterator}
* @param <E> The type of the elements contained in the {@code Iterator}.
* @return An {@code Iterator} instance over the type {@code E} containing no elements.
*/
public static <E> Iterator<E> iteratorOf(Class<E> elementClass) {
return new IteratorBuilder<E>().build();
}
/**
* Returns an immutable {@code Iterator} instance over the type {@code E}
* containing all elements from the supplied {@code Iterable}. The order of
* the elements in the resulting {@code Iterator} is determined by the order in
* which they are yielded from the supplied {@code Iterable}.
* <p/>
* <p>This form of literal is useful when an immutable {@code Iterator} from an
* {@code Iterable} is required. For example:
* <blockquote>
* <pre>
* public class DataStructure<T> implements Iterable<T> {
* private final Iterable<T> backingStore;
*
* public Iterator<T> iterator() {
* return iteratorFrom(backingStore);
* }
*
* ...
* }
* </pre>
* </blockquote>
* </p>
*
* @param elements An {@code Iterable} of elements from which an {@code Iterator} should
* be constructed.
* @param <E> The type of the elements to be contained in the returned
* {@code Iterator}.
* @return An {@code Iterator} over the type {@code E} containing all elements from the
* supplied {@code Iterable} in the order they are yielded.
*/
public static <E> Iterator<E> iteratorFrom(Iterable<? extends E> elements) {
return new IteratorBuilder<E>().with(elements).build();
}
/**
* Returns an immutable {@code Iterator} instance over the type {@code E}
* containing all elements from the supplied array. The order of the elements
* in the resulting {@code Iterator} is the same as the order of the elements
* in the array.
* <p/>
* <p>For example, the following:
* <blockquote>
* <pre>
* String[] strings = new String[]{"one", "two", "three"};
* Iterator<String> iteratorOfStrings = Literals.iteratorFrom(strings);
* </pre>
* </blockquote>
* is equivalent to:
* <blockquote>
* <pre>
* Iterator<String> iteratorOfStrings = Literals.iteratorWith("one", "two", "three");
* </pre>
* </blockquote>
* </p>
*
* @param elementArray An array of elements from which an {@code Iterator} should be
* constructed.
* @param <E> The type of the elements to be contained in the returned
* {@code Iterator}.
* @return An {@code Iterator} over the type {@code E} containing all elements from the
* supplied array in the same order as the supplied array.
*/
public static <E> Iterator<E> iteratorFrom(E[] elementArray) {
return new IteratorBuilder<E>().with(elementArray).build();
}
/**
* Returns an immutable {@code Iterator} instance over the type {@code E} containing the
* supplied element.
*
* @param e An element from which to construct an {@code Iterator}.
* @param <E> The type of the element contained in the returned {@code Iterator}.
* @return An {@code Iterator} instance over type {@code E} containing the supplied element.
*/
public static <E> Iterator<E> iteratorWith(E e) {
return iteratorFrom(iterableWith(e));
}
/**
* Returns an immutable {@code Iterator} instance over the type {@code E} containing the
* supplied elements. The order of the resultant {@code Iterator} is the same as the order
* of the elements in the argument list.
*
* @param e1 The first element from which to construct an {@code Iterator}.
* @param e2 The second element from which to construct an {@code Iterator}.
* @param <E> The type of the elements contained in the returned {@code Iterator}.
* @return An {@code Iterator} instance over type {@code E} containing the supplied elements.
*/
public static <E> Iterator<E> iteratorWith(E e1, E e2) {
return iteratorFrom(iterableWith(e1, e2));
}
/**
* Returns an immutable {@code Iterator} instance over the type {@code E} containing the
* supplied elements. The order of the resultant {@code Iterator} is the same as the order
* of the elements in the argument list.
*
* @param e1 The first element from which to construct an {@code Iterator}.
* @param e2 The second element from which to construct an {@code Iterator}.
* @param e3 The third element from which to construct an {@code Iterator}.
* @param <E> The type of the elements contained in the returned {@code Iterator}.
* @return An {@code Iterator} instance over type {@code E} containing the supplied elements.
*/
public static <E> Iterator<E> iteratorWith(E e1, E e2, E e3) {
return iteratorFrom(iterableWith(e1, e2, e3));
}
/**
* Returns an immutable {@code Iterator} instance over the type {@code E} containing the
* supplied elements. The order of the resultant {@code Iterator} is the same as the order
* of the elements in the argument list.
*
* @param e1 The first element from which to construct an {@code Iterator}.
* @param e2 The second element from which to construct an {@code Iterator}.
* @param e3 The third element from which to construct an {@code Iterator}.
* @param e4 The fourth element from which to construct an {@code Iterator}.
* @param <E> The type of the elements contained in the returned {@code Iterator}.
* @return An {@code Iterator} instance over type {@code E} containing the supplied elements.
*/
public static <E> Iterator<E> iteratorWith(E e1, E e2, E e3, E e4) {
return iteratorFrom(iterableWith(e1, e2, e3, e4));
}
/**
* Returns an immutable {@code Iterator} instance over the type {@code E} containing the
* supplied elements. The order of the resultant {@code Iterator} is the same as the order
* of the elements in the argument list.
*
* @param e1 The first element from which to construct an {@code Iterator}.
* @param e2 The second element from which to construct an {@code Iterator}.
* @param e3 The third element from which to construct an {@code Iterator}.
* @param e4 The fourth element from which to construct an {@code Iterator}.
* @param e5 The fifth element from which to construct an {@code Iterator}.
* @param <E> The type of the elements contained in the returned {@code Iterator}.
* @return An {@code Iterator} instance over type {@code E} containing the supplied elements.
*/
public static <E> Iterator<E> iteratorWith(E e1, E e2, E e3, E e4, E e5) {
return iteratorFrom(iterableWith(e1, e2, e3, e4, e5));
}
/**
* Returns an immutable {@code Iterator} instance over the type {@code E} containing the
* supplied elements. The order of the resultant {@code Iterator} is the same as the order
* of the elements in the argument list.
*
* @param e1 The first element from which to construct an {@code Iterator}.
* @param e2 The second element from which to construct an {@code Iterator}.
* @param e3 The third element from which to construct an {@code Iterator}.
* @param e4 The fourth element from which to construct an {@code Iterator}.
* @param e5 The fifth element from which to construct an {@code Iterator}.
* @param e6 The sixth element from which to construct an {@code Iterator}.
* @param <E> The type of the elements contained in the returned {@code Iterator}.
* @return An {@code Iterator} instance over type {@code E} containing the supplied elements.
*/
public static <E> Iterator<E> iteratorWith(E e1, E e2, E e3, E e4, E e5, E e6) {
return iteratorFrom(iterableWith(e1, e2, e3, e4, e5, e6));
}
/**
* Returns an immutable {@code Iterator} instance over the type {@code E} containing the
* supplied elements. The order of the resultant {@code Iterator} is the same as the order
* of the elements in the argument list.
*
* @param e1 The first element from which to construct an {@code Iterator}.
* @param e2 The second element from which to construct an {@code Iterator}.
* @param e3 The third element from which to construct an {@code Iterator}.
* @param e4 The fourth element from which to construct an {@code Iterator}.
* @param e5 The fifth element from which to construct an {@code Iterator}.
* @param e6 The sixth element from which to construct an {@code Iterator}.
* @param e7 The seventh element from which to construct an {@code Iterator}.
* @param <E> The type of the elements contained in the returned {@code Iterator}.
* @return An {@code Iterator} instance over type {@code E} containing the supplied elements.
*/
public static <E> Iterator<E> iteratorWith(E e1, E e2, E e3, E e4, E e5, E e6, E e7) {
return iteratorFrom(iterableWith(e1, e2, e3, e4, e5, e6, e7));
}
/**
* Returns an immutable {@code Iterator} instance over the type {@code E} containing the
* supplied elements. The order of the resultant {@code Iterator} is the same as the order
* of the elements in the argument list.
*
* @param e1 The first element from which to construct an {@code Iterator}.
* @param e2 The second element from which to construct an {@code Iterator}.
* @param e3 The third element from which to construct an {@code Iterator}.
* @param e4 The fourth element from which to construct an {@code Iterator}.
* @param e5 The fifth element from which to construct an {@code Iterator}.
* @param e6 The sixth element from which to construct an {@code Iterator}.
* @param e7 The seventh element from which to construct an {@code Iterator}.
* @param e8 The eighth element from which to construct an {@code Iterator}.
* @param <E> The type of the elements contained in the returned {@code Iterator}.
* @return An {@code Iterator} instance over type {@code E} containing the supplied elements.
*/
public static <E> Iterator<E> iteratorWith(E e1, E e2, E e3, E e4, E e5, E e6, E e7, E e8) {
return iteratorFrom(iterableWith(e1, e2, e3, e4, e5, e6, e7, e8));
}
/**
* Returns an immutable {@code Iterator} instance over the type {@code E} containing the
* supplied elements. The order of the resultant {@code Iterator} is the same as the order
* of the elements in the argument list.
*
* @param e1 The first element from which to construct an {@code Iterator}.
* @param e2 The second element from which to construct an {@code Iterator}.
* @param e3 The third element from which to construct an {@code Iterator}.
* @param e4 The fourth element from which to construct an {@code Iterator}.
* @param e5 The fifth element from which to construct an {@code Iterator}.
* @param e6 The sixth element from which to construct an {@code Iterator}.
* @param e7 The seventh element from which to construct an {@code Iterator}.
* @param e8 The eighth element from which to construct an {@code Iterator}.
* @param e9 The ninth element from which to construct an {@code Iterator}.
* @param <E> The type of the elements contained in the returned {@code Iterator}.
* @return An {@code Iterator} instance over type {@code E} containing the supplied elements.
*/
public static <E> Iterator<E> iteratorWith(E e1, E e2, E e3, E e4, E e5, E e6, E e7, E e8, E e9) {
return iteratorFrom(iterableWith(e1, e2, e3, e4, e5, e6, e7, e8, e9));
}
/**
* Returns an immutable {@code Iterator} instance over the type {@code E} containing the
* supplied elements. The order of the resultant {@code Iterator} is the same as the order
* of the elements in the argument list.
*
* @param e1 The first element from which to construct an {@code Iterator}.
* @param e2 The second element from which to construct an {@code Iterator}.
* @param e3 The third element from which to construct an {@code Iterator}.
* @param e4 The fourth element from which to construct an {@code Iterator}.
* @param e5 The fifth element from which to construct an {@code Iterator}.
* @param e6 The sixth element from which to construct an {@code Iterator}.
* @param e7 The seventh element from which to construct an {@code Iterator}.
* @param e8 The eighth element from which to construct an {@code Iterator}.
* @param e9 The ninth element from which to construct an {@code Iterator}.
* @param e10 The tenth element from which to construct an {@code Iterator}.
* @param <E> The type of the elements contained in the returned {@code Iterator}.
* @return An {@code Iterator} instance over type {@code E} containing the supplied elements.
*/
public static <E> Iterator<E> iteratorWith(E e1, E e2, E e3, E e4, E e5, E e6, E e7, E e8, E e9, E e10) {
return iteratorFrom(iterableWith(e1, e2, e3, e4, e5, e6, e7, e8, e9, e10));
}
/**
* Returns an immutable {@code Iterator} instance over the type {@code E} containing the
* supplied elements. The order of the resultant {@code Iterator} is the same as the order
* of the elements in the argument list.
* <p/>
* <p>Note that this literal uses a generic varargs parameter as the last argument in the
* argument list and as such will cause unchecked cast warnings. Explicit argument
* lists for up to ten arguments have been provided for convenience. In order to avoid
* the unchecked cast warnings, an {@link IteratorBuilder} can be used instead.</p>
*
* @param e1 The first element from which to construct an {@code Iterator}.
* @param e2 The second element from which to construct an {@code Iterator}.
* @param e3 The third element from which to construct an {@code Iterator}.
* @param e4 The fourth element from which to construct an {@code Iterator}.
* @param e5 The fifth element from which to construct an {@code Iterator}.
* @param e6 The sixth element from which to construct an {@code Iterator}.
* @param e7 The seventh element from which to construct an {@code Iterator}.
* @param e8 The eighth element from which to construct an {@code Iterator}.
* @param e9 The ninth element from which to construct an {@code Iterator}.
* @param e10 The tenth element from which to construct an {@code Iterator}.
* @param e11on The remaining elements from which to construct an {@code Iterator}.
* @param <E> The type of the elements contained in the returned {@code Iterator}.
     * @return An {@code Iterator} instance over type {@code E} containing the supplied elements.
*/
public static <E> Iterator<E> iteratorWith(E e1, E e2, E e3, E e4, E e5, E e6, E e7, E e8, E e9, E e10, E... e11on) {
return iteratorBuilderFrom(iterableWith(e1, e2, e3, e4, e5, e6, e7, e8, e9, e10)).with(e11on).build();
}
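    // Illustrative usage sketch (hypothetical helper added for documentation only; not part of
    // the published API). As the Javadoc above notes, the varargs overload produces an unchecked
    // generic array creation warning at the call site; handing the trailing elements to an
    // IteratorBuilder as an array builds the same Iterator without that warning.
    private static Iterator<String> exampleIteratorWithoutVarargsWarning(String[] remaining) {
        return iteratorBuilderFrom(iterableWith("a", "b")).with(remaining).build();
    }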
/**
* Returns an {@code IteratorBuilder} containing no elements.
* <p/>
* <h4>Example Usage:</h4>
* An {@code IteratorBuilder} can be used to assemble an {@code Iterator} as follows:
* <blockquote>
* <pre>
* Iterator<Double> iterator = Literals.<Double>iteratorBuilder()
* .with(1.34, 2.2, 3.5)
     *         .and(4.0, 5.78, 6.21)
* .build()
* </pre>
* </blockquote>
* This is equivalent to the following:
* <blockquote>
* <pre>
     * Iterator<Double> iterator = Literals.iteratorWith(1.34, 2.2, 3.5, 4.0, 5.78, 6.21);
* </pre>
* </blockquote>
* The advantage of the {@code IteratorBuilder} is that the iterator can be built up from
* individual objects, arrays or existing iterables. See {@link IteratorBuilder} for
* further details.
*
* @param <E> The type of the elements contained in the {@code IteratorBuilder}.
* @return An {@code IteratorBuilder} instance over the type {@code E} containing no elements.
*/
public static <E> IteratorBuilder<E> iteratorBuilder() {
return new IteratorBuilder<E>();
}
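    // Illustrative usage sketch (hypothetical helper added for documentation only; not part of
    // the published API). Demonstrates the advantage described above: an IteratorBuilder can
    // accumulate individual elements, an existing array and an existing iterable before a single
    // build() call.
    private static Iterator<Integer> exampleMixedSourceIterator(Integer[] fromArray, Iterable<Integer> fromIterable) {
        return Literals.<Integer>iteratorBuilder()
                .with(1, 2)           // individual elements
                .with(fromArray)      // elements from an existing array
                .with(fromIterable)   // elements from an existing iterable
                .build();
    }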
/**
* Returns an {@code IteratorBuilder} over the type of the supplied {@code Class}
* containing no elements.
* <p/>
* <h4>Example Usage:</h4>
* An {@code IteratorBuilder} can be used to assemble an {@code Iterator} as follows:
* <blockquote>
* <pre>
* Iterator<Integer> iterator = iteratorBuilderOf(Integer.class)
* .with(new Integer[]{65, 72})
* .and(95, 43, 20)
* .build()
* </pre>
* </blockquote>
* This is equivalent to the following:
* <blockquote>
* <pre>
* Iterator<Integer> iterator = Literals.iteratorWith(65, 72, 95, 43, 20);
* </pre>
* </blockquote>
* The advantage of the {@code IteratorBuilder} is that the iterator can be built
* up from individual objects, arrays or existing iterables. See {@link IteratorBuilder}
* for further details.
*
* @param elementClass A {@code Class} representing the type of elements
* contained in this {@code IteratorBuilder}
* @param <E> The type of the elements contained in the {@code IteratorBuilder}.
* @return An {@code IteratorBuilder} instance over the type {@code E} containing no
* elements.
*/
public static <E> IteratorBuilder<E> iteratorBuilderOf(Class<E> elementClass) {
return new IteratorBuilder<E>();
}
/**
* Returns an {@code IteratorBuilder} over type {@code E} initialised with the elements
     * contained in the supplied {@code Iterable}.
* <p/>
* <h4>Example Usage:</h4>
* An {@code IteratorBuilder} can be used to assemble an {@code Iterator} from two existing
* {@code Collection} instances as follows:
* <blockquote>
* <pre>
* Collection<String> firstCollection = Literals.collectionWith("a", "b", "c");
* Collection<String> secondCollection = Literals.collectionWith("d", "e", "f");
* Iterator<String> iterator = iteratorBuilderFrom(firstCollection)
* .with(secondCollection)
* .build()
* </pre>
* </blockquote>
* This is equivalent to the following:
* <blockquote>
* <pre>
* Iterator<String> iterator = Literals.iteratorWith("a", "b", "c", "d", "e", "f");
* </pre>
* </blockquote>
* The advantage of the {@code IteratorBuilder} is that the iterator can be built up from
* individual objects, arrays or existing iterables. See {@link IteratorBuilder} for
* further details.
*
* @param elements An {@code Iterable} containing elements with which the
* {@code IteratorBuilder} should be initialised.
* @param <E> The type of the elements contained in the {@code IteratorBuilder}.
* @return An {@code IteratorBuilder} instance over the type {@code E} containing
* the elements from the supplied {@code Iterable}.
*/
public static <E> IteratorBuilder<E> iteratorBuilderFrom(Iterable<? extends E> elements) {
return new IteratorBuilder<E>().with(elements);
}
/**
* Returns an {@code IteratorBuilder} over type {@code E} initialised with the elements
* contained in the supplied array.
* <p/>
* <h4>Example Usage:</h4>
* An {@code IteratorBuilder} can be used to assemble an {@code Iterator} from two existing
* arrays as follows:
* <blockquote>
* <pre>
* Integer[] firstArray = new Integer[]{1, 2, 3};
     * Integer[] secondArray = new Integer[]{3, 4, 5};
* Iterator<Integer> iterator = iteratorBuilderFrom(firstArray)
* .with(secondArray)
* .build()
* </pre>
* </blockquote>
* This is equivalent to the following:
* <blockquote>
* <pre>
* Iterator<Integer> iterator = Literals.iteratorWith(1, 2, 3, 3, 4, 5);
* </pre>
* </blockquote>
* The advantage of the {@code IteratorBuilder} is that the iterator can be built up from
* individual objects, arrays or existing iterables. See {@link IteratorBuilder} for
* further details.
*
* @param elementArray An array containing elements with which the
* {@code IteratorBuilder} should be initialised.
* @param <E> The type of the elements contained in the {@code IteratorBuilder}.
* @return An {@code IteratorBuilder} instance over the type {@code E} containing
* the elements from the supplied array.
*/
public static <E> IteratorBuilder<E> iteratorBuilderFrom(E[] elementArray) {
return new IteratorBuilder<E>().with(elementArray);
}
/**
* Returns an {@code IteratorBuilder} instance over the type {@code E} containing
* the supplied element.
*
* @param e The element to be added to the {@code IteratorBuilder}.
* @param <E> The type of the elements contained in the returned {@code IteratorBuilder}.
* @return An {@code IteratorBuilder} instance over type {@code E} containing the supplied
* element.
*/
public static <E> IteratorBuilder<E> iteratorBuilderWith(E e) {
return iteratorBuilderFrom(iterableWith(e));
}
/**
* Returns an {@code IteratorBuilder} instance over the type {@code E} containing the
* supplied elements. The supplied elements are added to the {@code IteratorBuilder}
* instance in the same order as they are defined in the argument list.
*
* @param e1 The first element to be added to the {@code IteratorBuilder}.
* @param e2 The second element to be added to the {@code IteratorBuilder}.
* @param <E> The type of the elements contained in the returned {@code IteratorBuilder}.
* @return An {@code IteratorBuilder} instance over type {@code E} containing the supplied
* elements.
*/
public static <E> IteratorBuilder<E> iteratorBuilderWith(E e1, E e2) {
return iteratorBuilderFrom(iterableWith(e1, e2));
}
/**
* Returns an {@code IteratorBuilder} instance over the type {@code E} containing the
* supplied elements. The supplied elements are added to the {@code IteratorBuilder}
* instance in the same order as they are defined in the argument list.
*
* @param e1 The first element to be added to the {@code IteratorBuilder}.
* @param e2 The second element to be added to the {@code IteratorBuilder}.
* @param e3 The third element to be added to the {@code IteratorBuilder}.
* @param <E> The type of the elements contained in the returned {@code IteratorBuilder}.
* @return An {@code IteratorBuilder} instance over type {@code E} containing the supplied
* elements.
*/
public static <E> IteratorBuilder<E> iteratorBuilderWith(E e1, E e2, E e3) {
return iteratorBuilderFrom(iterableWith(e1, e2, e3));
}
/**
* Returns an {@code IteratorBuilder} instance over the type {@code E} containing the
* supplied elements. The supplied elements are added to the {@code IteratorBuilder}
* instance in the same order as they are defined in the argument list.
*
* @param e1 The first element to be added to the {@code IteratorBuilder}.
* @param e2 The second element to be added to the {@code IteratorBuilder}.
* @param e3 The third element to be added to the {@code IteratorBuilder}.
* @param e4 The fourth element to be added to the {@code IteratorBuilder}.
* @param <E> The type of the elements contained in the returned {@code IteratorBuilder}.
* @return An {@code IteratorBuilder} instance over type {@code E} containing the supplied
* elements.
*/
public static <E> IteratorBuilder<E> iteratorBuilderWith(E e1, E e2, E e3, E e4) {
return iteratorBuilderFrom(iterableWith(e1, e2, e3, e4));
}
/**
* Returns an {@code IteratorBuilder} instance over the type {@code E} containing the
* supplied elements. The supplied elements are added to the {@code IteratorBuilder}
* instance in the same order as they are defined in the argument list.
*
* @param e1 The first element to be added to the {@code IteratorBuilder}.
* @param e2 The second element to be added to the {@code IteratorBuilder}.
* @param e3 The third element to be added to the {@code IteratorBuilder}.
* @param e4 The fourth element to be added to the {@code IteratorBuilder}.
* @param e5 The fifth element to be added to the {@code IteratorBuilder}.
* @param <E> The type of the elements contained in the returned {@code IteratorBuilder}.
* @return An {@code IteratorBuilder} instance over type {@code E} containing the supplied
* elements.
*/
public static <E> IteratorBuilder<E> iteratorBuilderWith(E e1, E e2, E e3, E e4, E e5) {
return iteratorBuilderFrom(iterableWith(e1, e2, e3, e4, e5));
}
/**
* Returns an {@code IteratorBuilder} instance over the type {@code E} containing the
* supplied elements. The supplied elements are added to the {@code IteratorBuilder}
* instance in the same order as they are defined in the argument list.
*
* @param e1 The first element to be added to the {@code IteratorBuilder}.
* @param e2 The second element to be added to the {@code IteratorBuilder}.
* @param e3 The third element to be added to the {@code IteratorBuilder}.
* @param e4 The fourth element to be added to the {@code IteratorBuilder}.
* @param e5 The fifth element to be added to the {@code IteratorBuilder}.
* @param e6 The sixth element to be added to the {@code IteratorBuilder}.
* @param <E> The type of the elements contained in the returned {@code IteratorBuilder}.
* @return An {@code IteratorBuilder} instance over type {@code E} containing the supplied
* elements.
*/
public static <E> IteratorBuilder<E> iteratorBuilderWith(E e1, E e2, E e3, E e4, E e5, E e6) {
return iteratorBuilderFrom(iterableWith(e1, e2, e3, e4, e5, e6));
}
/**
* Returns an {@code IteratorBuilder} instance over the type {@code E} containing the
* supplied elements. The supplied elements are added to the {@code IteratorBuilder}
* instance in the same order as they are defined in the argument list.
*
* @param e1 The first element to be added to the {@code IteratorBuilder}.
* @param e2 The second element to be added to the {@code IteratorBuilder}.
* @param e3 The third element to be added to the {@code IteratorBuilder}.
* @param e4 The fourth element to be added to the {@code IteratorBuilder}.
* @param e5 The fifth element to be added to the {@code IteratorBuilder}.
* @param e6 The sixth element to be added to the {@code IteratorBuilder}.
* @param e7 The seventh element to be added to the {@code IteratorBuilder}.
* @param <E> The type of the elements contained in the returned {@code IteratorBuilder}.
* @return An {@code IteratorBuilder} instance over type {@code E} containing the supplied
* elements.
*/
public static <E> IteratorBuilder<E> iteratorBuilderWith(E e1, E e2, E e3, E e4, E e5, E e6, E e7) {
return iteratorBuilderFrom(iterableWith(e1, e2, e3, e4, e5, e6, e7));
}
/**
* Returns an {@code IteratorBuilder} instance over the type {@code E} containing the
* supplied elements. The supplied elements are added to the {@code IteratorBuilder}
* instance in the same order as they are defined in the argument list.
*
* @param e1 The first element to be added to the {@code IteratorBuilder}.
* @param e2 The second element to be added to the {@code IteratorBuilder}.
* @param e3 The third element to be added to the {@code IteratorBuilder}.
* @param e4 The fourth element to be added to the {@code IteratorBuilder}.
* @param e5 The fifth element to be added to the {@code IteratorBuilder}.
* @param e6 The sixth element to be added to the {@code IteratorBuilder}.
* @param e7 The seventh element to be added to the {@code IteratorBuilder}.
* @param e8 The eighth element to be added to the {@code IteratorBuilder}.
* @param <E> The type of the elements contained in the returned {@code IteratorBuilder}.
* @return An {@code IteratorBuilder} instance over type {@code E} containing the supplied
* elements.
*/
public static <E> IteratorBuilder<E> iteratorBuilderWith(E e1, E e2, E e3, E e4, E e5, E e6, E e7, E e8) {
return iteratorBuilderFrom(iterableWith(e1, e2, e3, e4, e5, e6, e7, e8));
}
/**
* Returns an {@code IteratorBuilder} instance over the type {@code E} containing the
* supplied elements. The supplied elements are added to the {@code IteratorBuilder}
* instance in the same order as they are defined in the argument list.
*
* @param e1 The first element to be added to the {@code IteratorBuilder}.
* @param e2 The second element to be added to the {@code IteratorBuilder}.
* @param e3 The third element to be added to the {@code IteratorBuilder}.
* @param e4 The fourth element to be added to the {@code IteratorBuilder}.
* @param e5 The fifth element to be added to the {@code IteratorBuilder}.
* @param e6 The sixth element to be added to the {@code IteratorBuilder}.
* @param e7 The seventh element to be added to the {@code IteratorBuilder}.
* @param e8 The eighth element to be added to the {@code IteratorBuilder}.
* @param e9 The ninth element to be added to the {@code IteratorBuilder}.
* @param <E> The type of the elements contained in the returned {@code IteratorBuilder}.
* @return An {@code IteratorBuilder} instance over type {@code E} containing the supplied
* elements.
*/
public static <E> IteratorBuilder<E> iteratorBuilderWith(E e1, E e2, E e3, E e4, E e5, E e6, E e7, E e8, E e9) {
return iteratorBuilderFrom(iterableWith(e1, e2, e3, e4, e5, e6, e7, e8, e9));
}
/**
* Returns an {@code IteratorBuilder} instance over the type {@code E} containing the
* supplied elements. The supplied elements are added to the {@code IteratorBuilder}
* instance in the same order as they are defined in the argument list.
*
* @param e1 The first element to be added to the {@code IteratorBuilder}.
* @param e2 The second element to be added to the {@code IteratorBuilder}.
* @param e3 The third element to be added to the {@code IteratorBuilder}.
* @param e4 The fourth element to be added to the {@code IteratorBuilder}.
* @param e5 The fifth element to be added to the {@code IteratorBuilder}.
* @param e6 The sixth element to be added to the {@code IteratorBuilder}.
* @param e7 The seventh element to be added to the {@code IteratorBuilder}.
* @param e8 The eighth element to be added to the {@code IteratorBuilder}.
* @param e9 The ninth element to be added to the {@code IteratorBuilder}.
* @param e10 The tenth element to be added to the {@code IteratorBuilder}.
* @param <E> The type of the elements contained in the returned {@code IteratorBuilder}.
* @return An {@code IteratorBuilder} instance over type {@code E} containing the supplied
* elements.
*/
public static <E> IteratorBuilder<E> iteratorBuilderWith(E e1, E e2, E e3, E e4, E e5, E e6, E e7, E e8, E e9, E e10) {
return iteratorBuilderFrom(iterableWith(e1, e2, e3, e4, e5, e6, e7, e8, e9, e10));
}
/**
* Returns an {@code IteratorBuilder} instance over the type {@code E} containing the
* supplied elements. The supplied elements are added to the {@code IteratorBuilder}
* instance in the same order as they are defined in the argument list.
* <p/>
* <p>Note that this literal uses a generic varargs parameter as the last argument in the
* argument list and as such will cause unchecked cast warnings. Explicit argument
* lists for up to ten arguments have been provided for convenience. In order to avoid
* the unchecked cast warnings, an {@link IteratorBuilder} instance can be used directly with
* multiple method calls accumulating the builder contents.</p>
*
* @param e1 The first element to be added to the {@code IteratorBuilder}.
* @param e2 The second element to be added to the {@code IteratorBuilder}.
* @param e3 The third element to be added to the {@code IteratorBuilder}.
* @param e4 The fourth element to be added to the {@code IteratorBuilder}.
* @param e5 The fifth element to be added to the {@code IteratorBuilder}.
* @param e6 The sixth element to be added to the {@code IteratorBuilder}.
* @param e7 The seventh element to be added to the {@code IteratorBuilder}.
* @param e8 The eighth element to be added to the {@code IteratorBuilder}.
* @param e9 The ninth element to be added to the {@code IteratorBuilder}.
* @param e10 The tenth element to be added to the {@code IteratorBuilder}.
* @param e11on The remaining elements to be added to the {@code IteratorBuilder}. The elements
* will be added to the {@code IteratorBuilder} in the order they are defined in the
* variadic argument.
* @param <E> The type of the elements contained in the returned {@code IteratorBuilder}.
* @return An {@code IteratorBuilder} instance over type {@code E} containing the supplied
* elements.
*/
public static <E> IteratorBuilder<E> iteratorBuilderWith(E e1, E e2, E e3, E e4, E e5, E e6, E e7, E e8, E e9, E e10, E... e11on) {
return iteratorBuilderFrom(iterableWith(e1, e2, e3, e4, e5, e6, e7, e8, e9, e10)).with(e11on);
}
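    // Illustrative usage sketch (hypothetical helper added for documentation only; not part of
    // the published API). Shows the alternative mentioned above: accumulate the builder's
    // contents over several calls, supplying the trailing elements as an array, so that no
    // unchecked generic array creation warning is produced.
    private static Iterator<String> exampleAccumulatedIteratorBuilder(String[] laterElements) {
        return iteratorBuilderWith("first", "second")
                .with(laterElements)
                .build();
    }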
/**
* Returns an empty immutable {@code Collection} instance.
* <p/>
* <p>This form of literal is most suited to direct assignment to a variable
* since in this case, the type {@code E} is inferred from the variable
* declaration. For example:
* <blockquote>
* <pre>
* Collection<Long> numbers = collection();
* </pre>
* </blockquote>
* </p>
*
* @param <E> The type of the elements contained in the {@code Collection}.
* @return A {@code Collection} instance over the type {@code E} containing no elements.
*/
public static <E> Collection<E> collection() {
return new CollectionBuilder<E>().build();
}
    /**
     * Returns an empty {@code Collection} instance constructed using the supplied concrete
     * {@code Collection} implementation class.
     *
     * @param collectionClass A {@code Class} representing the concrete {@code Collection}
     *                        implementation from which to construct the returned instance.
     * @param <E>             The type of the elements contained in the {@code Collection}.
     * @return A {@code Collection} instance over the type {@code E} containing no elements.
     */
    public static <E> Collection<E> collection(Class<? extends Collection> collectionClass) {
        return new CollectionBuilder<E>().build(collectionClass);
    }
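    // Illustrative usage sketch (hypothetical helper added for documentation only; not part of
    // the published API). Assuming CollectionBuilder.build(Class) instantiates the supplied
    // implementation class, this overload yields an empty collection backed by a specific
    // Collection implementation; java.util.LinkedList is used purely as an example.
    private static Collection<String> exampleEmptyLinkedListBackedCollection() {
        return Literals.<String>collection(java.util.LinkedList.class);
    }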
/**
* Returns an empty immutable {@code Collection} instance over the type
* of the supplied {@code Class}.
* <p/>
* <p>This form of literal is most suited to inline usage such as when passing an
* empty collection as a parameter in a method call since it reads more clearly than
* {@link #collection()}. For example, compare the following:
* <blockquote>
* <pre>
* public class Tree {
* ...
*
* public void addNode(Node node, Collection<Attribute> attributes) {
* ...
* }
* }
*
* tree.addNode(new LeafNode(), Literals.<Attribute>collection());
* tree.addNode(new LeafNode(), collectionOf(Attribute.class));
* </pre>
* </blockquote>
* </p>
*
* @param elementClass A {@code Class} representing the type of elements
* contained in this {@code Collection}
* @param <E> The type of the elements contained in the {@code Collection}.
* @return A {@code Collection} instance over the type {@code E} containing no elements.
*/
public static <E> Collection<E> collectionOf(Class<E> elementClass) {
return new CollectionBuilder<E>().build();
}
/**
* Returns an immutable {@code Collection} instance over the type {@code E}
* containing all elements from the supplied {@code Iterable}. The order of
* the elements in the resulting {@code Collection} is determined by the order in
* which they are yielded from the supplied {@code Iterable}.
* <p/>
* <p>This form of literal is useful when an object conforming to the {@code Collection}
* interface is required and only an {@code Iterable} is available. For example:
* <blockquote>
* <pre>
* Iterable<Word> words = book.getWords();
* statusBar.displayWordCount(collectionFrom(words).size());
* </pre>
* </blockquote>
* </p>
*
* @param elements An {@code Iterable} of elements from which a {@code Collection} should
* be constructed.
* @param <E> The type of the elements to be contained in the returned
* {@code Collection}.
* @return A {@code Collection} over the type {@code E} containing all elements from the
* supplied {@code Iterable} in the order they are yielded.
*/
public static <E> Collection<E> collectionFrom(Iterable<? extends E> elements) {
return new CollectionBuilder<E>().with(elements).build();
}
/**
* Returns an immutable {@code Collection} instance over the type {@code E}
* containing all elements from the supplied array. The order of the elements
* in the resulting {@code Collection} is the same as the order of the elements
* in the array.
* <p/>
* <p>For example, the following:
* <blockquote>
* <pre>
* String[] strings = new String[]{"one", "one", "two", "three"};
* Collection<String> collectionOfStrings = Literals.collectionFrom(strings);
* </pre>
* </blockquote>
* is equivalent to:
* <blockquote>
* <pre>
* Collection<String> collectionOfStrings = Literals.collectionWith("one", "one", "two", "three");
* </pre>
* </blockquote>
* </p>
*
* @param elementArray An array of elements from which a {@code Collection} should be
* constructed.
* @param <E> The type of the elements to be contained in the returned
* {@code Collection}.
* @return A {@code Collection} over the type {@code E} containing all elements from the
* supplied array in the same order as the supplied array.
*/
public static <E> Collection<E> collectionFrom(E[] elementArray) {
return new CollectionBuilder<E>().with(elementArray).build();
}
/**
* Returns an immutable {@code Collection} instance over the type {@code E} containing the
* supplied element.
*
* @param e An element from which to construct a {@code Collection}.
* @param <E> The type of the element contained in the returned {@code Collection}.
* @return A {@code Collection} instance over type {@code E} containing the supplied element.
*/
public static <E> Collection<E> collectionWith(E e) {
return collectionFrom(iterableWith(e));
}
/**
* Returns an immutable {@code Collection} instance over the type {@code E} containing the
* supplied elements. The order of the resultant {@code Collection} is the same as the order
* of the elements in the argument list.
*
* @param e1 The first element from which to construct a {@code Collection}.
* @param e2 The second element from which to construct a {@code Collection}.
* @param <E> The type of the elements contained in the returned {@code Collection}.
* @return A {@code Collection} instance over type {@code E} containing the supplied elements.
*/
public static <E> Collection<E> collectionWith(E e1, E e2) {
return collectionFrom(iterableWith(e1, e2));
}
/**
* Returns an immutable {@code Collection} instance over the type {@code E} containing the
* supplied elements. The order of the resultant {@code Collection} is the same as the order
* of the elements in the argument list.
*
* @param e1 The first element from which to construct a {@code Collection}.
* @param e2 The second element from which to construct a {@code Collection}.
* @param e3 The third element from which to construct a {@code Collection}.
* @param <E> The type of the elements contained in the returned {@code Collection}.
* @return A {@code Collection} instance over type {@code E} containing the supplied elements.
*/
public static <E> Collection<E> collectionWith(E e1, E e2, E e3) {
return collectionFrom(iterableWith(e1, e2, e3));
}
/**
* Returns an immutable {@code Collection} instance over the type {@code E} containing the
* supplied elements. The order of the resultant {@code Collection} is the same as the order
* of the elements in the argument list.
*
* @param e1 The first element from which to construct a {@code Collection}.
* @param e2 The second element from which to construct a {@code Collection}.
* @param e3 The third element from which to construct a {@code Collection}.
* @param e4 The fourth element from which to construct a {@code Collection}.
* @param <E> The type of the elements contained in the returned {@code Collection}.
* @return A {@code Collection} instance over type {@code E} containing the supplied elements.
*/
public static <E> Collection<E> collectionWith(E e1, E e2, E e3, E e4) {
return collectionFrom(iterableWith(e1, e2, e3, e4));
}
/**
* Returns an immutable {@code Collection} instance over the type {@code E} containing the
* supplied elements. The order of the resultant {@code Collection} is the same as the order
* of the elements in the argument list.
*
* @param e1 The first element from which to construct a {@code Collection}.
* @param e2 The second element from which to construct a {@code Collection}.
* @param e3 The third element from which to construct a {@code Collection}.
* @param e4 The fourth element from which to construct a {@code Collection}.
* @param e5 The fifth element from which to construct a {@code Collection}.
* @param <E> The type of the elements contained in the returned {@code Collection}.
* @return A {@code Collection} instance over type {@code E} containing the supplied elements.
*/
public static <E> Collection<E> collectionWith(E e1, E e2, E e3, E e4, E e5) {
return collectionFrom(iterableWith(e1, e2, e3, e4, e5));
}
/**
* Returns an immutable {@code Collection} instance over the type {@code E} containing the
* supplied elements. The order of the resultant {@code Collection} is the same as the order
* of the elements in the argument list.
*
* @param e1 The first element from which to construct a {@code Collection}.
* @param e2 The second element from which to construct a {@code Collection}.
* @param e3 The third element from which to construct a {@code Collection}.
* @param e4 The fourth element from which to construct a {@code Collection}.
* @param e5 The fifth element from which to construct a {@code Collection}.
* @param e6 The sixth element from which to construct a {@code Collection}.
* @param <E> The type of the elements contained in the returned {@code Collection}.
* @return A {@code Collection} instance over type {@code E} containing the supplied elements.
*/
public static <E> Collection<E> collectionWith(E e1, E e2, E e3, E e4, E e5, E e6) {
return collectionFrom(iterableWith(e1, e2, e3, e4, e5, e6));
}
/**
* Returns an immutable {@code Collection} instance over the type {@code E} containing the
* supplied elements. The order of the resultant {@code Collection} is the same as the order
* of the elements in the argument list.
*
* @param e1 The first element from which to construct a {@code Collection}.
* @param e2 The second element from which to construct a {@code Collection}.
* @param e3 The third element from which to construct a {@code Collection}.
* @param e4 The fourth element from which to construct a {@code Collection}.
* @param e5 The fifth element from which to construct a {@code Collection}.
* @param e6 The sixth element from which to construct a {@code Collection}.
* @param e7 The seventh element from which to construct a {@code Collection}.
* @param <E> The type of the elements contained in the returned {@code Collection}.
* @return A {@code Collection} instance over type {@code E} containing the supplied elements.
*/
public static <E> Collection<E> collectionWith(E e1, E e2, E e3, E e4, E e5, E e6, E e7) {
return collectionFrom(iterableWith(e1, e2, e3, e4, e5, e6, e7));
}
/**
* Returns an immutable {@code Collection} instance over the type {@code E} containing the
* supplied elements. The order of the resultant {@code Collection} is the same as the order
* of the elements in the argument list.
*
* @param e1 The first element from which to construct a {@code Collection}.
* @param e2 The second element from which to construct a {@code Collection}.
* @param e3 The third element from which to construct a {@code Collection}.
* @param e4 The fourth element from which to construct a {@code Collection}.
* @param e5 The fifth element from which to construct a {@code Collection}.
* @param e6 The sixth element from which to construct a {@code Collection}.
* @param e7 The seventh element from which to construct a {@code Collection}.
* @param e8 The eighth element from which to construct a {@code Collection}.
* @param <E> The type of the elements contained in the returned {@code Collection}.
* @return A {@code Collection} instance over type {@code E} containing the supplied elements.
*/
public static <E> Collection<E> collectionWith(E e1, E e2, E e3, E e4, E e5, E e6, E e7, E e8) {
return collectionFrom(iterableWith(e1, e2, e3, e4, e5, e6, e7, e8));
}
/**
* Returns an immutable {@code Collection} instance over the type {@code E} containing the
* supplied elements. The order of the resultant {@code Collection} is the same as the order
* of the elements in the argument list.
*
* @param e1 The first element from which to construct a {@code Collection}.
* @param e2 The second element from which to construct a {@code Collection}.
* @param e3 The third element from which to construct a {@code Collection}.
* @param e4 The fourth element from which to construct a {@code Collection}.
* @param e5 The fifth element from which to construct a {@code Collection}.
* @param e6 The sixth element from which to construct a {@code Collection}.
* @param e7 The seventh element from which to construct a {@code Collection}.
* @param e8 The eighth element from which to construct a {@code Collection}.
* @param e9 The ninth element from which to construct a {@code Collection}.
* @param <E> The type of the elements contained in the returned {@code Collection}.
* @return A {@code Collection} instance over type {@code E} containing the supplied elements.
*/
public static <E> Collection<E> collectionWith(E e1, E e2, E e3, E e4, E e5, E e6, E e7, E e8, E e9) {
return collectionFrom(iterableWith(e1, e2, e3, e4, e5, e6, e7, e8, e9));
}
/**
* Returns an immutable {@code Collection} instance over the type {@code E} containing the
* supplied elements. The order of the resultant {@code Collection} is the same as the order
* of the elements in the argument list.
*
* @param e1 The first element from which to construct a {@code Collection}.
* @param e2 The second element from which to construct a {@code Collection}.
* @param e3 The third element from which to construct a {@code Collection}.
* @param e4 The fourth element from which to construct a {@code Collection}.
* @param e5 The fifth element from which to construct a {@code Collection}.
* @param e6 The sixth element from which to construct a {@code Collection}.
* @param e7 The seventh element from which to construct a {@code Collection}.
* @param e8 The eighth element from which to construct a {@code Collection}.
* @param e9 The ninth element from which to construct a {@code Collection}.
* @param e10 The tenth element from which to construct a {@code Collection}.
* @param <E> The type of the elements contained in the returned {@code Collection}.
* @return A {@code Collection} instance over type {@code E} containing the supplied elements.
*/
public static <E> Collection<E> collectionWith(E e1, E e2, E e3, E e4, E e5, E e6, E e7, E e8, E e9, E e10) {
return collectionFrom(iterableWith(e1, e2, e3, e4, e5, e6, e7, e8, e9, e10));
}
/**
* Returns an immutable {@code Collection} instance over the type {@code E} containing the
* supplied elements. The order of the resultant {@code Collection} is the same as the order
* of the elements in the argument list.
* <p/>
* <p>Note that this literal uses a generic varargs parameter as the last argument in the
* argument list and as such will cause unchecked cast warnings. Explicit argument
* lists for up to ten arguments have been provided for convenience. In order to avoid
     * the unchecked cast warnings, a {@link CollectionBuilder} can be used instead.</p>
*
* @param e1 The first element from which to construct a {@code Collection}.
* @param e2 The second element from which to construct a {@code Collection}.
* @param e3 The third element from which to construct a {@code Collection}.
* @param e4 The fourth element from which to construct a {@code Collection}.
* @param e5 The fifth element from which to construct a {@code Collection}.
* @param e6 The sixth element from which to construct a {@code Collection}.
* @param e7 The seventh element from which to construct a {@code Collection}.
* @param e8 The eighth element from which to construct a {@code Collection}.
* @param e9 The ninth element from which to construct a {@code Collection}.
* @param e10 The tenth element from which to construct a {@code Collection}.
* @param e11on The remaining elements from which to construct a {@code Collection}.
* @param <E> The type of the elements contained in the returned {@code Collection}.
     * @return A {@code Collection} instance over type {@code E} containing the supplied elements.
*/
public static <E> Collection<E> collectionWith(E e1, E e2, E e3, E e4, E e5, E e6, E e7, E e8, E e9, E e10, E... e11on) {
return collectionBuilderFrom(iterableWith(e1, e2, e3, e4, e5, e6, e7, e8, e9, e10)).with(e11on).build();
}
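    // Illustrative usage sketch (hypothetical helper added for documentation only; not part of
    // the published API). As noted above, the varargs overload triggers an unchecked generic
    // array creation warning; the same Collection can be assembled warning-free by handing the
    // trailing elements to a CollectionBuilder as an array.
    private static Collection<String> exampleCollectionWithoutVarargsWarning(String[] remaining) {
        return collectionBuilderFrom(iterableWith("a", "b")).with(remaining).build();
    }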
/**
* Returns a {@code CollectionBuilder} containing no elements.
* <p/>
* <h4>Example Usage:</h4>
* A {@code CollectionBuilder} can be used to assemble a {@code Collection} as follows:
* <blockquote>
* <pre>
* Collection<Float> collection = Literals.<Float>collectionBuilder()
* .with(1.34F, 2.2F, 3.5F)
* .and(4F, 5.78F, 6.21F)
* .build()
* </pre>
* </blockquote>
* This is equivalent to the following:
* <blockquote>
* <pre>
* Collection<Float> collection = Literals.collectionWith(1.34F, 2.2F, 3.5F, 4F, 5.78F, 6.21F);
* </pre>
* </blockquote>
* The advantage of the {@code CollectionBuilder} is that the collection can be built up from
* individual objects, arrays or existing iterables. See {@link CollectionBuilder} for
* further details.
*
* @param <E> The type of the elements contained in the {@code CollectionBuilder}.
* @return A {@code CollectionBuilder} instance over the type {@code E} containing no elements.
*/
public static <E> CollectionBuilder<E> collectionBuilder() {
return new CollectionBuilder<E>();
}
/**
* Returns a {@code CollectionBuilder} over the type of the supplied {@code Class}
* containing no elements.
* <p/>
* <h4>Example Usage:</h4>
* A {@code CollectionBuilder} can be used to assemble a {@code Collection} as follows:
* <blockquote>
* <pre>
* Collection<Integer> collection = collectionBuilderOf(Integer.class)
* .with(new Integer[]{65, 72})
* .and(95, 43, 20)
* .build()
* </pre>
* </blockquote>
* This is equivalent to the following:
* <blockquote>
* <pre>
* Collection<Integer> collection = Literals.collectionWith(65, 72, 95, 43, 20);
* </pre>
* </blockquote>
* The advantage of the {@code CollectionBuilder} is that the collection can be built
* up from individual objects, arrays or existing iterables. See {@link CollectionBuilder}
* for further details.
*
* @param elementClass A {@code Class} representing the type of elements
* contained in this {@code CollectionBuilder}
* @param <E> The type of the elements contained in the {@code CollectionBuilder}.
* @return A {@code CollectionBuilder} instance over the type {@code E} containing no
* elements.
*/
public static <E> CollectionBuilder<E> collectionBuilderOf(Class<E> elementClass) {
return new CollectionBuilder<E>();
}
/**
* Returns a {@code CollectionBuilder} over type {@code E} initialised with the elements
     * contained in the supplied {@code Iterable}.
* <p/>
* <h4>Example Usage:</h4>
* A {@code CollectionBuilder} can be used to assemble a {@code Collection} from two existing
* {@code List} instances as follows:
* <blockquote>
* <pre>
* List<String> firstList = Literals.listWith("a", "b", "c");
* List<String> secondList = Literals.listWith("d", "e", "f");
* Collection<String> collection = collectionBuilderFrom(firstList)
* .with(secondList)
* .build()
* </pre>
* </blockquote>
* This is equivalent to the following:
* <blockquote>
* <pre>
* Collection<String> collection = Literals.collectionWith("a", "b", "c", "d", "e", "f");
* </pre>
* </blockquote>
* The advantage of the {@code CollectionBuilder} is that the collection can be built up from
* individual objects, arrays or existing iterables. See {@link CollectionBuilder} for
* further details.
*
     * @param elements An {@code Iterable} containing elements with which the
* {@code CollectionBuilder} should be initialised.
* @param <E> The type of the elements contained in the {@code CollectionBuilder}.
* @return A {@code CollectionBuilder} instance over the type {@code E} containing
* the elements from the supplied {@code Iterable}.
*/
public static <E> CollectionBuilder<E> collectionBuilderFrom(Iterable<? extends E> elements) {
return new CollectionBuilder<E>().with(elements);
}
/**
* Returns a {@code CollectionBuilder} over type {@code E} initialised with the elements
* contained in the supplied array.
* <p/>
* <h4>Example Usage:</h4>
* A {@code CollectionBuilder} can be used to assemble a {@code Collection} from two existing
* arrays as follows:
* <blockquote>
* <pre>
* Integer[] firstArray = new Integer[]{1, 2, 3};
     * Integer[] secondArray = new Integer[]{3, 4, 5};
* Collection<Integer> collection = collectionBuilderFrom(firstArray)
* .with(secondArray)
* .build()
* </pre>
* </blockquote>
* This is equivalent to the following:
* <blockquote>
* <pre>
* Collection<Integer> collection = Literals.collectionWith(1, 2, 3, 3, 4, 5);
* </pre>
* </blockquote>
* The advantage of the {@code CollectionBuilder} is that the collection can be built up from
* individual objects, arrays or existing iterables. See {@link CollectionBuilder} for
* further details.
*
* @param elementArray An array containing elements with which the
* {@code CollectionBuilder} should be initialised.
* @param <E> The type of the elements contained in the {@code CollectionBuilder}.
* @return A {@code CollectionBuilder} instance over the type {@code E} containing
* the elements from the supplied array.
*/
public static <E> CollectionBuilder<E> collectionBuilderFrom(E[] elementArray) {
return new CollectionBuilder<E>().with(elementArray);
}
/**
* Returns a {@code CollectionBuilder} instance over the type {@code E} containing
* the supplied element.
*
* @param e The element to be added to the {@code CollectionBuilder}.
* @param <E> The type of the elements contained in the returned {@code CollectionBuilder}.
* @return A {@code CollectionBuilder} instance over type {@code E} containing the supplied
* element.
*/
public static <E> CollectionBuilder<E> collectionBuilderWith(E e) {
return collectionBuilderFrom(iterableWith(e));
}
/**
* Returns a {@code CollectionBuilder} instance over the type {@code E} containing the
* supplied elements. The supplied elements are added to the {@code CollectionBuilder}
* instance in the same order as they are defined in the argument list.
*
* @param e1 The first element to be added to the {@code CollectionBuilder}.
* @param e2 The second element to be added to the {@code CollectionBuilder}.
* @param <E> The type of the elements contained in the returned {@code CollectionBuilder}.
* @return A {@code CollectionBuilder} instance over type {@code E} containing the supplied
* elements.
*/
public static <E> CollectionBuilder<E> collectionBuilderWith(E e1, E e2) {
return collectionBuilderFrom(iterableWith(e1, e2));
}
/**
* Returns a {@code CollectionBuilder} instance over the type {@code E} containing the
* supplied elements. The supplied elements are added to the {@code CollectionBuilder}
* instance in the same order as they are defined in the argument list.
*
* @param e1 The first element to be added to the {@code CollectionBuilder}.
* @param e2 The second element to be added to the {@code CollectionBuilder}.
* @param e3 The third element to be added to the {@code CollectionBuilder}.
* @param <E> The type of the elements contained in the returned {@code CollectionBuilder}.
* @return A {@code CollectionBuilder} instance over type {@code E} containing the supplied
* elements.
*/
public static <E> CollectionBuilder<E> collectionBuilderWith(E e1, E e2, E e3) {
return collectionBuilderFrom(iterableWith(e1, e2, e3));
}
/**
* Returns a {@code CollectionBuilder} instance over the type {@code E} containing the
* supplied elements. The supplied elements are added to the {@code CollectionBuilder}
* instance in the same order as they are defined in the argument list.
*
* @param e1 The first element to be added to the {@code CollectionBuilder}.
* @param e2 The second element to be added to the {@code CollectionBuilder}.
* @param e3 The third element to be added to the {@code CollectionBuilder}.
* @param e4 The fourth element to be added to the {@code CollectionBuilder}.
* @param <E> The type of the elements contained in the returned {@code CollectionBuilder}.
* @return A {@code CollectionBuilder} instance over type {@code E} containing the supplied
* elements.
*/
public static <E> CollectionBuilder<E> collectionBuilderWith(E e1, E e2, E e3, E e4) {
return collectionBuilderFrom(iterableWith(e1, e2, e3, e4));
}
/**
* Returns a {@code CollectionBuilder} instance over the type {@code E} containing the
* supplied elements. The supplied elements are added to the {@code CollectionBuilder}
* instance in the same order as they are defined in the argument list.
*
* @param e1 The first element to be added to the {@code CollectionBuilder}.
* @param e2 The second element to be added to the {@code CollectionBuilder}.
* @param e3 The third element to be added to the {@code CollectionBuilder}.
* @param e4 The fourth element to be added to the {@code CollectionBuilder}.
* @param e5 The fifth element to be added to the {@code CollectionBuilder}.
* @param <E> The type of the elements contained in the returned {@code CollectionBuilder}.
* @return A {@code CollectionBuilder} instance over type {@code E} containing the supplied
* elements.
*/
public static <E> CollectionBuilder<E> collectionBuilderWith(E e1, E e2, E e3, E e4, E e5) {
return collectionBuilderFrom(iterableWith(e1, e2, e3, e4, e5));
}
/**
* Returns a {@code CollectionBuilder} instance over the type {@code E} containing the
* supplied elements. The supplied elements are added to the {@code CollectionBuilder}
* instance in the same order as they are defined in the argument list.
*
* @param e1 The first element to be added to the {@code CollectionBuilder}.
* @param e2 The second element to be added to the {@code CollectionBuilder}.
* @param e3 The third element to be added to the {@code CollectionBuilder}.
* @param e4 The fourth element to be added to the {@code CollectionBuilder}.
* @param e5 The fifth element to be added to the {@code CollectionBuilder}.
* @param e6 The sixth element to be added to the {@code CollectionBuilder}.
* @param <E> The type of the elements contained in the returned {@code CollectionBuilder}.
* @return A {@code CollectionBuilder} instance over type {@code E} containing the supplied
* elements.
*/
public static <E> CollectionBuilder<E> collectionBuilderWith(E e1, E e2, E e3, E e4, E e5, E e6) {
return collectionBuilderFrom(iterableWith(e1, e2, e3, e4, e5, e6));
}
/**
* Returns a {@code CollectionBuilder} instance over the type {@code E} containing the
* supplied elements. The supplied elements are added to the {@code CollectionBuilder}
* instance in the same order as they are defined in the argument list.
*
* @param e1 The first element to be added to the {@code CollectionBuilder}.
* @param e2 The second element to be added to the {@code CollectionBuilder}.
* @param e3 The third element to be added to the {@code CollectionBuilder}.
* @param e4 The fourth element to be added to the {@code CollectionBuilder}.
* @param e5 The fifth element to be added to the {@code CollectionBuilder}.
* @param e6 The sixth element to be added to the {@code CollectionBuilder}.
* @param e7 The seventh element to be added to the {@code CollectionBuilder}.
* @param <E> The type of the elements contained in the returned {@code CollectionBuilder}.
* @return A {@code CollectionBuilder} instance over type {@code E} containing the supplied
* elements.
*/
public static <E> CollectionBuilder<E> collectionBuilderWith(E e1, E e2, E e3, E e4, E e5, E e6, E e7) {
return collectionBuilderFrom(iterableWith(e1, e2, e3, e4, e5, e6, e7));
}
/**
* Returns a {@code CollectionBuilder} instance over the type {@code E} containing the
* supplied elements. The supplied elements are added to the {@code CollectionBuilder}
* instance in the same order as they are defined in the argument list.
*
* @param e1 The first element to be added to the {@code CollectionBuilder}.
* @param e2 The second element to be added to the {@code CollectionBuilder}.
* @param e3 The third element to be added to the {@code CollectionBuilder}.
* @param e4 The fourth element to be added to the {@code CollectionBuilder}.
* @param e5 The fifth element to be added to the {@code CollectionBuilder}.
* @param e6 The sixth element to be added to the {@code CollectionBuilder}.
* @param e7 The seventh element to be added to the {@code CollectionBuilder}.
* @param e8 The eighth element to be added to the {@code CollectionBuilder}.
* @param <E> The type of the elements contained in the returned {@code CollectionBuilder}.
* @return A {@code CollectionBuilder} instance over type {@code E} containing the supplied
* elements.
*/
public static <E> CollectionBuilder<E> collectionBuilderWith(E e1, E e2, E e3, E e4, E e5, E e6, E e7, E e8) {
return collectionBuilderFrom(iterableWith(e1, e2, e3, e4, e5, e6, e7, e8));
}
/**
* Returns a {@code CollectionBuilder} instance over the type {@code E} containing the
* supplied elements. The supplied elements are added to the {@code CollectionBuilder}
* instance in the same order as they are defined in the argument list.
*
* @param e1 The first element to be added to the {@code CollectionBuilder}.
* @param e2 The second element to be added to the {@code CollectionBuilder}.
* @param e3 The third element to be added to the {@code CollectionBuilder}.
* @param e4 The fourth element to be added to the {@code CollectionBuilder}.
* @param e5 The fifth element to be added to the {@code CollectionBuilder}.
* @param e6 The sixth element to be added to the {@code CollectionBuilder}.
* @param e7 The seventh element to be added to the {@code CollectionBuilder}.
* @param e8 The eighth element to be added to the {@code CollectionBuilder}.
* @param e9 The ninth element to be added to the {@code CollectionBuilder}.
* @param <E> The type of the elements contained in the returned {@code CollectionBuilder}.
* @return A {@code CollectionBuilder} instance over type {@code E} containing the supplied
* elements.
*/
public static <E> CollectionBuilder<E> collectionBuilderWith(E e1, E e2, E e3, E e4, E e5, E e6, E e7, E e8, E e9) {
return collectionBuilderFrom(iterableWith(e1, e2, e3, e4, e5, e6, e7, e8, e9));
}
/**
* Returns a {@code CollectionBuilder} instance over the type {@code E} containing the
* supplied elements. The supplied elements are added to the {@code CollectionBuilder}
* instance in the same order as they are defined in the argument list.
*
* @param e1 The first element to be added to the {@code CollectionBuilder}.
* @param e2 The second element to be added to the {@code CollectionBuilder}.
* @param e3 The third element to be added to the {@code CollectionBuilder}.
* @param e4 The fourth element to be added to the {@code CollectionBuilder}.
* @param e5 The fifth element to be added to the {@code CollectionBuilder}.
* @param e6 The sixth element to be added to the {@code CollectionBuilder}.
* @param e7 The seventh element to be added to the {@code CollectionBuilder}.
* @param e8 The eighth element to be added to the {@code CollectionBuilder}.
* @param e9 The ninth element to be added to the {@code CollectionBuilder}.
* @param e10 The tenth element to be added to the {@code CollectionBuilder}.
* @param <E> The type of the elements contained in the returned {@code CollectionBuilder}.
* @return A {@code CollectionBuilder} instance over type {@code E} containing the supplied
* elements.
*/
public static <E> CollectionBuilder<E> collectionBuilderWith(E e1, E e2, E e3, E e4, E e5, E e6, E e7, E e8, E e9, E e10) {
return collectionBuilderFrom(iterableWith(e1, e2, e3, e4, e5, e6, e7, e8, e9, e10));
}
/**
* Returns a {@code CollectionBuilder} instance over the type {@code E} containing the
* supplied elements. The supplied elements are added to the {@code CollectionBuilder}
* instance in the same order as they are defined in the argument list.
* <p/>
* <p>Note that this literal uses a generic varargs parameter as the last argument in the
* argument list and as such will cause unchecked cast warnings. Explicit argument
* lists for up to ten arguments have been provided for convenience. In order to avoid
* the unchecked cast warnings, a {@link CollectionBuilder} instance can be used directly with
* multiple method calls accumulating the builder contents.</p>
*
* @param e1 The first element to be added to the {@code CollectionBuilder}.
* @param e2 The second element to be added to the {@code CollectionBuilder}.
* @param e3 The third element to be added to the {@code CollectionBuilder}.
* @param e4 The fourth element to be added to the {@code CollectionBuilder}.
* @param e5 The fifth element to be added to the {@code CollectionBuilder}.
* @param e6 The sixth element to be added to the {@code CollectionBuilder}.
* @param e7 The seventh element to be added to the {@code CollectionBuilder}.
* @param e8 The eighth element to be added to the {@code CollectionBuilder}.
* @param e9 The ninth element to be added to the {@code CollectionBuilder}.
* @param e10 The tenth element to be added to the {@code CollectionBuilder}.
* @param e11on The remaining elements to be added to the {@code CollectionBuilder}. The elements
* will be added to the {@code CollectionBuilder} in the order they are defined in the
* variadic argument.
* @param <E> The type of the elements contained in the returned {@code CollectionBuilder}.
* @return A {@code CollectionBuilder} instance over type {@code E} containing the supplied
* elements.
*/
public static <E> CollectionBuilder<E> collectionBuilderWith(E e1, E e2, E e3, E e4, E e5, E e6, E e7, E e8, E e9, E e10, E... e11on) {
return collectionBuilderFrom(iterableWith(e1, e2, e3, e4, e5, e6, e7, e8, e9, e10)).with(e11on);
}
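/*
 * Illustrative sketch only (not part of the library API): the note above suggests avoiding the
 * unchecked varargs warning by accumulating elements on a CollectionBuilder directly. Assuming
 * CollectionBuilder mirrors the multi-argument with(...) and build() calls documented for
 * ListBuilder and SetBuilder, that might look like:
 *
 *   Collection<String> strings = collectionBuilderWith("a", "b", "c")
 *           .with("d", "e", "f")
 *           .build();
 */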
/**
* Returns an empty immutable {@code List} instance.
* <p/>
* <p>This form of literal is most suited to direct assignment to a variable
* since in this case, the type {@code E} is inferred from the variable
* declaration. For example:
* <blockquote>
* <pre>
* List<String> strings = list();
* </pre>
* </blockquote>
* </p>
*
* @param <E> The type of the elements contained in the {@code List}.
* @return A {@code List} instance over the type {@code E} containing no elements.
*/
public static <E> List<E> list() {
return new ListBuilder<E>().build();
}
/**
* Returns an empty {@code List} instance built using the supplied concrete
* {@code List} implementation class.
*
* @param listClass The implementation of {@code List} to instantiate.
* @param <E> The type of the elements contained in the {@code List}.
* @return A {@code List} instance of the supplied implementation type containing no elements.
*/
public static <E> List<E> list(Class<? extends List> listClass) {
return new ListBuilder<E>().build(listClass);
}
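/*
 * Illustrative usage only, assuming ListBuilder#build(Class) instantiates the supplied
 * implementation class (not confirmed here):
 *
 *   List<String> strings = Literals.list(java.util.LinkedList.class);
 */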
/**
* Returns an empty immutable {@code List} instance over the type
* of the supplied {@code Class}.
* <p/>
* <p>This form of literal is most suited to inline usage such as when passing an
* empty list as a parameter in a method call since it reads more clearly than
* {@link #list()}. For example, compare the following:
* <blockquote>
* <pre>
* public class Calculation {
* public Calculation(Type type, List<Integer> values) {
* ...
* }
*
* ...
* }
*
* Calculation sum1 = new Calculation(Type.SUM, Literals.<Integer>list());
* Calculation sum2 = new Calculation(Type.SUM, listOf(Integer.class));
* </pre>
* </blockquote>
* </p>
*
* @param elementClass A {@code Class} representing the type of elements
* contained in this {@code List}
* @param <E> The type of the elements contained in the {@code List}.
* @return A {@code List} instance over the type {@code E} containing no elements.
*/
public static <E> List<E> listOf(Class<E> elementClass) {
return new ListBuilder<E>().build();
}
/**
* Returns an immutable {@code List} instance over the type {@code E} containing
* all elements from the supplied {@code Iterable}. The order of the elements
* in the resulting {@code List} is determined by the order in which they are
* yielded from the {@code Iterable}.
* <p/>
* <p>This form of literal is useful when a number of lazy operations have been
* performed resulting in an {@code Iterable} where a collection implementing
* the {@code List} contract is required. For example:
* <blockquote>
* <pre>
* List<Person> people = Literals.listWith(firstPerson, secondPerson, thirdPerson);
* Iterable<Address> addresses = Lazily.map(people, toAddress());
* Iterable<StreetName> streetNames = Lazily.map(addresses, toStreetName());
* Iterable<StreetName> avenueStreetNames = Lazily.filter(streetNames, whereIsAvenue());
* List<StreetName> listOfAvenues = Literals.listFrom(avenueStreetNames);
* </pre>
* </blockquote>
* </p>
*
* @param elements An {@code Iterable} of elements from which a {@code List} should be
* constructed.
* @param <E> The type of the elements to be contained in the returned {@code List}.
* @return A {@code List} over the type {@code E} containing all elements from the
* supplied {@code Iterable} in the order they are yielded.
*/
public static <E> List<E> listFrom(Iterable<? extends E> elements) {
return new ListBuilder<E>().with(elements).build();
}
/**
* Returns an immutable {@code List} instance over the type {@code E} containing all
* elements from the supplied array. The order of the elements in the resulting
* {@code List} is the same as the order of the elements in the array.
* <p/>
* <p>For example, the following:
* <blockquote>
* <pre>
* String[] strings = new String[]{"one", "two", "three"};
* List<String> listOfStrings = Literals.listFrom(strings);
* </pre>
* </blockquote>
* is equivalent to:
* <blockquote>
* <pre>
* List<String> listOfStrings = Literals.listWith("one", "two", "three");
* </pre>
* </blockquote>
* </p>
*
* @param elementArray An array of elements from which a {@code List} should be
* constructed.
* @param <E> The type of the elements to be contained in the returned {@code List}.
* @return A {@code List} over the type {@code E} containing all elements from the
* supplied array in the same order as the supplied array.
*/
public static <E> List<E> listFrom(E[] elementArray) {
return new ListBuilder<E>().with(elementArray).build();
}
/**
* Returns a {@code List} instance over the type {@code E} containing the supplied element.
*
* @param e An element from which to construct a {@code List}.
* @param <E> The type of the element contained in the returned {@code List}.
* @return A {@code List} instance over type {@code E} containing the supplied element.
*/
public static <E> List<E> listWith(E e) {
return listFrom(iterableWith(e));
}
/**
* Returns an immutable {@code List} instance over the type {@code E} containing the
* supplied elements. The order of the resultant {@code List} is the same as the order
* of the elements in the argument list.
*
* @param e1 The first element from which to construct a {@code List}.
* @param e2 The second element from which to construct a {@code List}.
* @param <E> The type of the elements contained in the returned {@code List}.
* @return A {@code List} instance over type {@code E} containing the supplied elements.
*/
public static <E> List<E> listWith(E e1, E e2) {
return listFrom(iterableWith(e1, e2));
}
/**
* Returns an immutable {@code List} instance over the type {@code E} containing the
* supplied elements. The order of the resultant {@code List} is the same as the order
* of the elements in the argument list.
*
* @param e1 The first element from which to construct a {@code List}.
* @param e2 The second element from which to construct a {@code List}.
* @param e3 The third element from which to construct a {@code List}.
* @param <E> The type of the elements contained in the returned {@code List}.
* @return A {@code List} instance over type {@code E} containing the supplied elements.
*/
public static <E> List<E> listWith(E e1, E e2, E e3) {
return listFrom(iterableWith(e1, e2, e3));
}
/**
* Returns an immutable {@code List} instance over the type {@code E} containing the
* supplied elements. The order of the resultant {@code List} is the same as the order
* of the elements in the argument list.
*
* @param e1 The first element from which to construct a {@code List}.
* @param e2 The second element from which to construct a {@code List}.
* @param e3 The third element from which to construct a {@code List}.
* @param e4 The fourth element from which to construct a {@code List}.
* @param <E> The type of the elements contained in the returned {@code List}.
* @return A {@code List} instance over type {@code E} containing the supplied elements.
*/
public static <E> List<E> listWith(E e1, E e2, E e3, E e4) {
return listFrom(iterableWith(e1, e2, e3, e4));
}
/**
* Returns an immutable {@code List} instance over the type {@code E} containing the
* supplied elements. The order of the resultant {@code List} is the same as the order
* of the elements in the argument list.
*
* @param e1 The first element from which to construct a {@code List}.
* @param e2 The second element from which to construct a {@code List}.
* @param e3 The third element from which to construct a {@code List}.
* @param e4 The fourth element from which to construct a {@code List}.
* @param e5 The fifth element from which to construct a {@code List}.
* @param <E> The type of the elements contained in the returned {@code List}.
* @return A {@code List} instance over type {@code E} containing the supplied elements.
*/
public static <E> List<E> listWith(E e1, E e2, E e3, E e4, E e5) {
return listFrom(iterableWith(e1, e2, e3, e4, e5));
}
/**
* Returns an immutable {@code List} instance over the type {@code E} containing the
* supplied elements. The order of the resultant {@code List} is the same as the order
* of the elements in the argument list.
*
* @param e1 The first element from which to construct a {@code List}.
* @param e2 The second element from which to construct a {@code List}.
* @param e3 The third element from which to construct a {@code List}.
* @param e4 The fourth element from which to construct a {@code List}.
* @param e5 The fifth element from which to construct a {@code List}.
* @param e6 The sixth element from which to construct a {@code List}.
* @param <E> The type of the elements contained in the returned {@code List}.
* @return A {@code List} instance over type {@code E} containing the supplied elements.
*/
public static <E> List<E> listWith(E e1, E e2, E e3, E e4, E e5, E e6) {
return listFrom(iterableWith(e1, e2, e3, e4, e5, e6));
}
/**
* Returns an immutable {@code List} instance over the type {@code E} containing the
* supplied elements. The order of the resultant {@code List} is the same as the order
* of the elements in the argument list.
*
* @param e1 The first element from which to construct a {@code List}.
* @param e2 The second element from which to construct a {@code List}.
* @param e3 The third element from which to construct a {@code List}.
* @param e4 The fourth element from which to construct a {@code List}.
* @param e5 The fifth element from which to construct a {@code List}.
* @param e6 The sixth element from which to construct a {@code List}.
* @param e7 The seventh element from which to construct a {@code List}.
* @param <E> The type of the elements contained in the returned {@code List}.
* @return A {@code List} instance over type {@code E} containing the supplied elements.
*/
public static <E> List<E> listWith(E e1, E e2, E e3, E e4, E e5, E e6, E e7) {
return listFrom(iterableWith(e1, e2, e3, e4, e5, e6, e7));
}
/**
* Returns an immutable {@code List} instance over the type {@code E} containing the
* supplied elements. The order of the resultant {@code List} is the same as the order
* of the elements in the argument list.
*
* @param e1 The first element from which to construct a {@code List}.
* @param e2 The second element from which to construct a {@code List}.
* @param e3 The third element from which to construct a {@code List}.
* @param e4 The fourth element from which to construct a {@code List}.
* @param e5 The fifth element from which to construct a {@code List}.
* @param e6 The sixth element from which to construct a {@code List}.
* @param e7 The seventh element from which to construct a {@code List}.
* @param e8 The eighth element from which to construct a {@code List}.
* @param <E> The type of the elements contained in the returned {@code List}.
* @return A {@code List} instance over type {@code E} containing the supplied elements.
*/
public static <E> List<E> listWith(E e1, E e2, E e3, E e4, E e5, E e6, E e7, E e8) {
return listFrom(iterableWith(e1, e2, e3, e4, e5, e6, e7, e8));
}
/**
* Returns an immutable {@code List} instance over the type {@code E} containing the
* supplied elements. The order of the resultant {@code List} is the same as the order
* of the elements in the argument list.
*
* @param e1 The first element from which to construct a {@code List}.
* @param e2 The second element from which to construct a {@code List}.
* @param e3 The third element from which to construct a {@code List}.
* @param e4 The fourth element from which to construct a {@code List}.
* @param e5 The fifth element from which to construct a {@code List}.
* @param e6 The sixth element from which to construct a {@code List}.
* @param e7 The seventh element from which to construct a {@code List}.
* @param e8 The eighth element from which to construct a {@code List}.
* @param e9 The ninth element from which to construct a {@code List}.
* @param <E> The type of the elements contained in the returned {@code List}.
* @return A {@code List} instance over type {@code E} containing the supplied elements.
*/
public static <E> List<E> listWith(E e1, E e2, E e3, E e4, E e5, E e6, E e7, E e8, E e9) {
return listFrom(iterableWith(e1, e2, e3, e4, e5, e6, e7, e8, e9));
}
/**
* Returns an immutable {@code List} instance over the type {@code E} containing the
* supplied elements. The order of the resultant {@code List} is the same as the order of
* the elements in the argument list.
*
* @param e1 The first element from which to construct a {@code List}.
* @param e2 The second element from which to construct a {@code List}.
* @param e3 The third element from which to construct a {@code List}.
* @param e4 The fourth element from which to construct a {@code List}.
* @param e5 The fifth element from which to construct a {@code List}.
* @param e6 The sixth element from which to construct a {@code List}.
* @param e7 The seventh element from which to construct a {@code List}.
* @param e8 The eighth element from which to construct a {@code List}.
* @param e9 The ninth element from which to construct a {@code List}.
* @param e10 The tenth element from which to construct a {@code List}.
* @param <E> The type of the elements contained in the returned {@code List}.
* @return A {@code List} instance over type {@code E} containing the supplied elements.
*/
public static <E> List<E> listWith(E e1, E e2, E e3, E e4, E e5, E e6, E e7, E e8, E e9, E e10) {
return listFrom(iterableWith(e1, e2, e3, e4, e5, e6, e7, e8, e9, e10));
}
/**
* Returns an immutable {@code List} instance over the type {@code E} containing the
* supplied elements. The order of the resultant {@code List} is the same as the order of the
* elements in the argument list.
* <p/>
* <p>Note that this literal uses a generic varargs parameter as the last argument in the
* argument list and as such will cause unchecked cast warnings. Explicit argument
* lists for up to ten arguments have been provided for convenience. In order to avoid
* the unchecked cast warnings, a {@link ListBuilder} can be used instead.</p>
*
* @param e1 The first element from which to construct a {@code List}.
* @param e2 The second element from which to construct a {@code List}.
* @param e3 The third element from which to construct a {@code List}.
* @param e4 The fourth element from which to construct a {@code List}.
* @param e5 The fifth element from which to construct a {@code List}.
* @param e6 The sixth element from which to construct a {@code List}.
* @param e7 The seventh element from which to construct a {@code List}.
* @param e8 The eighth element from which to construct a {@code List}.
* @param e9 The ninth element from which to construct a {@code List}.
* @param e10 The tenth element from which to construct a {@code List}.
* @param e11on The remaining elements from which to construct a {@code List}.
* @param <E> The type of the elements contained in the returned {@code List}.
* @return A {@code List} instance over type {@code E} containing the supplied elements.
*/
public static <E> List<E> listWith(E e1, E e2, E e3, E e4, E e5, E e6, E e7, E e8, E e9, E e10, E... e11on) {
return listBuilderFrom(iterableWith(e1, e2, e3, e4, e5, e6, e7, e8, e9, e10)).with(e11on).build();
}
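/*
 * Illustrative sketch only: per the note above, the unchecked varargs warning can be avoided by
 * accumulating the elements through a ListBuilder instead, using the with/and/build calls shown
 * in the ListBuilder examples that follow:
 *
 *   List<Integer> numbers = Literals.<Integer>listBuilder()
 *           .with(1, 2, 3)
 *           .and(4, 5, 6)
 *           .build();
 */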
/**
* Returns a {@code ListBuilder} containing no elements.
* <p/>
* <h4>Example Usage:</h4>
* A {@code ListBuilder} can be used to assemble a {@code List} as follows:
* <blockquote>
* <pre>
* List<Integer> list = Literals.<Integer>listBuilder()
* .with(1, 2, 3)
* .and(4, 5, 6)
* .build()
* </pre>
* </blockquote>
* This is equivalent to the following:
* <blockquote>
* <pre>
* List<Integer> list = Literals.listWith(1, 2, 3, 4, 5, 6);
* </pre>
* </blockquote>
* The advantage of the {@code ListBuilder} is that the list can be built up from
* individual objects, arrays or existing iterables. See {@link ListBuilder} for
* further details.
*
* @param <E> The type of the elements contained in the {@code ListBuilder}.
* @return A {@code ListBuilder} instance over the type {@code E} containing no elements.
*/
public static <E> ListBuilder<E> listBuilder() {
return new ListBuilder<E>();
}
/**
* Returns a {@code ListBuilder} over the type of the supplied {@code Class}
* containing no elements.
* <p/>
* <h4>Example Usage:</h4>
* A {@code ListBuilder} can be used to assemble a {@code List} as follows:
* <blockquote>
* <pre>
* List<Integer> list = listBuilderOf(Integer.class)
* .with(1, 2, 3)
* .and(4, 5, 6)
* .build()
* </pre>
* </blockquote>
* This is equivalent to the following:
* <blockquote>
* <pre>
* List<Integer> list = Literals.listWith(1, 2, 3, 4, 5, 6);
* </pre>
* </blockquote>
* The advantage of the {@code ListBuilder} is that the list can be built up from
* individual objects, arrays or existing iterables. See {@link ListBuilder} for
* further details.
*
* @param elementClass A {@code Class} representing the type of elements
* contained in this {@code ListBuilder}
* @param <E> The type of the elements contained in the {@code ListBuilder}.
* @return A {@code ListBuilder} instance over the type {@code E} containing no
* elements.
*/
public static <E> ListBuilder<E> listBuilderOf(Class<E> elementClass) {
return new ListBuilder<E>();
}
/**
* Returns a {@code ListBuilder} over type {@code E} initialised with the elements
* contained in the supplied {@code Iterable}.
* <p/>
* <h4>Example Usage:</h4>
* A {@code ListBuilder} can be used to assemble a {@code List} from two existing
* {@code Collection} instances as follows:
* <blockquote>
* <pre>
* Collection<Integer> firstCollection = Literals.collectionWith(1, 2, 3);
* Collection<Integer> secondCollection = Literals.collectionWith(3, 4, 5);
* List<Integer> list = listBuilderFrom(firstCollection)
* .with(secondCollection)
* .build()
* </pre>
* </blockquote>
* This is equivalent to the following:
* <blockquote>
* <pre>
* List<Integer> list = Literals.listWith(1, 2, 3, 3, 4, 5);
* </pre>
* </blockquote>
* The advantage of the {@code ListBuilder} is that the list can be built up from
* individual objects, arrays or existing iterables. See {@link ListBuilder} for
* further details.
*
* @param elements An {@code Iterable} containing elements with which the
* {@code ListBuilder} should be initialised.
* @param <E> The type of the elements contained in the {@code ListBuilder}.
* @return A {@code ListBuilder} instance over the type {@code E} containing
* the elements from the supplied {@code Iterable}.
*/
public static <E> ListBuilder<E> listBuilderFrom(Iterable<? extends E> elements) {
return new ListBuilder<E>().with(elements);
}
/**
* Returns a {@code ListBuilder} over type {@code E} initialised with the elements
* contained in the supplied array.
* <p/>
* <h4>Example Usage:</h4>
* A {@code ListBuilder} can be used to assemble a {@code List} from two existing
* arrays as follows:
* <blockquote>
* <pre>
* Integer[] firstArray = new Integer[]{1, 2, 3};
* Integer[] secondArray = new Integer[]{3, 4, 5};
* List<Integer> list = listBuilderFrom(firstArray)
* .with(secondArray)
* .build()
* </pre>
* </blockquote>
* This is equivalent to the following:
* <blockquote>
* <pre>
* List<Integer> list = Literals.listWith(1, 2, 3, 3, 4, 5);
* </pre>
* </blockquote>
* The advantage of the {@code ListBuilder} is that the list can be built up from
* individual objects, arrays or existing iterables. See {@link ListBuilder} for
* further details.
*
* @param elementArray An array containing elements with which the
* {@code ListBuilder} should be initialised.
* @param <E> The type of the elements contained in the {@code ListBuilder}.
* @return A {@code ListBuilder} instance over the type {@code E} containing
* the elements from the supplied array.
*/
public static <E> ListBuilder<E> listBuilderFrom(E[] elementArray) {
return new ListBuilder<E>().with(elementArray);
}
/**
* Returns a {@code ListBuilder} instance over the type {@code E} containing the supplied element.
*
* @param e The element to be added to the {@code ListBuilder}.
* @param <E> The type of the elements contained in the returned {@code ListBuilder}.
* @return A {@code ListBuilder} instance over type {@code E} containing the supplied
* element.
*/
public static <E> ListBuilder<E> listBuilderWith(E e) {
return listBuilderFrom(iterableWith(e));
}
/**
* Returns a {@code ListBuilder} instance over the type {@code E} containing the supplied
* elements. The supplied elements are added to the {@code ListBuilder} instance in the same
* order as they are defined in the argument list.
*
* @param e1 The first element to be added to the {@code ListBuilder}.
* @param e2 The second element to be added to the {@code ListBuilder}.
* @param <E> The type of the elements contained in the returned {@code ListBuilder}.
* @return A {@code ListBuilder} instance over type {@code E} containing the supplied
* elements.
*/
public static <E> ListBuilder<E> listBuilderWith(E e1, E e2) {
return listBuilderFrom(iterableWith(e1, e2));
}
/**
* Returns a {@code ListBuilder} instance over the type {@code E} containing the supplied
* elements. The supplied elements are added to the {@code ListBuilder} instance in the same
* order as they are defined in the argument list.
*
* @param e1 The first element to be added to the {@code ListBuilder}.
* @param e2 The second element to be added to the {@code ListBuilder}.
* @param e3 The third element to be added to the {@code ListBuilder}.
* @param <E> The type of the elements contained in the returned {@code ListBuilder}.
* @return A {@code ListBuilder} instance over type {@code E} containing the supplied
* elements.
*/
public static <E> ListBuilder<E> listBuilderWith(E e1, E e2, E e3) {
return listBuilderFrom(iterableWith(e1, e2, e3));
}
/**
* Returns a {@code ListBuilder} instance over the type {@code E} containing the supplied
* elements. The supplied elements are added to the {@code ListBuilder} instance in the same
* order as they are defined in the argument list.
*
* @param e1 The first element to be added to the {@code ListBuilder}.
* @param e2 The second element to be added to the {@code ListBuilder}.
* @param e3 The third element to be added to the {@code ListBuilder}.
* @param e4 The fourth element to be added to the {@code ListBuilder}.
* @param <E> The type of the elements contained in the returned {@code ListBuilder}.
* @return A {@code ListBuilder} instance over type {@code E} containing the supplied
* elements.
*/
public static <E> ListBuilder<E> listBuilderWith(E e1, E e2, E e3, E e4) {
return listBuilderFrom(iterableWith(e1, e2, e3, e4));
}
/**
* Returns a {@code ListBuilder} instance over the type {@code E} containing the supplied
* elements. The supplied elements are added to the {@code ListBuilder} instance in the same
* order as they are defined in the argument list.
*
* @param e1 The first element to be added to the {@code ListBuilder}.
* @param e2 The second element to be added to the {@code ListBuilder}.
* @param e3 The third element to be added to the {@code ListBuilder}.
* @param e4 The fourth element to be added to the {@code ListBuilder}.
* @param e5 The fifth element to be added to the {@code ListBuilder}.
* @param <E> The type of the elements contained in the returned {@code ListBuilder}.
* @return A {@code ListBuilder} instance over type {@code E} containing the supplied
* elements.
*/
public static <E> ListBuilder<E> listBuilderWith(E e1, E e2, E e3, E e4, E e5) {
return listBuilderFrom(iterableWith(e1, e2, e3, e4, e5));
}
/**
* Returns a {@code ListBuilder} instance over the type {@code E} containing the supplied
* elements. The supplied elements are added to the {@code ListBuilder} instance in the same
* order as they are defined in the argument list.
*
* @param e1 The first element to be added to the {@code ListBuilder}.
* @param e2 The second element to be added to the {@code ListBuilder}.
* @param e3 The third element to be added to the {@code ListBuilder}.
* @param e4 The fourth element to be added to the {@code ListBuilder}.
* @param e5 The fifth element to be added to the {@code ListBuilder}.
* @param e6 The sixth element to be added to the {@code ListBuilder}.
* @param <E> The type of the elements contained in the returned {@code ListBuilder}.
* @return A {@code ListBuilder} instance over type {@code E} containing the supplied
* elements.
*/
public static <E> ListBuilder<E> listBuilderWith(E e1, E e2, E e3, E e4, E e5, E e6) {
return listBuilderFrom(iterableWith(e1, e2, e3, e4, e5, e6));
}
/**
* Returns a {@code ListBuilder} instance over the type {@code E} containing the supplied
* elements. The supplied elements are added to the {@code ListBuilder} instance in the same
* order as they are defined in the argument list.
*
* @param e1 The first element to be added to the {@code ListBuilder}.
* @param e2 The second element to be added to the {@code ListBuilder}.
* @param e3 The third element to be added to the {@code ListBuilder}.
* @param e4 The fourth element to be added to the {@code ListBuilder}.
* @param e5 The fifth element to be added to the {@code ListBuilder}.
* @param e6 The sixth element to be added to the {@code ListBuilder}.
* @param e7 The seventh element to be added to the {@code ListBuilder}.
* @param <E> The type of the elements contained in the returned {@code ListBuilder}.
* @return A {@code ListBuilder} instance over type {@code E} containing the supplied
* elements.
*/
public static <E> ListBuilder<E> listBuilderWith(E e1, E e2, E e3, E e4, E e5, E e6, E e7) {
return listBuilderFrom(iterableWith(e1, e2, e3, e4, e5, e6, e7));
}
/**
* Returns a {@code ListBuilder} instance over the type {@code E} containing the supplied
* elements. The supplied elements are added to the {@code ListBuilder} instance in the same
* order as they are defined in the argument list.
*
* @param e1 The first element to be added to the {@code ListBuilder}.
* @param e2 The second element to be added to the {@code ListBuilder}.
* @param e3 The third element to be added to the {@code ListBuilder}.
* @param e4 The fourth element to be added to the {@code ListBuilder}.
* @param e5 The fifth element to be added to the {@code ListBuilder}.
* @param e6 The sixth element to be added to the {@code ListBuilder}.
* @param e7 The seventh element to be added to the {@code ListBuilder}.
* @param e8 The eighth element to be added to the {@code ListBuilder}.
* @param <E> The type of the elements contained in the returned {@code ListBuilder}.
* @return A {@code ListBuilder} instance over type {@code E} containing the supplied
* elements.
*/
public static <E> ListBuilder<E> listBuilderWith(E e1, E e2, E e3, E e4, E e5, E e6, E e7, E e8) {
return listBuilderFrom(iterableWith(e1, e2, e3, e4, e5, e6, e7, e8));
}
/**
* Returns a {@code ListBuilder} instance over the type {@code E} containing the supplied
* elements. The supplied elements are added to the {@code ListBuilder} instance in the same
* order as they are defined in the argument list.
*
* @param e1 The first element to be added to the {@code ListBuilder}.
* @param e2 The second element to be added to the {@code ListBuilder}.
* @param e3 The third element to be added to the {@code ListBuilder}.
* @param e4 The fourth element to be added to the {@code ListBuilder}.
* @param e5 The fifth element to be added to the {@code ListBuilder}.
* @param e6 The sixth element to be added to the {@code ListBuilder}.
* @param e7 The seventh element to be added to the {@code ListBuilder}.
* @param e8 The eighth element to be added to the {@code ListBuilder}.
* @param e9 The ninth element to be added to the {@code ListBuilder}.
* @param <E> The type of the elements contained in the returned {@code ListBuilder}.
* @return A {@code ListBuilder} instance over type {@code E} containing the supplied
* elements.
*/
public static <E> ListBuilder<E> listBuilderWith(E e1, E e2, E e3, E e4, E e5, E e6, E e7, E e8, E e9) {
return listBuilderFrom(iterableWith(e1, e2, e3, e4, e5, e6, e7, e8, e9));
}
/**
* Returns a {@code ListBuilder} instance over the type {@code E} containing the supplied
* elements. The supplied elements are added to the {@code ListBuilder} instance in the same
* order as they are defined in the argument list.
*
* @param e1 The first element to be added to the {@code ListBuilder}.
* @param e2 The second element to be added to the {@code ListBuilder}.
* @param e3 The third element to be added to the {@code ListBuilder}.
* @param e4 The fourth element to be added to the {@code ListBuilder}.
* @param e5 The fifth element to be added to the {@code ListBuilder}.
* @param e6 The sixth element to be added to the {@code ListBuilder}.
* @param e7 The seventh element to be added to the {@code ListBuilder}.
* @param e8 The eighth element to be added to the {@code ListBuilder}.
* @param e9 The ninth element to be added to the {@code ListBuilder}.
* @param e10 The tenth element to be added to the {@code ListBuilder}.
* @param <E> The type of the elements contained in the returned {@code ListBuilder}.
* @return A {@code ListBuilder} instance over type {@code E} containing the supplied
* elements.
*/
public static <E> ListBuilder<E> listBuilderWith(E e1, E e2, E e3, E e4, E e5, E e6, E e7, E e8, E e9, E e10) {
return listBuilderFrom(iterableWith(e1, e2, e3, e4, e5, e6, e7, e8, e9, e10));
}
/**
* Returns a {@code ListBuilder} instance over the type {@code E} containing the supplied
* elements. The supplied elements are added to the {@code ListBuilder} instance in the same
* order as they are defined in the argument list.
* <p/>
* <p>Note that this literal uses a generic varargs parameter as the last argument in the
* argument list and as such will cause unchecked cast warnings. Explicit argument
* lists for up to ten arguments have been provided for convenience. In order to avoid
* the unchecked cast warnings, a {@link ListBuilder} instance can be used directly with
* multiple method calls accumulating the builder contents.</p>
*
* @param e1 The first element to be added to the {@code ListBuilder}.
* @param e2 The second element to be added to the {@code ListBuilder}.
* @param e3 The third element to be added to the {@code ListBuilder}.
* @param e4 The fourth element to be added to the {@code ListBuilder}.
* @param e5 The fifth element to be added to the {@code ListBuilder}.
* @param e6 The sixth element to be added to the {@code ListBuilder}.
* @param e7 The seventh element to be added to the {@code ListBuilder}.
* @param e8 The eighth element to be added to the {@code ListBuilder}.
* @param e9 The ninth element to be added to the {@code ListBuilder}.
* @param e10 The tenth element to be added to the {@code ListBuilder}.
* @param e11on The remaining elements to be added to the {@code ListBuilder}. The elements
* will be added to the {@code ListBuilder} in the order they are defined in the
* variadic argument.
* @param <E> The type of the elements contained in the returned {@code ListBuilder}.
* @return A {@code ListBuilder} instance over type {@code E} containing the supplied
* elements.
*/
public static <E> ListBuilder<E> listBuilderWith(E e1, E e2, E e3, E e4, E e5, E e6, E e7, E e8, E e9, E e10, E... e11on) {
return listBuilderFrom(iterableWith(e1, e2, e3, e4, e5, e6, e7, e8, e9, e10)).with(e11on);
}
/**
* Returns an empty immutable {@code Set} instance.
* <p/>
* <p>This form of literal is most suited to direct assignment to a variable
* since in this case, the type {@code E} is inferred from the variable
* declaration. For example:
* <blockquote>
* <pre>
* Set<String> strings = set();
* </pre>
* </blockquote>
* </p>
*
* @param <E> The type of the elements contained in the {@code Set}.
* @return A {@code Set} instance over the type {@code E} containing no elements.
*/
public static <E> Set<E> set() {
return new SetBuilder<E>().build();
}
/**
* Returns an empty {@code Set} instance built using the supplied concrete
* {@code Set} implementation class.
*
* @param setClass The implementation of {@code Set} to instantiate.
* @param <E> The type of the elements contained in the {@code Set}.
* @return A {@code Set} instance of the supplied implementation type containing no elements.
*/
public static <E> Set<E> set(Class<? extends Set> setClass) {
return new SetBuilder<E>().build(setClass);
}
/**
* Returns an empty immutable {@code Set} instance over the type
* of the supplied {@code Class}.
* <p/>
* <p>This form of literal is most suited to inline usage such as when passing an
* empty set as a parameter in a method call since it reads more clearly than
* {@link #set()}. For example, compare the following:
* <blockquote>
* <pre>
* public class OrderRepository {
* public void save(Customer customer, Set<LineItem> lineItems) {
* ...
* }
*
* ...
* }
*
* orderRepository.save(customer, Literals.<LineItem>set());
* orderRepository.save(customer, setOf(LineItem.class));
* </pre>
* </blockquote>
* </p>
*
* @param elementClass A {@code Class} representing the type of elements
* contained in this {@code Set}.
* @param <E> The type of the elements contained in the {@code Set}.
* @return A {@code Set} instance over the type {@code E} containing no elements.
*/
public static <E> Set<E> setOf(Class<E> elementClass) {
return new SetBuilder<E>().build();
}
/**
* Returns an immutable {@code Set} instance over the type {@code E} containing
* all elements from the supplied {@code Iterable}. Due to the nature of a
* {@code Set}, any duplicate elements in the supplied {@code Iterable} will
* be removed.
* <p/>
* <p>This form of literal is useful when a number of lazy operations have been
* performed resulting in an {@code Iterable} where a collection implementing
* the {@code Set} contract is required. For example:
* <blockquote>
* <pre>
* Set<Person> people = Literals.setWith(firstPerson, secondPerson, thirdPerson);
* Iterable<Address> addresses = Lazily.map(people, toAddress());
* Iterable<StreetName> streetNames = Lazily.map(addresses, toStreetName());
* Iterable<StreetName> avenueStreetNames = Lazily.filter(streetNames, whereIsAvenue());
* Set<StreetName> relevantAvenues = Literals.setFrom(avenueStreetNames);
* </pre>
* </blockquote>
* </p>
*
* @param elements An {@code Iterable} of elements from which a {@code Set} should be
* constructed.
* @param <E> The type of the elements to be contained in the returned {@code Set}.
* @return A {@code Set} over the type {@code E} containing all unique elements from the
* supplied {@code Iterable}.
*/
public static <E> Set<E> setFrom(Iterable<? extends E> elements) {
return new SetBuilder<E>().with(elements).build();
}
/**
* Returns an immutable {@code Set} instance over the type {@code E} containing
* all elements from the supplied array. Due to the nature of a {@code Set}, any
* duplicate elements in the supplied array will be removed.
* <p/>
* <p>For example, the following:
* <blockquote>
* <pre>
* String[] strings = new String[]{"one", "two", "two", "three"};
* Set<String> setOfStrings = Literals.setFrom(strings);
* </pre>
* </blockquote>
* is equivalent to:
* <blockquote>
* <pre>
*     Set<String> setOfStrings = Literals.setWith("one", "two", "three");
* </pre>
* </blockquote>
* </p>
*
* @param elementArray An array of elements from which a {@code Set} should be
* constructed.
* @param <E> The type of the elements to be contained in the returned {@code Set}.
* @return A {@code Set} over the type {@code E} containing all unique elements from the
* supplied array.
*/
public static <E> Set<E> setFrom(E[] elementArray) {
return new SetBuilder<E>().with(elementArray).build();
}
/**
* Returns an immutable {@code Set} instance over the type {@code E} containing the
* supplied element.
*
* @param e An element from which to construct a {@code Set}.
* @param <E> The type of the element contained in the returned {@code Set}.
* @return A {@code Set} instance over type {@code E} containing the supplied element.
*/
public static <E> Set<E> setWith(E e) {
return setFrom(iterableWith(e));
}
/**
* Returns an immutable {@code Set} instance over the type {@code E} containing the
* supplied elements. Due to the nature of a {@code Set}, any supplied duplicate
* elements will be removed.
*
* @param e1 The first element from which to construct a {@code Set}.
* @param e2 The second element from which to construct a {@code Set}.
* @param <E> The type of the elements contained in the returned {@code Set}.
* @return A {@code Set} instance over type {@code E} containing the supplied elements.
*/
public static <E> Set<E> setWith(E e1, E e2) {
return setFrom(iterableWith(e1, e2));
}
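/*
 * Illustrative example only: duplicate arguments collapse to a single element, so
 *
 *   Set<String> strings = setWith("one", "one");
 *
 * yields a Set containing just "one".
 */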
/**
* Returns an immutable {@code Set} instance over the type {@code E} containing the
* supplied elements. Due to the nature of a {@code Set}, any supplied duplicate elements
* will be removed.
*
* @param e1 The first element from which to construct a {@code Set}.
* @param e2 The second element from which to construct a {@code Set}.
* @param e3 The third element from which to construct a {@code Set}.
* @param <E> The type of the elements contained in the returned {@code Set}.
* @return A {@code Set} instance over type {@code E} containing the supplied elements.
*/
public static <E> Set<E> setWith(E e1, E e2, E e3) {
return setFrom(iterableWith(e1, e2, e3));
}
/**
* Returns an immutable {@code Set} instance over the type {@code E} containing the
* supplied elements. Due to the nature of a {@code Set}, any supplied duplicate
* elements will be removed.
*
* @param e1 The first element from which to construct a {@code Set}.
* @param e2 The second element from which to construct a {@code Set}.
* @param e3 The third element from which to construct a {@code Set}.
* @param e4 The fourth element from which to construct a {@code Set}.
* @param <E> The type of the elements contained in the returned {@code Set}.
* @return A {@code Set} instance over type {@code E} containing the supplied elements.
*/
public static <E> Set<E> setWith(E e1, E e2, E e3, E e4) {
return setFrom(iterableWith(e1, e2, e3, e4));
}
/**
* Returns an immutable {@code Set} instance over the type {@code E} containing the
* supplied elements. Due to the nature of a {@code Set}, any supplied duplicate
* elements will be removed.
*
* @param e1 The first element from which to construct a {@code Set}.
* @param e2 The second element from which to construct a {@code Set}.
* @param e3 The third element from which to construct a {@code Set}.
* @param e4 The fourth element from which to construct a {@code Set}.
* @param e5 The fifth element from which to construct a {@code Set}.
* @param <E> The type of the elements contained in the returned {@code Set}.
* @return A {@code Set} instance over type {@code E} containing the supplied elements.
*/
public static <E> Set<E> setWith(E e1, E e2, E e3, E e4, E e5) {
return setFrom(iterableWith(e1, e2, e3, e4, e5));
}
/**
* Returns an immutable {@code Set} instance over the type {@code E} containing the
* supplied elements. Due to the nature of a {@code Set}, any supplied duplicate
* elements will be removed.
*
* @param e1 The first element from which to construct a {@code Set}.
* @param e2 The second element from which to construct a {@code Set}.
* @param e3 The third element from which to construct a {@code Set}.
* @param e4 The fourth element from which to construct a {@code Set}.
* @param e5 The fifth element from which to construct a {@code Set}.
* @param e6 The sixth element from which to construct a {@code Set}.
* @param <E> The type of the elements contained in the returned {@code Set}.
* @return A {@code Set} instance over type {@code E} containing the supplied elements.
*/
public static <E> Set<E> setWith(E e1, E e2, E e3, E e4, E e5, E e6) {
return setFrom(iterableWith(e1, e2, e3, e4, e5, e6));
}
/**
* Returns an immutable {@code Set} instance over the type {@code E} containing the
* supplied elements. Due to the nature of a {@code Set}, any supplied duplicate
* elements will be removed.
*
* @param e1 The first element from which to construct a {@code Set}.
* @param e2 The second element from which to construct a {@code Set}.
* @param e3 The third element from which to construct a {@code Set}.
* @param e4 The fourth element from which to construct a {@code Set}.
* @param e5 The fifth element from which to construct a {@code Set}.
* @param e6 The sixth element from which to construct a {@code Set}.
* @param e7 The seventh element from which to construct a {@code Set}.
* @param <E> The type of the elements contained in the returned {@code Set}.
* @return A {@code Set} instance over type {@code E} containing the supplied elements.
*/
public static <E> Set<E> setWith(E e1, E e2, E e3, E e4, E e5, E e6, E e7) {
return setFrom(iterableWith(e1, e2, e3, e4, e5, e6, e7));
}
/**
* Returns an immutable {@code Set} instance over the type {@code E} containing the
* supplied elements. Due to the nature of a {@code Set}, any supplied duplicate
* elements will be removed.
*
* @param e1 The first element from which to construct a {@code Set}.
* @param e2 The second element from which to construct a {@code Set}.
* @param e3 The third element from which to construct a {@code Set}.
* @param e4 The fourth element from which to construct a {@code Set}.
* @param e5 The fifth element from which to construct a {@code Set}.
* @param e6 The sixth element from which to construct a {@code Set}.
* @param e7 The seventh element from which to construct a {@code Set}.
* @param e8 The eighth element from which to construct a {@code Set}.
* @param <E> The type of the elements contained in the returned {@code Set}.
* @return A {@code Set} instance over type {@code E} containing the supplied elements.
*/
public static <E> Set<E> setWith(E e1, E e2, E e3, E e4, E e5, E e6, E e7, E e8) {
return setFrom(iterableWith(e1, e2, e3, e4, e5, e6, e7, e8));
}
/**
* Returns an immutable {@code Set} instance over the type {@code E} containing the
* supplied elements. Due to the nature of a {@code Set}, any supplied duplicate
* elements will be removed.
*
* @param e1 The first element from which to construct a {@code Set}.
* @param e2 The second element from which to construct a {@code Set}.
* @param e3 The third element from which to construct a {@code Set}.
* @param e4 The fourth element from which to construct a {@code Set}.
* @param e5 The fifth element from which to construct a {@code Set}.
* @param e6 The sixth element from which to construct a {@code Set}.
* @param e7 The seventh element from which to construct a {@code Set}.
* @param e8 The eighth element from which to construct a {@code Set}.
* @param e9 The ninth element from which to construct a {@code Set}.
* @param <E> The type of the elements contained in the returned {@code Set}.
* @return A {@code Set} instance over type {@code E} containing the supplied elements.
*/
public static <E> Set<E> setWith(E e1, E e2, E e3, E e4, E e5, E e6, E e7, E e8, E e9) {
return setFrom(iterableWith(e1, e2, e3, e4, e5, e6, e7, e8, e9));
}
/**
* Returns an immutable {@code Set} instance over the type {@code E} containing the
* supplied elements. Due to the nature of a {@code Set}, any supplied duplicate
* elements will be removed.
*
* @param e1 The first element from which to construct a {@code Set}.
* @param e2 The second element from which to construct a {@code Set}.
* @param e3 The third element from which to construct a {@code Set}.
* @param e4 The fourth element from which to construct a {@code Set}.
* @param e5 The fifth element from which to construct a {@code Set}.
* @param e6 The sixth element from which to construct a {@code Set}.
* @param e7 The seventh element from which to construct a {@code Set}.
* @param e8 The eighth element from which to construct a {@code Set}.
* @param e9 The ninth element from which to construct a {@code Set}.
* @param e10 The tenth element from which to construct a {@code Set}.
* @param <E> The type of the elements contained in the returned {@code Set}.
* @return A {@code Set} instance over type {@code E} containing the supplied elements.
*/
public static <E> Set<E> setWith(E e1, E e2, E e3, E e4, E e5, E e6, E e7, E e8, E e9, E e10) {
return setFrom(iterableWith(e1, e2, e3, e4, e5, e6, e7, e8, e9, e10));
}
/**
* Returns an immutable {@code Set} instance over the type {@code E} containing the
* supplied elements. Due to the nature of a {@code Set}, any supplied duplicate
* elements will be removed.
* <p/>
* <p>Note that this literal uses a generic varargs parameter as the last argument in the
* argument list and as such will cause unchecked cast warnings. Explicit argument
* lists for up to ten arguments have been provided for convenience. In order to avoid
* the unchecked cast warnings, a {@link SetBuilder} can be used instead.</p>
*
* @param e1 The first element from which to construct a {@code Set}.
* @param e2 The second element from which to construct a {@code Set}.
* @param e3 The third element from which to construct a {@code Set}.
* @param e4 The fourth element from which to construct a {@code Set}.
* @param e5 The fifth element from which to construct a {@code Set}.
* @param e6 The sixth element from which to construct a {@code Set}.
* @param e7 The seventh element from which to construct a {@code Set}.
* @param e8 The eighth element from which to construct a {@code Set}.
* @param e9 The ninth element from which to construct a {@code Set}.
* @param e10 The tenth element from which to construct a {@code Set}.
* @param e11on The remaining elements from which to construct a {@code Set}.
* @param <E> The type of the elements contained in the returned {@code Set}.
* @return A {@code Set} instance over type {@code E} containing the supplied elements.
*/
public static <E> Set<E> setWith(E e1, E e2, E e3, E e4, E e5, E e6, E e7, E e8, E e9, E e10, E... e11on) {
return setBuilderFrom(iterableWith(e1, e2, e3, e4, e5, e6, e7, e8, e9, e10)).with(e11on).build();
}
/**
* Returns a {@code SetBuilder} containing no elements.
* <p/>
* <h4>Example Usage:</h4>
* A {@code SetBuilder} can be used to assemble a {@code Set} as follows:
* <blockquote>
* <pre>
* Set<Double> set = Literals.<Double>setBuilder()
* .with(1.56, 2.33, 3.1)
*           .and(4.04, 5.3, 6.0)
* .build()
* </pre>
* </blockquote>
* This is equivalent to the following:
* <blockquote>
* <pre>
*     Set<Double> set = Literals.setWith(4.04, 2.33, 3.1, 5.3, 6.0, 1.56);
* </pre>
* </blockquote>
* The advantage of the {@code SetBuilder} is that the set can be built up from
* individual objects, arrays or existing iterables. See {@link SetBuilder} for
* further details.
*
* @param <E> The type of the elements contained in the {@code SetBuilder}.
* @return A {@code SetBuilder} instance over the type {@code E} containing no elements.
*/
public static <E> SetBuilder<E> setBuilder() {
return new SetBuilder<E>();
}
/**
* Returns a {@code SetBuilder} over the type of the supplied {@code Class}
* containing no elements.
* <p/>
* <h4>Example Usage:</h4>
* A {@code SetBuilder} can be used to assemble a {@code Set} as follows:
* <blockquote>
* <pre>
* Set<Integer> set = setBuilderOf(Integer.class)
* .with(1, 1, 2)
* .and(4, 5, 5)
* .build()
* </pre>
* </blockquote>
* This is equivalent to the following:
* <blockquote>
* <pre>
* Set<Integer> set = Literals.setWith(1, 1, 2, 4, 5, 5);
* </pre>
* </blockquote>
* The advantage of the {@code SetBuilder} is that the set can be built up from
* individual objects, arrays or existing iterables. See {@link SetBuilder} for
* further details.
*
* @param elementClass A {@code Class} representing the type of elements
* contained in this {@code SetBuilder}
* @param <E> The type of the elements contained in the {@code SetBuilder}.
* @return A {@code SetBuilder} instance over the type {@code E} containing no
* elements.
*/
public static <E> SetBuilder<E> setBuilderOf(Class<E> elementClass) {
return new SetBuilder<E>();
}
/**
* Returns a {@code SetBuilder} over type {@code E} initialised with the elements
* contained in the supplied {@code Iterable}.
* <p/>
* <h4>Example Usage:</h4>
* A {@code SetBuilder} can be used to assemble a {@code Set} from two existing
* {@code Collection} instances as follows:
* <blockquote>
* <pre>
* Collection<Integer> firstCollection = Literals.collectionWith(1, 2, 3);
* Collection<Integer> secondCollection = Literals.collectionWith(3, 4, 5);
* Set<Integer> set = setBuilderFrom(firstCollection)
* .with(secondCollection)
* .build()
* </pre>
* </blockquote>
* This is equivalent to the following:
* <blockquote>
* <pre>
* Set<Integer> set = Literals.setWith(1, 2, 3, 4, 5);
* </pre>
* </blockquote>
* The advantage of the {@code SetBuilder} is that the set can be built up from
* individual objects, arrays or existing iterables. See {@link SetBuilder} for
* further details.
*
* @param elements An {@code Iterable} containing elements with which the
* {@code SetBuilder} should be initialised.
* @param <E> The type of the elements contained in the {@code SetBuilder}.
* @return A {@code SetBuilder} instance over the type {@code E} containing
* the elements from the supplied {@code Iterable}.
*/
public static <E> SetBuilder<E> setBuilderFrom(Iterable<? extends E> elements) {
return new SetBuilder<E>().with(elements);
}
/**
* Returns a {@code SetBuilder} over type {@code E} initialised with the elements
* contained in the supplied array.
* <p/>
* <h4>Example Usage:</h4>
* A {@code SetBuilder} can be used to assemble a {@code Set} from two existing
* arrays as follows:
* <blockquote>
* <pre>
* Integer[] firstArray = new Integer[]{1, 2, 3};
* Integer[] secondArray = new Integer[]{3, 4, 5};
* Set<Integer> set = setBuilderFrom(firstArray)
* .with(secondArray)
* .build()
* </pre>
* </blockquote>
* This is equivalent to the following:
* <blockquote>
* <pre>
* Set<Integer> set = Literals.setWith(1, 2, 3, 4, 5);
* </pre>
* </blockquote>
* The advantage of the {@code SetBuilder} is that the set can be built up from
* individual objects, arrays or existing iterables. See {@link SetBuilder} for
* further details.
*
* @param elementArray An array containing elements with which the
* {@code SetBuilder} should be initialised.
* @param <E> The type of the elements contained in the {@code SetBuilder}.
* @return A {@code SetBuilder} instance over the type {@code E} containing
* the elements from the supplied array.
*/
public static <E> SetBuilder<E> setBuilderFrom(E[] elementArray) {
return new SetBuilder<E>().with(elementArray);
}
/**
* Returns a {@code SetBuilder} instance over the type {@code E} containing the supplied
* element.
*
* @param e The element to be added to the {@code SetBuilder}.
* @param <E> The type of the elements contained in the returned {@code SetBuilder}.
* @return A {@code SetBuilder} instance over type {@code E} containing the supplied
* element.
*/
public static <E> SetBuilder<E> setBuilderWith(E e) {
return setBuilderFrom(iterableWith(e));
}
/**
* Returns a {@code SetBuilder} instance over the type {@code E} containing the supplied
* elements. Due to the nature of a {@code Set}, any supplied duplicate elements will be
* removed.
*
* @param e1 The first element to be added to the {@code SetBuilder}.
* @param e2 The second element to be added to the {@code SetBuilder}.
* @param <E> The type of the elements contained in the returned {@code SetBuilder}.
* @return A {@code SetBuilder} instance over type {@code E} containing the supplied
* elements.
*/
public static <E> SetBuilder<E> setBuilderWith(E e1, E e2) {
return setBuilderFrom(iterableWith(e1, e2));
}
/**
* Returns a {@code SetBuilder} instance over the type {@code E} containing the supplied
* elements. Due to the nature of a {@code Set}, any supplied duplicate elements will be
* removed.
*
* @param e1 The first element to be added to the {@code SetBuilder}.
* @param e2 The second element to be added to the {@code SetBuilder}.
* @param e3 The third element to be added to the {@code SetBuilder}.
* @param <E> The type of the elements contained in the returned {@code SetBuilder}.
* @return A {@code SetBuilder} instance over type {@code E} containing the supplied
* elements.
*/
public static <E> SetBuilder<E> setBuilderWith(E e1, E e2, E e3) {
return setBuilderFrom(iterableWith(e1, e2, e3));
}
/**
* Returns a {@code SetBuilder} instance over the type {@code E} containing the supplied
* elements. Due to the nature of a {@code Set}, any supplied duplicate elements will be
* removed.
*
* @param e1 The first element to be added to the {@code SetBuilder}.
* @param e2 The second element to be added to the {@code SetBuilder}.
* @param e3 The third element to be added to the {@code SetBuilder}.
* @param e4 The fourth element to be added to the {@code SetBuilder}.
* @param <E> The type of the elements contained in the returned {@code SetBuilder}.
* @return A {@code SetBuilder} instance over type {@code E} containing the supplied
* elements.
*/
public static <E> SetBuilder<E> setBuilderWith(E e1, E e2, E e3, E e4) {
return setBuilderFrom(iterableWith(e1, e2, e3, e4));
}
/**
* Returns a {@code SetBuilder} instance over the type {@code E} containing the supplied
* elements. Due to the nature of a {@code Set}, any supplied duplicate elements will be
* removed.
*
* @param e1 The first element to be added to the {@code SetBuilder}.
* @param e2 The second element to be added to the {@code SetBuilder}.
* @param e3 The third element to be added to the {@code SetBuilder}.
* @param e4 The fourth element to be added to the {@code SetBuilder}.
* @param e5 The fifth element to be added to the {@code SetBuilder}.
* @param <E> The type of the elements contained in the returned {@code SetBuilder}.
* @return A {@code SetBuilder} instance over type {@code E} containing the supplied
* elements.
*/
public static <E> SetBuilder<E> setBuilderWith(E e1, E e2, E e3, E e4, E e5) {
return setBuilderFrom(iterableWith(e1, e2, e3, e4, e5));
}
/**
* Returns a {@code SetBuilder} instance over the type {@code E} containing the supplied
* elements. Due to the nature of a {@code Set}, any supplied duplicate elements will be
* removed.
*
* @param e1 The first element to be added to the {@code SetBuilder}.
* @param e2 The second element to be added to the {@code SetBuilder}.
* @param e3 The third element to be added to the {@code SetBuilder}.
* @param e4 The fourth element to be added to the {@code SetBuilder}.
* @param e5 The fifth element to be added to the {@code SetBuilder}.
* @param e6 The sixth element to be added to the {@code SetBuilder}.
* @param <E> The type of the elements contained in the returned {@code SetBuilder}.
* @return A {@code SetBuilder} instance over type {@code E} containing the supplied
* elements.
*/
public static <E> SetBuilder<E> setBuilderWith(E e1, E e2, E e3, E e4, E e5, E e6) {
return setBuilderFrom(iterableWith(e1, e2, e3, e4, e5, e6));
}
/**
* Returns a {@code SetBuilder} instance over the type {@code E} containing the supplied
* elements. Due to the nature of a {@code Set}, any supplied duplicate elements will be
* removed.
*
* @param e1 The first element to be added to the {@code SetBuilder}.
* @param e2 The second element to be added to the {@code SetBuilder}.
* @param e3 The third element to be added to the {@code SetBuilder}.
* @param e4 The fourth element to be added to the {@code SetBuilder}.
* @param e5 The fifth element to be added to the {@code SetBuilder}.
* @param e6 The sixth element to be added to the {@code SetBuilder}.
* @param e7 The seventh element to be added to the {@code SetBuilder}.
* @param <E> The type of the elements contained in the returned {@code SetBuilder}.
* @return A {@code SetBuilder} instance over type {@code E} containing the supplied
* elements.
*/
public static <E> SetBuilder<E> setBuilderWith(E e1, E e2, E e3, E e4, E e5, E e6, E e7) {
return setBuilderFrom(iterableWith(e1, e2, e3, e4, e5, e6, e7));
}
/**
* Returns a {@code SetBuilder} instance over the type {@code E} containing the supplied
* elements. Due to the nature of a {@code Set}, any supplied duplicate elements will be
* removed.
*
* @param e1 The first element to be added to the {@code SetBuilder}.
* @param e2 The second element to be added to the {@code SetBuilder}.
* @param e3 The third element to be added to the {@code SetBuilder}.
* @param e4 The fourth element to be added to the {@code SetBuilder}.
* @param e5 The fifth element to be added to the {@code SetBuilder}.
* @param e6 The sixth element to be added to the {@code SetBuilder}.
* @param e7 The seventh element to be added to the {@code SetBuilder}.
* @param e8 The eighth element to be added to the {@code SetBuilder}.
* @param <E> The type of the elements contained in the returned {@code SetBuilder}.
* @return A {@code SetBuilder} instance over type {@code E} containing the supplied
* elements.
*/
public static <E> SetBuilder<E> setBuilderWith(E e1, E e2, E e3, E e4, E e5, E e6, E e7, E e8) {
return setBuilderFrom(iterableWith(e1, e2, e3, e4, e5, e6, e7, e8));
}
/**
* Returns a {@code SetBuilder} instance over the type {@code E} containing the supplied
* elements. Due to the nature of a {@code Set}, any supplied duplicate elements will be
* removed.
*
* @param e1 The first element to be added to the {@code SetBuilder}.
* @param e2 The second element to be added to the {@code SetBuilder}.
* @param e3 The third element to be added to the {@code SetBuilder}.
* @param e4 The fourth element to be added to the {@code SetBuilder}.
* @param e5 The fifth element to be added to the {@code SetBuilder}.
* @param e6 The sixth element to be added to the {@code SetBuilder}.
* @param e7 The seventh element to be added to the {@code SetBuilder}.
* @param e8 The eighth element to be added to the {@code SetBuilder}.
* @param e9 The ninth element to be added to the {@code SetBuilder}.
* @param <E> The type of the elements contained in the returned {@code SetBuilder}.
* @return A {@code SetBuilder} instance over type {@code E} containing the supplied
* elements.
*/
public static <E> SetBuilder<E> setBuilderWith(E e1, E e2, E e3, E e4, E e5, E e6, E e7, E e8, E e9) {
return setBuilderFrom(iterableWith(e1, e2, e3, e4, e5, e6, e7, e8, e9));
}
/**
* Returns a {@code SetBuilder} instance over the type {@code E} containing the supplied
* elements. Due to the nature of a {@code Set}, any supplied duplicate elements will be
* removed.
*
* @param e1 The first element to be added to the {@code SetBuilder}.
* @param e2 The second element to be added to the {@code SetBuilder}.
* @param e3 The third element to be added to the {@code SetBuilder}.
* @param e4 The fourth element to be added to the {@code SetBuilder}.
* @param e5 The fifth element to be added to the {@code SetBuilder}.
* @param e6 The sixth element to be added to the {@code SetBuilder}.
* @param e7 The seventh element to be added to the {@code SetBuilder}.
* @param e8 The eighth element to be added to the {@code SetBuilder}.
* @param e9 The ninth element to be added to the {@code SetBuilder}.
* @param e10 The tenth element to be added to the {@code SetBuilder}.
* @param <E> The type of the elements contained in the returned {@code SetBuilder}.
* @return A {@code SetBuilder} instance over type {@code E} containing the supplied
* elements.
*/
public static <E> SetBuilder<E> setBuilderWith(E e1, E e2, E e3, E e4, E e5, E e6, E e7, E e8, E e9, E e10) {
return setBuilderFrom(iterableWith(e1, e2, e3, e4, e5, e6, e7, e8, e9, e10));
}
/**
* Returns a {@code SetBuilder} instance over the type {@code E} containing the supplied
* elements. Due to the nature of a {@code Set}, any supplied duplicate elements will be
* removed.
*
* @param e1 The first element to be added to the {@code SetBuilder}.
* @param e2 The second element to be added to the {@code SetBuilder}.
* @param e3 The third element to be added to the {@code SetBuilder}.
* @param e4 The fourth element to be added to the {@code SetBuilder}.
* @param e5 The fifth element to be added to the {@code SetBuilder}.
* @param e6 The sixth element to be added to the {@code SetBuilder}.
* @param e7 The seventh element to be added to the {@code SetBuilder}.
* @param e8 The eighth element to be added to the {@code SetBuilder}.
* @param e9 The ninth element to be added to the {@code SetBuilder}.
* @param e10 The tenth element to be added to the {@code SetBuilder}.
* @param e11on The remaining elements to be added to the {@code SetBuilder}. The elements
* will be added to the {@code SetBuilder} in the order they are defined in the
* variadic argument.
* @param <E> The type of the elements contained in the returned {@code SetBuilder}.
* @return A {@code SetBuilder} instance over type {@code E} containing the supplied
* elements.
*/
    public static <E> SetBuilder<E> setBuilderWith(E e1, E e2, E e3, E e4, E e5, E e6, E e7, E e8, E e9, E e10, E... e11on) {
return setBuilderFrom(iterableWith(e1, e2, e3, e4, e5, e6, e7, e8, e9, e10)).with(e11on);
}
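    // Illustrative usage sketch (not part of the library API), assuming a static import of these
    // literal methods: duplicates collapse because the built collection is a Set, and the trailing
    // varargs overload takes any number of additional elements.
    //
    //     Set<String> colours = setBuilderWith("red", "green", "red").build();   // two elements
    //     Set<Integer> dozen = setBuilderWith(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12).build();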
public static <E> Multiset<E> multiset() {
return new MultisetBuilder<E>().build();
}
public static <E> Multiset<E> multiset(Class<? extends Multiset> multisetClass) {
return new MultisetBuilder<E>().build(multisetClass);
}
public static <E> Multiset<E> multisetOf(Class<E> elementClass) {
return new MultisetBuilder<E>().build();
}
public static <E> Multiset<E> multisetFrom(Iterable<? extends E> elements) {
return new MultisetBuilder<E>().with(elements).build();
}
public static <E> Multiset<E> multisetFrom(E[] elementArray) {
return new MultisetBuilder<E>().with(elementArray).build();
}
public static <E> MultisetBuilder<E> multisetBuilder() {
return new MultisetBuilder<E>();
}
public static <E> MultisetBuilder<E> multisetBuilderOf(Class<E> elementClass) {
return new MultisetBuilder<E>();
}
public static <E> MultisetBuilder<E> multisetBuilderFrom(Iterable<? extends E> elements) {
return new MultisetBuilder<E>().with(elements);
}
public static <E> MultisetBuilder<E> multisetBuilderFrom(E[] elementArray) {
return new MultisetBuilder<E>().with(elementArray);
}
public static <K, V> Map<K, V> map() {
return new MapBuilder<K, V>().build();
}
public static <K, V> Map<K, V> map(Class<? extends Map> mapClass) {
return new MapBuilder<K, V>().build(mapClass);
}
public static <K, V> Map<K, V> mapOf(Class<K> keyClass, Class<V> valueClass) {
return new MapBuilder<K, V>().build();
}
public static <K, V> Map<K, V> mapFromEntries(Iterable<? extends Map.Entry<K, V>> elements) {
return new MapBuilder<K, V>().with(elements).build();
}
public static <K, V> Map<K, V> mapFromEntries(Map.Entry<K, V>[] elementArray) {
return new MapBuilder<K, V>().with(elementArray).build();
}
public static <K, V> Map<K, V> mapFromPairs(Iterable<? extends Pair<K, V>> elements) {
return new MapBuilder<K, V>().withPairs(elements).build();
}
public static <K, V> Map<K, V> mapFromPairs(Pair<K, V>[] elementArray) {
return new MapBuilder<K, V>().withPairs(elementArray).build();
}
public static <K, V> Map<K, V> mapFromMaps(Iterable<? extends Map<K, V>> elements) {
return new MapBuilder<K, V>().withMaps(elements).build();
}
public static <K, V> Map<K, V> mapFromMaps(Map<K, V>[] elementArray) {
return new MapBuilder<K, V>().withMaps(elementArray).build();
}
public static <K, V> MapBuilder<K, V> mapBuilder() {
return new MapBuilder<K, V>();
}
public static <K, V> MapBuilder<K, V> mapBuilderOf(Class<K> keyClass, Class<V> valueClass) {
return new MapBuilder<K, V>();
}
public static <K, V> MapBuilder<K, V> mapBuilderFromEntries(Iterable<? extends Map.Entry<K, V>> entries) {
return new MapBuilder<K, V>().with(entries);
}
public static <K, V> MapBuilder<K, V> mapBuilderFromEntries(Map.Entry<K, V>[] entries) {
return new MapBuilder<K, V>().with(entries);
}
public static <K, V> MapBuilder<K, V> mapBuilderFromPairs(Iterable<? extends Pair<K, V>> entries) {
return new MapBuilder<K, V>().withPairs(entries);
}
public static <K, V> MapBuilder<K, V> mapBuilderFromPairs(Pair<K, V>[] entries) {
return new MapBuilder<K, V>().withPairs(entries);
}
public static <K, V> MapBuilder<K, V> mapBuilderFromMaps(Iterable<? extends Map<K, V>> entries) {
return new MapBuilder<K, V>().withMaps(entries);
}
public static <K, V> MapBuilder<K, V> mapBuilderFromMaps(Map<K, V>[] entries) {
return new MapBuilder<K, V>().withMaps(entries);
}
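    // Illustrative usage sketch (not part of the library API): the map factories above assemble a
    // Map in a single expression from entries, pairs or existing maps.
    //
    //     Map<String, Integer> scores = mapFromPairs(iterableWith(tuple("alice", 3), tuple("bob", 5)));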
public static <K, V> Map.Entry<K, V> mapEntryFor(K key, V value) {
return new AbstractMap.SimpleImmutableEntry<K, V>(key, value);
}
public static <K, V> Map.Entry<K, V> mapEntryFor(Pair<K, V> pair) {
return mapEntryFor(pair.getFirst(), pair.getSecond());
}
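    // Illustrative usage sketch (not part of the library API): mapEntryFor wraps a key and value in
    // an immutable Map.Entry, ready for the entry-based map factories in this class.
    //
    //     Map<String, Integer> ages = mapWith(mapEntryFor("alice", 30), mapEntryFor("bob", 25));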
    /**
     * Returns an empty array instance over the type {@code E}.
     *
     * @param elementClass A {@code Class} representing the type {@code E} of the
     *                     elements contained in the returned (empty) array.
     * @param <E> The type of the elements that would be contained by this array
     *            if it contained any.
     * @return An array instance over the type {@code E} containing no elements.
     */
@SuppressWarnings("unchecked")
public static <E> E[] arrayOf(Class<E> elementClass) {
return new ArrayBuilder<E>(elementClass).build();
}
@SuppressWarnings("unchecked")
public static <E> E[] arrayFrom(Iterable<E> elements) {
return new ArrayBuilder<E>().with(elements).build();
}
public static <E> E[] arrayFrom(Iterable<? extends E> elements, Class<E> elementClass) {
return new ArrayBuilder<E>(elementClass).with(elements).build();
}
public static <E> E[] arrayFrom(E[] elementArray) {
return new ArrayBuilder<E>().with(elementArray).build();
}
public static <E> E[] arrayFrom(E[] elementArray, Class<E> elementClass) {
return new ArrayBuilder<E>(elementClass).with(elementArray).build();
}
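    // Illustrative usage sketch (not part of the library API): supplying the element class allows
    // arrays to be built even when the source is empty or mixes concrete subtypes.
    //
    //     Number[] numbers = arrayFrom(iterableWith((Number) 1, 2.5), Number.class);
    //     String[] none = arrayOf(String.class);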
/**
* Returns an array over the type {@code E} containing the supplied element.
* <p/>
* <p>The {@code arrayWith} literals are useful in cases such as when an API
* is written to accept arrays of objects as arguments where a varargs style
* is more appropriate.</p>
*
* @param e An element from which to construct an array.
* @param <E> The type of the element contained in the returned array.
* @return An array instance over type {@code E} containing the supplied element.
*/
public static <E> E[] arrayWith(E e) {
return arrayFrom(iterableWith(e));
}
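    // Illustrative usage sketch (not part of the library API): arrayWith gives a varargs-like
    // literal at call sites where an array argument is required.
    //
    //     String[] singleton = arrayWith("only");
    //     String[] several = arrayWith("a", "b", "c");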
/**
* Returns an array over the type {@code E} containing the supplied elements.
* <p/>
* <p>The {@code arrayWith} literals are useful in cases such as when an API
* is written to accept arrays of objects as arguments where a varargs style
* is more appropriate.</p>
*
* @param e1 The first element from which to construct an array.
* @param e2 The second element from which to construct an array.
     * @param <E> The type of the elements contained in the returned array.
     * @return An array instance over type {@code E} containing the supplied elements.
*/
public static <E> E[] arrayWith(E e1, E e2) {
return arrayFrom(iterableWith(e1, e2));
}
/**
* Returns an array over the type {@code E} containing the supplied elements.
* <p/>
* <p>The {@code arrayWith} literals are useful in cases such as when an API
* is written to accept arrays of objects as arguments where a varargs style
* is more appropriate.</p>
*
* @param e1 The first element from which to construct an array.
* @param e2 The second element from which to construct an array.
* @param e3 The third element from which to construct an array.
     * @param <E> The type of the elements contained in the returned array.
     * @return An array instance over type {@code E} containing the supplied elements.
*/
public static <E> E[] arrayWith(E e1, E e2, E e3) {
return arrayFrom(iterableWith(e1, e2, e3));
}
/**
* Returns an array over the type {@code E} containing the supplied elements.
* <p/>
* <p>The {@code arrayWith} literals are useful in cases such as when an API
* is written to accept arrays of objects as arguments where a varargs style
* is more appropriate.</p>
*
* @param e1 The first element from which to construct an array.
* @param e2 The second element from which to construct an array.
* @param e3 The third element from which to construct an array.
* @param e4 The fourth element from which to construct an array.
     * @param <E> The type of the elements contained in the returned array.
     * @return An array instance over type {@code E} containing the supplied elements.
*/
public static <E> E[] arrayWith(E e1, E e2, E e3, E e4) {
return arrayFrom(iterableWith(e1, e2, e3, e4));
}
/**
* Returns an array over the type {@code E} containing the supplied elements.
* <p/>
* <p>The {@code arrayWith} literals are useful in cases such as when an API
* is written to accept arrays of objects as arguments where a varargs style
* is more appropriate.</p>
*
* @param e1 The first element from which to construct an array.
* @param e2 The second element from which to construct an array.
* @param e3 The third element from which to construct an array.
* @param e4 The fourth element from which to construct an array.
* @param e5 The fifth element from which to construct an array.
     * @param <E> The type of the elements contained in the returned array.
     * @return An array instance over type {@code E} containing the supplied elements.
*/
public static <E> E[] arrayWith(E e1, E e2, E e3, E e4, E e5) {
return arrayFrom(iterableWith(e1, e2, e3, e4, e5));
}
/**
* Returns an array over the type {@code E} containing the supplied elements.
* <p/>
* <p>The {@code arrayWith} literals are useful in cases such as when an API
* is written to accept arrays of objects as arguments where a varargs style
* is more appropriate.</p>
*
* @param e1 The first element from which to construct an array.
* @param e2 The second element from which to construct an array.
* @param e3 The third element from which to construct an array.
* @param e4 The fourth element from which to construct an array.
* @param e5 The fifth element from which to construct an array.
* @param e6 The sixth element from which to construct an array.
     * @param <E> The type of the elements contained in the returned array.
     * @return An array instance over type {@code E} containing the supplied elements.
*/
public static <E> E[] arrayWith(E e1, E e2, E e3, E e4, E e5, E e6) {
return arrayFrom(iterableWith(e1, e2, e3, e4, e5, e6));
}
/**
* Returns an array over the type {@code E} containing the supplied elements.
* <p/>
* <p>The {@code arrayWith} literals are useful in cases such as when an API
* is written to accept arrays of objects as arguments where a varargs style
* is more appropriate.</p>
*
* @param e1 The first element from which to construct an array.
* @param e2 The second element from which to construct an array.
* @param e3 The third element from which to construct an array.
* @param e4 The fourth element from which to construct an array.
* @param e5 The fifth element from which to construct an array.
* @param e6 The sixth element from which to construct an array.
* @param e7 The seventh element from which to construct an array.
     * @param <E> The type of the elements contained in the returned array.
     * @return An array instance over type {@code E} containing the supplied elements.
*/
public static <E> E[] arrayWith(E e1, E e2, E e3, E e4, E e5, E e6, E e7) {
return arrayFrom(iterableWith(e1, e2, e3, e4, e5, e6, e7));
}
/**
* Returns an array over the type {@code E} containing the supplied elements.
* <p/>
* <p>The {@code arrayWith} literals are useful in cases such as when an API
* is written to accept arrays of objects as arguments where a varargs style
* is more appropriate.</p>
*
* @param e1 The first element from which to construct an array.
* @param e2 The second element from which to construct an array.
* @param e3 The third element from which to construct an array.
* @param e4 The fourth element from which to construct an array.
* @param e5 The fifth element from which to construct an array.
* @param e6 The sixth element from which to construct an array.
* @param e7 The seventh element from which to construct an array.
* @param e8 The eighth element from which to construct an array.
     * @param <E> The type of the elements contained in the returned array.
     * @return An array instance over type {@code E} containing the supplied elements.
*/
public static <E> E[] arrayWith(E e1, E e2, E e3, E e4, E e5, E e6, E e7, E e8) {
return arrayFrom(iterableWith(e1, e2, e3, e4, e5, e6, e7, e8));
}
/**
* Returns an array over the type {@code E} containing the supplied elements.
* <p/>
* <p>The {@code arrayWith} literals are useful in cases such as when an API
* is written to accept arrays of objects as arguments where a varargs style
* is more appropriate.</p>
*
* @param e1 The first element from which to construct an array.
* @param e2 The second element from which to construct an array.
* @param e3 The third element from which to construct an array.
* @param e4 The fourth element from which to construct an array.
* @param e5 The fifth element from which to construct an array.
* @param e6 The sixth element from which to construct an array.
* @param e7 The seventh element from which to construct an array.
* @param e8 The eighth element from which to construct an array.
* @param e9 The ninth element from which to construct an array.
     * @param <E> The type of the elements contained in the returned array.
     * @return An array instance over type {@code E} containing the supplied elements.
*/
public static <E> E[] arrayWith(E e1, E e2, E e3, E e4, E e5, E e6, E e7, E e8, E e9) {
return arrayFrom(iterableWith(e1, e2, e3, e4, e5, e6, e7, e8, e9));
}
/**
* Returns an array over the type {@code E} containing the supplied elements.
* <p/>
* <p>The {@code arrayWith} literals are useful in cases such as when an API
* is written to accept arrays of objects as arguments where a varargs style
* is more appropriate.</p>
*
* @param e1 The first element from which to construct an array.
* @param e2 The second element from which to construct an array.
* @param e3 The third element from which to construct an array.
* @param e4 The fourth element from which to construct an array.
* @param e5 The fifth element from which to construct an array.
* @param e6 The sixth element from which to construct an array.
* @param e7 The seventh element from which to construct an array.
* @param e8 The eighth element from which to construct an array.
* @param e9 The ninth element from which to construct an array.
* @param e10 The tenth element from which to construct an array.
     * @param <E> The type of the elements contained in the returned array.
     * @return An array instance over type {@code E} containing the supplied elements.
*/
public static <E> E[] arrayWith(E e1, E e2, E e3, E e4, E e5, E e6, E e7, E e8, E e9, E e10) {
return arrayFrom(iterableWith(e1, e2, e3, e4, e5, e6, e7, e8, e9, e10));
}
/**
* Returns an array over the type {@code E} containing the supplied elements.
* <p/>
* <p>The {@code arrayWith} literals are useful in cases such as when an API
* is written to accept arrays of objects as arguments where a varargs style
* is more appropriate.</p>
*
* @param e1 The first element from which to construct an array.
* @param e2 The second element from which to construct an array.
* @param e3 The third element from which to construct an array.
* @param e4 The fourth element from which to construct an array.
* @param e5 The fifth element from which to construct an array.
* @param e6 The sixth element from which to construct an array.
* @param e7 The seventh element from which to construct an array.
* @param e8 The eighth element from which to construct an array.
* @param e9 The ninth element from which to construct an array.
* @param e10 The tenth element from which to construct an array.
* @param e11on The remaining elements from which to construct an array.
     * @param <E> The type of the elements contained in the returned array.
     * @return An array instance over type {@code E} containing the supplied elements.
*/
public static <E> E[] arrayWith(E e1, E e2, E e3, E e4, E e5, E e6, E e7, E e8, E e9, E e10, E... e11on) {
return arrayFrom(iterableBuilderWith(e1, e2, e3, e4, e5, e6, e7, e8, e9, e10).and(e11on).build());
}
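    // Illustrative usage sketch (not part of the library API): past ten elements the trailing
    // varargs parameter takes the remainder, preserving the supplied order.
    //
    //     Integer[] twelve = arrayWith(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12);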
/**
* Returns an {@code ArrayBuilder} containing no elements. When asked to
     * build an array, the element class will be inferred from the added elements,
* which means empty arrays and mixed concrete type arrays cannot be constructed.
* <p/>
* <h4>Example Usage:</h4>
* An {@code ArrayBuilder} can be used to assemble an array as follows:
* <blockquote>
* <pre>
* Integer[] array = Literals.<Integer>arrayBuilder()
* .with(1, 2, 3)
* .and(4, 5, 6)
     *             .build();
* </pre>
* </blockquote>
* This is equivalent to the following:
* <blockquote>
* <pre>
     * Integer[] array = new Integer[]{1, 2, 3, 4, 5, 6};
* </pre>
* </blockquote>
* The advantage of the {@code ArrayBuilder} is that the array can be built up from
* individual objects, iterables or existing arrays. See {@link ArrayBuilder} for
* further details.
*
* @param <E> The type of the elements contained in the {@code ArrayBuilder}.
* @return An {@code ArrayBuilder} instance over the type {@code E} containing no elements.
*/
public static <E> ArrayBuilder<E> arrayBuilder() {
return new ArrayBuilder<E>();
}
/**
* Returns an {@code ArrayBuilder} over the type of the supplied {@code Class}
* containing no elements. When asked to build an array, the supplied element
     * class will be used, allowing empty arrays and mixed concrete type arrays to
* be constructed.
* <p/>
* <h4>Example Usage:</h4>
* An {@code ArrayBuilder} can be used to assemble an array as follows:
* <blockquote>
* <pre>
* Integer[] array = arrayBuilderOf(Integer.class)
* .with(1, 2, 3)
* .and(4, 5, 6)
     *             .build();
* </pre>
* </blockquote>
* This is equivalent to the following:
* <blockquote>
* <pre>
     * Integer[] array = new Integer[]{1, 2, 3, 4, 5, 6};
* </pre>
* </blockquote>
* The advantage of the {@code ArrayBuilder} is that the array can be built up from
* individual objects, iterables or existing arrays. See {@link ArrayBuilder} for
* further details.
*
* @param <E> The type of the elements contained in the {@code ArrayBuilder}.
* @param elementClass A {@code Class} representing the type of elements
* contained in this {@code ArrayBuilder} and the type represented
* by the built array.
* @return An {@code ArrayBuilder} instance over the type {@code E} containing no elements.
*/
public static <E> ArrayBuilder<E> arrayBuilderOf(Class<E> elementClass) {
return new ArrayBuilder<E>(elementClass);
}
/**
* Returns an {@code ArrayBuilder} over type {@code E} initialised with the elements
* contained in the supplied {@code Iterable}. When asked to build an array, the
     * element class will be inferred from the added elements, which means empty arrays
* and mixed concrete type arrays cannot be constructed.
* <p/>
* <h4>Example Usage:</h4>
* An {@code ArrayBuilder} can be used to assemble an array from two existing
* {@code Collection} instances as follows:
* <blockquote>
* <pre>
* Collection<Integer> firstCollection = Literals.collectionWith(1, 2, 3);
* Collection<Integer> secondCollection = Literals.collectionWith(3, 4, 5);
* Integer[] array = arrayBuilderFrom(firstCollection)
* .with(secondCollection)
     *           .build();
* </pre>
* </blockquote>
* This is equivalent to the following:
* <blockquote>
* <pre>
* Integer[] array = new Integer[]{1, 2, 3, 3, 4, 5};
* </pre>
* </blockquote>
* The advantage of the {@code ArrayBuilder} is that the array can be built up from
* individual objects, iterables or existing arrays. See {@link ArrayBuilder} for
* further details.
*
* @param elements An {@code Iterable} containing elements with which the
* {@code ArrayBuilder} should be initialised.
* @param <E> The type of the elements contained in the {@code ArrayBuilder}.
* @return An {@code ArrayBuilder} instance over the type {@code E} containing
* the elements from the supplied {@code Iterable}.
*/
public static <E> ArrayBuilder<E> arrayBuilderFrom(Iterable<E> elements) {
return new ArrayBuilder<E>().with(elements);
}
/**
* Returns an {@code ArrayBuilder} over the type of the supplied {@code Class}
* initialised with the elements contained in the supplied {@code Iterable}.
     * When asked to build an array, the supplied element class will be used, allowing
* empty arrays and mixed concrete type arrays to be constructed.
* <p/>
* <h4>Example Usage:</h4>
* An {@code ArrayBuilder} can be used to assemble an array from two existing
* {@code Collection} instances as follows:
* <blockquote>
* <pre>
* Collection<Integer> firstCollection = Literals.collectionWith(1, 2, 3);
* Collection<Integer> secondCollection = Literals.collectionWith(3, 4, 5);
* Integer[] array = arrayBuilderFrom(firstCollection, Integer.class)
* .with(secondCollection)
     *           .build();
* </pre>
* </blockquote>
* This is equivalent to the following:
* <blockquote>
* <pre>
* Integer[] array = new Integer[]{1, 2, 3, 3, 4, 5};
* </pre>
* </blockquote>
* The advantage of the {@code ArrayBuilder} is that the array can be built up from
* individual objects, iterables or existing arrays. See {@link ArrayBuilder} for
* further details.
*
* @param elements An {@code Iterable} containing elements with which the
* {@code ArrayBuilder} should be initialised.
* @param elementClass A {@code Class} representing the type of elements
* contained in this {@code ArrayBuilder} and the type represented
* by the built array.
* @param <E> The type of the elements contained in the {@code ArrayBuilder}.
* @return An {@code ArrayBuilder} instance over the type {@code E} containing
* the elements from the supplied {@code Iterable}.
*/
public static <E> ArrayBuilder<E> arrayBuilderFrom(Iterable<? extends E> elements, Class<E> elementClass) {
return new ArrayBuilder<E>(elementClass).with(elements);
}
/**
* Returns an {@code ArrayBuilder} over type {@code E} initialised with the elements
* contained in the supplied array. When asked to build an array, the element class
     * will be inferred from the added elements, which means empty arrays and mixed
* concrete type arrays cannot be constructed.
* <p/>
* <h4>Example Usage:</h4>
* An {@code ArrayBuilder} can be used to assemble an array from two existing
* arrays as follows:
* <blockquote>
* <pre>
* Integer[] firstArray = new Integer[]{1, 2, 3};
* Integer[] secondArray = new Integer[]{3, 4, 5};
* Integer[] array = arrayBuilderFrom(firstArray)
* .with(secondArray)
     *           .build();
* </pre>
* </blockquote>
* This is equivalent to the following:
* <blockquote>
* <pre>
* Integer[] array = new Integer[]{1, 2, 3, 3, 4, 5};
* </pre>
* </blockquote>
* The advantage of the {@code ArrayBuilder} is that the array can be built up from
* individual objects, iterables or existing arrays. See {@link ArrayBuilder} for
* further details.
*
* @param elementArray An array containing elements with which the {@code ArrayBuilder}
* should be initialised.
* @param <E> The type of the elements contained in the {@code ArrayBuilder}.
* @return An {@code ArrayBuilder} instance over the type {@code E} containing
* the elements from the supplied array.
*/
public static <E> ArrayBuilder<E> arrayBuilderFrom(E[] elementArray) {
return new ArrayBuilder<E>().with(elementArray);
}
/**
* Returns an {@code ArrayBuilder} over the type of the supplied {@code Class}
* initialised with the elements contained in the supplied array. When asked to
     * build an array, the supplied element class will be used, allowing empty arrays
* and mixed concrete type arrays to be constructed.
* <p/>
* <h4>Example Usage:</h4>
* An {@code ArrayBuilder} can be used to assemble an array from two existing
* arrays as follows:
* <blockquote>
* <pre>
* Integer[] firstArray = new Integer[]{1, 2, 3};
* Integer[] secondArray = new Integer[]{3, 4, 5};
* Integer[] array = arrayBuilderFrom(firstArray, Integer.class)
* .with(secondArray)
     *           .build();
* </pre>
* </blockquote>
* This is equivalent to the following:
* <blockquote>
* <pre>
* Integer[] array = new Integer[]{1, 2, 3, 3, 4, 5};
* </pre>
* </blockquote>
* The advantage of the {@code ArrayBuilder} is that the array can be built up from
* individual objects, iterables or existing arrays. See {@link ArrayBuilder} for
* further details.
*
* @param elementArray An array containing elements with which the
* {@code ArrayBuilder} should be initialised.
* @param elementClass A {@code Class} representing the type of elements
* contained in this {@code ArrayBuilder} and the type represented
* by the built array.
* @param <E> The type of the elements contained in the {@code ArrayBuilder}.
* @return An {@code ArrayBuilder} instance over the type {@code E} containing
* the elements from the supplied array.
*/
public static <E> ArrayBuilder<E> arrayBuilderFrom(E[] elementArray, Class<E> elementClass) {
return new ArrayBuilder<E>(elementClass).with(elementArray);
}
/**
* Returns an {@code ArrayBuilder} instance over the type {@code E} containing the
* supplied element.
*
* @param e The element to be added to the {@code ArrayBuilder}.
* @param <E> The type of the elements contained in the returned {@code ArrayBuilder}.
* @return An {@code ArrayBuilder} instance over type {@code E} containing the supplied
* element.
*/
public static <E> ArrayBuilder<E> arrayBuilderWith(E e) {
return arrayBuilderFrom(iterableWith(e));
}
/**
* Returns an {@code ArrayBuilder} instance over the type {@code E} containing the supplied
* elements. The supplied elements are added to the {@code ArrayBuilder} instance in the same
* order as they are defined in the argument list.
* <p/>
* <p>Note that due to type erasure, the supplied elements must all be of the same concrete
     * type; otherwise the array cannot be instantiated. If an array needs to be constructed
* from elements of different concrete types, use an {@code ArrayBuilder} directly, passing
* in the class of the elements to be contained in the resulting array. For example:
* <blockquote>
* <pre>
* PartTimeEmployee partTimeEmployee = new PartTimeEmployee("Designer", "John");
* FullTimeEmployee fullTimeEmployee = new FullTimeEmployee("Manufacturer", "Fred");
* HourlyEmployee hourlyEmployee = new HourlyEmployee("Materials Consultant", "Andy");
*
* Employee[] employees = arrayBuilderOf(Employee.class)
* .with(partTimeEmployee, fullTimeEmployee, hourlyEmployee)
* .build();
* </pre>
* </blockquote>
* </p>
*
* @param e1 The first element to be added to the {@code ArrayBuilder}.
* @param e2 The second element to be added to the {@code ArrayBuilder}.
* @param <E> The type of the elements contained in the returned {@code ArrayBuilder}.
* @return An {@code ArrayBuilder} instance over type {@code E} containing the supplied
* elements.
*/
public static <E> ArrayBuilder<E> arrayBuilderWith(E e1, E e2) {
return arrayBuilderFrom(iterableWith(e1, e2));
}
/**
* Returns an {@code ArrayBuilder} instance over the type {@code E} containing the supplied
* elements. The supplied elements are added to the {@code ArrayBuilder} instance in the same
* order as they are defined in the argument list.
* <p/>
* <p>Note that due to type erasure, the supplied elements must all be of the same concrete
     * type; otherwise the array cannot be instantiated. If an array needs to be constructed
* from elements of different concrete types, use an {@code ArrayBuilder} directly, passing
* in the class of the elements to be contained in the resulting array. For example:
* <blockquote>
* <pre>
* PartTimeEmployee partTimeEmployee = new PartTimeEmployee("Designer", "John");
* FullTimeEmployee fullTimeEmployee = new FullTimeEmployee("Manufacturer", "Fred");
* HourlyEmployee hourlyEmployee = new HourlyEmployee("Materials Consultant", "Andy");
*
* Employee[] employees = arrayBuilderOf(Employee.class)
* .with(partTimeEmployee, fullTimeEmployee, hourlyEmployee)
* .build();
* </pre>
* </blockquote>
* </p>
*
* @param e1 The first element to be added to the {@code ArrayBuilder}.
* @param e2 The second element to be added to the {@code ArrayBuilder}.
* @param e3 The third element to be added to the {@code ArrayBuilder}.
* @param <E> The type of the elements contained in the returned {@code ArrayBuilder}.
* @return An {@code ArrayBuilder} instance over type {@code E} containing the supplied
* elements.
*/
public static <E> ArrayBuilder<E> arrayBuilderWith(E e1, E e2, E e3) {
return arrayBuilderFrom(iterableWith(e1, e2, e3));
}
/**
* Returns an {@code ArrayBuilder} instance over the type {@code E} containing the supplied
* elements. The supplied elements are added to the {@code ArrayBuilder} instance in the same
* order as they are defined in the argument list.
* <p/>
* <p>Note that due to type erasure, the supplied elements must all be of the same concrete
     * type; otherwise the array cannot be instantiated. If an array needs to be constructed
* from elements of different concrete types, use an {@code ArrayBuilder} directly, passing
* in the class of the elements to be contained in the resulting array. For example:
* <blockquote>
* <pre>
* PartTimeEmployee partTimeEmployee = new PartTimeEmployee("Designer", "John");
* FullTimeEmployee fullTimeEmployee = new FullTimeEmployee("Manufacturer", "Fred");
* HourlyEmployee hourlyEmployee = new HourlyEmployee("Materials Consultant", "Andy");
*
* Employee[] employees = arrayBuilderOf(Employee.class)
* .with(partTimeEmployee, fullTimeEmployee, hourlyEmployee)
* .build();
* </pre>
* </blockquote>
* </p>
*
* @param e1 The first element to be added to the {@code ArrayBuilder}.
* @param e2 The second element to be added to the {@code ArrayBuilder}.
* @param e3 The third element to be added to the {@code ArrayBuilder}.
* @param e4 The fourth element to be added to the {@code ArrayBuilder}.
* @param <E> The type of the elements contained in the returned {@code ArrayBuilder}.
* @return An {@code ArrayBuilder} instance over type {@code E} containing the supplied
* elements.
*/
public static <E> ArrayBuilder<E> arrayBuilderWith(E e1, E e2, E e3, E e4) {
return arrayBuilderFrom(iterableWith(e1, e2, e3, e4));
}
/**
* Returns an {@code ArrayBuilder} instance over the type {@code E} containing the supplied
* elements. The supplied elements are added to the {@code ArrayBuilder} instance in the same
* order as they are defined in the argument list.
* <p/>
* <p>Note that due to type erasure, the supplied elements must all be of the same concrete
     * type; otherwise the array cannot be instantiated. If an array needs to be constructed
* from elements of different concrete types, use an {@code ArrayBuilder} directly, passing
* in the class of the elements to be contained in the resulting array. For example:
* <blockquote>
* <pre>
* PartTimeEmployee partTimeEmployee = new PartTimeEmployee("Designer", "John");
* FullTimeEmployee fullTimeEmployee = new FullTimeEmployee("Manufacturer", "Fred");
* HourlyEmployee hourlyEmployee = new HourlyEmployee("Materials Consultant", "Andy");
*
* Employee[] employees = arrayBuilderOf(Employee.class)
* .with(partTimeEmployee, fullTimeEmployee, hourlyEmployee)
* .build();
* </pre>
* </blockquote>
* </p>
*
* @param e1 The first element to be added to the {@code ArrayBuilder}.
* @param e2 The second element to be added to the {@code ArrayBuilder}.
* @param e3 The third element to be added to the {@code ArrayBuilder}.
* @param e4 The fourth element to be added to the {@code ArrayBuilder}.
* @param e5 The fifth element to be added to the {@code ArrayBuilder}.
* @param <E> The type of the elements contained in the returned {@code ArrayBuilder}.
* @return An {@code ArrayBuilder} instance over type {@code E} containing the supplied
* elements.
*/
public static <E> ArrayBuilder<E> arrayBuilderWith(E e1, E e2, E e3, E e4, E e5) {
return arrayBuilderFrom(iterableWith(e1, e2, e3, e4, e5));
}
/**
* Returns an {@code ArrayBuilder} instance over the type {@code E} containing the supplied
* elements. The supplied elements are added to the {@code ArrayBuilder} instance in the same
* order as they are defined in the argument list.
* <p/>
* <p>Note that due to type erasure, the supplied elements must all be of the same concrete
     * type; otherwise the array cannot be instantiated. If an array needs to be constructed
* from elements of different concrete types, use an {@code ArrayBuilder} directly, passing
* in the class of the elements to be contained in the resulting array. For example:
* <blockquote>
* <pre>
* PartTimeEmployee partTimeEmployee = new PartTimeEmployee("Designer", "John");
* FullTimeEmployee fullTimeEmployee = new FullTimeEmployee("Manufacturer", "Fred");
* HourlyEmployee hourlyEmployee = new HourlyEmployee("Materials Consultant", "Andy");
*
* Employee[] employees = arrayBuilderOf(Employee.class)
* .with(partTimeEmployee, fullTimeEmployee, hourlyEmployee)
* .build();
* </pre>
* </blockquote>
* </p>
*
* @param e1 The first element to be added to the {@code ArrayBuilder}.
* @param e2 The second element to be added to the {@code ArrayBuilder}.
* @param e3 The third element to be added to the {@code ArrayBuilder}.
* @param e4 The fourth element to be added to the {@code ArrayBuilder}.
* @param e5 The fifth element to be added to the {@code ArrayBuilder}.
* @param e6 The sixth element to be added to the {@code ArrayBuilder}.
* @param <E> The type of the elements contained in the returned {@code ArrayBuilder}.
* @return An {@code ArrayBuilder} instance over type {@code E} containing the supplied
* elements.
*/
public static <E> ArrayBuilder<E> arrayBuilderWith(E e1, E e2, E e3, E e4, E e5, E e6) {
return arrayBuilderFrom(iterableWith(e1, e2, e3, e4, e5, e6));
}
/**
* Returns an {@code ArrayBuilder} instance over the type {@code E} containing the supplied
* elements. The supplied elements are added to the {@code ArrayBuilder} instance in the same
* order as they are defined in the argument list.
* <p/>
* <p>Note that due to type erasure, the supplied elements must all be of the same concrete
     * type; otherwise the array cannot be instantiated. If an array needs to be constructed
* from elements of different concrete types, use an {@code ArrayBuilder} directly, passing
* in the class of the elements to be contained in the resulting array. For example:
* <blockquote>
* <pre>
* PartTimeEmployee partTimeEmployee = new PartTimeEmployee("Designer", "John");
* FullTimeEmployee fullTimeEmployee = new FullTimeEmployee("Manufacturer", "Fred");
* HourlyEmployee hourlyEmployee = new HourlyEmployee("Materials Consultant", "Andy");
*
* Employee[] employees = arrayBuilderOf(Employee.class)
* .with(partTimeEmployee, fullTimeEmployee, hourlyEmployee)
* .build();
* </pre>
* </blockquote>
* </p>
*
* @param e1 The first element to be added to the {@code ArrayBuilder}.
* @param e2 The second element to be added to the {@code ArrayBuilder}.
* @param e3 The third element to be added to the {@code ArrayBuilder}.
* @param e4 The fourth element to be added to the {@code ArrayBuilder}.
* @param e5 The fifth element to be added to the {@code ArrayBuilder}.
* @param e6 The sixth element to be added to the {@code ArrayBuilder}.
* @param e7 The seventh element to be added to the {@code ArrayBuilder}.
* @param <E> The type of the elements contained in the returned {@code ArrayBuilder}.
* @return An {@code ArrayBuilder} instance over type {@code E} containing the supplied
* elements.
*/
public static <E> ArrayBuilder<E> arrayBuilderWith(E e1, E e2, E e3, E e4, E e5, E e6, E e7) {
return arrayBuilderFrom(iterableWith(e1, e2, e3, e4, e5, e6, e7));
}
/**
* Returns an {@code ArrayBuilder} instance over the type {@code E} containing the supplied
* elements. The supplied elements are added to the {@code ArrayBuilder} instance in the same
* order as they are defined in the argument list.
* <p/>
* <p>Note that due to type erasure, the supplied elements must all be of the same concrete
     * type; otherwise the array cannot be instantiated. If an array needs to be constructed
* from elements of different concrete types, use an {@code ArrayBuilder} directly, passing
* in the class of the elements to be contained in the resulting array. For example:
* <blockquote>
* <pre>
* PartTimeEmployee partTimeEmployee = new PartTimeEmployee("Designer", "John");
* FullTimeEmployee fullTimeEmployee = new FullTimeEmployee("Manufacturer", "Fred");
* HourlyEmployee hourlyEmployee = new HourlyEmployee("Materials Consultant", "Andy");
*
* Employee[] employees = arrayBuilderOf(Employee.class)
* .with(partTimeEmployee, fullTimeEmployee, hourlyEmployee)
* .build();
* </pre>
* </blockquote>
* </p>
*
* @param e1 The first element to be added to the {@code ArrayBuilder}.
* @param e2 The second element to be added to the {@code ArrayBuilder}.
* @param e3 The third element to be added to the {@code ArrayBuilder}.
* @param e4 The fourth element to be added to the {@code ArrayBuilder}.
* @param e5 The fifth element to be added to the {@code ArrayBuilder}.
* @param e6 The sixth element to be added to the {@code ArrayBuilder}.
* @param e7 The seventh element to be added to the {@code ArrayBuilder}.
* @param e8 The eighth element to be added to the {@code ArrayBuilder}.
* @param <E> The type of the elements contained in the returned {@code ArrayBuilder}.
* @return An {@code ArrayBuilder} instance over type {@code E} containing the supplied
* elements.
*/
public static <E> ArrayBuilder<E> arrayBuilderWith(E e1, E e2, E e3, E e4, E e5, E e6, E e7, E e8) {
return arrayBuilderFrom(iterableWith(e1, e2, e3, e4, e5, e6, e7, e8));
}
/**
* Returns an {@code ArrayBuilder} instance over the type {@code E} containing the supplied
* elements. The supplied elements are added to the {@code ArrayBuilder} instance in the same
* order as they are defined in the argument list.
* <p/>
* <p>Note that due to type erasure, the supplied elements must all be of the same concrete
     * type; otherwise the array cannot be instantiated. If an array needs to be constructed
* from elements of different concrete types, use an {@code ArrayBuilder} directly, passing
* in the class of the elements to be contained in the resulting array. For example:
* <blockquote>
* <pre>
* PartTimeEmployee partTimeEmployee = new PartTimeEmployee("Designer", "John");
* FullTimeEmployee fullTimeEmployee = new FullTimeEmployee("Manufacturer", "Fred");
* HourlyEmployee hourlyEmployee = new HourlyEmployee("Materials Consultant", "Andy");
*
* Employee[] employees = arrayBuilderOf(Employee.class)
* .with(partTimeEmployee, fullTimeEmployee, hourlyEmployee)
* .build();
* </pre>
* </blockquote>
* </p>
*
* @param e1 The first element to be added to the {@code ArrayBuilder}.
* @param e2 The second element to be added to the {@code ArrayBuilder}.
* @param e3 The third element to be added to the {@code ArrayBuilder}.
* @param e4 The fourth element to be added to the {@code ArrayBuilder}.
* @param e5 The fifth element to be added to the {@code ArrayBuilder}.
* @param e6 The sixth element to be added to the {@code ArrayBuilder}.
* @param e7 The seventh element to be added to the {@code ArrayBuilder}.
* @param e8 The eighth element to be added to the {@code ArrayBuilder}.
* @param e9 The ninth element to be added to the {@code ArrayBuilder}.
* @param <E> The type of the elements contained in the returned {@code ArrayBuilder}.
* @return An {@code ArrayBuilder} instance over type {@code E} containing the supplied
* elements.
*/
public static <E> ArrayBuilder<E> arrayBuilderWith(E e1, E e2, E e3, E e4, E e5, E e6, E e7, E e8, E e9) {
return arrayBuilderFrom(iterableWith(e1, e2, e3, e4, e5, e6, e7, e8, e9));
}
/**
* Returns an {@code ArrayBuilder} instance over the type {@code E} containing the supplied
* elements. The supplied elements are added to the {@code ArrayBuilder} instance in the same
* order as they are defined in the argument list.
* <p/>
* <p>Note that due to type erasure, the supplied elements must all be of the same concrete
     * type; otherwise the array cannot be instantiated. If an array needs to be constructed
* from elements of different concrete types, use an {@code ArrayBuilder} directly, passing
* in the class of the elements to be contained in the resulting array. For example:
* <blockquote>
* <pre>
* PartTimeEmployee partTimeEmployee = new PartTimeEmployee("Designer", "John");
* FullTimeEmployee fullTimeEmployee = new FullTimeEmployee("Manufacturer", "Fred");
* HourlyEmployee hourlyEmployee = new HourlyEmployee("Materials Consultant", "Andy");
*
* Employee[] employees = arrayBuilderOf(Employee.class)
* .with(partTimeEmployee, fullTimeEmployee, hourlyEmployee)
* .build();
* </pre>
* </blockquote>
* </p>
*
* @param e1 The first element to be added to the {@code ArrayBuilder}.
* @param e2 The second element to be added to the {@code ArrayBuilder}.
* @param e3 The third element to be added to the {@code ArrayBuilder}.
* @param e4 The fourth element to be added to the {@code ArrayBuilder}.
* @param e5 The fifth element to be added to the {@code ArrayBuilder}.
* @param e6 The sixth element to be added to the {@code ArrayBuilder}.
* @param e7 The seventh element to be added to the {@code ArrayBuilder}.
* @param e8 The eighth element to be added to the {@code ArrayBuilder}.
* @param e9 The ninth element to be added to the {@code ArrayBuilder}.
* @param e10 The tenth element to be added to the {@code ArrayBuilder}.
* @param <E> The type of the elements contained in the returned {@code ArrayBuilder}.
* @return An {@code ArrayBuilder} instance over type {@code E} containing the supplied
* elements.
*/
public static <E> ArrayBuilder<E> arrayBuilderWith(E e1, E e2, E e3, E e4, E e5, E e6, E e7, E e8, E e9, E e10) {
return arrayBuilderFrom(iterableWith(e1, e2, e3, e4, e5, e6, e7, e8, e9, e10));
}
/**
* Returns an {@code ArrayBuilder} instance over the type {@code E} containing the supplied
* elements. The supplied elements are added to the {@code ArrayBuilder} instance in the same
* order as they are defined in the argument list.
* <p/>
* <p>Note that due to type erasure, the supplied elements must all be of the same concrete
     * type; otherwise the array cannot be instantiated. If an array needs to be constructed
* from elements of different concrete types, use an {@code ArrayBuilder} directly, passing
* in the class of the elements to be contained in the resulting array. For example:
* <blockquote>
* <pre>
* PartTimeEmployee partTimeEmployee = new PartTimeEmployee("Designer", "John");
* FullTimeEmployee fullTimeEmployee = new FullTimeEmployee("Manufacturer", "Fred");
* HourlyEmployee hourlyEmployee = new HourlyEmployee("Materials Consultant", "Andy");
*
* Employee[] employees = arrayBuilderOf(Employee.class)
* .with(partTimeEmployee, fullTimeEmployee, hourlyEmployee)
* .build();
* </pre>
* </blockquote>
* </p>
*
* @param e1 The first element to be added to the {@code ArrayBuilder}.
* @param e2 The second element to be added to the {@code ArrayBuilder}.
* @param e3 The third element to be added to the {@code ArrayBuilder}.
* @param e4 The fourth element to be added to the {@code ArrayBuilder}.
* @param e5 The fifth element to be added to the {@code ArrayBuilder}.
* @param e6 The sixth element to be added to the {@code ArrayBuilder}.
* @param e7 The seventh element to be added to the {@code ArrayBuilder}.
* @param e8 The eighth element to be added to the {@code ArrayBuilder}.
* @param e9 The ninth element to be added to the {@code ArrayBuilder}.
* @param e10 The tenth element to be added to the {@code ArrayBuilder}.
* @param e11on The remaining elements to be added to the {@code ArrayBuilder}. The elements
* will be added to the {@code ArrayBuilder} in the order they are defined in the
* variadic argument.
* @param <E> The type of the elements contained in the returned {@code ArrayBuilder}.
* @return An {@code ArrayBuilder} instance over type {@code E} containing the supplied
* elements.
*/
public static <E> ArrayBuilder<E> arrayBuilderWith(E e1, E e2, E e3, E e4, E e5, E e6, E e7, E e8, E e9, E e10, E... e11on) {
return arrayBuilderFrom(iterableBuilderWith(e1, e2, e3, e4, e5, e6, e7, e8, e9, e10).and(e11on).build());
}
public static <R> Single<R> tuple(R first) {
return new Single<R>(first);
}
public static <R, S> Pair<R, S> tuple(R first, S second) {
return new Pair<R, S>(first, second);
}
public static <R, S, T> Triple<R, S, T> tuple(R first, S second, T third) {
return new Triple<R, S, T>(first, second, third);
}
public static <R, S, T, U> Quadruple<R, S, T, U> tuple(R first, S second, T third, U fourth) {
return new Quadruple<R, S, T, U>(first, second, third, fourth);
}
public static <R, S, T, U, V> Quintuple<R, S, T, U, V> tuple(R first, S second, T third, U fourth, V fifth) {
return new Quintuple<R, S, T, U, V>(first, second, third, fourth, fifth);
}
public static <R, S, T, U, V, W> Sextuple<R, S, T, U, V, W> tuple(R first, S second, T third, U fourth, V fifth, W sixth) {
return new Sextuple<R, S, T, U, V, W>(first, second, third, fourth, fifth, sixth);
}
public static <R, S, T, U, V, W, X> Septuple<R, S, T, U, V, W, X> tuple(R first, S second, T third, U fourth, V fifth, W sixth, X seventh) {
return new Septuple<R, S, T, U, V, W, X>(first, second, third, fourth, fifth, sixth, seventh);
}
public static <R, S, T, U, V, W, X, Y> Octuple<R, S, T, U, V, W, X, Y> tuple(R first, S second, T third, U fourth, V fifth, W sixth, X seventh, Y eighth) {
return new Octuple<R, S, T, U, V, W, X, Y>(first, second, third, fourth, fifth, sixth, seventh, eighth);
}
public static <R, S, T, U, V, W, X, Y, Z> Nonuple<R, S, T, U, V, W, X, Y, Z> tuple(R first, S second, T third, U fourth, V fifth, W sixth, X seventh, Y eighth, Z ninth) {
return new Nonuple<R, S, T, U, V, W, X, Y, Z>(first, second, third, fourth, fifth, sixth, seventh, eighth, ninth);
}
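    // Illustrative usage sketch (not part of the library API): each tuple factory returns the
    // fixed-arity tuple type matching its argument count, e.g. a Pair for two values and a
    // Triple for three.
    //
    //     Pair<String, Integer> pair = tuple("answer", 42);
    //     Triple<String, Integer, Boolean> triple = tuple("answer", 42, true);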
public static <E> Multiset<E> multisetWith(E e) {
return multisetFrom(iterableWith(e));
}
public static <E> Multiset<E> multisetWith(E e1, E e2) {
return multisetFrom(iterableWith(e1, e2));
}
public static <E> Multiset<E> multisetWith(E e1, E e2, E e3) {
return multisetFrom(iterableWith(e1, e2, e3));
}
public static <E> Multiset<E> multisetWith(E e1, E e2, E e3, E e4) {
return multisetFrom(iterableWith(e1, e2, e3, e4));
}
public static <E> Multiset<E> multisetWith(E e1, E e2, E e3, E e4, E e5) {
return multisetFrom(iterableWith(e1, e2, e3, e4, e5));
}
public static <E> Multiset<E> multisetWith(E e1, E e2, E e3, E e4, E e5, E e6) {
return multisetFrom(iterableWith(e1, e2, e3, e4, e5, e6));
}
public static <E> Multiset<E> multisetWith(E e1, E e2, E e3, E e4, E e5, E e6, E e7) {
return multisetFrom(iterableWith(e1, e2, e3, e4, e5, e6, e7));
}
public static <E> Multiset<E> multisetWith(E e1, E e2, E e3, E e4, E e5, E e6, E e7, E e8) {
return multisetFrom(iterableWith(e1, e2, e3, e4, e5, e6, e7, e8));
}
public static <E> Multiset<E> multisetWith(E e1, E e2, E e3, E e4, E e5, E e6, E e7, E e8, E e9) {
return multisetFrom(iterableWith(e1, e2, e3, e4, e5, e6, e7, e8, e9));
}
public static <E> Multiset<E> multisetWith(E e1, E e2, E e3, E e4, E e5, E e6, E e7, E e8, E e9, E e10) {
return multisetFrom(iterableWith(e1, e2, e3, e4, e5, e6, e7, e8, e9, e10));
}
public static <E> Multiset<E> multisetWith(E e1, E e2, E e3, E e4, E e5, E e6, E e7, E e8, E e9, E e10, E... e11on) {
return multisetBuilderFrom(iterableWith(e1, e2, e3, e4, e5, e6, e7, e8, e9, e10)).with(e11on).build();
}
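    // Illustrative usage sketch (not part of the library API): unlike the set literals, the
    // multiset literals keep duplicates, so repeated values are counted rather than collapsed.
    //
    //     Multiset<String> bag = multisetWith("a", "a", "b");   // "a" occurs twice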
public static <E> MultisetBuilder<E> multisetBuilderWith(E e) {
return multisetBuilderFrom(iterableWith(e));
}
public static <E> MultisetBuilder<E> multisetBuilderWith(E e1, E e2) {
return multisetBuilderFrom(iterableWith(e1, e2));
}
public static <E> MultisetBuilder<E> multisetBuilderWith(E e1, E e2, E e3) {
return multisetBuilderFrom(iterableWith(e1, e2, e3));
}
public static <E> MultisetBuilder<E> multisetBuilderWith(E e1, E e2, E e3, E e4) {
return multisetBuilderFrom(iterableWith(e1, e2, e3, e4));
}
public static <E> MultisetBuilder<E> multisetBuilderWith(E e1, E e2, E e3, E e4, E e5) {
return multisetBuilderFrom(iterableWith(e1, e2, e3, e4, e5));
}
public static <E> MultisetBuilder<E> multisetBuilderWith(E e1, E e2, E e3, E e4, E e5, E e6) {
return multisetBuilderFrom(iterableWith(e1, e2, e3, e4, e5, e6));
}
public static <E> MultisetBuilder<E> multisetBuilderWith(E e1, E e2, E e3, E e4, E e5, E e6, E e7) {
return multisetBuilderFrom(iterableWith(e1, e2, e3, e4, e5, e6, e7));
}
public static <E> MultisetBuilder<E> multisetBuilderWith(E e1, E e2, E e3, E e4, E e5, E e6, E e7, E e8) {
return multisetBuilderFrom(iterableWith(e1, e2, e3, e4, e5, e6, e7, e8));
}
public static <E> MultisetBuilder<E> multisetBuilderWith(E e1, E e2, E e3, E e4, E e5, E e6, E e7, E e8, E e9) {
return multisetBuilderFrom(iterableWith(e1, e2, e3, e4, e5, e6, e7, e8, e9));
}
public static <E> MultisetBuilder<E> multisetBuilderWith(E e1, E e2, E e3, E e4, E e5, E e6, E e7, E e8, E e9, E e10) {
return multisetBuilderFrom(iterableWith(e1, e2, e3, e4, e5, e6, e7, e8, e9, e10));
}
public static <E> MultisetBuilder<E> multisetBuilderWith(E e1, E e2, E e3, E e4, E e5, E e6, E e7, E e8, E e9, E e10, E... e11on) {
return multisetBuilderFrom(iterableWith(e1, e2, e3, e4, e5, e6, e7, e8, e9, e10)).with(e11on);
}
public static <K, V> Map<K, V> mapWithKeyValuePair(K k1, V v1) {
return new MapBuilder<K, V>().with(mapEntryFor(k1, v1)).build();
}
public static <K, V> Map<K, V> mapWithKeyValuePairs(K k1, V v1, K k2, V v2) {
return new MapBuilder<K, V>().with(mapEntryFor(k1, v1), mapEntryFor(k2, v2)).build();
}
public static <K, V> Map<K, V> mapWithKeyValuePairs(K k1, V v1, K k2, V v2, K k3, V v3) {
return new MapBuilder<K, V>().with(mapEntryFor(k1, v1), mapEntryFor(k2, v2), mapEntryFor(k3, v3)).build();
}
public static <K, V> Map<K, V> mapWithKeyValuePairs(K k1, V v1, K k2, V v2, K k3, V v3, K k4, V v4) {
return new MapBuilder<K, V>().with(mapEntryFor(k1, v1), mapEntryFor(k2, v2), mapEntryFor(k3, v3), mapEntryFor(k4, v4)).build();
}
public static <K, V> Map<K, V> mapWithKeyValuePairs(K k1, V v1, K k2, V v2, K k3, V v3, K k4, V v4, K k5, V v5) {
return new MapBuilder<K, V>().with(mapEntryFor(k1, v1), mapEntryFor(k2, v2), mapEntryFor(k3, v3), mapEntryFor(k4, v4), mapEntryFor(k5, v5)).build();
}
public static <K, V> Map<K, V> mapWithKeyValuePairs(K k1, V v1, K k2, V v2, K k3, V v3, K k4, V v4, K k5, V v5, K k6, V v6) {
return new MapBuilder<K, V>().with(mapEntryFor(k1, v1), mapEntryFor(k2, v2), mapEntryFor(k3, v3), mapEntryFor(k4, v4), mapEntryFor(k5, v5), mapEntryFor(k6, v6)).build();
}
public static <K, V> Map<K, V> mapWithKeyValuePairs(K k1, V v1, K k2, V v2, K k3, V v3, K k4, V v4, K k5, V v5, K k6, V v6, K k7, V v7) {
return new MapBuilder<K, V>().with(mapEntryFor(k1, v1), mapEntryFor(k2, v2), mapEntryFor(k3, v3), mapEntryFor(k4, v4), mapEntryFor(k5, v5), mapEntryFor(k6, v6), mapEntryFor(k7, v7)).build();
}
public static <K, V> Map<K, V> mapWithKeyValuePairs(K k1, V v1, K k2, V v2, K k3, V v3, K k4, V v4, K k5, V v5, K k6, V v6, K k7, V v7, K k8, V v8) {
return new MapBuilder<K, V>().with(mapEntryFor(k1, v1), mapEntryFor(k2, v2), mapEntryFor(k3, v3), mapEntryFor(k4, v4), mapEntryFor(k5, v5), mapEntryFor(k6, v6), mapEntryFor(k7, v7), mapEntryFor(k8, v8)).build();
}
public static <K, V> Map<K, V> mapWithKeyValuePairs(K k1, V v1, K k2, V v2, K k3, V v3, K k4, V v4, K k5, V v5, K k6, V v6, K k7, V v7, K k8, V v8, K k9, V v9) {
return new MapBuilder<K, V>().with(mapEntryFor(k1, v1), mapEntryFor(k2, v2), mapEntryFor(k3, v3), mapEntryFor(k4, v4), mapEntryFor(k5, v5), mapEntryFor(k6, v6), mapEntryFor(k7, v7), mapEntryFor(k8, v8), mapEntryFor(k9, v9)).build();
}
public static <K, V> Map<K, V> mapWithKeyValuePairs(K k1, V v1, K k2, V v2, K k3, V v3, K k4, V v4, K k5, V v5, K k6, V v6, K k7, V v7, K k8, V v8, K k9, V v9, K k10, V v10) {
return new MapBuilder<K, V>().with(mapEntryFor(k1, v1), mapEntryFor(k2, v2), mapEntryFor(k3, v3), mapEntryFor(k4, v4), mapEntryFor(k5, v5), mapEntryFor(k6, v6), mapEntryFor(k7, v7), mapEntryFor(k8, v8), mapEntryFor(k9, v9), mapEntryFor(k10, v10)).build();
}
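    // Illustrative usage sketch (not part of the library API): the key-value overloads alternate
    // keys and values in the argument list.
    //
    //     Map<String, Integer> counts = mapWithKeyValuePairs("alpha", 1, "beta", 2);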
public static <K, V> Map<K, V> mapWith(Map.Entry<K, V> e1) {
return mapFromEntries(iterableWith(e1));
}
public static <K, V> Map<K, V> mapWith(Map.Entry<K, V> e1, Map.Entry<K, V> e2) {
return mapFromEntries(iterableWith(e1, e2));
}
public static <K, V> Map<K, V> mapWith(Map.Entry<K, V> e1, Map.Entry<K, V> e2, Map.Entry<K, V> e3) {
return mapFromEntries(iterableWith(e1, e2, e3));
}
public static <K, V> Map<K, V> mapWith(Map.Entry<K, V> e1, Map.Entry<K, V> e2, Map.Entry<K, V> e3, Map.Entry<K, V> e4) {
return mapFromEntries(iterableWith(e1, e2, e3, e4));
}
public static <K, V> Map<K, V> mapWith(Map.Entry<K, V> e1, Map.Entry<K, V> e2, Map.Entry<K, V> e3, Map.Entry<K, V> e4, Map.Entry<K, V> e5) {
return mapFromEntries(iterableWith(e1, e2, e3, e4, e5));
}
public static <K, V> Map<K, V> mapWith(Map.Entry<K, V> e1, Map.Entry<K, V> e2, Map.Entry<K, V> e3, Map.Entry<K, V> e4, Map.Entry<K, V> e5, Map.Entry<K, V> e6) {
return mapFromEntries(iterableWith(e1, e2, e3, e4, e5, e6));
}
public static <K, V> Map<K, V> mapWith(Map.Entry<K, V> e1, Map.Entry<K, V> e2, Map.Entry<K, V> e3, Map.Entry<K, V> e4, Map.Entry<K, V> e5, Map.Entry<K, V> e6, Map.Entry<K, V> e7) {
return mapFromEntries(iterableWith(e1, e2, e3, e4, e5, e6, e7));
}
public static <K, V> Map<K, V> mapWith(Map.Entry<K, V> e1, Map.Entry<K, V> e2, Map.Entry<K, V> e3, Map.Entry<K, V> e4, Map.Entry<K, V> e5, Map.Entry<K, V> e6, Map.Entry<K, V> e7, Map.Entry<K, V> e8) {
return mapFromEntries(iterableWith(e1, e2, e3, e4, e5, e6, e7, e8));
}
public static <K, V> Map<K, V> mapWith(Map.Entry<K, V> e1, Map.Entry<K, V> e2, Map.Entry<K, V> e3, Map.Entry<K, V> e4, Map.Entry<K, V> e5, Map.Entry<K, V> e6, Map.Entry<K, V> e7, Map.Entry<K, V> e8, Map.Entry<K, V> e9) {
return mapFromEntries(iterableWith(e1, e2, e3, e4, e5, e6, e7, e8, e9));
}
public static <K, V> Map<K, V> mapWith(Map.Entry<K, V> e1, Map.Entry<K, V> e2, Map.Entry<K, V> e3, Map.Entry<K, V> e4, Map.Entry<K, V> e5, Map.Entry<K, V> e6, Map.Entry<K, V> e7, Map.Entry<K, V> e8, Map.Entry<K, V> e9, Map.Entry<K, V> e10) {
return mapFromEntries(iterableWith(e1, e2, e3, e4, e5, e6, e7, e8, e9, e10));
}
public static <K, V> Map<K, V> mapWith(Map.Entry<K, V> e1, Map.Entry<K, V> e2, Map.Entry<K, V> e3, Map.Entry<K, V> e4, Map.Entry<K, V> e5, Map.Entry<K, V> e6, Map.Entry<K, V> e7, Map.Entry<K, V> e8, Map.Entry<K, V> e9, Map.Entry<K, V> e10, Map.Entry<K, V>... e11on) {
return mapBuilderFromEntries(iterableWith(e1, e2, e3, e4, e5, e6, e7, e8, e9, e10)).and(e11on).build();
}
public static <K, V> Map<K, V> mapWith(Pair<K, V> e1) {
return mapFromPairs(iterableWith(e1));
}
public static <K, V> Map<K, V> mapWith(Pair<K, V> e1, Pair<K, V> e2) {
return mapFromPairs(iterableWith(e1, e2));
}
public static <K, V> Map<K, V> mapWith(Pair<K, V> e1, Pair<K, V> e2, Pair<K, V> e3) {
return mapFromPairs(iterableWith(e1, e2, e3));
}
public static <K, V> Map<K, V> mapWith(Pair<K, V> e1, Pair<K, V> e2, Pair<K, V> e3, Pair<K, V> e4) {
return mapFromPairs(iterableWith(e1, e2, e3, e4));
}
public static <K, V> Map<K, V> mapWith(Pair<K, V> e1, Pair<K, V> e2, Pair<K, V> e3, Pair<K, V> e4, Pair<K, V> e5) {
return mapFromPairs(iterableWith(e1, e2, e3, e4, e5));
}
public static <K, V> Map<K, V> mapWith(Pair<K, V> e1, Pair<K, V> e2, Pair<K, V> e3, Pair<K, V> e4, Pair<K, V> e5, Pair<K, V> e6) {
return mapFromPairs(iterableWith(e1, e2, e3, e4, e5, e6));
}
public static <K, V> Map<K, V> mapWith(Pair<K, V> e1, Pair<K, V> e2, Pair<K, V> e3, Pair<K, V> e4, Pair<K, V> e5, Pair<K, V> e6, Pair<K, V> e7) {
return mapFromPairs(iterableWith(e1, e2, e3, e4, e5, e6, e7));
}
public static <K, V> Map<K, V> mapWith(Pair<K, V> e1, Pair<K, V> e2, Pair<K, V> e3, Pair<K, V> e4, Pair<K, V> e5, Pair<K, V> e6, Pair<K, V> e7, Pair<K, V> e8) {
return mapFromPairs(iterableWith(e1, e2, e3, e4, e5, e6, e7, e8));
}
public static <K, V> Map<K, V> mapWith(Pair<K, V> e1, Pair<K, V> e2, Pair<K, V> e3, Pair<K, V> e4, Pair<K, V> e5, Pair<K, V> e6, Pair<K, V> e7, Pair<K, V> e8, Pair<K, V> e9) {
return mapFromPairs(iterableWith(e1, e2, e3, e4, e5, e6, e7, e8, e9));
}
public static <K, V> Map<K, V> mapWith(Pair<K, V> e1, Pair<K, V> e2, Pair<K, V> e3, Pair<K, V> e4, Pair<K, V> e5, Pair<K, V> e6, Pair<K, V> e7, Pair<K, V> e8, Pair<K, V> e9, Pair<K, V> e10) {
return mapFromPairs(iterableWith(e1, e2, e3, e4, e5, e6, e7, e8, e9, e10));
}
public static <K, V> Map<K, V> mapWith(Pair<K, V> e1, Pair<K, V> e2, Pair<K, V> e3, Pair<K, V> e4, Pair<K, V> e5, Pair<K, V> e6, Pair<K, V> e7, Pair<K, V> e8, Pair<K, V> e9, Pair<K, V> e10, Pair<K, V>... e11on) {
return mapBuilderFromPairs(iterableWith(e1, e2, e3, e4, e5, e6, e7, e8, e9, e10)).andPairs(e11on).build();
}
public static <K, V> Map<K, V> mapWith(Map<K, V> m1) {
return mapFromMaps(iterableWith(m1));
}
public static <K, V> Map<K, V> mapWith(Map<K, V> m1, Map<K, V> m2) {
return mapFromMaps(iterableWith(m1, m2));
}
public static <K, V> Map<K, V> mapWith(Map<K, V> m1, Map<K, V> m2, Map<K, V> m3) {
return mapFromMaps(iterableWith(m1, m2, m3));
}
public static <K, V> Map<K, V> mapWith(Map<K, V> m1, Map<K, V> m2, Map<K, V> m3, Map<K, V> m4) {
return mapFromMaps(iterableWith(m1, m2, m3, m4));
}
public static <K, V> Map<K, V> mapWith(Map<K, V> m1, Map<K, V> m2, Map<K, V> m3, Map<K, V> m4, Map<K, V> m5) {
return mapFromMaps(iterableWith(m1, m2, m3, m4, m5));
}
public static <K, V> Map<K, V> mapWith(Map<K, V> m1, Map<K, V> m2, Map<K, V> m3, Map<K, V> m4, Map<K, V> m5, Map<K, V> m6) {
return mapFromMaps(iterableWith(m1, m2, m3, m4, m5, m6));
}
public static <K, V> Map<K, V> mapWith(Map<K, V> m1, Map<K, V> m2, Map<K, V> m3, Map<K, V> m4, Map<K, V> m5, Map<K, V> m6, Map<K, V> m7) {
return mapFromMaps(iterableWith(m1, m2, m3, m4, m5, m6, m7));
}
public static <K, V> Map<K, V> mapWith(Map<K, V> m1, Map<K, V> m2, Map<K, V> m3, Map<K, V> m4, Map<K, V> m5, Map<K, V> m6, Map<K, V> m7, Map<K, V> m8) {
return mapFromMaps(iterableWith(m1, m2, m3, m4, m5, m6, m7, m8));
}
public static <K, V> Map<K, V> mapWith(Map<K, V> m1, Map<K, V> m2, Map<K, V> m3, Map<K, V> m4, Map<K, V> m5, Map<K, V> m6, Map<K, V> m7, Map<K, V> m8, Map<K, V> m9) {
return mapFromMaps(iterableWith(m1, m2, m3, m4, m5, m6, m7, m8, m9));
}
public static <K, V> Map<K, V> mapWith(Map<K, V> m1, Map<K, V> m2, Map<K, V> m3, Map<K, V> m4, Map<K, V> m5, Map<K, V> m6, Map<K, V> m7, Map<K, V> m8, Map<K, V> m9, Map<K, V> m10) {
return mapFromMaps(iterableWith(m1, m2, m3, m4, m5, m6, m7, m8, m9, m10));
}
public static <K, V> Map<K, V> mapWith(Map<K, V> m1, Map<K, V> m2, Map<K, V> m3, Map<K, V> m4, Map<K, V> m5, Map<K, V> m6, Map<K, V> m7, Map<K, V> m8, Map<K, V> m9, Map<K, V> m10, Map<K, V>... m11on) {
return mapBuilderFromMaps(iterableWith(m1, m2, m3, m4, m5, m6, m7, m8, m9, m10)).andMaps(m11on).build();
}
public static <K, V> MapBuilder<K, V> mapBuilderWithKeyValuePair(K k1, V v1) {
return new MapBuilder<K, V>().with(mapEntryFor(k1, v1));
}
public static <K, V> MapBuilder<K, V> mapBuilderWithKeyValuePairs(K k1, V v1, K k2, V v2) {
return new MapBuilder<K, V>().with(mapEntryFor(k1, v1), mapEntryFor(k2, v2));
}
public static <K, V> MapBuilder<K, V> mapBuilderWithKeyValuePairs(K k1, V v1, K k2, V v2, K k3, V v3) {
return new MapBuilder<K, V>().with(mapEntryFor(k1, v1), mapEntryFor(k2, v2), mapEntryFor(k3, v3));
}
public static <K, V> MapBuilder<K, V> mapBuilderWithKeyValuePairs(K k1, V v1, K k2, V v2, K k3, V v3, K k4, V v4) {
return new MapBuilder<K, V>().with(mapEntryFor(k1, v1), mapEntryFor(k2, v2), mapEntryFor(k3, v3), mapEntryFor(k4, v4));
}
public static <K, V> MapBuilder<K, V> mapBuilderWithKeyValuePairs(K k1, V v1, K k2, V v2, K k3, V v3, K k4, V v4, K k5, V v5) {
return new MapBuilder<K, V>().with(mapEntryFor(k1, v1), mapEntryFor(k2, v2), mapEntryFor(k3, v3), mapEntryFor(k4, v4), mapEntryFor(k5, v5));
}
public static <K, V> MapBuilder<K, V> mapBuilderWithKeyValuePairs(K k1, V v1, K k2, V v2, K k3, V v3, K k4, V v4, K k5, V v5, K k6, V v6) {
return new MapBuilder<K, V>().with(mapEntryFor(k1, v1), mapEntryFor(k2, v2), mapEntryFor(k3, v3), mapEntryFor(k4, v4), mapEntryFor(k5, v5), mapEntryFor(k6, v6));
}
public static <K, V> MapBuilder<K, V> mapBuilderWithKeyValuePairs(K k1, V v1, K k2, V v2, K k3, V v3, K k4, V v4, K k5, V v5, K k6, V v6, K k7, V v7) {
return new MapBuilder<K, V>().with(mapEntryFor(k1, v1), mapEntryFor(k2, v2), mapEntryFor(k3, v3), mapEntryFor(k4, v4), mapEntryFor(k5, v5), mapEntryFor(k6, v6), mapEntryFor(k7, v7));
}
public static <K, V> MapBuilder<K, V> mapBuilderWithKeyValuePairs(K k1, V v1, K k2, V v2, K k3, V v3, K k4, V v4, K k5, V v5, K k6, V v6, K k7, V v7, K k8, V v8) {
return new MapBuilder<K, V>().with(mapEntryFor(k1, v1), mapEntryFor(k2, v2), mapEntryFor(k3, v3), mapEntryFor(k4, v4), mapEntryFor(k5, v5), mapEntryFor(k6, v6), mapEntryFor(k7, v7), mapEntryFor(k8, v8));
}
public static <K, V> MapBuilder<K, V> mapBuilderWithKeyValuePairs(K k1, V v1, K k2, V v2, K k3, V v3, K k4, V v4, K k5, V v5, K k6, V v6, K k7, V v7, K k8, V v8, K k9, V v9) {
return new MapBuilder<K, V>().with(mapEntryFor(k1, v1), mapEntryFor(k2, v2), mapEntryFor(k3, v3), mapEntryFor(k4, v4), mapEntryFor(k5, v5), mapEntryFor(k6, v6), mapEntryFor(k7, v7), mapEntryFor(k8, v8), mapEntryFor(k9, v9));
}
public static <K, V> MapBuilder<K, V> mapBuilderWithKeyValuePairs(K k1, V v1, K k2, V v2, K k3, V v3, K k4, V v4, K k5, V v5, K k6, V v6, K k7, V v7, K k8, V v8, K k9, V v9, K k10, V v10) {
return new MapBuilder<K, V>().with(mapEntryFor(k1, v1), mapEntryFor(k2, v2), mapEntryFor(k3, v3), mapEntryFor(k4, v4), mapEntryFor(k5, v5), mapEntryFor(k6, v6), mapEntryFor(k7, v7), mapEntryFor(k8, v8), mapEntryFor(k9, v9), mapEntryFor(k10, v10));
}
public static <K, V> MapBuilder<K, V> mapBuilderWith(Map.Entry<K, V> e1) {
return mapBuilderFromEntries(iterableWith(e1));
}
public static <K, V> MapBuilder<K, V> mapBuilderWith(Map.Entry<K, V> e1, Map.Entry<K, V> e2) {
return mapBuilderFromEntries(iterableWith(e1, e2));
}
public static <K, V> MapBuilder<K, V> mapBuilderWith(Map.Entry<K, V> e1, Map.Entry<K, V> e2, Map.Entry<K, V> e3) {
return mapBuilderFromEntries(iterableWith(e1, e2, e3));
}
public static <K, V> MapBuilder<K, V> mapBuilderWith(Map.Entry<K, V> e1, Map.Entry<K, V> e2, Map.Entry<K, V> e3, Map.Entry<K, V> e4) {
return mapBuilderFromEntries(iterableWith(e1, e2, e3, e4));
}
public static <K, V> MapBuilder<K, V> mapBuilderWith(Map.Entry<K, V> e1, Map.Entry<K, V> e2, Map.Entry<K, V> e3, Map.Entry<K, V> e4, Map.Entry<K, V> e5) {
return mapBuilderFromEntries(iterableWith(e1, e2, e3, e4, e5));
}
public static <K, V> MapBuilder<K, V> mapBuilderWith(Map.Entry<K, V> e1, Map.Entry<K, V> e2, Map.Entry<K, V> e3, Map.Entry<K, V> e4, Map.Entry<K, V> e5, Map.Entry<K, V> e6) {
return mapBuilderFromEntries(iterableWith(e1, e2, e3, e4, e5, e6));
}
public static <K, V> MapBuilder<K, V> mapBuilderWith(Map.Entry<K, V> e1, Map.Entry<K, V> e2, Map.Entry<K, V> e3, Map.Entry<K, V> e4, Map.Entry<K, V> e5, Map.Entry<K, V> e6, Map.Entry<K, V> e7) {
return mapBuilderFromEntries(iterableWith(e1, e2, e3, e4, e5, e6, e7));
}
public static <K, V> MapBuilder<K, V> mapBuilderWith(Map.Entry<K, V> e1, Map.Entry<K, V> e2, Map.Entry<K, V> e3, Map.Entry<K, V> e4, Map.Entry<K, V> e5, Map.Entry<K, V> e6, Map.Entry<K, V> e7, Map.Entry<K, V> e8) {
return mapBuilderFromEntries(iterableWith(e1, e2, e3, e4, e5, e6, e7, e8));
}
public static <K, V> MapBuilder<K, V> mapBuilderWith(Map.Entry<K, V> e1, Map.Entry<K, V> e2, Map.Entry<K, V> e3, Map.Entry<K, V> e4, Map.Entry<K, V> e5, Map.Entry<K, V> e6, Map.Entry<K, V> e7, Map.Entry<K, V> e8, Map.Entry<K, V> e9) {
return mapBuilderFromEntries(iterableWith(e1, e2, e3, e4, e5, e6, e7, e8, e9));
}
public static <K, V> MapBuilder<K, V> mapBuilderWith(Map.Entry<K, V> e1, Map.Entry<K, V> e2, Map.Entry<K, V> e3, Map.Entry<K, V> e4, Map.Entry<K, V> e5, Map.Entry<K, V> e6, Map.Entry<K, V> e7, Map.Entry<K, V> e8, Map.Entry<K, V> e9, Map.Entry<K, V> e10) {
return mapBuilderFromEntries(iterableWith(e1, e2, e3, e4, e5, e6, e7, e8, e9, e10));
}
public static <K, V> MapBuilder<K, V> mapBuilderWith(Map.Entry<K, V> e1, Map.Entry<K, V> e2, Map.Entry<K, V> e3, Map.Entry<K, V> e4, Map.Entry<K, V> e5, Map.Entry<K, V> e6, Map.Entry<K, V> e7, Map.Entry<K, V> e8, Map.Entry<K, V> e9, Map.Entry<K, V> e10, Map.Entry<K, V>... e11on) {
return mapBuilderFromEntries(iterableWith(e1, e2, e3, e4, e5, e6, e7, e8, e9, e10)).and(e11on);
}
public static <K, V> MapBuilder<K, V> mapBuilderWith(Pair<K, V> e1) {
return mapBuilderFromPairs(iterableWith(e1));
}
public static <K, V> MapBuilder<K, V> mapBuilderWith(Pair<K, V> e1, Pair<K, V> e2) {
return mapBuilderFromPairs(iterableWith(e1, e2));
}
public static <K, V> MapBuilder<K, V> mapBuilderWith(Pair<K, V> e1, Pair<K, V> e2, Pair<K, V> e3) {
return mapBuilderFromPairs(iterableWith(e1, e2, e3));
}
public static <K, V> MapBuilder<K, V> mapBuilderWith(Pair<K, V> e1, Pair<K, V> e2, Pair<K, V> e3, Pair<K, V> e4) {
return mapBuilderFromPairs(iterableWith(e1, e2, e3, e4));
}
public static <K, V> MapBuilder<K, V> mapBuilderWith(Pair<K, V> e1, Pair<K, V> e2, Pair<K, V> e3, Pair<K, V> e4, Pair<K, V> e5) {
return mapBuilderFromPairs(iterableWith(e1, e2, e3, e4, e5));
}
public static <K, V> MapBuilder<K, V> mapBuilderWith(Pair<K, V> e1, Pair<K, V> e2, Pair<K, V> e3, Pair<K, V> e4, Pair<K, V> e5, Pair<K, V> e6) {
return mapBuilderFromPairs(iterableWith(e1, e2, e3, e4, e5, e6));
}
public static <K, V> MapBuilder<K, V> mapBuilderWith(Pair<K, V> e1, Pair<K, V> e2, Pair<K, V> e3, Pair<K, V> e4, Pair<K, V> e5, Pair<K, V> e6, Pair<K, V> e7) {
return mapBuilderFromPairs(iterableWith(e1, e2, e3, e4, e5, e6, e7));
}
public static <K, V> MapBuilder<K, V> mapBuilderWith(Pair<K, V> e1, Pair<K, V> e2, Pair<K, V> e3, Pair<K, V> e4, Pair<K, V> e5, Pair<K, V> e6, Pair<K, V> e7, Pair<K, V> e8) {
return mapBuilderFromPairs(iterableWith(e1, e2, e3, e4, e5, e6, e7, e8));
}
public static <K, V> MapBuilder<K, V> mapBuilderWith(Pair<K, V> e1, Pair<K, V> e2, Pair<K, V> e3, Pair<K, V> e4, Pair<K, V> e5, Pair<K, V> e6, Pair<K, V> e7, Pair<K, V> e8, Pair<K, V> e9) {
return mapBuilderFromPairs(iterableWith(e1, e2, e3, e4, e5, e6, e7, e8, e9));
}
public static <K, V> MapBuilder<K, V> mapBuilderWith(Pair<K, V> e1, Pair<K, V> e2, Pair<K, V> e3, Pair<K, V> e4, Pair<K, V> e5, Pair<K, V> e6, Pair<K, V> e7, Pair<K, V> e8, Pair<K, V> e9, Pair<K, V> e10) {
return mapBuilderFromPairs(iterableWith(e1, e2, e3, e4, e5, e6, e7, e8, e9, e10));
}
public static <K, V> MapBuilder<K, V> mapBuilderWith(Pair<K, V> e1, Pair<K, V> e2, Pair<K, V> e3, Pair<K, V> e4, Pair<K, V> e5, Pair<K, V> e6, Pair<K, V> e7, Pair<K, V> e8, Pair<K, V> e9, Pair<K, V> e10, Pair<K, V>... e11on) {
return mapBuilderFromPairs(iterableWith(e1, e2, e3, e4, e5, e6, e7, e8, e9, e10)).andPairs(e11on);
}
public static <K, V> MapBuilder<K, V> mapBuilderWith(Map<K, V> m1) {
return mapBuilderFromMaps(iterableWith(m1));
}
public static <K, V> MapBuilder<K, V> mapBuilderWith(Map<K, V> m1, Map<K, V> m2) {
return mapBuilderFromMaps(iterableWith(m1, m2));
}
public static <K, V> MapBuilder<K, V> mapBuilderWith(Map<K, V> m1, Map<K, V> m2, Map<K, V> m3) {
return mapBuilderFromMaps(iterableWith(m1, m2, m3));
}
public static <K, V> MapBuilder<K, V> mapBuilderWith(Map<K, V> m1, Map<K, V> m2, Map<K, V> m3, Map<K, V> m4) {
return mapBuilderFromMaps(iterableWith(m1, m2, m3, m4));
}
public static <K, V> MapBuilder<K, V> mapBuilderWith(Map<K, V> m1, Map<K, V> m2, Map<K, V> m3, Map<K, V> m4, Map<K, V> m5) {
return mapBuilderFromMaps(iterableWith(m1, m2, m3, m4, m5));
}
public static <K, V> MapBuilder<K, V> mapBuilderWith(Map<K, V> m1, Map<K, V> m2, Map<K, V> m3, Map<K, V> m4, Map<K, V> m5, Map<K, V> m6) {
return mapBuilderFromMaps(iterableWith(m1, m2, m3, m4, m5, m6));
}
public static <K, V> MapBuilder<K, V> mapBuilderWith(Map<K, V> m1, Map<K, V> m2, Map<K, V> m3, Map<K, V> m4, Map<K, V> m5, Map<K, V> m6, Map<K, V> m7) {
return mapBuilderFromMaps(iterableWith(m1, m2, m3, m4, m5, m6, m7));
}
public static <K, V> MapBuilder<K, V> mapBuilderWith(Map<K, V> m1, Map<K, V> m2, Map<K, V> m3, Map<K, V> m4, Map<K, V> m5, Map<K, V> m6, Map<K, V> m7, Map<K, V> m8) {
return mapBuilderFromMaps(iterableWith(m1, m2, m3, m4, m5, m6, m7, m8));
}
public static <K, V> MapBuilder<K, V> mapBuilderWith(Map<K, V> m1, Map<K, V> m2, Map<K, V> m3, Map<K, V> m4, Map<K, V> m5, Map<K, V> m6, Map<K, V> m7, Map<K, V> m8, Map<K, V> m9) {
return mapBuilderFromMaps(iterableWith(m1, m2, m3, m4, m5, m6, m7, m8, m9));
}
public static <K, V> MapBuilder<K, V> mapBuilderWith(Map<K, V> m1, Map<K, V> m2, Map<K, V> m3, Map<K, V> m4, Map<K, V> m5, Map<K, V> m6, Map<K, V> m7, Map<K, V> m8, Map<K, V> m9, Map<K, V> m10) {
return mapBuilderFromMaps(iterableWith(m1, m2, m3, m4, m5, m6, m7, m8, m9, m10));
}
public static <K, V> MapBuilder<K, V> mapBuilderWith(Map<K, V> m1, Map<K, V> m2, Map<K, V> m3, Map<K, V> m4, Map<K, V> m5, Map<K, V> m6, Map<K, V> m7, Map<K, V> m8, Map<K, V> m9, Map<K, V> m10, Map<K, V>... m11on) {
return mapBuilderFromMaps(iterableWith(m1, m2, m3, m4, m5, m6, m7, m8, m9, m10)).andMaps(m11on);
}
}
|
package core;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import org.apache.commons.codec.binary.Hex;
/**
*
* @author dark
*/
public class Hash {
/**
* @param args the command line arguments
*/
public static void main(String[] args) {
MessageDigest md = null;
String password = "password D:";
try {
// SHA-512
md = MessageDigest.getInstance("SHA-512");
md.update(password.getBytes());
byte[] mb = md.digest();
System.out.println(Hex.encodeHex(mb));
// SHA-1
md = MessageDigest.getInstance("SHA-1");
md.update(password.getBytes());
mb = md.digest();
System.out.println(Hex.encodeHex(mb));
// MD5
md = MessageDigest.getInstance("MD5");
md.update(password.getBytes());
mb = md.digest();
System.out.println(Hex.encodeHex(mb));
} catch (NoSuchAlgorithmException e) {
// These standard algorithms are available on common JREs, so this should not happen
e.printStackTrace();
}
}
}
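// --- Illustrative addition, not part of the original file ---
// The main() above prints digests via Hex.encodeHex; this helper shows the same idea as a
// reusable method with an explicit charset. The class and method names here are examples only.
class HashExample {
static String sha512Hex(String input) throws NoSuchAlgorithmException {
MessageDigest md = MessageDigest.getInstance("SHA-512");
byte[] digest = md.digest(input.getBytes(java.nio.charset.StandardCharsets.UTF_8));
return new String(Hex.encodeHex(digest));
}
}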
|
package com.arnaudpiroelle.muzei.marvel.source.command;
import android.content.Context;
import android.content.Intent;
import com.arnaudpiroelle.muzei.marvel.R;
import com.arnaudpiroelle.muzei.marvel.core.inject.Injector;
import com.google.android.apps.muzei.api.Artwork;
import com.google.android.apps.muzei.api.UserCommand;
import static com.arnaudpiroelle.muzei.marvel.core.inject.Injector.getContext;
public class PublishCommand extends UserCommand implements Command {
public PublishCommand() {
super(2, getContext().getString(R.string.command_publish));
}
@Override
public void execute(Artwork artwork) {
Context context = Injector.getContext();
String detailUrl = artwork.getImageUri().toString();
String artist = context.getResources().getString(R.string.marvel_copyright).trim();
Intent shareIntent = new Intent(Intent.ACTION_SEND);
shareIntent.setType("text/plain");
shareIntent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
String appUrl = context.getString(R.string.app_url);
String notifText = String.format(context.getString(R.string.share_template), artwork.getTitle().trim(), artist, detailUrl, appUrl);
shareIntent.putExtra(Intent.EXTRA_TEXT, notifText);
context.startActivity(shareIntent);
}
}
|
package com.example.u_nation.passcodelocksample.activity;
import android.content.Context;
import android.content.Intent;
import android.os.Bundle;
import android.support.v7.app.ActionBarActivity;
import android.view.View;
import com.example.u_nation.passcodelocksample.AppConfig;
import com.example.u_nation.passcodelocksample.InitPassCodeActivity;
import com.example.u_nation.passcodelocksample.R;
import com.example.u_nation.passcodelocksample.util.LogUtil;
import com.example.u_nation.passcodelocksample.util.PrefUtil;
import com.example.u_nation.passcodelocksample.util.ShowToast;
public class MainActivity extends ActionBarActivity {
private static final String KEY_PASSWORD = "key_password";
public static Intent createIntent(Context context) {
Intent intent = new Intent(context, MainActivity.class);
return intent;
}
public static Intent createIntent(Context context, int password) {
Intent intent = new Intent(context, MainActivity.class);
intent.putExtra(KEY_PASSWORD, password);
return intent;
}
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
LogUtil.d("onCreate");
setContentView(R.layout.activity_main);
getSupportActionBar().setTitle("MainActivity");
}
@Override
protected void onResume() {
super.onResume();
LogUtil.d("onResume");
}
@Override
protected void onPause() {
super.onPause();
LogUtil.d("onPause");
}
@Override
protected void onStop() {
super.onStop();
LogUtil.d("onStop");
}
@Override
protected void onUserLeaveHint() {
super.onUserLeaveHint();
LogUtil.d("onUserLeaveHint");
}
@Override
protected void onDestroy() {
super.onDestroy();
LogUtil.i("onDestroy");
}
public void onSample1(View view) {
startActivity(Sample1Activity.createIntent(getApplicationContext()));
finish();
}
/*(Activity)*/
public void onSample2(View view) {
startActivity(Sample2Activity.createIntent(getApplicationContext()));
}
public void onLock(View view) {
startActivity(InitPassCodeActivity.createIntent(getApplicationContext()));
}
public void onUnlock(View view) {
PrefUtil.setBool(getApplicationContext(), AppConfig.PREF_KEY_IS_LOCKED, false);
PrefUtil.setInt(getApplicationContext(), AppConfig.PREF_KEY_PASSWORD, 0);
ShowToast.show("", this);
}
}
|
package jnacl.crypto;
public class curve25519
{
final int CRYPTO_BYTES = 32;
final int CRYPTO_SCALARBYTES = 32;
static byte[] basev = { 9, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 };
static long[] minusp = { 19, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 128 };
public static int crypto_scalarmult_base(byte[] q, byte[] n)
{
byte[] basevp = basev;
return crypto_scalarmult(q, n, basevp);
}
static void add(long[] outv, int outvoffset, long[] a, int aoffset, long[] b, int boffset)
{
long u = 0;
for (int j = 0; j < 31; ++j)
{
u += a[aoffset + j] + b[boffset + j];
outv[outvoffset + j] = u & 255;
u = (u & 0xFFFFFFFF) >> 8;
}
u += a[aoffset + 31] + b[boffset + 31];
outv[outvoffset + 31] = u;
}
static void sub(long[] outv, int outvoffset, long[] a, int aoffset, long[] b, int boffset)
{
long u = 218;
for (int j = 0; j < 31; ++j)
{
u += a[aoffset + j] + 65280 - b[boffset + j];
outv[outvoffset + j] = u & 255;
u = (u & 0xFFFFFFFF) >> 8;
}
u += a[aoffset + 31] - b[boffset + 31];
outv[outvoffset + 31] = u;
}
static void squeeze(long[] a, int aoffset)
{
long u = 0;
for (int j = 0; j < 31; ++j)
{
u += a[aoffset + j];
a[aoffset + j] = u & 255;
u = (u & 0xFFFFFFFF) >> 8;
}
u += a[aoffset + 31];
a[aoffset + 31] = u & 127;
u = 19 * (u >> 7);
for (int j = 0; j < 31; ++j)
{
u += a[aoffset + j];
a[aoffset + j] = u & 255;
u = (u & 0xFFFFFFFF) >> 8;
}
u += a[aoffset + 31];
a[aoffset + 31] = u;
}
static void freeze(long[] a, int aoffset)
{
long[] aorig = new long[32];
for (int j = 0; j < 32; ++j)
aorig[j] = a[aoffset + j];
long[] minuspp = minusp;
add(a, 0, a, 0, minuspp, 0);
long negative = (long) (-((a[aoffset + 31] >> 7) & 1));
negative &= 0xFFFFFFFF;
for (int j = 0; j < 32; ++j)
a[aoffset + j] ^= negative & (aorig[j] ^ a[aoffset + j]);
}
static void mult(long[] outv, int outvoffset, long[] a, int aoffset, long[] b, int boffset)
{
int j;
for (int i = 0; i < 32; ++i)
{
long u = 0;
for (j = 0; j <= i; ++j)
u += a[aoffset + j] * b[boffset + i - j];
for (j = i + 1; j < 32; ++j)
u += 38 * a[aoffset + j] * b[boffset + i + 32 - j];
outv[outvoffset + i] = u & 0xFFFFFFFF;
}
squeeze(outv, 0);
}
static void mult121665(long[] outv, long[] a)
{
int j;
long u = 0;
for (j = 0; j < 31; ++j)
{
u += 121665 * a[j];
outv[j] = u & 255;
u = (u & 0xFFFFFFFF) >> 8;
}
u += 121665 * a[31];
outv[31] = u & 127;
u = 19 * ((u & 0xFFFFFFFF) >> 7);
for (j = 0; j < 31; ++j)
{
u += outv[j];
outv[j] = u & 255;
u = (u & 0xFFFFFFFF) >> 8;
}
u += outv[j];
outv[j] = u & 0xFFFFFFFF;
}
static void square(long[] outv, int outvoffset, long[] a, int aoffset)
{
int j;
for (int i = 0; i < 32; ++i)
{
long u = 0;
for (j = 0; j < i - j; ++j)
u += a[aoffset + j] * a[aoffset + i - j];
for (j = i + 1; j < i + 32 - j; ++j)
u += 38 * a[aoffset + j] * a[aoffset + i + 32 - j];
u *= 2;
if ((i & 1) == 0)
{
u += a[aoffset + i / 2] * a[aoffset + i / 2];
u += 38 * a[aoffset + i / 2 + 16] * a[aoffset + i / 2 + 16];
}
outv[outvoffset + i] = u & 0xFFFFFFFF;
}
squeeze(outv, 0);
}
static void select(long[] p, long[] q, long[] r, long[] s, long b)
{
long bminus1 = b - 1;
for (int j = 0; j < 64; ++j)
{
long t = bminus1 & (r[j] ^ s[j]);
p[j] = s[j] ^ t;
q[j] = r[j] ^ t;
}
}
static void mainloop(long[] work, byte[] e)
{
long[] xzm1 = new long[64];
long[] xzm = new long[64];
long[] xzmb = new long[64];
long[] xzm1b = new long[64];
long[] xznb = new long[64];
long[] xzn1b = new long[64];
long[] a0 = new long[64];
long[] a1 = new long[64];
long[] b0 = new long[64];
long[] b1 = new long[64];
long[] c1 = new long[64];
long[] r = new long[32];
long[] s = new long[32];
long[] t = new long[32];
long[] u = new long[32];
for (int j = 0; j < 32; ++j)
xzm1[j] = work[j];
xzm1[32] = 1;
for (int j = 33; j < 64; ++j)
xzm1[j] = 0;
xzm[0] = 1;
for (int j = 1; j < 64; ++j)
xzm[j] = 0;
long[] xzmbp = xzmb, a0p = a0, xzm1bp = xzm1b;
long[] a1p = a1, b0p = b0, b1p = b1, c1p = c1;
long[] xznbp = xznb, up = u, xzn1bp = xzn1b;
long[] workp = work, sp = s, rp = r;
for (int pos = 254; pos >= 0; --pos)
{
long b = (long)(e[pos / 8] >> (pos & 7));
b &= 1;
select(xzmb, xzm1b, xzm, xzm1, b);
add(a0, 0, xzmb, 0, xzmbp, 32);
sub(a0p, 32, xzmb, 0, xzmbp, 32);
add(a1, 0, xzm1b, 0, xzm1bp, 32);
sub(a1p, 32, xzm1b, 0, xzm1bp, 32);
square(b0p, 0, a0p, 0);
square(b0p, 32, a0p, 32);
mult(b1p, 0, a1p, 0, a0p, 32);
mult(b1p, 32, a1p, 32, a0p, 0);
add(c1, 0, b1, 0, b1p, 32);
sub(c1p, 32, b1, 0, b1p, 32);
square(rp, 0, c1p, 32);
sub(sp, 0, b0, 0, b0p, 32);
mult121665(t, s);
add(u, 0, t, 0, b0p, 0);
mult(xznbp, 0, b0p, 0, b0p, 32);
mult(xznbp, 32, sp, 0, up, 0);
square(xzn1bp, 0, c1p, 0);
mult(xzn1bp, 32, rp, 0, workp, 0);
select(xzm, xzm1, xznb, xzn1b, b);
}
for (int j = 0; j < 64; ++j)
work[j] = xzm[j];
}
static void recip(long[] outv, int outvoffset, long[] z, int zoffset)
{
long[] z2 = new long[32];
long[] z9 = new long[32];
long[] z11 = new long[32];
long[] z2_5_0 = new long[32];
long[] z2_10_0 = new long[32];
long[] z2_20_0 = new long[32];
long[] z2_50_0 = new long[32];
long[] z2_100_0 = new long[32];
long[] t0 = new long[32];
long[] t1 = new long[32];
long[] z2p = z2;
square(z2p, 0, z, zoffset);
square(t1, 0, z2, 0);
square(t0, 0, t1, 0);
long[] z9p = z9, t0p = t0;
mult(z9p, 0, t0p, 0, z, zoffset);
mult(z11, 0, z9, 0, z2, 0);
square(t0, 0, z11, 0);
/* 2^5 - 2^0 = 31 */
mult(z2_5_0, 0, t0, 0, z9, 0);
/* 2^6 - 2^1 */
square(t0, 0, z2_5_0, 0);
/* 2^7 - 2^2 */
square(t1, 0, t0, 0);
/* 2^8 - 2^3 */
square(t0, 0, t1, 0);
/* 2^9 - 2^4 */
square(t1, 0, t0, 0);
/* 2^10 - 2^5 */
square(t0, 0, t1, 0);
/* 2^10 - 2^0 */
mult(z2_10_0, 0, t0, 0, z2_5_0, 0);
/* 2^11 - 2^1 */
square(t0, 0, z2_10_0, 0);
/* 2^12 - 2^2 */
square(t1, 0, t0, 0);
/* 2^20 - 2^10 */
for (int i = 2; i < 10; i += 2)
{
square(t0, 0, t1, 0);
square(t1, 0, t0, 0);
}
/* 2^20 - 2^0 */
mult(z2_20_0, 0, t1, 0, z2_10_0, 0);
/* 2^21 - 2^1 */
square(t0, 0, z2_20_0, 0);
/* 2^22 - 2^2 */
square(t1, 0, t0, 0);
/* 2^40 - 2^20 */
for (int i = 2; i < 20; i += 2)
{
square(t0, 0, t1, 0);
square(t1, 0, t0, 0);
}
/* 2^40 - 2^0 */
mult(t0, 0, t1, 0, z2_20_0, 0);
/* 2^41 - 2^1 */
square(t1, 0, t0, 0);
/* 2^42 - 2^2 */
square(t0, 0, t1, 0);
/* 2^50 - 2^10 */
for (int i = 2; i < 10; i += 2)
{
square(t1, 0, t0, 0);
square(t0, 0, t1, 0);
}
/* 2^50 - 2^0 */
mult(z2_50_0, 0, t0, 0, z2_10_0, 0);
/* 2^51 - 2^1 */
square(t0, 0, z2_50_0, 0);
/* 2^52 - 2^2 */
square(t1, 0, t0, 0);
/* 2^100 - 2^50 */
for (int i = 2; i < 50; i += 2)
{
square(t0, 0, t1, 0);
square(t1, 0, t0, 0);
}
/* 2^100 - 2^0 */
mult(z2_100_0, 0, t1, 0, z2_50_0, 0);
/* 2^101 - 2^1 */
square(t1, 0, z2_100_0, 0);
/* 2^102 - 2^2 */
square(t0, 0, t1, 0);
/* 2^200 - 2^100 */
for (int i = 2; i < 100; i += 2)
{
square(t1, 0, t0, 0);
square(t0, 0, t1, 0);
}
/* 2^200 - 2^0 */
mult(t1, 0, t0, 0, z2_100_0, 0);
/* 2^201 - 2^1 */
square(t0, 0, t1, 0);
/* 2^202 - 2^2 */
square(t1, 0, t0, 0);
/* 2^250 - 2^50 */
for (int i = 2; i < 50; i += 2)
{
square(t0, 0, t1, 0);
square(t1, 0, t0, 0);
}
/* 2^250 - 2^0 */
mult(t0, 0, t1, 0, z2_50_0, 0);
/* 2^251 - 2^1 */
square(t1, 0, t0, 0);
/* 2^252 - 2^2 */
square(t0, 0, t1, 0);
/* 2^253 - 2^3 */
square(t1, 0, t0, 0);
/* 2^254 - 2^4 */
square(t0, 0, t1, 0);
/* 2^255 - 2^5 */
square(t1, 0, t0, 0);
/* 2^255 - 21 */
long[] t1p = t1, z11p = z11;
mult(outv, outvoffset, t1p, 0, z11p, 0);
}
public static int crypto_scalarmult(byte[] q, byte[] n, byte[] p)
{
long[] work = new long[96];
byte[] e = new byte[32];
for (int i = 0; i < 32; ++i)
e[i] = n[i];
e[0] &= 248;
e[31] &= 127;
e[31] |= 64;
for (int i = 0; i < 32; ++i)
work[i] = p[i];
mainloop(work, e);
long[] workp = work;
recip(workp, 32, workp, 32);
mult(workp, 64, workp, 0, workp, 32);
freeze(workp, 64);
for (int i = 0; i < 32; ++i)
q[i] = (byte) work[64 + i];
return 0;
}
}
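// --- Illustrative addition, not part of the original port ---
// Deriving a Curve25519 public key from a 32-byte secret using the routine defined above.
// The secret scalar is clamped inside crypto_scalarmult, so the caller only supplies raw bytes.
class Curve25519Example {
public static byte[] publicKeyFor(byte[] secretKey32) {
byte[] publicKey = new byte[32];
curve25519.crypto_scalarmult_base(publicKey, secretKey32);
return publicKey;
}
}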
|
package com.josenaves.android.cloud.json;
import com.google.gson.JsonArray;
import com.google.gson.JsonDeserializationContext;
import com.google.gson.JsonDeserializer;
import com.google.gson.JsonElement;
import com.google.gson.JsonParseException;
import com.josenaves.android.cloud.model.ForecastResponse;
import com.josenaves.android.cloud.model.ForecastWeather;
import java.lang.reflect.Type;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.List;
/**
"cnt":3,
"list":[
{
"dt":1431874800,
"temp":{
"day":23.17,
"min":13.67,
"max":23.17,
"night":13.67,
"eve":19.02,
"morn":23.17
},
"pressure":986.4,
"humidity":74,
"weather":[
{
"id":801,
"main":"Clouds",
"description":"few clouds",
"icon":"02d"
}
],
"speed":1.66,
"deg":149,
"clouds":12
},
{
"dt":1431961200,
"temp":{
"day":22.95,
"min":14.61,
"max":23.48,
"night":15.37,
"eve":19.76,
"morn":14.61
},
"pressure":985.6,
"humidity":73,
"weather":[
{
"id":800,
"main":"Clear",
"description":"sky is clear",
"icon":"02d"
}
],
"speed":1.66,
"deg":175,
"clouds":8
},
{
"dt":1432047600,
"temp":{
"day":22.4,
"min":17.62,
"max":22.4,
"night":17.62,
"eve":18.54,
"morn":19.65
},
"pressure":982.86,
"humidity":0,
"weather":[
{
"id":500,
"main":"Rain",
"description":"light rain",
"icon":"10d"
}
],
"speed":2.27,
"deg":144,
"clouds":49,
"rain":2.95
}
]
} */
public class ForecastResponseDeserializer implements JsonDeserializer<ForecastResponse> {
private static final SimpleDateFormat SDF = new SimpleDateFormat("dd/MM/yyyy");
@Override
public ForecastResponse deserialize(JsonElement json,
Type type,
JsonDeserializationContext context) throws JsonParseException {
List<ForecastWeather> forecastList = new ArrayList<>();
JsonArray list = json.getAsJsonObject().get("list").getAsJsonArray();
for (JsonElement e : list) {
ForecastWeather forecast = new ForecastWeather();
forecast.setDate(SDF.format(e.getAsJsonObject().get("dt").getAsLong() * 1000L));
JsonElement temp = e.getAsJsonObject().get("temp");
forecast.setMinTemp(temp.getAsJsonObject().get("min").getAsDouble());
forecast.setMaxTemp(temp.getAsJsonObject().get("max").getAsDouble());
JsonElement weather = e.getAsJsonObject().get("weather");
JsonElement description =
weather.getAsJsonArray().get(0).getAsJsonObject().get("description");
forecast.setForecast(description.getAsString());
forecastList.add(forecast);
}
return new ForecastResponse(forecastList);
}
}
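// --- Illustrative addition, not part of the original file ---
// Registering the deserializer with Gson. ForecastResponse comes from the model package
// imported above; everything else in this sketch is an assumption, not project code.
class ForecastResponseDeserializerExample {
static ForecastResponse parse(String json) {
com.google.gson.Gson gson = new com.google.gson.GsonBuilder()
.registerTypeAdapter(ForecastResponse.class, new ForecastResponseDeserializer())
.create();
return gson.fromJson(json, ForecastResponse.class);
}
}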
|
package es.makingtests.main;
public class Dummy {
private String dummy = "Dummy!";
}
|
package io.dwak.holohackernews.app.ui.storydetail;
import android.annotation.SuppressLint;
import android.app.ProgressDialog;
import android.content.Intent;
import android.content.res.Configuration;
import android.net.Uri;
import android.os.Bundle;
import android.support.v4.widget.SwipeRefreshLayout;
import android.support.v7.app.ActionBar;
import android.support.v7.app.AlertDialog;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.AppCompatEditText;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.text.Html;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import android.webkit.WebChromeClient;
import android.webkit.WebSettings;
import android.webkit.WebView;
import android.webkit.WebViewClient;
import android.widget.Button;
import android.widget.ProgressBar;
import android.widget.RelativeLayout;
import android.widget.TextView;
import android.widget.Toast;
import com.melnykov.fab.FloatingActionButton;
import com.sothree.slidinguppanel.SlidingUpPanelLayout;
import javax.inject.Inject;
import butterknife.ButterKnife;
import butterknife.InjectView;
import io.dwak.holohackernews.app.HackerNewsApplication;
import io.dwak.holohackernews.app.R;
import io.dwak.holohackernews.app.base.BaseViewModelFragment;
import io.dwak.holohackernews.app.dagger.component.DaggerViewModelComponent;
import io.dwak.holohackernews.app.models.Comment;
import io.dwak.holohackernews.app.models.StoryDetail;
import io.dwak.holohackernews.app.preferences.UserPreferenceManager;
import io.dwak.holohackernews.app.util.HNLog;
import io.dwak.holohackernews.app.util.ToastUtils;
import io.dwak.holohackernews.app.util.UIUtils;
import io.dwak.holohackernews.app.widget.ObservableWebView;
import rx.Subscriber;
import rx.android.schedulers.AndroidSchedulers;
import rx.schedulers.Schedulers;
public class StoryDetailFragment extends BaseViewModelFragment<StoryDetailViewModel> implements ObservableWebView.OnScrollChangedCallback {
public static final String HACKER_NEWS_ITEM_BASE_URL = "https://news.ycombinator.com/item?id=";
public static final String HACKER_NEWS_BASE_URL = "https://news.ycombinator.com/";
public static final String LINK_DRAWER_OPEN = "LINK_DRAWER_OPEN";
public static final String TOP_VISIBLE_COMMENT = "TOP_VISIBLE_COMMENT";
public static final String LOADING_FROM_SAVED = "LOADING_FROM_SAVED";
private static final String STORY_ID = "story_id";
private static final String TAG = StoryDetailFragment.class.getSimpleName();
@InjectView(R.id.button_bar) RelativeLayout mButtonBar;
@InjectView(R.id.action_1) Button mButtonBarAction1;
@InjectView(R.id.action_main) Button mButtonBarMainAction;
@InjectView(R.id.action_2) Button mButtonBarAction2;
@InjectView(R.id.swipe_container) SwipeRefreshLayout mSwipeRefreshLayout;
@InjectView(R.id.comments_recycler) RecyclerView mCommentsRecyclerView;
@InjectView(R.id.story_web_view) ObservableWebView mWebView;
@InjectView(R.id.link_layout) RelativeLayout mLinkLayout;
@InjectView(R.id.fabbutton) FloatingActionButton mFloatingActionButton;
@InjectView(R.id.saved_banner) TextView mSavedBanner;
@InjectView(R.id.link_panel) SlidingUpPanelLayout mSlidingUpPanelLayout;
@InjectView(R.id.web_progress_bar) ProgressBar mWebProgressBar;
@Inject StoryDetailViewModel mViewModel;
private Bundle mWebViewBundle;
private SlidingUpPanelLayout.PanelState mOldPanelState;
private StoryDetailRecyclerAdapter mAdapter;
private int mCurrentFirstCompletelyVisibleItemIndex = 0;
private LinearLayoutManager mLayoutManager;
public static StoryDetailFragment newInstance(long id, boolean saved) {
StoryDetailFragment fragment = StoryDetailFragment.newInstance(id);
Bundle args = fragment.getArguments();
args.putBoolean(LOADING_FROM_SAVED, saved);
fragment.setArguments(args);
return fragment;
}
public static StoryDetailFragment newInstance(long param1) {
StoryDetailFragment fragment = new StoryDetailFragment();
Bundle args = new Bundle();
args.putLong(STORY_ID, param1);
fragment.setArguments(args);
return fragment;
}
private void refresh() {
showProgress(true);
mSubscription = getViewModel().getStoryDetailObservable()
.subscribeOn(Schedulers.io())
.observeOn(AndroidSchedulers.mainThread())
.subscribe(new Subscriber<StoryDetail>() {
@Override
public void onCompleted() {
showProgress(false);
mSwipeRefreshLayout.setRefreshing(false);
}
@Override
public void onError(Throwable e) {
Toast.makeText(getActivity(), R.string.story_details_error_toast_message, Toast.LENGTH_SHORT).show();
if (HackerNewsApplication.isDebug()) {
e.printStackTrace();
}
}
@Override
public void onNext(StoryDetail storyDetail) {
updateHeader(storyDetail);
updateSlidingPanel(getViewModel().startDrawerExpanded());
updateRecyclerView(storyDetail);
openLink(storyDetail);
}
});
}
private void updateRecyclerView(StoryDetail storyDetail) {
mAdapter.clear();
for (Comment comment : storyDetail.getCommentList()) {
mAdapter.addComment(comment);
}
}
private void updateHeader(StoryDetail storyDetail) {
mAdapter.updateHeader(storyDetail);
}
private void openLink(StoryDetail storyDetail) {
if (UserPreferenceManager.getInstance().showLinkFirst() && UserPreferenceManager.getInstance().isExternalBrowserEnabled()) {
openLinkInExternalBrowser();
}
else {
if (mWebViewBundle == null && !UserPreferenceManager.getInstance().isExternalBrowserEnabled()) {
mWebView.loadUrl(storyDetail.getUrl());
}
else {
mWebView.restoreState(mWebViewBundle);
}
}
}
private void openLinkInExternalBrowser() {
Intent browserIntent = new Intent();
browserIntent.setAction(Intent.ACTION_VIEW);
browserIntent.setData(Uri.parse(getViewModel().getStoryDetail().getUrl()));
startActivity(browserIntent);
}
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
DaggerViewModelComponent.builder()
.appComponent(HackerNewsApplication.getAppComponent())
.build()
.inject(this);
if (getArguments() != null) {
if (getArguments().containsKey(STORY_ID)) {
long storyId = getArguments().getLong(STORY_ID);
getViewModel().setStoryId(storyId);
}
if (getArguments().containsKey(LOADING_FROM_SAVED)) {
getViewModel().setLoadFromSaved(getArguments().getBoolean(LOADING_FROM_SAVED));
}
}
setHasOptionsMenu(true);
}
@SuppressLint("SetJavaScriptEnabled")
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState) {
View rootView = inflater.inflate(R.layout.fragment_story_comments, container, false);
if (savedInstanceState != null) {
mOldPanelState = (SlidingUpPanelLayout.PanelState) savedInstanceState.getSerializable(LINK_DRAWER_OPEN);
}
ButterKnife.inject(this, rootView);
mContainer = rootView.findViewById(R.id.container);
mProgressBar = (ProgressBar) rootView.findViewById(R.id.progress_bar);
mFloatingActionButton.setOnClickListener(view -> readability());
mSavedBanner.setVisibility(getViewModel().isSaved() ? View.VISIBLE : View.GONE);
setupWebViewDrawer();
ActionBar actionBar = ((AppCompatActivity) getActivity()).getSupportActionBar();
if (actionBar != null) {
actionBar.show();
actionBar.setTitle(getString(R.string.app_name));
}
mAdapter = new StoryDetailRecyclerAdapter(getActivity(), new StoryDetailRecyclerAdapter.StoryDetailRecyclerListener() {
@Override
public void onCommentClicked(int position) {
if (mAdapter.areChildrenHidden(position)) {
mAdapter.showChildComments(position);
}
else {
mAdapter.hideChildComments(position);
}
}
@Override
public void onCommentActionClicked(Comment comment) {
final CharSequence[] commentActions = getResources().getStringArray(getViewModel().getCommentActions());
AlertDialog.Builder builder = new AlertDialog.Builder(getActivity());
builder.setItems(commentActions, (dialogInterface, j) -> {
Intent sendIntent = new Intent();
sendIntent.setAction(Intent.ACTION_SEND);
switch (j) {
case 0:
sendIntent.putExtra(Intent.EXTRA_TEXT,
String.format("https://news.ycombinator.com/item?id=%d", comment.getCommentId()));
break;
case 1:
sendIntent.putExtra(Intent.EXTRA_TEXT,
String.format("%s: %s", comment.getUser(), Html.fromHtml(comment.getContent())));
break;
case 2:
AlertDialog.Builder replyDialog = new AlertDialog.Builder(getActivity())
.setTitle(getString(R.string.action_reply));
AppCompatEditText editText = new AppCompatEditText(getActivity());
replyDialog.setView(editText)
.setPositiveButton(getString(R.string.action_submit), (dialog, which) -> {
ProgressDialog progressDialog = new ProgressDialog(getActivity());
progressDialog.setMessage(getString(R.string.submitting_progress));
progressDialog.setCancelable(false);
progressDialog.show();
getViewModel().reply(comment, editText.getText().toString())
.subscribeOn(Schedulers.io())
.observeOn(AndroidSchedulers.mainThread())
.subscribe(new Subscriber<Object>() {
@Override
public void onCompleted() {
progressDialog.dismiss();
}
@Override
public void onError(Throwable e) {
progressDialog.dismiss();
}
@Override
public void onNext(Object o) {
}
});
dialog.dismiss();
})
.setNegativeButton(android.R.string.cancel, null)
.show();
dialogInterface.dismiss();
return;
case 3:
getViewModel().upvote(comment)
.subscribeOn(Schedulers.io())
.observeOn(AndroidSchedulers.mainThread())
.subscribe(new Subscriber<Object>() {
@Override
public void onCompleted() {
}
@Override
public void onError(Throwable e) {
}
@Override
public void onNext(Object o) {
}
});
return;
}
sendIntent.setType("text/plain");
getActivity().startActivity(sendIntent);
});
builder.create().show();
}
});
mLayoutManager = new LinearLayoutManager(getActivity(), LinearLayoutManager.VERTICAL, false);
mCommentsRecyclerView.setLayoutManager(mLayoutManager);
mCommentsRecyclerView.setAdapter(mAdapter);
mCommentsRecyclerView.addOnScrollListener(new RecyclerView.OnScrollListener() {
@Override
public void onScrolled(RecyclerView recyclerView, int dx, int dy) {
super.onScrolled(recyclerView, dx, dy);
mCurrentFirstCompletelyVisibleItemIndex = mLayoutManager.findFirstVisibleItemPosition();
}
});
mSwipeRefreshLayout.setColorSchemeResources(android.R.color.holo_orange_dark,
android.R.color.holo_orange_light,
android.R.color.holo_orange_dark,
android.R.color.holo_orange_light);
mSwipeRefreshLayout.setOnRefreshListener(() -> {
mSwipeRefreshLayout.setRefreshing(true);
refresh();
});
refresh();
return rootView;
}
@Override
public void onActivityCreated(Bundle savedInstanceState) {
super.onActivityCreated(savedInstanceState);
if (savedInstanceState != null) {
mWebViewBundle = savedInstanceState;
mWebView.restoreState(mWebViewBundle);
}
}
@Override
public void onSaveInstanceState(Bundle outState) {
super.onSaveInstanceState(outState);
mWebView.saveState(outState);
outState.putSerializable(LINK_DRAWER_OPEN, mSlidingUpPanelLayout.getPanelState());
}
@Override
public void onConfigurationChanged(Configuration newConfig) {
super.onConfigurationChanged(newConfig);
}
@Override
public void onDestroyView() {
super.onDestroyView();
ButterKnife.reset(this);
}
@Override
public void onDestroy() {
if (mSubscription != null) mSubscription.unsubscribe();
if(mWebView != null) {
mWebView.destroy();
}
super.onDestroy();
}
@Override
public void onCreateOptionsMenu(Menu menu, MenuInflater inflater) {
super.onCreateOptionsMenu(menu, inflater);
inflater.inflate(R.menu.menu_story_detail, menu);
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
switch (item.getItemId()) {
case R.id.action_share:
final CharSequence[] shareItems = {getString(R.string.action_share_link), getString(R.string.action_share_comments)};
new AlertDialog.Builder(getActivity())
.setItems(shareItems, (dialogInterface, i) -> {
Intent sendIntent = new Intent();
sendIntent.setAction(Intent.ACTION_SEND);
switch (i) {
case 0:
if(getViewModel().getStoryDetail().getUrl() != null) {
sendIntent.setData(Uri.parse(getViewModel().getStoryDetail().getUrl()));
}
else {
ToastUtils.showToast(getActivity(), R.string.open_in_browser_failure_toast);
}
break;
case 1:
sendIntent.putExtra(Intent.EXTRA_TEXT, HACKER_NEWS_ITEM_BASE_URL + getViewModel().getStoryId());
break;
}
sendIntent.setType("text/plain");
startActivity(sendIntent);
})
.create()
.show();
break;
case R.id.action_open_browser:
final CharSequence[] openInBrowserItems = {getString(R.string.action_open_in_browser_link),
getString(R.string.action_open_in_browser_comments)};
new AlertDialog.Builder(getActivity())
.setItems(openInBrowserItems, (dialogInterface, i) -> {
Intent browserIntent = new Intent();
browserIntent.setAction(Intent.ACTION_VIEW);
switch (i){
case 0:
if(getViewModel().getStoryDetail().getUrl() != null) {
browserIntent.setData(Uri.parse(getViewModel().getStoryDetail().getUrl()));
}
break;
case 1:
browserIntent.setData(Uri.parse(HACKER_NEWS_ITEM_BASE_URL + getViewModel().getStoryId()));
break;
}
if(browserIntent.getData() != null) {
startActivity(browserIntent);
}
else {
ToastUtils.showToast(getActivity(), R.string.open_in_browser_error);
}
})
.create()
.show();
break;
}
return super.onOptionsItemSelected(item);
}
@Override
protected StoryDetailViewModel getViewModel() {
return mViewModel;
}
@Override
public void onPause() {
super.onPause();
mWebViewBundle = new Bundle();
mWebView.saveState(mWebViewBundle);
}
private void updateSlidingPanel(boolean expanded) {
if (getViewModel().useExternalBrowser()) {
mSlidingUpPanelLayout.setTouchEnabled(false);
}
mButtonBarMainAction.setOnClickListener(v -> {
if (getViewModel().useExternalBrowser()) {
openLinkInExternalBrowser();
}
else {
mSlidingUpPanelLayout.setPanelState(mSlidingUpPanelLayout.getPanelState()
.equals(SlidingUpPanelLayout.PanelState.COLLAPSED) ? SlidingUpPanelLayout.PanelState.EXPANDED
: SlidingUpPanelLayout.PanelState.COLLAPSED);
}
});
mSlidingUpPanelLayout.post(() -> {
if (expanded) {
mButtonBarMainAction.setText(getString(R.string.show_comments));
mButtonBarAction1.setBackgroundDrawable(getResources().getDrawable(R.drawable.ic_arrow_back));
mButtonBarAction1.setOnClickListener(view -> {
if (mWebView.canGoBack()) {
mWebView.goBack();
}
});
mButtonBarAction2.setBackgroundDrawable(getResources().getDrawable(R.drawable.ic_arrow_forward));
mButtonBarAction2.setOnClickListener(view -> {
if (mWebView.canGoForward()) {
mWebView.goForward();
}
});
}
else {
mButtonBarMainAction.setText(getString(R.string.show_link));
mButtonBarAction1.setBackgroundDrawable(getResources().getDrawable(R.drawable.ic_keyboard_arrow_up));
mButtonBarAction1.setOnClickListener(view -> {
for (int i = mCurrentFirstCompletelyVisibleItemIndex - 1; i >= 0; i--) {
final Object item = mAdapter.getItem(i);
if (item instanceof Comment && ((Comment) item).getLevel() == 0) {
HNLog.d(TAG, String.valueOf(i));
mCurrentFirstCompletelyVisibleItemIndex = i;
mLayoutManager.scrollToPositionWithOffset(i, 0);
if(HackerNewsApplication.isDebug()) UIUtils.showToast(getActivity(), String.valueOf(i));
return;
}
}
});
mButtonBarAction2.setBackgroundDrawable(getResources().getDrawable(R.drawable.ic_keyboard_arrow_down));
mButtonBarAction2.setOnClickListener(view -> {
for (int i = mCurrentFirstCompletelyVisibleItemIndex + 1; i < mAdapter.getItemCount(); i++) {
final Object item = mAdapter.getItem(i);
if (item instanceof Comment && ((Comment) item).getLevel() == 0) {
HNLog.d(TAG, String.valueOf(i));
mCurrentFirstCompletelyVisibleItemIndex = i;
mLayoutManager.scrollToPositionWithOffset(i, 0);
if(HackerNewsApplication.isDebug()) UIUtils.showToast(getActivity(), String.valueOf(i));
return;
}
}
});
mButtonBarAction1.setVisibility(View.VISIBLE);
mButtonBarAction2.setVisibility(View.VISIBLE);
}
});
}
@SuppressLint("SetJavaScriptEnabled")
private void setupWebViewDrawer() {
mSlidingUpPanelLayout.setDragView(mButtonBar);
mSlidingUpPanelLayout.setPanelSlideListener(new SlidingUpPanelLayout.PanelSlideListener() {
@Override
public void onPanelSlide(View view, float v) {
}
@Override
public void onPanelCollapsed(View panelView) {
updateSlidingPanel(false);
}
@Override
public void onPanelExpanded(View panelView) {
updateSlidingPanel(true);
}
@Override
public void onPanelAnchored(View view) {
}
@Override
public void onPanelHidden(View view) {
}
});
if (!UserPreferenceManager.getInstance().isExternalBrowserEnabled()) {
if (mOldPanelState == SlidingUpPanelLayout.PanelState.EXPANDED || (UserPreferenceManager.getInstance().showLinkFirst())) {
mButtonBarMainAction.setText(getResources().getString(R.string.show_comments));
mSlidingUpPanelLayout.postDelayed(() -> mSlidingUpPanelLayout.setPanelState(SlidingUpPanelLayout.PanelState.EXPANDED), getResources().getInteger(R.integer.fragment_animation_times));
}
else {
mButtonBarMainAction.setText(getResources().getString(R.string.show_link));
mSlidingUpPanelLayout.setPanelState(SlidingUpPanelLayout.PanelState.COLLAPSED);
}
}
mWebProgressBar.setVisibility(View.VISIBLE);
mWebProgressBar.setMax(100);
WebSettings webSettings = mWebView.getSettings();
webSettings.setLoadWithOverviewMode(true);
webSettings.setUseWideViewPort(true);
webSettings.setSupportZoom(true);
webSettings.setBuiltInZoomControls(true);
webSettings.setDisplayZoomControls(false);
webSettings.setJavaScriptEnabled(true);
mWebView.setWebViewClient(new WebViewClient() {
@Override
public void onPageFinished(WebView view, String url) {
super.onPageFinished(view, url);
if (mWebProgressBar != null) {
mWebProgressBar.setVisibility(View.GONE);
}
}
});
mWebView.setWebChromeClient(new WebChromeClient() {
@Override
public void onProgressChanged(WebView view, int newProgress) {
super.onProgressChanged(view, newProgress);
if (mWebProgressBar != null) {
if (mWebProgressBar.getVisibility() == View.GONE) {
mWebProgressBar.setVisibility(View.VISIBLE);
}
mWebProgressBar.setProgress(newProgress);
}
}
});
mWebView.setOnScrollChangedCallback(this);
}
public boolean isLinkViewVisible() {
return mSlidingUpPanelLayout.getPanelState() == SlidingUpPanelLayout.PanelState.EXPANDED;
}
public void hideLinkView() {
mSlidingUpPanelLayout.setPanelState(SlidingUpPanelLayout.PanelState.COLLAPSED);
}
private void readability() {
getViewModel().setIsViewingReadability(!getViewModel().isViewingReadability());
if (getViewModel().isViewingReadability()) {
if (getViewModel().getReadabilityUrl() != null) {
mWebView.loadUrl(getViewModel().getReadabilityUrl());
}
}
else {
mWebView.loadUrl(getViewModel().getStoryDetail().getUrl());
}
}
@Override
public void onScroll(int l, int t, int oldL, int oldT) {
if(mFloatingActionButton != null) {
if (t >= oldT) {
mFloatingActionButton.hide();
}
else {
mFloatingActionButton.show();
}
}
}
}
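// --- Illustrative sketch, not part of the original file ---
// How a host activity might attach the fragment via its newInstance() factory. This assumes
// BaseViewModelFragment is a support-library Fragment (as the imports suggest) and that the
// host layout exposes R.id.container; both are assumptions, not taken from the real project.
class StoryDetailHostExample {
static void show(AppCompatActivity activity, long storyId) {
activity.getSupportFragmentManager()
.beginTransaction()
.replace(R.id.container, StoryDetailFragment.newInstance(storyId))
.commit();
}
}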
|
package io.github.froger.instamaterial.ui.activity;
import android.content.res.Resources;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.os.Bundle;
import android.support.annotation.Nullable;
import android.support.v4.app.Fragment;
import android.support.v4.app.FragmentManager;
import android.support.v4.app.FragmentTransaction;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Button;
import android.widget.ImageView;
import io.github.froger.instamaterial.R;
public class FreshStartFragment extends Fragment {
Button btn;
@Nullable
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
View view = inflater.inflate(R.layout.fragment_start, container, false);
ImageView img = (ImageView)view.findViewById(R.id.imageView);
img.setImageBitmap(
decodeSampledBitmapFromResource(getResources(), R.drawable.MainBack, 400, 400));
btn = (Button)view.findViewById(R.id.button4);
btn.setOnClickListener(
new View.OnClickListener() {
@Override
public void onClick(View v) {
// Adding 1 as the default value for a new instance of ReplacementFragment
// because I'm not sure what its purpose is from your code's static constructor.
Fragment frag = new SelectTags();
FragmentManager fm = getActivity().getSupportFragmentManager();
FragmentTransaction ft = fm.beginTransaction();
ft.replace(R.id.LoginContent, frag);
ft.commit();
}
}
);
return view;
}
public static Bitmap decodeSampledBitmapFromResource(Resources res, int resId,
int reqWidth, int reqHeight) {
// First decode with inJustDecodeBounds=true to check dimensions
final BitmapFactory.Options options = new BitmapFactory.Options();
options.inJustDecodeBounds = true;
BitmapFactory.decodeResource(res, resId, options);
// Calculate inSampleSize
options.inSampleSize = calculateInSampleSize(options, reqWidth, reqHeight);
// Decode bitmap with inSampleSize set
options.inJustDecodeBounds = false;
return BitmapFactory.decodeResource(res, resId, options);
}
public static int calculateInSampleSize(
BitmapFactory.Options options, int reqWidth, int reqHeight) {
// Raw height and width of image
final int height = options.outHeight;
final int width = options.outWidth;
int inSampleSize = 1;
if (height > reqHeight || width > reqWidth) {
// Calculate ratios of height and width to requested height and width
final int heightRatio = Math.round((float) height / (float) reqHeight);
final int widthRatio = Math.round((float) width / (float) reqWidth);
// Choose the smaller ratio as the inSampleSize value; this guarantees
// a final image with both dimensions larger than or equal to the
// requested height and width.
inSampleSize = heightRatio < widthRatio ? heightRatio : widthRatio;
}
return inSampleSize;
}
}
|
package it.polimi.dima.giftlist.presentation.module;
import android.app.Application;
import android.content.Context;
import android.content.SharedPreferences;
import android.preference.PreferenceManager;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.google.gson.reflect.TypeToken;
import org.greenrobot.eventbus.EventBus;
import java.util.List;
import javax.inject.Named;
import javax.inject.Singleton;
import dagger.Module;
import dagger.Provides;
import it.polimi.dima.giftlist.BuildConfig;
import it.polimi.dima.giftlist.UIThread;
import it.polimi.dima.giftlist.data.DummyInterface;
import it.polimi.dima.giftlist.data.DummyList;
import it.polimi.dima.giftlist.data.net.currency.CurrencyApi;
import it.polimi.dima.giftlist.data.repository.datasource.CurrencyDataSource;
import it.polimi.dima.giftlist.data.executor.JobExecutor;
import it.polimi.dima.giftlist.data.model.EtsyProduct;
import it.polimi.dima.giftlist.data.net.etsy.EtsyApi;
import it.polimi.dima.giftlist.data.net.etsy.EtsyResultsDeserializer;
import it.polimi.dima.giftlist.data.net.etsy.EtsySigningInterceptor;
import it.polimi.dima.giftlist.domain.repository.ProductRepository;
import it.polimi.dima.giftlist.data.repository.datasource.EtsyProductDataSource;
import it.polimi.dima.giftlist.domain.executor.PostExecutionThread;
import it.polimi.dima.giftlist.domain.executor.ThreadExecutor;
import it.polimi.dima.giftlist.presentation.navigation.IntentStarter;
import it.polimi.dima.giftlist.util.ErrorMessageDeterminer;
import it.polimi.dima.giftlist.util.HttpLoggingInterceptor;
import okhttp3.OkHttpClient;
import retrofit2.Retrofit;
import retrofit2.adapter.rxjava.RxJavaCallAdapterFactory;
import retrofit2.converter.gson.GsonConverterFactory;
import retrofit2.converter.simplexml.SimpleXmlConverterFactory;
@Module
public class ApplicationModule {
private Application application;
public ApplicationModule(Application application) {
this.application = application;
}
@Provides
@Singleton
Context provideApplicationContext() {
return this.application;
}
@Provides
@Singleton
SharedPreferences providesSharedPreferences() {
return PreferenceManager.getDefaultSharedPreferences(application);
}
@Provides
@Singleton
EventBus providesEventBus() {
return EventBus.getDefault();
}
@Provides
@Singleton
ThreadExecutor provideThreadExecutor(JobExecutor jobExecutor) {
return jobExecutor;
}
@Provides
@Singleton
PostExecutionThread providePostExecutionThread(UIThread uiThread) {
return uiThread;
}
@Provides
@Singleton
public ProductRepository providesRepository(EtsyApi etsyApi, CurrencyDataSource currencyDataSource, EventBus eventBus) {
return new EtsyProductDataSource(etsyApi, currencyDataSource, eventBus);
}
@Provides
@Singleton
public DummyInterface providesDummyInterface() {
return new DummyList();
}
@Provides
@Singleton
public ErrorMessageDeterminer providesErrorMessageDeterminer(){
return new ErrorMessageDeterminer();
}
@Provides
@Singleton
IntentStarter providesIntentStarter() {
return new IntentStarter();
}
@Provides
@Singleton
HttpLoggingInterceptor providesHttpLoggingInterceptor() {
HttpLoggingInterceptor httpLoggingInterceptor = new HttpLoggingInterceptor();
httpLoggingInterceptor.setLevel(HttpLoggingInterceptor.Level.BODY);
return httpLoggingInterceptor;
}
@Provides
@Singleton
EtsySigningInterceptor providesEtsySigningInterceptor() {
return new EtsySigningInterceptor(BuildConfig.ETSY_API_KEY);
}
@Provides
@Singleton
@Named("EtsyOkHttp")
OkHttpClient providesEtsyOkHttpClient(EtsySigningInterceptor etsySigningInterceptor, HttpLoggingInterceptor httpLoggingInterceptor) {
return new OkHttpClient.Builder()
.addInterceptor(etsySigningInterceptor)
.addInterceptor(httpLoggingInterceptor)
.build();
}
@Provides
@Singleton
@Named("CurrencyOkHttp")
OkHttpClient providesCurrencyOkHttpClient(HttpLoggingInterceptor httpLoggingInterceptor) {
return new OkHttpClient.Builder()
.addInterceptor(httpLoggingInterceptor)
.build();
}
@Provides
@Singleton
EtsyResultsDeserializer providesEtsyResultsDeserializer() {
return new EtsyResultsDeserializer();
}
@Provides
@Singleton
@Named("EtsyGson")
Gson providesEtsyGsonInstance(EtsyResultsDeserializer etsyResultsDeserializer) {
return new GsonBuilder()
.registerTypeAdapter(new TypeToken<List<EtsyProduct>>() {}.getType(), etsyResultsDeserializer)
.create();
}
@Provides
@Singleton
@Named("EtsyRetrofit")
Retrofit providesEtsyApiAdapter(@Named("EtsyGson") Gson EtsyGsonInstance, @Named("EtsyOkHttp") OkHttpClient etsyOkHttpClient) {
return new Retrofit.Builder()
.baseUrl(EtsyApi.END_POINT)
.addConverterFactory(GsonConverterFactory.create(EtsyGsonInstance))
.addCallAdapterFactory(RxJavaCallAdapterFactory.create())
.client(etsyOkHttpClient)
.build();
}
@Provides
@Singleton
EtsyApi providesEtsyApi(@Named("EtsyRetrofit") Retrofit etsyApiAdapter) {
return etsyApiAdapter.create(EtsyApi.class);
}
@Provides
@Singleton
@Named("CurrencyRetrofit")
Retrofit providesCurrencyApiAdapter(@Named("CurrencyOkHttp") OkHttpClient currencyOkHttpClient) {
return new Retrofit.Builder()
.baseUrl(CurrencyApi.BASE_URL)
.addConverterFactory(SimpleXmlConverterFactory.create())
.addCallAdapterFactory(RxJavaCallAdapterFactory.create())
.client(currencyOkHttpClient)
.build();
}
@Provides
@Singleton
CurrencyApi providesCurrencyApi(@Named("CurrencyRetrofit") Retrofit currencyApiAdapter) {
return currencyApiAdapter.create(CurrencyApi.class);
}
@Provides
@Singleton
CurrencyDataSource providesCurrencyDataSource(CurrencyApi currencyApi, EventBus eventBus) {
return new CurrencyDataSource(currencyApi, eventBus);
}
}
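// --- Illustrative sketch, not part of the original module ---
// A minimal Dagger component consuming ApplicationModule. The real project defines its own
// component elsewhere, so the interface name and the exposed bindings here are assumptions.
@Singleton
@dagger.Component(modules = ApplicationModule.class)
interface ExampleApplicationComponent {
EtsyApi etsyApi();
CurrencyApi currencyApi();
ProductRepository productRepository();
}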
|
package com.exedio.cope.instrument;
import java.io.File;
import java.util.ArrayList;
import java.util.Iterator;
import org.apache.tools.ant.BuildException;
import org.apache.tools.ant.DirectoryScanner;
import org.apache.tools.ant.Task;
import org.apache.tools.ant.types.FileList;
import org.apache.tools.ant.types.FileSet;
public final class AntTask extends Task
{
private final ArrayList fileSets = new ArrayList();
private final ArrayList fileLists = new ArrayList();
private boolean verbose = true;
public void addFileset(final FileSet fileSet)
{
fileSets.add(fileSet);
}
public void addFilelist(final FileList fileList)
{
fileLists.add(fileList);
}
public void setVerbose(final boolean verbose)
{
this.verbose = verbose;
}
public void execute() throws BuildException
{
try
{
final ArrayList sourcefiles = new ArrayList();
for(final Iterator i = fileSets.iterator(); i.hasNext(); )
{
final FileSet fileSet = (FileSet)i.next();
final DirectoryScanner directoryScanner = fileSet.getDirectoryScanner(getProject());
final File dir = fileSet.getDir(getProject());
final String[] fileNames = directoryScanner.getIncludedFiles();
for(int j = 0; j<fileNames.length; j++)
sourcefiles.add(new File(dir, fileNames[j]));
}
for(final Iterator i = fileLists.iterator(); i.hasNext(); )
{
final FileList fileList = (FileList)i.next();
final File dir = fileList.getDir(getProject());
final String[] fileNames = fileList.getFiles(getProject());
for(int j = 0; j<fileNames.length; j++)
sourcefiles.add(new File(dir, fileNames[j]));
}
(new Main()).run(sourcefiles, verbose);
}
catch(Exception e)
{
e.printStackTrace();
throw new BuildException(e);
}
}
}
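/*
 * Illustrative sketch (not part of the original source): the task collects source files from its
 * nested fileset/filelist elements and hands them to Main. Driven programmatically (e.g. from a
 * test), the equivalent wiring might look like the following; the paths are hypothetical and the
 * standard Ant Project/FileList API is assumed.
 *
 *   final org.apache.tools.ant.Project project = new org.apache.tools.ant.Project();
 *   final AntTask task = new AntTask();
 *   task.setProject(project);
 *   final FileList list = new FileList();
 *   list.setDir(new File("src"));
 *   list.setFiles("com/example/A.java, com/example/B.java");
 *   task.addFilelist(list);
 *   task.setVerbose(false);
 *   task.execute();
 */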
|
package org.intermine.bio.dataconversion;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.Map.Entry;
import org.intermine.dataconversion.ItemWriter;
import org.intermine.metadata.ClassDescriptor;
import org.intermine.metadata.FieldDescriptor;
import org.intermine.metadata.MetaDataException;
import org.intermine.metadata.Model;
import org.intermine.metadata.ReferenceDescriptor;
import org.intermine.objectstore.ObjectStoreException;
import org.intermine.sql.Database;
import org.intermine.sql.DatabaseUtil;
import org.intermine.util.StringUtil;
import org.intermine.util.TypeUtil;
import org.intermine.util.XmlUtil;
import org.intermine.xml.full.Attribute;
import org.intermine.xml.full.Item;
import org.intermine.xml.full.Reference;
import org.intermine.xml.full.ReferenceList;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import org.apache.commons.collections.keyvalue.MultiKey;
import org.apache.commons.collections.map.MultiKeyMap;
import org.apache.log4j.Logger;
/**
* DataConverter to read from a Chado database into items
* @author Kim Rutherford
*/
public class ChadoDBConverter extends BioDBConverter
{
private static class FeatureData
{
String uniqueName;
// the synonyms that have already been created
Set<String> existingSynonyms = new HashSet<String>();
String itemIdentifier;
String interMineType;
Integer intermineObjectId;
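        // "flags" is used as a small bit set: EVIDENCE_CREATED is set by makeFeatureEvidence()
        // once an evidence ReferenceList has been stored for this feature, so that
        // addMissingDataEvidence() can later create one only for features that still lack it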
short flags = 0;
static final short EVIDENCE_CREATED_BIT = 0;
static final short EVIDENCE_CREATED = 1 << EVIDENCE_CREATED_BIT;
}
protected static final Logger LOG = Logger.getLogger(ChadoDBConverter.class);
private Map<Integer, FeatureData> features = new HashMap<Integer, FeatureData>();
private String dataSourceName;
private String dataSetTitle;
private int taxonId = -1;
private String genus;
private String species;
private String sequenceFeatureTypesString = "'chromosome', 'chromosome_arm'";
private String featureTypesString =
"'gene', 'mRNA', 'transcript', 'CDS', 'intron', 'exon', "
+ "'regulatory_region', 'enhancer', "
// ignore for now: + "'EST', 'cDNA_clone', "
+ "'miRNA', 'snRNA', 'ncRNA', 'rRNA', 'ncRNA', 'snoRNA', 'tRNA', "
+ "'chromosome_band', 'transposable_element_insertion_site', "
+ "'chromosome_structure_variation', 'protein', "
+ "'five_prime_untranslated_region', "
+ "'five_prime_UTR', 'three_prime_untranslated_region', 'three_prime_UTR', 'transcript', "
+ sequenceFeatureTypesString;
private int chadoOrganismId;
private Model model = Model.getInstanceByName("genomic");
private MultiKeyMap config = null;
private static final List<String> PARTOF_RELATIONS = Arrays.asList("partof", "part_of");
private static final List<Item> EMPTY_ITEM_LIST = Collections.emptyList();
/**
* A class that represents an action while processing synonyms, dbxrefs, etc.
* @author Kim Rutherford
*/
protected static class ConfigAction
{
protected ConfigAction() {
// empty
}
}
/**
* An action that sets an attribute in a new Item.
*/
protected static class SetFieldConfigAction extends ConfigAction
{
private String fieldName;
SetFieldConfigAction() {
fieldName = null;
}
SetFieldConfigAction(String fieldName) {
this.fieldName = fieldName;
}
}
/**
* An action that sets a Synonym.
*/
protected static class CreateSynonymAction extends ConfigAction
{
private String synonymType;
// make a synonym and use the type from chado ("symbol", "identifier" etc.) as the Synonym
// type
CreateSynonymAction() {
synonymType = null;
}
// make a synonym and use given type as the Synonym type
CreateSynonymAction(String synonymType) {
this.synonymType = synonymType;
}
}
private static class DoNothingAction extends ConfigAction
{
// do nothing for this data
}
/**
     * An action that makes a synonym.
*/
protected static final ConfigAction CREATE_SYNONYM_ACTION = new CreateSynonymAction();
protected static final ConfigAction DO_NOTHING_ACTION = new DoNothingAction();
/**
* Create a new ChadoDBConverter object.
* @param database the database to read from
* @param tgtModel the Model used by the object store we will write to with the ItemWriter
* @param writer an ItemWriter used to handle the resultant Items
*/
public ChadoDBConverter(Database database, Model tgtModel, ItemWriter writer) {
super(database, tgtModel, writer);
}
@SuppressWarnings("unchecked")
protected Map<MultiKey, List<ConfigAction>> getConfig() {
if (config == null) {
config = new MultiKeyMap();
}
return config;
}
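    /*
     * Illustrative sketch (not part of the original source): sub-classes populate this map to
     * control what happens to names, dbxrefs, synonyms, props and relationships. For example, a
     * hypothetical sub-class could route dbxref accessions from a db named "FlyBase" on Gene
     * features into the identifier attribute and also create a synonym for them:
     *
     *   Map<MultiKey, List<ConfigAction>> map = getConfig();
     *   map.put(new MultiKey("dbxref", "Gene", "FlyBase", null),
     *           Arrays.asList(new SetFieldConfigAction("identifier"), CREATE_SYNONYM_ACTION));
     *
     * The null in the last key position matches rows regardless of is_current, via the fallback
     * lookup in processDbxrefTable().
     */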
/**
* Set the name of the DataSet Item to create for this converter.
* @param title the title
*/
public void setDataSetTitle(String title) {
this.dataSetTitle = title;
}
/**
* Set the name of the DataSource Item to create for this converter.
* @param name the name
*/
public void setDataSourceName(String name) {
this.dataSourceName = name;
}
/**
* Set the taxonId to use when creating the Organism Item for the new features.
* @param taxonId the taxon id
*/
public void setTaxonId(String taxonId) {
this.taxonId = Integer.valueOf(taxonId).intValue();
}
/**
     * Get the taxonId to use when creating the Organism Item for the new features.
* @return the taxon id
*/
public int getTaxonIdInt() {
return taxonId;
}
/**
* The genus to use when querying for features.
* @param genus the genus
*/
public void setGenus(String genus) {
this.genus = genus;
}
/**
* The species to use when querying for features.
* @param species the species
*/
public void setSpecies(String species) {
this.species = species;
}
/**
* Process the data from the Database and write to the ItemWriter.
* {@inheritDoc}
*/
@Override
public void process() throws Exception {
Connection connection;
if (getDatabase() == null) {
            // no Database when testing and no connection needed
connection = null;
} else {
connection = getDatabase().getConnection();
}
if (dataSetTitle == null) {
throw new IllegalArgumentException("dataSetTitle not set in ChadoDBConverter");
}
if (dataSourceName == null) {
throw new IllegalArgumentException("dataSourceName not set in ChadoDBConverter");
}
if (getTaxonIdInt() == -1) {
throw new IllegalArgumentException("taxonId not set in ChadoDBConverter");
}
if (species == null) {
throw new IllegalArgumentException("species not set in ChadoDBConverter");
}
if (genus == null) {
throw new IllegalArgumentException("genus not set in ChadoDBConverter");
}
chadoOrganismId = getChadoOrganismId(connection);
processFeatureTable(connection);
processPubTable(connection);
processLocationTable(connection);
processRelationTable(connection);
processDbxrefTable(connection);
processSynonymTable(connection);
processFeaturePropTable(connection);
addMissingDataEvidence();
}
private void processFeatureTable(Connection connection)
throws SQLException, ObjectStoreException {
Item dataSet = getDataSetItem(dataSetTitle); // Stores DataSet
Item dataSource = getDataSourceItem(dataSourceName); // Stores DataSource
Item organismItem = getOrganismItem(getTaxonIdInt()); // Stores Organism
ResultSet res = getFeatureResultSet(connection);
int count = 0;
while (res.next()) {
Integer featureId = new Integer(res.getInt("feature_id"));
String name = res.getString("name");
String uniqueName = res.getString("uniquename");
String type = res.getString("type");
int seqlen = 0;
if (res.getObject("seqlen") != null) {
seqlen = res.getInt("seqlen");
}
List<String> primaryIds = new ArrayList<String>();
primaryIds.add(uniqueName);
String interMineType = TypeUtil.javaiseClassName(fixFeatureType(type));
uniqueName = fixIdentifier(interMineType, uniqueName);
Item feature = makeFeature(featureId, type, interMineType, name, uniqueName, seqlen);
if (feature != null) {
FeatureData fdat = new FeatureData();
fdat.itemIdentifier = feature.getIdentifier();
fdat.uniqueName = uniqueName;
fdat.interMineType = XmlUtil.getFragmentFromURI(feature.getClassName());
feature.setReference("organism", organismItem);
MultiKey nameKey =
new MultiKey("feature", fdat.interMineType, dataSourceName, "name");
List<ConfigAction> nameActionList = getConfig().get(nameKey);
MultiKey uniqueNameKey =
new MultiKey("feature", fdat.interMineType, dataSourceName, "uniquename");
List<ConfigAction> uniqueNameActionList = getConfig().get(uniqueNameKey);
if (name != null) {
if (nameActionList == null || nameActionList.size() == 0) {
                        if (feature.checkAttribute("symbol")) {
                            feature.setAttribute("symbol", name);
                        } else {
                            // do nothing, if the name needs to go in a different attribute
                            // it will need to be configured
                        }
} else {
for (ConfigAction action: nameActionList) {
if (action instanceof SetFieldConfigAction) {
SetFieldConfigAction attrAction =
(SetFieldConfigAction) action;
feature.setAttribute(attrAction.fieldName, name);
}
}
}
}
if (uniqueNameActionList == null || uniqueNameActionList.size() == 0) {
feature.setAttribute("identifier", uniqueName);
} else {
for (ConfigAction action: uniqueNameActionList) {
if (action instanceof SetFieldConfigAction) {
SetFieldConfigAction attrAction = (SetFieldConfigAction) action;
feature.setAttribute(attrAction.fieldName, uniqueName);
}
}
}
// don't set the evidence collection - that's done by processPubTable()
fdat.intermineObjectId = store(feature); // Stores Feature
// always create a synonym for the uniquename
createSynonym(fdat, "identifier", uniqueName, true, dataSet, EMPTY_ITEM_LIST,
dataSource); // Stores Synonym
if (name != null) {
if (nameActionList == null || nameActionList.size() == 0
|| nameActionList.contains(CREATE_SYNONYM_ACTION)) {
name = fixIdentifier(interMineType, name);
if (!fdat.existingSynonyms.contains(name)) {
createSynonym(fdat, "name", name, false, dataSet, EMPTY_ITEM_LIST,
dataSource); // Stores Synonym
}
}
}
features.put(featureId, fdat);
count++;
}
}
LOG.info("created " + count + " features");
res.close();
}
    /**
     * Make a new feature from the given chado feature table data.
     * @param featureId the chado feature id
     * @param chadoFeatureType the chado feature type (a SO term)
     * @param interMineType the InterMine type of the feature
     * @param name the name
     * @param uniqueName the uniquename
     * @param seqlen the sequence length (if known)
     * @return the new feature Item, or null if this feature should be ignored
     */
protected Item makeFeature(Integer featureId, String chadoFeatureType, String interMineType,
String name, String uniqueName,
int seqlen) {
return createItem(interMineType);
}
/**
* Fix types from the feature table, perhaps by changing non-SO type into their SO equivalent.
* Types that don't need fixing will be returned unchanged.
* @param type the input type
* @return the fixed type
*/
protected String fixFeatureType(String type) {
if (type.equals("five_prime_untranslated_region")) {
return "five_prime_UTR";
} else {
if (type.equals("three_prime_untranslated_region")) {
return "three_prime_UTR";
} else {
return type;
}
}
}
private void processLocationTable(Connection connection)
throws SQLException, ObjectStoreException {
Item dataSet = getDataSetItem(dataSetTitle);
ResultSet res = getFeatureLocResultSet(connection);
int count = 0;
int featureWarnings = 0;
while (res.next()) {
Integer featureLocId = new Integer(res.getInt("featureloc_id"));
Integer featureId = new Integer(res.getInt("feature_id"));
Integer srcFeatureId = new Integer(res.getInt("srcfeature_id"));
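            // chado featureloc stores interbase coordinates (0-based start, end-exclusive), so
            // fmin + 1 gives the 1-based inclusive start and fmax is already the 1-based
            // inclusive end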
int start = res.getInt("fmin") + 1;
int end = res.getInt("fmax");
int strand = res.getInt("strand");
if (features.containsKey(srcFeatureId)) {
FeatureData srcFeatureData = features.get(srcFeatureId);
if (features.containsKey(featureId)) {
FeatureData featureData = features.get(featureId);
makeLocation(srcFeatureData.itemIdentifier, featureData.itemIdentifier,
start, end, strand, getTaxonIdInt(), dataSet); // Stores Location
count++;
} else {
if (featureWarnings <= 20) {
if (featureWarnings < 20) {
LOG.warn("featureId (" + featureId + ") from location " + featureLocId
+ " was not found in the feature table");
} else {
LOG.warn("further location warnings ignored");
}
featureWarnings++;
}
}
} else {
throw new RuntimeException("srcfeature_id (" + srcFeatureId + ") from location "
+ featureLocId + " was not found in the feature table");
}
}
LOG.info("created " + count + " locations");
res.close();
}
private void processRelationTable(Connection connection)
throws SQLException, ObjectStoreException {
ResultSet res = getFeatureRelationshipResultSet(connection);
Integer lastSubjectId = null;
// Map from relation type to Map from object type to FeatureData
Map<String, Map<String, List<FeatureData>>> relTypeMap =
new HashMap<String, Map<String, List<FeatureData>>>();
int featureWarnings = 0;
int count = 0;
int collectionTotal = 0;
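        // the query is ordered by subject_id (see getFeatureRelationshipResultSet()), so all rows
        // for one subject are contiguous; relTypeMap accumulates them and is flushed via
        // processCollectionData() each time the subject_id changes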
while (res.next()) {
Integer featRelationshipId = new Integer(res.getInt("feature_relationship_id"));
Integer subjectId = new Integer(res.getInt("subject_id"));
Integer objectId = new Integer(res.getInt("object_id"));
String relationTypeName = res.getString("type_name");
            if (lastSubjectId != null && !subjectId.equals(lastSubjectId)) {
processCollectionData(lastSubjectId, relTypeMap); // Stores stuff
collectionTotal += relTypeMap.size();
relTypeMap = new HashMap<String, Map<String, List<FeatureData>>>();
}
if (features.containsKey(subjectId)) {
if (features.containsKey(objectId)) {
FeatureData objectFeatureData = features.get(objectId);
Map<String, List<FeatureData>> objectClassFeatureDataMap;
if (relTypeMap.containsKey(relationTypeName)) {
objectClassFeatureDataMap = relTypeMap.get(relationTypeName);
} else {
objectClassFeatureDataMap = new HashMap<String, List<FeatureData>>();
relTypeMap.put(relationTypeName, objectClassFeatureDataMap);
}
List<FeatureData> featureDataList;
if (objectClassFeatureDataMap.containsKey(objectFeatureData.interMineType)) {
featureDataList =
objectClassFeatureDataMap.get(objectFeatureData.interMineType);
} else {
featureDataList = new ArrayList<FeatureData>();
objectClassFeatureDataMap.put(objectFeatureData.interMineType,
featureDataList);
}
featureDataList.add(objectFeatureData);
} else {
if (featureWarnings <= 20) {
if (featureWarnings < 20) {
LOG.warn("object_id " + objectId + " from feature_relationship "
+ featRelationshipId + " was not found in the feature table");
} else {
LOG.warn("further feature_relationship warnings ignored");
}
featureWarnings++;
}
}
} else {
if (featureWarnings <= 20) {
if (featureWarnings < 20) {
LOG.warn("subject_id " + subjectId + " from feature_relationship "
+ featRelationshipId
+ " was not found in the feature table");
} else {
LOG.warn("further feature_relationship warnings ignored");
}
featureWarnings++;
}
}
count++;
lastSubjectId = subjectId;
}
if (lastSubjectId != null) {
processCollectionData(lastSubjectId, relTypeMap); // Stores stuff
collectionTotal += relTypeMap.size();
}
LOG.info("processed " + count + " relations");
LOG.info("total collection elements created: " + collectionTotal);
res.close();
}
/**
* Create collections and references for the Item given by chadoSubjectId.
*/
private void processCollectionData(Integer chadoSubjectId,
Map<String, Map<String, List<FeatureData>>> relTypeMap)
throws ObjectStoreException {
FeatureData subjectData = features.get(chadoSubjectId);
if (subjectData == null) {
LOG.warn("unknown feature " + chadoSubjectId + " passed to processCollectionData - "
+ "ignoring");
return;
}
// map from collection name to list of item ids
Map<String, List<String>> collectionsToStore = new HashMap<String, List<String>>();
String subjectInterMineType = subjectData.interMineType;
Integer intermineItemId = subjectData.intermineObjectId;
for (Map.Entry<String, Map<String, List<FeatureData>>> entry: relTypeMap.entrySet()) {
String relationType = entry.getKey();
Map<String, List<FeatureData>> objectClassFeatureDataMap = entry.getValue();
Set<Entry<String, List<FeatureData>>> mapEntries = objectClassFeatureDataMap.entrySet();
for (Map.Entry<String, List<FeatureData>> featureDataMap: mapEntries) {
String objectClass = featureDataMap.getKey();
List<FeatureData> featureDataCollection = featureDataMap.getValue();
ClassDescriptor cd = model.getClassDescriptorByName(subjectInterMineType);
List<FieldDescriptor> fds = null;
FeatureData subjectFeatureData = features.get(chadoSubjectId);
// key example: ("relationship", "Translation", "producedby", "MRNA")
MultiKey key = new MultiKey("relationship", subjectFeatureData.interMineType,
relationType, objectClass);
List<ConfigAction> actionList = getConfig().get(key);
if (actionList != null) {
if (actionList.size() == 0
|| actionList.size() == 1 && actionList.get(0) instanceof DoNothingAction) {
// do nothing
continue;
}
fds = new ArrayList<FieldDescriptor>();
for (ConfigAction action: actionList) {
if (action instanceof SetFieldConfigAction) {
SetFieldConfigAction setAction = (SetFieldConfigAction) action;
String fieldName = setAction.fieldName;
FieldDescriptor fd = cd.getFieldDescriptorByName(fieldName);
if (fd == null) {
throw new RuntimeException("can't find field " + fieldName
+ " in class " + cd + " configured for "
+ key);
} else {
fds.add(fd);
}
}
}
if (fds.size() == 0) {
throw new RuntimeException("no actions found for " + key);
}
} else {
if (PARTOF_RELATIONS.contains(relationType)) {
// special case for part_of relations - try to find a reference or
// collection that has a name that looks right for these objects (of class
// objectClass). eg. If the subject is a Transcript and the objectClass
// is Exon then find collections called "exons", "geneParts" (GenePart is
// a superclass of Exon)
fds = getReferenceForRelationship(objectClass, cd);
} else {
continue;
}
}
if (fds.size() == 0) {
LOG.error("can't find collection for type " + relationType
+ " in " + subjectInterMineType + " while processing feature "
+ chadoSubjectId);
continue;
}
for (FieldDescriptor fd: fds) {
if (fd.isReference()) {
if (objectClassFeatureDataMap.size() > 1) {
throw new RuntimeException("found more than one object for reference "
+ fd + " in class "
+ subjectInterMineType
+ " current subject identifier: "
+ subjectData.uniqueName);
} else {
if (objectClassFeatureDataMap.size() == 1) {
Reference reference = new Reference();
reference.setName(fd.getName());
FeatureData referencedFeatureData = featureDataCollection.get(0);
reference.setRefId(referencedFeatureData.itemIdentifier);
store(reference, intermineItemId); // Stores Reference for Feature
// special case for 1-1 relations - we need to set the reverse
// reference
ReferenceDescriptor rd = (ReferenceDescriptor) fd;
ReferenceDescriptor reverseRD = rd.getReverseReferenceDescriptor();
if (reverseRD != null && !reverseRD.isCollection()) {
Reference revReference = new Reference();
revReference.setName(reverseRD.getName());
revReference.setRefId(subjectData.itemIdentifier);
store(revReference, referencedFeatureData.intermineObjectId);
}
}
}
} else {
List<String> itemIds;
if (collectionsToStore.containsKey(fd.getName())) {
itemIds = collectionsToStore.get(fd.getName());
} else {
itemIds = new ArrayList<String>();
collectionsToStore.put(fd.getName(), itemIds);
}
for (FeatureData featureData: featureDataCollection) {
itemIds.add(featureData.itemIdentifier);
}
}
}
}
}
for (Map.Entry<String, List<String>> entry: collectionsToStore.entrySet()) {
ReferenceList referenceList = new ReferenceList();
referenceList.setName(entry.getKey());
referenceList.setRefIds(entry.getValue());
store(referenceList, intermineItemId); // Stores ReferenceList for Feature
}
}
/**
     * Search the class given by ClassDescriptor cd for references/collections with the right name
     * for the objectType, eg. find the CDSs collection for objectType = CDS and the gene reference
     * for objectType = Gene. (A worked example follows the method body.)
*/
private List<FieldDescriptor> getReferenceForRelationship(String objectType,
ClassDescriptor cd) {
List<FieldDescriptor> fds = new ArrayList<FieldDescriptor>();
LinkedHashSet<String> allClasses = new LinkedHashSet<String>();
allClasses.add(objectType);
try {
Set<String> parentClasses = ClassDescriptor.findSuperClassNames(model, objectType);
allClasses.addAll(parentClasses);
} catch (MetaDataException e) {
throw new RuntimeException("class not found in the model", e);
}
for (String clsName: allClasses) {
List<String> possibleRefNames = new ArrayList<String>();
String unqualifiedClsName = TypeUtil.unqualifiedName(clsName);
possibleRefNames.add(unqualifiedClsName);
possibleRefNames.add(unqualifiedClsName + 's');
possibleRefNames.add(StringUtil.decapitalise(unqualifiedClsName));
possibleRefNames.add(StringUtil.decapitalise(unqualifiedClsName) + 's');
for (String possibleRefName: possibleRefNames) {
FieldDescriptor fd = cd.getFieldDescriptorByName(possibleRefName);
if (fd != null) {
fds.add(fd);
}
}
}
return fds;
}
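    /*
     * Worked example (illustrative): for objectType "Exon" the candidate names tried against the
     * subject's ClassDescriptor are "Exon", "Exons", "exon" and "exons", followed by the same four
     * variants for each superclass of Exon in the model (eg. "GenePart"/"geneParts"), so a
     * Transcript subject would match its "exons" collection.
     */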
private void processDbxrefTable(Connection connection)
throws SQLException, ObjectStoreException {
Item dataSource = getDataSourceItem(dataSourceName);
Item dataSet = getDataSetItem(dataSetTitle);
ResultSet res = getDbxrefResultSet(connection);
Set<String> existingAttributes = new HashSet<String>();
Integer currentFeatureId = null;
int count = 0;
while (res.next()) {
Integer featureId = new Integer(res.getInt("feature_id"));
String accession = res.getString("accession");
String dbName = res.getString("db_name");
Boolean isCurrent = res.getBoolean("is_current");
            if (currentFeatureId != null && !currentFeatureId.equals(featureId)) {
existingAttributes = new HashSet<String>();
}
if (features.containsKey(featureId)) {
FeatureData fdat = features.get(featureId);
accession = fixIdentifier(fdat.interMineType, accession);
MultiKey key = new MultiKey("dbxref", fdat.interMineType, dbName, isCurrent);
List<ConfigAction> actionList = getConfig().get(key);
if (actionList == null) {
// try ignoring isCurrent
MultiKey key2 = new MultiKey("dbxref", fdat.interMineType, dbName, null);
actionList = getConfig().get(key2);
}
if (actionList == null) {
// no actions configured for this synonym
continue;
}
for (ConfigAction action: actionList) {
if (action instanceof SetFieldConfigAction) {
SetFieldConfigAction setAction = (SetFieldConfigAction) action;
if (!existingAttributes.contains(setAction.fieldName)) {
setAttribute(fdat, setAction.fieldName, accession); // Stores
// Attribute for Feature
existingAttributes.add(setAction.fieldName);
}
} else {
if (action instanceof CreateSynonymAction) {
if (fdat.existingSynonyms.contains(accession)) {
continue;
} else {
createSynonym(fdat, "identifier", accession, false, dataSet,
EMPTY_ITEM_LIST, dataSource); // Stores Synonym
count++;
}
}
}
}
}
currentFeatureId = featureId;
}
LOG.info("created " + count + " synonyms from the dbxref table");
res.close();
}
private void processFeaturePropTable(Connection connection)
throws SQLException, ObjectStoreException {
Item dataSource = getDataSourceItem(dataSourceName);
Item dataSet = getDataSetItem(dataSetTitle);
ResultSet res = getFeaturePropResultSet(connection);
int count = 0;
while (res.next()) {
Integer featureId = new Integer(res.getInt("feature_id"));
String identifier = res.getString("value");
String propTypeName = res.getString("type_name");
if (features.containsKey(featureId)) {
FeatureData fdat = features.get(featureId);
MultiKey key = new MultiKey("prop", fdat.interMineType, propTypeName);
List<ConfigAction> actionList = getConfig().get(key);
if (actionList == null) {
// no actions configured for this prop
continue;
}
for (ConfigAction action: actionList) {
if (action instanceof SetFieldConfigAction) {
SetFieldConfigAction setAction = (SetFieldConfigAction) action;
setAttribute(fdat, setAction.fieldName, identifier); // Stores
// Attribute for Feature
} else {
if (action instanceof CreateSynonymAction) {
CreateSynonymAction synonymAction = (CreateSynonymAction) action;
Set<String> existingSynonyms = fdat.existingSynonyms;
if (existingSynonyms.contains(identifier)) {
continue;
} else {
String synonymType = synonymAction.synonymType;
if (synonymType == null) {
synonymType = propTypeName;
}
createSynonym(fdat, synonymType, identifier, false, dataSet,
EMPTY_ITEM_LIST, dataSource); // Stores Synonym
count++;
}
}
}
}
}
}
LOG.info("created " + count + " synonyms from the featureprop table");
res.close();
}
private void processSynonymTable(Connection connection)
throws SQLException, ObjectStoreException {
Item dataSource = getDataSourceItem(dataSourceName);
Item dataSet = getDataSetItem(dataSetTitle);
ResultSet res = getSynonymResultSet(connection);
Set<String> existingAttributes = new HashSet<String>();
Integer currentFeatureId = null;
int count = 0;
while (res.next()) {
Integer featureId = new Integer(res.getInt("feature_id"));
String identifier = res.getString("synonym_name");
String synonymTypeName = res.getString("type_name");
Boolean isCurrent = res.getBoolean("is_current");
identifier = fixIdentifier(synonymTypeName, identifier);
            if (currentFeatureId != null && !currentFeatureId.equals(featureId)) {
existingAttributes = new HashSet<String>();
}
if (features.containsKey(featureId)) {
FeatureData fdat = features.get(featureId);
MultiKey key =
new MultiKey("synonym", fdat.interMineType, synonymTypeName, isCurrent);
List<ConfigAction> actionList = getConfig().get(key);
if (actionList == null) {
// try ignoring isCurrent
MultiKey key2 =
new MultiKey("synonym", fdat.interMineType, synonymTypeName, null);
actionList = getConfig().get(key2);
}
if (actionList == null) {
// no actions configured for this synonym
continue;
}
for (ConfigAction action: actionList) {
if (action instanceof SetFieldConfigAction) {
SetFieldConfigAction setAction = (SetFieldConfigAction) action;
if (!existingAttributes.contains(setAction.fieldName)) {
setAttribute(fdat, setAction.fieldName, identifier); // Stores
// Attribute for Feature
existingAttributes.add(setAction.fieldName);
}
} else {
if (action instanceof CreateSynonymAction) {
if (fdat.existingSynonyms.contains(identifier)) {
continue;
} else {
createSynonym(fdat, synonymTypeName, identifier, false, dataSet,
EMPTY_ITEM_LIST, dataSource); // Stores Synonym
count++;
}
}
}
}
}
currentFeatureId = featureId;
}
LOG.info("created " + count + " synonyms from the synonym table");
res.close();
}
    /**
     * Process the identifier and return a "cleaned" version. Implement in sub-classes to fix
     * data problems, eg. by stripping a "type:" prefix as the default implementation below does.
     * @param type the (SO) type of the feature that this identifier came from
     * @param identifier the identifier
     * @return a cleaned identifier
     */
protected String fixIdentifier(String type, String identifier) {
/*
* default implementation should be: return identifier
*/
// XXX FIXME TODO - for wormbase - move to WormBaseDBConverter
if (identifier.startsWith(type + ":")) {
return identifier.substring(type.length() + 1);
} else {
return identifier;
}
}
/**
* Set an attribute in an Item by creating an Attribute object and storing it.
* @param fdat the data about the feature
* @param attributeName the attribute name
* @param value the value to set
*/
private void setAttribute(FeatureData fdat, String attributeName, String value)
throws ObjectStoreException {
Attribute att = new Attribute();
att.setName(attributeName);
att.setValue(value);
store(att, fdat.intermineObjectId);
}
private void processPubTable(Connection connection)
throws SQLException, ObjectStoreException {
ResultSet res = getPubResultSet(connection);
List<String> currentEvidenceIds = new ArrayList<String>();
Integer lastPubFeatureId = null;
int featureWarnings = 0;
int count = 0;
Map<String, String> pubs = new HashMap<String, String>();
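        // rows are ordered by feature_id (see getPubResultSet()), so publications for one feature
        // are contiguous; currentEvidenceIds is flushed into the feature's publications/evidence
        // collections whenever the feature_id changes, and once more after the loop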
while (res.next()) {
Integer featureId = new Integer(res.getInt("feature_id"));
if (!features.containsKey(featureId)) {
if (featureWarnings <= 20) {
if (featureWarnings < 20) {
LOG.warn("feature " + featureId + " not found in features Map while "
+ "processing publications");
} else {
LOG.warn("further feature id warnings ignored in processPubTable()");
}
featureWarnings++;
}
continue;
}
String pubMedId = res.getString("pub_db_identifier");
if (lastPubFeatureId != null && !featureId.equals(lastPubFeatureId)) {
makeFeaturePublications(lastPubFeatureId, currentEvidenceIds);
makeFeatureEvidence(lastPubFeatureId, currentEvidenceIds); // Stores ReferenceList
currentEvidenceIds = new ArrayList<String>();
}
String publicationId;
if (pubs.containsKey(pubMedId)) {
publicationId = pubs.get(pubMedId);
} else {
Item publication = createItem("Publication");
publication.setAttribute("pubMedId", pubMedId);
store(publication); // Stores Publication
publicationId = publication.getIdentifier();
pubs.put(pubMedId, publicationId);
}
currentEvidenceIds.add(publicationId);
lastPubFeatureId = featureId;
count++;
}
if (lastPubFeatureId != null) {
makeFeaturePublications(lastPubFeatureId, currentEvidenceIds);
makeFeatureEvidence(lastPubFeatureId, currentEvidenceIds);
}
LOG.info("Created " + count + " publications");
res.close();
}
/**
* Set the publications collection of the feature with the given (chado) feature id.
*/
private void makeFeaturePublications(Integer featureId, List<String> argPublicationIds)
throws ObjectStoreException {
FeatureData fdat = features.get(featureId);
if (fdat == null) {
throw new RuntimeException("feature " + featureId + " not found in features Map");
}
if (!fdat.interMineType.equals("Gene")) {
// only Gene has a publications collection
return;
}
List<String> publicationIds = new ArrayList<String>(argPublicationIds);
ReferenceList referenceList = new ReferenceList();
referenceList.setName("publications");
referenceList.setRefIds(publicationIds);
store(referenceList, fdat.intermineObjectId);
}
/**
* Set the evidence collection of the feature with the given (chado) feature id.
*/
private void makeFeatureEvidence(Integer featureId, List<String> argEvidenceIds)
throws ObjectStoreException {
FeatureData fdat = features.get(featureId);
if (fdat == null) {
throw new RuntimeException("feature " + featureId + " not found in features Map");
}
List<String> evidenceIds = new ArrayList<String>(argEvidenceIds);
Item dataSet = getDataSetItem(dataSetTitle);
evidenceIds.add(0, dataSet.getIdentifier());
ReferenceList referenceList = new ReferenceList();
referenceList.setName("evidence");
referenceList.setRefIds(evidenceIds);
store(referenceList, fdat.intermineObjectId);
fdat.flags |= FeatureData.EVIDENCE_CREATED;
}
/**
     * For those features in the features Map that don't yet have an evidence collection, create
     * one containing the DataSet. A feature is known to have no evidence collection if its
     * EVIDENCE_CREATED flag is not set.
*/
private void addMissingDataEvidence() throws ObjectStoreException {
List<String> emptyList = Collections.emptyList();
for (Map.Entry<Integer, FeatureData> entry: features.entrySet()) {
Integer featureId = entry.getKey();
FeatureData featureData = entry.getValue();
if ((featureData.flags & FeatureData.EVIDENCE_CREATED) == 0) {
makeFeatureEvidence(featureId, emptyList);
}
}
}
/**
* Return the interesting rows from the features table.
     * This is a protected method so that it can be overridden for testing
* @param connection the db connection
* @return the SQL result set
* @throws SQLException if a database problem occurs
*/
protected ResultSet getFeatureResultSet(Connection connection)
throws SQLException {
String query =
"SELECT feature_id, feature.name, uniquename, cvterm.name as type, seqlen, is_analysis"
+ " FROM feature, cvterm"
+ " WHERE feature.type_id = cvterm.cvterm_id"
+ " AND cvterm.name IN (" + featureTypesString + ")"
+ " AND organism_id = " + chadoOrganismId
+ " AND NOT feature.is_obsolete";
LOG.info("executing: " + query);
Statement stmt = connection.createStatement();
ResultSet res = stmt.executeQuery(query);
return res;
}
/**
     * Return the chado organism id for the given genus/species. This is a protected method so
     * that it can be overridden for testing
     * @param connection the db connection
     * @return the internal id (organism_id from the organism table)
     * @throws SQLException if there is a database problem
*/
protected int getChadoOrganismId(Connection connection)
throws SQLException {
String query = "select organism_id from organism where genus = "
+ DatabaseUtil.objectToString(genus) + " and species = "
+ DatabaseUtil.objectToString(species);
LOG.info("executing: " + query);
Statement stmt = connection.createStatement();
ResultSet res = stmt.executeQuery(query);
if (res.next()) {
return res.getInt(1);
} else {
throw new RuntimeException("no rows returned when querying organism table for genus \""
+ genus + "\" and species \"" + species + "\"");
}
}
/**
     * Return a SQL query string that gets all non-obsolete interesting features.
*/
private String getFeatureIdQuery() {
return
" SELECT feature_id FROM feature, cvterm"
+ " WHERE cvterm.name IN (" + featureTypesString + ")"
+ " AND organism_id = " + chadoOrganismId
+ " AND NOT feature.is_obsolete"
+ " AND feature.type_id = cvterm.cvterm_id";
}
/**
* Return the interesting rows from the feature_relationship table.
     * This is a protected method so that it can be overridden for testing
* @param connection the db connection
* @return the SQL result set
* @throws SQLException if a database problem occurs
*/
protected ResultSet getFeatureRelationshipResultSet(Connection connection) throws SQLException {
String query =
"SELECT feature_relationship_id, subject_id, object_id, cvterm.name AS type_name"
+ " FROM feature_relationship, cvterm"
+ " WHERE cvterm.cvterm_id = type_id"
+ " AND subject_id IN (" + getFeatureIdQuery() + ")"
+ " AND object_id IN (" + getFeatureIdQuery() + ")"
+ " ORDER BY subject_id";
LOG.info("executing: " + query);
Statement stmt = connection.createStatement();
ResultSet res = stmt.executeQuery(query);
return res;
}
/**
* Return the interesting rows from the featureloc table.
     * This is a protected method so that it can be overridden for testing
* @param connection the db connection
* @return the SQL result set
* @throws SQLException if a database problem occurs
*/
protected ResultSet getFeatureLocResultSet(Connection connection) throws SQLException {
String query =
"SELECT featureloc_id, feature_id, srcfeature_id, fmin, is_fmin_partial,"
+ " fmax, is_fmax_partial, strand"
+ " FROM featureloc"
+ " WHERE feature_id IN"
+ " (" + getFeatureIdQuery() + ")"
+ " AND srcfeature_id IN"
+ " (" + getFeatureIdQuery() + ")"
+ " AND locgroup = 0";
LOG.info("executing: " + query);
Statement stmt = connection.createStatement();
ResultSet res = stmt.executeQuery(query);
return res;
}
/**
* Return the interesting rows from the dbxref table.
     * This is a protected method so that it can be overridden for testing
* @param connection the db connection
* @return the SQL result set
* @throws SQLException if a database problem occurs
*/
protected ResultSet getDbxrefResultSet(Connection connection) throws SQLException {
String query =
"SELECT feature.feature_id, accession, db.name AS db_name, is_current"
+ " FROM dbxref, feature_dbxref, feature, db"
+ " WHERE feature_dbxref.dbxref_id = dbxref.dbxref_id "
+ " AND feature_dbxref.feature_id = feature.feature_id "
+ " AND feature.feature_id IN"
+ " (" + getFeatureIdQuery() + ")"
+ " AND dbxref.db_id = db.db_id";
LOG.info("executing: " + query);
Statement stmt = connection.createStatement();
ResultSet res = stmt.executeQuery(query);
return res;
}
/**
* Return the interesting rows from the featureprop table.
     * This is a protected method so that it can be overridden for testing
* @param connection the db connection
* @return the SQL result set
* @throws SQLException if a database problem occurs
*/
protected ResultSet getFeaturePropResultSet(Connection connection) throws SQLException {
String query =
"select feature_id, value, cvterm.name AS type_name FROM featureprop, cvterm"
+ " WHERE featureprop.type_id = cvterm.cvterm_id"
+ " AND feature_id IN (" + getFeatureIdQuery() + ")";
LOG.info("executing: " + query);
Statement stmt = connection.createStatement();
ResultSet res = stmt.executeQuery(query);
return res;
}
/**
* Return the interesting rows from the synonym table.
     * This is a protected method so that it can be overridden for testing
* @param connection the db connection
* @return the SQL result set
* @throws SQLException if a database problem occurs
*/
protected ResultSet getSynonymResultSet(Connection connection) throws SQLException {
String query =
"SELECT DISTINCT feature_id, synonym.name AS synonym_name,"
+ " cvterm.name AS type_name, is_current"
+ " FROM feature_synonym, synonym, cvterm"
+ " WHERE feature_synonym.synonym_id = synonym.synonym_id"
+ " AND synonym.type_id = cvterm.cvterm_id"
+ " AND feature_id IN (" + getFeatureIdQuery() + ")";
LOG.info("executing: " + query);
Statement stmt = connection.createStatement();
ResultSet res = stmt.executeQuery(query);
return res;
}
/**
* Return the interesting rows from the pub table.
     * This is a protected method so that it can be overridden for testing
* @param connection the db connection
* @return the SQL result set
* @throws SQLException if a database problem occurs
*/
protected ResultSet getPubResultSet(Connection connection) throws SQLException {
String query =
"SELECT DISTINCT feature_pub.feature_id, dbxref.accession as pub_db_identifier"
+ " FROM feature_pub, dbxref, db, pub, pub_dbxref"
+ " WHERE feature_pub.pub_id = pub.pub_id"
+ " AND pub_dbxref.dbxref_id = dbxref.dbxref_id"
+ " AND dbxref.db_id = db.db_id"
+ " AND pub.pub_id = pub_dbxref.pub_id"
+ " AND db.name = 'pubmed'"
+ " AND feature_id IN (" + getFeatureIdQuery() + ")"
+ " ORDER BY feature_pub.feature_id";
LOG.info("executing: " + query);
Statement stmt = connection.createStatement();
ResultSet res = stmt.executeQuery(query);
return res;
}
/**
* Call super.createSynonym(), store the Item then record in fdat that we've created it.
*/
private Item createSynonym(FeatureData fdat, String type, String identifier,
boolean isPrimary, Item dataSet, List<Item> otherEvidence,
Item dataSource)
throws ObjectStoreException {
if (fdat.existingSynonyms.contains(identifier)) {
throw new IllegalArgumentException("feature identifier " + identifier
+ " is already a synonym for: "
+ fdat.existingSynonyms);
}
List<Item> allEvidence = new ArrayList<Item>();
allEvidence.add(dataSet);
allEvidence.addAll(otherEvidence);
Item returnItem = createSynonym(fdat.itemIdentifier, type, identifier, isPrimary,
allEvidence, dataSource);
fdat.existingSynonyms.add(identifier);
return returnItem;
}
}
|
package org.intermine.bio.dataconversion;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.Map.Entry;
import org.intermine.dataconversion.ItemWriter;
import org.intermine.metadata.ClassDescriptor;
import org.intermine.metadata.FieldDescriptor;
import org.intermine.metadata.MetaDataException;
import org.intermine.metadata.Model;
import org.intermine.metadata.ReferenceDescriptor;
import org.intermine.objectstore.ObjectStoreException;
import org.intermine.sql.Database;
import org.intermine.sql.DatabaseUtil;
import org.intermine.util.StringUtil;
import org.intermine.util.TypeUtil;
import org.intermine.util.XmlUtil;
import org.intermine.xml.full.Attribute;
import org.intermine.xml.full.Item;
import org.intermine.xml.full.Reference;
import org.intermine.xml.full.ReferenceList;
import org.flymine.model.genomic.LocatedSequenceFeature;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import org.apache.commons.collections.keyvalue.MultiKey;
import org.apache.commons.collections.map.MultiKeyMap;
import org.apache.log4j.Logger;
/**
* DataConverter to read from a Chado database into items
* @author Kim Rutherford
*/
public class ChadoDBConverter extends BioDBConverter
{
/**
* Data about one feature from the feature table in chado. This exists to avoid having lots of
* Item objects in memory.
*
* @author Kim Rutherford
*/
protected static class FeatureData
{
private String uniqueName;
private String chadoFeatureName;
// the synonyms that have already been created
private Set<String> existingSynonyms = new HashSet<String>();
private String itemIdentifier;
private String interMineType;
private Integer intermineObjectId;
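        // "flags" is a small bit set: EVIDENCE_CREATED records that an evidence ReferenceList has
        // been stored for this feature, and IDENTIFIER_SET records that the identifier attribute
        // has already been filled in from the name/uniquename configuration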
short flags = 0;
static final short EVIDENCE_CREATED_BIT = 0;
static final short EVIDENCE_CREATED = 1 << EVIDENCE_CREATED_BIT;
static final short IDENTIFIER_SET_BIT = 1;
static final short IDENTIFIER_SET = 1 << IDENTIFIER_SET_BIT;
/**
* Return the id of the Item representing this feature.
* @return the ID
*/
public Integer getIntermineObjectId() {
return intermineObjectId;
}
/**
* Get the String read from the name column of the feature table.
* @return the name
*/
public String getChadoFeatureName() {
return chadoFeatureName;
}
/**
* Get the String read from the uniquename column of the feature table.
* @return the uniquename
*/
public String getChadoFeatureUniqueName() {
return uniqueName;
}
}
protected static final Logger LOG = Logger.getLogger(ChadoDBConverter.class);
private Map<Integer, FeatureData> features = new HashMap<Integer, FeatureData>();
private String dataSourceName;
private String dataSetTitle;
private int taxonId = -1;
private String genus;
private String species;
private int chadoOrganismId;
private Model model = Model.getInstanceByName("genomic");
private MultiKeyMap config = null;
private static final List<String> PARTOF_RELATIONS = Arrays.asList("partof", "part_of");
private static final List<Item> EMPTY_ITEM_LIST = Collections.emptyList();
private static final List<String> FEATURES = Arrays.asList(
"gene", "mRNA", "transcript",
"CDS", "intron", "exon",
"five_prime_untranslated_region",
"five_prime_UTR", "three_prime_untranslated_region",
"three_prime_UTR"
);
private static final List<String> CHROMOSOME_FEATURES =
Arrays.asList("chromosome", "chromosome_arm");
/**
* A class that represents an action while processing synonyms, dbxrefs, etc.
* @author Kim Rutherford
*/
protected static class ConfigAction
{
protected ConfigAction() {
// empty
}
}
/**
* An action that sets an attribute in a new Item.
*/
protected static class SetFieldConfigAction extends ConfigAction
{
private String thefieldName;
SetFieldConfigAction() {
thefieldName = null;
}
SetFieldConfigAction(String fieldName) {
this.thefieldName = fieldName;
}
/**
* Return the field name that was passed to the constructor.
* @return the field name
*/
public String getFieldName() {
return thefieldName;
}
}
/**
* An action that sets a Synonym.
*/
protected static class CreateSynonymAction extends ConfigAction
{
private String synonymType;
// make a synonym and use the type from chado ("symbol", "identifier" etc.) as the Synonym
// type
CreateSynonymAction() {
synonymType = null;
}
// make a synonym and use given type as the Synonym type
CreateSynonymAction(String synonymType) {
this.synonymType = synonymType;
}
}
private static class DoNothingAction extends ConfigAction
{
// do nothing for this data
}
/**
     * An action that makes a synonym.
*/
protected static final ConfigAction CREATE_SYNONYM_ACTION = new CreateSynonymAction();
protected static final ConfigAction DO_NOTHING_ACTION = new DoNothingAction();
/**
* Create a new ChadoDBConverter object.
* @param database the database to read from
* @param tgtModel the Model used by the object store we will write to with the ItemWriter
* @param writer an ItemWriter used to handle the resultant Items
*/
public ChadoDBConverter(Database database, Model tgtModel, ItemWriter writer) {
super(database, tgtModel, writer);
}
@SuppressWarnings("unchecked")
protected Map<MultiKey, List<ConfigAction>> getConfig() {
if (config == null) {
config = new MultiKeyMap();
}
return config;
}
/**
* Set the name of the DataSet Item to create for this converter.
* @param title the title
*/
public void setDataSetTitle(String title) {
this.dataSetTitle = title;
}
/**
* Set the name of the DataSource Item to create for this converter.
* @param name the name
*/
public void setDataSourceName(String name) {
this.dataSourceName = name;
}
/**
* Set the taxonId to use when creating the Organism Item for the new features.
* @param taxonId the taxon id
*/
public void setTaxonId(String taxonId) {
this.taxonId = Integer.valueOf(taxonId).intValue();
}
/**
     * Get the taxonId to use when creating the Organism Item for the new features.
* @return the taxon id
*/
public int getTaxonIdInt() {
return taxonId;
}
/**
* The genus to use when querying for features.
* @param genus the genus
*/
public void setGenus(String genus) {
this.genus = genus;
}
/**
* The species to use when querying for features.
* @param species the species
*/
public void setSpecies(String species) {
this.species = species;
}
/**
* Convert the list of features to a string to be used in a SQL query. The String will include
* the chromosome and chromosome_arm feature types.
* @return the list of features as a string (in SQL list format)
*/
private String getFeaturesString() {
List<String> features = new ArrayList<String>(getFeatures());
features.addAll(CHROMOSOME_FEATURES);
StringBuffer featureListString = new StringBuffer();
Iterator<String> i = features.iterator();
while (i.hasNext()) {
String item = i.next();
featureListString.append("'" + item + "'");
if (i.hasNext()) {
featureListString.append(", ");
            }
}
return featureListString.toString();
}
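    /*
     * Illustrative example: with the default getFeatures() list and CHROMOSOME_FEATURES, the
     * returned String is of the form
     *   'gene', 'mRNA', 'transcript', 'CDS', 'intron', 'exon', 'five_prime_untranslated_region',
     *   'five_prime_UTR', 'three_prime_untranslated_region', 'three_prime_UTR', 'chromosome',
     *   'chromosome_arm'
     * ready to be dropped into a SQL "cvterm.name IN (...)" clause.
     */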
/**
* Process the data from the Database and write to the ItemWriter.
* {@inheritDoc}
*/
@Override
public void process() throws Exception {
Connection connection;
if (getDatabase() == null) {
// no Database when testing and no connection needed
connection = null;
} else {
connection = getDatabase().getConnection();
}
if (dataSetTitle == null) {
throw new IllegalArgumentException("dataSetTitle not set in ChadoDBConverter");
}
if (dataSourceName == null) {
throw new IllegalArgumentException("dataSourceName not set in ChadoDBConverter");
}
if (getTaxonIdInt() == -1) {
throw new IllegalArgumentException("taxonId not set in ChadoDBConverter");
}
if (species == null) {
throw new IllegalArgumentException("species not set in ChadoDBConverter");
}
if (genus == null) {
throw new IllegalArgumentException("genus not set in ChadoDBConverter");
}
chadoOrganismId = getChadoOrganismId(connection);
processFeatureTable(connection);
processPubTable(connection);
processLocationTable(connection);
processRelationTable(connection);
processDbxrefTable(connection);
processSynonymTable(connection);
processFeaturePropTable(connection);
addMissingDataEvidence();
extraProcessing(features);
}
private void processFeatureTable(Connection connection)
throws SQLException, ObjectStoreException {
Item dataSet = getDataSetItem(dataSetTitle); // Stores DataSet
Item dataSource = getDataSourceItem(dataSourceName); // Stores DataSource
Item organismItem = getOrganismItem(getTaxonIdInt()); // Stores Organism
ResultSet res = getFeatureResultSet(connection);
int count = 0;
while (res.next()) {
Integer featureId = new Integer(res.getInt("feature_id"));
String name = res.getString("name");
String uniqueName = res.getString("uniquename");
String type = res.getString("type");
String residues = res.getString("residues");
int seqlen = 0;
if (res.getObject("seqlen") != null) {
seqlen = res.getInt("seqlen");
}
List<String> primaryIds = new ArrayList<String>();
primaryIds.add(uniqueName);
String interMineType = TypeUtil.javaiseClassName(fixFeatureType(type));
uniqueName = fixIdentifier(interMineType, uniqueName);
Item feature = makeFeature(featureId, type, interMineType, name, uniqueName, seqlen);
if (feature != null) {
FeatureData fdat = new FeatureData();
fdat.itemIdentifier = feature.getIdentifier();
fdat.uniqueName = uniqueName;
fdat.chadoFeatureName = name;
fdat.interMineType = XmlUtil.getFragmentFromURI(feature.getClassName());
feature.setReference("organism", organismItem);
MultiKey nameKey =
new MultiKey("feature", fdat.interMineType, dataSourceName, "name");
List<ConfigAction> nameActionList = getConfig().get(nameKey);
if (name != null) {
if (nameActionList == null || nameActionList.size() == 0) {
if (feature.checkAttribute("symbol")) {
feature.setAttribute("symbol", name);
} else {
// do nothing, if the name needs to go in a different attribute
// it will need to be configured
}
} else {
for (ConfigAction action: nameActionList) {
if (action instanceof SetFieldConfigAction) {
SetFieldConfigAction attrAction =
(SetFieldConfigAction) action;
feature.setAttribute(attrAction.getFieldName(), name);
if (attrAction.getFieldName().equals("identifier")) {
fdat.flags |= FeatureData.IDENTIFIER_SET;
}
}
}
}
}
MultiKey uniqueNameKey =
new MultiKey("feature", fdat.interMineType, dataSourceName, "uniquename");
List<ConfigAction> uniqueNameActionList = getConfig().get(uniqueNameKey);
if (uniqueNameActionList == null || uniqueNameActionList.size() == 0) {
feature.setAttribute("identifier", uniqueName);
fdat.flags |= FeatureData.IDENTIFIER_SET;
} else {
for (ConfigAction action: uniqueNameActionList) {
if (action instanceof SetFieldConfigAction) {
SetFieldConfigAction attrAction = (SetFieldConfigAction) action;
feature.setAttribute(attrAction.getFieldName(), uniqueName);
if (attrAction.getFieldName().equals("identifier")) {
fdat.flags |= FeatureData.IDENTIFIER_SET;
}
}
}
}
if (feature.canReference("sequence") && residues != null && residues.length() > 0) {
Item sequence = createItem("Sequence");
sequence.setAttribute("residues", residues);
sequence.setAttribute("length", String.valueOf(seqlen));
feature.setReference("sequence", sequence);
store(sequence);
}
// don't set the evidence collection - that's done by processPubTable()
fdat.intermineObjectId = store(feature); // Stores Feature
// always create a synonym for the uniquename
createSynonym(fdat, "identifier", uniqueName, true, dataSet, EMPTY_ITEM_LIST,
dataSource); // Stores Synonym
if (name != null) {
if (nameActionList == null || nameActionList.size() == 0
|| nameActionList.contains(CREATE_SYNONYM_ACTION)) {
name = fixIdentifier(interMineType, name);
if (!fdat.existingSynonyms.contains(name)) {
createSynonym(fdat, "name", name, false, dataSet, EMPTY_ITEM_LIST,
dataSource); // Stores Synonym
}
}
}
features.put(featureId, fdat);
count++;
}
}
LOG.info("created " + count + " features");
res.close();
}
/**
* Make a new feature
* @param featureId the chado feature id
* @param chadoFeatureType the chado feature type (a SO term)
* @param interMineType the InterMine type of the feature
* @param name the name
* @param uniqueName the uniquename
     * @param seqlen the sequence length (if known)
     * @return the new feature Item, or null if this feature should be ignored
     */
protected Item makeFeature(Integer featureId, String chadoFeatureType, String interMineType,
String name, String uniqueName,
int seqlen) {
return createItem(interMineType);
}
/**
* Get a list of the chado/so types of the LocatedSequenceFeatures we wish to load. The list
* will not include chromosome-like features (eg. "chromosome" and "chromosome_arm").
* @return the list of features
*/
protected List<String> getFeatures() {
return FEATURES;
}
/**
* Fix types from the feature table, perhaps by changing non-SO type into their SO equivalent.
* Types that don't need fixing will be returned unchanged.
* @param type the input type
* @return the fixed type
*/
protected String fixFeatureType(String type) {
if (type.equals("five_prime_untranslated_region")) {
return "five_prime_UTR";
} else {
if (type.equals("three_prime_untranslated_region")) {
return "three_prime_UTR";
} else {
return type;
}
}
}
/**
* Do any extra processing for this database, after all other processing is done
     * @param featureDataMap a map from chado feature_id to data for that feature
*/
protected void extraProcessing(@SuppressWarnings("unused")
Map<Integer, FeatureData> featureDataMap)
throws ObjectStoreException {
// override in subclasses as necessary
}
private void processLocationTable(Connection connection)
throws SQLException, ObjectStoreException {
Item dataSet = getDataSetItem(dataSetTitle);
ResultSet res = getFeatureLocResultSet(connection);
int count = 0;
int featureWarnings = 0;
while (res.next()) {
Integer featureLocId = new Integer(res.getInt("featureloc_id"));
Integer featureId = new Integer(res.getInt("feature_id"));
Integer srcFeatureId = new Integer(res.getInt("srcfeature_id"));
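            // chado featureloc stores interbase coordinates (0-based start, end-exclusive), so
            // fmin + 1 gives the 1-based inclusive start and fmax is already the 1-based
            // inclusive end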
int start = res.getInt("fmin") + 1;
int end = res.getInt("fmax");
int strand = res.getInt("strand");
if (features.containsKey(srcFeatureId)) {
FeatureData srcFeatureData = features.get(srcFeatureId);
if (features.containsKey(featureId)) {
FeatureData featureData = features.get(featureId);
Item location =
makeLocation(srcFeatureData.itemIdentifier, featureData.itemIdentifier,
start, end, strand, getTaxonIdInt(), dataSet);
final String featureClassName =
model.getPackageName() + "." + featureData.interMineType;
                    Class<?> featureClass;
try {
featureClass = Class.forName(featureClassName);
} catch (ClassNotFoundException e) {
throw new RuntimeException("unable to find class object for setting "
+ "a chromosome reference", e);
}
if (LocatedSequenceFeature.class.isAssignableFrom(featureClass)
&& srcFeatureData.interMineType.equals("Chromosome")) {
Reference chrReference = new Reference();
chrReference.setName("chromosome");
chrReference.setRefId(srcFeatureData.itemIdentifier);
store(chrReference, featureData.getIntermineObjectId());
Reference locReference = new Reference();
locReference.setName("chromosomeLocation");
locReference.setRefId(location.getIdentifier());
store(locReference, featureData.getIntermineObjectId());
setAttribute(featureData.intermineObjectId, "length",
String.valueOf(end - start + 1));
}
count++;
} else {
if (featureWarnings <= 20) {
if (featureWarnings < 20) {
LOG.warn("featureId (" + featureId + ") from location " + featureLocId
+ " was not found in the feature table");
} else {
LOG.warn("further location warnings ignored");
}
featureWarnings++;
}
}
} else {
throw new RuntimeException("srcfeature_id (" + srcFeatureId + ") from location "
+ featureLocId + " was not found in the feature table");
}
}
LOG.info("created " + count + " locations");
res.close();
}
private void processRelationTable(Connection connection)
throws SQLException, ObjectStoreException {
ResultSet res = getFeatureRelationshipResultSet(connection);
Integer lastSubjectId = null;
// Map from relation type to Map from object type to FeatureData
Map<String, Map<String, List<FeatureData>>> relTypeMap =
new HashMap<String, Map<String, List<FeatureData>>>();
int featureWarnings = 0;
int count = 0;
int collectionTotal = 0;
while (res.next()) {
Integer featRelationshipId = new Integer(res.getInt("feature_relationship_id"));
Integer subjectId = new Integer(res.getInt("subject_id"));
Integer objectId = new Integer(res.getInt("object_id"));
String relationTypeName = res.getString("type_name");
if (lastSubjectId != null && !subjectId.equals(lastSubjectId)) {
processCollectionData(lastSubjectId, relTypeMap); // Stores stuff
collectionTotal += relTypeMap.size();
relTypeMap = new HashMap<String, Map<String, List<FeatureData>>>();
}
if (features.containsKey(subjectId)) {
if (features.containsKey(objectId)) {
FeatureData objectFeatureData = features.get(objectId);
Map<String, List<FeatureData>> objectClassFeatureDataMap;
if (relTypeMap.containsKey(relationTypeName)) {
objectClassFeatureDataMap = relTypeMap.get(relationTypeName);
} else {
objectClassFeatureDataMap = new HashMap<String, List<FeatureData>>();
relTypeMap.put(relationTypeName, objectClassFeatureDataMap);
}
List<FeatureData> featureDataList;
if (objectClassFeatureDataMap.containsKey(objectFeatureData.interMineType)) {
featureDataList =
objectClassFeatureDataMap.get(objectFeatureData.interMineType);
} else {
featureDataList = new ArrayList<FeatureData>();
objectClassFeatureDataMap.put(objectFeatureData.interMineType,
featureDataList);
}
featureDataList.add(objectFeatureData);
} else {
if (featureWarnings <= 20) {
if (featureWarnings < 20) {
LOG.warn("object_id " + objectId + " from feature_relationship "
+ featRelationshipId + " was not found in the feature table");
} else {
LOG.warn("further feature_relationship warnings ignored");
}
featureWarnings++;
}
}
} else {
if (featureWarnings <= 20) {
if (featureWarnings < 20) {
LOG.warn("subject_id " + subjectId + " from feature_relationship "
+ featRelationshipId
+ " was not found in the feature table");
} else {
LOG.warn("further feature_relationship warnings ignored");
}
featureWarnings++;
}
}
count++;
lastSubjectId = subjectId;
}
if (lastSubjectId != null) {
processCollectionData(lastSubjectId, relTypeMap); // Stores stuff
collectionTotal += relTypeMap.size();
}
LOG.info("processed " + count + " relations");
LOG.info("total collection elements created: " + collectionTotal);
res.close();
}
/**
* Create collections and references for the Item given by chadoSubjectId.
*/
private void processCollectionData(Integer chadoSubjectId,
Map<String, Map<String, List<FeatureData>>> relTypeMap)
throws ObjectStoreException {
FeatureData subjectData = features.get(chadoSubjectId);
if (subjectData == null) {
LOG.warn("unknown feature " + chadoSubjectId + " passed to processCollectionData - "
+ "ignoring");
return;
}
// map from collection name to list of item ids
Map<String, List<String>> collectionsToStore = new HashMap<String, List<String>>();
String subjectInterMineType = subjectData.interMineType;
ClassDescriptor cd = model.getClassDescriptorByName(subjectInterMineType);
Integer intermineItemId = subjectData.intermineObjectId;
for (Map.Entry<String, Map<String, List<FeatureData>>> entry: relTypeMap.entrySet()) {
String relationType = entry.getKey();
Map<String, List<FeatureData>> objectClassFeatureDataMap = entry.getValue();
Set<Entry<String, List<FeatureData>>> mapEntries = objectClassFeatureDataMap.entrySet();
for (Map.Entry<String, List<FeatureData>> featureDataMap: mapEntries) {
String objectClass = featureDataMap.getKey();
List<FeatureData> featureDataCollection = featureDataMap.getValue();
List<FieldDescriptor> fds = null;
FeatureData subjectFeatureData = features.get(chadoSubjectId);
// key example: ("relationship", "Translation", "producedby", "MRNA")
MultiKey key = new MultiKey("relationship", subjectFeatureData.interMineType,
relationType, objectClass);
List<ConfigAction> actionList = getConfig().get(key);
if (actionList != null) {
if (actionList.size() == 0
|| actionList.size() == 1 && actionList.get(0) instanceof DoNothingAction) {
// do nothing
continue;
}
fds = new ArrayList<FieldDescriptor>();
for (ConfigAction action: actionList) {
if (action instanceof SetFieldConfigAction) {
SetFieldConfigAction setAction = (SetFieldConfigAction) action;
String fieldName = setAction.getFieldName();
FieldDescriptor fd = cd.getFieldDescriptorByName(fieldName);
if (fd == null) {
throw new RuntimeException("can't find field " + fieldName
+ " in class " + cd + " configured for "
+ key);
} else {
fds.add(fd);
}
}
}
if (fds.size() == 0) {
throw new RuntimeException("no actions found for " + key);
}
} else {
if (PARTOF_RELATIONS.contains(relationType)) {
// special case for part_of relations - try to find a reference or
// collection that has a name that looks right for these objects (of class
// objectClass). eg. If the subject is a Transcript and the objectClass
// is Exon then find collections called "exons", "geneParts" (GenePart is
// a superclass of Exon)
fds = getReferenceForRelationship(objectClass, cd);
} else {
continue;
}
}
if (fds.size() == 0) {
LOG.error("can't find collection for type " + relationType
+ " in " + subjectInterMineType + " while processing feature "
+ chadoSubjectId);
continue;
}
for (FieldDescriptor fd: fds) {
if (fd.isReference()) {
if (objectClassFeatureDataMap.size() > 1) {
throw new RuntimeException("found more than one object for reference "
+ fd + " in class "
+ subjectInterMineType
+ " current subject identifier: "
+ subjectData.uniqueName);
} else {
if (objectClassFeatureDataMap.size() == 1) {
Reference reference = new Reference();
reference.setName(fd.getName());
FeatureData referencedFeatureData = featureDataCollection.get(0);
reference.setRefId(referencedFeatureData.itemIdentifier);
store(reference, intermineItemId); // Stores Reference for Feature
// special case for 1-1 relations - we need to set the reverse
// reference
ReferenceDescriptor rd = (ReferenceDescriptor) fd;
ReferenceDescriptor reverseRD = rd.getReverseReferenceDescriptor();
if (reverseRD != null && !reverseRD.isCollection()) {
Reference revReference = new Reference();
revReference.setName(reverseRD.getName());
revReference.setRefId(subjectData.itemIdentifier);
store(revReference, referencedFeatureData.intermineObjectId);
}
}
}
} else {
List<String> itemIds;
if (collectionsToStore.containsKey(fd.getName())) {
itemIds = collectionsToStore.get(fd.getName());
} else {
itemIds = new ArrayList<String>();
collectionsToStore.put(fd.getName(), itemIds);
}
for (FeatureData featureData: featureDataCollection) {
itemIds.add(featureData.itemIdentifier);
}
}
}
}
}
for (Map.Entry<String, List<String>> entry: collectionsToStore.entrySet()) {
ReferenceList referenceList = new ReferenceList();
String collectionName = entry.getKey();
referenceList.setName(collectionName);
List<String> idList = entry.getValue();
referenceList.setRefIds(idList);
store(referenceList, intermineItemId); // Stores ReferenceList for Feature
// if there is a field called <classname>Count that matches the name of the collection
// we just stored, set it
String countName;
if (collectionName.endsWith("s")) {
countName = collectionName.substring(0, collectionName.length() - 1);
} else {
countName = collectionName;
}
countName += "Count";
if (cd.getAttributeDescriptorByName(countName, true) != null) {
setAttribute(intermineItemId, countName, String.valueOf(idList.size()));
}
}
}
/**
* Search ClassDescriptor cd class for refs/collections with the right name for the objectType
* eg. find CDSs collection for objectType = CDS and find gene reference for objectType = Gene.
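* For example, for objectType "Exon" the candidate field names tried are "Exon", "Exons",
* "exon" and "exons", plus the same four variants for each superclass of Exon.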
*/
private List<FieldDescriptor> getReferenceForRelationship(String objectType,
ClassDescriptor cd) {
List<FieldDescriptor> fds = new ArrayList<FieldDescriptor>();
LinkedHashSet<String> allClasses = new LinkedHashSet<String>();
allClasses.add(objectType);
try {
Set<String> parentClasses = ClassDescriptor.findSuperClassNames(model, objectType);
allClasses.addAll(parentClasses);
} catch (MetaDataException e) {
throw new RuntimeException("class not found in the model", e);
}
for (String clsName: allClasses) {
List<String> possibleRefNames = new ArrayList<String>();
String unqualifiedClsName = TypeUtil.unqualifiedName(clsName);
possibleRefNames.add(unqualifiedClsName);
possibleRefNames.add(unqualifiedClsName + 's');
possibleRefNames.add(StringUtil.decapitalise(unqualifiedClsName));
possibleRefNames.add(StringUtil.decapitalise(unqualifiedClsName) + 's');
for (String possibleRefName: possibleRefNames) {
FieldDescriptor fd = cd.getFieldDescriptorByName(possibleRefName);
if (fd != null) {
fds.add(fd);
}
}
}
return fds;
}
private void processDbxrefTable(Connection connection)
throws SQLException, ObjectStoreException {
Item dataSource = getDataSourceItem(dataSourceName);
Item dataSet = getDataSetItem(dataSetTitle);
ResultSet res = getDbxrefResultSet(connection);
Set<String> existingAttributes = new HashSet<String>();
Integer currentFeatureId = null;
int count = 0;
while (res.next()) {
Integer featureId = new Integer(res.getInt("feature_id"));
String accession = res.getString("accession");
String dbName = res.getString("db_name");
Boolean isCurrent = res.getBoolean("is_current");
if (currentFeatureId != null && !currentFeatureId.equals(featureId)) {
existingAttributes = new HashSet<String>();
}
if (features.containsKey(featureId)) {
FeatureData fdat = features.get(featureId);
accession = fixIdentifier(fdat.interMineType, accession);
MultiKey key = new MultiKey("dbxref", fdat.interMineType, dbName, isCurrent);
List<ConfigAction> actionList = getConfig().get(key);
if (actionList == null) {
// try ignoring isCurrent
MultiKey key2 = new MultiKey("dbxref", fdat.interMineType, dbName, null);
actionList = getConfig().get(key2);
}
if (actionList == null) {
// no actions configured for this synonym
continue;
}
for (ConfigAction action: actionList) {
if (action instanceof SetFieldConfigAction) {
SetFieldConfigAction setAction = (SetFieldConfigAction) action;
if (!existingAttributes.contains(setAction.getFieldName())) {
setAttribute(fdat.intermineObjectId, setAction.getFieldName(),
accession);
existingAttributes.add(setAction.getFieldName());
if (setAction.getFieldName().equals("identifier")) {
fdat.flags |= FeatureData.IDENTIFIER_SET;
}
}
} else {
if (action instanceof CreateSynonymAction) {
if (fdat.existingSynonyms.contains(accession)) {
continue;
} else {
createSynonym(fdat, "identifier", accession, false, dataSet,
EMPTY_ITEM_LIST, dataSource); // Stores Synonym
count++;
}
}
}
}
}
currentFeatureId = featureId;
}
LOG.info("created " + count + " synonyms from the dbxref table");
res.close();
}
private void processFeaturePropTable(Connection connection)
throws SQLException, ObjectStoreException {
Item dataSource = getDataSourceItem(dataSourceName);
Item dataSet = getDataSetItem(dataSetTitle);
ResultSet res = getFeaturePropResultSet(connection);
int count = 0;
while (res.next()) {
Integer featureId = new Integer(res.getInt("feature_id"));
String identifier = res.getString("value");
String propTypeName = res.getString("type_name");
if (features.containsKey(featureId)) {
FeatureData fdat = features.get(featureId);
MultiKey key = new MultiKey("prop", fdat.interMineType, propTypeName);
List<ConfigAction> actionList = getConfig().get(key);
if (actionList == null) {
// no actions configured for this prop
continue;
}
for (ConfigAction action: actionList) {
if (action instanceof SetFieldConfigAction) {
SetFieldConfigAction setAction = (SetFieldConfigAction) action;
setAttribute(fdat.intermineObjectId, setAction.getFieldName(), identifier);
if (setAction.getFieldName().equals("identifier")) {
fdat.flags |= FeatureData.IDENTIFIER_SET;
}
} else {
if (action instanceof CreateSynonymAction) {
CreateSynonymAction synonymAction = (CreateSynonymAction) action;
Set<String> existingSynonyms = fdat.existingSynonyms;
if (existingSynonyms.contains(identifier)) {
continue;
} else {
String synonymType = synonymAction.synonymType;
if (synonymType == null) {
synonymType = propTypeName;
}
createSynonym(fdat, synonymType, identifier, false, dataSet,
EMPTY_ITEM_LIST, dataSource); // Stores Synonym
count++;
}
}
}
}
}
}
LOG.info("created " + count + " synonyms from the featureprop table");
res.close();
}
private void processSynonymTable(Connection connection)
throws SQLException, ObjectStoreException {
Item dataSource = getDataSourceItem(dataSourceName);
Item dataSet = getDataSetItem(dataSetTitle);
ResultSet res = getSynonymResultSet(connection);
Set<String> existingAttributes = new HashSet<String>();
Integer currentFeatureId = null;
int count = 0;
while (res.next()) {
Integer featureId = new Integer(res.getInt("feature_id"));
String identifier = res.getString("synonym_name");
String synonymTypeName = res.getString("type_name");
Boolean isCurrent = res.getBoolean("is_current");
identifier = fixIdentifier(synonymTypeName, identifier);
if (currentFeatureId != null && !currentFeatureId.equals(featureId)) {
existingAttributes = new HashSet<String>();
}
if (features.containsKey(featureId)) {
FeatureData fdat = features.get(featureId);
MultiKey key =
new MultiKey("synonym", fdat.interMineType, synonymTypeName, isCurrent);
List<ConfigAction> actionList = getConfig().get(key);
if (actionList == null) {
// try ignoring isCurrent
MultiKey key2 =
new MultiKey("synonym", fdat.interMineType, synonymTypeName, null);
actionList = getConfig().get(key2);
}
if (actionList == null) {
// no actions configured for this synonym
continue;
}
for (ConfigAction action: actionList) {
if (action instanceof SetFieldConfigAction) {
SetFieldConfigAction setAction = (SetFieldConfigAction) action;
if (!existingAttributes.contains(setAction.getFieldName())) {
setAttribute(fdat.intermineObjectId, setAction.getFieldName(),
identifier);
existingAttributes.add(setAction.getFieldName());
if (setAction.getFieldName().equals("identifier")) {
fdat.flags |= FeatureData.IDENTIFIER_SET;
}
}
} else {
if (action instanceof CreateSynonymAction) {
if (fdat.existingSynonyms.contains(identifier)) {
continue;
} else {
createSynonym(fdat, synonymTypeName, identifier, false, dataSet,
EMPTY_ITEM_LIST, dataSource); // Stores Synonym
count++;
}
}
}
}
}
currentFeatureId = featureId;
}
LOG.info("created " + count + " synonyms from the synonym table");
res.close();
}
/**
* Process the identifier and return a "cleaned" version. Implement in sub-classes to fix
* data problems.
* @param type the (SO) type of the feature that this identifier came from
* @param identifier the identifier
* @return a cleaned identifier
*/
protected String fixIdentifier(String type, String identifier) {
/*
* default implementation should be: return identifier
*/
// XXX FIXME TODO - for wormbase - move to WormBaseDBConverter
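// eg. (hypothetical WormBase-style value) for type "gene" an identifier like
// "gene:WBGene00000001" would become "WBGene00000001"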
if (identifier.startsWith(type + ":")) {
return identifier.substring(type.length() + 1);
} else {
return identifier;
}
}
/**
* Set an attribute in an Item by creating an Attribute object and storing it.
* @param intermineObjectId the intermine object ID of the item to create this attribute for.
* @param attributeName the attribute name
* @param value the value to set
* @throws ObjectStoreException if there is a problem while storing
*/
protected void setAttribute(Integer intermineObjectId, String attributeName, String value)
throws ObjectStoreException {
Attribute att = new Attribute();
att.setName(attributeName);
att.setValue(value);
store(att, intermineObjectId);
}
private void processPubTable(Connection connection)
throws SQLException, ObjectStoreException {
ResultSet res = getPubResultSet(connection);
List<String> currentEvidenceIds = new ArrayList<String>();
Integer lastPubFeatureId = null;
int featureWarnings = 0;
int count = 0;
Map<String, String> pubs = new HashMap<String, String>();
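// rows are ordered by feature_id (see getPubResultSet()), so the publication collections for
// a feature can be stored as soon as the feature_id changes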
while (res.next()) {
Integer featureId = new Integer(res.getInt("feature_id"));
if (!features.containsKey(featureId)) {
if (featureWarnings <= 20) {
if (featureWarnings < 20) {
LOG.warn("feature " + featureId + " not found in features Map while "
+ "processing publications");
} else {
LOG.warn("further feature id warnings ignored in processPubTable()");
}
featureWarnings++;
}
continue;
}
String pubMedId = res.getString("pub_db_identifier");
if (lastPubFeatureId != null && !featureId.equals(lastPubFeatureId)) {
makeFeaturePublications(lastPubFeatureId, currentEvidenceIds);
makeFeatureEvidence(lastPubFeatureId, currentEvidenceIds); // Stores ReferenceList
currentEvidenceIds = new ArrayList<String>();
}
String publicationId;
if (pubs.containsKey(pubMedId)) {
publicationId = pubs.get(pubMedId);
} else {
Item publication = createItem("Publication");
publication.setAttribute("pubMedId", pubMedId);
store(publication); // Stores Publication
publicationId = publication.getIdentifier();
pubs.put(pubMedId, publicationId);
}
currentEvidenceIds.add(publicationId);
lastPubFeatureId = featureId;
count++;
}
if (lastPubFeatureId != null) {
makeFeaturePublications(lastPubFeatureId, currentEvidenceIds);
makeFeatureEvidence(lastPubFeatureId, currentEvidenceIds);
}
LOG.info("Created " + count + " publications");
res.close();
}
/**
* Set the publications collection of the feature with the given (chado) feature id.
*/
private void makeFeaturePublications(Integer featureId, List<String> argPublicationIds)
throws ObjectStoreException {
FeatureData fdat = features.get(featureId);
if (fdat == null) {
throw new RuntimeException("feature " + featureId + " not found in features Map");
}
if (!fdat.interMineType.equals("Gene")) {
// only Gene has a publications collection
return;
}
List<String> publicationIds = new ArrayList<String>(argPublicationIds);
ReferenceList referenceList = new ReferenceList();
referenceList.setName("publications");
referenceList.setRefIds(publicationIds);
store(referenceList, fdat.intermineObjectId);
}
/**
* Set the evidence collection of the feature with the given (chado) feature id.
*/
private void makeFeatureEvidence(Integer featureId, List<String> argEvidenceIds)
throws ObjectStoreException {
FeatureData fdat = features.get(featureId);
if (fdat == null) {
throw new RuntimeException("feature " + featureId + " not found in features Map");
}
List<String> evidenceIds = new ArrayList<String>(argEvidenceIds);
Item dataSet = getDataSetItem(dataSetTitle);
evidenceIds.add(0, dataSet.getIdentifier());
ReferenceList referenceList = new ReferenceList();
referenceList.setName("evidence");
referenceList.setRefIds(evidenceIds);
store(referenceList, fdat.intermineObjectId);
fdat.flags |= FeatureData.EVIDENCE_CREATED;
}
/**
* For those features in the features Map that don't yet have an evidence collection, create one
* containing the DataSet. We know a feature doesn't have an evidence collection if it
* doesn't have its EVIDENCE_CREATED flag set.
*/
private void addMissingDataEvidence() throws ObjectStoreException {
List<String> emptyList = Collections.emptyList();
for (Map.Entry<Integer, FeatureData> entry: features.entrySet()) {
Integer featureId = entry.getKey();
FeatureData featureData = entry.getValue();
if ((featureData.flags & FeatureData.EVIDENCE_CREATED) == 0) {
makeFeatureEvidence(featureId, emptyList);
}
}
}
/**
* Return the interesting rows from the features table.
* This is a protected method so that it can be overridden for testing
* @param connection the db connection
* @return the SQL result set
* @throws SQLException if a database problem occurs
*/
protected ResultSet getFeatureResultSet(Connection connection)
throws SQLException {
String featureTypesString = getFeaturesString();
String query =
"SELECT feature_id, feature.name, uniquename, cvterm.name as type, seqlen, is_analysis,"
+ " residues"
+ " FROM feature, cvterm"
+ " WHERE feature.type_id = cvterm.cvterm_id"
+ " AND cvterm.name IN (" + featureTypesString + ")"
+ " AND organism_id = " + chadoOrganismId
+ " AND NOT feature.is_obsolete";
LOG.info("executing: " + query);
Statement stmt = connection.createStatement();
ResultSet res = stmt.executeQuery(query);
return res;
}
/**
* Return the chado organism id for the given genus/species. This is a protected method so
* that it can be overridden for testing
* @param connection the db connection
* @return the internal id (organism_id from the organism table)
* @throws SQLException if there is a database problem
*/
protected int getChadoOrganismId(Connection connection)
throws SQLException {
String query = "select organism_id from organism where genus = "
+ DatabaseUtil.objectToString(genus) + " and species = "
+ DatabaseUtil.objectToString(species);
LOG.info("executing: " + query);
Statement stmt = connection.createStatement();
ResultSet res = stmt.executeQuery(query);
if (res.next()) {
return res.getInt(1);
} else {
throw new RuntimeException("no rows returned when querying organism table for genus \""
+ genus + "\" and species \"" + species + "\"");
}
}
/**
* Return a SQL query string that gets all non-obsolete interesting features. This is used as
* a subquery by the other get*ResultSet() methods.
*/
private String getFeatureIdQuery() {
String featureTypesString = getFeaturesString();
return
" SELECT feature_id FROM feature, cvterm"
+ " WHERE cvterm.name IN (" + featureTypesString + ")"
+ " AND organism_id = " + chadoOrganismId
+ " AND NOT feature.is_obsolete"
+ " AND feature.type_id = cvterm.cvterm_id";
}
/**
* Return the interesting rows from the feature_relationship table.
* This is a protected method so that it can be overridden for testing
* @param connection the db connection
* @return the SQL result set
* @throws SQLException if a database problem occurs
*/
protected ResultSet getFeatureRelationshipResultSet(Connection connection) throws SQLException {
String query =
"SELECT feature_relationship_id, subject_id, object_id, cvterm.name AS type_name"
+ " FROM feature_relationship, cvterm"
+ " WHERE cvterm.cvterm_id = type_id"
+ " AND subject_id IN (" + getFeatureIdQuery() + ")"
+ " AND object_id IN (" + getFeatureIdQuery() + ")"
+ " ORDER BY subject_id";
LOG.info("executing: " + query);
Statement stmt = connection.createStatement();
ResultSet res = stmt.executeQuery(query);
return res;
}
/**
* Return the interesting rows from the featureloc table.
* This is a protected method so that it can be overridden for testing
* @param connection the db connection
* @return the SQL result set
* @throws SQLException if a database problem occurs
*/
protected ResultSet getFeatureLocResultSet(Connection connection) throws SQLException {
String query =
"SELECT featureloc_id, feature_id, srcfeature_id, fmin, is_fmin_partial,"
+ " fmax, is_fmax_partial, strand"
+ " FROM featureloc"
+ " WHERE feature_id IN"
+ " (" + getFeatureIdQuery() + ")"
+ " AND srcfeature_id IN"
+ " (" + getFeatureIdQuery() + ")"
+ " AND locgroup = 0";
LOG.info("executing: " + query);
Statement stmt = connection.createStatement();
ResultSet res = stmt.executeQuery(query);
return res;
}
/**
* Return the interesting rows from the dbxref table.
* This is a protected method so that it can be overridden for testing
* @param connection the db connection
* @return the SQL result set
* @throws SQLException if a database problem occurs
*/
protected ResultSet getDbxrefResultSet(Connection connection) throws SQLException {
String query =
"SELECT feature.feature_id, accession, db.name AS db_name, is_current"
+ " FROM dbxref, feature_dbxref, feature, db"
+ " WHERE feature_dbxref.dbxref_id = dbxref.dbxref_id "
+ " AND feature_dbxref.feature_id = feature.feature_id "
+ " AND feature.feature_id IN"
+ " (" + getFeatureIdQuery() + ")"
+ " AND dbxref.db_id = db.db_id";
LOG.info("executing: " + query);
Statement stmt = connection.createStatement();
ResultSet res = stmt.executeQuery(query);
return res;
}
/**
* Return the interesting rows from the featureprop table.
* This is a protected method so that it can be overridden for testing
* @param connection the db connection
* @return the SQL result set
* @throws SQLException if a database problem occurs
*/
protected ResultSet getFeaturePropResultSet(Connection connection) throws SQLException {
String query =
"select feature_id, value, cvterm.name AS type_name FROM featureprop, cvterm"
+ " WHERE featureprop.type_id = cvterm.cvterm_id"
+ " AND feature_id IN (" + getFeatureIdQuery() + ")";
LOG.info("executing: " + query);
Statement stmt = connection.createStatement();
ResultSet res = stmt.executeQuery(query);
return res;
}
/**
* Return the interesting rows from the synonym table.
* This is a protected method so that it can be overridden for testing
* @param connection the db connection
* @return the SQL result set
* @throws SQLException if a database problem occurs
*/
protected ResultSet getSynonymResultSet(Connection connection) throws SQLException {
String query =
"SELECT DISTINCT feature_id, synonym.name AS synonym_name,"
+ " cvterm.name AS type_name, is_current"
+ " FROM feature_synonym, synonym, cvterm"
+ " WHERE feature_synonym.synonym_id = synonym.synonym_id"
+ " AND synonym.type_id = cvterm.cvterm_id"
+ " AND feature_id IN (" + getFeatureIdQuery() + ")";
LOG.info("executing: " + query);
Statement stmt = connection.createStatement();
ResultSet res = stmt.executeQuery(query);
return res;
}
/**
* Return the interesting rows from the pub table.
* This is a protected method so that it can be overridden for testing
* @param connection the db connection
* @return the SQL result set
* @throws SQLException if a database problem occurs
*/
protected ResultSet getPubResultSet(Connection connection) throws SQLException {
String query =
"SELECT DISTINCT feature_pub.feature_id, dbxref.accession as pub_db_identifier"
+ " FROM feature_pub, dbxref, db, pub, pub_dbxref"
+ " WHERE feature_pub.pub_id = pub.pub_id"
+ " AND pub_dbxref.dbxref_id = dbxref.dbxref_id"
+ " AND dbxref.db_id = db.db_id"
+ " AND pub.pub_id = pub_dbxref.pub_id"
+ " AND db.name = 'pubmed'"
+ " AND feature_id IN (" + getFeatureIdQuery() + ")"
+ " ORDER BY feature_pub.feature_id";
LOG.info("executing: " + query);
Statement stmt = connection.createStatement();
ResultSet res = stmt.executeQuery(query);
return res;
}
/**
* Call super.createSynonym(), store the Item then record in fdat that we've created it.
*/
private Item createSynonym(FeatureData fdat, String type, String identifier,
boolean isPrimary, Item dataSet, List<Item> otherEvidence,
Item dataSource)
throws ObjectStoreException {
if (fdat.existingSynonyms.contains(identifier)) {
throw new IllegalArgumentException("feature identifier " + identifier
+ " is already a synonym for: "
+ fdat.existingSynonyms);
}
List<Item> allEvidence = new ArrayList<Item>();
allEvidence.add(dataSet);
allEvidence.addAll(otherEvidence);
Item returnItem = createSynonym(fdat.itemIdentifier, type, identifier, isPrimary,
allEvidence, dataSource);
fdat.existingSynonyms.add(identifier);
return returnItem;
}
}
|
package org.intermine.bio.dataconversion;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.commons.collections.keyvalue.MultiKey;
import org.apache.commons.collections.map.MultiKeyMap;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.intermine.bio.chado.ChadoCV;
import org.intermine.bio.chado.ChadoCVFactory;
import org.intermine.bio.chado.ChadoCVTerm;
import org.intermine.bio.chado.config.ConfigAction;
import org.intermine.bio.chado.config.CreateCollectionAction;
import org.intermine.bio.chado.config.CreateSynonymAction;
import org.intermine.bio.chado.config.SetFieldConfigAction;
import org.intermine.bio.util.OrganismData;
import org.intermine.objectstore.ObjectStoreException;
import org.intermine.util.IntPresentSet;
import org.intermine.util.StringUtil;
import org.intermine.util.XmlUtil;
import org.intermine.xml.full.Item;
import org.intermine.xml.full.Reference;
import org.intermine.xml.full.ReferenceList;
/**
* A converter for chado that handles FlyBase specific configuration.
* @author Kim Rutherford
*/
public class FlyBaseProcessor extends SequenceProcessor
{
/**
* The cv.name for the wild type class term. For chromosome_structure_variations, used to
* identify the "Feature type" from the "Class of aberration" section of a FlyBase aberation
* page.
*/
private static final String WT_CLASS_CVTERM = "wt_class";
private static final String FLYBASE_DB_NAME = "FlyBase";
/**
* The cv.name for the FlyBase miscellaneous CV.
*/
protected static final String FLYBASE_MISCELLANEOUS_CV = FLYBASE_DB_NAME + " miscellaneous CV";
/**
* The cv.name for the Sequence Ontology CV as stored in the FlyBase chado database.
*/
protected static final String FLYBASE_SO_CV_NAME = "SO";
private static final String FLYBASE_ANATOMY_TERM_PREFIX = "FBbt";
/**
* A ConfigAction that overrides processValue() to change FlyBase attribute tags
* (like "@FBcv0000289:hypomorph@") to text like: "hypomorph"
* @author Kim Rutherford
*/
private class AlleleClassSetFieldAction extends SetFieldConfigAction
{
/**
* Create a new AlleleClassSetFieldAction
* @param fieldName the fieldName to process with this object.
*/
AlleleClassSetFieldAction(String fieldName) {
super(fieldName);
}
/**
* {@inheritDoc}
*/
@Override
public String processValue(String value) {
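// eg. "@FBcv0000289:hypomorph@" becomes "hypomorph"; a tag without a colon keeps its full text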
Pattern p = Pattern.compile(FYBASE_PROP_ATTRIBUTE_PATTERN);
Matcher m = p.matcher(value);
StringBuffer sb = new StringBuffer();
while (m.find()) {
String field = m.group(1);
int colonPos = field.indexOf(':');
if (colonPos == -1) {
m.appendReplacement(sb, field);
} else {
String text = field.substring(colonPos + 1);
m.appendReplacement(sb, text);
}
}
m.appendTail(sb);
return sb.toString();
}
}
// a pattern that matches attributes stored in FlyBase properties, eg. "@FBcv0000289:hypomorph@"
private static final String FYBASE_PROP_ATTRIBUTE_PATTERN = "@([^@]+)@";
private static final Logger LOG = Logger.getLogger(FlyBaseProcessor.class);
// the configuration for this processor, set when getConfig() is called the first time
private final Map<Integer, MultiKeyMap> config = new HashMap<Integer, MultiKeyMap>();
// a set of feature_ids for those genes that have a location in the featureloc table, set by
// the constructor
private final IntPresentSet locatedGeneIds;
// a map from the uniquename of each allele to its item identifier
private Map<String, String> alleleIdMap = new HashMap<String, String>();
// an object representing the FlyBase miscellaneous CV
private ChadoCV flyBaseMiscCv = null;
// an object representing the sequence ontology, as stored in the FlyBase chado database
private ChadoCV sequenceOntologyCV = null;
// a map from mutagen description to Mutagen Item identifier
private Map<String, String> mutagensMap = new HashMap<String, String>();
// a map from featureId to seqlen
// private Map<Integer, Integer> cdnaLengths = null;
private final Map<Integer, Integer> chromosomeStructureVariationTypes;
private Map<String, String> interactionExperiments = new HashMap<String, String>();
private static final String LOCATED_GENES_TEMP_TABLE_NAME = "intermine_located_genes_temp";
private static final String ALLELE_TEMP_TABLE_NAME = "intermine_flybase_allele_temp";
private static final String INSERTION_TEMP_TABLE_NAME = "intermine_flybase_insertion_temp";
// pattern to match the names of Exelixis insertions
// - matches "f07705" in "PBac{WH}f07705"
// - matches "f07705" in "PBac{WH}tam[f07705]"
private static final Pattern PB_INSERTION_PATTERN =
Pattern.compile(".*\\{.*\\}(?:.*\\[)?([def]\\d+)(?:\\])?");
// pattern to match GLEANR gene symbols from FlyBase chado
private static final Pattern GLEANR_PATTERN = Pattern.compile(".*GLEANR.*");
private static final Map<String, String> CHROMOSOME_STRUCTURE_VARIATION_SO_MAP =
new HashMap<String, String>();
private final Map<String, FeatureData> proteinFeatureDataMap = new HashMap<String, FeatureData>();
static {
CHROMOSOME_STRUCTURE_VARIATION_SO_MAP.put("chromosomal_deletion",
"ChromosomalDeletion");
CHROMOSOME_STRUCTURE_VARIATION_SO_MAP.put("chromosomal_duplication",
"ChromosomalDuplication");
CHROMOSOME_STRUCTURE_VARIATION_SO_MAP.put("chromosomal_inversion",
"ChromosomalInversion");
CHROMOSOME_STRUCTURE_VARIATION_SO_MAP.put("chromosomal_translocation",
"ChromosomalTranslocation");
CHROMOSOME_STRUCTURE_VARIATION_SO_MAP.put("transposition",
"Transposition");
}
private static final String CHROMOSOME_STRUCTURE_VARIATION_SO_NAME =
"chromosome_structure_variation";
/**
* Create a new FlyBaseProcessor.
* @param chadoDBConverter the converter that created this object
*/
public FlyBaseProcessor(ChadoDBConverter chadoDBConverter) {
super(chadoDBConverter);
Connection connection = getChadoDBConverter().getConnection();
try {
flyBaseMiscCv = getFlyBaseMiscCV(connection);
} catch (SQLException e) {
throw new RuntimeException("can't execute query for flybase cv terms", e);
}
try {
sequenceOntologyCV = getFlyBaseSequenceOntologyCV(connection);
} catch (SQLException e) {
throw new RuntimeException("can't execute query for so cv terms", e);
}
try {
createLocatedGenesTempTable(connection);
} catch (SQLException e) {
throw new RuntimeException("can't execute query for located genes", e);
}
locatedGeneIds = getLocatedGeneIds(connection);
chromosomeStructureVariationTypes = getChromosomeStructureVariationTypes(connection);
// try {
// cdnaLengths = makeCDNALengthMap(connection);
// } catch (SQLException e) {
// e.printStackTrace();
// }
}
/**
* @param connection database connection
* @return map of feature_id to seqlen
*/
// protected Map<Integer, Integer> getLengths(Connection connection) {
// if (cdnaLengths == null) {
// try {
// cdnaLengths = makeCDNALengthMap(connection);
// } catch (SQLException e) {
// // TODO Auto-generated catch block
// e.printStackTrace();
// }
// }
// return cdnaLengths;
// }
/**
* Return a map from chromosome_structure_variation feature_ids to the cvterm_id of the
* associated cvtermprop. This is needed because the exact type of the
* chromosome_structure_variation objects is not used as the type_id of the feature, instead
* it's stored in the cvtermprop table.
*/
private Map<Integer, Integer> getChromosomeStructureVariationTypes(Connection connection) {
Map<Integer, Integer> retVal = new HashMap<Integer, Integer>();
ResultSet res;
try {
res = getChromosomeStructureVariationResultSet(connection);
} catch (SQLException e) {
throw new RuntimeException("can't execute query for chromosome_structure_variation "
+ "types", e);
}
try {
while (res.next()) {
int featureId = res.getInt("feature_id");
int cvtermId = res.getInt("cvterm_id");
retVal.put(new Integer(featureId), new Integer(cvtermId));
}
} catch (SQLException e) {
throw new RuntimeException("problem while reading chromosome_structure_variation "
+ "types", e);
}
return retVal;
}
/**
* Add the typeId to the List that is the value in the map for the featureId, creating the
* List if necessary.
*/
// private void addToMapList(Map<Integer, List<Integer>> map, int featureId, int typeId) {
// List<Integer> list;
// if (map.containsKey(featureId)) {
// list = map.get(featureId);
// } else {
// list = new ArrayList<Integer>();
// map.put(featureId, list);
// }
// list.add(typeId);
// }
/**
* Return the results of running a query for the chromosome_structure_variation feature types.
* @param connection the connection
* @return the results
* @throws SQLException if there is a database problem
*/
protected ResultSet getChromosomeStructureVariationResultSet(Connection connection)
throws SQLException {
String query =
" SELECT feature.feature_id, cvterm.cvterm_id"
+ " FROM feature, feature_cvterm, cvterm feature_type, cvterm, cv,"
+ " feature_cvtermprop, cvterm prop_term"
+ " WHERE feature.type_id = feature_type.cvterm_id"
+ " AND feature_type.name = '" + CHROMOSOME_STRUCTURE_VARIATION_SO_NAME + "' "
+ " AND feature_cvterm.feature_id = feature.feature_id"
+ " AND feature_cvterm.cvterm_id = cvterm.cvterm_id AND cvterm.cv_id = cv.cv_id"
+ " AND cv.name = 'SO' "
+ " AND feature_cvtermprop.feature_cvterm_id = feature_cvterm.feature_cvterm_id"
+ " AND feature_cvtermprop.type_id = prop_term.cvterm_id AND prop_term.name = '"
+ WT_CLASS_CVTERM + "'";
LOG.info("executing getChromosomeStructureVariationResultSet(): " + query);
Statement stmt = connection.createStatement();
ResultSet res = stmt.executeQuery(query);
return res;
}
/**
* Return a set of ids of those genes that have a location in the featureloc table.
*/
private IntPresentSet getLocatedGeneIds(Connection connection) {
IntPresentSet retVal = new IntPresentSet();
ResultSet res;
try {
res = getLocatedGenesResultSet(connection);
} catch (SQLException e) {
throw new RuntimeException("can't execute query for located genes", e);
}
try {
while (res.next()) {
int featureId = res.getInt("feature_id");
retVal.set(featureId, true);
}
} catch (SQLException e) {
throw new RuntimeException("problem while reading located genes", e);
}
return retVal;
}
/**
* Create a temporary table containing the ids of the located genes. This is a protected
* method so that it can be overridden for testing
* @param connection the Connection
* @throws SQLException if there is a database problem
*/
protected void createLocatedGenesTempTable(Connection connection) throws SQLException {
String organismConstraint = getOrganismConstraint();
String orgConstraintForQuery = "";
if (!StringUtils.isEmpty(organismConstraint)) {
orgConstraintForQuery = " AND " + organismConstraint;
}
String query = "CREATE TEMPORARY TABLE " + LOCATED_GENES_TEMP_TABLE_NAME
+ " AS SELECT feature.feature_id FROM feature, cvterm"
+ " WHERE feature.type_id = cvterm.cvterm_id"
+ " AND cvterm.name = 'gene' "
+ " AND NOT feature.is_obsolete "
+ " AND feature.feature_id IN "
+ " (SELECT l.feature_id "
+ " FROM featureloc l, feature c "
+ " WHERE l.srcfeature_id = c.feature_id and NOT c.is_obsolete)"
+ orgConstraintForQuery;
Statement stmt = connection.createStatement();
LOG.info("executing createLocatedGenesTempTable(): " + query);
stmt.execute(query);
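// index and ANALYZE the temporary table so that later joins against it are efficient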
String idIndexQuery = "CREATE INDEX " + LOCATED_GENES_TEMP_TABLE_NAME + "_feature_index ON "
+ LOCATED_GENES_TEMP_TABLE_NAME + "(feature_id)";
LOG.info("executing: " + idIndexQuery);
stmt.execute(idIndexQuery);
String analyze = "ANALYZE " + LOCATED_GENES_TEMP_TABLE_NAME;
LOG.info("executing: " + analyze);
stmt.execute(analyze);
}
/**
* Create a temporary table of allele feature_ids. The table will only have allele of genes
* with locations.
* @param connection the connection
* @throws SQLException if there is a database problem
*/
protected void createAllelesTempTable(Connection connection) throws SQLException {
String organismConstraint = getOrganismConstraint();
String orgConstraintForQuery = "";
if (!StringUtils.isEmpty(organismConstraint)) {
orgConstraintForQuery = " AND " + organismConstraint;
}
String query =
" CREATE TEMPORARY TABLE " + ALLELE_TEMP_TABLE_NAME
+ " AS SELECT feature_id"
+ " FROM feature, cvterm feature_type "
+ " WHERE feature_type.name = 'gene'"
+ " AND type_id = feature_type.cvterm_id"
+ " AND uniquename LIKE 'FBal%'"
+ " AND NOT feature.is_obsolete"
+ " AND feature_id IN (SELECT feature_id FROM feature WHERE "
+ getLocatedGeneAllesSql() + ")"
+ orgConstraintForQuery;
Statement stmt = connection.createStatement();
LOG.info("executing createAllelesTempTable(): " + query);
stmt.execute(query);
String idIndexQuery = "CREATE INDEX " + ALLELE_TEMP_TABLE_NAME + "_feature_index ON "
+ ALLELE_TEMP_TABLE_NAME + "(feature_id)";
LOG.info("executing: " + idIndexQuery);
stmt.execute(idIndexQuery);
String analyze = "ANALYZE " + ALLELE_TEMP_TABLE_NAME;
LOG.info("executing: " + analyze);
stmt.execute(analyze);
}
/**
* Create a temporary table from pairs of insertions (eg. "FBti0027974" => "FBti0023081")
* containing the feature_ids of the pair (the object_id, subject_id in the relation table)
* and the fmin and fmax of the first insertion in the pair (ie. the progenitor / object from
* the feature_relationship table).
* The second in the pair is the "Modified descendant of" the first. The pairs are found using
* the 'modified_descendant_of' relation type. All insertions are from DrosDel.
* @param connection the connection
* @throws SQLException if there is a database problem
*/
protected void createInsertionTempTable(Connection connection) throws SQLException {
String query =
" CREATE TEMPORARY TABLE " + INSERTION_TEMP_TABLE_NAME
+ " AS SELECT obj.feature_id AS obj_id, sub.feature_id AS sub_id,"
+ " obj_loc.fmin, obj_loc.fmax,"
+ " obj_loc.srcfeature_id as chr_feature_id"
+ " FROM feature sub, cvterm sub_type, feature_relationship rel, cvterm rel_type, "
+ " feature obj, cvterm obj_type, featureloc obj_loc"
+ " WHERE sub.feature_id = rel.subject_id AND rel.object_id = obj.feature_id"
+ " AND sub_type.cvterm_id = sub.type_id AND obj_type.cvterm_id = obj.type_id"
+ " AND sub_type.name = 'transposable_element_insertion_site' "
+ " AND obj_type.name = 'transposable_element_insertion_site' "
+ " AND rel.type_id = rel_type.cvterm_id"
+ " AND rel_type.name = 'modified_descendant_of'"
+ " AND sub.feature_id in (select feature_id from feature_pub where pub_id ="
+ " (SELECT pub_id FROM pub"
+ " WHERE title = "
+ "'The DrosDel collection: a set of P-element insertions for "
+ "generating custom chromosomal aberrations in Drosophila melanogaster.')) "
+ " AND obj.feature_id = obj_loc.feature_id";
Statement stmt = connection.createStatement();
LOG.info("executing createInsertionTempTable(): " + query);
stmt.execute(query);
String idIndexQuery = "CREATE INDEX " + INSERTION_TEMP_TABLE_NAME + "index ON "
+ INSERTION_TEMP_TABLE_NAME + "(sub_id)";
LOG.info("executing: " + idIndexQuery);
stmt.execute(idIndexQuery);
String analyze = "ANALYZE " + INSERTION_TEMP_TABLE_NAME;
LOG.info("executing: " + analyze);
stmt.execute(analyze);
}
/**
* Get ChadoCV object representing the FlyBase misc cv.
* This is a protected method so that it can be overridden for testing
* @param connection the database Connection
* @return the cv
* @throws SQLException if there is a database problem
*/
protected ChadoCV getFlyBaseMiscCV(Connection connection) throws SQLException {
ChadoCVFactory cvFactory = new ChadoCVFactory(connection);
return cvFactory.getChadoCV(FLYBASE_MISCELLANEOUS_CV);
}
/**
* Get ChadoCV object representing SO from FlyBase.
* This is a protected method so that it can be overridden for testing
* @param connection the database Connection
* @return the cv
* @throws SQLException if there is a database problem
*/
protected ChadoCV getFlyBaseSequenceOntologyCV(Connection connection) throws SQLException {
ChadoCVFactory cvFactory = new ChadoCVFactory(connection);
return cvFactory.getChadoCV(FLYBASE_SO_CV_NAME);
}
/**
* {@inheritDoc}
*/
@Override
protected Integer store(Item feature, int taxonId) throws ObjectStoreException {
processItem(feature, new Integer(taxonId));
Integer itemId = super.store(feature, taxonId);
return itemId;
}
/**
* {@inheritDoc}
*/
@Override
protected Item makeLocation(int start, int end, int strand, FeatureData srcFeatureData,
FeatureData featureData, int taxonId)
throws ObjectStoreException {
Item location =
super.makeLocation(start, end, strand, srcFeatureData, featureData, taxonId);
processItem(location, new Integer(taxonId));
return location;
}
/**
* {@inheritDoc}
*/
@Override
protected Item createSynonym(FeatureData fdat, String type, String identifier,
boolean isPrimary, List<Item> otherEvidence)
throws ObjectStoreException {
Item synonym = super.createSynonym(fdat, type, identifier, isPrimary, otherEvidence);
/* synonym can be null if it's been created earlier. this would happen only if
* the synonym was created when another protein was created in favour of this one. */
if (synonym != null) {
OrganismData od = fdat.getOrganismData();
processItem(synonym, new Integer(od.getTaxonId()));
}
return synonym;
}
/**
* Return from chado the feature_ids of the genes with entries in the featureloc table.
* @param connection the db connection
* @return the SQL result set
* @throws SQLException if a database problem occurs
*/
protected ResultSet getLocatedGenesResultSet(Connection connection) throws SQLException {
String query = getLocatedGenesSql();
LOG.info("executing getLocatedGenesResultSet(): " + query);
Statement stmt = connection.createStatement();
ResultSet res = stmt.executeQuery(query);
return res;
}
/**
* Return a query that gets the feature_ids of genes that have locations.
*/
private String getLocatedGenesSql() {
return "SELECT feature_id FROM " + LOCATED_GENES_TEMP_TABLE_NAME;
}
/**
* {@inheritDoc}
*/
@Override
protected Map<MultiKey, List<ConfigAction>> getConfig(int taxonId) {
MultiKeyMap map = config.get(new Integer(taxonId));
if (map == null) {
map = new MultiKeyMap();
config.put(new Integer(taxonId), map);
// synonym configuration example: for features of class "Gene", if the type name of
// the synonym is "fullname" and "is_current" is true, set the "name" attribute of
// the new Gene to be this synonym and then make a Synonym object
map.put(new MultiKey("synonym", "Gene", "fullname", Boolean.TRUE),
Arrays.asList(new SetFieldConfigAction("name"),
CREATE_SYNONYM_ACTION));
map.put(new MultiKey("synonym", "Gene", "fullname", Boolean.FALSE),
Arrays.asList(CREATE_SYNONYM_ACTION));
map.put(new MultiKey("synonym", "Gene", "symbol", Boolean.TRUE),
Arrays.asList(new SetFieldConfigAction("symbol"),
CREATE_SYNONYM_ACTION));
map.put(new MultiKey("synonym", "Gene", "symbol", Boolean.FALSE),
Arrays.asList(CREATE_SYNONYM_ACTION));
map.put(new MultiKey("synonym", "Gene", "symbol", Boolean.FALSE),
Arrays.asList(new SetFieldConfigAction("GLEANRsymbol", GLEANR_PATTERN),
CREATE_SYNONYM_ACTION));
// dbxref table configuration example: for features of class "Gene", where the
// db.name is "FlyBase Annotation IDs" and "is_current" is true, set the
// "secondaryIdentifier" attribute of the new Gene to be this dbxref and then make a
// Synonym object
map.put(new MultiKey("dbxref", "Gene", FLYBASE_DB_NAME + " Annotation IDs",
Boolean.TRUE),
Arrays.asList(new SetFieldConfigAction("secondaryIdentifier"),
CREATE_SYNONYM_ACTION));
map.put(new MultiKey("dbxref", "Gene", FLYBASE_DB_NAME + " Annotation IDs",
Boolean.FALSE),
Arrays.asList(CREATE_SYNONYM_ACTION));
// null for the "is_current" means either TRUE or FALSE is OK.
map.put(new MultiKey("dbxref", "Gene", FLYBASE_DB_NAME, null),
Arrays.asList(CREATE_SYNONYM_ACTION));
map.put(new MultiKey("dbxref", "MRNA", FLYBASE_DB_NAME + " Annotation IDs",
Boolean.TRUE),
Arrays.asList(new SetFieldConfigAction("secondaryIdentifier"),
CREATE_SYNONYM_ACTION));
map.put(new MultiKey("dbxref", "TransposableElementInsertionSite", "drosdel", null),
Arrays.asList(new SetFieldConfigAction("symbol"),
CREATE_SYNONYM_ACTION));
map.put(new MultiKey("synonym", "ChromosomeStructureVariation", "fullname",
Boolean.TRUE),
Arrays.asList(new SetFieldConfigAction("name"),
CREATE_SYNONYM_ACTION));
map.put(new MultiKey("synonym", "ChromosomalDeletion", "fullname", Boolean.TRUE),
Arrays.asList(new SetFieldConfigAction("name"),
CREATE_SYNONYM_ACTION));
map.put(new MultiKey("synonym", "ChromosomalDuplication", "fullname", Boolean.TRUE),
Arrays.asList(new SetFieldConfigAction("name"),
CREATE_SYNONYM_ACTION));
map.put(new MultiKey("synonym", "ChromosomalInversion", "fullname", Boolean.TRUE),
Arrays.asList(new SetFieldConfigAction("name"),
CREATE_SYNONYM_ACTION));
map.put(new MultiKey("synonym", "ChromosomalTranslocation", "fullname", Boolean.TRUE),
Arrays.asList(new SetFieldConfigAction("name"),
CREATE_SYNONYM_ACTION));
map.put(new MultiKey("synonym", "Transposition", "fullname", Boolean.TRUE),
Arrays.asList(new SetFieldConfigAction("name"),
CREATE_SYNONYM_ACTION));
map.put(new MultiKey("synonym", "MRNA", "symbol", Boolean.TRUE),
Arrays.asList(new SetFieldConfigAction("symbol"),
CREATE_SYNONYM_ACTION));
map.put(new MultiKey("synonym", "MRNA", "symbol", Boolean.FALSE),
Arrays.asList(CREATE_SYNONYM_ACTION));
map.put(new MultiKey("dbxref", "MRNA", FLYBASE_DB_NAME + " Annotation IDs", null),
Arrays.asList(CREATE_SYNONYM_ACTION));
map.put(new MultiKey("dbxref", "MRNA", FLYBASE_DB_NAME, null),
Arrays.asList(CREATE_SYNONYM_ACTION));
// set the Allele.gene when there is an alleleof relationship between Allele and Gene
map.put(new MultiKey("relationship", "Allele", "alleleof", "Gene"),
Arrays.asList(new SetFieldConfigAction("gene")));
// Set the protein reference in the MRNA - "rev_relationship" means that the
// relationship table actually has Protein, producedby, MRNA. We configure like
// this so we can set a reference in MRNA rather than protein
map.put(new MultiKey("rev_relationship", "MRNA", "producedby", "Protein"),
Arrays.asList(new SetFieldConfigAction("protein")));
map.put(new MultiKey("relationship", "CDNAClone", "derived_assoc_cdna_clone", "Gene"),
Arrays.asList(new SetFieldConfigAction("gene")));
map.put(new MultiKey("relationship", "Gene", "producedby", "Protein"),
Arrays.asList(new SetFieldConfigAction("proteins")));
// featureprop configuration example: for features of class "Gene", if the type name
// of the prop is "cyto_range", set the "cytoLocation" attribute of the
// new Gene to be this property
map.put(new MultiKey("prop", "Gene", "cyto_range"),
Arrays.asList(new SetFieldConfigAction("cytoLocation")));
map.put(new MultiKey("prop", "Gene", "symbol"),
Arrays.asList(CREATE_SYNONYM_ACTION));
// the feature type for gene, eg. "rRNA_gene", "protein_coding_gene"
map.put(new MultiKey("prop", "Gene", "promoted_gene_type"),
Arrays.asList(new SetFieldConfigAction("featureType")));
map.put(new MultiKey("prop", "TransposableElementInsertionSite",
"curated_cytological_location"),
Arrays.asList(new SetFieldConfigAction("cytoLocation")));
ConfigAction alleleClassConfigAction = new AlleleClassSetFieldAction("alleleClass");
map.put(new MultiKey("prop", "Allele", "promoted_allele_class"),
Arrays.asList(alleleClassConfigAction));
// library config example: for features of class "CDNAClone", if the type name
// of the library is "stage", set the "stage" attribute of the
// new CDNAClone to be this property
map.put(new MultiKey("library", "CDNAClone", "stage"),
Arrays.asList(new SetFieldConfigAction("stage")));
// anatomy term config example: for features of class "CDNAClone" if there is an
// anatomy term, set a reference in CDNAClone.tissueSource
// map.put(new MultiKey("anatomyterm", "CDNAClone", null),
// Arrays.asList(new SetFieldConfigAction("tissueSource")));
// feature_cvterm example for Transposition: we create a featureTerms collection in the
// Transposition objects containing SequenceOntologyTerm objects. For the current
// feature we create one SequenceOntologyTerm object for each associated "SO" cvterm.
// We set the "name" field of the SequenceOntologyTerm to be the name from the cvterm
// table.
List<String> chromosomeStructureVariationClassNames =
Arrays.asList("ChromosomeStructureVariation", "ChromosomalDeletion",
"ChromosomalDuplication", "ChromosomalInversion",
"ChromosomalTranslocation", "Transposition");
for (String className: chromosomeStructureVariationClassNames) {
map.put(new MultiKey("cvterm", className, "SO"),
Arrays.asList(new CreateCollectionAction("SequenceOntologyTerm",
"featureTerms",
"name", true)));
}
// feature configuration example: for features of class "Exon", from "FlyBase",
// set the Gene.symbol to be the "name" field from the chado feature
map.put(new MultiKey("feature", "Exon", FLYBASE_DB_NAME, "name"),
Arrays.asList(new SetFieldConfigAction("symbol"),
CREATE_SYNONYM_ACTION));
// DO_NOTHING_ACTION means skip the name from this feature
map.put(new MultiKey("feature", "Chromosome", FLYBASE_DB_NAME, "name"),
Arrays.asList(DO_NOTHING_ACTION));
map.put(new MultiKey("feature", "ChromosomeBand", FLYBASE_DB_NAME, "name"),
Arrays.asList(DO_NOTHING_ACTION));
map.put(new MultiKey("feature", "TransposableElementInsertionSite", FLYBASE_DB_NAME,
"name"),
Arrays.asList(new SetFieldConfigAction("symbol", PB_INSERTION_PATTERN),
new CreateSynonymAction(PB_INSERTION_PATTERN),
new SetFieldConfigAction("secondaryIdentifier"),
new CreateSynonymAction()));
map.put(new MultiKey("feature", "Gene", FLYBASE_DB_NAME, "uniquename"),
Arrays.asList(new SetFieldConfigAction("primaryIdentifier")));
map.put(new MultiKey("feature", "Gene", FLYBASE_DB_NAME, "name"),
Arrays.asList(DO_NOTHING_ACTION));
map.put(new MultiKey("feature", "ChromosomeStructureVariation", FLYBASE_DB_NAME,
"name"),
Arrays.asList(new SetFieldConfigAction("secondaryIdentifier"),
CREATE_SYNONYM_ACTION));
// just make a Synonym because the secondaryIdentifier and the symbol are set from the
// dbxref and synonym tables
map.put(new MultiKey("feature", "MRNA", FLYBASE_DB_NAME, "name"),
Arrays.asList(new CreateSynonymAction()));
map.put(new MultiKey("feature", "PointMutation", FLYBASE_DB_NAME, "uniquename"),
Arrays.asList(new SetFieldConfigAction("name"),
new SetFieldConfigAction("primaryIdentifier"),
CREATE_SYNONYM_ACTION));
// name isn't set in flybase:
map.put(new MultiKey("feature", "PointMutation", FLYBASE_DB_NAME, "name"),
Arrays.asList(DO_NOTHING_ACTION));
map.put(new MultiKey("dbxref", "Protein", FLYBASE_DB_NAME + " Annotation IDs",
Boolean.TRUE),
Arrays.asList(CREATE_SYNONYM_ACTION));
map.put(new MultiKey("feature", "Protein", FLYBASE_DB_NAME, "name"),
Arrays.asList(CREATE_SYNONYM_ACTION));
map.put(new MultiKey("feature", "Protein", FLYBASE_DB_NAME, "uniquename"),
Arrays.asList(new SetFieldConfigAction("secondaryIdentifier")));
map.put(new MultiKey("dbxref", "Protein", "GB_protein", Boolean.TRUE),
Arrays.asList(new SetFieldConfigAction("genbankIdentifier"),
CREATE_SYNONYM_ACTION));
// transposable_element and natural_transposable_element
map.put(new MultiKey("feature", "TransposableElement", FLYBASE_DB_NAME, "name"),
Arrays.asList(new SetFieldConfigAction("secondaryIdentifier"),
new SetFieldConfigAction("symbol"),
CREATE_SYNONYM_ACTION));
map.put(new MultiKey("feature", "NaturalTransposableElement", FLYBASE_DB_NAME, "name"),
Arrays.asList(new SetFieldConfigAction("secondaryIdentifier"),
new SetFieldConfigAction("symbol"),
CREATE_SYNONYM_ACTION));
map.put(new MultiKey("relationship", "TransposableElement",
"producedby", "NaturalTransposableElement"),
Arrays.asList(new SetFieldConfigAction("insertedElement")));
map.put(new MultiKey("synonym", "NaturalTransposableElement", "fullname",
Boolean.TRUE),
Arrays.asList(new SetFieldConfigAction("name"),
CREATE_SYNONYM_ACTION));
}
return map;
}
/**
* {@inheritDoc}
*/
@Override
protected String getExtraFeatureConstraint() {
return "NOT ((cvterm.name = 'golden_path_region'"
+ " OR cvterm.name = 'ultra_scaffold')"
+ " AND (uniquename LIKE 'Unknown_%' OR uniquename LIKE '%_groupMISC'))"
+ " AND " + getLocatedGeneAllesSql();
}
/**
* Return an SQL constraint that accepts non-allele features, and accepts allele (FBal*)
* features only if they are related by 'alleleof' to a located gene.
*/
private String getLocatedGeneAllesSql() {
return "(NOT (uniquename LIKE 'FBal%') OR feature_id IN"
+ " (SELECT subject_id"
+ " FROM feature_relationship, cvterm"
+ " WHERE type_id = cvterm.cvterm_id"
+ " AND cvterm.name = 'alleleof'"
+ " AND object_id IN (" + getLocatedGenesSql() + ")))";
}
/**
* {@inheritDoc}
*/
@Override
protected Item makeFeature(Integer featureId, String chadoFeatureType, String interMineType,
String name, String uniqueName,
int seqlen, int taxonId) {
String realInterMineType = interMineType;
if (chadoFeatureType.equals("protein") && !uniqueName.startsWith("FBpp")) {
return null;
}
if (chadoFeatureType.equals("gene")) {
if (uniqueName.startsWith("FBal")) {
// fix type of allele "gene" features
realInterMineType = "Allele";
} else {
if (!locatedGeneIds.contains(featureId.intValue())) {
// ignore genes with no location
return null;
}
}
}
// ignore unknown chromosome from dpse
if (uniqueName.startsWith("Unknown_")) {
return null;
}
if (taxonId != 7227 && chadoFeatureType.equals("chromosome_arm")) {
// nothing is located on a chromosome_arm
return null;
}
if (chadoFeatureType.equals("chromosome")
&& !uniqueName.equals("dmel_mitochondrion_genome")) {
// ignore Chromosomes from flybase - features are located on ChromosomeArms except
// for mitochondrial features
return null;
}
if (chadoFeatureType.equals("chromosome_arm")
|| chadoFeatureType.equals("ultra_scaffold")) {
if (uniqueName.equals("dmel_mitochondrion_genome")) {
// ignore - all features are on the Chromosome object with uniqueName
// "dmel_mitochondrion_genome"
return null;
}
realInterMineType = "Chromosome";
}
if (chadoFeatureType.equals("golden_path_region")) {
// For organisms other than D. melanogaster sometimes we can convert a
// golden_path_region to an actual chromosome: if name is 2L, 4, etc
if (taxonId == 7237) {
// chromosomes are stored as golden_path_region
realInterMineType = "Chromosome";
} else {
if (taxonId != 7227 && !uniqueName.contains("_")) {
realInterMineType = "Chromosome";
} else {
// golden_path_fragment is the actual SO term
realInterMineType = "GoldenPathFragment";
}
}
}
if (chadoFeatureType.equals(CHROMOSOME_STRUCTURE_VARIATION_SO_NAME)) {
Integer cvtermId = chromosomeStructureVariationTypes.get(featureId);
if (cvtermId != null) {
ChadoCVTerm term = sequenceOntologyCV.getByChadoId(cvtermId);
for (String soName: CHROMOSOME_STRUCTURE_VARIATION_SO_MAP.keySet()) {
if (termOrChildrenNameMatches(term, soName)) {
realInterMineType = CHROMOSOME_STRUCTURE_VARIATION_SO_MAP.get(soName);
break;
}
}
}
}
if (chadoFeatureType.equals("transposable_element_insertion_site")
&& name == null && !uniqueName.startsWith("FBti")) {
// ignore this feature as it doesn't have an FBti identifier and there will be
// another feature for the same transposable_element_insertion_site that does have
// the FBti identifier
return null;
}
if (chadoFeatureType.equals("mRNA") && seqlen == 0) {
// flybase has > 7000 mRNA features that have no sequence and don't appear in their
// webapp so we filter them out
return null;
}
if (chadoFeatureType.equals("protein") && seqlen == 0) {
// flybase has ~ 2100 protein features that don't appear in their webapp so we
// filter them out
return null;
}
Item feature = getChadoDBConverter().createItem(realInterMineType);
if (realInterMineType.equals("Allele")) {
alleleIdMap.put(uniqueName, feature.getIdentifier());
}
return feature;
}
/**
* Return true iff the given term or one of its children is named termName.
*/
private boolean termOrChildrenNameMatches(ChadoCVTerm term, String termName) {
if (term.getName().equals(termName)) {
return true;
}
Set<ChadoCVTerm> children = term.getAllChildren();
for (ChadoCVTerm childTerm: children) {
if (childTerm.getName().equals(termName)) {
return true;
}
}
return false;
}
private static final List<String> FEATURES = Arrays.asList(
"gene", "mRNA", "transcript", "protein",
"intron", "exon", "regulatory_region", "enhancer", "EST", "cDNA_clone",
"miRNA", "snRNA", "ncRNA", "rRNA", "ncRNA", "snoRNA", "tRNA",
"chromosome_band", "transposable_element_insertion_site",
CHROMOSOME_STRUCTURE_VARIATION_SO_NAME,
"point_mutation", "natural_transposable_element",
"transposable_element"
);
/**
* Get a list of the chado/so types of the LocatedSequenceFeatures we wish to load. The list
* will not include chromosome-like features.
* @return the list of features
*/
@Override
protected List<String> getFeatures() {
return FEATURES;
}
/**
* For objects that have primaryIdentifier == null, set the primaryIdentifier to be the
* uniquename column from chado.
* {@inheritDoc}
*/
@Override
protected void extraProcessing(Connection connection, Map<Integer, FeatureData> features)
throws ObjectStoreException, SQLException {
createAllelesTempTable(connection);
createInsertionTempTable(connection);
for (FeatureData featureData: features.values()) {
if (!featureData.getFlag(FeatureData.IDENTIFIER_SET)) {
setAttribute(featureData.getIntermineObjectId(), "primaryIdentifier",
featureData.getChadoFeatureUniqueName());
}
}
processAlleleProps(connection, features);
Map<Integer, List<String>> mutagenMap = makeMutagenMap(connection);
for (Integer alleleFeatureId: mutagenMap.keySet()) {
FeatureData alleleDat = features.get(alleleFeatureId);
List<String> mutagenRefIds = new ArrayList<String>();
for (String mutagenDescription: mutagenMap.get(alleleFeatureId)) {
String mutagenIdentifier = getMutagen(mutagenDescription);
mutagenRefIds.add(mutagenIdentifier);
}
ReferenceList referenceList = new ReferenceList();
referenceList.setName("mutagens");
referenceList.setRefIds(mutagenRefIds);
getChadoDBConverter().store(referenceList, alleleDat.getIntermineObjectId());
}
createIndelReferences(connection);
createDeletionLocations(connection);
copyInsertionLocations(connection);
createInteractions(connection);
}
/**
* Create Interaction objects.
*/
private void createInteractions(Connection connection)
throws SQLException, ObjectStoreException {
Set<String> seenInteractions = new HashSet<String>();
ResultSet res = getInteractionResultSet(connection);
while (res.next()) {
Integer featureId = new Integer(res.getInt("feature_id"));
Integer otherFeatureId = new Integer(res.getInt("other_feature_id"));
String pubTitle = res.getString("pub_title");
Integer pubmedId = new Integer(res.getInt("pubmed_id"));
FeatureData featureData = getFeatureMap().get(featureId);
FeatureData otherFeatureData = getFeatureMap().get(otherFeatureId);
Item interaction = getChadoDBConverter().createItem("Interaction");
String shortName = "FlyBase" + ":" + featureData.getChadoFeatureUniqueName() + "_"
+ otherFeatureData.getChadoFeatureUniqueName();
int i = 0;
String newShortName = shortName;
while (seenInteractions.contains(newShortName)) {
i++;
newShortName = shortName + "-" + i;
}
seenInteractions.add(newShortName);
interaction.setAttribute("shortName", newShortName);
interaction.setAttribute("name", newShortName);
interaction.setReference("gene", featureData.getItemIdentifier());
interaction.addToCollection("interactingGenes", otherFeatureData.getItemIdentifier());
interaction.setAttribute("interactionType", "genetic");
String publicationItemId = makePublication(pubmedId);
String experimentItemIdentifier =
makeInteractionExperiment(pubTitle, publicationItemId);
interaction.setReference("experiment", experimentItemIdentifier);
OrganismData od = otherFeatureData.getOrganismData();
Item dataSetItem = getChadoDBConverter().getDataSetItem(od.getTaxonId());
interaction.addToCollection("dataSets", dataSetItem);
getChadoDBConverter().store(interaction);
}
}
/**
* Return the item identifier of the InteractionExperiment Item for the given title, creating
* the Item if necessary.
* @param experimentTitle the title of the experiment
* @param publicationItemIdentifier the item identifier of the publication for this experiment
* @return the interaction experiment item identifier
* @throws ObjectStoreException if the item can't be stored
*/
protected String makeInteractionExperiment(String experimentTitle,
String publicationItemIdentifier)
throws ObjectStoreException {
if (interactionExperiments.containsKey(experimentTitle)) {
return interactionExperiments.get(experimentTitle);
}
Item experiment = getChadoDBConverter().createItem("InteractionExperiment");
experiment.setAttribute("name", experimentTitle);
experiment.setReference("publication", publicationItemIdentifier);
getChadoDBConverter().store(experiment);
String experimentId = experiment.getIdentifier();
interactionExperiments.put(experimentTitle, experimentId);
return experimentId;
}
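// The pattern below matches featureprop location strings like
// "2R:12716549..12984803 (53D11;53F8)": group(1) is the chromosome name,
// group(2) the start, group(3) the end, and groups 4 and 5 the cytological
// band range inside the parentheses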
private static final Pattern DELETION_LOC_PATTERN =
Pattern.compile("^([^:]+):(\\d+)(?:-\\d+)?..(?:\\d+-)?(\\d+) \\(([^;\\)]+);?([^\\)]*)\\)$");
/**
* Create Location objects for deletions (chromosome_structure_variation) as they don't have
* locations in the featureloc table.
* @throws ObjectStoreException
*/
private void createDeletionLocations(Connection connection)
throws SQLException, ObjectStoreException {
ResultSet res = getDeletionLocationResultSet(connection);
while (res.next()) {
Integer delId = new Integer(res.getInt("deletion_feature_id"));
String locationText = res.getString("location_text");
Integer organismId = new Integer(res.getInt("deletion_organism_id"));
FeatureData delFeatureData = getFeatureMap().get(delId);
Matcher m = DELETION_LOC_PATTERN.matcher(locationText);
if (m.matches()) {
String chromosomeName = m.group(1);
int start = Integer.parseInt(m.group(2));
int end = Integer.parseInt(m.group(3));
if (start > end) {
int tmp = start;
start = end;
end = tmp;
}
if (delFeatureData == null) {
LOG.info("can't find deletion " + delId + " in feature map");
continue;
}
int taxonId = delFeatureData.getOrganismData().getTaxonId();
Integer chrFeatureId = getChromosomeFeatureMap(organismId).get(chromosomeName);
makeAndStoreLocation(chrFeatureId, delFeatureData, start, end, 1, taxonId);
} else {
throw new RuntimeException("can't parse deletion location: " + locationText);
}
}
}
private void makeAndStoreLocation(Integer chrFeatureId, FeatureData subjectFeatureData,
int start, int end, int strand, int taxonId)
throws ObjectStoreException {
FeatureData chrFeatureData = getFeatureMap().get(chrFeatureId);
Item location =
getChadoDBConverter().makeLocation(chrFeatureData.getItemIdentifier(),
subjectFeatureData.getItemIdentifier(),
start, end, strand, taxonId);
Item dataSetItem = getChadoDBConverter().getDataSetItem(taxonId);
location.addToCollection("dataSets", dataSetItem);
Reference chrLocReference = new Reference();
chrLocReference.setName("chromosomeLocation");
chrLocReference.setRefId(location.getIdentifier());
getChadoDBConverter().store(chrLocReference, subjectFeatureData.getIntermineObjectId());
getChadoDBConverter().store(location);
}
/**
* Create the ChromosomalDeletion.element1 and element2 references (to
* TransposableElementInsertionSite objects)
*/
private void createIndelReferences(Connection connection)
throws ObjectStoreException, SQLException {
ResultSet res = getIndelResultSet(connection);
int featureWarnings = 0;
while (res.next()) {
Integer delId = new Integer(res.getInt("deletion_feature_id"));
Integer insId = new Integer(res.getInt("insertion_feature_id"));
String breakType = res.getString("breakpoint_type");
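// breakpoint_type is the text after the last ':' of the breakpoint uniquename
// (see getIndelResultSet()); "bk1" maps to element1, anything else to element2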
Reference reference = new Reference();
if (breakType.equals("bk1")) {
reference.setName("element1");
} else {
reference.setName("element2");
}
FeatureData insFeatureData = getFeatureMap().get(insId);
if (insFeatureData == null) {
if (featureWarnings <= 20) {
if (featureWarnings < 20) {
LOG.warn("insertion " + insId
+ " was not found in the feature table");
} else {
LOG.warn("further warnings ignored");
}
featureWarnings++;
}
continue;
}
reference.setRefId(insFeatureData.getItemIdentifier());
FeatureData delFeatureData = getFeatureMap().get(delId);
if (delFeatureData == null) {
if (featureWarnings <= 20) {
if (featureWarnings < 20) {
LOG.warn("deletion " + delId
+ " was not found in the feature table");
} else {
LOG.warn("further warnings ignored");
}
featureWarnings++;
}
continue;
}
getChadoDBConverter().store(reference, delFeatureData.getIntermineObjectId());
}
}
private String getMutagen(String description) throws ObjectStoreException {
if (mutagensMap.containsKey(description)) {
return mutagensMap.get(description);
}
Item mutagen = getChadoDBConverter().createItem("Mutagen");
mutagen.setAttribute("description", description);
mutagensMap.put(description, mutagen.getIdentifier());
store(mutagen);
return mutagen.getIdentifier();
}
/**
* Copy the chromosome locations of progenitor insertions to their modified descendants.
* @param connection the Connection
*/
private void copyInsertionLocations(Connection connection)
throws ObjectStoreException, SQLException {
ResultSet res = getInsertionLocationsResultSet(connection);
while (res.next()) {
int subId = res.getInt("sub_id");
int chrId = res.getInt("chr_feature_id");
int fmin = res.getInt("fmin");
int fmax = res.getInt("fmax");
int start = fmin + 1;
int end = fmax;
FeatureData subFeatureData = getFeatureMap().get(new Integer(subId));
if (subFeatureData != null) {
// this is a hack - we should make sure that we only query for features that are in
// the feature map, ie. those for the current organism
int taxonId = subFeatureData.getOrganismData().getTaxonId();
makeAndStoreLocation(new Integer(chrId), subFeatureData, start, end, 1, taxonId);
}
}
}
private void store(Item item) throws ObjectStoreException {
getChadoDBConverter().store(item);
}
// map from anatomy identifier (eg. "FBbt0001234") to Item identifier
private Map<String, String> anatomyTermMap = new HashMap<String, String>();
// map from development term identifier (eg. "FBdv0001234") to Item identifier
private Map<String, String> developmentTermMap = new HashMap<String, String>();
// map from FlyBase cv identifier (eg. "FBcv0001234") to Item identifier
private Map<String, String> cvTermMap = new HashMap<String, String>();
private void processAlleleProps(Connection connection,
Map<Integer, FeatureData> features)
throws SQLException, ObjectStoreException {
Map<Integer, List<String>> annotationPubMap = makeAnnotationPubMap(connection);
ResultSet res = getAllelePropResultSet(connection);
while (res.next()) {
Integer featureId = new Integer(res.getInt("feature_id"));
String value = res.getString("value");
String propType = res.getString("type_name");
Integer featurePropId = new Integer(res.getInt("featureprop_id"));
FeatureData alleleFeatureData = features.get(featureId);
OrganismData od = alleleFeatureData.getOrganismData();
Item dataSetItem = getChadoDBConverter().getDataSetItem(od.getTaxonId());
String alleleItemIdentifier = alleleFeatureData.getItemIdentifier();
Item phenotypeAnnotation = null;
if (propType.equals("derived_pheno_manifest")) {
phenotypeAnnotation =
makePhenotypeAnnotation(alleleItemIdentifier, value,
dataSetItem, annotationPubMap.get(featurePropId));
phenotypeAnnotation.setAttribute("annotationType", "manifest in");
} else {
if (propType.equals("derived_pheno_class")) {
phenotypeAnnotation =
makePhenotypeAnnotation(alleleItemIdentifier, value,
dataSetItem, annotationPubMap.get(featurePropId));
phenotypeAnnotation.setAttribute("annotationType", "phenotype class");
}
}
if (phenotypeAnnotation != null) {
getChadoDBConverter().store(phenotypeAnnotation);
}
}
}
/**
* Return a Map from allele feature_id to mutagen. The mutagen is found by looking at cvterms
* that are associated with each feature and saving those terms that have "origin of mutation"
* as a parent term.
*/
private Map<Integer, List<String>> makeMutagenMap(Connection connection)
throws SQLException {
Map<Integer, List<String>> retMap = new HashMap<Integer, List<String>>();
ResultSet res = getAlleleCVTermsResultSet(connection);
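// The RESULTS label lets the inner loop skip straight to the next result row
// once one cvterm with an "origin of mutation" parent has been recorded for
// this feature, so each row contributes at most one mutagen name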
RESULTS:
while (res.next()) {
Integer featureId = new Integer(res.getInt("feature_id"));
Integer cvtermId = new Integer(res.getInt("cvterm_id"));
ChadoCVTerm cvterm = flyBaseMiscCv.getByChadoId(cvtermId);
Set<ChadoCVTerm> parents = cvterm.getAllParents();
for (ChadoCVTerm parent: parents) {
if (parent.getName().equals("origin of mutation")) {
String fixedName = XmlUtil.fixEntityNames(cvterm.getName());
List<String> mutagens;
if (retMap.containsKey(featureId)) {
mutagens = retMap.get(featureId);
} else {
mutagens = new ArrayList<String>();
retMap.put(featureId, mutagens);
}
mutagens.add(fixedName);
continue RESULTS;
}
}
}
return retMap;
}
/**
* Get result set of feature_id, cvterm_id pairs for the alleles in flybase chado.
* @param connection the Connection
* @return the cvterms
* @throws SQLException if there is a database problem
*/
protected ResultSet getAlleleCVTermsResultSet(Connection connection) throws SQLException {
String query = "SELECT DISTINCT feature.feature_id, cvterm.cvterm_id"
+ " FROM feature, feature_cvterm, cvterm"
+ " WHERE feature.feature_id = feature_cvterm.feature_id"
+ " AND feature.feature_id IN (" + getAlleleFeaturesSql() + ")"
+ " AND feature_cvterm.cvterm_id = cvterm.cvterm_id";
LOG.info("executing getAlleleCVTermsResultSet(): " + query);
Statement stmt = connection.createStatement();
ResultSet res = stmt.executeQuery(query);
return res;
}
/**
* Return a map from featureprop_id for alleles to publication item identifier
*/
private Map<Integer, List<String>> makeAnnotationPubMap(Connection connection)
throws SQLException, ObjectStoreException {
Map<Integer, List<String>> retMap = new HashMap<Integer, List<String>>();
ResultSet res = getAllelePropPubResultSet(connection);
while (res.next()) {
Integer featurePropId = new Integer(res.getInt("featureprop_id"));
String pubDbId = res.getString("pub_db_identifier");
Integer n = new Integer(Integer.parseInt(pubDbId));
String publicationItemIdentifier = makePublication(n);
if (!retMap.containsKey(featurePropId)) {
retMap.put(featurePropId, new ArrayList<String>());
}
retMap.get(featurePropId).add(publicationItemIdentifier);
}
return retMap;
}
/**
* Return a map from feature_id to seqlen
* @throws SQLException if something goes wrong
*/
// private Map<Integer, Integer> makeCDNALengthMap(Connection connection)
// throws SQLException {
// Map<Integer, Integer> retMap = new HashMap<Integer, Integer>();
// ResultSet res = getCDNALengthResultSet(connection);
// while (res.next()) {
// Integer featureId = new Integer(res.getInt("feature_id"));
// Integer seqlen = new Integer(res.getInt("seqlen"));
// retMap.put(featureId, seqlen);
// }
// return retMap;
// }
private Item makePhenotypeAnnotation(String alleleItemIdentifier, String value,
Item dataSetItem, List<String> publicationsItemIdList)
throws ObjectStoreException {
Item phenotypeAnnotation = getChadoDBConverter().createItem("PhenotypeAnnotation");
phenotypeAnnotation.addToCollection("dataSets", dataSetItem);
Pattern p = Pattern.compile(FYBASE_PROP_ATTRIBUTE_PATTERN);
Matcher m = p.matcher(value);
StringBuffer sb = new StringBuffer();
List<String> dbAnatomyTermIdentifiers = new ArrayList<String>();
List<String> dbDevelopmentTermIdentifiers = new ArrayList<String>();
List<String> dbCVTermIdentifiers = new ArrayList<String>();
while (m.find()) {
String field = m.group(1);
int colonPos = field.indexOf(':');
if (colonPos == -1) {
m.appendReplacement(sb, field);
} else {
String identifier = field.substring(0, colonPos);
if (identifier.startsWith(FLYBASE_ANATOMY_TERM_PREFIX)) {
dbAnatomyTermIdentifiers.add(addCVTermColon(identifier));
} else {
if (identifier.startsWith("FBdv")) {
dbDevelopmentTermIdentifiers.add(addCVTermColon(identifier));
} else {
if (identifier.startsWith("FBcv")) {
dbCVTermIdentifiers.add(addCVTermColon(identifier));
}
}
}
String text = field.substring(colonPos + 1);
m.appendReplacement(sb, text);
}
}
m.appendTail(sb);
/*
* ignore with for now because the with text is wrong in chado - see ticket #889
List<String> withAlleleIdentifiers = findWithAllele(value);
if (withAlleleIdentifiers.size() > 0) {
phenotypeAnnotation.setCollection("with", withAlleleIdentifiers);
}
*/
String valueNoRefs = sb.toString();
String valueNoUps = valueNoRefs.replaceAll("<up>", "[").replaceAll("</up>", "]");
phenotypeAnnotation.setAttribute("description", valueNoUps);
phenotypeAnnotation.setReference("allele", alleleItemIdentifier);
phenotypeAnnotation.setReference("subject", alleleItemIdentifier);
if (publicationsItemIdList != null && publicationsItemIdList.size() > 0) {
ReferenceList pubReferenceList =
new ReferenceList("publications", publicationsItemIdList);
phenotypeAnnotation.addCollection(pubReferenceList);
}
if (dbAnatomyTermIdentifiers.size() == 1) {
String anatomyIdentifier = dbAnatomyTermIdentifiers.get(0);
String anatomyTermItemId = makeAnatomyTerm(anatomyIdentifier);
phenotypeAnnotation.setReference("anatomyTerm", anatomyTermItemId);
phenotypeAnnotation.setReference("property", anatomyTermItemId);
} else {
if (dbAnatomyTermIdentifiers.size() > 1) {
throw new RuntimeException("more than one anatomy term: "
+ dbAnatomyTermIdentifiers);
}
}
if (dbDevelopmentTermIdentifiers.size() == 1) {
String developmentTermIdentifier = dbDevelopmentTermIdentifiers.get(0);
String developmentTermItemId = makeDevelopmentTerm(developmentTermIdentifier);
phenotypeAnnotation.setReference("developmentTerm", developmentTermItemId);
phenotypeAnnotation.setReference("property", developmentTermItemId);
} else {
if (dbDevelopmentTermIdentifiers.size() > 1) {
throw new RuntimeException("more than one development term: "
+ dbDevelopmentTermIdentifiers);
}
}
if (dbCVTermIdentifiers.size() > 0) {
for (String cvTermIdentifier: dbCVTermIdentifiers) {
String cvTermItemId = makeCVTerm(cvTermIdentifier);
phenotypeAnnotation.addToCollection("cvTerms", cvTermItemId);
}
}
return phenotypeAnnotation;
}
private static final Pattern FLYBASE_TERM_IDENTIFIER_PATTERN =
Pattern.compile("^FB[^\\d][^\\d]\\d+");
/**
* For a FlyBase cvterm identifier like "FBbt00000001", add a colon in the middle and return:
* "FBbt:00000001"
* @param identifier the identifier from chado
* @return the public identifier
*/
protected static String addCVTermColon(String identifier) {
Matcher m = FLYBASE_TERM_IDENTIFIER_PATTERN.matcher(identifier);
if (m.matches()) {
return identifier.substring(0, 4) + ":" + identifier.substring(4);
}
return identifier;
}
/**
* Return the item identifiers of the alleles mentioned in the with clauses of the argument.
* Currently unused because flybase with clauses are wrong - see ticket #889
*/
@SuppressWarnings("unused")
private List<String> findWithAllele(String value) {
Pattern p = Pattern.compile("with @(FBal\\d+):");
Matcher m = p.matcher(value);
List<String> foundIdentifiers = new ArrayList<String>();
while (m.find()) {
String identifier = m.group(1);
if (identifier.startsWith("FBal")) {
foundIdentifiers.add(identifier);
} else {
throw new RuntimeException("identifier in a with must start: \"FBal\" not: "
+ identifier);
}
}
List<String> alleleItemIdentifiers = new ArrayList<String>();
for (String foundIdentifier: foundIdentifiers) {
if (alleleIdMap.containsKey(foundIdentifier)) {
alleleItemIdentifiers.add(alleleIdMap.get(foundIdentifier));
} else {
// this allele wasn't stored so probably it didn't have the right organism - some
// GAL4 alleles have cerevisiae as organism, eg. FBal0060667:Scer\GAL4[sd-SG29.1]
// referenced by FBal0038994 Rac1[N17.Scer\UAS]
}
}
return alleleItemIdentifiers;
}
/**
* phenotype annotation creates and stores anatomy terms. so does librarycvterm
* @param identifier identifier for anatomy term
* @return refId for anatomy term object
* @throws ObjectStoreException if term can't be stored
*/
protected String makeAnatomyTerm(String identifier) throws ObjectStoreException {
String newIdentifier = identifier;
if (!newIdentifier.startsWith(FLYBASE_ANATOMY_TERM_PREFIX)) {
newIdentifier = FLYBASE_ANATOMY_TERM_PREFIX + identifier;
newIdentifier = addCVTermColon(newIdentifier);
}
if (anatomyTermMap.containsKey(newIdentifier)) {
return anatomyTermMap.get(newIdentifier);
}
Item anatomyTerm = getChadoDBConverter().createItem("AnatomyTerm");
anatomyTerm.setAttribute("identifier", newIdentifier);
getChadoDBConverter().store(anatomyTerm);
anatomyTermMap.put(newIdentifier, anatomyTerm.getIdentifier());
return anatomyTerm.getIdentifier();
}
private String makeDevelopmentTerm(String identifier) throws ObjectStoreException {
if (developmentTermMap.containsKey(identifier)) {
return developmentTermMap.get(identifier);
}
Item developmentTerm = getChadoDBConverter().createItem("DevelopmentTerm");
developmentTerm.setAttribute("identifier", identifier);
getChadoDBConverter().store(developmentTerm);
developmentTermMap.put(identifier, developmentTerm.getIdentifier());
return developmentTerm.getIdentifier();
}
private String makeCVTerm(String identifier) throws ObjectStoreException {
if (cvTermMap.containsKey(identifier)) {
return cvTermMap.get(identifier);
}
Item cvTerm = getChadoDBConverter().createItem("CVTerm");
cvTerm.setAttribute("identifier", identifier);
getChadoDBConverter().store(cvTerm);
cvTermMap.put(identifier, cvTerm.getIdentifier());
return cvTerm.getIdentifier();
}
/**
* Return a result set containing the interacting gene pairs, the title of the publication
* that reported the interaction and its pubmed id. The method is protected
* so that it can be overridden for testing.
* @param connection the Connection
* @throws SQLException if there is a database problem
* @return the ResultSet
*/
protected ResultSet getInteractionResultSet(Connection connection) throws SQLException {
String query =
" SELECT feature.feature_id as feature_id, "
+ " other_feature.feature_id as other_feature_id, "
+ " pub.title as pub_title, dbx.accession as pubmed_id "
+ " FROM feature, cvterm cvt, feature other_feature, "
+ " feature_relationship_pub frpb, pub, "
+ " feature_relationship fr, pub_dbxref pdbx, dbxref dbx, db "
+ " WHERE feature.feature_id = subject_id "
+ " AND object_id = other_feature.feature_id "
+ " AND fr.type_id = cvt.cvterm_id AND cvt.name = 'interacts_genetically' "
+ " AND fr.feature_relationship_id = frpb.feature_relationship_id "
+ " AND frpb.pub_id = pub.pub_id AND db.name='pubmed' "
+ " AND pdbx.is_current=true AND pub.pub_id=pdbx.pub_id "
+ " AND pdbx.dbxref_id = dbx.dbxref_id AND dbx.db_id=db.db_id "
+ " AND NOT feature.is_obsolete AND NOT other_feature.is_obsolete "
+ " AND feature.feature_id IN (" + getLocatedGenesSql() + ")"
+ " AND other_feature.feature_id IN (" + getLocatedGenesSql() + ")";
LOG.info("executing getInteractionResultSet(): " + query);
Statement stmt = connection.createStatement();
ResultSet res = stmt.executeQuery(query);
return res;
}
/**
* Return a result set containing the alleles and their featureprops. The method is protected
* so that it can be overridden for testing.
* @param connection the Connection
* @throws SQLException if there is a database problem
* @return the ResultSet
*/
protected ResultSet getAllelePropResultSet(Connection connection) throws SQLException {
String query =
"SELECT feature_id, value, cvterm.name AS type_name, featureprop_id"
+ " FROM featureprop, cvterm"
+ " WHERE featureprop.type_id = cvterm.cvterm_id"
+ " AND feature_id IN (" + getAlleleFeaturesSql() + ")"
+ " ORDER BY feature_id";
LOG.info("executing getAllelePropResultSet(): " + query);
Statement stmt = connection.createStatement();
ResultSet res = stmt.executeQuery(query);
return res;
}
/**
* Return a result set containing pairs of chromosome_structure_variation (deletions) and
* transposable_element_insertion_site (insertions). The method is protected
* so that it can be overridden for testing.
* @param connection the Connection
* @throws SQLException if there is a database problem
* @return the ResultSet
*/
protected ResultSet getIndelResultSet(Connection connection) throws SQLException {
String query =
"SELECT del.feature_id as deletion_feature_id,"
+ " ins.feature_id as insertion_feature_id,"
+ " substring(break.uniquename FROM ':([^:]+)$') AS breakpoint_type"
+ " FROM feature del, cvterm del_type, feature_relationship del_rel,"
+ " cvterm del_rel_type,"
+ " feature break, cvterm break_type,"
+ " feature_relationship ins_rel, cvterm ins_rel_type,"
+ " feature ins, cvterm ins_type"
+ " WHERE del_rel.object_id = del.feature_id"
+ " AND del_rel.subject_id = break.feature_id"
+ " AND ins_rel.subject_id = break.feature_id"
+ " AND ins_rel.object_id = ins.feature_id"
+ " AND del.type_id = del_type.cvterm_id"
+ " AND ins.type_id = ins_type.cvterm_id"
+ " AND del_type.name = 'chromosome_structure_variation'"
+ " AND ins_type.name = 'transposable_element_insertion_site'"
+ " AND del_rel.type_id = del_rel_type.cvterm_id"
+ " AND del_rel_type.name = 'break_of'"
+ " AND ins_rel.type_id = ins_rel_type.cvterm_id"
+ " AND ins_rel_type.name = 'progenitor'"
+ " AND break.type_id = break_type.cvterm_id"
+ " AND break_type.name = 'breakpoint'"
// ignore the progenitors so we only set element1 and element2 to be the "descendants"
+ " AND ins.feature_id NOT IN (SELECT obj_id FROM " + INSERTION_TEMP_TABLE_NAME + ")";
LOG.info("executing getIndelResultSet(): " + query);
Statement stmt = connection.createStatement();
ResultSet res = stmt.executeQuery(query);
return res;
}
/**
* Return a result set containing pairs of insertion feature_ids (eg. for "FBti0027974" =>
* "FBti0023081") and the fmin and fmax of the first insertion in the pair (ie. the progenitor).
* The second in the pair is the "Modified descendant of" the first. The pairs are found using
* the 'modified_descendant_of' relation type. All insertions are from DrosDel.
* The method is protected so that it can be overridden for testing.
* @param connection the Connection
* @throws SQLException if there is a database problem
* @return the ResultSet
*/
protected ResultSet getInsertionLocationsResultSet(Connection connection) throws SQLException {
String query = "SELECT * from " + INSERTION_TEMP_TABLE_NAME;
LOG.info("executing getInsertionLocationsResultSet(): " + query);
Statement stmt = connection.createStatement();
ResultSet res = stmt.executeQuery(query);
return res;
}
/**
* Return a result set containing location for deletions (chromosome_structure_variation)
* objects. The locations are in the featureprop table in the form:
* 2R:12716549..12984803 (53D11;53F8)
* The method is protected so that it can be overridden for testing.
* @param connection the Connection
* @throws SQLException if there is a database problem
* @return the ResultSet
*/
protected ResultSet getDeletionLocationResultSet(Connection connection) throws SQLException {
String query =
"SELECT f.feature_id as deletion_feature_id, value as location_text, "
+ " feature.organism_id as deletion_organism_id"
+ "FROM feature f, feature b, feature_relationship fr, cvterm cvt1, cvterm cvt2, "
+ " featureloc fl "
+ "WHERE f.feature_id = fr.object_id AND fr.type_id = cvt1.cvterm_id "
+ "AND cvt1.name = 'break_of' "
+ "AND fr.subject_id = b.feature_id AND b.type_id = cvt2.cvterm_id "
+ "AND cvt2.name = 'breakpoint' "
+ "AND b.feature_id = fl.feature_id and f.name ~ '^Df.+' and f.uniquename like 'FBab%' "
+ "AND f.is_obsolete = false ";
LOG.info("executing getDeletionLocationResultSet(): " + query);
Statement stmt = connection.createStatement();
ResultSet res = stmt.executeQuery(query);
return res;
}
/**
* Return a result set containing the featureprop_id and the publication identifier of the
* featureprops for all alleles. The method is protected so that it can be overridden for
* testing.
* @param connection the Connection
* @throws SQLException if there is a database problem
* @return the ResultSet
*/
protected ResultSet getAllelePropPubResultSet(Connection connection) throws SQLException {
String query =
"SELECT DISTINCT featureprop_pub.featureprop_id, dbxref.accession as pub_db_identifier"
+ " FROM featureprop, featureprop_pub, dbxref, db, pub, pub_dbxref"
+ " WHERE featureprop_pub.pub_id = pub.pub_id"
+ " AND featureprop.featureprop_id = featureprop_pub.featureprop_id"
+ " AND pub.pub_id = pub_dbxref.pub_id"
+ " AND pub_dbxref.dbxref_id = dbxref.dbxref_id"
+ " AND dbxref.db_id = db.db_id"
+ " AND db.name = 'pubmed'"
+ " AND feature_id IN (" + getAlleleFeaturesSql() + ")"
+ " ORDER BY featureprop_id";
LOG.info("executing getAllelePropPubResultSet(): " + query);
Statement stmt = connection.createStatement();
ResultSet res = stmt.executeQuery(query);
return res;
}
/**
* Return a result set containing the feature_id and its seqlen
* The method is protected so that it can be overridden for
* testing.
* @param connection the Connection
* @throws SQLException if there is a database problem
* @return the ResultSet
*/
protected ResultSet getCDNALengthResultSet(Connection connection) throws SQLException {
String query =
"SELECT cl.feature_id, fls.seqlen "
+ "FROM feature cl, feature fls, feature_relationship fr, cvterm fls_type "
+ "WHERE fls_type.name IN ('cDNA','BAC_cloned_genomic_insert') "
+ " AND cl.feature_id=fr.object_id "
+ " AND fr.subject_id=fls.feature_id "
+ " AND fls.type_id=fls_type.cvterm_id ";
LOG.info("executing getCDNALengthResultSet(): " + query);
Statement stmt = connection.createStatement();
ResultSet res = stmt.executeQuery(query);
return res;
}
/**
* Convert ISO entities from FlyBase to HTML entities.
* {@inheritDoc}
*/
@Override
protected String fixIdentifier(@SuppressWarnings("unused") FeatureData fdat, String identifier)
{
if (StringUtils.isBlank(identifier)) {
return identifier;
}
return XmlUtil.fixEntityNames(identifier);
}
/**
* {@inheritDoc}
*/
@Override
protected FeatureData makeFeatureData(int featureId, String type, String uniqueName,
String name, String md5checksum, int seqlen,
int organismId) throws ObjectStoreException {
if (type.equals("protein")) {
// TODO what data are we trying to avoid with this?
if (!uniqueName.startsWith("FBpp")) {
return null;
}
if (proteinFeatureDataMap.containsKey(md5checksum)) {
FeatureData protein = proteinFeatureDataMap.get(md5checksum);
// make a synonym for the protein we're about to discard
if (protein != null) {
if (!StringUtil.isEmpty(uniqueName)
&& !protein.getExistingSynonyms().contains(uniqueName)) {
Item synonym = createSynonym(protein, "identifier", uniqueName, true,
null);
store(synonym);
}
if (!StringUtil.isEmpty(name)
&& !protein.getExistingSynonyms().contains(name)) {
Item synonym = createSynonym(protein, "name", name, false, null);
store(synonym);
}
}
return protein;
}
FeatureData fdat = super.makeFeatureData(featureId, type, uniqueName, name, md5checksum,
seqlen, organismId);
proteinFeatureDataMap.put(md5checksum, fdat);
return fdat;
}
return super.makeFeatureData(featureId, type, uniqueName, name, md5checksum, seqlen,
organismId);
}
/**
* Return a query that gets the feature_ids of the alleles in the feature table.
*/
private String getAlleleFeaturesSql() {
return "SELECT feature_id FROM " + ALLELE_TEMP_TABLE_NAME;
}
/**
* Method to add dataSets and DataSources to items before storing
*/
private void processItem(Item item, Integer taxonId) {
if (item.getClassName().equals("DataSource")
|| item.getClassName().equals("DataSet")
|| item.getClassName().equals("Organism")
|| item.getClassName().equals("Sequence")) {
return;
}
if (taxonId == null) {
ClassLoader currentClassLoader = Thread.currentThread().getContextClassLoader();
ClassLoader classLoader = getClass().getClassLoader();
Thread.currentThread().setContextClassLoader(classLoader);
try {
throw new RuntimeException("getCurrentTaxonId() returned null while processing "
+ item);
} finally {
Thread.currentThread().setContextClassLoader(currentClassLoader);
}
}
ChadoDBConverter converter = getChadoDBConverter();
DataSetStoreHook.setDataSets(getModel(), item,
converter.getDataSetItem(taxonId.intValue()).getIdentifier(),
converter.getDataSourceItem().getIdentifier());
}
}
|
package org.intermine.bio.dataconversion;
import org.apache.tools.ant.Task;
import org.apache.tools.ant.BuildException;
import org.apache.tools.ant.DirectoryScanner;
import org.apache.tools.ant.types.FileSet;
import java.util.Set;
import java.util.HashSet;
import java.util.StringTokenizer;
import java.util.Iterator;
import java.io.File;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.BufferedReader;
import java.io.BufferedWriter;
/**
* Read a set of Uniprot XML files and write out only those entries whose organism is in
* the given set.
*
* @author Richard Smith
*/
public class UniprotFilterTask extends Task
{
protected FileSet fileSet;
protected File tgtDir;
protected Set organisms = new HashSet();
/**
* Set the source fileset.
* @param fileSet the fileset
*/
public void addFileSet(FileSet fileSet) {
this.fileSet = fileSet;
}
/**
* Set the target directory
* @param tgtDir the target directory
*/
public void setTgtDir(File tgtDir) {
this.tgtDir = tgtDir;
}
/**
* A space separated list of NCBI taxon ids to include in the filter output.
* @param organismStr a space separated list of NCBI taxon ids
*/
public void setOrganisms(String organismStr) {
StringTokenizer st = new StringTokenizer(organismStr, " ");
while (st.hasMoreTokens()) {
this.organisms.add(st.nextToken().trim());
}
}
/**
* @see Task#execute
*/
public void execute() throws BuildException {
if (fileSet == null) {
throw new BuildException("fileSet must be specified");
} else {
System.err.print("Fileset: " + fileSet.toString() + "\n");
}
if (tgtDir == null) {
throw new BuildException("tgtDir must be specified");
} else {
System.err.print("TGT Dir: " + tgtDir.toString() + "\n");
}
if (organisms.isEmpty()) {
System.err.print("No Organisms to Filter on - doing everything!" + "\n");
} else {
String spacer = " ";
StringBuffer buff = new StringBuffer("Organism Filter List: ");
for (Iterator it = organisms.iterator(); it.hasNext(); ) {
buff.append(it.next() + spacer);
}
System.err.print(buff.toString() + "\n");
}
try {
UniprotXmlFilter filter = new UniprotXmlFilter(organisms);
DirectoryScanner ds = fileSet.getDirectoryScanner(getProject());
String[] files = ds.getIncludedFiles();
for (int i = 0; i < files.length; i++) {
File toRead = new File(ds.getBasedir(), files[i]);
System.err.println("Processing file " + toRead.toString());
String outName = toRead.getName().substring(0, toRead.getName().indexOf('.'))
+ "_filtered.xml";
File out = new File(tgtDir, outName);
BufferedWriter writer = new BufferedWriter(new FileWriter(out));
filter.filter(new BufferedReader(new FileReader(toRead)), writer);
writer.flush();
writer.close();
}
} catch (Exception e) {
throw new BuildException (e);
}
}
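/*
* A minimal sketch of driving this task programmatically (outside an Ant build),
* using only the standard Ant Project/FileSet API; the paths and taxon ids are
* purely illustrative:
*
* UniprotFilterTask task = new UniprotFilterTask();
* task.setProject(new org.apache.tools.ant.Project());
* FileSet uniprotFiles = new FileSet();
* uniprotFiles.setDir(new File("/data/uniprot"));
* uniprotFiles.setIncludes("*.xml");
* task.addFileSet(uniprotFiles);
* task.setTgtDir(new File("/data/uniprot/filtered"));
* task.setOrganisms("7227 4932");
* task.execute();
*/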
}
|
package de.afbb.bibo.servlet.server;
import java.io.IOException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
/**
* collection of utility methods
*
* @author fi13.pendrulat
*/
public final class Utils {
private Utils() {
}
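/**
* Return the index of the (n+1)-th occurrence of toFind in str (n == 0 gives the
* first occurrence), or -1 if there are not that many occurrences.
*/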
public static int nthOccurrence(final String str, final String toFind, int n) {
int pos = str.indexOf(toFind, 0);
while (n-- > 0 && pos != -1) {
pos = str.indexOf(toFind, pos + 1);
}
return pos;
}
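/**
* Return the path segment of the request URI at the given zero-based position,
* prefixed with "/"; a URI without path segments yields just "/".
*/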
public static String getRequestPart(final HttpServletRequest request, final int part) {
final String[] requestParts = request.getRequestURI().split("/");
return "/" + (requestParts.length > 1 ? requestParts[part + 1] : "");
}
public static void returnErrorMessage(final Class<?> servlet, final HttpServletRequest request,
final HttpServletResponse response) throws IOException {
response.setStatus(HttpServletResponse.SC_NOT_FOUND);
response.getWriter().println("the servlet: " + servlet.getSimpleName() + " could not serve your request: "
+ request.getRequestURI());
}
}
|
package se.kth.infosys.smx.ladok3;
import java.util.ArrayList;
import org.apache.camel.Exchange;
import org.apache.camel.component.mock.MockEndpoint;
import org.apache.camel.test.blueprint.CamelBlueprintTestSupport;
import org.junit.Test;
public class Ladok3EnrichTest extends CamelBlueprintTestSupport {
@Override
protected String[] loadConfigAdminConfigurationFile() {
// which .cfg file to use, and the name of the persistence-id
return new String[]{"src/test/resources/test.properties", "se.kth.infosys.smx.ladok3"};
}
@Override
protected String getBlueprintDescriptor() {
return "/OSGI-INF/blueprint/producer-enrich-blueprint.xml";
}
@Test
public void testladok3() throws Exception {
MockEndpoint mock = getMockEndpoint("mock:result");
mock.expectedMinimumMessageCount(1);
assertMockEndpointsSatisfied();
Exchange exchange = mock.getExchanges().get(0);
ArrayList<Object> aggregation = exchange.getIn().getBody(ArrayList.class);
assertFalse(aggregation.isEmpty());
}
}
|
package org.lantern;
import static org.lantern.Tr.*;
import java.awt.AWTException;
import java.awt.Image;
import java.awt.MenuItem;
import java.awt.PopupMenu;
import java.awt.SystemTray;
import java.awt.TrayIcon;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.io.File;
import java.util.Map;
import javax.swing.ImageIcon;
import javax.swing.SwingUtilities;
import org.apache.commons.lang.SystemUtils;
import org.apache.commons.lang3.StringUtils;
import org.lantern.event.Events;
import org.lantern.event.GoogleTalkStateEvent;
import org.lantern.event.ProxyConnectionEvent;
import org.lantern.event.QuitEvent;
import org.lantern.event.UpdateEvent;
import org.lantern.state.Mode;
import org.lantern.state.Model;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.eventbus.Subscribe;
import com.google.inject.Inject;
import com.google.inject.Singleton;
/**
* Class for handling all system tray interactions.
*/
@Singleton
public class SystemTrayImpl implements org.lantern.SystemTray {
private static final Logger log = LoggerFactory
.getLogger(SystemTrayImpl.class);
private SystemTray tray;
private TrayIcon trayIcon;
private PopupMenu menu;
private MenuItem connectionStatusItem;
private MenuItem updateItem;
private Map<String, Object> updateData;
private boolean active = false;
private final static String LABEL_DISCONNECTED = tr("TRAY_NOT_CONNECTED");
private final static String LABEL_CONNECTING = tr("TRAY_CONNECTING");
private final static String LABEL_CONNECTED = tr("TRAY_CONNECTED");
private final static String ICON_DISCONNECTED = "16off.png";
private final static String ICON_CONNECTING = "16off.png";
private final static String ICON_CONNECTED = "16on.png";
private final BrowserService browserService;
private final Model model;
private String connectionStatusText;
private Image trayImage;
/**
* Creates a new system tray handler class.
*
*/
@Inject
public SystemTrayImpl(final BrowserService browserService,
final Model model) {
this.browserService = browserService;
this.model = model;
Events.register(this);
}
@Override
public void start() {
log.debug("Starting system tray");
SwingUtilities.invokeLater(new Runnable() {
@Override
public void run() {
createTray();
}
});
}
@Override
public void stop() {
SwingUtilities.invokeLater(new Runnable() {
@Override
public void run() {
tray.remove(trayIcon);
}
});
}
@Override
public boolean isSupported() {
return SystemTray.isSupported();
}
@Override
public void createTray() {
tray = SystemTray.getSystemTray();
if (tray == null) {
log.warn("The system tray is not available");
} else {
log.info("Creating system tray...");
// Another thread could have set the tray item image before the
// tray item was created.
if (trayImage == null) {
// Image was not yet set
final String imageName;
if (SystemUtils.IS_OS_MAC_OSX) {
imageName = ICON_DISCONNECTED;
} else {
imageName = ICON_CONNECTED;
}
trayImage = newImage(imageName);
}
this.trayIcon = new TrayIcon(trayImage);
trayIcon.setToolTip(
tr("LANTERN") + " " + LanternClientConstants.VERSION);
this.menu = new PopupMenu();
this.connectionStatusItem = new MenuItem();
// Other threads can set the label before we've constructed the
// menu item, so check for it.
if (StringUtils.isNotBlank(connectionStatusText)) {
connectionStatusItem.setLabel(connectionStatusText);
} else {
connectionStatusItem.setLabel(LABEL_DISCONNECTED);
}
connectionStatusItem.setEnabled(false);
menu.add(connectionStatusItem);
final MenuItem dashboardItem = new MenuItem(tr("TRAY_SHOW_LANTERN"));
dashboardItem.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent e) {
log.debug("Reopening browser?");
browserService.reopenBrowser();
}
});
menu.add(dashboardItem);
menu.addSeparator();
final MenuItem quitItem = new MenuItem(tr("TRAY_QUIT"));
quitItem.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent e) {
log.debug("Got exit call");
// This tells things like the Proxifier to stop proxying.
Events.eventBus().post(new QuitEvent());
System.exit(0);
}
});
menu.add(quitItem);
trayIcon.setPopupMenu(menu);
if (SystemUtils.IS_OS_WINDOWS || SystemUtils.IS_OS_LINUX) {
trayIcon.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent e) {
log.debug("opening dashboard");
browserService.reopenBrowser();
}
});
log.debug("Added selection");
}
try {
tray.add(trayIcon);
} catch (AWTException e) {
System.out.println("TrayIcon could not be added.");
}
this.active = true;
}
log.debug("Finished creating tray...");
}
private void setImage(final Image image) {
SwingUtilities.invokeLater(new Runnable() {
@Override
public void run() {
if (trayIcon == null) {
trayImage = image;
} else {
trayIcon.setImage(image);
}
}
});
}
private void setStatusLabel(final String status) {
SwingUtilities.invokeLater(new Runnable() {
@Override
public void run() {
// XXX i18n
if (connectionStatusItem == null) {
connectionStatusText = status;
} else {
connectionStatusItem.setLabel(status);
}
}
});
}
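/**
* Load a tray icon image, preferring the copy under install/common/ and falling
* back to the working directory; the image is scaled to 16x16 pixels. Returns
* null if no icon file is found.
*/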
protected static Image newImage(String name) {
final File iconFile;
final File iconCandidate1 = new File("install/common/" + name);
if (iconCandidate1.isFile()) {
log.debug("Using install dir icon");
iconFile = iconCandidate1;
} else {
iconFile = new File(name);
}
if (!iconFile.isFile()) {
log.error("Still no icon file at: {}", iconFile.getAbsolutePath());
return null;
}
return (new ImageIcon(iconFile.getAbsolutePath())).getImage()
.getScaledInstance(16, 16, Image.SCALE_SMOOTH);
}
@Override
public void addUpdate(final Map<String, Object> data) {
log.info("Adding update data: {}", data);
if (this.updateData != null && this.updateData.equals(data)) {
log.info("Ignoring duplicate update data");
return;
}
this.updateData = data;
SwingUtilities.invokeLater(new Runnable() {
@Override
public void run() {
if (updateItem == null) {
String label = tr("TRAY_UPDATE") + " " +
data.get(LanternConstants.UPDATE_KEY);
updateItem = new MenuItem(label);
updateItem.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent e) {
log.info("Got update call");
NativeUtils.openUri((String) updateData.get(
"installerUrl"));
}
});
}
}
});
}
@Subscribe
public void onUpdate(final UpdateEvent update) {
addUpdate(update.getData());
}
@Subscribe
public void onConnectivityStateChanged(final ProxyConnectionEvent csce) {
final ConnectivityStatus cs = csce.getConnectivityStatus();
log.debug("Received connectivity state changed: {}", cs);
if (!this.model.getSettings().isUiEnabled()) {
log.info("Ignoring event with UI disabled");
return;
}
onConnectivityStatus(cs);
}
@Subscribe
public void onGoogleTalkState(final GoogleTalkStateEvent event) {
if (model.getSettings().getMode() == Mode.get) {
log.debug("Not linking Google Talk state to connectivity " +
"state in get mode");
return;
}
final GoogleTalkState state = event.getState();
final ConnectivityStatus cs;
switch (state) {
case connected:
cs = ConnectivityStatus.CONNECTED;
break;
case notConnected:
cs = ConnectivityStatus.DISCONNECTED;
break;
case LOGIN_FAILED:
cs = ConnectivityStatus.DISCONNECTED;
break;
case connecting:
cs = ConnectivityStatus.CONNECTING;
break;
default:
log.error("Should never get here...");
cs = ConnectivityStatus.DISCONNECTED;
break;
}
onConnectivityStatus(cs);
}
private void onConnectivityStatus(final ConnectivityStatus cs) {
switch (cs) {
case DISCONNECTED:
changeIcon(ICON_DISCONNECTED);
changeStatusLabel(LABEL_DISCONNECTED);
break;
case CONNECTED:
changeIcon(ICON_CONNECTED);
changeStatusLabel(LABEL_CONNECTED);
break;
case CONNECTING:
changeIcon(ICON_CONNECTING);
changeStatusLabel(LABEL_CONNECTING);
break;
default:
break;
}
}
@Override
public boolean isActive() {
return this.active;
}
private void changeIcon(final String fileName) {
SwingUtilities.invokeLater(new Runnable() {
@Override
public void run() {
if (SystemUtils.IS_OS_MAC_OSX) {
log.info("Customizing image on OSX...");
final Image image = newImage(fileName);
setImage(image);
}
}
});
}
private void changeStatusLabel(final String status) {
setStatusLabel(status);
}
}
|
package org.nanopub;
import java.util.ArrayList;
import java.util.List;
import org.openrdf.model.Resource;
import org.openrdf.model.Statement;
import org.openrdf.model.URI;
import org.openrdf.model.Value;
import org.openrdf.model.impl.ContextStatementImpl;
import org.openrdf.model.impl.StatementImpl;
import org.openrdf.model.impl.URIImpl;
import org.openrdf.model.vocabulary.RDF;
/**
* This class allows for the programmatic creation of nanopubs in a step-wise fashion.
*
* @author Tobias Kuhn
*/
public class NanopubCreator {
private boolean finalized = false;
private URI nanopubUri;
private URI headUri, assertionUri, provenanceUri, pubinfoUri;
private List<Statement> assertion, provenance, pubinfo;
private List<Statement> statements;
private Nanopub nanopub;
private static final String headSuffix = "Head";
private static final String assertionSuffix = "Ass";
private static final String provenanceSuffix = "Prov";
private static final String pubinfoSuffix = "Info";
public NanopubCreator() {
init();
}
public NanopubCreator(URI nanopubUri) {
this();
setNanopubUri(nanopubUri);
}
public NanopubCreator(String nanopubUri) {
this();
setNanopubUri(nanopubUri);
}
private void init() {
assertion = new ArrayList<Statement>();
provenance = new ArrayList<Statement>();
pubinfo = new ArrayList<Statement>();
}
public void setNanopubUri(URI nanopubUri) {
if (finalized) throw new RuntimeException("Already finalized");
this.nanopubUri = nanopubUri;
if (headUri == null) headUri = new URIImpl(nanopubUri + headSuffix);
if (assertionUri == null) assertionUri = new URIImpl(nanopubUri + assertionSuffix);
if (provenanceUri == null) provenanceUri = new URIImpl(nanopubUri + provenanceSuffix);
if (pubinfoUri == null) pubinfoUri = new URIImpl(nanopubUri + pubinfoSuffix);
}
public void setNanopubUri(String nanopubUri) {
setNanopubUri(new URIImpl(nanopubUri));
}
public void setAssertionUri(URI assertionUri) {
if (finalized) throw new RuntimeException("Already finalized");
this.assertionUri = assertionUri;
}
public void setAssertionUri(String assertionUri) {
setAssertionUri(new URIImpl(assertionUri));
}
public void setProvenanceUri(URI provenanceUri) {
if (finalized) throw new RuntimeException("Already finalized");
this.provenanceUri = provenanceUri;
}
public void setProvenanceUri(String provenanceUri) {
setProvenanceUri(new URIImpl(provenanceUri));
}
public void setPubinfoUri(URI pubinfoUri) {
if (finalized) throw new RuntimeException("Already finalized");
this.pubinfoUri = pubinfoUri;
}
public void setPubinfoUri(String pubinfoUri) {
setPubinfoUri(new URIImpl(pubinfoUri));
}
public void addAssertionStatements(Statement... statements) {
if (finalized) throw new RuntimeException("Already finalized");
for (Statement st : statements) {
assertion.add(st);
}
}
public void addAssertionStatement(Resource subj, URI pred, Value obj) {
addAssertionStatements(new StatementImpl(subj, pred, obj));
}
public void addProvenanceStatements(Statement... statements) {
if (finalized) throw new RuntimeException("Already finalized");
for (Statement st : statements) {
provenance.add(st);
}
}
public void addProvenanceStatement(Resource subj, URI pred, Value obj) {
addProvenanceStatements(new StatementImpl(subj, pred, obj));
}
public void addPubinfoStatements(Statement... statements) {
if (finalized) throw new RuntimeException("Already finalized");
for (Statement st : statements) {
pubinfo.add(st);
}
}
public void addPubinfoStatement(Resource subj, URI pred, Value obj) {
addPubinfoStatements(new StatementImpl(subj, pred, obj));
}
public Nanopub finalizeNanopub() throws MalformedNanopubException {
if (finalized) {
return nanopub;
}
if (nanopubUri == null) throw new MalformedNanopubException("No nanopub URI specified");
collectStatements();
nanopub = new NanopubImpl(statements);
finalized = true;
return nanopub;
}
private void collectStatements() {
statements = new ArrayList<Statement>();
addStatement(nanopubUri, RDF.TYPE, Nanopub.NANOPUB_TYPE_URI, headUri);
addStatement(nanopubUri, Nanopub.HAS_ASSERTION_URI, assertionUri, headUri);
addStatement(nanopubUri, Nanopub.HAS_PROVENANCE_URI, provenanceUri, headUri);
addStatement(nanopubUri, Nanopub.HAS_PUBINFO_URI, pubinfoUri, headUri);
for (Statement st : assertion) {
addStatement(st.getSubject(), st.getPredicate(), st.getObject(), assertionUri);
}
for (Statement st : provenance) {
addStatement(st.getSubject(), st.getPredicate(), st.getObject(), provenanceUri);
}
for (Statement st : pubinfo) {
addStatement(st.getSubject(), st.getPredicate(), st.getObject(), pubinfoUri);
}
}
private void addStatement(Resource subj, URI pred, Value obj, Resource context) {
statements.add(new ContextStatementImpl(subj, pred, obj, context));
}
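/*
* A minimal sketch of the intended step-wise use; the URIs below are purely
* illustrative (note that the assertion graph URI defaults to the nanopub URI
* plus the "Ass" suffix, see setNanopubUri above):
*
* NanopubCreator creator = new NanopubCreator("http://example.org/np1");
* creator.addAssertionStatement(new URIImpl("http://example.org/drugX"),
* new URIImpl("http://example.org/treats"), new URIImpl("http://example.org/diseaseY"));
* creator.addProvenanceStatement(new URIImpl("http://example.org/np1Ass"),
* new URIImpl("http://www.w3.org/ns/prov#wasDerivedFrom"),
* new URIImpl("http://example.org/some-study"));
* creator.addPubinfoStatement(new URIImpl("http://example.org/np1"),
* new URIImpl("http://purl.org/dc/terms/creator"),
* new URIImpl("http://example.org/somebody"));
* Nanopub nanopub = creator.finalizeNanopub(); // may throw MalformedNanopubException
*/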
}
|
// File path shortening code adapted from:
package org.scijava.util;
import java.io.DataInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.FilenameFilter;
import java.io.IOException;
import java.net.JarURLConnection;
import java.net.MalformedURLException;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Collection;
import java.util.Date;
import java.util.jar.JarEntry;
import java.util.jar.JarFile;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* Useful methods for working with file paths.
*
* @author Johannes Schindelin
* @author Curtis Rueden
* @author Grant Harris
*/
public final class FileUtils {
public static final int DEFAULT_SHORTENER_THRESHOLD = 4;
public static final String SHORTENER_BACKSLASH_REGEX = "\\\\";
public static final String SHORTENER_SLASH_REGEX = "/";
public static final String SHORTENER_BACKSLASH = "\\";
public static final String SHORTENER_SLASH = "/";
public static final String SHORTENER_ELLIPSE = "...";
/** A regular expression to match filenames containing version information. */
private static final Pattern VERSION_PATTERN = buildVersionPattern();
private FileUtils() {
// prevent instantiation of utility class
}
/**
* Gets the absolute path to the given file, with the directory separator
* standardized to forward slash, like most platforms use.
*
* @param file The file whose path will be obtained and standardized.
* @return The file's standardized absolute path.
*/
public static String getPath(final File file) {
final String path = file.getAbsolutePath();
final String slash = System.getProperty("file.separator");
return getPath(path, slash);
}
/**
* Gets a standardized path based on the given one, with the directory
* separator standardized from the specific separator to forward slash, like
* most platforms use.
*
* @param path The path to standardize.
* @param separator The directory separator to be standardized.
* @return The standardized path.
*/
public static String getPath(final String path, final String separator) {
// NB: Standardize directory separator (i.e., avoid Windows nonsense!).
return path.replaceAll(Pattern.quote(separator), "/");
}
/**
* Extracts the file extension from a file.
*
* @param file the file object
* @return the file extension (excluding the dot), or the empty string when
* the file name does not contain dots
*/
public static String getExtension(final File file) {
final String name = file.getName();
final int dot = name.lastIndexOf('.');
if (dot < 0) return "";
return name.substring(dot + 1);
}
/**
* Extracts the file extension from a file path.
*
* @param path the path to the file (relative or absolute)
* @return the file extension (excluding the dot), or the empty string when
* the file name does not contain dots
*/
public static String getExtension(final String path) {
return getExtension(new File(path));
}
/** Gets the {@link Date} of the file's last modification. */
public static Date getModifiedTime(final File file) {
final long modifiedTime = file.lastModified();
final Calendar c = Calendar.getInstance();
c.setTimeInMillis(modifiedTime);
return c.getTime();
}
/**
* Reads the contents of the given file into a new byte array.
*
* @see DigestUtils#string(byte[]) To convert a byte array to a string.
* @throws IOException If the file cannot be read.
*/
public static byte[] readFile(final File file) throws IOException {
final long length = file.length();
if (length > Integer.MAX_VALUE) {
throw new IllegalArgumentException("File too large");
}
final DataInputStream dis = new DataInputStream(new FileInputStream(file));
final byte[] bytes = new byte[(int) length];
dis.readFully(bytes);
dis.close();
return bytes;
}
/**
* Writes the given byte array to the specified file.
*
* @see DigestUtils#bytes(String) To convert a string to a byte array.
* @throws IOException If the file cannot be written.
*/
public static void writeFile(final File file, final byte[] bytes)
throws IOException
{
final FileOutputStream out = new FileOutputStream(file);
try {
out.write(bytes);
}
finally {
out.close();
}
}
public static String stripFilenameVersion(final String filename) {
final Matcher matcher = VERSION_PATTERN.matcher(filename);
if (!matcher.matches()) return filename;
return matcher.group(1) + matcher.group(5);
}
/**
* Lists all versions of a given (possibly versioned) file name.
*
* @param directory the directory to scan
* @param filename the file name to use
* @return the list of matches
*/
public static File[] getAllVersions(final File directory,
final String filename)
{
final Matcher matcher = VERSION_PATTERN.matcher(filename);
if (!matcher.matches()) {
final File file = new File(directory, filename);
return file.exists() ? new File[] { file } : null;
}
final String baseName = matcher.group(1);
final String classifier = matcher.group(6);
return directory.listFiles(new FilenameFilter() {
@Override
public boolean accept(final File dir, final String name) {
if (!name.startsWith(baseName)) return false;
final Matcher matcher2 = VERSION_PATTERN.matcher(name);
return matcher2.matches() && baseName.equals(matcher2.group(1)) &&
equals(classifier, matcher2.group(6));
}
private boolean equals(final String a, final String b) {
if (a == null) {
return b == null;
}
return a.equals(b);
}
});
}
public static File urlToFile(final URL url) {
return url == null ? null : urlToFile(url.toString());
}
public static File urlToFile(final String url) {
String path = url;
if (path.startsWith("jar:")) {
// remove "jar:" prefix and "!/" suffix
final int index = path.indexOf("!/");
path = path.substring(4, index);
}
try {
if (PlatformUtils.isWindows() && path.matches("file:[A-Za-z]:.*")) {
path = "file:/" + path.substring(5);
}
return new File(new URL(path).toURI());
}
catch (final MalformedURLException e) {
// NB: URL is not completely well-formed.
}
catch (final URISyntaxException e) {
// NB: URL is not completely well-formed.
}
if (path.startsWith("file:")) {
// pass through the URL as-is, minus "file:" prefix
path = path.substring(5);
return new File(path);
}
throw new IllegalArgumentException("Invalid URL: " + url);
}
/**
* Shortens the path to a maximum of 4 path elements.
*
* @param path the path to the file (relative or absolute)
* @return shortened path
*/
public static String shortenPath(final String path) {
return shortenPath(path, DEFAULT_SHORTENER_THRESHOLD);
}
/**
* Shortens the path based on the given maximum number of path elements. E.g.,
* "C:/1/2/test.txt" returns "C:/1/.../test.txt" if threshold is 1.
*
* @param path the path to the file (relative or absolute)
* @param threshold the number of directories to keep unshortened
* @return shortened path
*/
public static String shortenPath(final String path, final int threshold) {
String regex = SHORTENER_BACKSLASH_REGEX;
String sep = SHORTENER_BACKSLASH;
if (path.indexOf("/") > 0) {
regex = SHORTENER_SLASH_REGEX;
sep = SHORTENER_SLASH;
}
String pathtemp[] = path.split(regex);
// remove empty elements
int elem = 0;
{
final String newtemp[] = new String[pathtemp.length];
int j = 0;
for (int i = 0; i < pathtemp.length; i++) {
if (!pathtemp[i].equals("")) {
newtemp[j++] = pathtemp[i];
elem++;
}
}
pathtemp = newtemp;
}
if (elem > threshold) {
final StringBuilder sb = new StringBuilder();
int index = 0;
// drive or protocol
final int pos2dots = path.indexOf(":");
if (pos2dots > 0) {
// case c:\ c:/ etc.
sb.append(path.substring(0, pos2dots + 2));
index++;
if (path.indexOf(":/") > 0 && pathtemp[0].length() > 2) {
sb.append(SHORTENER_SLASH);
}
}
else {
final boolean isUNC =
path.substring(0, 2).equals(SHORTENER_BACKSLASH_REGEX);
if (isUNC) {
sb.append(SHORTENER_BACKSLASH).append(SHORTENER_BACKSLASH);
}
}
for (; index <= threshold; index++) {
sb.append(pathtemp[index]).append(sep);
}
if (index == (elem - 1)) {
sb.append(pathtemp[elem - 1]);
}
else {
sb.append(SHORTENER_ELLIPSE).append(sep).append(pathtemp[elem - 1]);
}
return sb.toString();
}
return path;
}
/**
* Compacts a path into a given number of characters. The result is similar to
* the Win32 API PathCompactPathExA.
*
* @param path the path to the file (relative or absolute)
* @param limit the number of characters to which the path should be limited
* @return shortened path
*/
public static String limitPath(final String path, final int limit) {
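// Example: limitPath("C:/temp/filename.txt", 15) keeps the trailing file name,
// prefixes it with the ellipse and returns "...filename.txt"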
if (path.length() <= limit) return path;
final char shortPathArray[] = new char[limit];
final char pathArray[] = path.toCharArray();
final char ellipseArray[] = SHORTENER_ELLIPSE.toCharArray();
final int pathindex = pathArray.length - 1;
final int shortpathindex = limit - 1;
// fill the array from the end
int i = 0;
for (; i < limit; i++) {
if (pathArray[pathindex - i] != '/' && pathArray[pathindex - i] != '\\') {
shortPathArray[shortpathindex - i] = pathArray[pathindex - i];
}
else {
break;
}
}
// check how much space is left
final int free = limit - i;
if (free < SHORTENER_ELLIPSE.length()) {
// fill the beginning with ellipse
for (int j = 0; j < ellipseArray.length; j++) {
shortPathArray[j] = ellipseArray[j];
}
}
else {
// fill the beginning with path and leave room for the ellipse
int j = 0;
for (; j + ellipseArray.length < free; j++) {
shortPathArray[j] = pathArray[j];
}
// ... add the ellipse
for (int k = 0; j + k < free; k++) {
shortPathArray[j + k] = ellipseArray[k];
}
}
return new String(shortPathArray);
}
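	// Usage sketch (path is illustrative): the file name is always kept, the leading
	// part of the path fills the remaining budget, and the ellipsis sits in between.
	// Assuming the ellipsis constant is "...", this yields:
	//
	//   limitPath("/very/long/path/to/some/file.txt", 15);
	//   // -> "/ver...file.txt" (exactly 15 characters)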
/**
* Creates a temporary directory.
* <p>
* Since there is no atomic operation to do that, we create a temporary file,
* delete it and create a directory in its place. To avoid race conditions, we
* use the optimistic approach: if the directory cannot be created, we try to
* obtain a new temporary file rather than erroring out.
* </p>
* <p>
* It is the caller's responsibility to make sure that the directory is
* deleted; see {@link #deleteRecursively(File)}.
* </p>
*
* @param prefix The prefix string to be used in generating the file's name;
* see {@link File#createTempFile(String, String, File)}
* @param suffix The suffix string to be used in generating the file's name;
* see {@link File#createTempFile(String, String, File)}
* @return An abstract pathname denoting a newly-created empty directory
* @throws IOException
*/
public static File createTemporaryDirectory(final String prefix,
final String suffix) throws IOException
{
return createTemporaryDirectory(prefix, suffix, null);
}
/**
* Creates a temporary directory.
* <p>
* Since there is no atomic operation to do that, we create a temporary file,
* delete it and create a directory in its place. To avoid race conditions, we
* use the optimistic approach: if the directory cannot be created, we try to
* obtain a new temporary file rather than erroring out.
* </p>
* <p>
* It is the caller's responsibility to make sure that the directory is
* deleted; see {@link #deleteRecursively(File)}.
* </p>
*
* @param prefix The prefix string to be used in generating the file's name;
* see {@link File#createTempFile(String, String, File)}
* @param suffix The suffix string to be used in generating the file's name;
* see {@link File#createTempFile(String, String, File)}
* @param directory The directory in which the file is to be created, or null
* if the default temporary-file directory is to be used
   * @return An abstract pathname denoting a newly-created empty directory
* @throws IOException
*/
public static File createTemporaryDirectory(final String prefix,
final String suffix, final File directory) throws IOException
{
for (int counter = 0; counter < 10; counter++) {
final File file = File.createTempFile(prefix, suffix, directory);
if (!file.delete()) {
throw new IOException("Could not delete file " + file);
}
// in case of a race condition, just try again
if (file.mkdir()) return file;
}
throw new IOException(
"Could not create temporary directory (too many race conditions?)");
}
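	// Usage sketch (names are illustrative): pair the directory with
	// deleteRecursively(File), since cleanup is the caller's responsibility.
	//
	//   final File tmp = createTemporaryDirectory("work-", ".d");
	//   try {
	//       // ... write files under tmp ...
	//   }
	//   finally {
	//       deleteRecursively(tmp);
	//   }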
/**
* Deletes a directory recursively.
*
* @param directory The directory to delete.
* @return whether it succeeded (see also {@link File#delete()})
*/
public static boolean deleteRecursively(final File directory) {
if (directory == null) return true;
final File[] list = directory.listFiles();
if (list == null) return true;
for (final File file : list) {
if (file.isFile()) {
if (!file.delete()) return false;
}
else if (file.isDirectory()) {
if (!deleteRecursively(file)) return false;
}
}
return directory.delete();
}
/**
* Recursively lists the contents of the referenced directory. Directories are
* excluded from the result. Supported protocols include {@code file} and
* {@code jar}.
*
* @param directory The directory whose contents should be listed.
* @return A collection of {@link URL}s representing the directory's contents.
* @see #listContents(URL, boolean, boolean)
*/
public static Collection<URL> listContents(final URL directory) {
return listContents(directory, true, true);
}
/**
* Lists all contents of the referenced directory. Supported protocols include
* {@code file} and {@code jar}.
*
* @param directory The directory whose contents should be listed.
* @param recurse Whether to list contents recursively, as opposed to only the
* directory's direct contents.
* @param filesOnly Whether to exclude directories in the resulting collection
* of contents.
* @return A collection of {@link URL}s representing the directory's contents.
*/
public static Collection<URL> listContents(final URL directory,
final boolean recurse, final boolean filesOnly)
{
return appendContents(new ArrayList<URL>(), directory, recurse, filesOnly);
}
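	// Usage sketch (URL is illustrative): list the entries directly below a
	// directory inside a JAR, without recursing into subdirectories.
	//
	//   final URL dir = new URL("jar:file:/tmp/lib.jar!/com/example/");
	//   final Collection<URL> direct = listContents(dir, false, true);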
/**
* Recursively adds contents from the referenced directory to an existing
* collection. Directories are excluded from the result. Supported protocols
* include {@code file} and {@code jar}.
*
* @param result The collection to which contents should be added.
* @param directory The directory whose contents should be listed.
* @return A collection of {@link URL}s representing the directory's contents.
* @see #appendContents(Collection, URL, boolean, boolean)
*/
public static Collection<URL> appendContents(final Collection<URL> result,
final URL directory)
{
return appendContents(result, directory, true, true);
}
/**
* Add contents from the referenced directory to an existing collection.
* Supported protocols include {@code file} and {@code jar}.
*
* @param result The collection to which contents should be added.
* @param directory The directory whose contents should be listed.
* @param recurse Whether to append contents recursively, as opposed to only
* the directory's direct contents.
* @param filesOnly Whether to exclude directories in the resulting collection
* of contents.
* @return A collection of {@link URL}s representing the directory's contents.
*/
public static Collection<URL> appendContents(final Collection<URL> result,
final URL directory, final boolean recurse, final boolean filesOnly)
{
if (directory == null) return result; // nothing to append
final String protocol = directory.getProtocol();
if (protocol.equals("file")) {
final File dir = urlToFile(directory);
final File[] list = dir.listFiles();
if (list != null) {
for (final File file : list) {
try {
if (!filesOnly || file.isFile()) {
result.add(file.toURI().toURL());
}
if (recurse && file.isDirectory()) {
appendContents(result, file.toURI().toURL(), recurse, filesOnly);
}
}
catch (final MalformedURLException e) {
e.printStackTrace();
}
}
}
}
else if (protocol.equals("jar")) {
try {
final String url = directory.toString();
final int bang = url.indexOf("!/");
if (bang < 0) return result;
final String prefix = url.substring(bang + 2);
final String baseURL = url.substring(0, bang + 2);
final JarURLConnection connection =
(JarURLConnection) new URL(baseURL).openConnection();
final JarFile jar = connection.getJarFile();
for (final JarEntry entry : new IteratorPlus<JarEntry>(jar.entries())) {
final String urlEncoded =
new URI(null, null, entry.getName(), null).toString();
if (urlEncoded.length() > prefix.length() && // omit directory itself
urlEncoded.startsWith(prefix))
{
if (filesOnly && urlEncoded.endsWith("/")) {
// URL is directory; exclude it
continue;
}
if (!recurse) {
// check whether this URL is a *direct* child of the directory
final int slash = urlEncoded.indexOf("/", prefix.length());
if (slash >= 0 && slash != urlEncoded.length() - 1) {
// not a direct child
continue;
}
}
result.add(new URL(baseURL + urlEncoded));
}
}
jar.close();
}
catch (final IOException e) {
e.printStackTrace();
}
catch (final URISyntaxException e) {
throw new IllegalArgumentException(e);
}
}
return result;
}
// -- Helper methods --
/** Builds the {@link #VERSION_PATTERN} constant. */
private static Pattern buildVersionPattern() {
final String version =
"\\d+(\\.\\d+|\\d{7})+[a-z]?\\d?(-[A-Za-z0-9.]+?|\\.GA)*?";
final String suffix = "\\.jar(-[a-z]*)?";
return Pattern.compile("(.+?)(-" + version + ")?((-(" + classifiers() +
"))?(" + suffix + "))");
}
/** Helper method of {@link #buildVersionPattern()}. */
private static String classifiers() {
final String[] classifiers = {
"swing",
"swt",
"shaded",
"sources",
"javadoc",
"native",
"linux-x86",
"linux-x86_64",
"macosx-x86_64",
"windows-x86",
"windows-x86_64",
"android-arm",
"android-x86",
};
final StringBuilder sb = new StringBuilder("(");
for (final String classifier : classifiers) {
if (sb.length() > 1) sb.append("|");
sb.append(classifier);
}
sb.append(")");
return sb.toString();
}
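	// Illustrative file names the resulting pattern is meant to dissect (assumed,
	// based on the regular expression built above):
	//
	//   "foo-1.2.3.jar"           base "foo", version "-1.2.3"
	//   "foo-1.2.3-sources.jar"   base "foo", version "-1.2.3", classifier "sources"
	//   "foo.jar"                 base "foo", no version component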
// -- Deprecated methods --
/**
* Returns the {@link Matcher} object dissecting a versioned file name.
*
* @param filename the file name
* @return the {@link Matcher} object
* @deprecated see {@link #stripFilenameVersion(String)}
*/
@Deprecated
public static Matcher matchVersionedFilename(final String filename) {
return VERSION_PATTERN.matcher(filename);
}
}
|
package org.takes.rs;
import java.io.IOException;
import java.io.InputStream;
import java.util.Arrays;
import java.util.LinkedList;
import java.util.List;
import lombok.EqualsAndHashCode;
import org.takes.Response;
/**
 * Response decorator with additional headers.
*
* <p>The class is immutable and thread-safe.
*
* @author Yegor Bugayenko (yegor@teamed.io)
* @version $Id$
* @since 0.1
*/
@EqualsAndHashCode(callSuper = true)
public final class RsWithHeaders extends RsWrap {
/**
* Ctor.
* @param res Original response
* @param headers Headers
*/
public RsWithHeaders(final Response res, final CharSequence... headers) {
this(res, Arrays.asList(headers));
}
    //@todo #160:DEV Implement the concatenation and transformation, in
    // conjunction with the Concat and Transform classes, to get rid of the
    // List.add() calls in the anonymous Response class's head() method.
/**
* Ctor.
* @param res Original response
* @param headers Headers
*/
public RsWithHeaders(final Response res,
final Iterable<? extends CharSequence> headers) {
super(
new Response() {
@Override
public List<String> head() throws IOException {
final List<String> head = new LinkedList<String>();
for (final String hdr : res.head()) {
head.add(hdr);
}
for (final CharSequence header : headers) {
head.add(header.toString().trim());
}
return head;
}
@Override
public InputStream body() throws IOException {
return res.body();
}
}
);
}
}
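// Usage sketch (header values are illustrative; assumes org.takes.rs.RsEmpty is available):
//
//   final Response rsp = new RsWithHeaders(
//       new RsEmpty(),
//       "Content-Type: text/plain",
//       "X-Request-Id: 42"
//   );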
|
package ru.lj.alamar.microbe;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.PrintWriter;
import java.text.DecimalFormat;
import java.util.Properties;
import java.util.Random;
import java.util.Arrays;
import ru.yandex.bolts.collection.Cf;
import ru.yandex.bolts.collection.ListF;
/**
* @author ilyak
*/
public class Model {
    private static final DecimalFormat FMT = new DecimalFormat("0.####"); // NB: fraction-digit pattern assumed
public static void main(String[] args) throws Exception {
if (args.length == 0) {
System.err.println("Usage: model {model-name} [RNG-seed] [key=value]...");
System.err.println("See MODELS directory");
System.exit(1);
}
        if (args.length > 1 && args[1] == null) {
args = new String[] { args[0] }; /* anti-maven */
}
String modelName = args[0].replace(".properties", "");
PrintWriter out = output(modelName, args);
try {
Properties model = loadModel(modelName, args, out);
print(out, "model = " + modelName);
Random r = new Random(Integer.parseInt(model.getProperty("seed")));
runSimulation(r, model, out);
} finally {
out.close();
System.out.println("Simulation complete for model: " + modelName + " " + Cf.list(args).drop(1).mkString(" "));
}
}
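    // Example invocation (model name, seed and overrides are illustrative);
    // properties are read from models/<name>.properties and then overridden by
    // key=value arguments:
    //
    //   java ru.lj.alamar.microbe.Model baseline 12345 population=500 steps=2000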
static void runSimulation(Random r, Properties model, PrintWriter out) throws IOException {
ListF<Microbe> microbes = Cf.arrayList();
int population = Integer.parseInt(model.getProperty("population"));
float normalFitness = Float.parseFloat(model.getProperty("normal.fitness"));
int chromosomes = Integer.parseInt(model.getProperty("chromosomes"));
int genes = Integer.parseInt(model.getProperty("genes"));
for (int i = 0; i < population; i++) {
microbes.add(new Microbe(normalFitness, chromosomes, genes, false));
}
int variploidPopulation = Integer.parseInt(model.getProperty("variploid.population"));
for (int i = 0; i < variploidPopulation; i++) {
microbes.add(new Microbe(normalFitness, chromosomes, genes, true));
}
float geneMutationChance = Float.parseFloat(model.getProperty("gene.mutation.chance"));
float negativeEffect = Float.parseFloat(model.getProperty("negative.effect"));
float mutationPositiveChance = Float.parseFloat(model.getProperty("mutation.positive.chance"));
float positiveEffect = Float.parseFloat(model.getProperty("positive.effect"));
float luckRatio = Float.parseFloat(model.getProperty("luck.ratio"));
float downsizeChance = Float.parseFloat(model.getProperty("downsize.chance"));
float conversionChance = Float.parseFloat(model.getProperty("conversion.chance"));
float crossingChance = Float.parseFloat(model.getProperty("crossing.chance"));
int maxVariploidChromosomes = Integer.parseInt(model.getProperty("max.variploid.chromosomes"));
float horizontalTransferRatio = Float.parseFloat(model.getProperty("horizontal.transfer.ratio"));
float chromosomeSubstitutionRatio = Float.parseFloat(model.getProperty("chromosome.substitution.ratio"));
float chromosomeExchangeRatio = Float.parseFloat(model.getProperty("chromosome.exchange.ratio"));
boolean inexactDuplication = "true".equalsIgnoreCase(model.getProperty("inexact.chromosome.duplication"));
boolean mitosis = "true".equalsIgnoreCase(model.getProperty("mitosis"));
int steps = Integer.parseInt(model.getProperty("steps"));
print(out, "step\tpopulation\taverage fitness");
for (int s = 0; s < steps; s++) {
float totalFitness = 0f;
int[] ploidy = new int[10];
float totalChromosomes = 0;
for (Microbe microbe : microbes) {
microbe.mutate(r, geneMutationChance, negativeEffect, mutationPositiveChance, positiveEffect, conversionChance, crossingChance);
totalFitness += microbe.fitness();
totalChromosomes += microbe.getChromosomes().length;
if (microbe.getPloidy() <= 9) {
ploidy[microbe.isChangePloidy() ? microbe.getPloidy() : 0]++;
}
}
for (int t = 0; t < horizontalTransferRatio * totalChromosomes; t++) {
Microbe donor = microbes.get(r.nextInt(microbes.size()));
Microbe recipient = microbes.get(r.nextInt(microbes.size()));
recipient.horizontalTransfer(r, donor);
}
for (int t = 0; t < chromosomeSubstitutionRatio * totalChromosomes; t++) {
Microbe donor = microbes.get(r.nextInt(microbes.size()));
Microbe recipient = microbes.get(r.nextInt(microbes.size()));
recipient.chromosomeSubstitution(r, donor);
}
for (int t = 0; t < chromosomeExchangeRatio * totalChromosomes; t++) {
Microbe donor = microbes.get(r.nextInt(microbes.size()));
Microbe recipient = microbes.get(r.nextInt(microbes.size()));
recipient.chromosomeExchange(r, donor);
}
float avgFitness = totalFitness / (float) microbes.size();
microbes = Microbe.selectOffspring(r, microbes, luckRatio, maxVariploidChromosomes, inexactDuplication, downsizeChance, mitosis);
if (microbes.isEmpty()) {
break;
}
print(out, s + "\t" + microbes.size() + "\t" + FMT.format(avgFitness));
if (variploidPopulation > 0) {
printPloidy(out, ploidy, microbes.size());
}
if (s % 10 == 0) {
out.flush();
}
}
/*for (Microbe microbe : microbes.shuffle()) {
for (float[] chromosome : microbe.getChromosomes()) {
for (float gene : chromosome) {
out.print(FMT.format(gene));
out.print("\t");
}
out.println();
}
out.println();
}*/
}
private static final int BAR_WIDTH = 50;
static void printPloidy(PrintWriter out, int[] ploidy, int population) throws IOException {
StringBuilder bar = new StringBuilder();
StringBuilder table = new StringBuilder();
for (int i = 0; i < 10; i++) {
for (int b = 0; b < (ploidy[i] * BAR_WIDTH + population / 2) / population; b++) {
bar.append(i == 0 ? "M" : Integer.toString(i));
}
table.append("\t").append(ploidy[i]);
}
print(out, bar.append(table).toString());
}
static void print(PrintWriter out, String line) {
System.out.println(line);
out.println(line);
}
static PrintWriter output(String modelName, String[] args) throws IOException {
String trail = "";
for (int a = 1; a < args.length; a++) {
trail += "-" + args[a].replaceAll(" ", "").replaceAll("=", "-").replaceAll("\\.", "");
}
File output = new File("models/" + modelName + trail + ".txt");
if (output.exists()) {
System.err.println("Creating back-up copy of simulation results");
output.renameTo(new File(output.getPath() + ".bak"));
}
return new PrintWriter(output);
}
static Properties loadModel(String modelName, String[] args, final PrintWriter out) throws IOException {
Properties model = new Properties() {
public String getProperty(String name) {
String value = super.getProperty(name);
print(out, name + " = " + value);
return value;
}
};
loadPropertiesFile(model, "default");
loadPropertiesFile(model, modelName);
String baseModelName = model.getProperty("base.model");
if (baseModelName != null) {
// No support for nesting!
// XXX do we need it at all when we have command-line properties?
loadPropertiesFile(model, baseModelName);
loadPropertiesFile(model, modelName);
}
for (int a = 1; a < args.length; a++) {
String arg = args[a];
if (arg.matches("^[0-9]+$")) {
model.setProperty("seed", arg);
continue;
}
int eq = arg.indexOf("=");
if (eq <= 0) {
throw new RuntimeException("Cannot parse key=value: " + arg);
}
model.setProperty(arg.substring(0, eq).trim(), arg.substring(eq + 1).trim());
}
return model;
}
static void loadPropertiesFile(Properties model, String modelName) throws IOException {
FileInputStream stream = new FileInputStream(new File("models/" + modelName + ".properties"));
try {
model.load(stream);
} finally {
try {
stream.close();
} catch (Exception e) {
System.err.println(e);
}
}
}
}
|
package seedu.unburden.ui;
import javafx.fxml.FXML;
import javafx.scene.Node;
import javafx.scene.control.Label;
import javafx.scene.layout.HBox;
import seedu.unburden.model.task.ReadOnlyTask;
//@@Gauri Joshi A0143095H
public class TaskCard extends UiPart{
private static final String FXML = "TaskListCard.fxml";
@FXML
private HBox cardPane;
@FXML
private Label name;
@FXML
private Label taskD;
@FXML
private Label date;
@FXML
private Label startTime;
@FXML
private Label endTime;
@FXML
private Label id;
@FXML
private Label tags;
@FXML
private Label done;
private ReadOnlyTask task;
private int displayedIndex;
public TaskCard(){
}
public static TaskCard load(ReadOnlyTask task, int displayedIndex){
TaskCard card = new TaskCard();
card.task = task;
card.displayedIndex = displayedIndex;
return UiPartLoader.loadUiPart(card);
}
@FXML
public void initialize() {
id.setText(displayedIndex + ". ");
name.setText(task.getName().fullName);
if(task.getDone()){
cardPane.setStyle("-fx-background-color : #f97f9c");
}
taskD.setText(task.getTaskDescription().fullTaskDescriptions);
date.setText(task.getDate().fullDate);
startTime.setText(task.getStartTime().fullTime);
endTime.setText(task.getEndTime().fullTime);
done.setText(" [ " + task.getDoneString() + " ] ");
tags.setText(" " + task.tagsString());
/*
if(person.getDate().fullDate != "NIL" && person.getStartTime().fullTime != "NIL"){
name.setText(
person.getName().fullName + "\n"
+ "Deadline : " + person.getDate().fullDate + "\n"
+ "Start Time : " + person.getStartTime().fullTime + "\n"
+ "End Time : " + person.getEndTime().fullTime + "\n"
);
id.setText(displayedIndex + ". ");
//date.setText(person.getDate().fullDate);
//startTime.setText(person.getStartTime().fullTime);
//endTime.setText(person.getEndTime().fullTime);
tags.setText(" " + person.tagsString());
}
if(person.getStartTime().fullTime == "NIL" && person.getEndTime().fullTime == "NIL" && person.getDate().fullDate != "NIL"){
name.setText(person.getName().fullName + "\n"
+ "Deadline : " + person.getDate().fullDate + "\n");
id.setText(displayedIndex + ". ");
tags.setText(" " + person.tagsString());
}
if(person.getStartTime().fullTime == "NIL" && person.getEndTime().fullTime == "NIL" && person.getDate().fullDate == "NIL"){
name.setText(person.getName().fullName + "\n");
id.setText(displayedIndex + ". ");
tags.setText(" " + person.tagsString());
}
*/
}
public HBox getLayout() {
return cardPane;
}
@Override
public void setNode(Node node) {
cardPane = (HBox)node;
}
@Override
public String getFxmlPath() {
return FXML;
}
}
|
package bonsai.examples.model;
import bonsai.examples.model.*;
import org.chocosolver.solver.constraints.*;
import org.chocosolver.solver.variables.*;
import org.chocosolver.solver.*;
import java.util.HashSet;
import java.util.Set;
import java.util.function.Consumer;
public class ModelFactory {
/* Create a Location */
public static Location createLocation(String lab) {return new Location(lab);}
/* Create a Transition */
public static Transition createTransition(Location source, Constraint g, Action a, Consumer e, Location target)
{ Transition t = new Transition(source, g, a, e, target);
source.addTransition(t);
return t;
}
// Action management and creation
static final int INPUT = 0;
static final int OUTPUT = 1;
public static Action[] createIOActions(String lab)
{
InputAction i = new InputAction(Action.cpt, lab);
OutputAction o = new OutputAction(Action.cpt, lab);
i.setComplement(o);
o.setComplement(i);
Action.incr();
Action[] actions = new Action[2];
actions[INPUT] = i;
actions[OUTPUT] = o;
return actions;
}
public static Program_Graph createProgram(Set<Location> locations, Set<Location> inits)
{
Program_Graph pg = new Program_Graph();
for(Location l : locations)
{
pg.addLocation(l);
}
for(Location li : inits)
{
pg.addInitLocation(li);
}
return pg;
}
    // Returns a transition system that is already running (start() is invoked before returning).
public static Transition_System createSystem(Set<Program_Graph> pgs)
{
Transition_System ts = new Transition_System();
for(Program_Graph pg_i : pgs)
{
ts.addProgram(pg_i);
}
ts.start();
return ts;
}
/** Manage Constraints (guards and effects) **/
// x [<= >= < > !=] n
public static Constraint createGuard_GT(IModel model, IntVar v, IntVar n)
{
return model.arithm(v,">",n);
}
public static Constraint createGuard_GE(IModel model, IntVar v, IntVar n)
{
return model.arithm(v,">=",n);
}
public static Constraint createGuard_LT(IModel model, IntVar v, IntVar n)
{
return model.arithm(v,"<",n);
}
public static Constraint createGuard_LE(IModel model, IntVar v, IntVar n)
{
return model.arithm(v,"<=",n);
}
public static Constraint createGuard_EG(IModel model, IntVar v, IntVar n)
{
return model.arithm(v,"=",n);
}
public static Constraint createGuard_DIF(IModel model, IntVar v, IntVar n) // DIF
{
return model.arithm(v,"!=",n);
}
// x' = x + n
public static Consumer<IModel> createIntIncr(StreamVariable i, int n)
{
//TODO manage stream ! link v' and v
return (IModel model) ->
{
//System.out.println("I get index " + Integer.toString(i.getCurrentIndex()-1));
//choco x_i+1
IntVar i_i = i.getInstance(i.getCurrentIndex()-1); // Current i
IntVar i_p = i.IncrIndex(model, 0); // New i'
model.arithm(i_p, "=", model.intOffsetView(i_i, n)).post();
};
}
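    // Usage sketch: attach the returned consumer as a transition effect so that
    // firing the transition posts x' = x + 1 on the underlying Choco model
    // (see createProcessModel below, which does exactly this for x and i):
    //
    //   Consumer<IModel> incX = createIntIncr(x, 1);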
// Add intMinusView(y); (-a) intScaleView (a * b)
public static Program_Graph createProcessModel(String postfix, Action[] lock_Act, Action[] rel_Act, IModel model, StreamVariable x)
{
//local Variables -->> new variable
StreamVariable i = new StreamVariable(model, "i" + "_" + postfix, 0);
IntVar i_var = i.getInstance(0);
// Locations
Location li = createLocation("init" + "_" + postfix);
Location lw = createLocation("request" + "_" + postfix);
Location lc = createLocation("crit" + "_" + postfix);
Location lr = createLocation("release" + "_" + postfix);
Location le = createLocation("end" + "_" + postfix);
// Transitions Location source, Constraint g, Action a, Constraint e, Location target
        Transition tiw = createTransition(li, createGuard_LT(model, i_var, N), Action.tau, null, lw); /* loop while i < N */
Transition twc = createTransition(lw, null, lock_Act[OUTPUT], null, lc); /* !lock */
Transition tcr = createTransition(lc, null, Action.tau, createIntIncr(x, 1), lr);
Transition tri = createTransition(lr, null, rel_Act[OUTPUT], createIntIncr(i, 1), li); /* !release */
Transition tie = createTransition(li, createGuard_GE(model, i_var, N), Action.tau, null, le);
//System.out.println("~~~~test: " + tiw.target.toString());
// set
Set<Location> locations_process = new HashSet<>();
locations_process.add(li);
locations_process.add(lw);
locations_process.add(lc);
locations_process.add(lr);
locations_process.add(le);
Set<Location> init_process = new HashSet<>();
init_process.add(li);
return createProgram(locations_process, init_process);
}
/////////// Test with the Peterson Example
static IntVar N = null;
public static Transition_System createPetersonExample(IModel model)
{
//Global variable
StreamVariable x = new StreamVariable(model, "x", 0);
N = model.intVar("N", 10);
//Actions return Pair<InputAction, OutputAction>
Action[] lock_Act = createIOActions("req");
Action[] rel_Act = createIOActions("rel");
// Process Model
Program_Graph process1 = createProcessModel("1", lock_Act, rel_Act, model, x);
Program_Graph process2 = createProcessModel("2", lock_Act, rel_Act, model, x);
// lock Model
// Locations
Location lu = createLocation("unlock");
Location ll = createLocation("lock");
// Transitions Location source, Constraint g, Action a, Constraint e, Location target
Transition tul = createTransition(lu, null, lock_Act[INPUT], null, ll); /* ?lock */
Transition tlu = createTransition(ll, null, rel_Act[INPUT], null, lu); /* ?release */
// set
Set<Location> locations_lock = new HashSet<>();
locations_lock.add(lu);
locations_lock.add(ll);
Set<Location> init_lock = new HashSet<>(); /* init -- lock? --> */
init_lock.add(lu);
Program_Graph lock = createProgram(locations_lock, init_lock);
Set<Program_Graph> pgs = new HashSet<>();
pgs.add(process1);
pgs.add(process2);
pgs.add(lock);
//start is done in the factory ... keep ?
return createSystem(pgs);
}
}
|
package vexpressed;
import org.antlr.v4.runtime.ANTLRErrorListener;
import org.antlr.v4.runtime.ANTLRInputStream;
import org.antlr.v4.runtime.BaseErrorListener;
import org.antlr.v4.runtime.CommonTokenStream;
import org.antlr.v4.runtime.RecognitionException;
import org.antlr.v4.runtime.Recognizer;
import org.antlr.v4.runtime.Token;
import org.antlr.v4.runtime.tree.ParseTree;
import org.antlr.v4.runtime.tree.ParseTreeVisitor;
import vexpressed.core.ExpressionCalculatorVisitor;
import vexpressed.core.ExpressionException;
import vexpressed.core.FunctionExecutor;
import vexpressed.core.VariableResolver;
import vexpressed.grammar.ExprLexer;
import vexpressed.grammar.ExprParser;
import vexpressed.meta.ExpressionType;
import vexpressed.validation.ExpressionValidatorVisitor;
import vexpressed.validation.FunctionTypeResolver;
import vexpressed.validation.VariableTypeResolver;
public final class VexpressedUtils {
private static final ANTLRErrorListener ERROR_LISTENER = new ExceptionThrowingErrorListener();
/**
* Evaluates the expression using provided {@link VariableResolver} and
	 * {@link FunctionExecutor}. This always parses the expression, so it is not
	 * "production-ready"; for repeated evaluations of the same expression it is
	 * better to cache the parse tree.
*/
public static Object eval(String expression,
VariableResolver variableResolver, FunctionExecutor functionExecutor)
{
ParseTree parseTree = createParseTree(expression);
ParseTreeVisitor visitor = new ExpressionCalculatorVisitor(variableResolver)
.withFunctionExecutor(functionExecutor);
return visitor.visit(parseTree);
}
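	// Usage sketch (resolver and executor implementations are application-specific):
	//
	//   Object result = VexpressedUtils.eval("price * quantity", myVariableResolver, myFunctionExecutor);
	//
	// For repeated evaluation of the same expression, parse once and reuse the tree:
	//
	//   ParseTree tree = VexpressedUtils.createParseTree("price * quantity");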
/**
	 * Parses the expression and returns the {@link ParseTree} for further usage. This is
	 * a slow operation compared to the evaluation itself, so it is recommended to cache
	 * parse trees for repeated evaluations of the same expression.
*/
public static ParseTree createParseTree(String expression) {
ANTLRInputStream input = new ANTLRInputStream(expression);
ExprLexer lexer = new ExprLexer(input);
CommonTokenStream tokens = new CommonTokenStream(lexer);
ExprParser parser = new ExprParser(tokens);
parser.removeErrorListeners();
parser.addErrorListener(ERROR_LISTENER);
return parser.result();
}
/**
* Checks validity of this expression and returns its {@link ExpressionType} when valid,
	 * otherwise throws an exception. Does not cache the parse tree, but for validation this
	 * may actually be the more appropriate behavior, as many variations of the same
	 * expression are expected to be checked.
*/
public static ExpressionType check(String expression,
VariableTypeResolver variableTypeResolver, FunctionTypeResolver functionTypeResolver)
{
ParseTree parseTree = createParseTree(expression);
ExpressionValidatorVisitor visitor = new ExpressionValidatorVisitor(variableTypeResolver)
.withFunctionTypeResolver(functionTypeResolver);
return visitor.visit(parseTree);
}
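	// Usage sketch (type resolvers are application-specific): a successful check
	// returns the expression's type, an invalid expression throws an exception.
	//
	//   ExpressionType type = VexpressedUtils.check("price * quantity", myVariableTypeResolver, myFunctionTypeResolver);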
private static class ExceptionThrowingErrorListener extends BaseErrorListener {
@Override
public void syntaxError(Recognizer<?, ?> recognizer, Object offendingSymbol,
int line, int col, String msg, RecognitionException e)
{
throw new ExpressionException(
"Expression parse failed at " + line + ':' + col + " - " + msg +
underlinedError(recognizer, (Token) offendingSymbol, line, col));
}
private String underlinedError(
Recognizer recognizer, Token offendingToken, int line, int col)
{
StringBuilder sb = new StringBuilder();
CommonTokenStream tokens =
(CommonTokenStream) recognizer.getInputStream();
String input = tokens.getTokenSource().getInputStream().toString();
String[] lines = input.split("\n");
String errorLine = lines[line - 1];
sb.append('\n').append(errorLine);
int start = offendingToken.getStartIndex();
int stop = offendingToken.getStopIndex();
if (start > stop) {
return sb.toString();
}
sb.append('\n');
for (int i = 0; i < col; i++) sb.append(' ');
if (start >= 0 && stop >= 0) {
for (int i = start; i <= stop; i++) sb.append('^');
}
return sb.toString();
}
}
}
|
package io.cogswell.sdk.pubsub;
import android.util.Log;
import com.google.common.base.Function;
import com.google.common.util.concurrent.AsyncFunction;
import com.google.common.util.concurrent.FutureCallback;
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;
import junit.framework.TestCase;
import org.json.JSONObject;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.LinkedList;
import java.util.List;
import java.util.Scanner;
import java.util.SortedSet;
import java.util.TreeSet;
import java.util.UUID;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.Executor;
import java.util.concurrent.Executors;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;
import io.cogswell.sdk.pubsub.handlers.PubSubCloseHandler;
import io.cogswell.sdk.pubsub.handlers.PubSubMessageHandler;
import io.cogswell.sdk.pubsub.handlers.PubSubNewSessionHandler;
import io.cogswell.sdk.pubsub.handlers.PubSubRawRecordHandler;
import io.cogswell.sdk.pubsub.handlers.PubSubReconnectHandler;
import io.cogswell.sdk.utils.Container;
import io.cogswell.sdk.utils.Duration;
public class PubSubHandleTest extends TestCase {
private static int asyncTimeoutSeconds = 4;
private Executor executor = Executors.newFixedThreadPool(16);
private LinkedList<PubSubHandle> handles = new LinkedList<>();
private List<String> keys = new ArrayList<String>();
private String host = null;
@Override
protected void setUp() throws Exception {
InputStream jsonConfigIS = this.getClass().getResourceAsStream("config.json");
String configJsonString = new Scanner(jsonConfigIS, "UTF-8").useDelimiter("\\A").next();
JSONObject configJson = new JSONObject(configJsonString);
// Get the host.
host = configJson.optString("host", null);
// Add the keys
JSONObject keysJson = configJson.getJSONObject("keys");
String rKey = keysJson.optString("readKey", null);
if (rKey != null) {
keys.add(rKey);
}
String wKey = keysJson.optString("writeKey", null);
if (wKey != null) {
keys.add(wKey);
}
String aKey = keysJson.optString("adminKey", null);
if (aKey != null) {
keys.add(aKey);
}
}
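    // Illustrative config.json layout consumed by setUp() (values are placeholders):
    //
    //   {
    //     "host": "wss://example.cogswell.io/pubsub",
    //     "keys": { "readKey": "R-...", "writeKey": "W-...", "adminKey": "A-..." }
    //   }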
@Override
protected void tearDown() throws Exception {
final CountDownLatch latch = new CountDownLatch(handles.size());
// Shutdown all PubSubHandles which have been connected.
for (PubSubHandle handle : handles) {
handle.close().addListener(new Runnable(){
@Override
public void run() {
latch.countDown();
}
}, executor);
}
latch.await(asyncTimeoutSeconds, TimeUnit.SECONDS);
}
public PubSubHandle stashHandle(PubSubHandle handle) {
handles.push(handle);
return handle;
}
public void testConnect() throws Exception {
final BlockingQueue<String> queue = new LinkedBlockingQueue<>(1);
ListenableFuture<PubSubHandle> connectFuture = PubSubSDK.getInstance().connect(keys, new PubSubOptions(host));
assertNotNull(connectFuture);
Futures.addCallback(connectFuture, new FutureCallback<PubSubHandle>() {
public void onSuccess(PubSubHandle psh) {
stashHandle(psh);
queue.offer(psh == null ? "null-pubsub-handle" : "success");
}
public void onFailure(Throwable error) {
queue.offer("connect-error");
}
});
assertEquals("success", queue.poll(asyncTimeoutSeconds, TimeUnit.SECONDS));
}
public void testGetSessionUuid() throws Exception {
final BlockingQueue<String> queue = new LinkedBlockingQueue<>();
ListenableFuture<PubSubHandle> connectFuture = PubSubSDK.getInstance().connect(keys, new PubSubOptions(host));
AsyncFunction<PubSubHandle, UUID> getSessionUuidFunction =
new AsyncFunction<PubSubHandle, UUID>() {
public ListenableFuture<UUID> apply(PubSubHandle pubsubHandle) {
return stashHandle(pubsubHandle).getSessionUuid();
}
};
ListenableFuture<UUID> getSessionUuidFuture = Futures.transformAsync(connectFuture, getSessionUuidFunction, executor);
Futures.addCallback(getSessionUuidFuture, new FutureCallback<UUID>() {
public void onSuccess(UUID sessionId) {
queue.offer(sessionId == null ? "null-session-id" : "success");
}
public void onFailure(Throwable error) {
queue.offer("session-id-fetch-failure");
}
});
assertEquals("success", queue.poll(asyncTimeoutSeconds, TimeUnit.SECONDS));
}
public void testSubscribe() throws Exception {
final BlockingQueue<String> queue = new LinkedBlockingQueue<>();
final Container<PubSubHandle> handle = new Container<>();
final String testChannel = "TEST-CHANNEL";
final PubSubMessageHandler messageHandler = new PubSubMessageHandler() {
public void onMessage(PubSubMessageRecord record) {
queue.offer("should-not-have-received-a-message");
}
};
ListenableFuture<PubSubHandle> connectFuture = PubSubSDK.getInstance().connect(keys, new PubSubOptions(host));
AsyncFunction<PubSubHandle, List<String>> subscribeFunction =
new AsyncFunction<PubSubHandle, List<String>>() {
public ListenableFuture<List<String>> apply(PubSubHandle pubsubHandle) {
return handle.set(stashHandle(pubsubHandle)).subscribe(testChannel, messageHandler);
}
};
ListenableFuture<List<String>> subscribeFuture = Futures.transformAsync(connectFuture, subscribeFunction, executor);
AsyncFunction<List<String>, List<String>> unsubscribeFunction =
new AsyncFunction<List<String>, List<String>>() {
public ListenableFuture<List<String>> apply(List<String> subscribeResponse) {
if (!subscribeResponse.contains(testChannel)) {
queue.offer("expected-channel-not-in-subscriptions");
}
return handle.get().unsubscribe(testChannel);
}
};
ListenableFuture<List<String>> unsubscribeFuture = Futures.transformAsync(subscribeFuture, unsubscribeFunction, executor);
Futures.addCallback(unsubscribeFuture, new FutureCallback<List<String>>() {
public void onSuccess(List<String> unsubscribeResponse) {
queue.offer(unsubscribeResponse.isEmpty() ? "success" : "subscriptions-not-empty");
}
public void onFailure(Throwable error) {
queue.offer("unsubscribe-failure");
}
});
assertEquals("success", queue.poll(asyncTimeoutSeconds, TimeUnit.SECONDS));
}
public void testListSubscriptions() throws Exception {
final BlockingQueue<String> queue = new LinkedBlockingQueue<>();
final Container<PubSubHandle> handle = new Container<>();
final String testChannel = "TEST-CHANNEL";
final PubSubMessageHandler messageHandler = new PubSubMessageHandler() {
public void onMessage(PubSubMessageRecord record) {
queue.offer("should-not-have-received-a-message");
}
};
ListenableFuture<PubSubHandle> connectFuture = PubSubSDK.getInstance().connect(keys, new PubSubOptions(host));
AsyncFunction<PubSubHandle, List<String>> subscribeFunction =
new AsyncFunction<PubSubHandle, List<String>>() {
public ListenableFuture<List<String>> apply(PubSubHandle pubsubHandle) {
return handle.set(stashHandle(pubsubHandle)).subscribe(testChannel, messageHandler);
}
};
ListenableFuture<List<String>> subscribeFuture = Futures.transformAsync(connectFuture, subscribeFunction, executor);
AsyncFunction<List<String>, List<String>> listSubscriptionsFunction =
new AsyncFunction<List<String>, List<String>>() {
public ListenableFuture<List<String>> apply(List<String> subscribeResponse) {
if (!subscribeResponse.contains(testChannel)) {
queue.offer("expected-channel-not-in-subscriptions");
}
return handle.get().listSubscriptions();
}
};
ListenableFuture<List<String>> listSubscriptionsFuture = Futures.transformAsync(subscribeFuture, listSubscriptionsFunction, executor);
Futures.addCallback(listSubscriptionsFuture, new FutureCallback<List<String>>() {
public void onSuccess(List<String> subscriptions) {
queue.offer(subscriptions.contains(testChannel) ? "success" : "channel-missing-from-subscriptions");
}
public void onFailure(Throwable error) {
queue.offer("subscription-listing-failure");
}
});
assertEquals("success", queue.poll(asyncTimeoutSeconds, TimeUnit.SECONDS));
}
public void testUnsubscribeAll() throws Exception {
final BlockingQueue<String> queue = new LinkedBlockingQueue<>();
final Container<PubSubHandle> handle = new Container<>();
final String testChannel = "TEST-CHANNEL";
final PubSubMessageHandler messageHandler = new PubSubMessageHandler() {
public void onMessage(PubSubMessageRecord record) {
queue.offer("should-not-have-received-a-message");
}
};
ListenableFuture<PubSubHandle> connectFuture = PubSubSDK.getInstance().connect(keys, new PubSubOptions(host));
AsyncFunction<PubSubHandle, List<String>> subscribeFunction =
new AsyncFunction<PubSubHandle, List<String>>() {
public ListenableFuture<List<String>> apply(PubSubHandle pubsubHandle) {
return handle.set(stashHandle(pubsubHandle)).subscribe(testChannel, messageHandler);
}
};
ListenableFuture<List<String>> subscribeFuture = Futures.transformAsync(connectFuture, subscribeFunction, executor);
AsyncFunction<List<String>, List<String>> unsubscribeAllFunction =
new AsyncFunction<List<String>, List<String>>() {
public ListenableFuture<List<String>> apply(List<String> subscribeResponse) {
return handle.get().unsubscribeAll();
}
};
ListenableFuture<List<String>> unsubscribeAllFuture = Futures.transformAsync(subscribeFuture, unsubscribeAllFunction, executor);
AsyncFunction<List<String>, List<String>> listSubscriptionsFunction =
new AsyncFunction<List<String>, List<String>>() {
public ListenableFuture<List<String>> apply(List<String> unsubscribeAllResponse) {
if (!unsubscribeAllResponse.contains(testChannel)) {
queue.offer("expected-channel-not-in-unsubscribe-response");
}
return handle.get().listSubscriptions();
}
};
ListenableFuture<List<String>> listSubscriptionsFuture = Futures.transformAsync(unsubscribeAllFuture, listSubscriptionsFunction, executor);
Futures.addCallback(listSubscriptionsFuture, new FutureCallback<List<String>>() {
public void onSuccess(List<String> subscriptions) {
queue.offer(subscriptions.isEmpty() ? "success" : "still-subscribed-after-unsubscribe-all");
}
public void onFailure(Throwable error) {
queue.offer("subscription-listing-failure");
}
});
assertEquals("success", queue.poll(asyncTimeoutSeconds, TimeUnit.SECONDS));
}
public void testSubscribeThenPublishWithoutAck() throws Exception {
final BlockingQueue<String> queue = new LinkedBlockingQueue<>();
final BlockingQueue<PubSubMessageRecord> messageQueue = new LinkedBlockingQueue<>();
final Container<PubSubHandle> handle = new Container<>();
final String testChannel = "TEST-CHANNEL";
final String testMessage = "TEST-MESSAGE:"+System.currentTimeMillis()+"-"+Math.random();
final PubSubMessageHandler messageHandler = new PubSubMessageHandler() {
public void onMessage(PubSubMessageRecord record) {
messageQueue.offer(record);
}
};
ListenableFuture<PubSubHandle> connectFuture = PubSubSDK.getInstance().connect(keys, new PubSubOptions(host));
AsyncFunction<PubSubHandle, List<String>> subscribeFunction =
new AsyncFunction<PubSubHandle, List<String>>() {
public ListenableFuture<List<String>> apply(PubSubHandle pubsubHandle) {
return handle.set(stashHandle(pubsubHandle)).subscribe(testChannel, messageHandler);
}
};
ListenableFuture<List<String>> subscribeFuture = Futures.transformAsync(connectFuture, subscribeFunction, executor);
AsyncFunction<List<String>, Long> publishFunction =
new AsyncFunction<List<String>, Long>() {
public ListenableFuture<Long> apply(List<String> subscribeResponse) {
return handle.get().publish(testChannel, testMessage);
}
};
ListenableFuture<Long> publishFuture = Futures.transformAsync(subscribeFuture, publishFunction, executor);
Futures.addCallback(publishFuture, new FutureCallback<Long>() {
public void onSuccess(Long publishResponse) {
queue.offer(publishResponse == null ? "null-publish-response" : "success");
}
public void onFailure(Throwable error) {
queue.offer("publish-failure");
}
}, executor);
assertEquals("success", queue.poll(asyncTimeoutSeconds, TimeUnit.SECONDS));
PubSubMessageRecord messageRecord = messageQueue.poll(asyncTimeoutSeconds, TimeUnit.SECONDS);
assertEquals(testMessage, messageRecord.getMessage());
assertEquals(testChannel, messageRecord.getChannel());
}
public void testSubscribeThenPublishWithAck() throws Exception {
final BlockingQueue<String> queue = new LinkedBlockingQueue<>();
final BlockingQueue<PubSubMessageRecord> messageQueue = new LinkedBlockingQueue<>();
final Container<PubSubHandle> handle = new Container<>();
final String testChannel = "TEST-CHANNEL";
final String testMessage = "TEST-MESSAGE:"+System.currentTimeMillis()+"-"+Math.random();
final PubSubMessageHandler messageHandler = new PubSubMessageHandler() {
public void onMessage(PubSubMessageRecord record) {
messageQueue.offer(record);
}
};
ListenableFuture<PubSubHandle> connectFuture = PubSubSDK.getInstance().connect(keys, new PubSubOptions(host));
AsyncFunction<PubSubHandle, List<String>> subscribeFunction =
new AsyncFunction<PubSubHandle, List<String>>() {
public ListenableFuture<List<String>> apply(PubSubHandle pubsubHandle) {
return handle.set(stashHandle(pubsubHandle)).subscribe(testChannel, messageHandler);
}
};
ListenableFuture<List<String>> subscribeFuture = Futures.transformAsync(connectFuture, subscribeFunction, executor);
AsyncFunction<List<String>, UUID> publishWithAckFunction =
new AsyncFunction<List<String>, UUID>() {
public ListenableFuture<UUID> apply(List<String> subscribeResponse) {
return handle.get().publishWithAck(testChannel, testMessage);
}
};
ListenableFuture<UUID> publishWithAckFuture = Futures.transformAsync(subscribeFuture, publishWithAckFunction, executor);
Futures.addCallback(publishWithAckFuture, new FutureCallback<UUID>() {
public void onSuccess(UUID publishWithAckResponse) {
queue.offer(publishWithAckResponse == null ? "null-message-id-response" : "success");
}
public void onFailure(Throwable error) {
queue.offer("publish-with-ack-failure");
}
}, executor);
assertEquals("success", queue.poll(asyncTimeoutSeconds, TimeUnit.SECONDS));
assertEquals(testMessage, messageQueue.poll(asyncTimeoutSeconds, TimeUnit.SECONDS).getMessage());
}
public void testClose() throws Exception {
final BlockingQueue<String> queue = new LinkedBlockingQueue<>();
final Container<PubSubHandle> handle = new Container<>();
final String testChannel = "TEST-CHANNEL";
final PubSubMessageHandler messageHandler = new PubSubMessageHandler() {
public void onMessage(PubSubMessageRecord record) {
queue.offer("should-not-have-received-a-message");
}
};
ListenableFuture<PubSubHandle> connectFuture = PubSubSDK.getInstance().connect(keys, new PubSubOptions(host));
AsyncFunction<PubSubHandle, List<String>> subscribeFunction =
new AsyncFunction<PubSubHandle, List<String>>() {
public ListenableFuture<List<String>> apply(PubSubHandle pubsubHandle) {
return handle.set(stashHandle(pubsubHandle)).subscribe(testChannel, messageHandler);
}
};
ListenableFuture<List<String>> subscribeFuture = Futures.transformAsync(connectFuture, subscribeFunction, executor);
AsyncFunction<List<String>, Void> closeFunction =
new AsyncFunction<List<String>, Void>() {
public ListenableFuture<Void> apply(List<String> subscribeResponse) {
return handle.get().close();
}
};
ListenableFuture<Void> closeFuture = Futures.transformAsync(subscribeFuture, closeFunction, executor);
Futures.addCallback(closeFuture, new FutureCallback<Void>() {
public void onSuccess(Void closeResponse) {
queue.offer("success");
}
public void onFailure(Throwable error) {
queue.offer("socket-close-failure");
}
});
assertEquals("success", queue.poll(asyncTimeoutSeconds, TimeUnit.SECONDS));
}
public void testRestoreSession() throws Exception {
final Container<PubSubHandle> firstHandle = new Container<>();
final Container<PubSubHandle> secondHandle = new Container<>();
final Container<UUID> uuid = new Container<>();
final BlockingQueue<String> queue = new LinkedBlockingQueue<>();
final Container<SortedSet<String>> originalSubscriptions = new Container<>();
final Container<SortedSet<String>> reconnectSubscriptions = new Container<>();
final String testChannel = "TEST-CHANNEL";
final PubSubMessageHandler messageHandler = new PubSubMessageHandler() {
public void onMessage(PubSubMessageRecord record) {
queue.offer("should-not-have-received-a-message");
}
};
// Open a connection, subscribe, then close.
ListenableFuture<PubSubHandle> connectFuture = PubSubSDK.getInstance().connect(keys, new PubSubOptions(host));
AsyncFunction<PubSubHandle, List<String>> subscribeFunction =
new AsyncFunction<PubSubHandle, List<String>>() {
public ListenableFuture<List<String>> apply(PubSubHandle pubsubHandle) {
return firstHandle.set(stashHandle(pubsubHandle)).subscribe(testChannel, messageHandler);
}
};
ListenableFuture<List<String>> subscribeFuture = Futures.transformAsync(connectFuture, subscribeFunction, executor);
AsyncFunction<List<String>, UUID> getSessionUuidFunction =
new AsyncFunction<List<String>, UUID>() {
public ListenableFuture<UUID> apply(List<String> subscriptions) {
originalSubscriptions.set(new TreeSet<>(subscriptions));
return firstHandle.get().getSessionUuid();
}
};
ListenableFuture<UUID> getSessionUuidFuture = Futures.transformAsync(subscribeFuture, getSessionUuidFunction, executor);
Function<UUID, List<String>> closeFunction =
new Function<UUID, List<String>>() {
public List<String> apply(UUID sessionId) {
uuid.set(sessionId);
firstHandle.get().dropConnection(new PubSubDropConnectionOptions(Duration.of(10, TimeUnit.MILLISECONDS)));
return null;
}
};
ListenableFuture<List<String>> closeFuture = Futures.transform(getSessionUuidFuture, closeFunction, executor);
AsyncFunction<List<String>, PubSubHandle> reconnectFunction =
new AsyncFunction<List<String>, PubSubHandle>() {
public ListenableFuture<PubSubHandle> apply(List<String> subscriptions) {
return PubSubSDK.getInstance().connect(keys, new PubSubOptions(host, false, Duration.of(3, TimeUnit.SECONDS), uuid.get()));
}
};
ListenableFuture<PubSubHandle> reconnectFuture = Futures.transformAsync(closeFuture, reconnectFunction, executor);
AsyncFunction<PubSubHandle, List<String>> listSubscriptionsFunction =
new AsyncFunction<PubSubHandle, List<String>>() {
public ListenableFuture<List<String>> apply(PubSubHandle pubsubHandle) {
return secondHandle.set(stashHandle(pubsubHandle)).listSubscriptions();
}
};
ListenableFuture<List<String>> listSubscriptionsFuture = Futures.transformAsync(reconnectFuture, listSubscriptionsFunction, executor);
Futures.addCallback(listSubscriptionsFuture, new FutureCallback<List<String>>() {
public void onSuccess(List<String> subscriptions) {
reconnectSubscriptions.set(new TreeSet<>(subscriptions));
queue.offer(subscriptions.contains(testChannel) ? "success" : "channel-missing-from-subscriptions");
}
public void onFailure(Throwable error) {
queue.offer("subscription-listing-failure");
}
});
assertEquals("success", queue.poll(asyncTimeoutSeconds, TimeUnit.SECONDS));
assertEquals(originalSubscriptions.get(), reconnectSubscriptions.get());
}
public void testSubscribeToAThenPublishToB() throws Exception {
final BlockingQueue<String> queue = new LinkedBlockingQueue<>();
final BlockingQueue<PubSubMessageRecord> messageQueue = new LinkedBlockingQueue<>();
final Container<PubSubHandle> handle = new Container<>();
final String testChannelA = "TEST-CHANNEL-A";
final String testChannelB = "TEST-CHANNEL-B";
final String testMessage = "TEST-MESSAGE:"+System.currentTimeMillis()+"-"+Math.random();
final PubSubMessageHandler messageHandler = new PubSubMessageHandler() {
public void onMessage(PubSubMessageRecord record) {
messageQueue.offer(record);
}
};
ListenableFuture<PubSubHandle> connectFuture = PubSubSDK.getInstance().connect(keys, new PubSubOptions(host));
AsyncFunction<PubSubHandle, List<String>> subscribeFunction =
new AsyncFunction<PubSubHandle, List<String>>() {
public ListenableFuture<List<String>> apply(PubSubHandle pubsubHandle) {
return handle.set(stashHandle(pubsubHandle)).subscribe(testChannelA, messageHandler);
}
};
ListenableFuture<List<String>> subscribeFuture = Futures.transformAsync(connectFuture, subscribeFunction, executor);
AsyncFunction<List<String>, Long> publishFunction =
new AsyncFunction<List<String>, Long>() {
public ListenableFuture<Long> apply(List<String> subscribeResponse) {
return handle.get().publish(testChannelB, testMessage);
}
};
ListenableFuture<Long> publishFuture = Futures.transformAsync(subscribeFuture, publishFunction, executor);
Futures.addCallback(publishFuture, new FutureCallback<Long>() {
public void onSuccess(Long publishResponse) {
queue.offer(publishResponse == null ? "null-publish-response" : "success");
}
public void onFailure(Throwable error) {
queue.offer("publish-failure");
}
}, executor);
assertEquals("success", queue.poll(asyncTimeoutSeconds, TimeUnit.SECONDS));
assertNull(messageQueue.poll(asyncTimeoutSeconds, TimeUnit.SECONDS));
}
public void testSubscribeToAAndBThenPublishToAndB() throws Exception {
final BlockingQueue<String> queue = new LinkedBlockingQueue<>();
final BlockingQueue<PubSubMessageRecord> messageQueueA = new LinkedBlockingQueue<>();
final BlockingQueue<PubSubMessageRecord> messageQueueB = new LinkedBlockingQueue<>();
final Container<PubSubHandle> handle = new Container<>();
final String testChannelA = "TEST-CHANNEL-A";
final String testChannelB = "TEST-CHANNEL-B";
final String testMessageA = "TEST-MESSAGE-A:"+System.currentTimeMillis()+"-"+Math.random();
final String testMessageB = "TEST-MESSAGE-B:"+System.currentTimeMillis()+"-"+Math.random();
final PubSubMessageHandler messageHandler = new PubSubMessageHandler() {
public void onMessage(PubSubMessageRecord record) {
String channel = record.getChannel();
if (testChannelA.equals(channel)) {
messageQueueA.offer(record);
} else if (testChannelB.equals(channel)) {
messageQueueB.offer(record);
}
}
};
ListenableFuture<PubSubHandle> connectFuture = PubSubSDK.getInstance().connect(keys, new PubSubOptions(host));
AsyncFunction<PubSubHandle, List<String>> subscribeFunctionA =
new AsyncFunction<PubSubHandle, List<String>>() {
public ListenableFuture<List<String>> apply(PubSubHandle pubsubHandle) {
return handle.set(stashHandle(pubsubHandle)).subscribe(testChannelA, messageHandler);
}
};
ListenableFuture<List<String>> subscribeFutureA = Futures.transformAsync(connectFuture, subscribeFunctionA, executor);
AsyncFunction<List<String>, List<String>> subscribeFunctionB =
new AsyncFunction<List<String>, List<String> >() {
public ListenableFuture<List<String>> apply(List<String> subscribeResponse) {
return handle.get().subscribe(testChannelB, messageHandler);
}
};
ListenableFuture<List<String>> subscribeFutureB = Futures.transformAsync(subscribeFutureA, subscribeFunctionB, executor);
AsyncFunction<List<String>, Long> publishFunctionA =
new AsyncFunction<List<String>, Long>() {
public ListenableFuture<Long> apply(List<String> subscribeResponse) {
return handle.get().publish(testChannelA, testMessageA);
}
};
ListenableFuture<Long> publishFutureA = Futures.transformAsync(subscribeFutureB, publishFunctionA, executor);
AsyncFunction<Long, Long> publishFunctionB =
new AsyncFunction<Long, Long>() {
public ListenableFuture<Long> apply(Long publishResponse) {
return handle.get().publish(testChannelB, testMessageB);
}
};
ListenableFuture<Long> publishFutureB = Futures.transformAsync(publishFutureA, publishFunctionB, executor);
Futures.addCallback(publishFutureB, new FutureCallback<Long>() {
public void onSuccess(Long publishResponse) {
queue.offer(publishResponse == null ? "null-publish-response" : "success");
}
public void onFailure(Throwable error) {
queue.offer("publish-failure");
}
}, executor);
assertEquals("success", queue.poll(asyncTimeoutSeconds, TimeUnit.SECONDS));
PubSubMessageRecord recordA = messageQueueA.poll(asyncTimeoutSeconds, TimeUnit.SECONDS);
assertNotNull(recordA);
assertEquals(testMessageA, recordA.getMessage());
assertEquals(testChannelA, recordA.getChannel());
PubSubMessageRecord recordB = messageQueueB.poll(asyncTimeoutSeconds, TimeUnit.SECONDS);
assertNotNull(recordB);
assertEquals(testMessageB, recordB.getMessage());
assertEquals(testChannelB, recordB.getChannel());
// Success! Both messages received.
}
public void testSubscribeToAAndBThenPublishToAndBIn4Clients() throws Exception {
final Container<PubSubHandle> pubHandleA = new Container<>();
final Container<PubSubHandle> pubHandleB = new Container<>();
final Container<UUID> messageIdA = new Container<>();
final Container<UUID> messageIdB = new Container<>();
final CountDownLatch readyLatch = new CountDownLatch(4);
final BlockingQueue<String> queueSubA = new LinkedBlockingQueue<>();
final BlockingQueue<String> queueSubB = new LinkedBlockingQueue<>();
final BlockingQueue<String> queuePubA = new LinkedBlockingQueue<>();
final BlockingQueue<String> queuePubB = new LinkedBlockingQueue<>();
final BlockingQueue<PubSubMessageRecord> messageQueueA = new LinkedBlockingQueue<>();
final BlockingQueue<PubSubMessageRecord> messageQueueB = new LinkedBlockingQueue<>();
final String testChannelA = "TEST-CHANNEL-A";
final String testChannelB = "TEST-CHANNEL-B";
final String testMessageA = "TEST-MESSAGE-A:"+System.currentTimeMillis()+"-"+Math.random();
final String testMessageB = "TEST-MESSAGE-B:"+System.currentTimeMillis()+"-"+Math.random();
final PubSubMessageHandler messageHandlerChannelA = new PubSubMessageHandler() {
public void onMessage(PubSubMessageRecord record) {
messageQueueA.offer(record);
}
};
final PubSubMessageHandler messageHandlerChannelB = new PubSubMessageHandler() {
public void onMessage(PubSubMessageRecord record) {
messageQueueB.offer(record);
}
};
// Subscriber A:
ListenableFuture<PubSubHandle> connectFutureSubscriberA = PubSubSDK.getInstance().connect(keys, new PubSubOptions(host));
AsyncFunction<PubSubHandle, List<String>> subscribeFunctionA =
new AsyncFunction<PubSubHandle, List<String>>() {
public ListenableFuture<List<String>> apply(PubSubHandle pubsubHandle) {
return stashHandle(pubsubHandle).subscribe(testChannelA, messageHandlerChannelA);
}
};
Futures.addCallback(
Futures.transformAsync(connectFutureSubscriberA, subscribeFunctionA, executor),
new FutureCallback<List<String>>() {
public void onSuccess(List<String> result) {
readyLatch.countDown();
queueSubA.offer(result.contains(testChannelA) ? "success" : "channel-A-missing-from-subscriptions");
}
public void onFailure(Throwable t) {
queueSubA.offer("subscription-A-failure");
}
}
);
// Subscriber B:
ListenableFuture<PubSubHandle> connectFutureSubscriberB = PubSubSDK.getInstance().connect(keys, new PubSubOptions(host));
AsyncFunction<PubSubHandle, List<String>> subscribeFunctionB =
new AsyncFunction<PubSubHandle, List<String>>() {
public ListenableFuture<List<String>> apply(PubSubHandle pubsubHandle) {
return stashHandle(pubsubHandle).subscribe(testChannelB, messageHandlerChannelB);
}
};
Futures.addCallback(
Futures.transformAsync(connectFutureSubscriberB, subscribeFunctionB, executor),
new FutureCallback<List<String>>() {
public void onSuccess(List<String> result) {
readyLatch.countDown();
queueSubB.offer(result.contains(testChannelB) ? "success" : "channel-B-missing-from-subscriptions");
}
public void onFailure(Throwable t) {
queueSubB.offer("subscription-B-failure");
}
}
);
// Publisher A:
Futures.addCallback(
PubSubSDK.getInstance().connect(keys, new PubSubOptions(host)),
new FutureCallback<PubSubHandle>() {
public void onSuccess(PubSubHandle handle) {
pubHandleA.set(handle);
readyLatch.countDown();
}
public void onFailure(Throwable t) {
queuePubA.offer("publisher-A-connect-failure");
}
}
);
final Runnable publishA = new Runnable() {
public void run() {
Futures.addCallback(
pubHandleA.get().publishWithAck(testChannelA, testMessageA),
new FutureCallback<UUID>() {
public void onSuccess(UUID messageId) {
messageIdA.set(messageId);
queuePubA.offer(messageId == null ? "null-sequence-for-channel-A-publish" : "success");
}
public void onFailure(Throwable t) {
queuePubA.offer("publish-to-channel-A-failed");
}
}
);
}
};
// Publisher B:
Futures.addCallback(
PubSubSDK.getInstance().connect(keys, new PubSubOptions(host)),
new FutureCallback<PubSubHandle>() {
public void onSuccess(PubSubHandle handle) {
pubHandleB.set(handle);
readyLatch.countDown();
}
public void onFailure(Throwable t) {
queuePubB.offer("publisher-B-connect-failure");
}
}
);
final Runnable publishB = new Runnable() {
public void run() {
Futures.addCallback(
pubHandleB.get().publishWithAck(testChannelB, testMessageB),
new FutureCallback<UUID>() {
public void onSuccess(UUID messageId) {
messageIdB.set(messageId);
queuePubB.offer(messageId == null ? "null-message-id-for-channel-B-publish" : "success");
}
public void onFailure(Throwable t) {
queuePubB.offer("publish-to-channel-B-failed");
}
}
);
}
};
long asyncTimeoutSeconds = 3;
// Wait for all connections to be established before publishing.
assertTrue("Timed out waiting for the four connections to be ready", readyLatch.await(asyncTimeoutSeconds, TimeUnit.SECONDS));
executor.execute(publishA);
executor.execute(publishB);
assertEquals("success", queueSubA.poll(asyncTimeoutSeconds, TimeUnit.SECONDS));
assertEquals("success", queueSubB.poll(asyncTimeoutSeconds, TimeUnit.SECONDS));
assertEquals("success", queuePubA.poll(asyncTimeoutSeconds, TimeUnit.SECONDS));
assertEquals("success", queuePubB.poll(asyncTimeoutSeconds, TimeUnit.SECONDS));
PubSubMessageRecord recordA = messageQueueA.poll(asyncTimeoutSeconds, TimeUnit.SECONDS);
assertNotNull(recordA);
assertEquals(testMessageA, recordA.getMessage());
assertEquals(testChannelA, recordA.getChannel());
assertEquals(messageIdA.get(), recordA.getId());
PubSubMessageRecord recordB = messageQueueB.poll(asyncTimeoutSeconds, TimeUnit.SECONDS);
assertNotNull(recordB);
assertEquals(testMessageB, recordB.getMessage());
assertEquals(testChannelB, recordB.getChannel());
assertEquals(messageIdB.get(), recordB.getId());
}
/**
* Test a single PubSubHandle going through all of the features in sequence.
*/
public void testFullSweep() throws Exception {
final Container<PubSubHandle> handle = new Container<>();
final Container<String> failure = new Container<>();
final Container<UUID> oldSession = new Container<>();
final Container<UUID> newSession = new Container<>();
final Container<UUID> replacementSession = new Container<>();
final Container<UUID> firstMessageId = new Container<>();
final Container<UUID> secondMessageId = new Container<>();
final Container<SortedSet<String>> subscribeMainSubscriptions = new Container<>();
final Container<SortedSet<String>> subscribeControlSubscriptions = new Container<>();
final Container<SortedSet<String>> reconnectSubscriptions = new Container<>();
final Container<SortedSet<String>> unsubscribeMainSubscriptions = new Container<>();
final Container<SortedSet<String>> unsubscribeAllSubscriptions = new Container<>();
final Container<PubSubMessageRecord> controlMessage = new Container<>();
final BlockingQueue<String> reconnectQueue = new LinkedBlockingQueue<>(1);
final BlockingQueue<String> messageDeliveredQueue = new LinkedBlockingQueue<>(1);
final BlockingQueue<String> closeQueue = new LinkedBlockingQueue<>(1);
final BlockingQueue<PubSubMessageRecord> messageQueue = new LinkedBlockingQueue<>(1);
final String mainChannel = "MAIN-TEST-CHANNEL-" + System.nanoTime();
final String controlChannel = "CONTROL-TEST-CHANNEL-" + System.nanoTime();
final String firstMessage = "TEST-MESSAGE-" + System.nanoTime();
final String secondMessage = "TEST-MESSAGE-" + System.nanoTime();
// Message handler for the main channel.
final PubSubMessageHandler mainChannelMessageHandler = new PubSubMessageHandler() {
public void onMessage(PubSubMessageRecord record) {
messageQueue.offer(record);
}
};
final PubSubRawRecordHandler rawRecordHandler = new PubSubRawRecordHandler() {
public void onRawRecord(String rawRecord) {
Log.e("MESSAGE_FOR_YOU_SIR::::", rawRecord);
}
};
// Message handler for the control channel (should never receive anything).
final PubSubMessageHandler controlChannelMessageHandler = new PubSubMessageHandler() {
public void onMessage(PubSubMessageRecord record) {
controlMessage.set(record);
}
};
// Reconnect handler.
final PubSubReconnectHandler reconnectHandler = new PubSubReconnectHandler() {
public void onReconnect() {
reconnectQueue.offer("reconnected");
}
};
// New session handler.
final PubSubNewSessionHandler newSessionHandler = new PubSubNewSessionHandler() {
public void onNewSession(UUID uuid) {
replacementSession.set(uuid);
}
};
// Socket close handler.
final PubSubCloseHandler closeHandler = new PubSubCloseHandler() {
public void onClose(Throwable error) {
closeQueue.offer("closed");
}
};
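// Steps exercised below: connect -> getSessionUuid -> subscribe(main) -> subscribe(control)
// -> publishWithAck -> dropConnection/reconnect -> getSessionUuid -> listSubscriptions
// -> publishWithAck -> unsubscribe(main) -> unsubscribeAll -> close.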
// Establish the initial connection.
ListenableFuture<PubSubHandle> connectFuture = PubSubSDK.getInstance().connect(keys, new PubSubOptions(host));
// Stash the handle then fetch the session UUID.
AsyncFunction<PubSubHandle, UUID> getSessionIdTransformer = new AsyncFunction<PubSubHandle, UUID>() {
@Override
public ListenableFuture<UUID> apply(PubSubHandle pubsubHandle) throws Exception {
// Stash the PubSubHandle for later use.
handle.set(stashHandle(pubsubHandle));
pubsubHandle.onReconnect(reconnectHandler);
pubsubHandle.onNewSession(newSessionHandler);
pubsubHandle.onRawRecord(rawRecordHandler);
pubsubHandle.onClose(closeHandler);
return pubsubHandle.getSessionUuid();
}
};
ListenableFuture<UUID> oldSessionIdFuture = Futures.transformAsync(connectFuture, getSessionIdTransformer);
// Record the old session UUID then subscribe to main channel.
AsyncFunction<UUID, List<String>> subscribeToMainTransformer = new AsyncFunction<UUID, List<String>>() {
public ListenableFuture<List<String>> apply(UUID sessionId) {
oldSession.set(sessionId);
// Subscribe to the main channel.
return handle.get().subscribe(mainChannel, mainChannelMessageHandler);
}
};
ListenableFuture<List<String>> mainSubscribeFuture = Futures.transformAsync(oldSessionIdFuture, subscribeToMainTransformer);
// Report the subscriptions post subscribe to main, then subscribe to the control channel.
AsyncFunction<List<String>, List<String>> subscribeToControlTransformer = new AsyncFunction<List<String>, List<String>>() {
public ListenableFuture<List<String>> apply(List<String> subscriptions) {
// Now subscribed to main channel.
subscribeMainSubscriptions.set(new TreeSet<>(subscriptions));
// Subscribe to the control channel
return handle.get().subscribe(controlChannel, controlChannelMessageHandler);
}
};
ListenableFuture<List<String>> controlSubscribeFuture = Futures.transformAsync(mainSubscribeFuture, subscribeToControlTransformer);
// Report the subscriptions post subscribe to control, then publish the first message.
AsyncFunction<List<String>, UUID> publishFirstMessageTransformer = new AsyncFunction<List<String>, UUID>() {
public ListenableFuture<UUID> apply(List<String> subscriptions) throws Exception {
// Now subscribed to control channel.
subscribeControlSubscriptions.set(new TreeSet<>(subscriptions));
// Publish the first message, expecting an acknowledgement.
return handle.get().publishWithAck(mainChannel, firstMessage);
}
};
ListenableFuture<UUID> publishFirstMessageFuture = Futures.transformAsync(controlSubscribeFuture, publishFirstMessageTransformer);
// Record the first message's ID once its publish is acknowledged.
FutureCallback<UUID> firstMessageConfirmationCallback = new FutureCallback<UUID>() {
public void onSuccess(UUID messageId) {
// First message was successfully delivered.
firstMessageId.set(messageId);
// Notify of message 1 delivery.
messageDeliveredQueue.offer("message-1-published");
}
public void onFailure(Throwable t) {
failure.set("failed-before-first-publish");
}
};
Futures.addCallback(publishFirstMessageFuture, firstMessageConfirmationCallback);
// Wait for the first message, and evaluate its contents.
assertEquals("message-1-published", messageDeliveredQueue.poll(asyncTimeoutSeconds, TimeUnit.SECONDS));
assertNotNull(firstMessageId.get());
PubSubMessageRecord msg1Record = messageQueue.poll(asyncTimeoutSeconds, TimeUnit.SECONDS);
assertNotNull(msg1Record);
assertEquals(firstMessageId.get(), msg1Record.getId());
assertEquals(mainChannel, msg1Record.getChannel());
assertEquals(firstMessage, msg1Record.getMessage());
// After receiving the first message, drop the connection.
handle.get().dropConnection(new PubSubDropConnectionOptions(
Duration.of(0L, TimeUnit.MICROSECONDS)
));
// Once we have reconnected (reconnectHandler), fetch the new session UUID.
assertEquals("reconnected", reconnectQueue.poll(asyncTimeoutSeconds, TimeUnit.SECONDS));
ListenableFuture<UUID> newSessionIdFuture = handle.get().getSessionUuid();
// Record the new session UUID, then fetch the list of subscriptions to confirm that they were restored.
AsyncFunction<UUID, List<String>> fetchSubscriptionsTransformer = new AsyncFunction<UUID, List<String>>() {
public ListenableFuture<List<String>> apply(UUID sessionId) throws Exception {
// Stash the reconnect session's ID.
newSession.set(sessionId);
// List the channels to which we are subscribed post reconnect.
return handle.get().listSubscriptions();
}
};
ListenableFuture<List<String>> restoredSubscriptionsFuture = Futures.transformAsync(newSessionIdFuture, fetchSubscriptionsTransformer);
// Record the restored subscriptions, then publish the second message (with acknowledgement).
AsyncFunction<List<String>, UUID> publishMessageTransformer = new AsyncFunction<List<String>, UUID>() {
public ListenableFuture<UUID> apply(List<String> subscriptions) throws Exception {
// Stash the post-reconnect (restored) subscriptions.
reconnectSubscriptions.set(new TreeSet<>(subscriptions));
// Publish the second message, expecting an acknowledgement.
return handle.get().publishWithAck(mainChannel, secondMessage);
}
};
ListenableFuture<UUID> publishedMessageIdFuture = Futures.transformAsync(restoredSubscriptionsFuture, publishMessageTransformer);
FutureCallback<UUID> publishMessageCallback = new FutureCallback<UUID>() {
public void onSuccess(UUID messageId) {
// Second message was successfully delivered.
secondMessageId.set(messageId);
// Notify of message 2 delivery.
messageDeliveredQueue.offer("message-2-published");
}
public void onFailure(Throwable t) {
messageDeliveredQueue.offer("failed-to-publish-message");
}
};
Futures.addCallback(publishedMessageIdFuture, publishMessageCallback);
// Once the message has been published and delivered, then move on to unsubscribe operations.
assertEquals("message-2-published", messageDeliveredQueue.poll(asyncTimeoutSeconds, TimeUnit.SECONDS));
assertNotNull(secondMessageId.get());
assertEquals(oldSession.get(), newSession.get());
assertEquals(new TreeSet<>(Arrays.asList(mainChannel)), subscribeMainSubscriptions.get());
assertEquals(new TreeSet<>(Arrays.asList(mainChannel, controlChannel)), subscribeControlSubscriptions.get());
assertEquals(new TreeSet<>(Arrays.asList(mainChannel, controlChannel)), reconnectSubscriptions.get());
PubSubMessageRecord msg2Record = messageQueue.poll(asyncTimeoutSeconds, TimeUnit.SECONDS);
assertNotNull(msg2Record);
assertEquals(secondMessageId.get(), msg2Record.getId());
assertEquals(mainChannel, msg2Record.getChannel());
assertEquals(secondMessage, msg2Record.getMessage());
// Now unsubscribe from the main channel.
ListenableFuture<List<String>> unsubscribeMainFuture = handle.get().unsubscribe(mainChannel);
// After recording the subscriptions post unsubscribe from main channel, unsubscribe from all channels.
AsyncFunction<List<String>, List<String>> unsubscribeAllTransformer = new AsyncFunction<List<String>, List<String>>() {
public ListenableFuture<List<String>> apply(List<String> subscriptions) throws Exception {
// Stash the list of subscriptions post unsubscribe from main.
unsubscribeMainSubscriptions.set(new TreeSet<>(subscriptions));
// Now unsubscribe from all channels.
return handle.get().unsubscribeAll();
}
};
ListenableFuture<List<String>> unsubscribeAllFuture = Futures.transformAsync(unsubscribeMainFuture, unsubscribeAllTransformer);
// After recording the subscriptions post unsubscribe all, close the connection.
AsyncFunction<List<String>, Void> closeTransformer = new AsyncFunction<List<String>, Void>() {
public ListenableFuture<Void> apply(List<String> subscriptions) throws Exception {
// Stash the list of subscriptions post unsubscribe from all.
unsubscribeAllSubscriptions.set(new TreeSet<>(subscriptions));
// Now close the connection.
return handle.get().close();
}
};
// Wire up the final step so the close actually happens once unsubscribeAll completes.
Futures.transformAsync(unsubscribeAllFuture, closeTransformer);
assertEquals("closed", closeQueue.poll(asyncTimeoutSeconds, TimeUnit.SECONDS));
assertNotNull(handle.get());
assertNull(failure.get());
assertNull(controlMessage.get());
assertNull(replacementSession.get());
assertEquals(new TreeSet<>(Arrays.asList(controlChannel)), unsubscribeMainSubscriptions.get());
assertEquals(new TreeSet<>(Arrays.asList(controlChannel)), unsubscribeAllSubscriptions.get());
}
}
|
package com.gmail.liamgomez75.parkourroll.utils;
import org.bukkit.ChatColor;
import org.bukkit.World;
import org.bukkit.command.CommandSender;
import org.bukkit.entity.Player;
import org.bukkit.plugin.Plugin;
/**
* Utility methods that interact with a configuration file for Level values.
*
* @author JamesHealey94 <jameshealey1994.gmail.com>
* @author Liam Gomez <liamgomez75.gmail.com>
*/
public abstract class LevelConfigUtils {
/**
* The config key under which Level values are stored.
*/
public static final String LVL_CONFIG_STRING = "Level";
/**
* The default value, used if no other values are found.
*/
public static final int LVL_DEFAULT = 1;
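// Illustrative sketch of the config layout these helpers read and write (world and player
// names are placeholders):
//
// Server:
//   Worlds:
//     <world name>:
//       Players:
//         <player name>:
//           Level: 1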
/**
* Sets the Level of the passed player in the passed world to the passed
* state, then saves the config.
*
* @param player player to set the Level of
* @param world world in which to set the player's Level
* @param level Level to change to
* @param plugin plugin with the config storing Level values
*/
public static void setPlayerLevel(Player player, World world, int level, Plugin plugin) {
final String path = "Server.Worlds." + world.getName() + ".Players." + player.getName() + "." + LVL_CONFIG_STRING;
plugin.getConfig().set(path, level);
plugin.saveConfig();
}
/**
* Return the specified player's Level in a specified world.
* The method first looks for a specific value for the player in the world.
*
* If that is not found, the method checks the default value for the world.
*
* If that is not found, the method checks the default value for the server.
*
* If there is an error with the default value for the server, the default
* is returned.
*
* Currently the default is 1.
*
* @param player the player being checked
* @param world the world of the player being checked
* @param plugin plugin with config which stores Level data
* @return the Level of player in world
*/
public static int getPlayerLevel(Player player, World world, Plugin plugin) {
return plugin.getConfig().getInt("Server.Worlds." + world.getName() + ".Players." + player.getName() + "." + LVL_CONFIG_STRING,
LVL_DEFAULT);
}
/**
* Sets the Level of the passed player in the passed world to the passed
* state, then saves the config.
* Uses Strings for the player and world names instead of Player and World objects.
* @param player name of the player to set the Level of
* @param world name of the world in which to set the player's Level
* @param level Level to change to
* @param plugin plugin with the config storing Level values
* @param sender sender to receive error messages if the world or player is not found
*/
public static void setPlayerLevel(String player, String world, int level, Plugin plugin, CommandSender sender) {
if (plugin.getConfig().getString("Server.Worlds." + world) != null) {
if (plugin.getConfig().getString("Server.Worlds." + world + ".Players." + player) != null) {
final String path = "Server.Worlds." + world + ".Players." + player + "." + LVL_CONFIG_STRING;
plugin.getConfig().set(path, level);
plugin.saveConfig();
} else {
sender.sendMessage(ChatColor.RED + "Player not found.");
}
} else {
sender.sendMessage(ChatColor.RED + "World '" + world + "' does not exist.");
}
}
/**
* Return the specified player's Level in a specified world.
* The method first looks for a specific value for the player in the world.
*
* If that is not found, the method checks the default value for the world.
*
* If that is not found, the method checks the default value for the server.
*
* If there is an error with the default value for the server, the default
* is returned.
*
* Currently the default is 1.
* Uses Strings for the player and world names instead of Player and World objects.
* @param player name of the player being checked
* @param world name of the world of the player being checked
* @param plugin plugin with config which stores Level data
* @param sender sender to receive error messages if the world or player is not found
* @return the Level of player in world, or -1 if the world or player is not found
*/
public static int getPlayerLevel(String player, String world, Plugin plugin, CommandSender sender) {
if (plugin.getConfig().getString("Server.Worlds." + world) != null) {
if (plugin.getConfig().getString("Server.Worlds." + world + ".Players." + player) != null) {
return plugin.getConfig().getInt("Server.Worlds." + world + ".Players." + player + "." + LVL_CONFIG_STRING,
LVL_DEFAULT);
} else {
sender.sendMessage(ChatColor.RED + "Player not found.");
}
} else {
sender.sendMessage(ChatColor.RED + "World '" + world + "' does not exist.");
}
return -1;
}
}
|
package com.mebigfatguy.fbcontrib.detect;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.bcel.classfile.Code;
import com.mebigfatguy.fbcontrib.utils.BugType;
import com.mebigfatguy.fbcontrib.utils.UnmodifiableSet;
import edu.umd.cs.findbugs.BugInstance;
import edu.umd.cs.findbugs.BugReporter;
import edu.umd.cs.findbugs.BytecodeScanningDetector;
import edu.umd.cs.findbugs.OpcodeStack;
import edu.umd.cs.findbugs.OpcodeStack.CustomUserValue;
import edu.umd.cs.findbugs.ba.ClassContext;
/**
* looks for calls to common methods that are believed to be non-mutating, where the return value is discarded. Since the method makes no changes to the object, calling this
* method is useless. The method call can be removed.
*/
@CustomUserValue
public class NonProductiveMethodCall extends BytecodeScanningDetector {
private static final Set<Pattern> IMMUTABLE_METHODS = UnmodifiableSet.create(
// @formatter:off
Pattern.compile(".*@toString\\(\\)Ljava/lang/String;"),
Pattern.compile("java/lang/.+@.+Value\\(\\)[BCDFIJSZ]"),
Pattern.compile(".*@equals\\(Ljava/lang/Object;\\)Z"),
Pattern.compile(".*@hashCode\\(\\)I"),
Pattern.compile(".*@clone\\(\\).+"),
Pattern.compile("java/util/.+@toArray\\(\\)\\[.+"),
Pattern.compile("java/time/(?:Instant|((?:Local|Zoned)(?:Date)?(?:Time)?))@(?:plus|minus|with).*"),
Pattern.compile("java/nio/file/Path@.*"),
Pattern.compile("java/lang/Enum@.*")
// @formatter:on
);
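// The method keys matched against these patterns are built in sawOpcode() as
// <class>@<name><signature>, e.g. "java/lang/String@toString()Ljava/lang/String;"
// matches the first pattern and so flags a discarded toString() call.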
private BugReporter bugReporter;
private OpcodeStack stack;
/**
* constructs a NPMC detector given the reporter to report bugs on
*
* @param bugReporter
* the sink of bug reports
*/
public NonProductiveMethodCall(BugReporter bugReporter) {
this.bugReporter = bugReporter;
}
/**
* implements the visitor to set and clear the stack
*/
@Override
public void visitClassContext(ClassContext classContext) {
try {
stack = new OpcodeStack();
super.visitClassContext(classContext);
} finally {
stack = null;
}
}
/**
* implements the visitor to reset the opcode stack
*
* @param obj
* the context object of the currently parsed code block
*/
@Override
public void visitCode(Code obj) {
stack.resetForMethodEntry(this);
super.visitCode(obj);
}
/**
* implements the visitor to look for return values of common immutable method calls, that are thrown away.
*
* @param seen
* the opcode of the currently parsed instruction
*/
@Override
public void sawOpcode(int seen) {
String methodInfo = null;
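// methodInfo is filled in when a non-void invocation is seen and, in the finally block,
// attached as a user value to the stack item holding the return value; if that item is
// later consumed by a POP/POP2, the patterns above decide whether the result of a
// non-mutating call was silently discarded.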
try {
stack.precomputation(this);
switch (seen) {
case INVOKEVIRTUAL:
case INVOKEINTERFACE:
case INVOKESTATIC:
String sig = getSigConstantOperand();
if (!sig.endsWith("V")) {
methodInfo = getClassConstantOperand() + '@' + getNameConstantOperand() + getSigConstantOperand();
}
break;
case POP:
case POP2:
if (stack.getStackDepth() > 0) {
OpcodeStack.Item item = stack.getStackItem(0);
String mInfo = (String) item.getUserValue();
if (mInfo != null) {
for (Pattern p : IMMUTABLE_METHODS) {
Matcher m = p.matcher(mInfo);
if (m.matches()) {
bugReporter.reportBug(new BugInstance(this, BugType.NPMC_NON_PRODUCTIVE_METHOD_CALL.name(), NORMAL_PRIORITY).addClass(this)
.addMethod(this).addSourceLine(this).addString(mInfo));
break;
}
}
}
}
break;
}
} finally {
stack.sawOpcode(this, seen);
if ((methodInfo != null) && (stack.getStackDepth() > 0)) {
OpcodeStack.Item item = stack.getStackItem(0);
item.setUserValue(methodInfo);
}
}
}
}
|
package com.mebigfatguy.fbcontrib.detect;
import java.util.ArrayList;
import java.util.BitSet;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import org.apache.bcel.Constants;
import org.apache.bcel.Repository;
import org.apache.bcel.classfile.Code;
import org.apache.bcel.classfile.CodeException;
import org.apache.bcel.classfile.ExceptionTable;
import org.apache.bcel.classfile.JavaClass;
import org.apache.bcel.classfile.LocalVariable;
import org.apache.bcel.classfile.LocalVariableTable;
import org.apache.bcel.classfile.Method;
import org.apache.bcel.generic.Type;
import com.mebigfatguy.fbcontrib.utils.BugType;
import com.mebigfatguy.fbcontrib.utils.RegisterUtils;
import com.mebigfatguy.fbcontrib.utils.ToString;
import com.mebigfatguy.fbcontrib.utils.Values;
import edu.umd.cs.findbugs.BugInstance;
import edu.umd.cs.findbugs.BugReporter;
import edu.umd.cs.findbugs.BytecodeScanningDetector;
import edu.umd.cs.findbugs.OpcodeStack;
import edu.umd.cs.findbugs.ba.ClassContext;
/**
* looks for parameters that are declared as concrete classes, but only use methods defined by an
* implemented interface or super class. Relying on concrete classes in public signatures causes tight coupling,
* and makes low-impact changes more difficult.
*/
public class OverlyConcreteParameter extends BytecodeScanningDetector
{
private final BugReporter bugReporter;
private JavaClass[] constrainingClasses;
private Map<Integer, Map<JavaClass, List<MethodInfo>>> parameterDefiners;
private BitSet usedParameters;
private JavaClass objectClass;
private OpcodeStack stack;
private int parmCount;
private boolean methodSignatureIsConstrained;
private boolean methodIsStatic;
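// Illustrative example of what this detector flags (not part of the detector itself):
//
//     public int sumSizes(ArrayList<String> items) {  // only List/Collection methods are called,
//         return items.size();                        // so OCP reports that the parameter could be
//     }                                               // declared as one of its interfaces (e.g. List)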
/**
* constructs an OCP detector given the reporter to report bugs on
* @param bugReporter the sink of bug reports
*/
public OverlyConcreteParameter(final BugReporter bugReporter) {
this.bugReporter = bugReporter;
try {
objectClass = Repository.lookupClass("java/lang/Object");
} catch (ClassNotFoundException cnfe) {
bugReporter.reportMissingClass(cnfe);
objectClass = null;
}
}
@Override
public void visitClassContext(ClassContext classContext) {
try {
JavaClass[] infs = classContext.getJavaClass().getAllInterfaces();
JavaClass[] sups = classContext.getJavaClass().getSuperClasses();
constrainingClasses = new JavaClass[infs.length + sups.length];
System.arraycopy(infs, 0, constrainingClasses, 0, infs.length);
System.arraycopy(sups, 0, constrainingClasses, infs.length, sups.length);
parameterDefiners = new HashMap<Integer, Map<JavaClass, List<MethodInfo>>>();
usedParameters = new BitSet();
stack = new OpcodeStack();
super.visitClassContext(classContext);
} catch (ClassNotFoundException cnfe) {
bugReporter.reportMissingClass(cnfe);
} finally {
constrainingClasses = null;
parameterDefiners = null;
usedParameters = null;
stack = null;
}
}
@Override
public void visitMethod(Method obj) {
methodSignatureIsConstrained = false;
String methodName = obj.getName();
if (!Values.CONSTRUCTOR.equals(methodName)
&& !Values.STATIC_INITIALIZER.equals(methodName)) {
String methodSig = obj.getSignature();
methodSignatureIsConstrained = methodIsSpecial(methodName, methodSig);
if (!methodSignatureIsConstrained) {
String parms = methodSig.split("\\(|\\)")[1];
if (parms.indexOf(';') >= 0) {
outer:for (JavaClass cls : constrainingClasses) {
Method[] methods = cls.getMethods();
for (Method m : methods) {
if (methodName.equals(m.getName())) {
if (methodSig.equals(m.getSignature())) {
methodSignatureIsConstrained = true;
break outer;
}
}
}
}
}
}
}
}
@Override
public void visitCode(final Code obj) {
try {
if (methodSignatureIsConstrained) {
return;
}
if (obj.getCode() == null) {
return;
}
Method m = getMethod();
if (m.isSynthetic()) {
return;
}
if (m.getName().startsWith("access$")) {
return;
}
methodIsStatic = m.isStatic();
parmCount = m.getArgumentTypes().length;
if (parmCount == 0) {
return;
}
parameterDefiners.clear();
usedParameters.clear();
stack.resetForMethodEntry(this);
if (buildParameterDefiners()) {
super.visitCode(obj);
reportBugs();
}
} catch (ClassNotFoundException cnfe) {
bugReporter.reportMissingClass(cnfe);
}
}
@Override
public void sawOpcode(final int seen) {
if (parameterDefiners.isEmpty()) {
return;
}
try {
stack.precomputation(this);
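// Local variable slot 0 holds 'this' in instance methods, so a register number is turned
// into a zero-based parameter index by decrementing it for non-static methods before it
// is compared against parmCount.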
if ((seen == INVOKEVIRTUAL) || (seen == INVOKESTATIC) || (seen == INVOKESPECIAL) || (seen == INVOKEINTERFACE)) {
String methodSig = getSigConstantOperand();
Type[] parmTypes = Type.getArgumentTypes(methodSig);
int stackDepth = stack.getStackDepth();
if (stackDepth >= parmTypes.length) {
for (int i = 0; i < parmTypes.length; i++) {
OpcodeStack.Item itm = stack.getStackItem(i);
int reg = itm.getRegisterNumber();
removeUselessDefiners(parmTypes[parmTypes.length - i - 1].getSignature(), reg);
}
}
if ((seen != INVOKESPECIAL) && (seen != INVOKESTATIC)) {
if (stackDepth > parmTypes.length) {
OpcodeStack.Item itm = stack.getStackItem(parmTypes.length);
int reg = itm.getRegisterNumber();
int parm = reg;
if (!methodIsStatic) {
parm--;
}
if ((parm >= 0) && (parm < parmCount)) {
removeUselessDefiners(reg);
}
} else {
parameterDefiners.clear();
}
}
} else if ((seen == ASTORE) || ((seen >= ASTORE_0) && (seen <= ASTORE_3)) || (seen == PUTFIELD) || (seen == GETFIELD) || (seen == PUTSTATIC) || (seen == GETSTATIC)) {
//Don't check parameters that are aliased
if (stack.getStackDepth() > 0) {
OpcodeStack.Item itm = stack.getStackItem(0);
int reg = itm.getRegisterNumber();
int parm = reg;
if (!methodIsStatic) {
parm--;
}
if ((parm >= 0) && (parm < parmCount)) {
parameterDefiners.remove(Integer.valueOf(reg));
}
} else {
parameterDefiners.clear();
}
if ((seen == GETFIELD) || (seen == PUTFIELD)) {
if (stack.getStackDepth() > 1) {
OpcodeStack.Item itm = stack.getStackItem(1);
int reg = itm.getRegisterNumber();
int parm = reg;
if (!methodIsStatic) {
parm--;
}
if ((parm >= 0) && (parm < parmCount)) {
parameterDefiners.remove(Integer.valueOf(reg));
}
} else {
parameterDefiners.clear();
}
}
} else if ((seen == ALOAD) || ((seen >= ALOAD_0) && (seen <= ALOAD_3))) {
int reg = RegisterUtils.getALoadReg(this, seen);
int parm = reg;
if (!methodIsStatic) {
parm--;
}
if ((parm >= 0) && (parm < parmCount)) {
usedParameters.set(reg);
}
} else if (seen == AASTORE) {
//Don't check parameters that are stored in
if (stack.getStackDepth() >= 3) {
OpcodeStack.Item itm = stack.getStackItem(0);
int reg = itm.getRegisterNumber();
int parm = reg;
if (!methodIsStatic) {
parm--;
}
if ((parm >= 0) && (parm < parmCount)) {
parameterDefiners.remove(Integer.valueOf(reg));
}
} else {
parameterDefiners.clear();
}
} else if (seen == ARETURN) {
if (stack.getStackDepth() >= 1) {
OpcodeStack.Item item = stack.getStackItem(0);
int reg = item.getRegisterNumber();
int parm = reg;
if (!methodIsStatic) {
parm--;
}
if ((parm >= 0) && (parm < parmCount)) {
parameterDefiners.remove(Integer.valueOf(reg));
}
} else {
parameterDefiners.clear();
}
}
} finally {
stack.sawOpcode(this, seen);
}
}
private static boolean methodIsSpecial(String methodName, String methodSig) {
return ("readObject".equals(methodName) && "(Ljava/io/ObjectInputStream;)V".equals(methodSig));
}
private void reportBugs() {
Iterator<Map.Entry<Integer, Map<JavaClass, List<MethodInfo>>>> it = parameterDefiners.entrySet().iterator();
while (it.hasNext()) {
Map.Entry<Integer, Map<JavaClass, List<MethodInfo>>> entry = it.next();
Integer reg = entry.getKey();
if (!usedParameters.get(reg.intValue())) {
it.remove();
continue;
}
Map<JavaClass, List<MethodInfo>> definers = entry.getValue();
definers.remove(objectClass);
if (definers.size() > 0) {
String name = "";
LocalVariableTable lvt = getMethod().getLocalVariableTable();
if (lvt != null) {
LocalVariable lv = lvt.getLocalVariable(reg.intValue(), 0);
if (lv != null) {
name = lv.getName();
}
}
int parm = reg.intValue();
if (!methodIsStatic) {
parm--;
}
parm++; //users expect 1 based parameters
String infName = definers.keySet().iterator().next().getClassName();
bugReporter.reportBug( new BugInstance(this, BugType.OCP_OVERLY_CONCRETE_PARAMETER.name(), NORMAL_PRIORITY)
.addClass(this)
.addMethod(this)
.addSourceLine(this, 0)
.addString(getCardinality(parm)+" parameter '" + name + "' could be declared as " + infName +" instead"));
}
}
}
private static String getCardinality(int num) {
if (num == 1) {
return "1st";
}
if (num == 2) {
return "2nd";
}
if (num == 3) {
return "3rd";
}
return num + "th";
}
/**
* builds the parameterDefiners map, recording for each eligible parameter the interfaces it implements
* and the public methods those interfaces define
* @return true if at least one parameter might be overly concrete and warrants further analysis
*
* @throws ClassNotFoundException if a parameter's class can't be loaded
*/
private boolean buildParameterDefiners()
throws ClassNotFoundException {
Type[] parms = getMethod().getArgumentTypes();
if (parms.length == 0) {
return false;
}
boolean hasPossiblyOverlyConcreteParm = false;
for (int i = 0; i < parms.length; i++) {
String parm = parms[i].getSignature();
if (parm.startsWith("L")) {
String clsName = parm.substring(1, parm.length() - 1).replace('/', '.');
if (clsName.startsWith("java.lang.")) {
continue;
}
JavaClass cls = Repository.lookupClass(clsName);
if (cls.isClass() && (!cls.isAbstract())) {
Map<JavaClass, List<MethodInfo>> definers = getClassDefiners(cls);
if (definers.size() > 0) {
parameterDefiners.put( Integer.valueOf(i + (methodIsStatic ? 0 : 1)), definers );
hasPossiblyOverlyConcreteParm = true;
}
}
}
}
return hasPossiblyOverlyConcreteParm;
}
/**
* returns a map of method information for each public method for each interface this class implements
* @param cls the class whose interfaces to record
*
* @return a map of (method name)(method sig) by interface
* @throws ClassNotFoundException if unable to load the class
*/
private static Map<JavaClass, List<MethodInfo>> getClassDefiners(final JavaClass cls)
throws ClassNotFoundException {
Map<JavaClass, List<MethodInfo>> definers = new HashMap<JavaClass, List<MethodInfo>>();
for (JavaClass ci : cls.getAllInterfaces()) {
if ("java.lang.Comparable".equals(ci.getClassName())) {
continue;
}
List<MethodInfo> methodInfos = getPublicMethodInfos(ci);
if (methodInfos.size() > 0) {
definers.put(ci, methodInfos);
}
}
return definers;
}
/**
* returns a list of method information for all public or protected methods in this class
*
* @param cls the class in which to look for methods
* @return a list of (method name, method signature, thrown exceptions) entries
*/
private static List<MethodInfo> getPublicMethodInfos(final JavaClass cls) {
List<MethodInfo> methodInfos = new ArrayList<MethodInfo>();
Method[] methods = cls.getMethods();
for (Method m : methods) {
if ((m.getAccessFlags() & (Constants.ACC_PUBLIC|Constants.ACC_PROTECTED)) != 0) {
ExceptionTable et = m.getExceptionTable();
methodInfos.add(new MethodInfo(m.getName(), m.getSignature(), et == null ? null : et.getExceptionNames()));
}
}
return methodInfos;
}
private void removeUselessDefiners(final int reg) {
Map<JavaClass, List<MethodInfo>> definers = parameterDefiners.get(Integer.valueOf(reg));
if ((definers != null) && (definers.size() > 0)) {
String methodSig = getSigConstantOperand();
String methodName = getNameConstantOperand();
MethodInfo methodInfo = new MethodInfo(methodName, methodSig, null);
Iterator<List<MethodInfo>> it = definers.values().iterator();
while (it.hasNext()) {
boolean methodDefined = false;
List<MethodInfo> methodSigs = it.next();
for (MethodInfo mi : methodSigs) {
if (methodInfo.equals(mi)) {
methodDefined = true;
String[] exceptions = mi.getMethodExceptions();
if (exceptions != null) {
for (String ex : exceptions) {
if (!isExceptionHandled(ex)) {
methodDefined = false;
break;
}
}
}
break;
}
}
if (!methodDefined) {
it.remove();
}
}
if (definers.isEmpty()) {
parameterDefiners.remove(Integer.valueOf(reg));
}
}
}
/**
* returns whether this exception is handled either in a try/catch or throws clause at this pc
*
* @param ex the name of the exception
*
* @return whether the exception is handled
*/
private boolean isExceptionHandled(String ex) {
try {
JavaClass thrownEx = Repository.lookupClass(ex);
//First look at the throws clause
ExceptionTable et = getMethod().getExceptionTable();
if (et != null) {
String[] throwClauseExNames = et.getExceptionNames();
for (String throwClauseExName : throwClauseExNames) {
JavaClass throwClauseEx = Repository.lookupClass(throwClauseExName);
if (thrownEx.instanceOf(throwClauseEx)) {
return true;
}
}
}
// Next look at the try catch blocks
CodeException[] catchExs = getCode().getExceptionTable();
if (catchExs != null) {
int pc = getPC();
for (CodeException catchEx : catchExs) {
if ((pc >= catchEx.getStartPC()) && (pc <= catchEx.getEndPC())) {
int type = catchEx.getCatchType();
if (type != 0) {
String catchExName = getConstantPool().getConstantString(type, Constants.CONSTANT_Class);
JavaClass catchException = Repository.lookupClass(catchExName);
if (thrownEx.instanceOf(catchException)) {
return true;
}
}
}
}
}
} catch (ClassNotFoundException cnfe) {
bugReporter.reportMissingClass(cnfe);
}
return false;
}
private void removeUselessDefiners(String parmSig, final int reg) {
if (parmSig.startsWith("L")) {
parmSig = parmSig.substring( 1, parmSig.length() - 1).replace('/', '.');
if ("java.lang.Object".equals(parmSig)) {
parameterDefiners.remove(Integer.valueOf(reg));
return;
}
Map<JavaClass, List<MethodInfo>> definers = parameterDefiners.get(Integer.valueOf(reg));
if ((definers != null) && (definers.size() > 0)) {
Iterator<JavaClass> it = definers.keySet().iterator();
while (it.hasNext()) {
JavaClass definer = it.next();
if (!definer.getClassName().equals(parmSig)) {
it.remove();
}
}
if (definers.isEmpty()) {
parameterDefiners.remove(Integer.valueOf(reg));
}
}
}
}
/**
* an inner helper class that holds basic information about a method
*/
static class MethodInfo
{
private final String methodName;
private final String methodSig;
private final String[] methodExceptions;
MethodInfo(String name, String sig, String[] excs) {
methodName = name;
methodSig = sig;
methodExceptions = excs;
}
String getMethodName() {
return methodName;
}
String getMethodSignature() {
return methodSig;
}
String[] getMethodExceptions() {
return methodExceptions;
}
@Override
public int hashCode() {
return methodName.hashCode() ^ methodSig.hashCode();
}
@Override
public boolean equals(Object o) {
if (!(o instanceof MethodInfo)) {
return false;
}
MethodInfo that = (MethodInfo)o;
if (!methodName.equals(that.methodName)) {
return false;
}
if (!methodSig.equals(that.methodSig)) {
return false;
}
return true;
}
@Override
public String toString() {
return ToString.build(this);
}
}
}
|
package com.swabunga.spell.engine;
import java.io.*;
import java.util.*;
/**
* Yet another <code>SpellDictionary</code>. This one is based on Damien Guillaume's
* disk-based dictionary but adds a cache to try to improve performance a bit.
*
* @author Robert Gustavsson
* @version 0.01
*/
public class SpellDictionaryCachedDichoDisk extends SpellDictionaryDichoDisk {
// Only used for testing to measure the effectiveness of the cache.
static public int hits=0;
static public int codes=0;
public static final String PRE_CACHE_FILE_EXT=".pre";
private static int MAX_CACHED=10000;
private HashMap suggestionCache=new HashMap(MAX_CACHED);
private String preCacheFileName;
/**
* Dictionary convenience constructor.
*/
public SpellDictionaryCachedDichoDisk(File wordList)
throws FileNotFoundException, IOException {
super(wordList);
loadPreCache(wordList);
}
/**
* Dictionary convenience constructor.
*/
public SpellDictionaryCachedDichoDisk(File wordList, String encoding)
throws FileNotFoundException, IOException {
super(wordList, encoding);
loadPreCache(wordList);
}
/**
* Dictionary constructor that uses an aspell phonetic file to
* build the transformation table.
*/
public SpellDictionaryCachedDichoDisk(File wordList, File phonetic)
throws FileNotFoundException, IOException {
super(wordList, phonetic);
loadPreCache(wordList);
}
/**
* Dictionary constructor that uses an aspell phonetic file to
* build the transformation table.
*/
public SpellDictionaryCachedDichoDisk(File wordList, File phonetic, String encoding)
throws FileNotFoundException, IOException {
super(wordList, phonetic, encoding);
loadPreCache(wordList);
}
/**
* Add a word permanently to the dictionary (and the dictionary file).
* <i>Not implemented!</i>
*/
public void addWord(String word) {
System.err.println("error: addWord is not implemented for SpellDictionaryDichoDisk");
}
/**
* Clears the cache.
*/
public void clearCache(){
suggestionCache.clear();
}
/**
* Returns a list of strings (words) for the code.
*/
public List getWords(String code) {
List list;
codes++;
if(suggestionCache.containsKey(code)){
hits++;
list=getCachedList(code);
return list;
}
list=super.getWords(code);
addToCache(code,list);
return list;
}
/**
* This method returns the cached suggestion list and also updates the entry's
* reference time to indicate this code has recently been referenced.
*/
private List getCachedList(String code){
CacheObject obj=(CacheObject)suggestionCache.get(code);
obj.setRefTime();
return obj.getSuggestionList();
}
/**
* Adds a code and its suggestion list to the cache.
*/
private void addToCache(String code, List l){
String c=null;
String lowestCode=null;
long lowestTime=Long.MAX_VALUE;
Iterator it;
CacheObject obj;
if(suggestionCache.size()>=MAX_CACHED){
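// Evict one entry before inserting: prefer an entry that has never been re-referenced
// (refTime == 0), otherwise evict the least recently referenced one.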
it=suggestionCache.keySet().iterator();
while(it.hasNext()){
c=(String)it.next();
obj=(CacheObject)suggestionCache.get(c);
if(obj.getRefTime()==0){
lowestCode=c;
break;
}
if(lowestTime>obj.getRefTime()){
lowestCode=c;
lowestTime=obj.getRefTime();
}
}
suggestionCache.remove(lowestCode);
}
suggestionCache.put(code,new CacheObject(l));
}
/**
* Load the cache from file. The cache file has the same name as the
* dictionary file with the .pre extension added.
*/
private void loadPreCache(File dicoFile)throws IOException{
String code;
List suggestions;
HashMap map;
long size;
preCacheFileName=dicoFile.getPath()+PRE_CACHE_FILE_EXT;
File preFile=new File(preCacheFileName);
if(!preFile.exists()){
System.out.println("No precache file");
return;
}
System.out.println("Precaching...");
ObjectInputStream in=new ObjectInputStream(new FileInputStream(preFile));
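// Pre-cache file layout (mirrored by saveCache()): a long entry count followed by
// alternating (String code, List suggestions) object pairs.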
try{
size=in.readLong();
for(int i=0;i<size;i++){
code=(String)in.readObject();
suggestions=(List)in.readObject();
suggestionCache.put(code,new CacheObject(suggestions));
}
in.close();
}catch(ClassNotFoundException ex){
System.out.println(ex.getMessage());
}catch(IOException ex){
System.out.println("EndOfFile");
}
}
/**
* Saves the current cache to file.
*/
public void saveCache() throws IOException{
String code;
if(preCacheFileName==null){
System.out.println("NULL");
return;
}
File preFile=new File(preCacheFileName);
System.out.println("Saving cache...");
ObjectOutputStream out=new ObjectOutputStream(new FileOutputStream(preFile));
Iterator it=suggestionCache.keySet().iterator();
out.writeLong(suggestionCache.size());
while(it.hasNext()){
code=(String)it.next();
out.writeObject(code);
out.writeObject(((CacheObject)suggestionCache.get(code)).getSuggestionList());
}
out.close();
}
// INNER CLASSES
// ------------------------------------------------------------------------
private class CacheObject implements Serializable{
private List suggestions=null;
private long refTime=0;
public CacheObject(List list){
this.suggestions=list;
}
public List getSuggestionList(){
return suggestions;
}
public void setRefTime(){
refTime=System.currentTimeMillis();
}
public long getRefTime(){
return refTime;
}
}
}
|
package com.untamedears.ItemExchange.command.commands;
import java.util.Iterator;
import org.apache.commons.lang.StringUtils;
import org.bukkit.ChatColor;
import org.bukkit.Material;
import org.bukkit.command.CommandSender;
import org.bukkit.enchantments.Enchantment;
import org.bukkit.entity.Player;
import org.bukkit.inventory.ItemStack;
import com.untamedears.ItemExchange.ItemExchangePlugin;
import com.untamedears.ItemExchange.command.PlayerCommand;
import com.untamedears.ItemExchange.exceptions.ExchangeRuleParseException;
import com.untamedears.ItemExchange.utility.ExchangeRule;
import com.untamedears.ItemExchange.utility.ExchangeRule.RuleType;
import com.untamedears.citadel.Citadel;
import com.untamedears.citadel.entity.Faction;
/*
* When the player is holding an exchange rule block, allows editing of the
* rule's individual fields.
*/
public class SetCommand extends PlayerCommand {
public SetCommand() {
super("Set Field");
setDescription("Sets the field of the ExchangeRule held in hand");
setUsage("/ieset");
setArgumentRange(1, 200);
setIdentifiers(new String[] { "ieset", "ies" });
}
@Override
public boolean execute(CommandSender sender, String[] args) {
try {
ExchangeRule exchangeRule = ExchangeRule.parseRuleBlock(((Player) sender).getItemInHand());
int itemAmount = ((Player) sender).getItemInHand().getAmount();
if ((args[0].equalsIgnoreCase("commonname") || args[0].equalsIgnoreCase("c"))) {
if(args.length == 2) {
if(!ItemExchangePlugin.NAME_MATERIAL.containsKey(args[1])) {
sender.sendMessage(ChatColor.RED + "Material not found.");
return true;
}
ItemStack itemStack = ItemExchangePlugin.NAME_MATERIAL.get(args[1]);
exchangeRule.setMaterial(itemStack.getType());
exchangeRule.setDurability(itemStack.getDurability());
sender.sendMessage(ChatColor.GREEN + "Material changed successfully.");
}
else {
sender.sendMessage(ChatColor.RED + "Usage: /ies commonname <name>");
return true;
}
}
else if ((args[0].equalsIgnoreCase("material") || args[0].equalsIgnoreCase("m"))) {
if(args.length == 2) {
Material m = Material.getMaterial(args[1]);
if(m == null) {
try {
m = Material.getMaterial(Integer.parseInt(args[1]));
}
catch(NumberFormatException e) {}
}
if(m != null) {
exchangeRule.setMaterial(m);
sender.sendMessage(ChatColor.GREEN + "Material changed successfully.");
}
else {
sender.sendMessage(ChatColor.RED + "Material not found.");
return true;
}
}
else {
sender.sendMessage(ChatColor.RED + "Usage: /ies material <name|id>");
return true;
}
}
else if ((args[0].equalsIgnoreCase("amount") || args[0].equalsIgnoreCase("a"))) {
if(args.length == 2) {
try {
int amount = Integer.valueOf(args[1]);
if(amount < 1) {
sender.sendMessage(ChatColor.RED + "Invalid amount.");
return true;
}
else {
exchangeRule.setAmount(Integer.valueOf(args[1]));
sender.sendMessage(ChatColor.GREEN + "Amount changed successfully.");
}
}
catch(NumberFormatException e) {
sender.sendMessage(ChatColor.RED + "Invalid number.");
return true;
}
}
else {
sender.sendMessage(ChatColor.RED + "Usage: /ies amount <name>");
return true;
}
}
else if ((args[0].equalsIgnoreCase("durability") || args[0].equalsIgnoreCase("d"))) {
if(args.length == 2) {
try {
short durability = Short.valueOf(args[1]);
exchangeRule.setDurability(durability);
sender.sendMessage(ChatColor.GREEN + "Durability changed successfully.");
}
catch(NumberFormatException e) {
sender.sendMessage(ChatColor.RED + "Invalid durability.");
return true;
}
}
else {
sender.sendMessage(ChatColor.RED + "Usage: /ies durability <amount>");
return true;
}
}
else if (args[0].equalsIgnoreCase("allowenchantments") || args[0].equalsIgnoreCase("allowenchants")) {
exchangeRule.setUnlistedEnchantmentsAllowed(true);
sender.sendMessage(ChatColor.GREEN + "Unlisted enchantments are now allowed.");
}
else if (args[0].equalsIgnoreCase("denyenchantments") || args[0].equalsIgnoreCase("denyenchants")) {
exchangeRule.setUnlistedEnchantmentsAllowed(false);
sender.sendMessage(ChatColor.GREEN + "Unlisted enchantments are now denied.");
}
else if ((args[0].equalsIgnoreCase("enchantment") || args[0].equalsIgnoreCase("e"))) {
if(args.length != 2) {
sender.sendMessage(ChatColor.RED + "Usage: /ieset enchantment <+/?/-><enchantment abbrv.>[level]");
return true;
}
char first = args[1].charAt(0);
boolean requiresLevel = first == '+';
if(!requiresLevel)
args[1] = args[1].replaceAll("[0-9]", "");
String abbrv = args[1].substring(1, requiresLevel ? args[1].length() - 1 : args[1].length());
if(!ItemExchangePlugin.ABBRV_ENCHANTMENT.containsKey(abbrv)) {
StringBuilder enchantments = new StringBuilder();
Iterator<String> iterator = ItemExchangePlugin.ABBRV_ENCHANTMENT.keySet().iterator();
while(iterator.hasNext()) {
enchantments.append(iterator.next());
if(iterator.hasNext()) {
enchantments.append(", ");
}
}
sender.sendMessage(ChatColor.RED + "Invalid enchantment specified.");
sender.sendMessage(ChatColor.YELLOW + "Valid enchantments: " + enchantments.toString());
return true;
}
Enchantment enchantment = Enchantment.getByName(ItemExchangePlugin.ABBRV_ENCHANTMENT.get(abbrv));
int level;
try {
level = requiresLevel ? Integer.parseInt(String.valueOf((args[1].charAt(args[1].length() - 1)))) : 1;
}
catch (NumberFormatException e) {
sender.sendMessage(ChatColor.RED + "This command requires a level.");
return true;
}
if(level < 1) {
sender.sendMessage(ChatColor.RED + "Enchantment level must be at least 1.");
return true;
}
if (first == '+') {
exchangeRule.requireEnchantment(enchantment, level);
exchangeRule.removeExcludedEnchantment(enchantment);
sender.sendMessage(ChatColor.GREEN + "Successfully added required enchantment.");
}
else if (first == '-') {
exchangeRule.excludeEnchantment(enchantment);
exchangeRule.removeRequiredEnchantment(enchantment);
sender.sendMessage(ChatColor.GREEN + "Successfully added excluded enchantment.");
}
else if (first == '?') {
exchangeRule.removeRequiredEnchantment(enchantment);
exchangeRule.removeExcludedEnchantment(enchantment);
sender.sendMessage(ChatColor.GREEN + "Successfully removed rules relating to enchantment.");
}
else {
sender.sendMessage(ChatColor.RED + "Usage: /ieset enchantment <+/?/-><enchantment abbrv.>[level]");
return true;
}
}
else if ((args[0].equalsIgnoreCase("displayname") || args[0].equalsIgnoreCase("n"))) {
if(args.length == 2) {
exchangeRule.setDisplayName(StringUtils.join(args, ' ', 1, args.length));
sender.sendMessage(ChatColor.GREEN + "Successfully changed display name.");
}
else if(args.length == 1) {
exchangeRule.setDisplayName("");
sender.sendMessage(ChatColor.GREEN + "Successfully removed display name.");
}
else {
sender.sendMessage(ChatColor.RED + "Usage: /ies displayname [name]");
return true;
}
}
else if ((args[0].equalsIgnoreCase("lore") || args[0].equalsIgnoreCase("l"))) {
if(args.length == 2) {
exchangeRule.setLore(args[1].split(";"));
sender.sendMessage(ChatColor.GREEN + "Successfully changed lore.");
}
else if(args.length == 1) {
exchangeRule.setLore(new String[0]);
sender.sendMessage(ChatColor.GREEN + "Successfully removed lore.");
}
else {
sender.sendMessage(ChatColor.RED + "Usage: /ies lore [line 1[;line 2[; ...]]]");
return true;
}
}
else if (args[0].equalsIgnoreCase("group")) {
if(exchangeRule.getType() != RuleType.INPUT) {
sender.sendMessage(ChatColor.RED + "This command can only be run on input blocks!");
return true;
}
if(args.length == 2) {
Faction group = Citadel.getGroupManager().getGroup(args[1]);
if(group != null) {
exchangeRule.setCitadelGroup(group);
sender.sendMessage(ChatColor.GREEN + "Successfully changed Citadel group.");
}
else {
sender.sendMessage(ChatColor.RED + "The specified Citadel group does not exist!");
}
}
else if(args.length == 1) {
exchangeRule.setCitadelGroup(null);
sender.sendMessage(ChatColor.GREEN + "Successfully removed Citadel group.");
}
else {
sender.sendMessage(ChatColor.RED + "Usage: /ies group [citadel group]");
return true;
}
}
else if (args[0].equalsIgnoreCase("switchio") || args[0].equalsIgnoreCase("s")) {
exchangeRule.switchIO();
sender.sendMessage(ChatColor.GREEN + "Successfully switched input/output.");
}
else {
throw new IllegalArgumentException(ChatColor.RED + "Incorrect Field: " + args[0]);
}
ItemStack itemstack = exchangeRule.toItemStack();
itemstack.setAmount(itemAmount);
((Player) sender).setItemInHand(itemstack);
}
catch (ExchangeRuleParseException e) {
sender.sendMessage(ChatColor.RED + "You are not holding an exchange rule.");
}
catch (NumberFormatException e) {
sender.sendMessage(ChatColor.RED + "Error when parsing number.");
}
catch (IllegalArgumentException e) {
sender.sendMessage(e.getMessage());
}
catch (Exception e) {
e.printStackTrace();
}
return true;
}
}
|
package de.dhbw.mannheim.cloudraid.fs;
import java.io.File;
import java.nio.file.Files;
import java.util.Vector;
import java.util.concurrent.ConcurrentHashMap;
public class RecursiveFileSystemWatcher extends Thread {
private File dir;
private final static String TMP = System.getProperty("os.name")
.toLowerCase().contains("windows") ? "C:\\temp\\cloudraid\\"
: "/tmp/cloudraid/";
private final static File TMP_FILE = new File(TMP);
/**
* A map containing all known files.
*/
private static ConcurrentHashMap<String, Long> fileMap = new ConcurrentHashMap<String, Long>();
private Vector<String> keySet = new Vector<String>();
private long sleepTime = 10000;
/**
* Creates a RecursiveFileSystemWatcher that runs every 10s.
*/
public RecursiveFileSystemWatcher() {
dir = new File(System.getProperty("user.home") + "/tmp/");
System.out.println("Watching directory " + dir.getAbsolutePath());
this.setPriority(MIN_PRIORITY);
}
/**
* Creates a RecursiveFileSystemWatcher that runs in the given interval.
*
* @param sleepTime
* The sleeping time in ms.
*/
public RecursiveFileSystemWatcher(long sleepTime) {
this();
this.sleepTime = sleepTime;
}
/**
* {@inheritDoc}
*/
public void run() {
while (!isInterrupted()) {
keySet = new Vector<String>(fileMap.keySet());
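// Snapshot all currently known paths; checkFile() removes every path it still finds,
// so whatever remains in keySet after the scan is treated as deleted below.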
if (!this.dir.exists()) {
System.err.println("The watch directory does not exist");
break;
} else {
this.checkDir(this.dir);
}
// all files still in "keySet" were not found, this means they were
// deleted
for (String k : keySet) {
System.out.println(k + " was deleted.");
fileMap.remove(k);
}
try {
Thread.sleep(sleepTime);
} catch (InterruptedException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
System.err.println("The file system watcher is stopped");
}
/**
* Runs through the list of files in the given directory and handles the
* files according to their type.
*
* @param dir
* The directory to be handled.
*/
private void checkDir(File dir) {
if (dir.isDirectory()) {
for (File f : dir.listFiles()) {
if (Files.isSymbolicLink(f.toPath())) {
System.err.println("I do not handle the symbolic link at "
+ f.getAbsolutePath());
} else if (f.isDirectory()) {
this.checkDir(f);
} else if (f.isFile()) {
this.checkFile(f);
} else {
System.err
.println("Whoops! I don't know how to handle the file "
+ f.getAbsolutePath());
}
}
}
}
/**
* Checks the given file and handles it according to the status.
*
* @param file
* The file to be handled.
*/
private void checkFile(File file) {
String name = file.getAbsolutePath();
if (fileMap.containsKey(name)) {
if (file.lastModified() == fileMap.get(name)) {
// nothing to do, file already indexed
// System.out.println(file.getAbsolutePath() +
// " already exists.");
} else {
// the file changed
System.out.println(file.getAbsolutePath() + " was changed.");
}
keySet.remove(name);
} else {
// a new file is found
System.out.println(file.getAbsolutePath() + " is a new file.");
fileMap.put(file.getAbsolutePath(), file.lastModified());
}
}
public static void main(String[] args) {
new RecursiveFileSystemWatcher(60000).start();
new RecursiveFileSystemWatcher(50000).start();
}
}
|
package dr.evomodel.treedatalikelihood;
import beagle.Beagle;
import dr.evomodel.branchmodel.BranchModel;
import dr.evomodel.substmodel.EigenDecomposition;
import dr.evomodel.substmodel.SubstitutionModel;
import dr.evomodel.treedatalikelihood.BeagleDataLikelihoodDelegate.PreOrderSettings;
import dr.evolution.tree.Tree;
import dr.util.Timer;
import java.io.Serializable;
import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Deque;
import java.util.List;
/**
* @author Andrew Rambaut
* @author Filip Bielejec
* @author Marc A. Suchard
* @version $Id$
*/
public final class SubstitutionModelDelegate implements EvolutionaryProcessDelegate, Serializable {
private static final boolean DEBUG = false;
private static final boolean RUN_IN_SERIES = false;
public static final boolean MEASURE_RUN_TIME = false;
// private final boolean cacheQMatrices;
private final PreOrderSettings settings;
public double updateTime;
public double convolveTime;
private static final int BUFFER_POOL_SIZE_DEFAULT = 100;
private final Tree tree;
private final List<SubstitutionModel> substitutionModelList;
private final BranchModel branchModel;
private final int eigenCount;
private final int nodeCount;
private final int extraBufferCount;
private final int reserveBufferIndex;
private final BufferIndexHelper eigenBufferHelper;
private final BufferIndexHelper matrixBufferHelper;
private Deque<Integer> availableBuffers = new ArrayDeque<Integer>();
/**
* A class which handles substitution models including epoch models where multiple
* substitution models on a branch are convolved.
* @param tree the tree whose branches the substitution models apply to
* @param branchModel describes which substitution models are used on each branch
*/
public SubstitutionModelDelegate(Tree tree, BranchModel branchModel) {
this(tree, branchModel, 0, BUFFER_POOL_SIZE_DEFAULT, PreOrderSettings.getDefault());
}
/**
* A class which handles substitution models including epoch models where multiple
* substitution models on a branch are convolved.
* @param tree the tree whose branches the substitution models apply to
* @param branchModel describes which substitution models are used on each branch
* @param partitionNumber which data partition this is (used to offset eigen and matrix buffer numbers)
*/
public SubstitutionModelDelegate(Tree tree, BranchModel branchModel, int partitionNumber) {
this(tree, branchModel, partitionNumber, BUFFER_POOL_SIZE_DEFAULT, PreOrderSettings.getDefault());
}
public SubstitutionModelDelegate(Tree tree, BranchModel branchModel, int partitionNumber, int bufferPoolSize){
this(tree, branchModel, partitionNumber, bufferPoolSize, PreOrderSettings.getDefault());
}
public SubstitutionModelDelegate(Tree tree, BranchModel branchModel, PreOrderSettings settings) {
this(tree, branchModel, 0, BUFFER_POOL_SIZE_DEFAULT, settings);
}
public SubstitutionModelDelegate(Tree tree, BranchModel branchModel, int partitionNumber, int bufferPoolSize,
PreOrderSettings settings) {
if (MEASURE_RUN_TIME) {
updateTime = 0;
convolveTime = 0;
}
this.tree = tree;
this.substitutionModelList = branchModel.getSubstitutionModels();
this.branchModel = branchModel;
eigenCount = substitutionModelList.size();
nodeCount = tree.getNodeCount();
// two eigen buffers for each decomposition for store and restore.
eigenBufferHelper = new BufferIndexHelper(eigenCount, 0, partitionNumber);
// two matrices for each node less the root
matrixBufferHelper = new BufferIndexHelper(nodeCount, 0, partitionNumber);
this.extraBufferCount = branchModel.requiresMatrixConvolution() ?
(bufferPoolSize > 0 ? bufferPoolSize : BUFFER_POOL_SIZE_DEFAULT) : 0;
if (branchModel.requiresMatrixConvolution() && this.extraBufferCount < eigenCount) {
throw new RuntimeException("SubstitutionModelDelegate requires at least " + eigenCount + " extra buffers to convolve matrices");
}
for (int i = 0; i < extraBufferCount; i++) {
pushAvailableBuffer(i + matrixBufferHelper.getBufferCount());
}
// one extra created as a reserve
// which is used to free up buffers when the avail stack is empty.
reserveBufferIndex = matrixBufferHelper.getBufferCount() + extraBufferCount;
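// Transition-matrix buffer layout: the first matrixBufferHelper.getBufferCount() indices
// hold the per-node matrices (with store/restore flipping), followed by extraBufferCount
// scratch buffers used when convolving epoch-model matrices, followed by the single
// reserve buffer at reserveBufferIndex.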
if (DEBUG) {
System.out.println("Creating reserve buffer with index: "
+ reserveBufferIndex);
}
// this.cacheQMatrices = cacheQMatrices;
this.settings = settings;
}// END: Constructor
@Override
public boolean canReturnComplexDiagonalization() {
for (SubstitutionModel model : substitutionModelList) {
if (model.canReturnComplexDiagonalization()) {
return true;
}
}
return false;
}
@Override
public int getEigenBufferCount() {
return eigenBufferHelper.getBufferCount();
}
@Override
public int getMatrixBufferCount() {
// plus one for the reserve buffer
return matrixBufferHelper.getBufferCount() + extraBufferCount + 1;
}
@Override
public int getInfinitesimalMatrixBufferIndex(int branchIndex) {
return getMatrixBufferCount() + getEigenIndex(branchIndex);
}
private int getInfinitesimalMatrixBufferIndexByEigenIndex(int eigenIndex) {
return getMatrixBufferCount() + eigenIndex;
}
@Override
public int getInfinitesimalSquaredMatrixBufferIndex(int branchIndex) {
return getMatrixBufferCount() + getEigenBufferCount() + getEigenIndex(branchIndex);
}
@Override
public int getFirstOrderDifferentialMatrixBufferIndex(int branchIndex) {
int bufferIndex = matrixBufferHelper.getBufferCount() + getInfinitesimalMatrixBufferCount(settings) + branchIndex;
return bufferIndex;
}
@Override
public int getSecondOrderDifferentialMatrixBufferIndex(int branchIndex) {
return getFirstOrderDifferentialMatrixBufferIndex(branchIndex) + nodeCount - 1;
}
@Override
public void cacheInfinitesimalMatrix(Beagle beagle, int bufferIndex, double[] differentialMatrix) {
throw new RuntimeException("Not yet implemented");
}
@Override
public void cacheInfinitesimalSquaredMatrix(Beagle beagle, int bufferIndex, double[] differentialMatrix) {
throw new RuntimeException("Not yet implemented");
}
@Override
public void cacheFirstOrderDifferentialMatrix(Beagle beagle, int branchIndex, double[] differentialMassMatrix) {
beagle.setTransitionMatrix(getFirstOrderDifferentialMatrixBufferIndex(branchIndex), differentialMassMatrix, 0.0);
}
private int getSquaredInfinitesimalMatrixBufferIndexByEigenIndex(int eigenIndex) {
return getMatrixBufferCount() + getEigenBufferCount() + eigenIndex;
}
@Override
public int getCachedMatrixBufferCount(PreOrderSettings settings) {
int matrixBufferCount = getInfinitesimalMatrixBufferCount(settings) + getDifferentialMassMatrixBufferCount(settings);
return matrixBufferCount;
}
private int getInfinitesimalMatrixBufferCount(PreOrderSettings settings) {
if (settings.branchRateDerivative) {
return 2 * getEigenBufferCount();
} else {
return 0;
}
}
private int getDifferentialMassMatrixBufferCount(PreOrderSettings settings) {
if (settings.branchInfinitesimalDerivative) {
return 2 * (nodeCount - 1);
} else {
return 0;
}
}
@Override
public int getSubstitutionModelCount() {
return substitutionModelList.size();
}
@Override
public SubstitutionModel getSubstitutionModel(int index) {
return substitutionModelList.get(index);
}
@Override
public int getEigenIndex(int bufferIndex) {
return eigenBufferHelper.getOffsetIndex(bufferIndex);
}
@Override
public int getMatrixIndex(int branchIndex) {
return matrixBufferHelper.getOffsetIndex(branchIndex);
}
@Override
public double[] getRootStateFrequencies() {
return branchModel.getRootFrequencyModel().getFrequencies();
}// END: getRootStateFrequencies
@Override
public void updateSubstitutionModels(Beagle beagle, boolean flipBuffers) {
for (int i = 0; i < eigenCount; i++) {
if (flipBuffers) {
eigenBufferHelper.flipOffset(i);
}
SubstitutionModel substitutionModel = substitutionModelList.get(i);
EigenDecomposition ed = substitutionModel.getEigenDecomposition();
beagle.setEigenDecomposition(
eigenBufferHelper.getOffsetIndex(i),
ed.getEigenVectors(),
ed.getInverseEigenVectors(),
ed.getEigenValues());
}
}
@Override
public SubstitutionModel getSubstitutionModelForBranch(int branchIndex) {
BranchModel.Mapping mapping = branchModel.getBranchModelMapping(tree.getNode(branchIndex));
int[] order = mapping.getOrder();
if (order.length > 1) {
throw new RuntimeException("Not yet implemented");
}
return getSubstitutionModel(order[0]);
}
@Override
public void updateTransitionMatrices(Beagle beagle, int[] branchIndices, double[] edgeLength, int updateCount, boolean flipBuffers) {
int[][] probabilityIndices = new int[eigenCount][updateCount];
double[][] edgeLengths = new double[eigenCount][updateCount];
int[] counts = new int[eigenCount];
List<Deque<Integer>> convolutionList = new ArrayList<Deque<Integer>>();
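// Branches are grouped by substitution model so that all matrices sharing an eigendecomposition
// can be updated in one BEAGLE call: probabilityIndices, edgeLengths and counts hold the batched
// arguments per model, while convolutionList collects, for each epoch branch, the buffers that
// must later be convolved into that branch's final transition matrix.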
for (int i = 0; i < updateCount; i++) {
BranchModel.Mapping mapping = branchModel.getBranchModelMapping(tree.getNode(branchIndices[i]));
int[] order = mapping.getOrder();
double[] weights = mapping.getWeights();
if (order.length == 1) {
int k = order[0];
if (flipBuffers) {
matrixBufferHelper.flipOffset(branchIndices[i]);
}
probabilityIndices[k][counts[k]] = matrixBufferHelper.getOffsetIndex(branchIndices[i]);
edgeLengths[k][counts[k]] = edgeLength[i];
counts[k]++;
} else {
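// Epoch branch: split the branch length among its models in proportion to their (normalised)
// weights, compute one matrix per model into a scratch buffer, and convolve them afterwards.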
double sum = 0.0;
for (double w : weights) {
sum += w;
}
if (getAvailableBufferCount() < order.length) {
// too few buffers available, process what we have and continue...
if (flipBuffers) { throw new UnsupportedOperationException("flipping not implemented for Epoch models"); }
computeTransitionMatrices(beagle, probabilityIndices, edgeLengths, counts);
convolveMatrices(beagle, convolutionList);
// reset the counts
for (int k = 0; k < eigenCount; k++) {
counts[k] = 0;
}
}
Deque<Integer> bufferIndices = new ArrayDeque<Integer>();
for (int j = 0; j < order.length; j++) {
int buffer = popAvailableBuffer();
if (buffer < 0) {
// no buffers available
throw new RuntimeException("Ran out of buffers for transition matrices");
}
int k = order[j];
probabilityIndices[k][counts[k]] = buffer;
edgeLengths[k][counts[k]] = weights[j] * edgeLength[i] / sum;
counts[k]++;
bufferIndices.add(buffer);
}
bufferIndices.add(matrixBufferHelper.getOffsetIndex(branchIndices[i]));
convolutionList.add(bufferIndices);
}// END: if convolution needed
}// END: i loop
computeTransitionMatrices(beagle, probabilityIndices, edgeLengths, counts);
convolveMatrices(beagle, convolutionList);
}// END: updateTransitionMatrices
@Override
public void flipTransitionMatrices(int[] branchIndices, int updateCount) {
for (int i = 0; i < updateCount; i++) {
matrixBufferHelper.flipOffset(branchIndices[i]);
}
}
private void computeTransitionMatrices(Beagle beagle, int[][] probabilityIndices, double[][] edgeLengths, int[] counts) {
Timer timer;
if (MEASURE_RUN_TIME) {
timer = new Timer();
timer.start();
}
if (DEBUG) {
System.out.print("Computing matrices:");
}
for (int i = 0; i < eigenCount; i++) {
if (DEBUG) {
for (int j = 0; j < counts[i]; j++) {
// System.out.print(" " + probabilityIndices[i][j]);
System.out.print(" " + probabilityIndices[i][j] + " (" + edgeLengths[i][j] + ")");
}
}
if (counts[i] > 0) {
beagle.updateTransitionMatrices(eigenBufferHelper.getOffsetIndex(i),
probabilityIndices[i],
null, // firstDerivativeIndices
null, // secondDerivativeIndices
edgeLengths[i],
counts[i]);
}
}
if (DEBUG) {
System.out.println();
}
if (MEASURE_RUN_TIME) {
timer.stop();
double timeInSeconds = timer.toSeconds();
updateTime += timeInSeconds;
}
}//END: computeTransitionMatrices
private void convolveMatrices(Beagle beagle, List<Deque<Integer>> convolutionList) {
Timer timer;
if (MEASURE_RUN_TIME) {
timer = new Timer();
timer.start();
}
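// Repeatedly pop pairs of matrices off each convolution deque and convolve them pairwise in
// batched BEAGLE calls; convolveAndRelease returns intermediate scratch buffers to the pool,
// and the single reserve buffer is used if the pool runs dry part-way through a batch.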
while (convolutionList.size() > 0) {
int[] firstConvolutionBuffers = new int[nodeCount];
int[] secondConvolutionBuffers = new int[nodeCount];
int[] resultConvolutionBuffers = new int[nodeCount];
int operationsCount = 0;
List<Deque<Integer>> empty = new ArrayList<Deque<Integer>>();
for (Deque<Integer> convolve : convolutionList) {
if (convolve.size() > 3) {
firstConvolutionBuffers[operationsCount] = convolve.pop();
secondConvolutionBuffers[operationsCount] = convolve.pop();
int buffer;
boolean done;
do {
done = true;
buffer = popAvailableBuffer();
if (buffer < 0) {
// no buffers available
// throw new RuntimeException("All out of buffers");
// we have run out of buffers, process what we have and continue...
if (DEBUG) {
System.out.println("Ran out of buffers for convolving - computing current list.");
System.out.print("Convolving " + operationsCount + " matrices:");
for (int i = 0; i < operationsCount; i++) {
System.out.print(" " + firstConvolutionBuffers[i] + "*" + secondConvolutionBuffers[i] + "->" + resultConvolutionBuffers[i]);
}
System.out.println();
}
if (operationsCount > 0) {
convolveAndRelease(beagle, firstConvolutionBuffers, secondConvolutionBuffers, resultConvolutionBuffers, operationsCount);
// copy the uncompleted operation back down to the beginning of the operations list
firstConvolutionBuffers[0] = firstConvolutionBuffers[operationsCount];
secondConvolutionBuffers[0] = secondConvolutionBuffers[operationsCount];
// reset the operation count
operationsCount = 0;
done = false;
// there should be enough spare buffers to get a resultConvolutionBuffer for this operation now
} else {
// only one partially setup operation so there would be none to free up
// in this case we will use the reserve buffer
resultConvolutionBuffers[operationsCount] = getReserveBuffer();
convolveAndRelease(beagle, firstConvolutionBuffers, secondConvolutionBuffers, resultConvolutionBuffers, 1);
convolve.push(getReserveBuffer());
done = true; // break out of the do loop
}
}
} while (!done);
if (buffer >= 0) {
// if the buffer is still negative then the loop above will have used the reserve buffer
// to complete the convolution.
resultConvolutionBuffers[operationsCount] = buffer;
convolve.push(buffer);
operationsCount++;
}
} else if (convolve.size() == 3) {
firstConvolutionBuffers[operationsCount] = convolve.pop();
secondConvolutionBuffers[operationsCount] = convolve.pop();
resultConvolutionBuffers[operationsCount] = convolve.pop();
operationsCount++;
} else {
throw new RuntimeException("Unexpected convolve list size");
}
if (convolve.size() == 0) {
empty.add(convolve);
}
}
if (DEBUG) {
System.out.print("Convolving " + operationsCount+ " matrices:");
for (int i = 0; i < operationsCount; i++) {
System.out.print(" " + firstConvolutionBuffers[i] + "*" + secondConvolutionBuffers[i] + "->" + resultConvolutionBuffers[i]);
}
System.out.println();
}
convolveAndRelease(beagle, firstConvolutionBuffers, secondConvolutionBuffers, resultConvolutionBuffers, operationsCount);
convolutionList.removeAll(empty);
}
if (MEASURE_RUN_TIME) {
timer.stop();
double timeInSeconds = timer.toSeconds();
convolveTime += timeInSeconds;
}
}// END: convolveMatrices
private void convolveAndRelease(Beagle beagle, int[] firstConvolutionBuffers, int[] secondConvolutionBuffers, int[] resultConvolutionBuffers, int operationsCount) {
if (RUN_IN_SERIES) {
if (operationsCount > 1) {
throw new RuntimeException("Unable to convolve matrices in series");
}
}
beagle.convolveTransitionMatrices(firstConvolutionBuffers,
secondConvolutionBuffers,
resultConvolutionBuffers,
operationsCount // count
);
for (int i = 0; i < operationsCount; i++) {
if (firstConvolutionBuffers[i] >= matrixBufferHelper.getBufferCount() && firstConvolutionBuffers[i] != reserveBufferIndex) {
pushAvailableBuffer(firstConvolutionBuffers[i]);
}
if (secondConvolutionBuffers[i] >= matrixBufferHelper.getBufferCount() && secondConvolutionBuffers[i] != reserveBufferIndex) {
pushAvailableBuffer(secondConvolutionBuffers[i]);
}
}
}//END: convolveAndRelease
private int getAvailableBufferCount() {
if (RUN_IN_SERIES) {
return 0;
} else {
return availableBuffers.size();
}
}
private int popAvailableBuffer() {
if (availableBuffers.isEmpty()) {
return -1;
}
return availableBuffers.pop();
}
/**
* The reserve buffer is a single extra buffer used to complete a convolution operation
* when no spare buffers are available.
* @return the index of the reserve buffer
*/
private int getReserveBuffer() {
return reserveBufferIndex;
}
private void pushAvailableBuffer(int index) {
availableBuffers.push(index);
}
@Override
public void storeState() {
eigenBufferHelper.storeState();
matrixBufferHelper.storeState();
}
@Override
public void restoreState() {
eigenBufferHelper.restoreState();
matrixBufferHelper.restoreState();
}
}// END: class
|
package dr.inference.operators.hmc;
import dr.inference.hmc.GradientWrtParameterProvider;
import dr.inference.hmc.PathGradient;
import dr.inference.model.Likelihood;
import dr.inference.model.Parameter;
import dr.inference.operators.AbstractAdaptableOperator;
import dr.inference.operators.AdaptationMode;
import dr.inference.operators.GeneralOperator;
import dr.inference.operators.PathDependent;
import dr.math.MathUtils;
import dr.math.MultivariateFunction;
import dr.math.NumericalDerivative;
import dr.math.matrixAlgebra.ReadableVector;
import dr.math.matrixAlgebra.WrappedVector;
import dr.util.Transform;
/**
* @author Max Tolkoff
* @author Marc A. Suchard
*/
public class HamiltonianMonteCarloOperator extends AbstractAdaptableOperator
implements GeneralOperator, PathDependent {
final GradientWrtParameterProvider gradientProvider;
protected double stepSize;
protected LeapFrogEngine leapFrogEngine;
protected final Parameter parameter;
protected final MassPreconditioner preconditioning;
private final Options runtimeOptions;
protected final double[] mask;
protected final Transform transform;
HamiltonianMonteCarloOperator(AdaptationMode mode, double weight, GradientWrtParameterProvider gradientProvider,
Parameter parameter, Transform transform, Parameter mask,
double stepSize, int nSteps,
double randomStepCountFraction,
double gradientCheckTolerance) {
this(mode, weight, gradientProvider,
parameter, transform, mask,
new Options(stepSize, nSteps, randomStepCountFraction,
0, 0, 0,
0, gradientCheckTolerance,
10, 0.1),
MassPreconditioner.Type.NONE
);
}
public HamiltonianMonteCarloOperator(AdaptationMode mode, double weight,
GradientWrtParameterProvider gradientProvider,
Parameter parameter, Transform transform, Parameter maskParameter,
Options runtimeOptions,
MassPreconditioner.Type preconditioningType) {
super(mode, 0.8); // Stan default
setWeight(weight);
this.gradientProvider = gradientProvider;
this.runtimeOptions = runtimeOptions;
this.stepSize = runtimeOptions.initialStepSize;
this.preconditioning = preconditioningType.factory(gradientProvider, transform, runtimeOptions);
this.parameter = parameter;
this.mask = buildMask(maskParameter);
this.transform = transform;
this.leapFrogEngine = constructLeapFrogEngine(transform);
}
protected LeapFrogEngine constructLeapFrogEngine(Transform transform) {
return (transform != null ?
new LeapFrogEngine.WithTransform(parameter, transform,
getDefaultInstabilityHandler(), preconditioning, mask) :
new LeapFrogEngine.Default(parameter,
getDefaultInstabilityHandler(), preconditioning, mask));
}
@Override
public String getOperatorName() {
return "Vanilla HMC operator";
}
private boolean shouldUpdatePreconditioning() {
return ((runtimeOptions.preconditioningUpdateFrequency > 0)
&& (((getCount() % runtimeOptions.preconditioningUpdateFrequency == 0)
&& (getCount() > runtimeOptions.preconditioningDelay))));
}
private static double[] buildMask(Parameter maskParameter) {
if (maskParameter == null) return null;
double[] mask = new double[maskParameter.getDimension()];
for (int i = 0; i < mask.length; ++i) {
mask[i] = (maskParameter.getParameterValue(i) == 0.0) ? 0.0 : 1.0;
}
return mask;
}
@Override
public double doOperation() {
throw new RuntimeException("Should not be executed");
}
@Override
public double doOperation(Likelihood joint) {
if (shouldCheckStepSize()) {
checkStepSize();
}
if (shouldCheckGradient()) {
checkGradient(joint);
}
if (shouldUpdatePreconditioning()) {
preconditioning.storeSecant(
new WrappedVector.Raw(leapFrogEngine.getLastGradient()),
new WrappedVector.Raw(leapFrogEngine.getLastPosition())
);
preconditioning.updateMass();
}
try {
return leapFrog();
} catch (NumericInstabilityException e) {
return Double.NEGATIVE_INFINITY;
}
}
@Override
public void setPathParameter(double beta) {
if (gradientProvider instanceof PathGradient) {
((PathGradient) gradientProvider).setPathParameter(beta);
}
}
private boolean shouldCheckStepSize() {
return getCount() < 1 && getMode() == AdaptationMode.ADAPTATION_ON;
}
private void checkStepSize() {
double[] initialPosition = parameter.getParameterValues();
int iterations = 0;
boolean acceptableSize = false;
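// Shrink the step size until a single trial trajectory yields a finite log-likelihood,
// restoring the initial position after every attempt.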
while (!acceptableSize && iterations < runtimeOptions.checkStepSizeMaxIterations) {
try {
leapFrog();
double logLikelihood = gradientProvider.getLikelihood().getLogLikelihood();
if (!Double.isNaN(logLikelihood) && !Double.isInfinite(logLikelihood)) {
acceptableSize = true;
}
} catch (Exception exception) {
// Do nothing
}
if (!acceptableSize) {
stepSize *= runtimeOptions.checkStepSizeReductionFactor;
}
ReadableVector.Utils.setParameter(initialPosition, parameter); // Restore initial position
++iterations;
}
if (!acceptableSize) {
throw new RuntimeException("Unable to find acceptable initial HMC step-size");
}
}
private boolean shouldCheckGradient() {
return getCount() < runtimeOptions.gradientCheckCount;
}
private void checkGradient(final Likelihood joint) {
if (parameter.getDimension() != gradientProvider.getDimension()) {
throw new RuntimeException("Unequal dimensions");
}
MultivariateFunction numeric = new MultivariateFunction() {
@Override
public double evaluate(double[] argument) {
if (transform == null) {
ReadableVector.Utils.setParameter(argument, parameter);
return joint.getLogLikelihood();
} else {
double[] untransformedValue = transform.inverse(argument, 0, argument.length);
ReadableVector.Utils.setParameter(untransformedValue, parameter);
return joint.getLogLikelihood() - transform.getLogJacobian(untransformedValue, 0, untransformedValue.length);
}
}
@Override
public int getNumArguments() {
return parameter.getDimension();
}
@Override
public double getLowerBound(int n) {
return parameter.getBounds().getLowerLimit(n);
}
@Override
public double getUpperBound(int n) {
return parameter.getBounds().getUpperLimit(n);
}
};
double[] analyticalGradientOriginal = gradientProvider.getGradientLogDensity();
double[] restoredParameterValue = parameter.getParameterValues();
if (transform == null) {
double[] numericGradientOriginal = NumericalDerivative.gradient(numeric, parameter.getParameterValues());
if (!MathUtils.isClose(analyticalGradientOriginal, numericGradientOriginal, runtimeOptions.gradientCheckTolerance)) {
String sb = "Gradients do not match:\n" +
"\tAnalytic: " + new WrappedVector.Raw(analyticalGradientOriginal) + "\n" +
"\tNumeric : " + new WrappedVector.Raw(numericGradientOriginal) + "\n";
throw new RuntimeException(sb);
}
} else {
double[] transformedParameter = transform.transform(parameter.getParameterValues(), 0,
parameter.getParameterValues().length);
double[] numericGradientTransformed = NumericalDerivative.gradient(numeric, transformedParameter);
double[] analyticalGradientTransformed = transform.updateGradientLogDensity(analyticalGradientOriginal,
parameter.getParameterValues(), 0, parameter.getParameterValues().length);
if (!MathUtils.isClose(analyticalGradientTransformed, numericGradientTransformed, runtimeOptions.gradientCheckTolerance)) {
String sb = "Transformed Gradients do not match:\n" +
"\tAnalytic: " + new WrappedVector.Raw(analyticalGradientTransformed) + "\n" +
"\tNumeric : " + new WrappedVector.Raw(numericGradientTransformed) + "\n";
throw new RuntimeException(sb);
}
}
ReadableVector.Utils.setParameter(restoredParameterValue, parameter);
}
static double[] mask(double[] vector, double[] mask) {
assert (mask == null || mask.length == vector.length);
if (mask != null) {
for (int i = 0; i < vector.length; ++i) {
vector[i] *= mask[i];
}
}
return vector;
}
static WrappedVector mask(WrappedVector vector, double[] mask) {
assert (mask == null || mask.length == vector.getDim());
if (mask != null) {
for (int i = 0; i < vector.getDim(); ++i) {
vector.set(i, vector.get(i) * mask[i]);
}
}
return vector;
}
private static final boolean DEBUG = false;
public static class Options {
final double initialStepSize;
final int nSteps;
final double randomStepCountFraction;
final int preconditioningUpdateFrequency;
final int preconditioningDelay;
final int preconditioningMemory;
final int gradientCheckCount;
final double gradientCheckTolerance;
final int checkStepSizeMaxIterations;
final double checkStepSizeReductionFactor;
public Options(double initialStepSize, int nSteps, double randomStepCountFraction,
int preconditioningUpdateFrequency, int preconditioningDelay, int preconditioningMemory,
int gradientCheckCount, double gradientCheckTolerance,
int checkStepSizeMaxIterations, double checkStepSizeReductionFactor) {
this.initialStepSize = initialStepSize;
this.nSteps = nSteps;
this.randomStepCountFraction = randomStepCountFraction;
this.preconditioningUpdateFrequency = preconditioningUpdateFrequency;
this.preconditioningDelay = preconditioningDelay;
this.preconditioningMemory = preconditioningMemory;
this.gradientCheckCount = gradientCheckCount;
this.gradientCheckTolerance = gradientCheckTolerance;
this.checkStepSizeMaxIterations = checkStepSizeMaxIterations;
this.checkStepSizeReductionFactor = checkStepSizeReductionFactor;
}
}
static class NumericInstabilityException extends Exception { }
private int getNumberOfSteps() {
int count = runtimeOptions.nSteps;
if (runtimeOptions.randomStepCountFraction > 0.0) {
double draw = count * (1.0 + runtimeOptions.randomStepCountFraction * (MathUtils.nextDouble() - 0.5));
count = Math.max(1, (int) draw);
}
return count;
}
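/**
* Kinetic energy 0.5 * p^T M^{-1} p, evaluated through the preconditioner's velocity terms.
*/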
double getKineticEnergy(ReadableVector momentum) {
final int dim = momentum.getDim();
double energy = 0.0;
for (int i = 0; i < dim; i++) {
energy += momentum.get(i) * preconditioning.getVelocity(i, momentum);
}
return energy / 2.0;
}
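/**
* One HMC proposal: draw a (masked, preconditioned) momentum, take a half-step momentum update,
* alternate full position and momentum steps for the chosen number of leapfrog steps, finish
* with a final half-step, and return the change in kinetic energy plus parameter log-Jacobian,
* i.e. this proposal's contribution to the log Hastings ratio.
*/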
private double leapFrog() throws NumericInstabilityException {
if (DEBUG) {
System.err.println("HMC step size: " + stepSize);
}
final double[] position = leapFrogEngine.getInitialPosition();
final WrappedVector momentum = mask(preconditioning.drawInitialMomentum(), mask);
final double prop = getKineticEnergy(momentum) +
leapFrogEngine.getParameterLogJacobian();
leapFrogEngine.updateMomentum(position, momentum.getBuffer(),
mask(gradientProvider.getGradientLogDensity(), mask), stepSize / 2);
int nStepsThisLeap = getNumberOfSteps();
for (int i = 0; i < nStepsThisLeap; i++) { // Leap-frog
leapFrogEngine.updatePosition(position, momentum, stepSize);
if (i < (nStepsThisLeap - 1)) {
try {
leapFrogEngine.updateMomentum(position, momentum.getBuffer(),
mask(gradientProvider.getGradientLogDensity(), mask), stepSize);
} catch (ArithmeticException e) {
throw new NumericInstabilityException();
}
}
}
leapFrogEngine.updateMomentum(position, momentum.getBuffer(),
mask(gradientProvider.getGradientLogDensity(), mask), stepSize / 2);
final double res = getKineticEnergy(momentum) +
leapFrogEngine.getParameterLogJacobian();
return prop - res; // log Hastings ratio
}
@Override
protected double getAdaptableParameterValue() {
return Math.log(stepSize);
}
@Override
public void setAdaptableParameterValue(double value) {
if (DEBUG) {
System.err.println("Setting adaptable parameter: " + getAdaptableParameter() + " -> " + value);
}
stepSize = Math.exp(value);
}
@Override
public double getRawParameter() {
return stepSize;
}
enum InstabilityHandler {
REJECT {
@Override
void checkValue(double x) throws NumericInstabilityException {
if (Double.isNaN(x)) throw new NumericInstabilityException();
}
},
DEBUG {
@Override
void checkValue(double x) throws NumericInstabilityException {
if (Double.isNaN(x)) {
System.err.println("Numerical instability in HMC momentum; throwing exception");
throw new NumericInstabilityException();
}
}
},
IGNORE {
@Override
void checkValue(double x) {
// Do nothing
}
};
abstract void checkValue(double x) throws NumericInstabilityException;
}
protected InstabilityHandler getDefaultInstabilityHandler() {
if (DEBUG) {
return InstabilityHandler.DEBUG;
} else {
return InstabilityHandler.REJECT;
}
}
@Override
public String getAdaptableParameterName() {
return "stepSize";
}
interface LeapFrogEngine {
double[] getInitialPosition();
double getParameterLogJacobian();
void updateMomentum(final double[] position,
final double[] momentum,
final double[] gradient,
final double functionalStepSize) throws NumericInstabilityException;
void updatePosition(final double[] position,
final WrappedVector momentum,
final double functionalStepSize);
void setParameter(double[] position);
double[] getLastGradient();
double[] getLastPosition();
class Default implements LeapFrogEngine {
final protected Parameter parameter;
final private InstabilityHandler instabilityHandler;
final private MassPreconditioner preconditioning;
final double[] mask;
double[] lastGradient;
double[] lastPosition;
protected Default(Parameter parameter, InstabilityHandler instabilityHandler,
MassPreconditioner preconditioning,
double[] mask) {
this.parameter = parameter;
this.instabilityHandler = instabilityHandler;
this.preconditioning = preconditioning;
this.mask = mask;
}
@Override
public double[] getInitialPosition() {
return parameter.getParameterValues();
}
@Override
public double getParameterLogJacobian() {
return 0;
}
@Override
public double[] getLastGradient() {
return lastGradient;
}
@Override
public double[] getLastPosition() {
return lastPosition;
}
@Override
public void updateMomentum(double[] position, double[] momentum, double[] gradient,
double functionalStepSize) throws NumericInstabilityException {
final int dim = momentum.length;
for (int i = 0; i < dim; ++i) {
momentum[i] += functionalStepSize * gradient[i];
instabilityHandler.checkValue(momentum[i]);
}
lastGradient = gradient;
lastPosition = position;
}
@Override
public void updatePosition(double[] position, WrappedVector momentum,
double functionalStepSize) {
final int dim = momentum.getDim();
for (int i = 0; i < dim; i++) {
position[i] += functionalStepSize * preconditioning.getVelocity(i, momentum);
}
setParameter(position);
}
public void setParameter(double[] position) {
ReadableVector.Utils.setParameter(position, parameter); // May not work with MaskedParameter?
}
}
class WithTransform extends Default {
final private Transform transform;
double[] unTransformedPosition;
private WithTransform(Parameter parameter, Transform transform,
InstabilityHandler instabilityHandler,
MassPreconditioner preconditioning,
double[] mask) {
super(parameter, instabilityHandler, preconditioning, mask);
this.transform = transform;
}
@Override
public double getParameterLogJacobian() {
return transform.getLogJacobian(unTransformedPosition, 0, unTransformedPosition.length);
}
@Override
public double[] getInitialPosition() {
unTransformedPosition = super.getInitialPosition();
return transform.transform(unTransformedPosition, 0, unTransformedPosition.length);
}
@Override
public void updateMomentum(double[] position, double[] momentum, double[] gradient,
double functionalStepSize) throws NumericInstabilityException {
gradient = transform.updateGradientLogDensity(gradient, unTransformedPosition,
0, unTransformedPosition.length);
mask(gradient, mask);
super.updateMomentum(position, momentum, gradient, functionalStepSize);
}
@Override
public void setParameter(double[] position) {
unTransformedPosition = transform.inverse(position, 0, position.length);
super.setParameter(unTransformedPosition);
}
}
}
}
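/*
* Editor's note: the following package-private class is NOT part of the original source file.
* It is a minimal, self-contained sketch of the same leapfrog scheme used by leapFrog() above,
* written for a plain double[] state with an identity mass matrix and a standard-normal target,
* purely to illustrate the half-step / full-step structure. All names in it are hypothetical.
*/
class LeapFrogIntegrationSketch {
    /** Gradient of the log-density of a standard normal target: d/dx log N(x; 0, I) = -x. */
    private static double[] gradientLogDensity(double[] position) {
        double[] gradient = new double[position.length];
        for (int i = 0; i < position.length; ++i) {
            gradient[i] = -position[i];
        }
        return gradient;
    }
    /**
     * Runs nSteps leapfrog steps in place: a half-step momentum update, alternating full
     * position and momentum updates, and a final half-step momentum update.
     */
    static void integrate(double[] position, double[] momentum, double stepSize, int nSteps) {
        double[] gradient = gradientLogDensity(position);
        for (int i = 0; i < momentum.length; ++i) {
            momentum[i] += 0.5 * stepSize * gradient[i]; // initial half-step for momentum
        }
        for (int step = 0; step < nSteps; ++step) {
            for (int i = 0; i < position.length; ++i) {
                position[i] += stepSize * momentum[i]; // full position step (identity mass)
            }
            gradient = gradientLogDensity(position);
            double scale = (step < nSteps - 1) ? stepSize : 0.5 * stepSize;
            for (int i = 0; i < momentum.length; ++i) {
                momentum[i] += scale * gradient[i]; // full step, or final half-step on the last iteration
            }
        }
    }
    public static void main(String[] args) {
        double[] position = {1.0, -0.5};
        double[] momentum = {0.3, 0.2};
        integrate(position, momentum, 0.1, 20);
        System.out.println("position = " + java.util.Arrays.toString(position)
                + ", momentum = " + java.util.Arrays.toString(momentum));
    }
}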
|
package edu.ucla.cens.genjson;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.json.JSONArray;
public class PromptGroupThreeJsonMessageCreator implements JsonMessageCreator {
// # Diary group
// "date":"2009-11-03 10:18:33",
// "time":1257272467077,
// "timezone":"EST",
// "location": {
// "latitude":38.8977,
// "longitude":-77.0366
// "version_id":1,
// "group_id":3,
// "tags": [],
// "responses":[
// {"prompt_id":1,
// "response":0},
// {"prompt_id":2,
// "response":0},
// {"prompt_id":3,
// "response":0},
// {"prompt_id":4,
// "response":0},
// {"prompt_id":5,
// "response":0},
// {"prompt_id":6,
// "response":0},
// {"prompt_id":7,
// "response":0},
// {"prompt_id":8,
// "response":0},
// {"prompt_id":9,
// "response":0},
// {"prompt_id":10,
// "response":0},
// {"prompt_id":11,
// "response":0},
// {"prompt_id":12,
// "response":0},
// {"prompt_id":13,
// "response":["t","t","t","t","t","t"]},
// {"prompt_id":14,
// "response":0},
// {"prompt_id":15,
// "response":0}
// {"prompt_id":16,
// "response":0},
// {"prompt_id":17,
// "response":0},
// {"prompt_id":18,
// "response":0},
// {"prompt_id":19,
// "response":0},
// {"prompt_id":20,
// "response":0},
// {"prompt_id":21,
// "response":0}
/**
* Returns a JSONArray with numberOfEntries elements that are all of the prompt group id 3 type.
*/
public JSONArray createMessage(int numberOfEntries) {
JSONArray jsonArray = new JSONArray();
String tz = ValueCreator.tz(); // use the same tz for all messages in the returned array (the most likely use case)
int versionId = 1;
int groupId = 3;
List<String> tags = new ArrayList<String>();
for(int i = 0; i < numberOfEntries; i++) {
String date = ValueCreator.date();
long epoch = ValueCreator.epoch();
double latitude = ValueCreator.latitude();
double longitude = ValueCreator.longitude();
Map<String, Object> map = new HashMap<String, Object>();
map.put("date", date);
map.put("time", epoch);
map.put("timezone", tz);
map.put("version_id", versionId);
map.put("group_id", groupId);
map.put("tags", tags); // always empty for now
Map<String, Object> location = new HashMap<String, Object>();
location.put("latitude", latitude);
location.put("longitude", longitude);
map.put("location", location);
List<Map<String, Object>> responses = new ArrayList<Map<String, Object>>();
// p0 is simply a "parent" question
for(int j = 1; j < 4; j++) {
Map<String, Object> p = new HashMap<String, Object>();
p.put("prompt_id", j);
p.put("response", ValueCreator.randomBoolean() ? 1 : 0);
responses.add(p);
}
for(int j = 4; j < 9; j++) {
Map<String, Object> p = new HashMap<String, Object>();
p.put("prompt_id", j);
p.put("response", ValueCreator.randomPositiveIntModulus(5));
responses.add(p);
}
for(int j = 9; j < 11; j++) {
Map<String, Object> p = new HashMap<String, Object>();
p.put("prompt_id", j);
p.put("response", ValueCreator.randomBoolean() ? 1 : 0);
responses.add(p);
}
Map<String, Object> p11 = new HashMap<String, Object>();
p11.put("prompt_id", 11);
p11.put("response", ValueCreator.randomPositiveIntModulus(3));
responses.add(p11);
Map<String, Object> p12 = new HashMap<String, Object>();
p12.put("prompt_id", 12);
p12.put("response", ValueCreator.randomPositiveIntModulus(6));
responses.add(p12);
List<String> booleans = new ArrayList<String>();
for(int j = 0; j < 6; j++) {
booleans.add(ValueCreator.randomBoolean() ? "t" : "f");
}
Map<String, Object> p13 = new HashMap<String, Object>();
p13.put("prompt_id", 13);
p13.put("response", booleans);
responses.add(p13);
for(int j = 14; j < 17; j++) {
Map<String, Object> p = new HashMap<String, Object>();
p.put("prompt_id", j);
p.put("response", ValueCreator.randomPositiveIntModulus(10));
responses.add(p);
}
for(int j = 17; j < 19; j++) {
Map<String, Object> p = new HashMap<String, Object>();
p.put("prompt_id", j);
p.put("response", ValueCreator.randomBoolean() ? 1 : 0);
responses.add(p);
}
Map<String, Object> p19 = new HashMap<String, Object>();
p19.put("prompt_id", 19);
p19.put("response", ValueCreator.randomPositiveIntModulus(7));
responses.add(p19);
Map<String, Object> p20 = new HashMap<String, Object>();
p20.put("prompt_id", 20);
p20.put("response", ValueCreator.randomBoolean() ? 1 : 0);
responses.add(p20);
Map<String, Object> p21 = new HashMap<String, Object>();
p21.put("prompt_id", 21);
p21.put("response", ValueCreator.randomPositiveIntModulus(7));
responses.add(p21);
map.put("responses", responses);
jsonArray.put(map);
}
return jsonArray;
}
}
|
package level;
import java.awt.image.BufferedImage;
import java.io.IOException;
import javax.imageio.ImageIO;
public class LoadLevel extends Level {
public LoadLevel(String path) {
super(path);
}
protected void loadLevel(String path) {
try {
BufferedImage image = ImageIO.read(LoadLevel.class.getResource(path));
int w = width = image.getWidth();
int h = height = image.getHeight();
tiles = new int[w * h];
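// Copy the image's packed ARGB pixel values straight into the tile array (one int per tile).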
image.getRGB(0, 0, w, h, tiles, 0, w);
} catch (IOException e) {
e.printStackTrace();
System.out.println("Exception! Could not load level file!");
}
}
protected void generateLevel() {
}
}
|
package org.jivesoftware.messenger.user;
import org.jivesoftware.database.DbConnectionManager;
import org.jivesoftware.util.Log;
import org.jivesoftware.util.StringUtils;
import org.jivesoftware.util.LocaleUtils;
import java.sql.*;
import java.util.*;
import java.util.Date;
/**
* Default implementation of the UserProvider interface, which reads and writes data
* from the <tt>jiveUser</tt> database table.
*
* @author Matt Tucker
*/
public class DefaultUserProvider implements UserProvider {
private static final String LOAD_USER =
"SELECT name, email, creationDate, modificationDate FROM jiveUser WHERE username=?";
private static final String USER_COUNT =
"SELECT count(*) FROM jiveUser";
private static final String ALL_USERS =
"SELECT username FROM jiveUser";
private static final String INSERT_USER =
"INSERT INTO jiveUser (username,password,name,email,creationDate,modificationDate) " +
"VALUES (?,?,?,?,?,?)";
private static final String DELETE_USER_PROPS =
"DELETE FROM jiveUserProp WHERE username=?";
private static final String DELETE_VCARD_PROPS =
"DELETE FROM jiveVCard WHERE username=?";
private static final String DELETE_USER =
"DELETE FROM jiveUser WHERE username=?";
private static final String UPDATE_NAME =
"UPDATE jiveUser SET name=? WHERE username=?";
private static final String UPDATE_EMAIL =
"UPDATE jiveUser SET email=? WHERE username=?";
private static final String UPDATE_CREATION_DATE =
"UPDATE jiveUser SET creationDate=? WHERE username=?";
private static final String UPDATE_MODIFICATION_DATE =
"UPDATE jiveUser SET modificationDate=? WHERE username=?";
private static final String LOAD_PASSWORD =
"SELECT password FROM jiveUser WHERE username=?";
private static final String UPDATE_PASSWORD =
"UPDATE jiveUser SET password=? WHERE username=?";
public User loadUser(String username) throws UserNotFoundException {
Connection con = null;
PreparedStatement pstmt = null;
try {
con = DbConnectionManager.getConnection();
pstmt = con.prepareStatement(LOAD_USER);
pstmt.setString(1, username);
ResultSet rs = pstmt.executeQuery();
if (!rs.next()) {
throw new UserNotFoundException();
}
String name = rs.getString(1);
String email = rs.getString(2);
Date creationDate = new Date(Long.parseLong(rs.getString(3).trim()));
Date modificationDate = new Date(Long.parseLong(rs.getString(4).trim()));
rs.close();
return new User(username, name, email, creationDate, modificationDate);
}
catch (Exception e) {
throw new UserNotFoundException(e);
}
finally {
try { if (pstmt != null) { pstmt.close(); } }
catch (Exception e) { Log.error(e); }
try { if (con != null) { con.close(); } }
catch (Exception e) { Log.error(e); }
}
}
public User createUser(String username, String password, String name, String email)
throws UserAlreadyExistsException
{
try {
loadUser(username);
// The user already exists since no exception was thrown, so:
throw new UserAlreadyExistsException("Username " + username + " already exists");
}
catch (UserNotFoundException unfe) {
// The user doesn't already exist so we can create a new user
Date now = new Date();
Connection con = null;
PreparedStatement pstmt = null;
try {
con = DbConnectionManager.getConnection();
pstmt = con.prepareStatement(INSERT_USER);
pstmt.setString(1, username);
pstmt.setString(2, password);
if (name == null) {
pstmt.setNull(3, Types.VARCHAR);
}
else {
pstmt.setString(3, name);
}
if (email == null) {
pstmt.setNull(4, Types.VARCHAR);
}
else {
pstmt.setString(4, email);
}
pstmt.setString(5, StringUtils.dateToMillis(now));
pstmt.setString(6, StringUtils.dateToMillis(now));
pstmt.execute();
}
catch (Exception e) {
Log.error(LocaleUtils.getLocalizedString("admin.error"), e);
}
finally {
try { if (pstmt != null) { pstmt.close(); } }
catch (Exception e) { Log.error(e); }
try { if (con != null) { con.close(); } }
catch (Exception e) { Log.error(e); }
}
return new User(username, name, email, now, now);
}
}
public void deleteUser(String username) {
Connection con = null;
PreparedStatement pstmt = null;
boolean abortTransaction = false;
try {
con = DbConnectionManager.getTransactionConnection();
// Delete all of the user's extended properties
pstmt = con.prepareStatement(DELETE_USER_PROPS);
pstmt.setString(1, username);
pstmt.execute();
pstmt.close();
// Delete all of the user's vcard properties
pstmt = con.prepareStatement(DELETE_VCARD_PROPS);
pstmt.setString(1, username);
pstmt.execute();
pstmt.close();
// Delete the actual user entry
pstmt = con.prepareStatement(DELETE_USER);
pstmt.setString(1, username);
pstmt.execute();
}
catch (Exception e) {
Log.error(e);
abortTransaction = true;
}
finally {
try { if (pstmt != null) { pstmt.close(); } }
catch (Exception e) { Log.error(e); }
DbConnectionManager.closeTransactionConnection(con, abortTransaction);
}
}
public int getUserCount() {
int count = 0;
Connection con = null;
PreparedStatement pstmt = null;
try {
con = DbConnectionManager.getConnection();
pstmt = con.prepareStatement(USER_COUNT);
ResultSet rs = pstmt.executeQuery();
if (rs.next()) {
count = rs.getInt(1);
}
rs.close();
}
catch (SQLException e) {
Log.error(e);
}
finally {
try { if (pstmt != null) { pstmt.close(); } }
catch (Exception e) { Log.error(e); }
try { if (con != null) { con.close(); } }
catch (Exception e) { Log.error(e); }
}
return count;
}
public Collection<User> getUsers() {
List<String> usernames = new ArrayList<String>(500);
Connection con = null;
PreparedStatement pstmt = null;
try {
con = DbConnectionManager.getConnection();
pstmt = con.prepareStatement(ALL_USERS);
ResultSet rs = pstmt.executeQuery();
// Set the fetch size. This will prevent some JDBC drivers from trying
// to load the entire result set into memory.
DbConnectionManager.setFetchSize(rs, 500);
while (rs.next()) {
usernames.add(rs.getString(1));
}
rs.close();
}
catch (SQLException e) {
Log.error(e);
}
finally {
try { if (pstmt != null) { pstmt.close(); } }
catch (Exception e) { Log.error(e); }
try { if (con != null) { con.close(); } }
catch (Exception e) { Log.error(e); }
}
return new UserCollection((String[])usernames.toArray(new String[usernames.size()]));
}
public Collection<User> getUsers(int startIndex, int numResults) {
List<String> usernames = new ArrayList<String>(numResults);
Connection con = null;
PreparedStatement pstmt = null;
try {
con = DbConnectionManager.getConnection();
pstmt = con.prepareStatement(ALL_USERS);
ResultSet rs = pstmt.executeQuery();
DbConnectionManager.setFetchSize(rs, startIndex + numResults);
DbConnectionManager.scrollResultSet(rs, startIndex);
int count = 0;
while (rs.next() && count < numResults) {
usernames.add(rs.getString(1));
count++;
}
rs.close();
}
catch (SQLException e) {
Log.error(e);
}
finally {
try { if (pstmt != null) { pstmt.close(); } }
catch (Exception e) { Log.error(e); }
try { if (con != null) { con.close(); } }
catch (Exception e) { Log.error(e); }
}
return new UserCollection((String[])usernames.toArray(new String[usernames.size()]));
}
public void setName(String username, String name) throws UserNotFoundException {
Connection con = null;
PreparedStatement pstmt = null;
try {
con = DbConnectionManager.getConnection();
pstmt = con.prepareStatement(UPDATE_NAME);
pstmt.setString(1, name);
pstmt.setString(2, username);
pstmt.executeUpdate();
}
catch (SQLException sqle) {
throw new UserNotFoundException(sqle);
}
finally {
try { if (pstmt != null) pstmt.close(); }
catch (Exception e) { Log.error(e); }
try { if (con != null) con.close(); }
catch (Exception e) { Log.error(e); }
}
}
public void setEmail(String username, String email) throws UserNotFoundException {
Connection con = null;
PreparedStatement pstmt = null;
try {
con = DbConnectionManager.getConnection();
pstmt = con.prepareStatement(UPDATE_EMAIL);
pstmt.setString(1, email);
pstmt.setString(2, username);
pstmt.executeUpdate();
}
catch (SQLException sqle) {
throw new UserNotFoundException(sqle);
}
finally {
try { if (pstmt != null) pstmt.close(); }
catch (Exception e) { Log.error(e); }
try { if (con != null) con.close(); }
catch (Exception e) { Log.error(e); }
}
}
public void setCreationDate(String username, Date creationDate) throws UserNotFoundException {
Connection con = null;
PreparedStatement pstmt = null;
try {
con = DbConnectionManager.getConnection();
pstmt = con.prepareStatement(UPDATE_CREATION_DATE);
pstmt.setString(1, StringUtils.dateToMillis(creationDate));
pstmt.setString(2, username);
pstmt.executeUpdate();
}
catch (SQLException sqle) {
throw new UserNotFoundException(sqle);
}
finally {
try { if (pstmt != null) pstmt.close(); }
catch (Exception e) { Log.error(e); }
try { if (con != null) con.close(); }
catch (Exception e) { Log.error(e); }
}
}
public void setModificationDate(String username, Date modificationDate) throws UserNotFoundException {
Connection con = null;
PreparedStatement pstmt = null;
try {
con = DbConnectionManager.getConnection();
pstmt = con.prepareStatement(UPDATE_MODIFICATION_DATE);
pstmt.setString(1, StringUtils.dateToMillis(modificationDate));
pstmt.setString(2, username);
pstmt.executeUpdate();
}
catch (SQLException sqle) {
throw new UserNotFoundException(sqle);
}
finally {
try { if (pstmt != null) pstmt.close(); }
catch (Exception e) { Log.error(e); }
try { if (con != null) con.close(); }
catch (Exception e) { Log.error(e); }
}
}
public String getPassword(String username) throws UserNotFoundException {
Connection con = null;
PreparedStatement pstmt = null;
try {
con = DbConnectionManager.getConnection();
pstmt = con.prepareStatement(LOAD_PASSWORD);
pstmt.setString(1, username);
ResultSet rs = pstmt.executeQuery();
if (!rs.next()) {
throw new UserNotFoundException(username);
}
return rs.getString(1);
}
catch (SQLException sqle) {
throw new UserNotFoundException(sqle);
}
finally {
try { if (pstmt != null) pstmt.close(); }
catch (Exception e) { Log.error(e); }
try { if (con != null) con.close(); }
catch (Exception e) { Log.error(e); }
}
}
public void setPassword(String username, String password) throws UserNotFoundException
{
Connection con = null;
PreparedStatement pstmt = null;
try {
con = DbConnectionManager.getConnection();
pstmt = con.prepareStatement(UPDATE_PASSWORD);
pstmt.setString(1, password);
pstmt.setString(2, username);
pstmt.executeUpdate();
}
catch (SQLException sqle) {
throw new UserNotFoundException(sqle);
}
finally {
try { if (pstmt != null) pstmt.close(); }
catch (Exception e) { Log.error(e); }
try { if (con != null) con.close(); }
catch (Exception e) { Log.error(e); }
}
}
public Set<String> getSearchFields() throws UnsupportedOperationException {
return new LinkedHashSet<String>(Arrays.asList("Username", "Name", "Email"));
}
public Collection<User> findUsers(Set<String> fields, String query)
throws UnsupportedOperationException
{
if (fields.isEmpty()) {
return Collections.emptyList();
}
if (!getSearchFields().containsAll(fields)) {
throw new IllegalArgumentException("Search fields " + fields + " are not valid.");
}
// SQL LIKE queries don't map directly into a keyword/wildcard search like we want.
// Therefore, we do a best approximation by replacing '*' with '%' and then
// surrounding the whole query with two '%'. This will return more data than desired,
// but is better than returning less data than desired.
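// For example, a query of "jo*n" becomes the SQL pattern "%jo%n%", and a trailing '*'
// (e.g. "john*" -> "%john%%") is trimmed back to a single trailing '%'.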
query = "%" + query.replace('*', '%') + "%";
if (query.endsWith("%%")) {
query = query.substring(0, query.length()-1);
}
List<String> usernames = new ArrayList<String>(50);
Connection con = null;
Statement stmt = null;
try {
con = DbConnectionManager.getConnection();
stmt = con.createStatement();
StringBuilder sql = new StringBuilder();
sql.append("SELECT username FROM jiveUser WHERE");
boolean first = true;
if (fields.contains("Username")) {
sql.append(" username LIKE '").append(StringUtils.escapeForSQL(query)).append("'");
first = false;
}
if (fields.contains("Name")) {
if (!first) {
sql.append(" AND");
}
sql.append(" name LIKE '").append(StringUtils.escapeForSQL(query)).append("'");
first = false;
}
if (fields.contains("Email")) {
if (!first) {
sql.append(" AND");
}
sql.append(" email LIKE '").append(StringUtils.escapeForSQL(query)).append("'");
}
ResultSet rs = stmt.executeQuery(sql.toString());
while (rs.next()) {
usernames.add(rs.getString(1));
}
rs.close();
}
catch (SQLException e) {
Log.error(e);
}
finally {
try { if (stmt != null) { stmt.close(); } }
catch (Exception e) { Log.error(e); }
try { if (con != null) { con.close(); } }
catch (Exception e) { Log.error(e); }
}
return new UserCollection((String[])usernames.toArray(new String[usernames.size()]));
}
public Collection<User> findUsers(Set<String> fields, String query, int startIndex,
int numResults) throws UnsupportedOperationException
{
if (fields.isEmpty()) {
return Collections.emptyList();
}
if (!getSearchFields().containsAll(fields)) {
throw new IllegalArgumentException("Search fields " + fields + " are not valid.");
}
// SQL LIKE queries don't map directly into a keyword/wildcard search like we want.
// Therefore, we do a best approximation by replacing '*' with '%' and then
// surrounding the whole query with two '%'. This will return more data than desired,
// but is better than returning less data than desired.
query = "%" + query.replace('*', '%') + "%";
if (query.endsWith("%%")) {
query = query.substring(0, query.length()-1);
}
List<String> usernames = new ArrayList<String>(50);
Connection con = null;
Statement stmt = null;
try {
con = DbConnectionManager.getConnection();
stmt = con.createStatement();
StringBuilder sql = new StringBuilder();
sql.append("SELECT username FROM jiveUser WHERE");
boolean first = true;
if (fields.contains("Username")) {
sql.append(" username LIKE '").append(StringUtils.escapeForSQL(query)).append("'");
first = false;
}
if (fields.contains("Name")) {
if (!first) {
sql.append(" AND");
}
sql.append(" name LIKE '").append(StringUtils.escapeForSQL(query)).append("'");
first = false;
}
if (fields.contains("Email")) {
if (!first) {
sql.append(" AND");
}
sql.append(" email LIKE '").append(StringUtils.escapeForSQL(query)).append("'");
}
ResultSet rs = stmt.executeQuery(sql.toString());
// Scroll to the start index.
DbConnectionManager.scrollResultSet(rs, startIndex);
int count = 0;
while (rs.next() && count < numResults) {
usernames.add(rs.getString(1));
count++;
}
rs.close();
}
catch (SQLException e) {
Log.error(e);
}
finally {
try { if (stmt != null) { stmt.close(); } }
catch (Exception e) { Log.error(e); }
try { if (con != null) { con.close(); } }
catch (Exception e) { Log.error(e); }
}
return new UserCollection((String[])usernames.toArray(new String[usernames.size()]));
}
public boolean isReadOnly() {
return false;
}
}
|
package com.intellij.lang.jsgraphql.types.language;
import com.google.common.collect.ImmutableList;
import com.intellij.lang.jsgraphql.types.Internal;
import com.intellij.lang.jsgraphql.types.PublicApi;
import com.intellij.lang.jsgraphql.types.util.TraversalControl;
import com.intellij.lang.jsgraphql.types.util.TraverserContext;
import com.intellij.psi.PsiElement;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.*;
import java.util.function.Consumer;
import static com.intellij.lang.jsgraphql.types.Assert.assertNotNull;
import static com.intellij.lang.jsgraphql.types.collect.ImmutableKit.emptyList;
import static com.intellij.lang.jsgraphql.types.language.NodeChildrenContainer.newNodeChildrenContainer;
import static java.util.Collections.emptyMap;
@PublicApi
public class Argument extends AbstractNode<Argument> implements NamedNode<Argument> {
public static final String CHILD_VALUE = "value";
private final String name;
private final Value value;
@Internal
protected Argument(String name,
Value value,
SourceLocation sourceLocation,
List<Comment> comments,
IgnoredChars ignoredChars,
Map<String, String> additionalData,
@Nullable PsiElement element,
@Nullable List<? extends Node> sourceNodes) {
super(sourceLocation, comments, ignoredChars, additionalData, element, sourceNodes);
this.name = name;
this.value = value;
}
/**
* Alternative to using a Builder, provided for convenience.
*
* @param name  the name of the argument
* @param value the value of the argument
*/
public Argument(String name, Value value) {
this(name, value, null, emptyList(), IgnoredChars.EMPTY, emptyMap(), null, null);
}
public static Builder newArgument() {
return new Builder();
}
public static Builder newArgument(String name, Value value) {
return new Builder().name(name).value(value);
}
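// Typical use (illustrative sketch only): Argument.newArgument().name("first").value(someValue).build(),
// or transform(builder -> builder.value(newValue)) to derive a modified copy of an existing argument.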
@Override
public String getName() {
return name;
}
public Value getValue() {
return value;
}
@Override
public List<Node> getChildren() {
return value != null ? ImmutableList.of(value) : emptyList();
}
@Override
public NodeChildrenContainer getNamedChildren() {
return newNodeChildrenContainer()
.child(CHILD_VALUE, value)
.build();
}
@Override
public Argument withNewChildren(NodeChildrenContainer newChildren) {
return transform(builder -> builder
.value(newChildren.getChildOrNull(CHILD_VALUE))
);
}
@Override
public boolean isEqualTo(Node o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
Argument that = (Argument) o;
return Objects.equals(this.name, that.name);
}
@Override
public Argument deepCopy() {
return new Argument(name, deepCopy(value), getSourceLocation(), getComments(), getIgnoredChars(), getAdditionalData(), getElement(), getSourceNodes());
}
@Override
public String toString() {
return "Argument{" +
"name='" + name + '\'' +
", value=" + value +
'}';
}
@Override
public TraversalControl accept(TraverserContext<Node> context, NodeVisitor visitor) {
return visitor.visitArgument(this, context);
}
public Argument transform(Consumer<Builder> builderConsumer) {
Builder builder = new Builder(this);
builderConsumer.accept(builder);
return builder.build();
}
public static final class Builder implements NodeBuilder {
private SourceLocation sourceLocation;
private ImmutableList<Comment> comments = emptyList();
private String name;
private Value value;
private IgnoredChars ignoredChars = IgnoredChars.EMPTY;
private Map<String, String> additionalData = new LinkedHashMap<>();
private @Nullable PsiElement element;
private @Nullable List<? extends Node> sourceNodes;
private Builder() {
}
private Builder(Argument existing) {
this.sourceLocation = existing.getSourceLocation();
this.comments = ImmutableList.copyOf(existing.getComments());
this.name = existing.getName();
this.value = existing.getValue();
this.ignoredChars = existing.getIgnoredChars();
this.additionalData = new LinkedHashMap<>(existing.getAdditionalData());
this.element = existing.getElement();
this.sourceNodes = existing.getSourceNodes();
}
public Builder sourceLocation(SourceLocation sourceLocation) {
this.sourceLocation = sourceLocation;
return this;
}
public Builder name(String name) {
this.name = name;
return this;
}
public Builder value(Value value) {
this.value = value;
return this;
}
public Builder comments(List<Comment> comments) {
this.comments = ImmutableList.copyOf(comments);
return this;
}
public Builder ignoredChars(IgnoredChars ignoredChars) {
this.ignoredChars = ignoredChars;
return this;
}
public Builder additionalData(Map<String, String> additionalData) {
this.additionalData = assertNotNull(additionalData);
return this;
}
public Builder additionalData(String key, String value) {
this.additionalData.put(key, value);
return this;
}
public Builder element(@Nullable PsiElement element) {
this.element = element;
return this;
}
public Builder sourceNodes(@Nullable List<? extends Node> sourceNodes) {
this.sourceNodes = sourceNodes;
return this;
}
public Argument build() {
return new Argument(name, value, sourceLocation, comments, ignoredChars, additionalData, element, sourceNodes);
}
}
}
|
package HxCKDMS.XEnchants.hooks;
import java.util.Random;
import HxCKDMS.XEnchants.XEnchants;
import cpw.mods.fml.common.eventhandler.SubscribeEvent;
import net.minecraft.block.Block;
import net.minecraft.enchantment.EnchantmentHelper;
import net.minecraft.entity.Entity;
import net.minecraft.entity.EntityLiving;
import net.minecraft.entity.EntityLivingBase;
import net.minecraft.entity.monster.*;
import net.minecraft.entity.passive.EntityAnimal;
import net.minecraft.entity.passive.EntityVillager;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.entity.player.EntityPlayerMP;
import net.minecraft.item.Item;
import net.minecraft.item.ItemStack;
import net.minecraft.item.crafting.FurnaceRecipes;
import net.minecraft.util.ChatComponentText;
import net.minecraftforge.event.entity.living.LivingAttackEvent;
import net.minecraftforge.event.entity.living.LivingDeathEvent;
import net.minecraftforge.event.world.BlockEvent;
public class ToolEventHookContainer
{
// Enchantment levels read from the attacker's held item
int VampireLevel;
int AutoSmeltLevel;
int LifeStealLevel;
// Food points restored per kill by Vampirism, chosen according to the victim type
int VBRV = 0;
@SubscribeEvent
public void LivingAttackEvent(LivingAttackEvent event){
EntityLivingBase entity = event.entityLiving;
Entity ent = event.source.getSourceOfDamage();
if (ent instanceof EntityPlayerMP){
EntityPlayerMP Attacker = (EntityPlayerMP) ent;
EntityLiving Victim = (EntityLiving) entity;
ItemStack item = Attacker.getHeldItem();
LifeStealLevel = EnchantmentHelper.getEnchantmentLevel(XEnchants.LifeSteal.effectId, item);
if (LifeStealLevel > 0){
double PH = Victim.prevHealth;
double CH = Victim.getHealth();
// Heal the attacker by the damage dealt (previous health minus current health), scaled by the enchantment level.
float RH = (float) PH - (float) CH;
Attacker.heal(RH * LifeStealLevel);
}
}
}
@SubscribeEvent
public void LivingDeathEvent(LivingDeathEvent event)
{
EntityLivingBase entity = event.entityLiving;
Entity ent = event.source.getSourceOfDamage();
if (ent instanceof EntityPlayerMP){
EntityLiving Victim = (EntityLiving) entity;
EntityPlayerMP Attacker = (EntityPlayerMP) ent;
ItemStack item = Attacker.getHeldItem();
VampireLevel = EnchantmentHelper.getEnchantmentLevel(XEnchants.Vampirism.effectId, item);
if (VampireLevel > 0){
if (event.entityLiving instanceof EntityAnimal){
VBRV = 3;
}else if (event.entityLiving instanceof EntityPlayerMP){
VBRV = 15;
Attacker.addChatMessage(new ChatComponentText("\u00A74You have just tasted victorious blood"));
}else if (event.entityLiving instanceof EntityVillager){
VBRV = 12;
Attacker.addChatMessage(new ChatComponentText("\u00A74You have just tasted some rich blood"));
}else if (event.entityLiving instanceof EntityZombie || event.entityLiving instanceof EntitySkeleton){
VBRV = -1;
Attacker.addChatMessage(new ChatComponentText("\u00A74You have just tasted some horrible blood"));
}else if (event.entityLiving instanceof EntitySlime){
VBRV = 2;
}else if (event.entityLiving instanceof EntityEnderman){
VBRV = 4;
}else if (event.entityLiving instanceof EntityMob){
VBRV = 5;
}else{
VBRV = 3;
}
int curFood = Attacker.getFoodStats().getFoodLevel();
int newFud = (VBRV * VampireLevel) + curFood;
Attacker.getFoodStats().setFoodLevel(newFud);
}
}
}
@SubscribeEvent
public void onHarvestBlocks(BlockEvent.HarvestDropsEvent event)
{
if (event.harvester != null){
EntityPlayer player = event.harvester;
Block block = event.block;
ItemStack itemStackBlock = new ItemStack(Item.getItemFromBlock(block), 1);
ItemStack heldItem = player.getHeldItem();
ItemStack result;
AutoSmeltLevel = EnchantmentHelper.getEnchantmentLevel(XEnchants.FlameTouch.effectId, heldItem);
if(AutoSmeltLevel > 0)
{
result = FurnaceRecipes.smelting().getSmeltingResult(itemStackBlock);
if(result != null)
{
result.stackSize = AutoSmeltLevel;
// Clear the normal drops before adding the smelted result (removing by index while iterating skips entries).
event.drops.clear();
event.drops.add(result);
}
}
}
}
}
|
package bammerbom.ultimatecore.bukkit.commands;
import bammerbom.ultimatecore.bukkit.UltimateCommand;
import bammerbom.ultimatecore.bukkit.r;
import bammerbom.ultimatecore.bukkit.resources.classes.MetaItemStack;
import bammerbom.ultimatecore.bukkit.resources.utils.InventoryUtil;
import bammerbom.ultimatecore.bukkit.resources.utils.ItemUtil;
import org.bukkit.Bukkit;
import org.bukkit.Material;
import org.bukkit.command.Command;
import org.bukkit.command.CommandSender;
import org.bukkit.entity.Player;
import org.bukkit.inventory.ItemStack;
import java.util.Arrays;
import java.util.List;
public class CmdItem implements UltimateCommand {
@Override
public String getName() {
return "item";
}
@Override
public String getPermission() {
return "uc.item";
}
@Override
public List<String> getAliases() {
return Arrays.asList("i");
}
@SuppressWarnings("deprecation")
@Override
public void run(final CommandSender cs, String label, String[] args) {
if (!r.perm(cs, "uc.item", false, true)) {
return;
}
if (!r.isPlayer(cs)) {
return;
}
Player p = (Player) cs;
if (!r.checkArgs(args, 0)) {
r.sendMes(cs, "itemUsage");
return;
}
ItemStack item;
try {
item = new ItemStack(ItemUtil.searchItem(args[0]));
} catch (Exception e) {
r.sendMes(cs, "itemItemNotFound", "%Item", args[0]);
return;
}
if (item == null || item.getType() == null || item.getType().equals(Material.AIR)) {
r.sendMes(cs, "itemItemNotFound", "%Item", args[0]);
return;
}
if (InventoryUtil.isFullInventory(p.getInventory())) {
r.sendMes(cs, "itemInventoryFull");
return;
}
Integer amount = item.getMaxStackSize();
if (r.checkArgs(args, 1)) {
if (!r.isInt(args[1])) {
r.sendMes(cs, "numberFormat", "%Number", args[1]);
return;
}
amount = Integer.parseInt(args[1]);
}
item.setAmount(amount);
if (r.checkArgs(args, 2)) {
if (r.isInt(args[2])) {
item.setDurability(Short.parseShort(args[2]));
}
MetaItemStack meta = new MetaItemStack(item);
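// If args[2] was a numeric durability value, the item meta string starts at argument index 3;
// otherwise the meta starts at index 2.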
int metaStart = r.isInt(args[2]) ? 3 : 2;
if (args.length > metaStart) {
try {
String s = r.getFinalArg(args, metaStart);
if (s.startsWith("\\{")) {
item = Bukkit.getUnsafe().modifyItemStack(item, s);
} else {
try {
meta.parseStringMeta(cs, r.perm(cs, "uc.item.unsafe", false, false), args, metaStart);
item = meta.getItemStack();
} catch (IllegalArgumentException ex) {
if (ex.getMessage() != null && ex.getMessage().contains("Enchantment level is either too " + "low or too high")) {
r.sendMes(cs, "enchantUnsafe");
return;
}
return;
}
}
} catch (Exception e) {
r.sendMes(cs, "itemMetadataFailed");
return;
}
}
}
InventoryUtil.addItem(p.getInventory(), item);
r.sendMes(cs, "itemMessage", "%Item", ItemUtil.getName(item), "%Amount", amount, "%Player", r.getDisplayName(p));
}
@Override
public List<String> onTabComplete(CommandSender cs, Command cmd, String alias, String[] args, String curs, Integer curn) {
return null;
}
}
|
package br.com.jq.syncthia.bdcreator.table;
import br.com.jq.syncthia.bdcreator.annotations.GetAnnotation;
import br.com.jq.syncthia.bdcreator.exceptions.CantPersistAutomaticException;
public abstract class TableEntity {
private GetAnnotation getAnnotation;
public TableEntity() {
getAnnotation = new GetAnnotation();
}
protected final boolean persistEntityInternal() throws CantPersistAutomaticException {
Table t = getAnnotation.getRelatedTable(getClass());
if (t == null) {
throw new CantPersistAutomaticException();
}
return false;
}
protected boolean persistEntityManually() {
return false;
}
public final boolean persistEntity() {
try {
return persistEntityInternal();
} catch (CantPersistAutomaticException e) {
// automatic persistence is not possible for this entity; fall back to manual persistence
e.printStackTrace();
return persistEntityManually();
}
}
}
|
package com.bouncestorage.swiftproxy.v1;
import static com.google.common.base.Throwables.propagate;
import java.io.IOException;
import java.io.InputStream;
import java.io.UnsupportedEncodingException;
import java.net.URLDecoder;
import java.util.Base64;
import java.util.Date;
import java.util.Iterator;
import java.util.Map;
import java.util.Spliterator;
import java.util.Spliterators;
import java.util.StringTokenizer;
import java.util.stream.Collectors;
import java.util.stream.StreamSupport;
import javax.validation.constraints.NotNull;
import javax.ws.rs.BadRequestException;
import javax.ws.rs.ClientErrorException;
import javax.ws.rs.Consumes;
import javax.ws.rs.DELETE;
import javax.ws.rs.Encoded;
import javax.ws.rs.GET;
import javax.ws.rs.HEAD;
import javax.ws.rs.HeaderParam;
import javax.ws.rs.POST;
import javax.ws.rs.PUT;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.HttpHeaders;
import javax.ws.rs.core.Response;
import com.bouncestorage.swiftproxy.BlobStoreResource;
import com.bouncestorage.swiftproxy.COPY;
import com.google.common.base.Joiner;
import com.google.common.collect.Iterators;
import com.google.common.hash.HashCode;
import com.google.common.hash.Hashing;
import org.glassfish.grizzly.http.server.Request;
import org.glassfish.grizzly.utils.Pair;
import org.jclouds.blobstore.BlobStore;
import org.jclouds.blobstore.ContainerNotFoundException;
import org.jclouds.blobstore.domain.Blob;
import org.jclouds.blobstore.domain.BlobBuilder;
import org.jclouds.blobstore.domain.BlobMetadata;
import org.jclouds.blobstore.options.CopyOptions;
import org.jclouds.blobstore.options.GetOptions;
import org.jclouds.http.HttpResponseException;
import org.jclouds.io.MutableContentMetadata;
import org.jclouds.openstack.swift.v1.CopyObjectException;
@Path("/v1/{account}/{container}/{object:.*}")
public final class ObjectResource extends BlobStoreResource {
private static final String META_HEADER_PREFIX = "x-object-meta-";
private static final int MAX_OBJECT_NAME_LENGTH = 1024;
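// Parses an HTTP Range header into jclouds GetOptions. Illustrative mappings (header values are
// examples, not from the original source): "bytes=0-99" -> options.range(0, 99),
// "bytes=9500-" -> options.startAt(9500), "bytes=-500" -> options.tail(500). A zero-length
// suffix range ("bytes=-0") is rejected with 416 Requested Range Not Satisfiable.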
private static GetOptions parseRange(GetOptions options, String range) {
if (range != null) {
range = range.replaceAll(" ", "").toLowerCase();
String bytesUnit = "bytes=";
int idx = range.indexOf(bytesUnit);
if (idx == 0) {
String byteRangeSet = range.substring(bytesUnit.length());
Iterator<Object> iter = Iterators.forEnumeration(new StringTokenizer(byteRangeSet, ","));
StreamSupport.stream(Spliterators.spliteratorUnknownSize(iter, Spliterator.ORDERED), false)
.map(rangeSpec -> (String) rangeSpec)
.map(rangeSpec -> {
int dash = rangeSpec.indexOf("-");
if (dash == -1) {
throw new BadRequestException("Range");
}
String firstBytePos = rangeSpec.substring(0, dash);
String lastBytePos = rangeSpec.substring(dash + 1);
Long firstByte = firstBytePos.isEmpty() ? null : Long.parseLong(firstBytePos);
Long lastByte = lastBytePos.isEmpty() ? null : Long.parseLong(lastBytePos);
return new Pair<>(firstByte, lastByte);
})
.forEach(rangeSpec -> {
if (rangeSpec.getFirst() == null) {
if (rangeSpec.getSecond() == 0) {
throw new ClientErrorException(Response.Status.REQUESTED_RANGE_NOT_SATISFIABLE);
}
options.tail(rangeSpec.getSecond());
} else if (rangeSpec.getSecond() == null) {
options.startAt(rangeSpec.getFirst());
} else {
options.range(rangeSpec.getFirst(), rangeSpec.getSecond());
}
});
}
}
return options;
}
@GET
public Response getObject(@NotNull @PathParam("container") String container,
@NotNull @Encoded @PathParam("object") String object,
@NotNull @PathParam("account") String account,
@HeaderParam("X-Auth-Token") String authToken,
@HeaderParam("X-Newest") boolean newest,
@QueryParam("signature") String signature,
@QueryParam("expires") String expires,
@QueryParam("multipart-manifest") String multiPartManifest,
@HeaderParam("Range") String range,
@HeaderParam("If-Match") String ifMatch,
@HeaderParam("If-None-Match") String ifNoneMatch,
@HeaderParam("If-Modified-Since") String ifModifiedSince,
@HeaderParam("If-Unmodified-Since") String ifUnmodifiedSince) {
GetOptions options = parseRange(new GetOptions(), range);
Blob blob = getBlobStore().getBlob(container, object, options);
if (blob == null) {
return Response.status(Response.Status.NOT_FOUND).build();
}
BlobMetadata meta = blob.getMetadata();
try (InputStream is = blob.getPayload().openStream()) {
return addObjectHeaders(meta, Response.ok(is)).build();
} catch (IOException e) {
return Response.status(Response.Status.INTERNAL_SERVER_ERROR).build();
}
}
private static String normalizePath(String pathName) {
String objectName = Joiner.on("/").join(Iterators.forEnumeration(new StringTokenizer(pathName, "/")));
if (pathName.endsWith("/")) {
objectName += "/";
}
return objectName;
}
private static String contentType(String contentType) {
// work around jclouds always stripping the trailing / from blob name listings;
// this lets us detect that it has happened
if ("application/directory".equals(contentType)) {
return "application/x-directory";
}
return contentType;
}
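// Collects Swift user metadata from x-object-meta-* request headers. For example (hypothetical
// header), "x-object-meta-color: red" yields the map entry ("color" -> "red"); a header named
// exactly "x-object-meta-" with no suffix is rejected with a 400 Bad Request.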
private Map<String, String> getUserMetadata(Request request) {
return StreamSupport.stream(request.getHeaderNames().spliterator(), false)
.peek(name -> logger.info("header: {}", name))
.filter(name -> name.toLowerCase().startsWith(META_HEADER_PREFIX))
.filter(name -> {
if (name.equals(META_HEADER_PREFIX)) {
throw new BadRequestException();
}
return true;
})
.peek(name -> logger.info("usermetadata: {}", name))
.collect(Collectors.toMap(
name -> name.substring(META_HEADER_PREFIX.length()),
name -> request.getHeader(name)));
}
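// Validates a Destination/X-Copy-From value and splits it into (container, object). For example
// (hypothetical values), both "/photos/cat.jpg" and "photos/cat.jpg" yield ("photos", "cat.jpg");
// a value with no object part returns null.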
private static Pair<String, String> validateCopyParam(String destination) {
if (destination == null) {
return null;
}
Pair<String, String> res;
if (destination.charAt(0) == '/') {
String[] tokens = destination.split("/", 3);
if (tokens.length != 3) {
return null;
}
res = new Pair<>(tokens[1], tokens[2]);
} else {
String[] tokens = destination.split("/", 2);
if (tokens.length != 2) {
return null;
}
res = new Pair<>(tokens[0], tokens[1]);
}
return res;
}
@COPY
@Consumes(" ")
public Response copyObject(@NotNull @PathParam("container") String container,
@NotNull @Encoded @PathParam("object") String objectName,
@NotNull @PathParam("account") String account,
@HeaderParam("X-Auth-Token") String authToken,
@NotNull @HeaderParam("Destination") String destination,
@NotNull @HeaderParam("Destination-Account") String destAccount,
@HeaderParam(HttpHeaders.CONTENT_TYPE) String contentType,
@HeaderParam(HttpHeaders.CONTENT_ENCODING) String contentEncoding,
@HeaderParam(HttpHeaders.CONTENT_DISPOSITION) String contentDisposition,
@Context Request request) {
if (objectName.length() > MAX_OBJECT_NAME_LENGTH) {
return badRequest();
}
Pair<String, String> dest = validateCopyParam(destination);
if (dest == null) {
return Response.status(Response.Status.PRECONDITION_FAILED).build();
}
String destContainer = dest.getFirst();
String destObject = dest.getSecond();
// TODO: unused
if (destAccount == null) {
destAccount = account;
}
if (destObject.length() > MAX_OBJECT_NAME_LENGTH) {
return badRequest();
}
logger.info("copy {}/{} to {}/{}", container, objectName, destContainer, destObject);
Map<String, String> additionalUserMeta = getUserMetadata(request);
BlobStore blobStore = getBlobStore();
if (!blobStore.containerExists(container) || !blobStore.containerExists(destContainer)) {
return notFound();
}
String copiedFrom;
try {
copiedFrom = container + "/" + URLDecoder.decode(objectName, "UTF-8");
} catch (UnsupportedEncodingException e) {
throw propagate(e);
}
CopyOptions options;
if (additionalUserMeta.isEmpty()) {
options = CopyOptions.NONE;
} else {
BlobMetadata meta = blobStore.blobMetadata(container, objectName);
if (meta == null) {
return notFound();
}
options = CopyOptions.builder()
.userMetadata(meta.getUserMetadata())
.userMetadata(additionalUserMeta).build();
}
try {
String etag = blobStore.copyBlob(container, objectName, destContainer, destObject, options);
return Response.status(Response.Status.CREATED)
.header(HttpHeaders.ETAG, etag)
.header(HttpHeaders.CONTENT_LENGTH, 0)
.header(HttpHeaders.CONTENT_TYPE, contentType)
.header(HttpHeaders.DATE, new Date())
.header("X-Copied-From", copiedFrom)
.build();
} catch (CopyObjectException e) {
if (e.getCause() instanceof HttpResponseException) {
throw (HttpResponseException) e.getCause();
} else {
throw e;
}
}
}
// TODO: actually handle this, jclouds doesn't support metadata update yet
@POST
@Consumes(" ")
public Response postObject(@NotNull @PathParam("container") String container,
@NotNull @Encoded @PathParam("object") String objectName,
@NotNull @PathParam("account") String account,
@HeaderParam("X-Auth-Token") String authToken,
@HeaderParam("X-Delete-At") long deleteAt,
@HeaderParam(HttpHeaders.CONTENT_DISPOSITION) String contentDisposition,
@HeaderParam(HttpHeaders.CONTENT_ENCODING) String contentEncoding,
@HeaderParam("X-Delete-After") long deleteAfter,
@HeaderParam(HttpHeaders.CONTENT_TYPE) String contentType,
@HeaderParam("X-Detect-Content-Type") boolean detectContentType,
@Context Request request) {
if (objectName.length() > MAX_OBJECT_NAME_LENGTH) {
return badRequest();
}
if (!getBlobStore().containerExists(container)) {
return notFound();
}
Blob blob = getBlobStore().getBlob(container, objectName);
if (blob == null) {
return notFound();
}
blob.getMetadata().setUserMetadata(getUserMetadata(request));
copyContentHeaders(blob, contentDisposition, contentEncoding, contentType);
getBlobStore().putBlob(container, blob);
return Response.accepted()
.header(HttpHeaders.CONTENT_LENGTH, 0)
.header(HttpHeaders.CONTENT_TYPE, contentType)
.header(HttpHeaders.DATE, new Date())
.build();
}
private static void copyContentHeaders(Blob blob, String contentDisposition, String contentEncoding, String contentType) {
MutableContentMetadata contentMetadata = blob.getMetadata().getContentMetadata();
if (contentDisposition != null) {
contentMetadata.setContentDisposition(contentDisposition);
}
if (contentType != null) {
contentMetadata.setContentType(contentType);
}
if (contentEncoding != null) {
contentMetadata.setContentEncoding(contentEncoding);
}
}
// TODO: Handle object metadata
@PUT
public Response putObject(@NotNull @PathParam("container") String container,
@NotNull @Encoded @PathParam("object") String objectName,
@NotNull @PathParam("account") String account,
@QueryParam("multipart-manifest") boolean multiPartManifest,
@QueryParam("signature") String signature,
@QueryParam("expires") String expires,
@HeaderParam("X-Object-Manifest") String objectManifest,
@HeaderParam("X-Auth-Token") String authToken,
@HeaderParam(HttpHeaders.CONTENT_LENGTH) String contentLengthParam,
@HeaderParam("Transfer-Encoding") String transferEncoding,
@HeaderParam(HttpHeaders.CONTENT_TYPE) String contentType,
@HeaderParam("X-Detect-Content-Type") boolean detectContentType,
@HeaderParam("X-Copy-From") String copyFrom,
@HeaderParam("X-Copy-From-Account") String copyFromAccount,
@HeaderParam(HttpHeaders.ETAG) String eTag,
@HeaderParam(HttpHeaders.CONTENT_DISPOSITION) String contentDisposition,
@HeaderParam(HttpHeaders.CONTENT_ENCODING) String contentEncoding,
@HeaderParam("X-Delete-At") long deleteAt,
@HeaderParam("X-Delete-After") long deleteAfter,
@HeaderParam(HttpHeaders.IF_NONE_MATCH) String ifNoneMatch,
@Context Request request) {
//objectName = normalizePath(objectName);
if (objectName.length() > MAX_OBJECT_NAME_LENGTH) {
return badRequest();
}
if (transferEncoding != null && !"chunked".equals(transferEncoding)) {
return Response.status(Response.Status.NOT_IMPLEMENTED).build();
}
if (contentLengthParam == null && !"chunked".equals(transferEncoding)) {
return Response.status(Response.Status.LENGTH_REQUIRED).build();
}
long contentLength = contentLengthParam == null ? 0 : Long.parseLong(contentLengthParam);
logger.info("PUT {}", objectName);
if (copyFromAccount == null) {
copyFromAccount = account;
}
if (copyFrom != null) {
Pair<String, String> copy = validateCopyParam(copyFrom);
return copyObject(copy.getFirst(), copy.getSecond(), copyFromAccount, authToken,
container + "/" + objectName, account, contentType, contentEncoding, contentDisposition,
request);
}
if (!getBlobStore().containerExists(container)) {
return notFound();
}
HashCode contentMD5 = null;
if (eTag != null) {
try {
contentMD5 = HashCode.fromBytes(Base64.getDecoder().decode(eTag));
} catch (IllegalArgumentException iae) {
throw new ClientErrorException(422); // Unprocessable Entity
}
if (contentMD5.bits() != Hashing.md5().bits()) {
throw new ClientErrorException(422); // Unprocessable Entity
}
}
try (InputStream is = request.getInputStream()) {
BlobBuilder.PayloadBlobBuilder builder = getBlobStore().blobBuilder(objectName)
.userMetadata(getUserMetadata(request))
.payload(is)
.contentType(contentType(contentType));
if (contentLengthParam != null) {
builder.contentLength(contentLength);
}
if (contentMD5 != null) {
builder.contentMD5(contentMD5);
}
try {
String remoteETag = getBlobStore().putBlob(container, builder.build());
return Response.status(Response.Status.CREATED).header(HttpHeaders.ETAG, remoteETag)
.header(HttpHeaders.CONTENT_LENGTH, 0)
.header(HttpHeaders.CONTENT_TYPE, contentType)
.header(HttpHeaders.DATE, new Date()).build();
} catch (ContainerNotFoundException e) {
return notFound();
}
} catch (IOException e) {
return Response.status(Response.Status.INTERNAL_SERVER_ERROR).build();
}
}
@HEAD
public Response headObject(@NotNull @PathParam("container") String container,
@NotNull @Encoded @PathParam("object") String objectName,
@NotNull @PathParam("account") String account,
@HeaderParam("X-Auth-Token") String authToken) {
if (objectName.length() > MAX_OBJECT_NAME_LENGTH) {
return badRequest();
}
BlobMetadata meta = getBlobStore().blobMetadata(container, objectName);
if (meta == null) {
return Response.status(Response.Status.NOT_FOUND).build();
}
return addObjectHeaders(meta, Response.ok()).build();
}
@DELETE
public Response deleteObject(@NotNull @PathParam("account") String account,
@NotNull @PathParam("container") String container,
@NotNull @Encoded @PathParam("object") String objectName,
@QueryParam("multipart-manifest") String multipartManifest,
@HeaderParam("X-Auth-Token") String authToken) {
if (objectName.length() > MAX_OBJECT_NAME_LENGTH) {
return badRequest();
}
BlobStore store = getBlobStore();
if (!store.containerExists(container)) {
return Response.status(Response.Status.NOT_FOUND).build();
}
BlobMetadata meta = store.blobMetadata(container, objectName);
if (meta == null) {
return Response.status(Response.Status.NOT_FOUND).build();
}
store.removeBlob(container, objectName);
return Response.noContent()
.type(meta.getContentMetadata().getContentType())
.build();
}
private Response.ResponseBuilder addObjectHeaders(BlobMetadata metaData, Response.ResponseBuilder responseBuilder) {
metaData.getUserMetadata().entrySet()
.forEach(entry -> responseBuilder.header(META_HEADER_PREFIX + entry.getKey(), entry.getValue()));
return responseBuilder.header(HttpHeaders.CONTENT_LENGTH, metaData.getContentMetadata().getContentLength())
.header(HttpHeaders.LAST_MODIFIED, metaData.getLastModified())
.header(HttpHeaders.ETAG, metaData.getETag())
.header("X-Static-Large-Object", false)
.header(HttpHeaders.DATE, new Date())
.header(HttpHeaders.CONTENT_TYPE, metaData.getContentMetadata().getContentType());
}
}
|
package com.cc4102.stringDict;
import java.util.ArrayList;
import com.cc4102.stringDict.linearProbing.Par;
/**
* @author Lucas Puebla Silva
*
*/
public class LinearProbingHashingTree implements StringDictionary {
private int[] hashLength;
private int[] hashOccupation;
private int[] maxOccupation;
private Par[][] hashTable;
/**
*
* Constructor of class LinearProbingHashingTree.
*
* <p>
* This object uses a Par[] as its elements. Every Par contains a String key and an
* {@code ArrayList<Integer>} of values, which correspond to its occurrences.
* </p>
*
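* <p>
* Illustrative usage (indices 0 and 1 select one of the two internal texts):
* </p>
*
* <pre>{@code
* LinearProbingHashingTree dict = new LinearProbingHashingTree(16);
* dict.insert("hello", 0, 0);  // "hello" occurs at position 0 of text 0
* dict.insert("hello", 42, 0); // second occurrence of "hello" in text 0
* dict.contains("hello", 0);   // true
* dict.search("hello", 0);     // [0, 42]
* dict.count("hello", 0);      // 2
* }</pre>
*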
* @param hl the initial hash table length; values lower than 8 are raised to 8
*/
public LinearProbingHashingTree(int hl) {
hashLength = new int[2];
hashOccupation = new int[2];
maxOccupation = new int[2];
hashTable = new Par[2][hl];
for (int i = 0; i < 2; i++) {
hashLength[i] = hl < 8 ? 8 : hl;
hashOccupation[i] = 0;
maxOccupation[i] = hashLength[i] > 0 ? (int) (hashLength[i] * 0.4) : 1;
hashTable[i] = new Par[hashLength[i]];
}
}
/**
* Wrapper for the StringDictionary method getLength().
* <p>
* Uses the getHashLength() method to return the above.
* </p>
*
* @param text index of the text (0 or 1)
* @return the current hash table length of the given text
*/
public int getLength(int text) {
return this.getHashLength(text);
}
/**
* Gives the current hashLength.
*
* @param text index of the text (0 or 1)
* @return the current hashLength of the given text
*/
private int getHashLength(int text) {
return hashLength[text];
}
/**
* Wrapper for StringDictionary method getRoot.
* <p>
* Uses the getHashTable() method to return the above.
* </p>
*
* @param text index of the text (0 or 1)
* @return the hashTable of the given text, which acts as the root
*/
public Object getRoot(int text) {
return this.getHashTable(text);
}
/**
* Returns the hashTable, which is the root.
*
* @param text index of the text (0 or 1)
* @return the hashTable of the given text
*/
public Par[] getHashTable(int text) {
return hashTable[text];
}
/**
* Checks whether elem is contained within the hashTable.
*
* @param elem String corresponding to the key.
* @param text index of the text (0 or 1)
* @return true if elem is present in the hashTable, false otherwise
*/
public boolean contains(String elem, int text) {
boolean contains = false;
int hash = this.getHash(elem, text);
int offset = 0;
while (offset < hashLength[text]) {
if (hashTable[text][(hash + offset) % hashLength[text]] == null) {
offset++;
break;
}
if (elem.equals(hashTable[text][(hash + offset) % hashLength[text]].getKey())) {
contains = true;
break;
}
offset++;
}
return contains;
}
/**
* Only for testing purposes!
*
* <p>
* Counts how many probes it took to find the key elem in the hashTable.
* </p>
*
* @param elem String corresponding to the key.
* @return the probe offset at which elem was found, or the hashLength if it was not found
*/
public int searchCount(String elem, int text) {
int hash = this.getHash(elem, text);
int offset = 0;
while (offset < hashLength[text]) {
if (hashTable[text][(hash + offset) % hashLength[text]] == null) {
offset++;
continue;
}
if (elem.equals(hashTable[text][(hash + offset) % hashLength[text]].getKey())) {
break;
}
offset++;
}
return offset;
}
/**
* Inserts a Par element into the hashTable.
* <p>
* This is only used when rehashing.
* </p>
*
* @param elem corresponds to a Par object to be inserted.
*/
private void insert(Par elem, int text) {
if (hashOccupation[text] == maxOccupation[text] - 1) {
this.rehash(text);
}
String key = elem.getKey();
ArrayList<Integer> values = elem.getValues();
int hash = getHash(key, text);
int offset = 0;
while (hashTable[text][(hash + offset) % hashLength[text]] != null
&& offset < hashLength[text]) {
offset++;
}
hashTable[text][(hash + offset) % hashLength[text]] = new Par(key, values);
hashOccupation[text]++;
}
/**
* Inserts a word and its occurrence position into the hashTable.
*
* <p>
* The element inserted is a Par object. If the word already exists, then it adds the occurrence
* position to Par.values.
* </p>
*
* @param word is the key
* @param pos is the position of the occurrence of the key
*/
public void insert(String word, int pos, int text) {
if (hashOccupation[text] == maxOccupation[text] - 1) {
this.rehash(text);
}
int hash = getHash(word, text);
int offset = 0;
boolean repeated = false;
while (hashTable[text][(hash + offset) % hashLength[text]] != null
&& offset < hashLength[text]) {
if ((hashTable[text][(hash + offset) % hashLength[text]].getKey()).equals(word)) {
repeated = true;
break;
} else {
offset++;
}
}
if (repeated) {
hashTable[text][(hash + offset) % hashLength[text]].addVal(pos);
} else {
hashTable[text][(hash + offset) % hashLength[text]] = new Par(word, pos);
hashOccupation[text]++;
}
}
/**
* Extracts the non-null elements from the hashTable.
* <p>
* Used only during the rehashing process.
* </p>
*
* @return an array containing the non-null Par elements of the hashTable
*/
private Par[] extract(int text) {
Par[] tmp = new Par[hashOccupation[text] + 1];
int i = 0;
for (Par elem : hashTable[text]) {
if (elem != null)
tmp[i++] = elem;
}
return tmp;
}
/**
* Updates the instance variables when rehashing.
* <p>
* The rehashing process doubles the hashTable's length.
* </p>
*/
private void update(int text) {
hashLength[text] = 2 * hashLength[text];
maxOccupation[text] = (int) (hashLength[text] * 0.4);
hashOccupation[text] = 0;
hashTable[text] = new Par[hashLength[text]];
}
/**
* Rehashing process, reinserts all the non-null elements into the new hashTable.
* <p>
* In order to preserve the O(1) time, it is necessary to insert with the new hashLength
* modularity.
* </p>
*/
private void rehash(int text) {
Par[] tmp = this.extract(text);
this.update(text);
for (Par elem : tmp) {
if (elem != null)
this.insert(elem, text);
}
}
/**
* For testing purposes only!
*
* <p>
* Inserts the Par(elem, pos) at the last position of the hashTable. This allows checking the
* wrap-around (circular) behaviour of the insertion.
* </p>
*
* @param elem is the key
* @param pos is the position of the occurrence of the key
*/
public void insertAtEnd(String elem, int pos, int text) {
if (hashOccupation[text] >= maxOccupation[text]) {
this.rehash(text);
}
int hash = hashLength[text] - 1;
int offset = 0;
while (hashTable[text][(hash + offset) % hashLength[text]] != null
&& offset < hashLength[text]) {
offset++;
}
hashTable[text][(hash + offset) % hashLength[text]] = new Par(elem, pos);
hashOccupation[text]++;
}
/**
* Hashing function.
*
* @param elem is the key
* @return the hash value associated to the key
*/
// This might be a very poor hash function!
// Without Math.abs it can return negative values, because the polynomial hash may overflow to a
// negative int.
// TODO
private int getHash(String elem, int text) {
int hash = 7;
for (int i = 0; i < elem.length(); i++) {
hash = hash * 31 + elem.charAt(i);
}
return Math.abs(hash % hashLength[text]);
}
/**
* For testing purposes only!
*
* @param elem is the key
* @return the hash value associated to the key
*/
public int getHashNum(String elem, int text) {
return this.getHash(elem, text);
}
/**
* Gets the current occupied size of the hashTable, i.e. how many elements have been inserted so far.
*
* @return the amount of elements inserted so far
*/
public int getSize(int text) {
return hashOccupation[text];
}
/**
* Gives the maximum occupation allowed in order to preserve an occupation rate of 40% or less.
*
* @return 40% of the current hashLength
*/
public int getMaxOccupation(int text) {
return maxOccupation[text];
}
/**
* Gives the list of positions at which the key occurs.
*
* @param key is the key
* @return the occurrence positions, or an empty {@code ArrayList<Integer>} if the word does not exist
*/
public ArrayList<Integer> search(String key, int text) {
ArrayList<Integer> values = new ArrayList<Integer>();
Par tmp;
int hash = this.getHash(key, text);
int offset = 0;
while (offset < hashLength[text]) {
tmp = hashTable[text][(hash + offset) % hashLength[text]];
if (tmp == null) {
break;
}
if (key.equals(tmp.getKey())) {
values = tmp.getValues();
break;
}
offset++;
}
return values;
}
/**
* Returns all keys currently stored in the hashTable of the given text.
*/
public String[] getKeys(int text) {
String[] tmp = new String[hashOccupation[text]];
int i = 0;
for (Par elem : hashTable[text]) {
if (elem != null) {
tmp[i++] = elem.getKey();
}
}
return tmp;
}
/**
* Counts the number of occurrences of the given key in the given text.
*/
public int count(String key, int text) {
int res = 0;
ArrayList<Integer> tmp = this.search(key, text);
if (tmp != null) {
res = tmp.size();
}
return res;
}
public String getClassStr() {
return "LinearProbingHashingTree";
}
public double getSimilarity() {
return 0; // TODO
}
public String[] getKeys() {
return null; // TODO
}
}
|
package com.conveyal.analyst.server.utils;
import com.vividsolutions.jts.geom.Envelope;
import com.vividsolutions.jts.geom.Geometry;
import com.vividsolutions.jts.index.SpatialIndex;
import com.vividsolutions.jts.index.strtree.STRtree;
import models.Query;
import models.Shapefile;
import models.Shapefile.ShapeFeature;
import org.mapdb.Fun.Tuple3;
import org.opentripplanner.analyst.ResultSet;
import org.opentripplanner.analyst.cluster.ResultEnvelope;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.awt.*;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.WeakHashMap;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicInteger;
public class QueryResults {
private static final Logger LOG = LoggerFactory.getLogger(QueryResults.class);
public static Map<String, QueryResults> queryResultsCache = new WeakHashMap<>();
/**
* Keep track of IDs.
* An AtomicInteger is used rather than synchronizing on a boxed Integer: synchronizing on a field
* that is reassigned by nextId++ would let different threads lock on different objects.
*/
private static final AtomicInteger nextId = new AtomicInteger(0);
/*
* Min and max values for this result. Set to null initially so that there is no confusion
* as to whether they've been set.
*/
public Double minValue = null;
public Double maxValue = null;
/** The maximum possible accessibility value an item could take, if everything was accessible. */
public double maxPossible;
/**
* Number of classes to display on the map.
*/
public static final int nClasses = 6;
/**
* Which shapefile do the geometries of this QueryResults come from?
*/
public String shapeFileId;
/**
* What attribute of that shapefile are we using?
*/
public String attributeId;
public ConcurrentHashMap<String, QueryResultItem> items = new ConcurrentHashMap<String, QueryResultItem>();
public ConcurrentHashMap<Integer, QueryResults> subtracted = new ConcurrentHashMap<Integer, QueryResults>();
public ConcurrentHashMap<Tuple3<String, String, String>, QueryResults> aggregated =
new ConcurrentHashMap<Tuple3<String, String, String>, QueryResults>();
public Classifier classifier;
/** Cache the spatial index */
private transient SpatialIndex spIdx = null;
/** Kept transient so it is never serialized; IDs are reset every time the server is restarted. */
private transient int id;
/** Is this the point estimate, lower bound, etc.? */
private ResultEnvelope.Which which;
public QueryResults() {
}
public QueryResults(Query q, Integer timeLimit, ResultEnvelope.Which which, String attributeId) {
Shapefile origin = Shapefile.getShapefile(q.originShapefileId);
Shapefile dest = Shapefile.getShapefile(q.destinationShapefileId);
this.which = which;
double value;
for (Iterator<ResultSet> it = q.getResults().getAll(dest.categoryId + "." + attributeId, which); it.hasNext();) {
ResultSet feature = it.next();
value = (double) feature.sum(timeLimit, dest.categoryId + "." + attributeId);
if(maxValue == null || value > maxValue)
maxValue = value;
if(minValue == null || minValue > value)
minValue = value;
QueryResultItem item = new QueryResultItem();
item.value = value;
item.feature = origin.getShapeFeatureStore().getById(feature.id);
items.put(feature.id, item);
}
shapeFileId = origin.id;
this.attributeId = attributeId;
this.maxPossible = dest.attributes.get(attributeId).sum;
// assign a unique ID
id = nextId.getAndIncrement();
//linearClassifier = new LinearClassifier(values, new Color(0.5f, 0.5f, 1.0f, 0.5f), new Color(0.0f, 0.0f, 1.0f, 0.5f));
classifier = new NaturalBreaksClassifier(this, nClasses, new Color(1.0f, 1.0f, 1.0f, 0.25f), new Color(0.0f, 0.0f, 1.0f, 0.5f));
}
/*public Color getColorById(String id) {
Color c = linearClassifier.getColorValue(items.get(id).value);
if(c == null)
return new Color(0.8f, 0.8f, 0.8f, 0.5f);
else
return c;
}*/
/**
* Aggregate these QueryResults into the units specified by the shapefile aggregateTo.
* Weight by the point set weightBy. For example, suppose you've calculated block-level
* statistics for a large state, and now you want to aggregate them up to the county level.
* It doesn't make sense to simply average the accessibility of every block in that county;
* that would commit the Modifiable Areal Unit Problem. Suppose that there is one block where
* 300 people live that is highly accessible, and another where only 10 people live that is
* highly inaccessible. You want to weight by the people to get an accurate picture of the
* accessibility for the average resident in that county.
*
* This used to be performed by the functions normalizeBy and groupBy. The output of this function
* is similar, but not identical, to the combination of those functions. The difference is because,
* when weighting, normalizeBy always found the weight of a polygon by whether it contained the centroid
* of another. In the case where both the original query and the weights run against the same shapefile,
* this means that areas which don't contain their own centroids (not terribly uncommon) didn't get mapped
* to themselves. In this implementation, if the two come from the same shapefile, the matching for
* the weighting is done by feature ID.
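*
* Worked example (illustrative numbers, not from the original source): a county containing one
* block with accessibility 1000 and population weight 300 and another block with accessibility
* 100 and weight 10 gets the weighted value (1000*300 + 100*10) / (300 + 10), roughly 971,
* rather than the unweighted mean of 550, which would misrepresent the typical resident.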
*/
public QueryResults aggregate (Shapefile aggregateTo, Shapefile weightBy, String weightByAttribute) {
// see if we've already performed this aggregation; if so, return it from the cache
synchronized (aggregated) {
Tuple3<String, String, String> key = new Tuple3<String, String, String> (aggregateTo.id, weightBy.id, weightByAttribute);
if (aggregated.containsKey(key))
return aggregated.get(key);
// Do the features come from the same shapefile?
boolean sameShapefile = shapeFileId.equals(weightBy.id);
// if they don't come from the same shapefile, create the weights pycnophylactically
// we'll need a spatial index for that
SpatialIndex weightIdx = null;
DataStore<ShapeFeature> weightStore = null;
if (!sameShapefile) {
// get the spatial index
weightIdx = weightBy.getSpatialIndex();
}
else {
// just use the id -> feature mapping directly
weightStore = weightBy.getShapeFeatureStore();
}
// build a spatial index for the features of this queryresult
SpatialIndex spIdx = getSpatialIndex();
QueryResults out = new QueryResults();
// this does not actually load all the features into memory; this is a MapDB, and
// DataStore is delegating to MapDB's map values() function, which returns a disk-backed
// collection
for (final ShapeFeature aggregateFeature : aggregateTo.getShapeFeatureStore().getAll()) {
// TODO: this should be moved into Akka actors and parallelized
// TODO: ensure STRtree is threadsafe. There is some debate on this point.
Envelope env = aggregateFeature.geom.getEnvelopeInternal();
// find all of the items that could overlap this geometry
List<QueryResultItem> potentialMatches = spIdx.query(env);
// this is the weighted value of all of the original geographies within this
// aggregate geography
double weightedVal = 0.0;
// This is the sum of the weights of all of the original geographies within this
// aggregate geography
double sumOfWeights = 0.0;
for (QueryResultItem match : potentialMatches) {
// clean the geometry
Geometry matchGeom = match.feature.geom;
// calculate the weight of this geography in the aggregate geography
double weight;
if (sameShapefile) {
weight = weightStore.getById(match.feature.id).getAttribute(weightByAttribute);
}
else {
weight = 0;
// query the spatial index
List<ShapeFeature> potentialWeights =
weightIdx.query(matchGeom.getEnvelopeInternal());
for (ShapeFeature weightFeature : potentialWeights) {
// calculate the weight of the entire item geometry that we are weighting by
Geometry weightGeom = weightFeature.geom;
double totalWeight = weightFeature.getAttribute(weightByAttribute);
// figure out how much of this weight should be assigned to the original geometry
double weightArea = GeoUtils.getArea(weightGeom);
// don't divide by zeroish
if (weightArea < 0.0000000001)
continue;
Geometry overlap = weightGeom.intersection(matchGeom);
if (overlap.isEmpty())
continue;
double overlapArea = GeoUtils.getArea(overlap);
weight += totalWeight * (overlapArea / weightArea);
}
}
// this aggregate geography may not completely contain the original geography.
// discount weight to account for that.
double matchArea = GeoUtils.getArea(matchGeom);
if (matchArea < 0.0000000001)
continue;
Geometry overlap = matchGeom.intersection(aggregateFeature.geom);
if (overlap.isEmpty())
continue;
weight *= GeoUtils.getArea(overlap) / matchArea;
weightedVal += match.value * weight;
sumOfWeights += weight;
}
// add this feature to the new query result
QueryResultItem item = new QueryResultItem();
// don't divide by zero
item.value = sumOfWeights > 0.0000001 ? weightedVal / sumOfWeights : 0;
item.feature = aggregateFeature;
out.items.put(aggregateFeature.id, item);
out.shapeFileId = aggregateTo.id;
if (out.maxValue == null || item.value > out.maxValue)
out.maxValue = item.value;
if (out.minValue == null || out.minValue > item.value)
out.minValue = item.value;
}
// we preserve the maxPossible from the original. It does not change under aggregation.
out.maxPossible = this.maxPossible;
if (this.classifier instanceof BimodalNaturalBreaksClassifier && this.minValue < 0 && this.maxValue > 0)
out.classifier = new BimodalNaturalBreaksClassifier(out, nClasses, 0d,
new Color(.9f, .9f, .1f, .5f), new Color(.5f, .5f, .5f, .5f), new Color(0f, 0f, 1f, .5f));
else
out.classifier = new NaturalBreaksClassifier(out, nClasses, new Color(1.0f, 1.0f, 1.0f, 0.5f), new Color(0.0f, 0.0f, 1.0f, 0.5f));
out.shapeFileId = aggregateTo.id;
aggregated.put(key, out);
// TODO: set attribute ID.
return out;
}
}
/**
* Subtract the other queryresults from this one, and return the query results.
* The other queryresults must have come from or been aggregated to the same shapefile.
*/
public QueryResults subtract(QueryResults otherQr) {
synchronized (subtracted) {
if (subtracted.containsKey(otherQr.id))
return subtracted.get(otherQr.id);
// TODO: check that indicator is same also
if (!shapeFileId.equals(otherQr.shapeFileId) || !attributeId.equals(otherQr.attributeId)) {
throw new IllegalArgumentException("Query results in difference operation do not come from same attribute of same shapefile!");
}
QueryResults ret = new QueryResults();
ret.shapeFileId = this.shapeFileId;
for (String id : items.keySet()) {
QueryResultItem item1 = this.items.get(id);
QueryResultItem item2 = otherQr.items.get(id);
if (item2 == null)
// if it's unreachable in either leave it out of the difference
continue;
QueryResultItem newItem = new QueryResultItem();
newItem.feature = item1.feature;
newItem.value = item1.value - item2.value;
if (ret.maxValue == null || newItem.value > ret.maxValue)
ret.maxValue = newItem.value;
if (ret.minValue == null || ret.minValue > newItem.value)
ret.minValue = newItem.value;
ret.items.put(id, newItem);
}
// we preserve the maxPossible from the original. This is because we want to represent percentages as
// a percentage of total possible still, not a percent change.
ret.maxPossible = this.maxPossible;
if (ret.minValue < 0 && ret.maxValue > 0)
ret.classifier = new BimodalNaturalBreaksClassifier(ret, nClasses, 0d,
new Color(.9f, .9f, .1f, .5f), new Color(.5f, .5f, .5f, .5f), new Color(0f, 0f, 1f, .5f));
else
ret.classifier = new NaturalBreaksClassifier(ret, nClasses, new Color(1.0f, 1.0f, 1.0f, 0.5f), new Color(0.0f, 0.0f, 1.0f, 0.5f));
subtracted.put(otherQr.id, ret);
return ret;
}
}
/** Get a spatial index for the items of this queryresults */
public SpatialIndex getSpatialIndex () {
return getSpatialIndex(false);
}
/**
* Get a spatial index for the items of this queryresults.
* @param forceRebuild force the spatial index to be rebuilt.
* Should be set to true if the items of the result have changed in number or
* geography (if they've changed in value there is no need to rebuild).
*/
public SpatialIndex getSpatialIndex (boolean forceRebuild) {
if (forceRebuild || spIdx == null) {
// we can't build an STRtree with only one node, so we make sure we make a minimum of
// two nodes even if we leave one empty
spIdx = new STRtree(Math.max(items.size(), 2));
for (QueryResultItem i : this.items.values()) {
spIdx.insert(i.feature.geom.getEnvelopeInternal(), i);
}
}
return spIdx;
}
public static class QueryResultItem {
public ShapeFeature feature;
public Double value = 0.0;
public Double normalizedTotal = 0.0;
public Double original = 0.0;
}
}
|
package com.crawljax.core.configuration;
import java.io.File;
import org.apache.commons.configuration.ConfigurationException;
import org.apache.commons.configuration.PropertiesConfiguration;
/**
* This class is used to create a CrawljaxConfiguration object configured with settings from a file.
*
* @author Frank Groeneveld
* @version $Id$
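*
* Illustrative usage (the file name is hypothetical; ConfigurationException handling omitted):
*
* <pre>{@code
* PropertiesFile propertiesFile = new PropertiesFile("crawljax.properties");
* CrawljaxConfiguration config = propertiesFile.getConfiguration();
* }</pre>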
*/
public class PropertiesFile {
private CrawljaxConfiguration config;
private static String projectRelativePath = "project.path.relative";
private static String outputFolderName = "output.path";
private static String genFilepath = "generated.pages.filepath";
private static String siteUrl = "site.url";
private static String crawlDepth = "crawl.depth";
private static String crawlMaxStates = "crawl.max.states";
private static String crawlMaxTime = "crawl.max.runtime";
private static String crawlThreshold = "crawl.threshold";
// TODO danny, is this used?
private static String robotEvents = "robot.events";
private static String crawlTags = "crawl.tags";
private static String crawlExcludeTags = "crawl.tags.exclude";
private static String crawlFilterAttributes = "crawl.filter.attributes";
private static String hibernateProperties = "hibernate.properties";
// TODO danny, is this used?
private static String crawlManualEnterForm = "crawl.forms.manual";
private static String crawlFormRandomInput = "crawl.forms.randominput";
private static String formProperties = "forms.properties";
private static String browser = "browser";
private static String crawlWaitReload = "crawl.wait.reload";
private static String crawlWaitEvent = "crawl.wait.event";
private static String hibernateSchema = "hibernate.hbm2ddl.auto";
private static String useDatabase = "database.use";
// whether each candidate clickable should be clicked only once
private static String clickOnce = "click.once";
// TODO danny, can these be removed?
private static String debugVariables = "reportbuilder.debugvariables";
private static String detectEventHandlers = "eventHandlers.detect";
private static String supportDomEvents = "eventHandlers.supportDomEvents";
private static String supportAddEvents = "eventHandlers.supportAddEvents";
private static String supportJQuery = "eventHandlers.supportJQuery";
private static String genFilepathValue = "target/generated-sources/";
private static String proxyEnabled = "proxy.enabled";
/**
* default is Firefox.
*/
private static String browserValue = "firefox";
/**
* Initialize and read out properties from filename.
*
* @param filename
* Name of the properties file.
* @throws ConfigurationException
* On errors in properties file.
*/
public PropertiesFile(String filename) throws ConfigurationException {
config = new CrawljaxConfiguration();
File f = new File(filename);
if (!f.exists()) {
throw new ConfigurationException("Configuration file not found: " + filename);
}
read(new PropertiesConfiguration(filename));
}
/**
* Read properties from the config file and set them in the config object.
*
* @param file
* The properties file.
*/
private void read(PropertiesConfiguration file) {
if (file.containsKey(outputFolderName)) {
config.setOutputFolder(file.getString(outputFolderName));
}
config.setProjectRelativePath(file.getString(projectRelativePath));
config.setCrawlSpecification(getCrawlSpecification(file));
if (file.containsKey(proxyEnabled) && file.getBoolean(proxyEnabled)) {
config.setProxyConfiguration(new ProxyConfiguration());
}
}
/**
* @param file
* Properties file.
* @return The CrawlSpecification constructed from the file.
*/
private CrawlSpecification getCrawlSpecification(PropertiesConfiguration file) {
CrawlSpecification crawler = new CrawlSpecification(file.getString(siteUrl));
/*
* TODO: use getBoolean. we use getInt for backward compatibility. in the future we can use
* file.getBoolean
*/
crawler.setClickOnce(file.getInt(clickOnce) == 1);
crawler.setDepth(file.getInt(crawlDepth));
crawler.setMaximumStates(file.getInt(crawlMaxStates));
crawler.setMaximumRuntime(file.getInt(crawlMaxTime));
crawler.setWaitTimeAfterEvent(file.getInt(crawlWaitEvent));
crawler.setWaitTimeAfterReloadUrl(file.getInt(crawlWaitReload));
crawler.setRandomInputInForms(file.getInt(crawlFormRandomInput) == 1);
return crawler;
}
/**
* @return The configuration object that represents the file contents.
*/
public CrawljaxConfiguration getConfiguration() {
return config;
}
}
|
package com.ctrip.zeus.service.build.conf;
import com.ctrip.zeus.model.entity.Group;
import com.ctrip.zeus.model.entity.GroupSlb;
import com.ctrip.zeus.model.entity.Slb;
import com.ctrip.zeus.model.entity.VirtualServer;
import com.ctrip.zeus.service.model.PathRewriteParser;
import com.ctrip.zeus.util.AssertUtils;
import com.netflix.config.DynamicPropertyFactory;
import com.netflix.config.DynamicStringProperty;
import java.util.List;
public class LocationConf {
private static DynamicStringProperty whiteList = DynamicPropertyFactory.getInstance().getStringProperty("bastion.white.list", null);
public static String generate(Slb slb, VirtualServer vs, Group group, String upstreamName)throws Exception {
StringBuilder b = new StringBuilder(1024);
b.append("location ").append(getPath(slb, vs, group)).append(" {\n");
b.append("proxy_set_header Host $host").append(";\n");
b.append("proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;\n");
b.append("proxy_set_header X-Real-IP $remote_addr;");
b.append("set $upstream ").append(upstreamName).append(";\n");
addBastionCommand(b,upstreamName);
// rewrite must come after "set $upstream"
addRewriteCommand(b,slb,vs,group);
if (group.getSsl())
{
b.append("proxy_pass https://$upstream ;\n");
}else {
b.append("proxy_pass http://$upstream ;\n");
}
b.append("}").append("\n");
return b.toString();
}
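/*
* For reference, generate() produces a location block roughly of the form below. The path "/foo"
* and upstream name "backend_1" are hypothetical; the if-blocks emitted by addBastionCommand and
* any rewrite directives are omitted for brevity, and proxy_pass uses https when group.getSsl()
* is true:
*
* location /foo {
* proxy_set_header Host $host;
* proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
* proxy_set_header X-Real-IP $remote_addr;
* set $upstream backend_1;
* proxy_pass http://$upstream ;
* }
*/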
private static String getPath(Slb slb, VirtualServer vs, Group group) throws Exception{
String res=null;
for (GroupSlb groupSlb : group.getGroupSlbs()) {
if (slb.getId().equals(groupSlb.getSlbId()) && vs.getId().equals(groupSlb.getVirtualServer().getId())) {
res= groupSlb.getPath();
}
}
AssertUtils.assertNotNull(res, "Location path is null. Please check your configuration of SlbName:[" + slb.getName() + "] VirtualServer :[" + vs.getId() + "]");
return res;
}
private static String getRewrite(Slb slb, VirtualServer vs, Group group) throws Exception{
String res=null;
for (GroupSlb groupSlb : group.getGroupSlbs()) {
if (slb.getId().equals(groupSlb.getSlbId()) && vs.getId().equals(groupSlb.getVirtualServer().getId())) {
res= groupSlb.getRewrite();
}
}
return res;
}
private static void addRewriteCommand(StringBuilder sb, Slb slb , VirtualServer vs , Group group) throws Exception {
if (sb != null){
String rewrite = getRewrite(slb,vs,group);
if (rewrite==null || rewrite.isEmpty() || !rewrite.contains(" ")){
return;
}
List<String> rewriteList = PathRewriteParser.getValues(rewrite);
for (String tmp : rewriteList)
{
sb.append("rewrite ").append(tmp).append(" break;\n");
}
// String[] rewrites = rewrite.split(";");
// for (int i = 0 ; i < rewrites.length ; i ++)
// sb.append("rewrite ").append(rewrites[i]).append(" break;\n");
}
}
private static void addBastionCommand(StringBuilder sb,String upstreamName){
sb.append("if ( $cookie_bastion != \"\" )")
.append("{\nset $upstream $cookie_bastion;\n}\n")
.append("if ( $upstream = \"\")")
.append("{\nset $upstream ").append(upstreamName).append(";\n}\n");
String wl = whiteList.get();
if (null == wl || wl.isEmpty() || wl.trim().equals("") || wl.contains("\""))
{
wl="denyAll";
}else if (wl.equals("allowAll")){
wl="";
}
sb.append("if ( $remote_addr !~* \"").append(wl).append("\")")
.append("{\nset $upstream ").append(upstreamName).append(";\n}\n");
sb.append("if ( $upstream != ").append(upstreamName).append(" ){\n")
.append("add_header Bastion $cookie_bastion;\n}\n");
}
}
|