lang
stringclasses 1
value | license
stringclasses 13
values | stderr
stringlengths 0
350
| commit
stringlengths 40
40
| returncode
int64 0
128
| repos
stringlengths 7
45.1k
| new_contents
stringlengths 0
1.87M
| new_file
stringlengths 6
292
| old_contents
stringlengths 0
1.87M
| message
stringlengths 6
9.26k
| old_file
stringlengths 6
292
| subject
stringlengths 0
4.45k
|
|---|---|---|---|---|---|---|---|---|---|---|---|
Java
|
apache-2.0
|
b4fd946ecfa0fe4760b376d98c2092c85713ce50
| 0
|
shs96c/buck,justinmuller/buck,LegNeato/buck,LegNeato/buck,darkforestzero/buck,shs96c/buck,davido/buck,brettwooldridge/buck,Addepar/buck,brettwooldridge/buck,marcinkwiatkowski/buck,nguyentruongtho/buck,Addepar/buck,ilya-klyuchnikov/buck,nguyentruongtho/buck,romanoid/buck,LegNeato/buck,darkforestzero/buck,nguyentruongtho/buck,facebook/buck,zhan-xiong/buck,dsyang/buck,rmaz/buck,davido/buck,justinmuller/buck,Addepar/buck,zhan-xiong/buck,clonetwin26/buck,justinmuller/buck,Addepar/buck,davido/buck,darkforestzero/buck,shybovycha/buck,robbertvanginkel/buck,SeleniumHQ/buck,shs96c/buck,shs96c/buck,LegNeato/buck,marcinkwiatkowski/buck,rmaz/buck,clonetwin26/buck,justinmuller/buck,clonetwin26/buck,justinmuller/buck,darkforestzero/buck,grumpyjames/buck,k21/buck,nguyentruongtho/buck,shybovycha/buck,brettwooldridge/buck,JoelMarcey/buck,brettwooldridge/buck,kageiit/buck,darkforestzero/buck,sdwilsh/buck,darkforestzero/buck,vschs007/buck,zhan-xiong/buck,romanoid/buck,dsyang/buck,dsyang/buck,dsyang/buck,romanoid/buck,dsyang/buck,dsyang/buck,zpao/buck,Addepar/buck,daedric/buck,shs96c/buck,k21/buck,zhan-xiong/buck,shybovycha/buck,grumpyjames/buck,sdwilsh/buck,romanoid/buck,daedric/buck,zhan-xiong/buck,zhan-xiong/buck,davido/buck,SeleniumHQ/buck,LegNeato/buck,JoelMarcey/buck,sdwilsh/buck,ilya-klyuchnikov/buck,SeleniumHQ/buck,zhan-xiong/buck,LegNeato/buck,darkforestzero/buck,ilya-klyuchnikov/buck,vschs007/buck,clonetwin26/buck,zpao/buck,brettwooldridge/buck,daedric/buck,romanoid/buck,davido/buck,robbertvanginkel/buck,davido/buck,shybovycha/buck,daedric/buck,JoelMarcey/buck,JoelMarcey/buck,JoelMarcey/buck,marcinkwiatkowski/buck,brettwooldridge/buck,daedric/buck,ilya-klyuchnikov/buck,marcinkwiatkowski/buck,nguyentruongtho/buck,SeleniumHQ/buck,dsyang/buck,vschs007/buck,davido/buck,daedric/buck,k21/buck,vschs007/buck,davido/buck,clonetwin26/buck,marcinkwiatkowski/buck,k21/buck,daedric/buck,ilya-klyuchnikov/buck,zhan-xiong/buck,daedric/buck,vschs007/buck,justinmuller/buck,vschs007/buck,ilya-kly
uchnikov/buck,grumpyjames/buck,JoelMarcey/buck,romanoid/buck,grumpyjames/buck,romanoid/buck,facebook/buck,darkforestzero/buck,vschs007/buck,zpao/buck,JoelMarcey/buck,nguyentruongtho/buck,clonetwin26/buck,darkforestzero/buck,Addepar/buck,nguyentruongtho/buck,clonetwin26/buck,kageiit/buck,vschs007/buck,robbertvanginkel/buck,shs96c/buck,facebook/buck,sdwilsh/buck,shybovycha/buck,shs96c/buck,facebook/buck,sdwilsh/buck,rmaz/buck,k21/buck,k21/buck,ilya-klyuchnikov/buck,ilya-klyuchnikov/buck,rmaz/buck,darkforestzero/buck,vschs007/buck,k21/buck,LegNeato/buck,LegNeato/buck,Addepar/buck,LegNeato/buck,Addepar/buck,darkforestzero/buck,grumpyjames/buck,shybovycha/buck,rmaz/buck,marcinkwiatkowski/buck,justinmuller/buck,vschs007/buck,justinmuller/buck,LegNeato/buck,ilya-klyuchnikov/buck,SeleniumHQ/buck,davido/buck,rmaz/buck,SeleniumHQ/buck,shs96c/buck,shybovycha/buck,davido/buck,zhan-xiong/buck,robbertvanginkel/buck,sdwilsh/buck,daedric/buck,justinmuller/buck,rmaz/buck,kageiit/buck,grumpyjames/buck,daedric/buck,SeleniumHQ/buck,JoelMarcey/buck,grumpyjames/buck,sdwilsh/buck,grumpyjames/buck,kageiit/buck,davido/buck,SeleniumHQ/buck,romanoid/buck,vschs007/buck,Addepar/buck,JoelMarcey/buck,kageiit/buck,dsyang/buck,Addepar/buck,justinmuller/buck,zpao/buck,marcinkwiatkowski/buck,daedric/buck,SeleniumHQ/buck,marcinkwiatkowski/buck,SeleniumHQ/buck,clonetwin26/buck,facebook/buck,sdwilsh/buck,kageiit/buck,sdwilsh/buck,ilya-klyuchnikov/buck,JoelMarcey/buck,JoelMarcey/buck,marcinkwiatkowski/buck,darkforestzero/buck,JoelMarcey/buck,zhan-xiong/buck,sdwilsh/buck,dsyang/buck,romanoid/buck,shybovycha/buck,brettwooldridge/buck,sdwilsh/buck,marcinkwiatkowski/buck,SeleniumHQ/buck,LegNeato/buck,robbertvanginkel/buck,SeleniumHQ/buck,zhan-xiong/buck,robbertvanginkel/buck,LegNeato/buck,grumpyjames/buck,grumpyjames/buck,ilya-klyuchnikov/buck,k21/buck,rmaz/buck,shybovycha/buck,robbertvanginkel/buck,brettwooldridge/buck,rmaz/buck,vschs007/buck,brettwooldridge/buck,justinmuller/buck,dsyang/buck,facebook/buck,
justinmuller/buck,marcinkwiatkowski/buck,shybovycha/buck,Addepar/buck,zpao/buck,brettwooldridge/buck,zhan-xiong/buck,romanoid/buck,clonetwin26/buck,sdwilsh/buck,shybovycha/buck,brettwooldridge/buck,dsyang/buck,rmaz/buck,robbertvanginkel/buck,daedric/buck,Addepar/buck,ilya-klyuchnikov/buck,romanoid/buck,k21/buck,vschs007/buck,justinmuller/buck,shybovycha/buck,shybovycha/buck,robbertvanginkel/buck,romanoid/buck,k21/buck,robbertvanginkel/buck,sdwilsh/buck,rmaz/buck,zpao/buck,romanoid/buck,rmaz/buck,rmaz/buck,davido/buck,davido/buck,facebook/buck,robbertvanginkel/buck,clonetwin26/buck,clonetwin26/buck,shs96c/buck,brettwooldridge/buck,shs96c/buck,marcinkwiatkowski/buck,marcinkwiatkowski/buck,kageiit/buck,darkforestzero/buck,shs96c/buck,shs96c/buck,JoelMarcey/buck,k21/buck,daedric/buck,zhan-xiong/buck,ilya-klyuchnikov/buck,Addepar/buck,robbertvanginkel/buck,dsyang/buck,shs96c/buck,clonetwin26/buck,clonetwin26/buck,zpao/buck,brettwooldridge/buck,grumpyjames/buck,LegNeato/buck,k21/buck,robbertvanginkel/buck,grumpyjames/buck,k21/buck,dsyang/buck,SeleniumHQ/buck
|
/*
* Copyright 2012-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.json;
import com.facebook.buck.bser.BserDeserializer;
import com.facebook.buck.bser.BserSerializer;
import com.facebook.buck.event.BuckEventBus;
import com.facebook.buck.event.ConsoleEvent;
import com.facebook.buck.event.PerfEventId;
import com.facebook.buck.event.SimplePerfEvent;
import com.facebook.buck.io.WatchmanDiagnosticEvent;
import com.facebook.buck.io.PathOrGlobMatcher;
import com.facebook.buck.io.ProjectWatch;
import com.facebook.buck.io.WatchmanDiagnostic;
import com.facebook.buck.log.Logger;
import com.facebook.buck.rules.ConstructorArgMarshaller;
import com.facebook.buck.rules.Description;
import com.facebook.buck.util.InputStreamConsumer;
import com.facebook.buck.util.MoreCollectors;
import com.facebook.buck.util.MoreThrowables;
import com.facebook.buck.util.ProcessExecutor;
import com.facebook.buck.util.ProcessExecutorParams;
import com.facebook.buck.util.Threads;
import com.facebook.buck.util.concurrent.AssertScopeExclusiveAccess;
import com.facebook.buck.util.immutables.BuckStyleTuple;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import com.google.common.base.Strings;
import com.google.common.base.Supplier;
import com.google.common.base.Suppliers;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import org.immutables.value.Value;
import java.io.BufferedOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.FutureTask;
import javax.annotation.Nullable;
/**
* Delegates to buck.py for parsing of buck build files. Constructed on demand for the
* parsing phase and must be closed afterward to free up resources.
*/
public class ProjectBuildFileParser implements AutoCloseable {

  private static final Logger LOG = Logger.get(ProjectBuildFileParser.class);

  // Environment handed to the spawned buck.py subprocess (see init()).
  private final ImmutableMap<String, String> environment;

  // Lazily created buck.py program; guarded by "synchronized" in getPathToBuckPy()
  // and by "synchronized (this)" in close().
  @Nullable private BuckPythonProgram buckPythonProgram;

  // Memoized paths to temp files holding BSER-serialized config and ignore-path
  // data; the files are created on first access (see the constructor).
  private Supplier<Path> rawConfigJson;
  private Supplier<Path> ignorePathsJson;

  // Handles to the running buck.py subprocess; null until init() has run.
  @Nullable private ProcessExecutor.LaunchedProcess buckPyProcess;
  @Nullable private BufferedOutputStream buckPyStdinWriter;

  private final ProjectBuildFileParserOptions options;
  private final ConstructorArgMarshaller marshaller;
  private final BuckEventBus buckEventBus;
  private final ProcessExecutor processExecutor;

  // BSER codec used for the request/response exchange with buck.py.
  private final BserDeserializer bserDeserializer;
  private final BserSerializer bserSerializer;

  // Enforces that only one thread is inside getAllRulesInternal() at a time.
  private final AssertScopeExclusiveAccess assertSingleThreadedParsing;

  private final boolean ignoreBuckAutodepsFiles;

  // Lifecycle flags: initialization is lazy (initIfNeeded()); close() is idempotent.
  private boolean isInitialized;
  private boolean isClosed;

  // When true, "--profile" is added to the buck.py command line (see buildArgs()).
  private boolean enableProfiling;

  // Dedicated thread draining buck.py's stderr; started in init(), joined in close().
  @Nullable private FutureTask<Void> stderrConsumerTerminationFuture;
  @Nullable private Thread stderrConsumerThread;

  // Remembered so close() can post the matching Finished event.
  @Nullable private ProjectBuildFileParseEvents.Started projectBuildFileParseEventStarted;
  /**
   * Creates a parser that delegates to buck.py. No subprocess is started here; that
   * happens lazily in {@link #initIfNeeded()}.
   *
   * @param options parser configuration (interpreter path, watchman settings, etc.)
   * @param marshaller used when generating the buck.py program
   * @param environment environment variables for the buck.py subprocess
   * @param buckEventBus bus on which parse lifecycle and diagnostic events are posted
   * @param processExecutor used to launch and wait for the buck.py subprocess
   * @param ignoreBuckAutodepsFiles when true, passes --ignore_buck_autodeps_files
   */
  protected ProjectBuildFileParser(
      final ProjectBuildFileParserOptions options,
      final ConstructorArgMarshaller marshaller,
      ImmutableMap<String, String> environment,
      BuckEventBus buckEventBus,
      ProcessExecutor processExecutor,
      boolean ignoreBuckAutodepsFiles) {
    this.buckPythonProgram = null;
    this.options = options;
    this.marshaller = marshaller;
    this.environment = environment;
    this.buckEventBus = buckEventBus;
    this.processExecutor = processExecutor;
    this.bserDeserializer = new BserDeserializer(BserDeserializer.KeyOrdering.SORTED);
    this.bserSerializer = new BserSerializer();
    this.assertSingleThreadedParsing = new AssertScopeExclusiveAccess();
    this.ignoreBuckAutodepsFiles = ignoreBuckAutodepsFiles;
    // Write the raw config to a temp file only when it is first needed (buildArgs()),
    // and at most once thereafter.
    this.rawConfigJson =
        Suppliers.memoize(
            () -> {
              try {
                Path rawConfigJson1 = Files.createTempFile("raw_config", ".json");
                Files.createDirectories(rawConfigJson1.getParent());
                try (OutputStream output =
                    new BufferedOutputStream(Files.newOutputStream(rawConfigJson1))) {
                  bserSerializer.serializeToStream(options.getRawConfig(), output);
                }
                return rawConfigJson1;
              } catch (IOException e) {
                // Suppliers cannot throw checked exceptions; surface as unchecked.
                throw new RuntimeException(e);
              }
            });
    // Same lazy-temp-file pattern for the ignore-path list.
    this.ignorePathsJson =
        Suppliers.memoize(
            () -> {
              try {
                Path ignorePathsJson1 = Files.createTempFile("ignore_paths", ".json");
                Files.createDirectories(ignorePathsJson1.getParent());
                try (OutputStream output =
                    new BufferedOutputStream(Files.newOutputStream(ignorePathsJson1))) {
                  bserSerializer.serializeToStream(
                      options.getIgnorePaths().stream()
                          .map(PathOrGlobMatcher::getPathOrGlob)
                          .collect(MoreCollectors.toImmutableList()),
                      output);
                }
                return ignorePathsJson1;
              } catch (IOException e) {
                throw new RuntimeException(e);
              }
            });
  }
  /**
   * Turns buck.py profiling on or off. Must be called before the parser is initialized,
   * because the flag is baked into the buck.py command line (see buildArgs()).
   */
  public void setEnableProfiling(boolean enableProfiling) {
    ensureNotClosed();
    ensureNotInitialized();
    this.enableProfiling = enableProfiling;
  }
  /** Returns true once {@link #close()} has completed. Exposed for tests only. */
  @VisibleForTesting
  public boolean isClosed() {
    return isClosed;
  }
private void ensureNotClosed() {
Preconditions.checkState(!isClosed);
}
private void ensureNotInitialized() {
Preconditions.checkState(!isInitialized);
}
/**
* Initialization on demand moves around the performance impact of creating the Python
* interpreter to when parsing actually begins. This makes it easier to attribute this time
* to the actual parse phase.
*/
@VisibleForTesting
public void initIfNeeded() throws IOException {
ensureNotClosed();
if (!isInitialized) {
init();
isInitialized = true;
}
}
  /**
   * Initialize the parser, starting buck.py.
   *
   * Posts a ProjectBuildFileParseEvents.Started event (matched by a Finished event in
   * close()), launches the subprocess, and wires up a stderr-draining thread.
   */
  private void init() throws IOException {
    projectBuildFileParseEventStarted = new ProjectBuildFileParseEvents.Started();
    buckEventBus.post(projectBuildFileParseEventStarted);
    try (SimplePerfEvent.Scope scope = SimplePerfEvent.scope(
        buckEventBus,
        PerfEventId.of("ParserInit"))) {
      // Route through a HashMap first so a PYTHONPATH already present in `environment`
      // is overwritten rather than duplicated — presumably because ImmutableMap.Builder
      // would reject the duplicate key (TODO confirm).
      ImmutableMap<String, String> pythonEnvironment =
          new ImmutableMap.Builder<String, String>()
              .putAll(new HashMap<String, String>() {{
                putAll(environment);
                put("PYTHONPATH",
                    options.getPythonModuleSearchPath().orElse(""));
              }})
              .build();
      ProcessExecutorParams params = ProcessExecutorParams.builder()
          .setCommand(buildArgs())
          .setEnvironment(pythonEnvironment)
          .build();
      LOG.debug(
          "Starting buck.py command: %s environment: %s",
          params.getCommand(),
          params.getEnvironment());
      buckPyProcess = processExecutor.launchProcess(params);
      LOG.debug("Started process %s successfully", buckPyProcess);
      OutputStream stdin = buckPyProcess.getOutputStream();
      InputStream stderr = buckPyProcess.getErrorStream();
      // Anything buck.py writes to stderr is surfaced as a console warning; the
      // consumer runs on its own named thread and is joined in close().
      InputStreamConsumer stderrConsumer = new InputStreamConsumer(
          stderr,
          (InputStreamConsumer.Handler) line -> buckEventBus.post(
              ConsoleEvent.warning("Warning raised by BUCK file parser: %s", line)));
      stderrConsumerTerminationFuture = new FutureTask<>(stderrConsumer);
      stderrConsumerThread = Threads.namedThread(
          ProjectBuildFileParser.class.getSimpleName(),
          stderrConsumerTerminationFuture);
      stderrConsumerThread.start();
      buckPyStdinWriter = new BufferedOutputStream(stdin);
    }
  }
  /**
   * Builds the command line used to launch buck.py. Note that although several of the
   * temp files carry a .json suffix, the payload exchanged with buck.py is BSER
   * (see bserSerializer / bserDeserializer).
   *
   * @return the full argv, starting with the python interpreter
   * @throws IOException if generating buck.py or the config temp files fails
   */
  private ImmutableList<String> buildArgs() throws IOException {
    // Invoke buck.py and exchange build-rule data over its stdin/stdout.
    ImmutableList.Builder<String> argBuilder = ImmutableList.builder();
    argBuilder.add(options.getPythonInterpreter());
    // Ask python to unbuffer stdout so that we can coordinate based on the output as it is
    // produced.
    argBuilder.add("-u");
    argBuilder.add(getPathToBuckPy(options.getDescriptions()).toString());
    if (enableProfiling) {
      argBuilder.add("--profile");
    }
    if (ignoreBuckAutodepsFiles) {
      argBuilder.add("--ignore_buck_autodeps_files");
    }
    if (options.getAllowEmptyGlobs()) {
      argBuilder.add("--allow_empty_globs");
    }
    if (options.getUseWatchmanGlob()) {
      argBuilder.add("--use_watchman_glob");
    }
    if (options.getWatchmanGlobStatResults()) {
      argBuilder.add("--watchman_glob_stat_results");
    }
    if (options.getWatchmanUseGlobGenerator()) {
      argBuilder.add("--watchman_use_glob_generator");
    }
    if (options.getWatchman().getSocketPath().isPresent()) {
      argBuilder.add(
          "--watchman_socket_path",
          options.getWatchman().getSocketPath().get().toAbsolutePath().toString());
    }
    if (options.getWatchmanQueryTimeoutMs().isPresent()) {
      argBuilder.add(
          "--watchman_query_timeout_ms",
          options.getWatchmanQueryTimeoutMs().get().toString());
    }
    if (options.getUseMercurialGlob()) {
      argBuilder.add("--use_mercurial_glob");
    }
    if (options.getEnableBuildFileSandboxing()) {
      argBuilder.add("--enable_build_file_sandboxing");
    }
    // Add the --build_file_import_whitelist flags.
    for (String module : options.getBuildFileImportWhitelist()) {
      argBuilder.add("--build_file_import_whitelist");
      argBuilder.add(module);
    }
    argBuilder.add("--project_root", options.getProjectRoot().toAbsolutePath().toString());
    // One --cell_root name=path pair per known cell.
    for (ImmutableMap.Entry<String, Path> entry : options.getCellRoots().entrySet()) {
      argBuilder.add("--cell_root", entry.getKey() + "=" + entry.getValue());
    }
    argBuilder.add("--build_file_name", options.getBuildFileName());
    if (!options.getAutodepsFilesHaveSignatures()) {
      argBuilder.add("--no_autodeps_signatures");
    }
    // Tell the parser not to print exceptions to stderr.
    argBuilder.add("--quiet");
    // Add the --include flags.
    for (String include : options.getDefaultIncludes()) {
      argBuilder.add("--include");
      argBuilder.add(include);
    }
    // Add all config settings. Calling .get() here creates the memoized temp file.
    argBuilder.add("--config", rawConfigJson.get().toString());
    // Add ignore paths.
    argBuilder.add("--ignore_paths", ignorePathsJson.get().toString());
    return argBuilder.build();
  }
/**
* Collect all rules from a particular build file.
*
* @param buildFile should be an absolute path to a build file. Must have rootPath as its prefix.
*/
public List<Map<String, Object>> getAll(Path buildFile)
throws BuildFileParseException, InterruptedException {
ImmutableList<Map<String, Object>> result = getAllRulesAndMetaRules(buildFile);
// Strip out the __includes, __configs, and __env meta rules, which are the last rules.
return Collections.unmodifiableList(result.subList(0, result.size() - 3));
}
  /**
   * Collect all rules from a particular build file, along with meta rules about the rules, for
   * example which build files the rules depend on.
   *
   * @param buildFile should be an absolute path to a build file. Must have rootPath as its prefix.
   */
  public ImmutableList<Map<String, Object>> getAllRulesAndMetaRules(Path buildFile)
      throws BuildFileParseException, InterruptedException {
    try {
      return getAllRulesInternal(buildFile);
    } catch (IOException e) {
      // If the IOException actually wraps an interrupt, surface that instead of a
      // parse error (see MoreThrowables.propagateIfInterrupt).
      throw BuildFileParseException.createForBuildFileParseError(buildFile, e);
    }
  }
  /**
   * Sends one parse request to buck.py over stdin and decodes the BSER response.
   * Single-threaded by contract (assertSingleThreadedParsing); posts
   * ParseBuckFileEvent started/finished events around the exchange.
   *
   * @param buildFile absolute path of the build file to parse
   * @return all rules plus trailing meta rules, as returned by buck.py
   * @throws IOException if the subprocess exits or the response cannot be decoded
   */
  @VisibleForTesting
  protected ImmutableList<Map<String, Object>> getAllRulesInternal(Path buildFile)
      throws IOException, BuildFileParseException {
    ensureNotClosed();
    initIfNeeded();
    // Check isInitialized implications (to avoid Eradicate warnings).
    Preconditions.checkNotNull(buckPyStdinWriter);
    Preconditions.checkNotNull(buckPyProcess);
    ParseBuckFileEvent.Started parseBuckFileStarted = ParseBuckFileEvent.started(buildFile);
    buckEventBus.post(parseBuckFileStarted);
    // Defaults used by the finally block if the exchange fails part-way.
    ImmutableList<Map<String, Object>> values = ImmutableList.of();
    String profile = "";
    try (AssertScopeExclusiveAccess.Scope scope = assertSingleThreadedParsing.scope()) {
      // Resolve the watchman watch root / project prefix for the cell containing this file.
      Path cellPath = options.getProjectRoot().toAbsolutePath();
      String watchRoot = cellPath.toString();
      String projectPrefix = "";
      if (options.getWatchman().getProjectWatches().containsKey(cellPath)) {
        ProjectWatch projectWatch = options.getWatchman().getProjectWatches().get(cellPath);
        watchRoot = projectWatch.getWatchRoot();
        if (projectWatch.getProjectPrefix().isPresent()) {
          projectPrefix = projectWatch.getProjectPrefix().get();
        }
      }
      // Request: one BSER map per parse, flushed so buck.py sees it immediately.
      bserSerializer.serializeToStream(
          ImmutableMap.of(
              "buildFile", buildFile.toString(),
              "watchRoot", watchRoot,
              "projectPrefix", projectPrefix),
          buckPyStdinWriter);
      buckPyStdinWriter.flush();
      LOG.debug("Parsing output of process %s...", buckPyProcess);
      Object deserializedValue;
      try {
        deserializedValue = bserDeserializer.deserializeBserValue(
            buckPyProcess.getInputStream());
      } catch (BserDeserializer.BserEofException e) {
        // EOF mid-value means buck.py died; convert to IOException so callers treat it
        // as a parse failure.
        LOG.warn(e, "Parser exited while decoding BSER data");
        throw new IOException("Parser exited unexpectedly", e);
      }
      BuildFilePythonResult resultObject = handleDeserializedValue(deserializedValue);
      Path buckPyPath = getPathToBuckPy(options.getDescriptions());
      handleDiagnostics(
          buildFile,
          buckPyPath.getParent(),
          resultObject.getDiagnostics(),
          buckEventBus);
      values = resultObject.getValues();
      LOG.verbose("Got rules: %s", values);
      LOG.debug("Parsed %d rules from %s", values.size(), buildFile);
      profile = resultObject.getProfile();
      if (profile != null) {
        LOG.debug("Profile result: %s", profile);
      }
      return values;
    } finally {
      // Always post the matching finished event, even on failure.
      buckEventBus.post(ParseBuckFileEvent.finished(parseBuckFileStarted, values, profile));
    }
  }
@SuppressWarnings("unchecked")
private static BuildFilePythonResult handleDeserializedValue(@Nullable Object deserializedValue)
throws IOException {
if (!(deserializedValue instanceof Map<?, ?>)) {
throw new IOException(
String.format("Invalid parser output (expected map, got %s)", deserializedValue));
}
Map<String, Object> decodedResult = (Map<String, Object>) deserializedValue;
List<Map<String, Object>> values;
try {
values = (List<Map<String, Object>>) decodedResult.get("values");
} catch (ClassCastException e) {
throw new IOException("Invalid parser values", e);
}
List<Map<String, String>> diagnostics;
try {
diagnostics = (List<Map<String, String>>) decodedResult.get("diagnostics");
} catch (ClassCastException e) {
throw new IOException("Invalid parser diagnostics", e);
}
String profile;
try {
profile = (String) decodedResult.get("profile");
} catch (ClassCastException e) {
throw new IOException("Invalid parser profile", e);
}
return BuildFilePythonResult.of(
values,
diagnostics == null ? ImmutableList.of() : diagnostics,
profile == null ? "" : profile);
}
  /**
   * Dispatches each diagnostic returned by buck.py: watchman-sourced diagnostics go to
   * handleWatchmanDiagnostic(); others are logged and/or posted as console events by level.
   *
   * @throws IOException if a diagnostic is missing its level or message
   * @throws BuildFileParseException for a "fatal" diagnostic
   */
  private static void handleDiagnostics(
      Path buildFile,
      Path buckPyDir,
      List<Map<String, String>> diagnosticsList,
      BuckEventBus buckEventBus) throws IOException, BuildFileParseException {
    for (Map<String, String> diagnostic : diagnosticsList) {
      String level = diagnostic.get("level");
      String message = diagnostic.get("message");
      String source = diagnostic.get("source");
      if (level == null || message == null) {
        throw new IOException(
            String.format(
                "Invalid diagnostic(level=%s, message=%s, source=%s)",
                level,
                message,
                source));
      }
      if (source != null && source.equals("watchman")) {
        handleWatchmanDiagnostic(buildFile, level, message, buckEventBus);
      } else {
        String header;
        if (source != null) {
          header = buildFile + " (" + source + ")";
        } else {
          header = buildFile.toString();
        }
        switch (level) {
          case "debug":
            LOG.debug("%s: %s", header, message);
            break;
          case "info":
            LOG.info("%s: %s", header, message);
            break;
          case "warning":
            LOG.warn("Warning raised by BUCK file parser for file %s: %s", header, message);
            buckEventBus.post(
                ConsoleEvent.warning("Warning raised by BUCK file parser: %s", message));
            break;
          case "error":
            LOG.warn("Error raised by BUCK file parser for file %s: %s", header, message);
            buckEventBus.post(
                ConsoleEvent.severe("Error raised by BUCK file parser: %s", message));
            break;
          case "fatal":
            LOG.warn("Fatal error raised by BUCK file parser for file %s: %s", header, message);
            // Note: declared map value type is String, but at runtime this entry is
            // presumably a nested map — createParseException accepts Object and
            // checks `instanceof Map` (TODO confirm against buck.py's output).
            Object exception = diagnostic.get("exception");
            // No break needed: this case always throws.
            throw BuildFileParseException.createForBuildFileParseError(
                buildFile,
                createParseException(buildFile, buckPyDir, message, exception));
          default:
            LOG.warn(
                "Unknown diagnostic (level %s) raised by BUCK file parser for build file %s: %s",
                level,
                buildFile,
                message);
            break;
        }
      }
    }
  }
private static Optional<BuildFileSyntaxError> parseSyntaxError(Map<String, Object> exceptionMap) {
String type = (String) exceptionMap.get("type");
if (type.equals("SyntaxError")) {
return Optional.of(
BuildFileSyntaxError.of(
Paths.get((String) exceptionMap.get("filename")),
(Number) exceptionMap.get("lineno"),
(Number) exceptionMap.get("offset"),
(String) exceptionMap.get("text")));
} else {
return Optional.empty();
}
}
@SuppressWarnings("unchecked")
private static ImmutableList<BuildFileParseExceptionStackTraceEntry> parseStackTrace(
Map<String, Object> exceptionMap
) {
List<Map<String, Object>> traceback =
(List<Map<String, Object>>) exceptionMap.get("traceback");
ImmutableList.Builder<BuildFileParseExceptionStackTraceEntry> stackTraceBuilder =
ImmutableList.builder();
for (Map<String, Object> tracebackItem : traceback) {
stackTraceBuilder.add(
BuildFileParseExceptionStackTraceEntry.of(
Paths.get((String) tracebackItem.get("filename")),
(Number) tracebackItem.get("line_number"),
(String) tracebackItem.get("function_name"),
(String) tracebackItem.get("text")));
}
return stackTraceBuilder.build();
}
@VisibleForTesting
static BuildFileParseExceptionData parseExceptionData(
Map<String, Object> exceptionMap) {
return BuildFileParseExceptionData.of(
(String) exceptionMap.get("type"),
(String) exceptionMap.get("value"),
parseSyntaxError(exceptionMap),
parseStackTrace(exceptionMap)
);
}
private static String formatStackTrace(
Path buckPyDir,
ImmutableList<BuildFileParseExceptionStackTraceEntry> stackTrace
) {
StringBuilder formattedTraceback = new StringBuilder();
for (BuildFileParseExceptionStackTraceEntry entry : stackTrace) {
if (entry.getFileName().getParent().equals(buckPyDir)) {
// Skip stack trace entries for buck.py itself
continue;
}
String location;
if (entry.getFunctionName().equals("<module>")) {
location = "";
} else {
location = String.format(", in %s", entry.getFunctionName());
}
formattedTraceback.append(
String.format(
" File \"%s\", line %s%s\n %s\n",
entry.getFileName(),
entry.getLineNumber(),
location,
entry.getText()));
}
return formattedTraceback.toString();
}
  /**
   * Builds a human-readable IOException from a buck.py exception payload: syntax errors
   * get a caret-annotated source excerpt; everything else gets a formatted call stack.
   *
   * @param exception the raw "exception" entry from the diagnostic; any non-map value
   *     falls back to a plain IOException carrying only the message
   */
  @SuppressWarnings("unchecked")
  private static IOException createParseException(
      Path buildFile,
      Path buckPyDir,
      String message,
      @Nullable Object exception) {
    if (!(exception instanceof Map<?, ?>)) {
      return new IOException(message);
    } else {
      Map<String, Object> exceptionMap = (Map<String, Object>) exception;
      BuildFileParseExceptionData exceptionData = parseExceptionData(exceptionMap);
      LOG.debug("Received exception from buck.py parser: %s", exceptionData);
      Optional<BuildFileSyntaxError> syntaxErrorOpt = exceptionData.getSyntaxError();
      if (syntaxErrorOpt.isPresent()) {
        BuildFileSyntaxError syntaxError = syntaxErrorOpt.get();
        String prefix;
        if (buildFile.equals(syntaxError.getFileName())) {
          // BuildFileParseException will include the filename
          prefix = String.format(
              "Syntax error on line %s",
              syntaxError.getLineNumber());
        } else {
          // Parse error was in some other file included by the build file
          prefix = String.format(
              "Syntax error in %s\nLine %s",
              syntaxError.getFileName(),
              syntaxError.getLineNumber());
        }
        // padStart pads "^" with spaces up to the error column, producing a caret
        // aligned under the offending position in the echoed source text.
        return new IOException(
            String.format(
                "%s, column %s:\n%s%s",
                prefix,
                syntaxError.getOffset(),
                syntaxError.getText(),
                Strings.padStart("^", syntaxError.getOffset().intValue(), ' ')));
      } else {
        String formattedStackTrace = formatStackTrace(
            buckPyDir,
            exceptionData.getStackTrace());
        return new IOException(
            String.format(
                "%s: %s\nCall stack:\n%s",
                exceptionData.getType(),
                exceptionData.getValue(),
                formattedStackTrace));
      }
    }
  }
private static void handleWatchmanDiagnostic(
Path buildFile,
String level,
String message,
BuckEventBus buckEventBus) throws IOException {
WatchmanDiagnostic.Level watchmanDiagnosticLevel;
switch (level) {
// Watchman itself doesn't issue debug or info, but in case
// engineers hacking on stuff add calls, let's log them
// then return.
case "debug":
LOG.debug("%s (watchman): %s", buildFile, message);
return;
case "info":
LOG.info("%s (watchman): %s", buildFile, message);
return;
case "warning":
watchmanDiagnosticLevel = WatchmanDiagnostic.Level.WARNING;
break;
case "error":
watchmanDiagnosticLevel = WatchmanDiagnostic.Level.ERROR;
break;
case "fatal":
throw new IOException(
String.format(
"%s: %s",
buildFile,
message));
default:
throw new RuntimeException(
String.format(
"Unrecognized watchman diagnostic level: %s (message=%s)",
level,
message));
}
WatchmanDiagnostic watchmanDiagnostic = WatchmanDiagnostic.of(
watchmanDiagnosticLevel,
message);
buckEventBus.post(new WatchmanDiagnosticEvent(watchmanDiagnostic));
}
  /**
   * Shuts the parser down: closes buck.py's stdin so it exits gracefully, joins the
   * stderr consumer, waits for the subprocess, deletes the generated buck.py, and
   * posts the Finished event matching init()'s Started event. Idempotent.
   *
   * @throws BuildFileParseException if buck.py exited with a non-zero status
   * @throws IOException if the stderr consumer failed with an I/O error
   */
  @Override
  @SuppressWarnings("PMD.EmptyCatchBlock")
  public void close() throws BuildFileParseException, InterruptedException, IOException {
    if (isClosed) {
      return;
    }
    try {
      if (isInitialized) {
        // Check isInitialized implications (to avoid Eradicate warnings).
        Preconditions.checkNotNull(buckPyStdinWriter);
        Preconditions.checkNotNull(buckPyProcess);
        // Allow buck.py to terminate gracefully.
        try {
          buckPyStdinWriter.close();
        } catch (IOException e) {
          // Safe to ignore since we've already flushed everything we wanted
          // to write.
        }
        if (stderrConsumerThread != null) {
          stderrConsumerThread.join();
          stderrConsumerThread = null;
          try {
            // Re-raise anything the stderr consumer threw while draining.
            Preconditions.checkNotNull(stderrConsumerTerminationFuture).get();
          } catch (ExecutionException e) {
            Throwable cause = e.getCause();
            if (cause instanceof IOException) {
              throw (IOException) cause;
            } else {
              throw new RuntimeException(e);
            }
          }
          stderrConsumerTerminationFuture = null;
        }
        LOG.debug("Waiting for process %s to exit...", buckPyProcess);
        ProcessExecutor.Result result = processExecutor.waitForLaunchedProcess(buckPyProcess);
        if (result.getExitCode() != 0) {
          LOG.warn(result.getMessageForUnexpectedResult(buckPyProcess.toString()));
          throw BuildFileParseException.createForUnknownParseError(
              result.getMessageForResult("Parser did not exit cleanly"));
        }
        LOG.debug("Process %s exited cleanly.", buckPyProcess);
        try {
          // Same lock as getPathToBuckPy() to avoid racing a concurrent creation.
          synchronized (this) {
            if (buckPythonProgram != null) {
              buckPythonProgram.close();
            }
          }
        } catch (IOException e) {
          // Eat any exceptions from deleting the temporary buck.py file.
        }
      }
    } finally {
      if (isInitialized) {
        // Pair with the Started event posted in init(), even if cleanup failed.
        buckEventBus.post(
            new ProjectBuildFileParseEvents.Finished(
                Preconditions.checkNotNull(projectBuildFileParseEventStarted)));
      }
      isClosed = true;
    }
  }
private synchronized Path getPathToBuckPy(ImmutableSet<Description<?>> descriptions)
throws IOException {
if (buckPythonProgram == null) {
buckPythonProgram = BuckPythonProgram.newInstance(marshaller, descriptions);
}
return buckPythonProgram.getExecutablePath();
}
  /**
   * Shape of one decoded buck.py response: the parsed rules, any diagnostics, and
   * profiling output ("" when profiling is off — see handleDeserializedValue()).
   */
  @Value.Immutable
  @BuckStyleTuple
  interface AbstractBuildFilePythonResult {
    List<Map<String, Object>> getValues();
    List<Map<String, String>> getDiagnostics();
    String getProfile();
  }
}
|
src/com/facebook/buck/json/ProjectBuildFileParser.java
|
/*
* Copyright 2012-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.json;
import com.facebook.buck.bser.BserDeserializer;
import com.facebook.buck.bser.BserSerializer;
import com.facebook.buck.event.BuckEventBus;
import com.facebook.buck.event.ConsoleEvent;
import com.facebook.buck.event.PerfEventId;
import com.facebook.buck.event.SimplePerfEvent;
import com.facebook.buck.io.WatchmanDiagnosticEvent;
import com.facebook.buck.io.PathOrGlobMatcher;
import com.facebook.buck.io.ProjectWatch;
import com.facebook.buck.io.WatchmanDiagnostic;
import com.facebook.buck.log.Logger;
import com.facebook.buck.rules.ConstructorArgMarshaller;
import com.facebook.buck.rules.Description;
import com.facebook.buck.util.InputStreamConsumer;
import com.facebook.buck.util.MoreCollectors;
import com.facebook.buck.util.MoreThrowables;
import com.facebook.buck.util.ProcessExecutor;
import com.facebook.buck.util.ProcessExecutorParams;
import com.facebook.buck.util.Threads;
import com.facebook.buck.util.concurrent.AssertScopeExclusiveAccess;
import com.facebook.buck.util.immutables.BuckStyleTuple;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import com.google.common.base.Strings;
import com.google.common.base.Supplier;
import com.google.common.base.Suppliers;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import org.immutables.value.Value;
import java.io.BufferedOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.FutureTask;
import javax.annotation.Nullable;
/**
* Delegates to buck.py for parsing of buck build files. Constructed on demand for the
* parsing phase and must be closed afterward to free up resources.
*/
public class ProjectBuildFileParser implements AutoCloseable {
private static final Logger LOG = Logger.get(ProjectBuildFileParser.class);
private final ImmutableMap<String, String> environment;
@Nullable private BuckPythonProgram buckPythonProgram;
private Supplier<Path> rawConfigJson;
private Supplier<Path> ignorePathsJson;
@Nullable private ProcessExecutor.LaunchedProcess buckPyProcess;
@Nullable private BufferedOutputStream buckPyStdinWriter;
private final ProjectBuildFileParserOptions options;
private final ConstructorArgMarshaller marshaller;
private final BuckEventBus buckEventBus;
private final ProcessExecutor processExecutor;
private final BserDeserializer bserDeserializer;
private final BserSerializer bserSerializer;
private final AssertScopeExclusiveAccess assertSingleThreadedParsing;
private final boolean ignoreBuckAutodepsFiles;
private boolean isInitialized;
private boolean isClosed;
private boolean enableProfiling;
@Nullable private FutureTask<Void> stderrConsumerTerminationFuture;
@Nullable private Thread stderrConsumerThread;
@Nullable private ProjectBuildFileParseEvents.Started projectBuildFileParseEventStarted;
  /**
   * Creates a parser; the buck.py subprocess is not launched until the first parse.
   *
   * @param options parser configuration (project root, watchman settings, includes, etc.).
   * @param marshaller used when generating the buck.py program from rule descriptions.
   * @param environment base environment for the spawned Python process.
   * @param buckEventBus bus for parse progress and diagnostic events.
   * @param processExecutor used to launch and wait on the buck.py process.
   * @param ignoreBuckAutodepsFiles if true, buck.py is told to ignore BUCK.autodeps files.
   */
  protected ProjectBuildFileParser(
      final ProjectBuildFileParserOptions options,
      final ConstructorArgMarshaller marshaller,
      ImmutableMap<String, String> environment,
      BuckEventBus buckEventBus,
      ProcessExecutor processExecutor,
      boolean ignoreBuckAutodepsFiles) {
    this.buckPythonProgram = null;
    this.options = options;
    this.marshaller = marshaller;
    this.environment = environment;
    this.buckEventBus = buckEventBus;
    this.processExecutor = processExecutor;
    this.bserDeserializer = new BserDeserializer(BserDeserializer.KeyOrdering.SORTED);
    this.bserSerializer = new BserSerializer();
    this.assertSingleThreadedParsing = new AssertScopeExclusiveAccess();
    this.ignoreBuckAutodepsFiles = ignoreBuckAutodepsFiles;
    // Memoized so the temp file is written at most once, on first use (buildArgs()).
    // IOExceptions are wrapped because Supplier.get() cannot throw checked exceptions.
    this.rawConfigJson =
        Suppliers.memoize(
            () -> {
              try {
                Path rawConfigJson1 = Files.createTempFile("raw_config", ".json");
                Files.createDirectories(rawConfigJson1.getParent());
                try (OutputStream output =
                    new BufferedOutputStream(Files.newOutputStream(rawConfigJson1))) {
                  bserSerializer.serializeToStream(options.getRawConfig(), output);
                }
                return rawConfigJson1;
              } catch (IOException e) {
                throw new RuntimeException(e);
              }
            });
    // Same memoization pattern for the list of ignored paths/globs passed to buck.py.
    this.ignorePathsJson =
        Suppliers.memoize(
            () -> {
              try {
                Path ignorePathsJson1 = Files.createTempFile("ignore_paths", ".json");
                Files.createDirectories(ignorePathsJson1.getParent());
                try (OutputStream output =
                    new BufferedOutputStream(Files.newOutputStream(ignorePathsJson1))) {
                  bserSerializer.serializeToStream(
                      options.getIgnorePaths().stream()
                          .map(PathOrGlobMatcher::getPathOrGlob)
                          .collect(MoreCollectors.toImmutableList()),
                      output);
                }
                return ignorePathsJson1;
              } catch (IOException e) {
                throw new RuntimeException(e);
              }
            });
  }
  /**
   * Enables buck.py's --profile flag for this parser. Must be called before the first
   * parse (the flag is baked into the command line at init time) and before close().
   */
  public void setEnableProfiling(boolean enableProfiling) {
    ensureNotClosed();
    ensureNotInitialized();
    this.enableProfiling = enableProfiling;
  }
  /** Returns true once {@link #close()} has run. */
  @VisibleForTesting
  public boolean isClosed() {
    return isClosed;
  }
  /** Fails fast (IllegalStateException) if this parser has already been closed. */
  private void ensureNotClosed() {
    Preconditions.checkState(!isClosed);
  }
  /** Fails fast (IllegalStateException) if the buck.py process has already been started. */
  private void ensureNotInitialized() {
    Preconditions.checkState(!isInitialized);
  }
  /**
   * Initialization on demand moves around the performance impact of creating the Python
   * interpreter to when parsing actually begins. This makes it easier to attribute this time
   * to the actual parse phase.
   */
  @VisibleForTesting
  public void initIfNeeded() throws IOException {
    ensureNotClosed();
    if (!isInitialized) {
      init();
      // Flag is only flipped after init() succeeds, so a failed init can be retried.
      isInitialized = true;
    }
  }
/**
* Initialize the parser, starting buck.py.
*/
private void init() throws IOException {
projectBuildFileParseEventStarted = new ProjectBuildFileParseEvents.Started();
buckEventBus.post(projectBuildFileParseEventStarted);
try (SimplePerfEvent.Scope scope = SimplePerfEvent.scope(
buckEventBus,
PerfEventId.of("ParserInit"))) {
ImmutableMap<String, String> pythonEnvironment =
new ImmutableMap.Builder<String, String>()
.putAll(new HashMap<String, String>() {{
putAll(environment);
put("PYTHONPATH",
options.getPythonModuleSearchPath().orElse(""));
}})
.build();
ProcessExecutorParams params = ProcessExecutorParams.builder()
.setCommand(buildArgs())
.setEnvironment(pythonEnvironment)
.build();
LOG.debug(
"Starting buck.py command: %s environment: %s",
params.getCommand(),
params.getEnvironment());
buckPyProcess = processExecutor.launchProcess(params);
LOG.debug("Started process %s successfully", buckPyProcess);
OutputStream stdin = buckPyProcess.getOutputStream();
InputStream stderr = buckPyProcess.getErrorStream();
InputStreamConsumer stderrConsumer = new InputStreamConsumer(
stderr,
(InputStreamConsumer.Handler) line -> buckEventBus.post(
ConsoleEvent.warning("Warning raised by BUCK file parser: %s", line)));
stderrConsumerTerminationFuture = new FutureTask<>(stderrConsumer);
stderrConsumerThread = Threads.namedThread(
ProjectBuildFileParser.class.getSimpleName(),
stderrConsumerTerminationFuture);
stderrConsumerThread.start();
buckPyStdinWriter = new BufferedOutputStream(stdin);
}
}
  /**
   * Assembles the full buck.py command line from {@link #options} and parser state.
   *
   * <p>Note: calling this materializes the memoized config / ignore-paths temp files and,
   * via {@link #getPathToBuckPy}, the generated buck.py program itself.
   *
   * @return the argv used to launch the Python parser process.
   */
  private ImmutableList<String> buildArgs() throws IOException {
    // Invoking buck.py and read JSON-formatted build rules from its stdout.
    ImmutableList.Builder<String> argBuilder = ImmutableList.builder();
    argBuilder.add(options.getPythonInterpreter());
    // Ask python to unbuffer stdout so that we can coordinate based on the output as it is
    // produced.
    argBuilder.add("-u");
    argBuilder.add(getPathToBuckPy(options.getDescriptions()).toString());
    if (enableProfiling) {
      argBuilder.add("--profile");
    }
    if (ignoreBuckAutodepsFiles) {
      argBuilder.add("--ignore_buck_autodeps_files");
    }
    if (options.getAllowEmptyGlobs()) {
      argBuilder.add("--allow_empty_globs");
    }
    if (options.getUseWatchmanGlob()) {
      argBuilder.add("--use_watchman_glob");
    }
    if (options.getWatchmanGlobStatResults()) {
      argBuilder.add("--watchman_glob_stat_results");
    }
    if (options.getWatchmanUseGlobGenerator()) {
      argBuilder.add("--watchman_use_glob_generator");
    }
    if (options.getWatchman().getSocketPath().isPresent()) {
      argBuilder.add(
          "--watchman_socket_path",
          options.getWatchman().getSocketPath().get().toAbsolutePath().toString());
    }
    if (options.getWatchmanQueryTimeoutMs().isPresent()) {
      argBuilder.add(
          "--watchman_query_timeout_ms",
          options.getWatchmanQueryTimeoutMs().get().toString());
    }
    if (options.getUseMercurialGlob()) {
      argBuilder.add("--use_mercurial_glob");
    }
    if (options.getEnableBuildFileSandboxing()) {
      argBuilder.add("--enable_build_file_sandboxing");
    }
    // Add the --build_file_import_whitelist flags.
    for (String module : options.getBuildFileImportWhitelist()) {
      argBuilder.add("--build_file_import_whitelist");
      argBuilder.add(module);
    }
    argBuilder.add("--project_root", options.getProjectRoot().toAbsolutePath().toString());
    // One --cell_root name=path pair per known cell.
    for (ImmutableMap.Entry<String, Path> entry : options.getCellRoots().entrySet()) {
      argBuilder.add("--cell_root", entry.getKey() + "=" + entry.getValue());
    }
    argBuilder.add("--build_file_name", options.getBuildFileName());
    if (!options.getAutodepsFilesHaveSignatures()) {
      argBuilder.add("--no_autodeps_signatures");
    }
    // Tell the parser not to print exceptions to stderr.
    argBuilder.add("--quiet");
    // Add the --include flags.
    for (String include : options.getDefaultIncludes()) {
      argBuilder.add("--include");
      argBuilder.add(include);
    }
    // Add all config settings (BSER-serialized to a temp file; see rawConfigJson).
    argBuilder.add("--config", rawConfigJson.get().toString());
    // Add ignore paths (BSER-serialized to a temp file; see ignorePathsJson).
    argBuilder.add("--ignore_paths", ignorePathsJson.get().toString());
    return argBuilder.build();
  }
/**
* Collect all rules from a particular build file.
*
* @param buildFile should be an absolute path to a build file. Must have rootPath as its prefix.
*/
public List<Map<String, Object>> getAll(Path buildFile)
throws BuildFileParseException, InterruptedException {
ImmutableList<Map<String, Object>> result = getAllRulesAndMetaRules(buildFile);
// Strip out the __includes, __configs, and __env meta rules, which are the last rules.
return Collections.unmodifiableList(result.subList(0, result.size() - 3));
}
  /**
   * Collect all rules from a particular build file, along with meta rules about the rules, for
   * example which build files the rules depend on.
   *
   * @param buildFile should be an absolute path to a build file. Must have rootPath as its prefix.
   * @throws BuildFileParseException on I/O or parse failure (the cause is preserved).
   * @throws InterruptedException if the underlying failure was interrupt-induced.
   */
  public ImmutableList<Map<String, Object>> getAllRulesAndMetaRules(Path buildFile)
      throws BuildFileParseException, InterruptedException {
    try {
      return getAllRulesInternal(buildFile);
    } catch (IOException e) {
      // Re-throws as InterruptedException when the IOException wraps an interrupt
      // (per the helper's name — see MoreThrowables); otherwise wrap it in a parse
      // exception tied to this build file.
      MoreThrowables.propagateIfInterrupt(e);
      throw BuildFileParseException.createForBuildFileParseError(buildFile, e);
    }
  }
  /**
   * Performs one parse round-trip with buck.py: writes a BSER request describing the
   * build file (plus its watchman watch root / project prefix) to the child's stdin,
   * then decodes the BSER response from its stdout.
   *
   * <p>Single-threaded by contract: enforced via {@link #assertSingleThreadedParsing}.
   * Always posts a ParseBuckFileEvent.finished event, even on failure.
   */
  @VisibleForTesting
  protected ImmutableList<Map<String, Object>> getAllRulesInternal(Path buildFile)
      throws IOException, BuildFileParseException {
    ensureNotClosed();
    initIfNeeded();
    // Check isInitialized implications (to avoid Eradicate warnings).
    Preconditions.checkNotNull(buckPyStdinWriter);
    Preconditions.checkNotNull(buckPyProcess);
    ParseBuckFileEvent.Started parseBuckFileStarted = ParseBuckFileEvent.started(buildFile);
    buckEventBus.post(parseBuckFileStarted);
    // Defaults used by the finished event if parsing fails before a result is decoded.
    ImmutableList<Map<String, Object>> values = ImmutableList.of();
    String profile = "";
    try (AssertScopeExclusiveAccess.Scope scope = assertSingleThreadedParsing.scope()) {
      Path cellPath = options.getProjectRoot().toAbsolutePath();
      String watchRoot = cellPath.toString();
      String projectPrefix = "";
      if (options.getWatchman().getProjectWatches().containsKey(cellPath)) {
        ProjectWatch projectWatch = options.getWatchman().getProjectWatches().get(cellPath);
        watchRoot = projectWatch.getWatchRoot();
        if (projectWatch.getProjectPrefix().isPresent()) {
          projectPrefix = projectWatch.getProjectPrefix().get();
        }
      }
      bserSerializer.serializeToStream(
          ImmutableMap.of(
              "buildFile", buildFile.toString(),
              "watchRoot", watchRoot,
              "projectPrefix", projectPrefix),
          buckPyStdinWriter);
      buckPyStdinWriter.flush();
      LOG.debug("Parsing output of process %s...", buckPyProcess);
      Object deserializedValue;
      try {
        deserializedValue = bserDeserializer.deserializeBserValue(
            buckPyProcess.getInputStream());
      } catch (BserDeserializer.BserEofException e) {
        // EOF mid-value means buck.py died; surface as an IOException for the caller.
        LOG.warn(e, "Parser exited while decoding BSER data");
        throw new IOException("Parser exited unexpectedly", e);
      }
      BuildFilePythonResult resultObject = handleDeserializedValue(deserializedValue);
      Path buckPyPath = getPathToBuckPy(options.getDescriptions());
      handleDiagnostics(
          buildFile,
          buckPyPath.getParent(),
          resultObject.getDiagnostics(),
          buckEventBus);
      values = resultObject.getValues();
      LOG.verbose("Got rules: %s", values);
      LOG.debug("Parsed %d rules from process", values.size());
      profile = resultObject.getProfile();
      if (profile != null) {
        LOG.debug("Profile result: %s", profile);
      }
      return values;
    } finally {
      buckEventBus.post(ParseBuckFileEvent.finished(parseBuckFileStarted, values, profile));
    }
  }
@SuppressWarnings("unchecked")
private static BuildFilePythonResult handleDeserializedValue(@Nullable Object deserializedValue)
throws IOException {
if (!(deserializedValue instanceof Map<?, ?>)) {
throw new IOException(
String.format("Invalid parser output (expected map, got %s)", deserializedValue));
}
Map<String, Object> decodedResult = (Map<String, Object>) deserializedValue;
List<Map<String, Object>> values;
try {
values = (List<Map<String, Object>>) decodedResult.get("values");
} catch (ClassCastException e) {
throw new IOException("Invalid parser values", e);
}
List<Map<String, String>> diagnostics;
try {
diagnostics = (List<Map<String, String>>) decodedResult.get("diagnostics");
} catch (ClassCastException e) {
throw new IOException("Invalid parser diagnostics", e);
}
String profile;
try {
profile = (String) decodedResult.get("profile");
} catch (ClassCastException e) {
throw new IOException("Invalid parser profile", e);
}
return BuildFilePythonResult.of(
values,
diagnostics == null ? ImmutableList.of() : diagnostics,
profile == null ? "" : profile);
}
  /**
   * Routes each diagnostic reported by buck.py: watchman-sourced diagnostics are handled
   * separately, others are logged and/or posted to the event bus by severity, and a
   * "fatal" diagnostic aborts the parse with a BuildFileParseException.
   *
   * @throws IOException if a diagnostic is missing its level or message.
   * @throws BuildFileParseException on a "fatal" (non-watchman) diagnostic.
   */
  private static void handleDiagnostics(
      Path buildFile,
      Path buckPyDir,
      List<Map<String, String>> diagnosticsList,
      BuckEventBus buckEventBus) throws IOException, BuildFileParseException {
    for (Map<String, String> diagnostic : diagnosticsList) {
      String level = diagnostic.get("level");
      String message = diagnostic.get("message");
      String source = diagnostic.get("source");
      if (level == null || message == null) {
        throw new IOException(
            String.format(
                "Invalid diagnostic(level=%s, message=%s, source=%s)",
                level,
                message,
                source));
      }
      if (source != null && source.equals("watchman")) {
        handleWatchmanDiagnostic(buildFile, level, message, buckEventBus);
      } else {
        String header;
        if (source != null) {
          header = buildFile + " (" + source + ")";
        } else {
          header = buildFile.toString();
        }
        switch (level) {
          case "debug":
            LOG.debug("%s: %s", header, message);
            break;
          case "info":
            LOG.info("%s: %s", header, message);
            break;
          case "warning":
            LOG.warn("Warning raised by BUCK file parser for file %s: %s", header, message);
            buckEventBus.post(
                ConsoleEvent.warning("Warning raised by BUCK file parser: %s", message));
            break;
          case "error":
            LOG.warn("Error raised by BUCK file parser for file %s: %s", header, message);
            buckEventBus.post(
                ConsoleEvent.severe("Error raised by BUCK file parser: %s", message));
            break;
          case "fatal":
            LOG.warn("Fatal error raised by BUCK file parser for file %s: %s", header, message);
            Object exception = diagnostic.get("exception");
            // No break needed: the throw ends this case.
            throw BuildFileParseException.createForBuildFileParseError(
                buildFile,
                createParseException(buildFile, buckPyDir, message, exception));
          default:
            LOG.warn(
                "Unknown diagnostic (level %s) raised by BUCK file parser for build file %s: %s",
                level,
                buildFile,
                message);
            break;
        }
      }
    }
  }
private static Optional<BuildFileSyntaxError> parseSyntaxError(Map<String, Object> exceptionMap) {
String type = (String) exceptionMap.get("type");
if (type.equals("SyntaxError")) {
return Optional.of(
BuildFileSyntaxError.of(
Paths.get((String) exceptionMap.get("filename")),
(Number) exceptionMap.get("lineno"),
(Number) exceptionMap.get("offset"),
(String) exceptionMap.get("text")));
} else {
return Optional.empty();
}
}
  /**
   * Converts the "traceback" entries of a buck.py exception map into typed stack-trace
   * entries, preserving their order.
   */
  @SuppressWarnings("unchecked")
  private static ImmutableList<BuildFileParseExceptionStackTraceEntry> parseStackTrace(
      Map<String, Object> exceptionMap
  ) {
    // NOTE(review): assumes a "traceback" entry is always present — a missing key would
    // NPE in the loop below. Confirm against buck.py's error serialization.
    List<Map<String, Object>> traceback =
        (List<Map<String, Object>>) exceptionMap.get("traceback");
    ImmutableList.Builder<BuildFileParseExceptionStackTraceEntry> stackTraceBuilder =
        ImmutableList.builder();
    for (Map<String, Object> tracebackItem : traceback) {
      stackTraceBuilder.add(
          BuildFileParseExceptionStackTraceEntry.of(
              Paths.get((String) tracebackItem.get("filename")),
              (Number) tracebackItem.get("line_number"),
              (String) tracebackItem.get("function_name"),
              (String) tracebackItem.get("text")));
    }
    return stackTraceBuilder.build();
  }
  /**
   * Builds the typed exception record (type, value, optional syntax error, stack trace)
   * from a deserialized buck.py exception map.
   */
  @VisibleForTesting
  static BuildFileParseExceptionData parseExceptionData(
      Map<String, Object> exceptionMap) {
    return BuildFileParseExceptionData.of(
        (String) exceptionMap.get("type"),
        (String) exceptionMap.get("value"),
        parseSyntaxError(exceptionMap),
        parseStackTrace(exceptionMap)
    );
  }
private static String formatStackTrace(
Path buckPyDir,
ImmutableList<BuildFileParseExceptionStackTraceEntry> stackTrace
) {
StringBuilder formattedTraceback = new StringBuilder();
for (BuildFileParseExceptionStackTraceEntry entry : stackTrace) {
if (entry.getFileName().getParent().equals(buckPyDir)) {
// Skip stack trace entries for buck.py itself
continue;
}
String location;
if (entry.getFunctionName().equals("<module>")) {
location = "";
} else {
location = String.format(", in %s", entry.getFunctionName());
}
formattedTraceback.append(
String.format(
" File \"%s\", line %s%s\n %s\n",
entry.getFileName(),
entry.getLineNumber(),
location,
entry.getText()));
}
return formattedTraceback.toString();
}
  /**
   * Converts a buck.py exception payload into an IOException with a user-facing message:
   * syntax errors are rendered with file/line/column and a caret, other exceptions with
   * their filtered Python call stack.
   *
   * @param exception deserialized exception map, or null/non-map to fall back to the
   *     bare message.
   */
  @SuppressWarnings("unchecked")
  private static IOException createParseException(
      Path buildFile,
      Path buckPyDir,
      String message,
      @Nullable Object exception) {
    if (!(exception instanceof Map<?, ?>)) {
      return new IOException(message);
    } else {
      Map<String, Object> exceptionMap = (Map<String, Object>) exception;
      BuildFileParseExceptionData exceptionData = parseExceptionData(exceptionMap);
      LOG.debug("Received exception from buck.py parser: %s", exceptionData);
      Optional<BuildFileSyntaxError> syntaxErrorOpt = exceptionData.getSyntaxError();
      if (syntaxErrorOpt.isPresent()) {
        BuildFileSyntaxError syntaxError = syntaxErrorOpt.get();
        String prefix;
        if (buildFile.equals(syntaxError.getFileName())) {
          // BuildFileParseException will include the filename
          prefix = String.format(
              "Syntax error on line %s",
              syntaxError.getLineNumber());
        } else {
          // Parse error was in some other file included by the build file
          prefix = String.format(
              "Syntax error in %s\nLine %s",
              syntaxError.getFileName(),
              syntaxError.getLineNumber());
        }
        // The padded "^" points at the offending column under the echoed source text.
        return new IOException(
            String.format(
                "%s, column %s:\n%s%s",
                prefix,
                syntaxError.getOffset(),
                syntaxError.getText(),
                Strings.padStart("^", syntaxError.getOffset().intValue(), ' ')));
      } else {
        String formattedStackTrace = formatStackTrace(
            buckPyDir,
            exceptionData.getStackTrace());
        return new IOException(
            String.format(
                "%s: %s\nCall stack:\n%s",
                exceptionData.getType(),
                exceptionData.getValue(),
                formattedStackTrace));
      }
    }
  }
private static void handleWatchmanDiagnostic(
Path buildFile,
String level,
String message,
BuckEventBus buckEventBus) throws IOException {
WatchmanDiagnostic.Level watchmanDiagnosticLevel;
switch (level) {
// Watchman itself doesn't issue debug or info, but in case
// engineers hacking on stuff add calls, let's log them
// then return.
case "debug":
LOG.debug("%s (watchman): %s", buildFile, message);
return;
case "info":
LOG.info("%s (watchman): %s", buildFile, message);
return;
case "warning":
watchmanDiagnosticLevel = WatchmanDiagnostic.Level.WARNING;
break;
case "error":
watchmanDiagnosticLevel = WatchmanDiagnostic.Level.ERROR;
break;
case "fatal":
throw new IOException(
String.format(
"%s: %s",
buildFile,
message));
default:
throw new RuntimeException(
String.format(
"Unrecognized watchman diagnostic level: %s (message=%s)",
level,
message));
}
WatchmanDiagnostic watchmanDiagnostic = WatchmanDiagnostic.of(
watchmanDiagnosticLevel,
message);
buckEventBus.post(new WatchmanDiagnosticEvent(watchmanDiagnostic));
}
  /**
   * Shuts the parser down: closes buck.py's stdin so it can exit gracefully, joins the
   * stderr drainer thread, waits for the process, and closes the generated buck.py
   * program. Idempotent. Always marks the parser closed and, if initialization happened,
   * posts the Finished event — even when an earlier step throws.
   *
   * @throws BuildFileParseException if buck.py exited with a non-zero status.
   */
  @Override
  @SuppressWarnings("PMD.EmptyCatchBlock")
  public void close() throws BuildFileParseException, InterruptedException, IOException {
    if (isClosed) {
      return;
    }
    try {
      if (isInitialized) {
        // Check isInitialized implications (to avoid Eradicate warnings).
        Preconditions.checkNotNull(buckPyStdinWriter);
        Preconditions.checkNotNull(buckPyProcess);
        // Allow buck.py to terminate gracefully.
        try {
          buckPyStdinWriter.close();
        } catch (IOException e) {
          // Safe to ignore since we've already flushed everything we wanted
          // to write.
        }
        if (stderrConsumerThread != null) {
          stderrConsumerThread.join();
          stderrConsumerThread = null;
          try {
            // Re-throw any IOException the stderr consumer hit while draining.
            Preconditions.checkNotNull(stderrConsumerTerminationFuture).get();
          } catch (ExecutionException e) {
            Throwable cause = e.getCause();
            if (cause instanceof IOException) {
              throw (IOException) cause;
            } else {
              throw new RuntimeException(e);
            }
          }
          stderrConsumerTerminationFuture = null;
        }
        LOG.debug("Waiting for process %s to exit...", buckPyProcess);
        ProcessExecutor.Result result = processExecutor.waitForLaunchedProcess(buckPyProcess);
        if (result.getExitCode() != 0) {
          LOG.warn(result.getMessageForUnexpectedResult(buckPyProcess.toString()));
          throw BuildFileParseException.createForUnknownParseError(
              result.getMessageForResult("Parser did not exit cleanly"));
        }
        LOG.debug("Process %s exited cleanly.", buckPyProcess);
        try {
          // Synchronized against getPathToBuckPy's lazy initialization.
          synchronized (this) {
            if (buckPythonProgram != null) {
              buckPythonProgram.close();
            }
          }
        } catch (IOException e) {
          // Eat any exceptions from deleting the temporary buck.py file.
        }
      }
    } finally {
      if (isInitialized) {
        buckEventBus.post(
            new ProjectBuildFileParseEvents.Finished(
                Preconditions.checkNotNull(projectBuildFileParseEventStarted)));
      }
      isClosed = true;
    }
  }
  /**
   * Lazily generates (at most once) and returns the path to the buck.py program built
   * for the given descriptions. Synchronized because close() concurrently inspects and
   * closes {@link #buckPythonProgram} under the same lock.
   */
  private synchronized Path getPathToBuckPy(ImmutableSet<Description<?>> descriptions)
      throws IOException {
    if (buckPythonProgram == null) {
      buckPythonProgram = BuckPythonProgram.newInstance(marshaller, descriptions);
    }
    return buckPythonProgram.getExecutablePath();
  }
  /** Typed view of the BSER response buck.py sends for each parse request. */
  @Value.Immutable
  @BuckStyleTuple
  interface AbstractBuildFilePythonResult {
    /** Parsed rules, followed by the trailing meta rules (see {@link #getAll}). */
    List<Map<String, Object>> getValues();
    /** Diagnostics (level/message/source maps) reported alongside the rules. */
    List<Map<String, String>> getDiagnostics();
    /** Profiling output when --profile was enabled; empty string otherwise. */
    String getProfile();
  }
}
|
Make parser log a little nicer
Summary: Show which build files we're parsing.
Test Plan: Looked at log.
fbshipit-source-id: eebee45
|
src/com/facebook/buck/json/ProjectBuildFileParser.java
|
Make parser log a little nicer
|
|
Java
|
apache-2.0
|
888af0337dd25be19e12aa134533ac9f53f37c00
| 0
|
apache/solr,apache/solr,apache/solr,apache/solr,apache/solr
|
package org.apache.lucene.index;
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import java.util.HashSet;
import java.util.List;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.FieldSelector;
import org.apache.lucene.document.FieldSelectorResult;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.store.IndexOutput;
/**
* The SegmentMerger class combines two or more Segments, represented by an IndexReader ({@link #add}),
* into a single Segment. After adding the appropriate readers, call the merge method to combine the
* segments.
*<P>
* If the compoundFile flag is set, then the segments will be merged into a compound file.
*
*
* @see #merge
* @see #add
*/
final class SegmentMerger {

  /** norms header placeholder */
  static final byte[] NORMS_HEADER = new byte[]{'N','R','M',-1};

  // Destination directory and the name of the segment being produced.
  private Directory directory;
  private String segment;
  private int termIndexInterval = IndexWriter.DEFAULT_TERM_INDEX_INTERVAL;

  // One reader per source segment, accumulated via add(). Raw List (pre-generics code).
  private List readers = new ArrayList();

  // Field name -> number mapping for the merged segment; populated by mergeFields().
  private FieldInfos fieldInfos;

  private int mergedDocs;

  // Non-null only when merging on behalf of an IndexWriter; used to poll for aborts.
  private CheckAbort checkAbort;

  // Whether we should merge doc stores (stored fields and
  // vectors files).  When all segments we are merging
  // already share the same doc store files, we don't need
  // to merge the doc stores.
  private boolean mergeDocStores;

  /** Maximum number of contiguous documents to bulk-copy
      when merging stored fields */
  private final static int MAX_RAW_MERGE_DOCS = 4192;
  /** This ctor used only by test code.
   *
   * @param dir The Directory to merge the other segments into
   * @param name The name of the new segment
   */
  SegmentMerger(Directory dir, String name) {
    // termIndexInterval keeps its default; no abort checking is installed.
    directory = dir;
    segment = name;
  }
SegmentMerger(IndexWriter writer, String name, MergePolicy.OneMerge merge) {
directory = writer.getDirectory();
segment = name;
if (merge != null)
checkAbort = new CheckAbort(merge, directory);
termIndexInterval = writer.getTermIndexInterval();
}
  /**
   * Returns true if any merged field stores proximity (positions) data.
   * Only meaningful once mergeFields() has populated {@link #fieldInfos}.
   */
  boolean hasProx() {
    return fieldInfos.hasProx();
  }
  /**
   * Add an IndexReader to the collection of readers that are to be merged
   * @param reader the source segment's reader
   */
  final void add(IndexReader reader) {
    readers.add(reader);
  }
  /**
   *
   * @param i The index of the reader to return
   * @return The ith reader to be merged
   */
  final IndexReader segmentReader(int i) {
    return (IndexReader) readers.get(i);
  }
  /**
   * Merges the readers specified by the {@link #add} method into the directory passed to the constructor
   * (convenience overload that always merges the doc stores as well).
   * @return The number of documents that were merged
   * @throws CorruptIndexException if the index is corrupt
   * @throws IOException if there is a low-level IO error
   */
  final int merge() throws CorruptIndexException, IOException {
    return merge(true);
  }
/**
* Merges the readers specified by the {@link #add} method
* into the directory passed to the constructor.
* @param mergeDocStores if false, we will not merge the
* stored fields nor vectors files
* @return The number of documents that were merged
* @throws CorruptIndexException if the index is corrupt
* @throws IOException if there is a low-level IO error
*/
final int merge(boolean mergeDocStores) throws CorruptIndexException, IOException {
this.mergeDocStores = mergeDocStores;
// NOTE: it's important to add calls to
// checkAbort.work(...) if you make any changes to this
// method that will spend alot of time. The frequency
// of this check impacts how long
// IndexWriter.close(false) takes to actually stop the
// threads.
mergedDocs = mergeFields();
mergeTerms();
mergeNorms();
if (mergeDocStores && fieldInfos.hasVectors())
mergeVectors();
return mergedDocs;
}
/**
* close all IndexReaders that have been added.
* Should not be called before merge().
* @throws IOException
*/
final void closeReaders() throws IOException {
for (int i = 0; i < readers.size(); i++) { // close readers
IndexReader reader = (IndexReader) readers.get(i);
reader.close();
}
}
  /**
   * Bundles the merged segment's files into a single compound (.cfs) file.
   *
   * @param fileName name of the compound file to create
   * @return the list of file names that were folded into the compound file
   */
  final List createCompoundFile(String fileName)
          throws IOException {
    CompoundFileWriter cfsWriter =
            new CompoundFileWriter(directory, fileName, checkAbort);

    List files =
      new ArrayList(IndexFileNames.COMPOUND_EXTENSIONS.length + 1);

    // Basic files
    for (int i = 0; i < IndexFileNames.COMPOUND_EXTENSIONS.length; i++) {
      String ext = IndexFileNames.COMPOUND_EXTENSIONS[i];

      // Skip the prox file entirely when no field stores positions.
      if (ext.equals(IndexFileNames.PROX_EXTENSION) && !hasProx())
        continue;

      // Stored-field files are only present when we merged the doc stores.
      if (mergeDocStores || (!ext.equals(IndexFileNames.FIELDS_EXTENSION) &&
                            !ext.equals(IndexFileNames.FIELDS_INDEX_EXTENSION)))
        files.add(segment + "." + ext);
    }

    // Fieldable norm files
    for (int i = 0; i < fieldInfos.size(); i++) {
      FieldInfo fi = fieldInfos.fieldInfo(i);
      // A single shared norms file exists if any indexed field keeps norms.
      if (fi.isIndexed && !fi.omitNorms) {
        files.add(segment + "." + IndexFileNames.NORMS_EXTENSION);
        break;
      }
    }

    // Vector files
    if (fieldInfos.hasVectors() && mergeDocStores) {
      for (int i = 0; i < IndexFileNames.VECTOR_EXTENSIONS.length; i++) {
        files.add(segment + "." + IndexFileNames.VECTOR_EXTENSIONS[i]);
      }
    }

    // Now merge all added files
    Iterator it = files.iterator();
    while (it.hasNext()) {
      cfsWriter.addFile((String) it.next());
    }

    // Perform the merge
    cfsWriter.close();

    return files;
  }
private void addIndexed(IndexReader reader, FieldInfos fieldInfos, Collection names, boolean storeTermVectors, boolean storePositionWithTermVector,
boolean storeOffsetWithTermVector, boolean storePayloads, boolean omitTf) throws IOException {
Iterator i = names.iterator();
while (i.hasNext()) {
String field = (String)i.next();
fieldInfos.add(field, true, storeTermVectors, storePositionWithTermVector, storeOffsetWithTermVector, !reader.hasNorms(field), storePayloads, omitTf);
}
}
  // Per-reader SegmentReaders eligible for bulk byte copying; a null entry means the
  // corresponding reader must be merged document-by-document instead.
  private SegmentReader[] matchingSegmentReaders;

  // Scratch buffers reused by raw stored-field copying (sized MAX_RAW_MERGE_DOCS).
  private int[] rawDocLengths;
  private int[] rawDocLengths2;
  /**
   * Determines, for each source reader, whether it is a SegmentReader whose field
   * name -> number mapping matches the merged {@link #fieldInfos}; only those can be
   * bulk-copied. Also allocates the scratch buffers used by the raw copy path.
   */
  private void setMatchingSegmentReaders() {
    // If the i'th reader is a SegmentReader and has
    // identical fieldName -> number mapping, then this
    // array will be non-null at position i:
    matchingSegmentReaders = new SegmentReader[readers.size()];

    // If this reader is a SegmentReader, and all of its
    // field name -> number mappings match the "merged"
    // FieldInfos, then we can do a bulk copy of the
    // stored fields:
    for (int i = 0; i < readers.size(); i++) {
      IndexReader reader = (IndexReader) readers.get(i);
      if (reader instanceof SegmentReader) {
        SegmentReader segmentReader = (SegmentReader) reader;
        boolean same = true;
        FieldInfos segmentFieldInfos = segmentReader.getFieldInfos();
        // Compare field numbering position-by-position; any mismatch disqualifies.
        for (int j = 0; same && j < segmentFieldInfos.size(); j++)
          same = fieldInfos.fieldName(j).equals(segmentFieldInfos.fieldName(j));
        if (same)
          matchingSegmentReaders[i] = segmentReader;
      }
    }

    // Used for bulk-reading raw bytes for stored fields
    rawDocLengths = new int[MAX_RAW_MERGE_DOCS];
    rawDocLengths2 = new int[MAX_RAW_MERGE_DOCS];
  }
  /**
   * Merges the FieldInfos of all readers and, when doc stores are being merged, copies
   * the stored fields into the new segment (bulk raw-byte copy where the field
   * numbering matches, per-document copy otherwise).
   *
   * @return The number of documents in all of the readers
   * @throws CorruptIndexException if the index is corrupt
   * @throws IOException if there is a low-level IO error
   */
  private final int mergeFields() throws CorruptIndexException, IOException {

    if (!mergeDocStores) {
      // When we are not merging by doc stores, that means
      // all segments were written as part of a single
      // autoCommit=false IndexWriter session, so their field
      // name -> number mapping are the same.  So, we start
      // with the fieldInfos of the last segment in this
      // case, to keep that numbering.
      final SegmentReader sr = (SegmentReader) readers.get(readers.size()-1);
      fieldInfos = (FieldInfos) sr.fieldInfos.clone();
    } else {
      fieldInfos = new FieldInfos(); // merge field names
    }

    for (int i = 0; i < readers.size(); i++) {
      IndexReader reader = (IndexReader) readers.get(i);
      if (reader instanceof SegmentReader) {
        // Fast path: copy each FieldInfo's flags directly.
        SegmentReader segmentReader = (SegmentReader) reader;
        for (int j = 0; j < segmentReader.getFieldInfos().size(); j++) {
          FieldInfo fi = segmentReader.getFieldInfos().fieldInfo(j);
          fieldInfos.add(fi.name, fi.isIndexed, fi.storeTermVector, fi.storePositionWithTermVector, fi.storeOffsetWithTermVector, !reader.hasNorms(fi.name), fi.storePayloads, fi.omitTf);
        }
      } else {
        // Generic reader: reconstruct per-field flags from the field-option views.
        addIndexed(reader, fieldInfos, reader.getFieldNames(IndexReader.FieldOption.TERMVECTOR_WITH_POSITION_OFFSET), true, true, true, false, false);
        addIndexed(reader, fieldInfos, reader.getFieldNames(IndexReader.FieldOption.TERMVECTOR_WITH_POSITION), true, true, false, false, false);
        addIndexed(reader, fieldInfos, reader.getFieldNames(IndexReader.FieldOption.TERMVECTOR_WITH_OFFSET), true, false, true, false, false);
        addIndexed(reader, fieldInfos, reader.getFieldNames(IndexReader.FieldOption.TERMVECTOR), true, false, false, false, false);
        addIndexed(reader, fieldInfos, reader.getFieldNames(IndexReader.FieldOption.OMIT_TF), false, false, false, false, true);
        addIndexed(reader, fieldInfos, reader.getFieldNames(IndexReader.FieldOption.STORES_PAYLOADS), false, false, false, true, false);
        addIndexed(reader, fieldInfos, reader.getFieldNames(IndexReader.FieldOption.INDEXED), false, false, false, false, false);
        fieldInfos.add(reader.getFieldNames(IndexReader.FieldOption.UNINDEXED), false);
      }
    }
    fieldInfos.write(directory, segment + ".fnm");

    int docCount = 0;

    setMatchingSegmentReaders();

    if (mergeDocStores) {

      // for merging we don't want to compress/uncompress the data, so to tell the FieldsReader that we're
      // in merge mode, we use this FieldSelector
      FieldSelector fieldSelectorMerge = new FieldSelector() {
        public FieldSelectorResult accept(String fieldName) {
          return FieldSelectorResult.LOAD_FOR_MERGE;
        }
      };

      // merge field values
      final FieldsWriter fieldsWriter = new FieldsWriter(directory, segment, fieldInfos);

      try {
        for (int i = 0; i < readers.size(); i++) {
          final IndexReader reader = (IndexReader) readers.get(i);
          final SegmentReader matchingSegmentReader = matchingSegmentReaders[i];
          final FieldsReader matchingFieldsReader;
          final boolean hasMatchingReader;
          if (matchingSegmentReader != null) {
            // Even a field-compatible reader can't be raw-copied if its FieldsReader
            // is an older format that cannot serve raw documents.
            final FieldsReader fieldsReader = matchingSegmentReader.getFieldsReader();
            if (fieldsReader != null && !fieldsReader.canReadRawDocs()) {
              matchingFieldsReader = null;
              hasMatchingReader = false;
            } else {
              matchingFieldsReader = fieldsReader;
              hasMatchingReader = true;
            }
          } else {
            hasMatchingReader = false;
            matchingFieldsReader = null;
          }
          final int maxDoc = reader.maxDoc();
          final boolean hasDeletions = reader.hasDeletions();
          for (int j = 0; j < maxDoc;) {
            if (!hasDeletions || !reader.isDeleted(j)) { // skip deleted docs
              if (hasMatchingReader) {
                // We can optimize this case (doing a bulk
                // byte copy) since the field numbers are
                // identical
                int start = j;
                int numDocs = 0;
                // Extend the run until a deleted doc, end of segment, or the
                // MAX_RAW_MERGE_DOCS cap is hit.
                do {
                  j++;
                  numDocs++;
                  if (j >= maxDoc)
                    break;
                  if (hasDeletions && matchingSegmentReader.isDeleted(j)) {
                    j++;
                    break;
                  }
                } while(numDocs < MAX_RAW_MERGE_DOCS);

                IndexInput stream = matchingFieldsReader.rawDocs(rawDocLengths, start, numDocs);
                fieldsWriter.addRawDocuments(stream, rawDocLengths, numDocs);
                docCount += numDocs;
                if (checkAbort != null)
                  checkAbort.work(300*numDocs);
              } else {
                // NOTE: it's very important to first assign
                // to doc then pass it to
                // termVectorsWriter.addAllDocVectors; see
                // LUCENE-1282
                Document doc = reader.document(j, fieldSelectorMerge);
                fieldsWriter.addDocument(doc);
                j++;
                docCount++;
                if (checkAbort != null)
                  checkAbort.work(300);
              }
            } else
              j++;
          }
        }
      } finally {
        fieldsWriter.close();
      }

      final long fdxFileLength = directory.fileLength(segment + "." + IndexFileNames.FIELDS_INDEX_EXTENSION);

      if (4+((long) docCount)*8 != fdxFileLength)
        // This is most likely a bug in Sun JRE 1.6.0_04/_05;
        // we detect that the bug has struck, here, and
        // throw an exception to prevent the corruption from
        // entering the index.  See LUCENE-1282 for
        // details.
        throw new RuntimeException("mergeFields produced an invalid result: docCount is " + docCount + " but fdx file size is " + fdxFileLength + "; now aborting this merge to prevent index corruption");

    } else
      // If we are skipping the doc stores, that means there
      // are no deletions in any of these segments, so we
      // just sum numDocs() of each segment to get total docCount
      for (int i = 0; i < readers.size(); i++)
        docCount += ((IndexReader) readers.get(i)).numDocs();

    return docCount;
  }
/**
 * Merge the TermVectors from each of the segments into the new one.
 * Bulk-copies raw vector bytes when a source segment's field numbering
 * matches the merged FieldInfos; otherwise re-serializes per document.
 * @throws IOException if there is a low-level IO error
 */
private final void mergeVectors() throws IOException {
  TermVectorsWriter termVectorsWriter =
    new TermVectorsWriter(directory, segment, fieldInfos);
  try {
    for (int r = 0; r < readers.size(); r++) {
      final SegmentReader matchingSegmentReader = matchingSegmentReaders[r];
      TermVectorsReader matchingVectorsReader;
      final boolean hasMatchingReader;
      if (matchingSegmentReader != null) {
        matchingVectorsReader = matchingSegmentReader.termVectorsReaderOrig;
        // If the TV* files are an older format then they
        // cannot read raw docs:
        if (matchingVectorsReader != null && !matchingVectorsReader.canReadRawDocs()) {
          matchingVectorsReader = null;
          hasMatchingReader = false;
        } else
          hasMatchingReader = matchingVectorsReader != null;
      } else {
        hasMatchingReader = false;
        matchingVectorsReader = null;
      }
      IndexReader reader = (IndexReader) readers.get(r);
      final boolean hasDeletions = reader.hasDeletions();
      int maxDoc = reader.maxDoc();
      for (int docNum = 0; docNum < maxDoc;) {
        // skip deleted docs
        if (!hasDeletions || !reader.isDeleted(docNum)) {
          if (hasMatchingReader) {
            // We can optimize this case (doing a bulk
            // byte copy) since the field numbers are
            // identical
            int start = docNum;
            int numDocs = 0;
            // Extend the contiguous run until a deleted doc, the end of
            // the segment, or the bulk-copy cap is reached.
            do {
              docNum++;
              numDocs++;
              if (docNum >= maxDoc)
                break;
              if (hasDeletions && matchingSegmentReader.isDeleted(docNum)) {
                docNum++;
                break;
              }
            } while(numDocs < MAX_RAW_MERGE_DOCS);
            matchingVectorsReader.rawDocs(rawDocLengths, rawDocLengths2, start, numDocs);
            termVectorsWriter.addRawDocuments(matchingVectorsReader, rawDocLengths, rawDocLengths2, numDocs);
            if (checkAbort != null)
              checkAbort.work(300*numDocs);
          } else {
            // NOTE: it's very important to first assign
            // to vectors then pass it to
            // termVectorsWriter.addAllDocVectors; see
            // LUCENE-1282
            TermFreqVector[] vectors = reader.getTermFreqVectors(docNum);
            termVectorsWriter.addAllDocVectors(vectors);
            docNum++;
            if (checkAbort != null)
              checkAbort.work(300);
          }
        } else
          docNum++;
      }
    }
  } finally {
    termVectorsWriter.close();
  }
  // Sanity check: tvx is a 4-byte header plus 16 bytes per merged doc;
  // computed in long arithmetic to avoid int overflow (LUCENE-1519).
  final long tvxSize = directory.fileLength(segment + "." + IndexFileNames.VECTORS_INDEX_EXTENSION);
  if (4+((long) mergedDocs)*16 != tvxSize)
    // This is most likely a bug in Sun JRE 1.6.0_04/_05;
    // we detect that the bug has struck, here, and
    // throw an exception to prevent the corruption from
    // entering the index. See LUCENE-1282 for
    // details.
    throw new RuntimeException("mergeVectors produced an invalid result: mergedDocs is " + mergedDocs + " but tvx size is " + tvxSize + "; now aborting this merge to prevent index corruption");
}
// Priority queue of per-segment term enumerators, ordered by term;
// created lazily in mergeTerms() and closed there.
private SegmentMergeQueue queue = null;
/**
 * Merges the term dictionaries and postings of all readers into the
 * new segment's postings files.
 * @throws CorruptIndexException if the index is corrupt
 * @throws IOException if there is a low-level IO error
 */
private final void mergeTerms() throws CorruptIndexException, IOException {
  SegmentWriteState state = new SegmentWriteState(null, directory, segment, null, mergedDocs, 0, termIndexInterval);
  final FormatPostingsFieldsConsumer consumer = new FormatPostingsFieldsWriter(state, fieldInfos);
  try {
    queue = new SegmentMergeQueue(readers.size());
    mergeTermInfos(consumer);
  } finally {
    // Always flush the consumer and release the queue, even on abort.
    consumer.finish();
    if (queue != null) queue.close();
  }
}
// True while the field currently being merged omits term frequencies
// and positions; consulted by appendPostings().
boolean omitTF;
/**
 * Merges terms via a priority queue: repeatedly pops every enumerator
 * positioned on the smallest term, appends their merged postings, then
 * re-advances them.
 */
private final void mergeTermInfos(final FormatPostingsFieldsConsumer consumer) throws CorruptIndexException, IOException {
  int base = 0;
  final int readerCount = readers.size();
  for (int i = 0; i < readerCount; i++) {
    IndexReader reader = (IndexReader) readers.get(i);
    TermEnum termEnum = reader.terms();
    SegmentMergeInfo smi = new SegmentMergeInfo(base, termEnum, reader);
    int[] docMap = smi.getDocMap();
    if (docMap != null) {
      // This reader has deletions: lazily allocate and record its doc
      // remapping and deletion count (exposed via getDocMaps()).
      if (docMaps == null) {
        docMaps = new int[readerCount][];
        delCounts = new int[readerCount];
      }
      docMaps[i] = docMap;
      delCounts[i] = smi.reader.maxDoc() - smi.reader.numDocs();
    }
    base += reader.numDocs();
    if (smi.next())
      queue.put(smi); // initialize queue
    else
      smi.close();
  }
  SegmentMergeInfo[] match = new SegmentMergeInfo[readers.size()];
  String currentField = null;
  FormatPostingsTermsConsumer termsConsumer = null;
  while (queue.size() > 0) {
    int matchSize = 0; // pop matching terms
    match[matchSize++] = (SegmentMergeInfo) queue.pop();
    Term term = match[0].term;
    SegmentMergeInfo top = (SegmentMergeInfo) queue.top();
    // Gather every segment positioned on the same smallest term.
    while (top != null && term.compareTo(top.term) == 0) {
      match[matchSize++] = (SegmentMergeInfo) queue.pop();
      top = (SegmentMergeInfo) queue.top();
    }
    // NOTE(review): reference comparison of field names — presumably
    // relies on field-name interning; confirm before changing.
    if (currentField != term.field) {
      currentField = term.field;
      if (termsConsumer != null)
        termsConsumer.finish();
      final FieldInfo fieldInfo = fieldInfos.fieldInfo(currentField);
      termsConsumer = consumer.addField(fieldInfo);
      omitTF = fieldInfo.omitTf;
    }
    int df = appendPostings(termsConsumer, match, matchSize); // add new TermInfo
    if (checkAbort != null)
      checkAbort.work(df/3.0);
    while (matchSize > 0) {
      SegmentMergeInfo smi = match[--matchSize];
      if (smi.next())
        queue.put(smi); // restore queue
      else
        smi.close(); // done with a segment
    }
  }
}
// Scratch buffer reused across postings for payload bytes.
private byte[] payloadBuffer;
// Per-reader doc-id remapping around deletions; null when no added
// reader had deletions. Filled lazily by mergeTermInfos().
private int[][] docMaps;
int[][] getDocMaps() {
  return docMaps;
}
// Per-reader deleted-doc counts, parallel to docMaps.
private int[] delCounts;
int[] getDelCounts() {
  return delCounts;
}
/** Process postings from multiple segments all positioned on the
 * same term. Writes out merged entries into freqOutput and
 * the proxOutput streams.
 *
 * @param termsConsumer consumer receiving the merged term's postings
 * @param smis array of segments
 * @param n number of cells in the array actually occupied
 * @return number of documents across all segments where this term was found
 * @throws CorruptIndexException if the index is corrupt
 * @throws IOException if there is a low-level IO error
 */
private final int appendPostings(final FormatPostingsTermsConsumer termsConsumer, SegmentMergeInfo[] smis, int n)
      throws CorruptIndexException, IOException {
  final FormatPostingsDocsConsumer docConsumer = termsConsumer.addTerm(smis[0].term.text);
  int df = 0;
  for (int i = 0; i < n; i++) {
    SegmentMergeInfo smi = smis[i];
    TermPositions postings = smi.getPositions();
    assert postings != null;
    int base = smi.base;
    int[] docMap = smi.getDocMap();
    postings.seek(smi.termEnum);
    while (postings.next()) {
      df++;
      int doc = postings.doc();
      if (docMap != null)
        doc = docMap[doc]; // map around deletions
      doc += base; // convert to merged space
      final int freq = postings.freq();
      final FormatPostingsPositionsConsumer posConsumer = docConsumer.addDoc(doc, freq);
      if (!omitTF) {
        // Copy positions (and payloads, when present) for this doc.
        for (int j = 0; j < freq; j++) {
          final int position = postings.nextPosition();
          final int payloadLength = postings.getPayloadLength();
          if (payloadLength > 0) {
            // Grow the shared payload buffer as needed; reused across
            // postings to avoid per-position allocation.
            if (payloadBuffer == null || payloadBuffer.length < payloadLength)
              payloadBuffer = new byte[payloadLength];
            postings.getPayload(payloadBuffer, 0);
          }
          posConsumer.addPosition(position, payloadBuffer, 0, payloadLength);
        }
        posConsumer.finish();
      }
    }
  }
  docConsumer.finish();
  return df;
}
/**
 * Merges norms of every indexed, non-norm-omitting field into the
 * segment's norms file, skipping deleted docs.
 * @throws IOException if there is a low-level IO error
 */
private void mergeNorms() throws IOException {
  byte[] normBuffer = null;
  IndexOutput output = null;
  try {
    for (int i = 0; i < fieldInfos.size(); i++) {
      FieldInfo fi = fieldInfos.fieldInfo(i);
      if (fi.isIndexed && !fi.omitNorms) {
        if (output == null) {
          // Lazily create the norms file only when at least one field
          // actually carries norms.
          output = directory.createOutput(segment + "." + IndexFileNames.NORMS_EXTENSION);
          output.writeBytes(NORMS_HEADER,NORMS_HEADER.length);
        }
        for (int j = 0; j < readers.size(); j++) {
          IndexReader reader = (IndexReader) readers.get(j);
          int maxDoc = reader.maxDoc();
          if (normBuffer == null || normBuffer.length < maxDoc) {
            // the buffer is too small for the current segment
            normBuffer = new byte[maxDoc];
          }
          reader.norms(fi.name, normBuffer, 0);
          if (!reader.hasDeletions()) {
            //optimized case for segments without deleted docs
            output.writeBytes(normBuffer, maxDoc);
          } else {
            // this segment has deleted docs, so we have to
            // check for every doc if it is deleted or not
            for (int k = 0; k < maxDoc; k++) {
              if (!reader.isDeleted(k)) {
                output.writeByte(normBuffer[k]);
              }
            }
          }
          if (checkAbort != null)
            checkAbort.work(maxDoc);
        }
      }
    }
  } finally {
    if (output != null) {
      output.close();
    }
  }
}
/** Throttled poll for merge-abort requests during long-running merges. */
final static class CheckAbort {
  private double workCount;
  private MergePolicy.OneMerge merge;
  private Directory dir;
  public CheckAbort(MergePolicy.OneMerge merge, Directory dir) {
    this.merge = merge;
    this.dir = dir;
  }
  /**
   * Records the fact that roughly units amount of work
   * have been done since this method was last called.
   * When adding time-consuming code into SegmentMerger,
   * you should test different values for units to ensure
   * that the time in between calls to merge.checkAborted
   * is up to ~ 1 second.
   */
  public void work(double units) throws MergePolicy.MergeAbortedException {
    workCount += units;
    if (workCount < 10000.0) {
      return;  // not enough accumulated work yet; skip the poll
    }
    merge.checkAborted(dir);
    workCount = 0;
  }
}
}
|
src/java/org/apache/lucene/index/SegmentMerger.java
|
package org.apache.lucene.index;
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import java.util.HashSet;
import java.util.List;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.FieldSelector;
import org.apache.lucene.document.FieldSelectorResult;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.store.IndexOutput;
/**
* The SegmentMerger class combines two or more Segments, represented by an IndexReader ({@link #add},
* into a single Segment. After adding the appropriate readers, call the merge method to combine the
* segments.
*<P>
* If the compoundFile flag is set, then the segments will be merged into a compound file.
*
*
* @see #merge
* @see #add
*/
final class SegmentMerger {
/** norms header placeholder */
static final byte[] NORMS_HEADER = new byte[]{'N','R','M',-1};
// Target directory and name of the segment being produced.
private Directory directory;
private String segment;
private int termIndexInterval = IndexWriter.DEFAULT_TERM_INDEX_INTERVAL;
// Source readers, one per segment, added via add().
private List readers = new ArrayList();
private FieldInfos fieldInfos;
// Total documents in the merged segment; set by merge().
private int mergedDocs;
// Non-null only when merging on behalf of an IndexWriter; used to poll
// for merge aborts.
private CheckAbort checkAbort;
// Whether we should merge doc stores (stored fields and
// vectors files). When all segments we are merging
// already share the same doc store files, we don't need
// to merge the doc stores.
private boolean mergeDocStores;
/** Maximum number of contiguous documents to bulk-copy
    when merging stored fields */
private final static int MAX_RAW_MERGE_DOCS = 4192;
/** This ctor used only by test code.
 *
 * @param dir The Directory to merge the other segments into
 * @param name The name of the new segment
 */
SegmentMerger(Directory dir, String name) {
  directory = dir;
  segment = name;
}
/**
 * Creates a merger that writes into the writer's directory and, when
 * {@code merge} is non-null, periodically polls it for abort requests.
 */
SegmentMerger(IndexWriter writer, String name, MergePolicy.OneMerge merge) {
  directory = writer.getDirectory();
  segment = name;
  if (merge != null)
    checkAbort = new CheckAbort(merge, directory);
  termIndexInterval = writer.getTermIndexInterval();
}
/** Returns true if any merged field stores positions (prox data). */
boolean hasProx() {
  return fieldInfos.hasProx();
}
/**
 * Add an IndexReader to the collection of readers that are to be merged
 * @param reader reader over one source segment
 */
final void add(IndexReader reader) {
  readers.add(reader);
}
/**
 * Returns one of the readers previously added via {@link #add}.
 * @param i The index of the reader to return
 * @return The ith reader to be merged
 */
final IndexReader segmentReader(int i) {
  return (IndexReader) readers.get(i);
}
/**
 * Merges the readers specified by the {@link #add} method into the directory passed to the constructor
 * @return The number of documents that were merged
 * @throws CorruptIndexException if the index is corrupt
 * @throws IOException if there is a low-level IO error
 */
final int merge() throws CorruptIndexException, IOException {
  // Equivalent to merge(true): doc stores are merged as well.
  return merge(true);
}
/**
 * Merges the readers specified by the {@link #add} method
 * into the directory passed to the constructor.
 * @param mergeDocStores if false, we will not merge the
 * stored fields nor vectors files
 * @return The number of documents that were merged
 * @throws CorruptIndexException if the index is corrupt
 * @throws IOException if there is a low-level IO error
 */
final int merge(boolean mergeDocStores) throws CorruptIndexException, IOException {
  this.mergeDocStores = mergeDocStores;
  // NOTE: it's important to add calls to
  // checkAbort.work(...) if you make any changes to this
  // method that will spend alot of time. The frequency
  // of this check impacts how long
  // IndexWriter.close(false) takes to actually stop the
  // threads.
  // Field infos + stored fields first (sets mergedDocs), then terms,
  // norms, and finally term vectors when doc stores are merged.
  mergedDocs = mergeFields();
  mergeTerms();
  mergeNorms();
  if (mergeDocStores && fieldInfos.hasVectors())
    mergeVectors();
  return mergedDocs;
}
/**
 * close all IndexReaders that have been added.
 * Should not be called before merge().
 * @throws IOException if closing a reader fails
 */
final void closeReaders() throws IOException {
  // Close every source reader in the order it was added.
  for (Iterator it = readers.iterator(); it.hasNext();) {
    ((IndexReader) it.next()).close();
  }
}
/**
 * Bundles all files of the merged segment into a single compound file
 * and returns the list of file names that were included.
 * @throws IOException if there is a low-level IO error
 */
final List createCompoundFile(String fileName)
        throws IOException {
  CompoundFileWriter cfsWriter =
    new CompoundFileWriter(directory, fileName, checkAbort);
  List files =
    new ArrayList(IndexFileNames.COMPOUND_EXTENSIONS.length + 1);
  // Basic files
  for (int i = 0; i < IndexFileNames.COMPOUND_EXTENSIONS.length; i++) {
    String ext = IndexFileNames.COMPOUND_EXTENSIONS[i];
    // Skip the prox file when no field stores positions.
    if (ext.equals(IndexFileNames.PROX_EXTENSION) && !hasProx())
      continue;
    // Stored-field files exist only when doc stores were merged.
    if (mergeDocStores || (!ext.equals(IndexFileNames.FIELDS_EXTENSION) &&
                          !ext.equals(IndexFileNames.FIELDS_INDEX_EXTENSION)))
      files.add(segment + "." + ext);
  }
  // Fieldable norm files
  for (int i = 0; i < fieldInfos.size(); i++) {
    FieldInfo fi = fieldInfos.fieldInfo(i);
    // One shared norms file covers all fields; add it at most once.
    if (fi.isIndexed && !fi.omitNorms) {
      files.add(segment + "." + IndexFileNames.NORMS_EXTENSION);
      break;
    }
  }
  // Vector files
  if (fieldInfos.hasVectors() && mergeDocStores) {
    for (int i = 0; i < IndexFileNames.VECTOR_EXTENSIONS.length; i++) {
      files.add(segment + "." + IndexFileNames.VECTOR_EXTENSIONS[i]);
    }
  }
  // Now merge all added files
  Iterator it = files.iterator();
  while (it.hasNext()) {
    cfsWriter.addFile((String) it.next());
  }
  // Perform the merge
  cfsWriter.close();
  return files;
}
/**
 * Registers every field name in {@code names} as indexed, carrying the
 * given term-vector / payload / omit-tf flags into the merged infos.
 */
private void addIndexed(IndexReader reader, FieldInfos fieldInfos, Collection names, boolean storeTermVectors, boolean storePositionWithTermVector,
                        boolean storeOffsetWithTermVector, boolean storePayloads, boolean omitTf) throws IOException {
  final Iterator fieldIterator = names.iterator();
  while (fieldIterator.hasNext()) {
    final String field = (String) fieldIterator.next();
    fieldInfos.add(field, true, storeTermVectors, storePositionWithTermVector, storeOffsetWithTermVector, !reader.hasNorms(field), storePayloads, omitTf);
  }
}
// matchingSegmentReaders[i] is non-null when reader i is a SegmentReader
// whose field numbering matches the merged FieldInfos, enabling raw
// bulk copies of stored fields and term vectors.
private SegmentReader[] matchingSegmentReaders;
// Scratch arrays for bulk-reading raw per-doc lengths.
private int[] rawDocLengths;
private int[] rawDocLengths2;
/**
 * Determines, for each added reader, whether its field name -> number
 * mapping matches the merged FieldInfos, and allocates the raw-length
 * scratch buffers used by the bulk-copy paths.
 */
private void setMatchingSegmentReaders() {
  // If the i'th reader is a SegmentReader and has
  // identical fieldName -> number mapping, then this
  // array will be non-null at position i:
  matchingSegmentReaders = new SegmentReader[readers.size()];
  // If this reader is a SegmentReader, and all of its
  // field name -> number mappings match the "merged"
  // FieldInfos, then we can do a bulk copy of the
  // stored fields:
  for (int i = 0; i < readers.size(); i++) {
    IndexReader reader = (IndexReader) readers.get(i);
    if (reader instanceof SegmentReader) {
      SegmentReader segmentReader = (SegmentReader) reader;
      boolean same = true;
      FieldInfos segmentFieldInfos = segmentReader.getFieldInfos();
      for (int j = 0; same && j < segmentFieldInfos.size(); j++)
        same = fieldInfos.fieldName(j).equals(segmentFieldInfos.fieldName(j));
      if (same)
        matchingSegmentReaders[i] = segmentReader;
    }
  }
  // Used for bulk-reading raw bytes for stored fields
  rawDocLengths = new int[MAX_RAW_MERGE_DOCS];
  rawDocLengths2 = new int[MAX_RAW_MERGE_DOCS];
}
/**
 * Merges field infos and stored fields of all added readers into the
 * new segment.
 *
 * @return The number of documents in all of the readers
 * @throws CorruptIndexException if the index is corrupt
 * @throws IOException if there is a low-level IO error
 */
private final int mergeFields() throws CorruptIndexException, IOException {
  if (!mergeDocStores) {
    // When we are not merging by doc stores, that means
    // all segments were written as part of a single
    // autoCommit=false IndexWriter session, so their field
    // name -> number mapping are the same. So, we start
    // with the fieldInfos of the last segment in this
    // case, to keep that numbering.
    final SegmentReader sr = (SegmentReader) readers.get(readers.size()-1);
    fieldInfos = (FieldInfos) sr.fieldInfos.clone();
  } else {
    fieldInfos = new FieldInfos(); // merge field names
  }
  for (int i = 0; i < readers.size(); i++) {
    IndexReader reader = (IndexReader) readers.get(i);
    if (reader instanceof SegmentReader) {
      // Carry over each field's flags exactly as stored in the segment.
      SegmentReader segmentReader = (SegmentReader) reader;
      for (int j = 0; j < segmentReader.getFieldInfos().size(); j++) {
        FieldInfo fi = segmentReader.getFieldInfos().fieldInfo(j);
        fieldInfos.add(fi.name, fi.isIndexed, fi.storeTermVector, fi.storePositionWithTermVector, fi.storeOffsetWithTermVector, !reader.hasNorms(fi.name), fi.storePayloads, fi.omitTf);
      }
    } else {
      // Non-segment reader: reconstruct flags from field-option queries.
      addIndexed(reader, fieldInfos, reader.getFieldNames(IndexReader.FieldOption.TERMVECTOR_WITH_POSITION_OFFSET), true, true, true, false, false);
      addIndexed(reader, fieldInfos, reader.getFieldNames(IndexReader.FieldOption.TERMVECTOR_WITH_POSITION), true, true, false, false, false);
      addIndexed(reader, fieldInfos, reader.getFieldNames(IndexReader.FieldOption.TERMVECTOR_WITH_OFFSET), true, false, true, false, false);
      addIndexed(reader, fieldInfos, reader.getFieldNames(IndexReader.FieldOption.TERMVECTOR), true, false, false, false, false);
      addIndexed(reader, fieldInfos, reader.getFieldNames(IndexReader.FieldOption.OMIT_TF), false, false, false, false, true);
      addIndexed(reader, fieldInfos, reader.getFieldNames(IndexReader.FieldOption.STORES_PAYLOADS), false, false, false, true, false);
      addIndexed(reader, fieldInfos, reader.getFieldNames(IndexReader.FieldOption.INDEXED), false, false, false, false, false);
      fieldInfos.add(reader.getFieldNames(IndexReader.FieldOption.UNINDEXED), false);
    }
  }
  fieldInfos.write(directory, segment + ".fnm");
  int docCount = 0;
  setMatchingSegmentReaders();
  if (mergeDocStores) {
    // for merging we don't want to compress/uncompress the data, so to tell the FieldsReader that we're
    // in merge mode, we use this FieldSelector
    FieldSelector fieldSelectorMerge = new FieldSelector() {
      public FieldSelectorResult accept(String fieldName) {
        return FieldSelectorResult.LOAD_FOR_MERGE;
      }
    };
    // merge field values
    final FieldsWriter fieldsWriter = new FieldsWriter(directory, segment, fieldInfos);
    try {
      for (int i = 0; i < readers.size(); i++) {
        final IndexReader reader = (IndexReader) readers.get(i);
        final SegmentReader matchingSegmentReader = matchingSegmentReaders[i];
        final FieldsReader matchingFieldsReader;
        final boolean hasMatchingReader;
        if (matchingSegmentReader != null) {
          final FieldsReader fieldsReader = matchingSegmentReader.getFieldsReader();
          if (fieldsReader != null && !fieldsReader.canReadRawDocs()) {
            // Older stored-fields format: cannot bulk-copy raw bytes.
            matchingFieldsReader = null;
            hasMatchingReader = false;
          } else {
            matchingFieldsReader = fieldsReader;
            hasMatchingReader = true;
          }
        } else {
          hasMatchingReader = false;
          matchingFieldsReader = null;
        }
        final int maxDoc = reader.maxDoc();
        final boolean hasDeletions = reader.hasDeletions();
        for (int j = 0; j < maxDoc;) {
          if (!hasDeletions || !reader.isDeleted(j)) { // skip deleted docs
            if (hasMatchingReader) {
              // We can optimize this case (doing a bulk
              // byte copy) since the field numbers are
              // identical
              int start = j;
              int numDocs = 0;
              do {
                j++;
                numDocs++;
                if (j >= maxDoc)
                  break;
                if (hasDeletions && matchingSegmentReader.isDeleted(j)) {
                  j++;
                  break;
                }
              } while(numDocs < MAX_RAW_MERGE_DOCS);
              IndexInput stream = matchingFieldsReader.rawDocs(rawDocLengths, start, numDocs);
              fieldsWriter.addRawDocuments(stream, rawDocLengths, numDocs);
              docCount += numDocs;
              if (checkAbort != null)
                checkAbort.work(300*numDocs);
            } else {
              // NOTE: it's very important to first assign
              // to doc then pass it to
              // termVectorsWriter.addAllDocVectors; see
              // LUCENE-1282
              Document doc = reader.document(j, fieldSelectorMerge);
              fieldsWriter.addDocument(doc);
              j++;
              docCount++;
              if (checkAbort != null)
                checkAbort.work(300);
            }
          } else
            j++;
        }
      }
    } finally {
      fieldsWriter.close();
    }
    // Sanity check: the fdx file is a 4-byte header plus one 8-byte
    // pointer per doc. Compute the expected size in long arithmetic --
    // docCount*8 overflows int for > ~268M docs (LUCENE-1519).
    final long fdxFileLength = directory.fileLength(segment + "." + IndexFileNames.FIELDS_INDEX_EXTENSION);
    if (4+((long) docCount)*8 != fdxFileLength)
      // This is most likely a bug in Sun JRE 1.6.0_04/_05;
      // we detect that the bug has struck, here, and
      // throw an exception to prevent the corruption from
      // entering the index. See LUCENE-1282 for
      // details.
      throw new RuntimeException("mergeFields produced an invalid result: docCount is " + docCount + " but fdx file size is " + fdxFileLength + "; now aborting this merge to prevent index corruption");
  } else
    // If we are skipping the doc stores, that means there
    // are no deletions in any of these segments, so we
    // just sum numDocs() of each segment to get total docCount
    for (int i = 0; i < readers.size(); i++)
      docCount += ((IndexReader) readers.get(i)).numDocs();
  return docCount;
}
/**
 * Merge the TermVectors from each of the segments into the new one.
 * @throws IOException if there is a low-level IO error
 */
private final void mergeVectors() throws IOException {
  TermVectorsWriter termVectorsWriter =
    new TermVectorsWriter(directory, segment, fieldInfos);
  try {
    for (int r = 0; r < readers.size(); r++) {
      final SegmentReader matchingSegmentReader = matchingSegmentReaders[r];
      TermVectorsReader matchingVectorsReader;
      final boolean hasMatchingReader;
      if (matchingSegmentReader != null) {
        matchingVectorsReader = matchingSegmentReader.termVectorsReaderOrig;
        // If the TV* files are an older format then they
        // cannot read raw docs:
        if (matchingVectorsReader != null && !matchingVectorsReader.canReadRawDocs()) {
          matchingVectorsReader = null;
          hasMatchingReader = false;
        } else
          hasMatchingReader = matchingVectorsReader != null;
      } else {
        hasMatchingReader = false;
        matchingVectorsReader = null;
      }
      IndexReader reader = (IndexReader) readers.get(r);
      final boolean hasDeletions = reader.hasDeletions();
      int maxDoc = reader.maxDoc();
      for (int docNum = 0; docNum < maxDoc;) {
        // skip deleted docs
        if (!hasDeletions || !reader.isDeleted(docNum)) {
          if (hasMatchingReader) {
            // We can optimize this case (doing a bulk
            // byte copy) since the field numbers are
            // identical
            int start = docNum;
            int numDocs = 0;
            do {
              docNum++;
              numDocs++;
              if (docNum >= maxDoc)
                break;
              if (hasDeletions && matchingSegmentReader.isDeleted(docNum)) {
                docNum++;
                break;
              }
            } while(numDocs < MAX_RAW_MERGE_DOCS);
            matchingVectorsReader.rawDocs(rawDocLengths, rawDocLengths2, start, numDocs);
            termVectorsWriter.addRawDocuments(matchingVectorsReader, rawDocLengths, rawDocLengths2, numDocs);
            if (checkAbort != null)
              checkAbort.work(300*numDocs);
          } else {
            // NOTE: it's very important to first assign
            // to vectors then pass it to
            // termVectorsWriter.addAllDocVectors; see
            // LUCENE-1282
            TermFreqVector[] vectors = reader.getTermFreqVectors(docNum);
            termVectorsWriter.addAllDocVectors(vectors);
            docNum++;
            if (checkAbort != null)
              checkAbort.work(300);
          }
        } else
          docNum++;
      }
    }
  } finally {
    termVectorsWriter.close();
  }
  // Sanity check: tvx is a 4-byte header plus 16 bytes per merged doc.
  // Compute the expected size in long arithmetic -- mergedDocs*16
  // overflows int for large segments (LUCENE-1519).
  final long tvxSize = directory.fileLength(segment + "." + IndexFileNames.VECTORS_INDEX_EXTENSION);
  if (4+((long) mergedDocs)*16 != tvxSize)
    // This is most likely a bug in Sun JRE 1.6.0_04/_05;
    // we detect that the bug has struck, here, and
    // throw an exception to prevent the corruption from
    // entering the index. See LUCENE-1282 for
    // details.
    throw new RuntimeException("mergeVectors produced an invalid result: mergedDocs is " + mergedDocs + " but tvx size is " + tvxSize + "; now aborting this merge to prevent index corruption");
}
// Priority queue of per-segment term enumerators, ordered by term;
// created lazily in mergeTerms() and closed there.
private SegmentMergeQueue queue = null;
/**
 * Merges the term dictionaries and postings of all readers into the
 * new segment's postings files.
 * @throws CorruptIndexException if the index is corrupt
 * @throws IOException if there is a low-level IO error
 */
private final void mergeTerms() throws CorruptIndexException, IOException {
  SegmentWriteState state = new SegmentWriteState(null, directory, segment, null, mergedDocs, 0, termIndexInterval);
  final FormatPostingsFieldsConsumer consumer = new FormatPostingsFieldsWriter(state, fieldInfos);
  try {
    queue = new SegmentMergeQueue(readers.size());
    mergeTermInfos(consumer);
  } finally {
    // Always flush the consumer and release the queue, even on abort.
    consumer.finish();
    if (queue != null) queue.close();
  }
}
// True while the field currently being merged omits term frequencies
// and positions; consulted by appendPostings().
boolean omitTF;
/**
 * Merges terms via a priority queue: repeatedly pops every enumerator
 * positioned on the smallest term, appends their merged postings, then
 * re-advances them.
 */
private final void mergeTermInfos(final FormatPostingsFieldsConsumer consumer) throws CorruptIndexException, IOException {
  int base = 0;
  final int readerCount = readers.size();
  for (int i = 0; i < readerCount; i++) {
    IndexReader reader = (IndexReader) readers.get(i);
    TermEnum termEnum = reader.terms();
    SegmentMergeInfo smi = new SegmentMergeInfo(base, termEnum, reader);
    int[] docMap = smi.getDocMap();
    if (docMap != null) {
      // This reader has deletions: lazily allocate and record its doc
      // remapping and deletion count (exposed via getDocMaps()).
      if (docMaps == null) {
        docMaps = new int[readerCount][];
        delCounts = new int[readerCount];
      }
      docMaps[i] = docMap;
      delCounts[i] = smi.reader.maxDoc() - smi.reader.numDocs();
    }
    base += reader.numDocs();
    if (smi.next())
      queue.put(smi); // initialize queue
    else
      smi.close();
  }
  SegmentMergeInfo[] match = new SegmentMergeInfo[readers.size()];
  String currentField = null;
  FormatPostingsTermsConsumer termsConsumer = null;
  while (queue.size() > 0) {
    int matchSize = 0; // pop matching terms
    match[matchSize++] = (SegmentMergeInfo) queue.pop();
    Term term = match[0].term;
    SegmentMergeInfo top = (SegmentMergeInfo) queue.top();
    // Gather every segment positioned on the same smallest term.
    while (top != null && term.compareTo(top.term) == 0) {
      match[matchSize++] = (SegmentMergeInfo) queue.pop();
      top = (SegmentMergeInfo) queue.top();
    }
    // NOTE(review): reference comparison of field names — presumably
    // relies on field-name interning; confirm before changing.
    if (currentField != term.field) {
      currentField = term.field;
      if (termsConsumer != null)
        termsConsumer.finish();
      final FieldInfo fieldInfo = fieldInfos.fieldInfo(currentField);
      termsConsumer = consumer.addField(fieldInfo);
      omitTF = fieldInfo.omitTf;
    }
    int df = appendPostings(termsConsumer, match, matchSize); // add new TermInfo
    if (checkAbort != null)
      checkAbort.work(df/3.0);
    while (matchSize > 0) {
      SegmentMergeInfo smi = match[--matchSize];
      if (smi.next())
        queue.put(smi); // restore queue
      else
        smi.close(); // done with a segment
    }
  }
}
// Scratch buffer reused across postings for payload bytes.
private byte[] payloadBuffer;
// Per-reader doc-id remapping around deletions; null when no added
// reader had deletions. Filled lazily by mergeTermInfos().
private int[][] docMaps;
int[][] getDocMaps() {
  return docMaps;
}
// Per-reader deleted-doc counts, parallel to docMaps.
private int[] delCounts;
int[] getDelCounts() {
  return delCounts;
}
/** Process postings from multiple segments all positioned on the
 * same term. Writes out merged entries into freqOutput and
 * the proxOutput streams.
 *
 * @param termsConsumer consumer receiving the merged term's postings
 * @param smis array of segments
 * @param n number of cells in the array actually occupied
 * @return number of documents across all segments where this term was found
 * @throws CorruptIndexException if the index is corrupt
 * @throws IOException if there is a low-level IO error
 */
private final int appendPostings(final FormatPostingsTermsConsumer termsConsumer, SegmentMergeInfo[] smis, int n)
      throws CorruptIndexException, IOException {
  final FormatPostingsDocsConsumer docConsumer = termsConsumer.addTerm(smis[0].term.text);
  int df = 0;
  for (int i = 0; i < n; i++) {
    SegmentMergeInfo smi = smis[i];
    TermPositions postings = smi.getPositions();
    assert postings != null;
    int base = smi.base;
    int[] docMap = smi.getDocMap();
    postings.seek(smi.termEnum);
    while (postings.next()) {
      df++;
      int doc = postings.doc();
      if (docMap != null)
        doc = docMap[doc]; // map around deletions
      doc += base; // convert to merged space
      final int freq = postings.freq();
      final FormatPostingsPositionsConsumer posConsumer = docConsumer.addDoc(doc, freq);
      if (!omitTF) {
        // Copy positions (and payloads, when present) for this doc.
        for (int j = 0; j < freq; j++) {
          final int position = postings.nextPosition();
          final int payloadLength = postings.getPayloadLength();
          if (payloadLength > 0) {
            // Grow the shared payload buffer as needed; reused across
            // postings to avoid per-position allocation.
            if (payloadBuffer == null || payloadBuffer.length < payloadLength)
              payloadBuffer = new byte[payloadLength];
            postings.getPayload(payloadBuffer, 0);
          }
          posConsumer.addPosition(position, payloadBuffer, 0, payloadLength);
        }
        posConsumer.finish();
      }
    }
  }
  docConsumer.finish();
  return df;
}
/**
 * Merges norms of every indexed, non-norm-omitting field into the
 * segment's norms file, skipping deleted docs.
 * @throws IOException if there is a low-level IO error
 */
private void mergeNorms() throws IOException {
  byte[] normBuffer = null;
  IndexOutput output = null;
  try {
    for (int i = 0; i < fieldInfos.size(); i++) {
      FieldInfo fi = fieldInfos.fieldInfo(i);
      if (fi.isIndexed && !fi.omitNorms) {
        if (output == null) {
          // Lazily create the norms file only when at least one field
          // actually carries norms.
          output = directory.createOutput(segment + "." + IndexFileNames.NORMS_EXTENSION);
          output.writeBytes(NORMS_HEADER,NORMS_HEADER.length);
        }
        for (int j = 0; j < readers.size(); j++) {
          IndexReader reader = (IndexReader) readers.get(j);
          int maxDoc = reader.maxDoc();
          if (normBuffer == null || normBuffer.length < maxDoc) {
            // the buffer is too small for the current segment
            normBuffer = new byte[maxDoc];
          }
          reader.norms(fi.name, normBuffer, 0);
          if (!reader.hasDeletions()) {
            //optimized case for segments without deleted docs
            output.writeBytes(normBuffer, maxDoc);
          } else {
            // this segment has deleted docs, so we have to
            // check for every doc if it is deleted or not
            for (int k = 0; k < maxDoc; k++) {
              if (!reader.isDeleted(k)) {
                output.writeByte(normBuffer[k]);
              }
            }
          }
          if (checkAbort != null)
            checkAbort.work(maxDoc);
        }
      }
    }
  } finally {
    if (output != null) {
      output.close();
    }
  }
}
/** Throttled poll for merge-abort requests during long-running merges. */
final static class CheckAbort {
  private double workCount;
  private MergePolicy.OneMerge merge;
  private Directory dir;
  public CheckAbort(MergePolicy.OneMerge merge, Directory dir) {
    this.merge = merge;
    this.dir = dir;
  }
  /**
   * Records the fact that roughly units amount of work
   * have been done since this method was last called.
   * When adding time-consuming code into SegmentMerger,
   * you should test different values for units to ensure
   * that the time in between calls to merge.checkAborted
   * is up to ~ 1 second.
   */
  public void work(double units) throws MergePolicy.MergeAbortedException {
    workCount += units;
    if (workCount < 10000.0) {
      return;  // not enough accumulated work yet; skip the poll
    }
    merge.checkAborted(dir);
    workCount = 0;
  }
}
}
|
LUCENE-1519: upcast to long to prevent overflow
git-svn-id: 4c5078813df38efa56971a28e09a55254294f104@734415 13f79535-47bb-0310-9956-ffa450edef68
|
src/java/org/apache/lucene/index/SegmentMerger.java
|
LUCENE-1519: upcast to long to prevent overflow
|
|
Java
|
apache-2.0
|
6818a6f49ec809eadddaed622b5f31a61cf621fe
| 0
|
superspindel/cgeo,samueltardieu/cgeo,superspindel/cgeo,ThibaultR/cgeo,matej116/cgeo,pstorch/cgeo,S-Bartfast/cgeo,cgeo/cgeo,yummy222/cgeo,rsudev/c-geo-opensource,matej116/cgeo,madankb/cgeo,kumy/cgeo,marco-dev/c-geo-opensource,pstorch/cgeo,Huertix/cgeo,vishwakulkarni/cgeo,schwabe/cgeo,Bananeweizen/cgeo,madankb/cgeo,rsudev/c-geo-opensource,brok85/cgeo,mucek4/cgeo,tobiasge/cgeo,superspindel/cgeo,xiaoyanit/cgeo,SammysHP/cgeo,KublaikhanGeek/cgeo,schwabe/cgeo,cgeo/cgeo,SammysHP/cgeo,kumy/cgeo,vishwakulkarni/cgeo,KublaikhanGeek/cgeo,ThibaultR/cgeo,lewurm/cgeo,auricgoldfinger/cgeo,S-Bartfast/cgeo,xiaoyanit/cgeo,S-Bartfast/cgeo,auricgoldfinger/cgeo,Bananeweizen/cgeo,KublaikhanGeek/cgeo,mucek4/cgeo,schwabe/cgeo,samueltardieu/cgeo,kumy/cgeo,yummy222/cgeo,xiaoyanit/cgeo,mucek4/cgeo,Huertix/cgeo,Bananeweizen/cgeo,yummy222/cgeo,schwabe/cgeo,cgeo/cgeo,tobiasge/cgeo,ThibaultR/cgeo,pstorch/cgeo,cgeo/cgeo,rsudev/c-geo-opensource,Huertix/cgeo,vishwakulkarni/cgeo,marco-dev/c-geo-opensource,brok85/cgeo,tobiasge/cgeo,auricgoldfinger/cgeo,lewurm/cgeo,madankb/cgeo,brok85/cgeo,marco-dev/c-geo-opensource,samueltardieu/cgeo,lewurm/cgeo,matej116/cgeo,SammysHP/cgeo
|
package cgeo.geocaching.connector.gc;
import cgeo.geocaching.cgCache;
import cgeo.geocaching.enumerations.CacheType;
import android.graphics.Bitmap;
/**
 * Icon decoder for cache icons painted into the PNG map tiles.
 *
 * Depending on the zoom level the tiles contain either small colored dots
 * (below level 14) or full cache-type icons (level 14 and above), so two
 * different detection strategies are used.
 */
public abstract class IconDecoder {

    /**
     * Decodes the icon at the given grid position and updates the cache's
     * type / found state in place.
     *
     * @param cache cache to update
     * @param bitmap decoded map tile image
     * @param xy UTF grid position of the cache inside the tile
     * @param zoomlevel current map zoom level, selects the detection strategy
     */
    public static void parseMapPNG(final cgCache cache, Bitmap bitmap, UTFGridPosition xy, int zoomlevel) {
        if (zoomlevel >= 14) {
            parseMapPNG14(cache, bitmap, xy);
        } else {
            parseMapPNG13(cache, bitmap, xy);
        }
    }

    // x/y offsets of a spiral walk around a center pixel, starting at the
    // center itself (index 0) and moving outwards through a 5x5 neighborhood
    private static final int[] OFFSET_X = new int[] { 0, -1, -1, 0, 1, 1, 1, 0, -1, -2, -2, -2, -2, -1, 0, 1, 2, 2, 2, 2, 2, 1, 0, -1, -2 };
    private static final int[] OFFSET_Y = new int[] { 0, 0, 1, 1, 1, 0, -1, -1, -1, -1, 0, 1, 2, 2, 2, 2, 2, 1, 0, -1, -2, -2, -2, -2, -2 };

    /**
     * The icon decoder walks a spiral around the center pixel position of the cache
     * and searches for characteristic colors.
     *
     * @param cache cache to update
     * @param bitmap decoded map tile image
     * @param xy UTF grid position of the cache inside the tile
     */
    private static void parseMapPNG13(final cgCache cache, Bitmap bitmap, UTFGridPosition xy) {
        // one UTF grid cell covers 4x4 pixels, +2 selects the cell center
        final int xCenter = xy.getX() * 4 + 2;
        final int yCenter = xy.getY() * 4 + 2;
        final int bitmapWidth = bitmap.getWidth();
        final int bitmapHeight = bitmap.getHeight();
        int countMulti = 0;
        int countFound = 0;
        for (int i = 0; i < OFFSET_X.length; i++) {
            // assert that we are still in the tile
            final int x = xCenter + OFFSET_X[i];
            if (x < 0 || x >= bitmapWidth) {
                continue;
            }
            final int y = yCenter + OFFSET_Y[i];
            if (y < 0 || y >= bitmapHeight) {
                continue;
            }
            // drop the alpha channel, only the RGB value is compared
            int color = bitmap.getPixel(x, y) & 0x00FFFFFF;
            // transparent pixels are not interesting
            if (color == 0) {
                continue;
            }
            int red = (color & 0xFF0000) >> 16;
            int green = (color & 0xFF00) >> 8;
            int blue = color & 0xFF;
            // these are quite sure, so one pixel is enough for matching
            if (green > 0x80 && green > red && green > blue) {
                cache.setType(CacheType.TRADITIONAL);
                return;
            }
            if (blue > 0x80 && blue > red && blue > green) {
                cache.setType(CacheType.MYSTERY);
                return;
            }
            if (red > 0x90 && blue < 0x10 && green < 0x10) {
                cache.setType(CacheType.EVENT);
                return;
            }
            // next two are hard to distinguish, therefore we sample all pixels of the spiral
            if (red > 0xFA && green > 0xD0) {
                countMulti++;
            }
            if (red < 0xF3 && red > 0xa0 && green > 0x20 && blue < 0x80) {
                countFound++;
            }
        }
        // now check whether we are sure about found/multi
        if (countFound > countMulti && countFound >= 2) {
            cache.setFound(true);
        }
        if (countMulti > countFound && countMulti >= 5) {
            cache.setType(CacheType.MULTI);
        }
    }

    // Pixel colors in tile
    private final static int COLOR_BORDER_GRAY = 0x5F5F5F;
    private final static int COLOR_TRADITIONAL = 0x316013;
    private final static int COLOR_MYSTERY = 0x243C97;
    private final static int COLOR_MULTI = 0xFFDE19;
    private final static int COLOR_FOUND = 0xFF0000;

    // Offset inside cache icon, relative to the border corner detected
    // in parseMapPNG14 (x at the left border, y at the bottom border)
    private final static int POSX_TRADI = 7;
    private final static int POSY_TRADI = -12;
    private final static int POSX_MULTI = 5; // for orange 8
    private final static int POSY_MULTI = -9; // for orange 10
    private final static int POSX_MYSTERY = 5;
    private final static int POSY_MYSTERY = -13;
    private final static int POSX_FOUND = 9;
    private final static int POSY_FOUND = -6;

    /**
     * For level 14 find the borders of the icons and then use a single pixel and color to match.
     *
     * @param cache cache to update
     * @param bitmap decoded map tile image
     * @param xy UTF grid position of the cache inside the tile
     */
    private static void parseMapPNG14(cgCache cache, Bitmap bitmap, UTFGridPosition xy) {
        // one UTF grid cell covers 4x4 pixels, +2 selects the cell center
        int x = xy.getX() * 4 + 2;
        int y = xy.getY() * 4 + 2;
        // search for left border
        int countX = 0;
        while ((bitmap.getPixel(x, y) & 0x00FFFFFF) != COLOR_BORDER_GRAY) {
            // give up when leaving the tile or after 20 pixels
            if (--x < 0 || ++countX > 20) {
                return;
            }
        }
        // search for bottom border
        int countY = 0;
        while ((bitmap.getPixel(x, y) & 0x00FFFFFF) != 0x000000) {
            if (++y >= Tile.TILE_SIZE || ++countY > 20) {
                return;
            }
        }
        try {
            // probe single characteristic pixels relative to the detected corner
            if ((bitmap.getPixel(x + POSX_TRADI, y + POSY_TRADI) & 0x00FFFFFF) == COLOR_TRADITIONAL) {
                cache.setType(CacheType.TRADITIONAL);
                return;
            }
            if ((bitmap.getPixel(x + POSX_MYSTERY, y + POSY_MYSTERY) & 0x00FFFFFF) == COLOR_MYSTERY) {
                cache.setType(CacheType.MYSTERY);
                return;
            }
            if ((bitmap.getPixel(x + POSX_MULTI, y + POSY_MULTI) & 0x00FFFFFF) == COLOR_MULTI) {
                cache.setType(CacheType.MULTI);
                return;
            }
            if ((bitmap.getPixel(x + POSX_FOUND, y + POSY_FOUND) & 0x00FFFFFF) == COLOR_FOUND) {
                cache.setFound(true);
            }
        } catch (IllegalArgumentException e) {
            // probe position fell outside the bitmap, intentionally ignored
        }
    }
}
|
main/src/cgeo/geocaching/connector/gc/IconDecoder.java
|
package cgeo.geocaching.connector.gc;
import cgeo.geocaching.cgCache;
import cgeo.geocaching.enumerations.CacheType;
import android.graphics.Bitmap;
/**
* icon decoder for cache icons
*
*/
public abstract class IconDecoder {
public static void parseMapPNG(final cgCache cache, Bitmap bitmap, UTFGridPosition xy, int zoomlevel) {
if (zoomlevel >= 14) {
parseMapPNG14(cache, bitmap, xy);
} else {
parseMapPNG13(cache, bitmap, xy);
}
}
private static final int[] OFFSET_X = new int[] { 0, -1, -1, 0, 1, 1, 1, 0, -1, -2, -2, -2, -2, -1, 0, 1, 2, 2, 2, 2, 2, 1, 0, -1, -2 };
private static final int[] OFFSET_Y = new int[] { 0, 0, 1, 1, 1, 0, -1, -1, -1, -1, 0, 1, 2, 2, 2, 2, 2, 1, 0, -1, -2, -2, -2, -2, -2 };
/**
 * Walks a small spiral around the cache's center pixel and matches
 * characteristic colors to derive the cache type / found state.
 *
 * @param cache cache whose type / found state is updated in place
 * @param bitmap decoded map tile image
 * @param xy UTF grid position of the cache inside the tile
 */
private static void parseMapPNG13(final cgCache cache, Bitmap bitmap, UTFGridPosition xy) {
    // one UTF grid cell covers 4x4 pixels, +2 selects the cell center
    final int centerX = xy.getX() * 4 + 2;
    final int centerY = xy.getY() * 4 + 2;
    final int width = bitmap.getWidth();
    final int height = bitmap.getHeight();
    int multiHits = 0;
    int foundHits = 0;
    for (int step = 0; step < OFFSET_X.length; step++) {
        final int px = centerX + OFFSET_X[step];
        final int py = centerY + OFFSET_Y[step];
        // skip pixels lying outside of the tile
        if (px < 0 || px >= width || py < 0 || py >= height) {
            continue;
        }
        final int rgb = bitmap.getPixel(px, py) & 0x00FFFFFF;
        if (rgb == 0) {
            // fully transparent pixel, nothing to match
            continue;
        }
        final int r = (rgb >> 16) & 0xFF;
        final int g = (rgb >> 8) & 0xFF;
        final int b = rgb & 0xFF;
        // unambiguous colors: a single matching pixel is sufficient
        if (g > 0x80 && g > r && g > b) {
            cache.setType(CacheType.TRADITIONAL);
            return;
        }
        if (b > 0x80 && b > r && b > g) {
            cache.setType(CacheType.MYSTERY);
            return;
        }
        if (r > 0x90 && b < 0x10 && g < 0x10) {
            cache.setType(CacheType.EVENT);
            return;
        }
        // multi/found colors are similar, so count matches across the whole spiral
        if (r > 0xFA && g > 0xD0) {
            multiHits++;
        }
        if (r < 0xF3 && r > 0xa0 && g > 0x20 && b < 0x80) {
            foundHits++;
        }
    }
    // decide only when one of the counters clearly dominates
    if (foundHits > multiHits && foundHits >= 2) {
        cache.setFound(true);
    }
    if (multiHits > foundHits && multiHits >= 5) {
        cache.setType(CacheType.MULTI);
    }
}
// Pixel colors in tile
private final static int COLOR_BORDER_GRAY = 0x5F5F5F;
private final static int COLOR_TRADITIONAL = 0x316013;
private final static int COLOR_MYSTERY = 0x243C97;
private final static int COLOR_MULTI = 0xFFDE19;
private final static int COLOR_FOUND = 0xFBEA5D;
// Offset inside cache icon
private final static int POSX_TRADI = 7;
private final static int POSY_TRADI = -12;
private final static int POSX_MULTI = 5; // for orange 8
private final static int POSY_MULTI = -9; // for orange 10
private final static int POSX_MYSTERY = 5;
private final static int POSY_MYSTERY = -13;
private final static int POSX_FOUND = 10;
private final static int POSY_FOUND = -8;
/**
 * For level 14 find the borders of the icons and then use a single pixel and color to match.
 *
 * @param cache cache whose type / found state is updated in place
 * @param bitmap decoded map tile image
 * @param xy UTF grid position of the cache inside the tile
 */
private static void parseMapPNG14(cgCache cache, Bitmap bitmap, UTFGridPosition xy) {
    // one UTF grid cell covers 4x4 pixels, +2 selects the cell center
    int x = xy.getX() * 4 + 2;
    int y = xy.getY() * 4 + 2;
    // search for left border
    int countX = 0;
    while ((bitmap.getPixel(x, y) & 0x00FFFFFF) != COLOR_BORDER_GRAY) {
        // give up when leaving the tile or after 20 pixels
        if (--x < 0 || ++countX > 20) {
            return;
        }
    }
    // search for bottom border
    int countY = 0;
    while ((bitmap.getPixel(x, y) & 0x00FFFFFF) != 0x000000) {
        if (++y >= Tile.TILE_SIZE || ++countY > 20) {
            return;
        }
    }
    try {
        // probe single characteristic pixels relative to the detected corner
        if ((bitmap.getPixel(x + POSX_TRADI, y + POSY_TRADI) & 0x00FFFFFF) == COLOR_TRADITIONAL) {
            cache.setType(CacheType.TRADITIONAL);
            return;
        }
        if ((bitmap.getPixel(x + POSX_MYSTERY, y + POSY_MYSTERY) & 0x00FFFFFF) == COLOR_MYSTERY) {
            cache.setType(CacheType.MYSTERY);
            return;
        }
        if ((bitmap.getPixel(x + POSX_MULTI, y + POSY_MULTI) & 0x00FFFFFF) == COLOR_MULTI) {
            cache.setType(CacheType.MULTI);
            return;
        }
        if ((bitmap.getPixel(x + POSX_FOUND, y + POSY_FOUND) & 0x00FFFFFF) == COLOR_FOUND) {
            cache.setFound(true);
        }
    } catch (IllegalArgumentException e) {
        // probe position fell outside the bitmap, intentionally left blank
    }
}
}
|
Fixed parsing of found icon on level 14+
|
main/src/cgeo/geocaching/connector/gc/IconDecoder.java
|
Fixed parsing of found icon on level 14+
|
|
Java
|
apache-2.0
|
4b9b80371daee9cddc3aa909ec0928fe8c85fca7
| 0
|
arunsoman/text-processor,arunsoman/text-processor,arunsoman/text-processor,arunsoman/text-processor,arunsoman/text-processor
|
package com.flytxt.tp.store;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.nio.MappedByteBuffer;
import java.nio.channels.FileChannel;
import java.security.PrivilegedExceptionAction;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.security.UserGroupInformation;
import org.mortbay.log.Log;
import org.springframework.data.hadoop.store.output.OutputStreamWriter;
import org.springframework.data.hadoop.store.strategy.naming.RollingFileNamingStrategy;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;
import com.flytxt.tp.marker.Marker;
import lombok.extern.slf4j.Slf4j;
@Component
@Slf4j
public class NeonStore implements Store {

    /** Writer for the HDFS output stream, created in {@link #init()}. */
    private static OutputStreamWriter writer;

    /** Shared in-memory buffer, flushed to HDFS when full or by the timer. */
    private static FlyMemStore fms;

    /** Remote user used for all HDFS operations. */
    private static UserGroupInformation ugi = UserGroupInformation.createRemoteUser("root");

    /** Guards the buffer-flush / HDFS-write cycle against concurrent access. */
    private static ReentrantReadWriteLock rwl = new ReentrantReadWriteLock();

    /**
     * Initializes the shared in-memory store (once per JVM) and the HDFS writer.
     *
     * @throws FileNotFoundException if the configuration resources are missing
     * @throws IOException on writer setup failure
     * @throws InterruptedException if setup is interrupted
     */
    @SuppressWarnings("resource")
    public static void init() throws FileNotFoundException, IOException, InterruptedException {
        synchronized (NeonStore.class) {
            if (fms == null)
                fms = new FlyMemStore();
        }
        Path path = new Path("/tmp/output");
        Configuration config = new Configuration();
        // Hadoop configurations go here
        config.addResource(new Path("/tmp/hdfs-site.xml"));
        config.addResource(new Path("/tmp/core-site.xml"));
        RollingFileNamingStrategy fileNamingStrategy = new RollingFileNamingStrategy().createInstance();
        writer = new OutputStreamWriter(config, path, null);
        writer.setFileNamingStrategy(fileNamingStrategy);
    }

    /**
     * Buffers the markers in memory; when the buffer overflows it is flushed
     * to HDFS under the write lock and the write is retried.
     */
    @Override
    public void save(byte[] data, String fileName, Marker... markers) throws IOException {
        try {
            fms.write(markers);
        } catch (ArrayIndexOutOfBoundsException e) {
            // buffer full: flush to HDFS, then retry the write
            rwl.writeLock().lock();
            try {
                writeToHdfs(fms.read());
            } finally {
                rwl.writeLock().unlock();
            }
            save(data, fileName, markers);
        }
    }

    /** Writes the given bytes to HDFS as the configured remote user. */
    private static void writeToHdfs(byte[] data) throws IOException {
        try {
            ugi.doAs(new PrivilegedExceptionAction<Void>() {
                @Override
                public Void run() throws Exception {
                    rwl.writeLock().lock();
                    try {
                        writer.write(data);
                    } finally {
                        writer.close();
                        // BUGFIX: this used to call lock() again instead of
                        // unlock(), leaving the write lock permanently held.
                        rwl.writeLock().unlock();
                    }
                    return null;
                }
            });
        } catch (InterruptedException e) {
            // restore the interrupt flag instead of silently swallowing it
            Thread.currentThread().interrupt();
            e.printStackTrace();
        }
    }

    @Override
    public void set(String fileName) {
        log.info(fileName);
    }

    /** Flushes and closes the writer if it is still running. */
    @Override
    public String done() throws IOException {
        if (writer != null && writer.isRunning()) {
            writer.flush();
            writer.close();
        }
        return null;
    }

    // provided lower priority in hdfs write: skips the flush entirely when
    // another thread currently holds the write lock
    @Scheduled(fixedDelay = 500)
    public void timer() {
        boolean tryLock = rwl.writeLock().tryLock();
        if (tryLock) {
            try {
                writeToHdfs(fms.read());
            } catch (IOException e) {
                e.printStackTrace();
            } finally {
                rwl.writeLock().unlock();
            }
        }
    }
}
|
store/src/main/java/com/flytxt/tp/store/NeonStore.java
|
package com.flytxt.tp.store;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.nio.MappedByteBuffer;
import java.nio.channels.FileChannel;
import java.security.PrivilegedExceptionAction;
import java.util.concurrent.Semaphore;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.security.UserGroupInformation;
import org.mortbay.log.Log;
import org.springframework.data.hadoop.store.output.OutputStreamWriter;
import org.springframework.data.hadoop.store.strategy.naming.RollingFileNamingStrategy;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;
import com.flytxt.tp.marker.Marker;
import lombok.extern.slf4j.Slf4j;
@Component
@Slf4j
public class NeonStore implements Store {
    // writer for the HDFS output stream, created in init()
    private static OutputStreamWriter writer;
    // shared in-memory buffer, flushed to HDFS when it overflows
    private static FlyMemStore fms ;
    // remote user used for all HDFS operations
    private static UserGroupInformation ugi = UserGroupInformation.createRemoteUser("root");
    // guards the buffer-flush / HDFS-write cycle
    private static ReentrantReadWriteLock rwl = new ReentrantReadWriteLock();
    /**
     * Initializes the shared in-memory store (once per JVM) and the HDFS writer.
     */
    @SuppressWarnings("resource")
    public static void init() throws FileNotFoundException, IOException, InterruptedException {
        synchronized(NeonStore.class){
            if (fms == null)
                fms = new FlyMemStore();
        }
        Path path = new Path("/tmp/output");
        Configuration config = new Configuration();
        // Hadoop configurations go here
        config.addResource(new Path("/tmp/hdfs-site.xml"));
        config.addResource(new Path("/tmp/core-site.xml"));
        RollingFileNamingStrategy fileNamingStrategy = new RollingFileNamingStrategy().createInstance();
        writer = new OutputStreamWriter(config, path, null);
        writer.setFileNamingStrategy(fileNamingStrategy);
    }
    @Override
    public void save(byte[] data, String fileName, Marker... markers) throws IOException {
        try {
            fms.write(markers);
        } catch (ArrayIndexOutOfBoundsException e) {
            // buffer full: flush to HDFS, then retry recursively
            // NOTE(review): unlock is skipped if writeToHdfs throws — wrap in try/finally
            rwl.writeLock().lock();
            writeToHdfs(fms.read());
            rwl.writeLock().unlock();
            save( data, fileName, markers) ;
        }
    }
    // writes the given bytes to HDFS as the configured remote user
    private static void writeToHdfs(byte[] data) throws IOException {
        try {
            ugi.doAs(new PrivilegedExceptionAction<Void>() {
                @Override
                public Void run() throws Exception {
                    rwl.writeLock().lock();
                    writer.write(data);
                    writer.close();
                    // NOTE(review): this re-locks instead of unlocking, leaving the
                    // write lock permanently held — should be rwl.writeLock().unlock()
                    rwl.writeLock().lock();
                    return null;
                }
            });
        } catch (InterruptedException e) {
            // NOTE(review): interrupt flag is swallowed; consider
            // Thread.currentThread().interrupt() before logging
            e.printStackTrace();
        }
    }
    @Override
    public void set(String fileName) {
        // TODO Auto-generated method stub
    }
    @Override
    public String done() throws IOException {
        // TODO Auto-generated method stub
        return null;
    }
    // periodic flush of the buffer to HDFS every 500 ms
    @Scheduled(fixedDelay = 500)
    public void timer() {
        try {
            rwl.writeLock().lock();
            writeToHdfs(fms.read());
            rwl.writeLock().unlock();
        } catch (IOException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        }
    }
}
|
Made thread safe
|
store/src/main/java/com/flytxt/tp/store/NeonStore.java
|
Made thread safe
|
|
Java
|
apache-2.0
|
4c9fb78cb06fff1439827bcc2f66e8a1433d1573
| 0
|
allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community
|
// Copyright 2000-2019 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.util.lang;
import com.intellij.openapi.diagnostic.LoggerRt;
import com.intellij.openapi.util.io.FileUtilRt;
import com.intellij.util.Function;
import com.intellij.util.containers.ContainerUtilRt;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.lang.reflect.Method;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLClassLoader;
import java.security.ProtectionDomain;
import java.util.*;
/**
* A class loader that allows for various customizations, e.g. not locking jars or using a special cache to speed up class loading.
* Should be constructed using {@link #build()} method.
*/
public class UrlClassLoader extends ClassLoader {
static final String CLASS_EXTENSION = ".class";
private static final Set<Class<?>> ourParallelCapableLoaders;
static {
//this class is compiled for Java 6 so it's enough to check that it isn't running under Java 6
boolean isAtLeastJava7 = !System.getProperty("java.runtime.version", "unknown").startsWith("1.6.");
boolean ibmJvm = System.getProperty("java.vm.vendor", "unknown").toLowerCase(Locale.ENGLISH).contains("ibm");
boolean capable =
isAtLeastJava7 && !ibmJvm && Boolean.parseBoolean(System.getProperty("use.parallel.class.loading", "true"));
if (capable) {
ourParallelCapableLoaders = Collections.synchronizedSet(new HashSet<Class<?>>());
try {
//todo Patches.USE_REFLECTION_TO_ACCESS_JDK7
Method registerAsParallelCapable = ClassLoader.class.getDeclaredMethod("registerAsParallelCapable");
registerAsParallelCapable.setAccessible(true);
if (Boolean.TRUE.equals(registerAsParallelCapable.invoke(null))) {
ourParallelCapableLoaders.add(UrlClassLoader.class);
}
}
catch (Exception ignored) { }
}
else {
ourParallelCapableLoaders = null;
}
}
protected static void markParallelCapable(Class<? extends UrlClassLoader> loaderClass) {
assert ourParallelCapableLoaders != null;
ourParallelCapableLoaders.add(loaderClass);
}
/**
* Called by the VM to support dynamic additions to the class path
*
* @see java.lang.instrument.Instrumentation#appendToSystemClassLoaderSearch
*/
@SuppressWarnings("unused")
void appendToClassPathForInstrumentation(String jar) {
try {
addURL(new File(jar).toURI().toURL());
} catch(MalformedURLException ignore) {}
}
private static final boolean ourClassPathIndexEnabled = Boolean.parseBoolean(System.getProperty("idea.classpath.index.enabled", "true"));
@NotNull
protected ClassPath getClassPath() {
return myClassPath;
}
/**
* See com.intellij.TestAll#getClassRoots()
*/
@SuppressWarnings("unused")
public List<URL> getBaseUrls() {
return myClassPath.getBaseUrls();
}
public static final class Builder<T extends UrlClassLoader> {
private final Class<T> myLoaderClass;
private List<URL> myURLs = ContainerUtilRt.emptyList();
private Set<URL> myURLsWithProtectionDomain = new HashSet<URL>();
private ClassLoader myParent;
private boolean myLockJars;
private boolean myUseCache;
private boolean myUsePersistentClasspathIndex;
private boolean myAcceptUnescaped;
private boolean myPreload = true;
private boolean myAllowBootstrapResources;
private boolean myErrorOnMissingJar = true;
private boolean myLazyClassloadingCaches;
@Nullable private CachePoolImpl myCachePool;
@Nullable private CachingCondition myCachingCondition;
private Builder(Class<T> loaderClass) {
myLoaderClass = loaderClass;
}
@NotNull
public Builder<T> urls(@NotNull List<URL> urls) { myURLs = urls; return this; }
@NotNull
public Builder<T> urls(@NotNull URL... urls) { myURLs = Arrays.asList(urls); return this; }
@NotNull
public Builder<T> parent(ClassLoader parent) { myParent = parent; return this; }
/**
* @param urls List of URLs that are signed by Sun/Oracle and their signatures must be verified.
*/
@NotNull
public Builder<T> urlsWithProtectionDomain(@NotNull Set<URL> urls) { myURLsWithProtectionDomain = urls; return this; }
/**
* @see #urlsWithProtectionDomain(Set)
*/
@NotNull
public Builder<T> urlsWithProtectionDomain(@NotNull URL... urls) { return urlsWithProtectionDomain(ContainerUtilRt.newHashSet(urls)); }
/**
* ZipFile handles opened in JarLoader will be kept in SoftReference. Depending on OS, the option significantly speeds up classloading
* from libraries. Caveat: for Windows opened handle will lock the file preventing its modification
* Thus, the option is recommended when jars are not modified or process that uses this option is transient
*/
@NotNull
public Builder<T> allowLock() { myLockJars = true; return this; }
@NotNull
public Builder<T> allowLock(boolean lockJars) { myLockJars = lockJars; return this; }
/**
* Build backward index of packages / class or resource names that allows to avoid IO during classloading
*/
@NotNull
public Builder<T> useCache() { myUseCache = true; return this; }
@NotNull
public Builder<T> useCache(boolean useCache) { myUseCache = useCache; return this; }
/**
* FileLoader will save list of files / packages under its root and use this information instead of walking filesystem for
* speedier classloading. Should be used only when the caches could be properly invalidated, e.g. when new file appears under
* FileLoader's root. Currently the flag is used for faster unit test / developed Idea running, because Idea's make (as of 14.1) ensures deletion of
* such information upon appearing new file for output root.
* N.b. Idea make does not ensure deletion of cached information upon deletion of some file under local root but false positives are not a
* logical error since code is prepared for that and disk access is performed upon class / resource loading.
* See also Builder#usePersistentClasspathIndexForLocalClassDirectories.
*/
@NotNull
public Builder<T> usePersistentClasspathIndexForLocalClassDirectories() {
myUsePersistentClasspathIndex = ourClassPathIndexEnabled;
return this;
}
/**
* Requests the class loader being built to use cache and, if possible, retrieve and store the cached data from a special cache pool
* that can be shared between several loaders.
* @param pool cache pool
* @param condition a custom policy to provide a possibility to prohibit caching for some URLs.
* @return this instance
*
* @see #createCachePool()
*/
@NotNull
public Builder<T> useCache(@NotNull CachePool pool, @NotNull CachingCondition condition) {
myUseCache = true;
myCachePool = (CachePoolImpl)pool;
myCachingCondition = condition;
return this;
}
@NotNull
public Builder<T> allowUnescaped() { myAcceptUnescaped = true; return this; }
@NotNull
public Builder<T> noPreload() { myPreload = false; return this; }
@NotNull
public Builder<T> allowBootstrapResources() { myAllowBootstrapResources = true; return this; }
@NotNull
public Builder<T> setLogErrorOnMissingJar(boolean log) {myErrorOnMissingJar = log; return this; }
/**
* Package contents information in Jar/File loaders will be lazily retrieved / cached upon classloading.
* Important: this option will result in much smaller initial overhead but for bulk classloading (like complete IDE start) it is less
* efficient (in number of disk / native code accesses / CPU spent) than combination of useCache / usePersistentClasspathIndexForLocalClassDirectories.
*/
@NotNull
public Builder<T> useLazyClassloadingCaches(boolean pleaseBeLazy) { myLazyClassloadingCaches = pleaseBeLazy; return this; }
@NotNull
public T get() {
try {
return myLoaderClass.getDeclaredConstructor(Builder.class).newInstance(this);
}
catch (Exception e) {
throw new RuntimeException(e);
}
}
}
@NotNull
public static Builder<UrlClassLoader> build() {
return build(UrlClassLoader.class);
}
@NotNull
public static <T extends UrlClassLoader> Builder<T> build(Class<T> loaderImplClass) {
return new Builder<T>(loaderImplClass);
}
private final List<URL> myURLs;
private final ClassPath myClassPath;
private final ClassLoadingLocks myClassLoadingLocks;
private final boolean myAllowBootstrapResources;
/** @deprecated use {@link #build()}, left for compatibility with java.system.class.loader setting */
@Deprecated
public UrlClassLoader(@NotNull ClassLoader parent) {
this(build().urls(((URLClassLoader)parent).getURLs()).parent(parent.getParent()).allowLock().useCache()
.usePersistentClasspathIndexForLocalClassDirectories()
.useLazyClassloadingCaches(Boolean.parseBoolean(System.getProperty("idea.lazy.classloading.caches", "false"))));
}
protected UrlClassLoader(@NotNull Builder<? extends UrlClassLoader> builder) {
super(builder.myParent);
myURLs = ContainerUtilRt.map2List(builder.myURLs, new Function<URL, URL>() {
@Override
public URL fun(URL url) {
return internProtocol(url);
}
});
myClassPath = createClassPath(builder);
myAllowBootstrapResources = builder.myAllowBootstrapResources;
myClassLoadingLocks = ourParallelCapableLoaders != null && ourParallelCapableLoaders.contains(getClass()) ? new ClassLoadingLocks() : null;
}
@NotNull
protected final ClassPath createClassPath(@NotNull Builder<? extends UrlClassLoader> builder) {
return new ClassPath(myURLs, builder.myLockJars, builder.myUseCache, builder.myAcceptUnescaped, builder.myPreload,
builder.myUsePersistentClasspathIndex, builder.myCachePool, builder.myCachingCondition,
builder.myErrorOnMissingJar, builder.myLazyClassloadingCaches, builder.myURLsWithProtectionDomain);
}
/**
 * Rebuilds "file" and "jar" URLs with an interned protocol string so that
 * repeated protocol comparisons are cheap; other protocols are returned
 * unchanged. Returns null (after logging) if the rebuilt URL is malformed.
 */
public static URL internProtocol(@NotNull URL url) {
  final String protocol = url.getProtocol();
  if (!"file".equals(protocol) && !"jar".equals(protocol)) {
    return url;
  }
  try {
    return new URL(protocol.intern(), url.getHost(), url.getPort(), url.getFile());
  }
  catch (MalformedURLException e) {
    LoggerRt.getInstance(UrlClassLoader.class).error(e);
    return null;
  }
}
/**
* @deprecated Adding additional urls to classloader at runtime could lead to hard-to-debug errors
* <b>Note:</b> Used via reflection because of classLoaders incompatibility
*/
@SuppressWarnings({"unused", "DeprecatedIsStillUsed"})
@Deprecated
public void addURL(@NotNull URL url) {
getClassPath().addURL(internProtocol(url));
myURLs.add(url);
}
public List<URL> getUrls() {
return Collections.unmodifiableList(myURLs);
}
/**
 * Returns true if the named class was already loaded by this loader itself
 * (as opposed to a parent loader).
 */
public boolean hasLoadedClass(String name) {
  final Class<?> loaded = findLoadedClass(name);
  if (loaded == null) {
    return false;
  }
  return loaded.getClassLoader() == this;
}
/** Looks the class up on our classpath, translating "not found" into the exception the JLS contract requires. */
@Override
protected Class findClass(final String name) throws ClassNotFoundException {
  final Class result = _findClass(name);
  if (result != null) {
    return result;
  }
  throw new ClassNotFoundException(name);
}
/**
 * Non-throwing variant of {@link #findClass}: returns null when the class
 * bytes cannot be located or read on this loader's classpath.
 */
@Nullable
protected final Class _findClass(@NotNull String name) {
  // map "a.b.C" onto the classpath resource path "a/b/C.class"
  final String path = name.replace('.', '/') + CLASS_EXTENSION;
  final Resource res = getClassPath().getResource(path);
  if (res == null) {
    return null;
  }
  try {
    return defineClass(name, res);
  }
  catch (IOException e) {
    return null;
  }
}
/**
 * Defines the class's package (if not yet present) and then the class itself
 * from the bytes of the given classpath resource.
 */
private Class defineClass(String name, Resource res) throws IOException {
  int i = name.lastIndexOf('.');
  if (i != -1) {
    String pkgName = name.substring(0, i);
    // Check if package already loaded.
    Package pkg = getPackage(pkgName);
    if (pkg == null) {
      try {
        // spec/impl attributes come from the resource's manifest entries, if any
        definePackage(pkgName,
                      res.getValue(Resource.Attribute.SPEC_TITLE),
                      res.getValue(Resource.Attribute.SPEC_VERSION),
                      res.getValue(Resource.Attribute.SPEC_VENDOR),
                      res.getValue(Resource.Attribute.IMPL_TITLE),
                      res.getValue(Resource.Attribute.IMPL_VERSION),
                      res.getValue(Resource.Attribute.IMPL_VENDOR),
                      null);
      }
      catch (IllegalArgumentException e) {
        // do nothing, package already defined by some other thread
      }
    }
  }
  byte[] b = res.getBytes();
  ProtectionDomain protectionDomain = res.getProtectionDomain();
  // use the protection-domain overload only when the resource carries one
  if (protectionDomain != null) {
    return _defineClass(name, b, protectionDomain);
  }
  else {
    return _defineClass(name, b);
  }
}
protected Class _defineClass(final String name, final byte[] b) {
return defineClass(name, b, 0, b.length);
}
protected Class _defineClass(final String name, final byte[] b, @Nullable ProtectionDomain protectionDomain) {
return defineClass(name, b, 0, b.length, protectionDomain);
}
private static final ThreadLocal<Boolean> ourSkipFindingResource = new ThreadLocal<Boolean>();
/** Resolves a resource on this loader's classpath; suppressed entirely while the bootstrap-delegation flag is set. */
@Override
public URL findResource(String name) {
  if (ourSkipFindingResource.get() != null) {
    return null;
  }
  final Resource res = findResourceImpl(name);
  return res == null ? null : res.getURL();
}
/**
 * Canonicalizes the resource name and looks it up on the classpath; a name
 * with a leading slash gets a second lookup without it.
 */
@Nullable
private Resource findResourceImpl(String name) {
  final String canonical = FileUtilRt.toCanonicalPath(name, '/', false);
  final Resource resource = getClassPath().getResource(canonical);
  if (resource != null || !canonical.startsWith("/")) {
    return resource;
  }
  // compatibility with existing code, non-standard classloader behavior
  return getClassPath().getResource(canonical.substring(1));
}
/**
 * Opens a resource stream, optionally consulting the parent/bootstrap
 * loaders first when bootstrap resources are allowed.
 */
@Nullable
@Override
public InputStream getResourceAsStream(String name) {
  if (myAllowBootstrapResources) {
    // set the thread-local flag so our own findResource returns null while
    // the parent delegation below runs, preventing it from re-entering us
    ourSkipFindingResource.set(Boolean.TRUE);
    try {
      InputStream stream = super.getResourceAsStream(name);
      if (stream != null) return stream;
    } finally {
      ourSkipFindingResource.set(null);
    }
  }
  try {
    Resource res = findResourceImpl(name);
    return res != null ? res.getInputStream() : null;
  }
  catch (IOException e) {
    // treat unreadable resources the same as missing ones
    return null;
  }
}
@Override
protected Enumeration<URL> findResources(String name) throws IOException {
return getClassPath().getResources(name);
}
// called by a parent class on Java 7+
@SuppressWarnings("unused")
@NotNull
protected Object getClassLoadingLock(String className) {
//noinspection RedundantStringConstructorCall
return myClassLoadingLocks != null ? myClassLoadingLocks.getOrCreateLock(className) : this;
}
/**
* An interface for a pool to store internal class loader caches, that can be shared between several different class loaders,
* if they contain the same URLs in their class paths.<p/>
*
* The implementation is subject to change so one shouldn't rely on it.
*
* @see #createCachePool()
* @see Builder#useCache(CachePool, CachingCondition)
*/
public interface CachePool { }
/**
* A condition to customize the caching policy when using {@link CachePool}. This might be needed when a class loader is used on a directory
* that's being written into, to avoid the situation when a resource path is cached as nonexistent but then a file actually appears there,
* and other class loaders with the same caching pool should have access to these new resources. This can happen during compilation process
* with several module outputs.
*/
public interface CachingCondition {
/**
* @return whether the internal information should be cached for files in a specific classpath component URL: inside the directory or
* a jar.
*/
boolean shouldCacheData(@NotNull URL url);
}
/**
* @return a new pool to be able to share internal class loader caches between several different class loaders, if they contain the same URLs
* in their class paths.
*/
@NotNull
public static CachePool createCachePool() {
return new CachePoolImpl();
}
}
|
platform/util-class-loader/src/com/intellij/util/lang/UrlClassLoader.java
|
// Copyright 2000-2019 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.util.lang;
import com.intellij.openapi.diagnostic.LoggerRt;
import com.intellij.openapi.util.io.FileUtilRt;
import com.intellij.util.Function;
import com.intellij.util.containers.ContainerUtilRt;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.lang.reflect.Method;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLClassLoader;
import java.security.ProtectionDomain;
import java.util.*;
/**
* A class loader that allows for various customizations, e.g. not locking jars or using a special cache to speed up class loading.
* Should be constructed using {@link #build()} method.
*/
public class UrlClassLoader extends ClassLoader {
  static final String CLASS_EXTENSION = ".class";

  // Loader classes registered as parallel-capable, or null when the JVM does not support the feature.
  private static final Set<Class<?>> ourParallelCapableLoaders;
  static {
    //this class is compiled for Java 6 so it's enough to check that it isn't running under Java 6
    boolean isAtLeastJava7 = !System.getProperty("java.runtime.version", "unknown").startsWith("1.6.");
    boolean ibmJvm = System.getProperty("java.vm.vendor", "unknown").toLowerCase(Locale.ENGLISH).contains("ibm");
    boolean capable =
      isAtLeastJava7 && !ibmJvm && Boolean.parseBoolean(System.getProperty("use.parallel.class.loading", "true"));
    if (capable) {
      ourParallelCapableLoaders = Collections.synchronizedSet(new HashSet<Class<?>>());
      try {
        //todo Patches.USE_REFLECTION_TO_ACCESS_JDK7
        Method registerAsParallelCapable = ClassLoader.class.getDeclaredMethod("registerAsParallelCapable");
        registerAsParallelCapable.setAccessible(true);
        if (Boolean.TRUE.equals(registerAsParallelCapable.invoke(null))) {
          ourParallelCapableLoaders.add(UrlClassLoader.class);
        }
      }
      catch (Exception ignored) { } // best-effort optimization; the loader works without parallel capability
    }
    else {
      ourParallelCapableLoaders = null;
    }
  }

  /** Registers {@code loaderClass} as parallel-capable; may only be called when the JVM supports it. */
  protected static void markParallelCapable(Class<? extends UrlClassLoader> loaderClass) {
    assert ourParallelCapableLoaders != null;
    ourParallelCapableLoaders.add(loaderClass);
  }

  /**
   * Called by the VM to support dynamic additions to the class path
   *
   * @see java.lang.instrument.Instrumentation#appendToSystemClassLoaderSearch
   */
  @SuppressWarnings("unused")
  void appendToClassPathForInstrumentation(String jar) {
    try {
      //noinspection deprecation
      addURL(new File(jar).toURI().toURL());
    } catch(MalformedURLException ignore) {} // the VM contract gives no way to report a malformed path

  }

  private static final boolean ourClassPathIndexEnabled = Boolean.parseBoolean(System.getProperty("idea.classpath.index.enabled", "true"));

  @NotNull
  protected ClassPath getClassPath() {
    return myClassPath;
  }

  /**
   * See com.intellij.TestAll#getClassRoots()
   */
  @SuppressWarnings("unused")
  public List<URL> getBaseUrls() {
    return myClassPath.getBaseUrls();
  }

  /** Fluent configuration of a {@link UrlClassLoader}; obtain an instance via {@link UrlClassLoader#build()}. */
  public static final class Builder<T extends UrlClassLoader> {
    private final Class<T> myLoaderClass;
    private List<URL> myURLs = ContainerUtilRt.emptyList();
    private Set<URL> myURLsWithProtectionDomain = new HashSet<URL>();
    private ClassLoader myParent;
    private boolean myLockJars;
    private boolean myUseCache;
    private boolean myUsePersistentClasspathIndex;
    private boolean myAcceptUnescaped;
    private boolean myPreload = true;
    private boolean myAllowBootstrapResources;
    private boolean myErrorOnMissingJar = true;
    private boolean myLazyClassloadingCaches;
    @Nullable private CachePoolImpl myCachePool;
    @Nullable private CachingCondition myCachingCondition;

    private Builder(Class<T> loaderClass) {
      myLoaderClass = loaderClass;
    }

    @NotNull
    public Builder<T> urls(@NotNull List<URL> urls) { myURLs = urls; return this; }

    @NotNull
    public Builder<T> urls(@NotNull URL... urls) { myURLs = Arrays.asList(urls); return this; }

    @NotNull
    public Builder<T> parent(ClassLoader parent) { myParent = parent; return this; }

    /**
     * @param urls List of URLs that are signed by Sun/Oracle and their signatures must be verified.
     */
    @NotNull
    public Builder<T> urlsWithProtectionDomain(@NotNull Set<URL> urls) { myURLsWithProtectionDomain = urls; return this; }

    /**
     * @see #urlsWithProtectionDomain(Set)
     */
    @NotNull
    public Builder<T> urlsWithProtectionDomain(@NotNull URL... urls) { return urlsWithProtectionDomain(ContainerUtilRt.newHashSet(urls)); }

    /**
     * ZipFile handles opened in JarLoader will be kept in SoftReference. Depending on OS, the option significantly speeds up classloading
     * from libraries. Caveat: for Windows opened handle will lock the file preventing its modification
     * Thus, the option is recommended when jars are not modified or process that uses this option is transient
     */
    @NotNull
    public Builder<T> allowLock() { myLockJars = true; return this; }

    @NotNull
    public Builder<T> allowLock(boolean lockJars) { myLockJars = lockJars; return this; }

    /**
     * Build backward index of packages / class or resource names that allows to avoid IO during classloading
     */
    @NotNull
    public Builder<T> useCache() { myUseCache = true; return this; }

    @NotNull
    public Builder<T> useCache(boolean useCache) { myUseCache = useCache; return this; }

    /**
     * FileLoader will save list of files / packages under its root and use this information instead of walking filesystem for
     * speedier classloading. Should be used only when the caches could be properly invalidated, e.g. when new file appears under
     * FileLoader's root. Currently the flag is used for faster unit test / developed Idea running, because Idea's make (as of 14.1) ensures deletion of
     * such information upon appearing new file for output root.
     * N.b. Idea make does not ensure deletion of cached information upon deletion of some file under local root but false positives are not a
     * logical error since code is prepared for that and disk access is performed upon class / resource loading.
     * See also Builder#usePersistentClasspathIndexForLocalClassDirectories.
     */
    @NotNull
    public Builder<T> usePersistentClasspathIndexForLocalClassDirectories() {
      myUsePersistentClasspathIndex = ourClassPathIndexEnabled;
      return this;
    }

    /**
     * Requests the class loader being built to use cache and, if possible, retrieve and store the cached data from a special cache pool
     * that can be shared between several loaders.
     * @param pool cache pool
     * @param condition a custom policy to provide a possibility to prohibit caching for some URLs.
     * @return this instance
     *
     * @see #createCachePool()
     */
    @NotNull
    public Builder<T> useCache(@NotNull CachePool pool, @NotNull CachingCondition condition) {
      myUseCache = true;
      myCachePool = (CachePoolImpl)pool;
      myCachingCondition = condition;
      return this;
    }

    @NotNull
    public Builder<T> allowUnescaped() { myAcceptUnescaped = true; return this; }

    @NotNull
    public Builder<T> noPreload() { myPreload = false; return this; }

    @NotNull
    public Builder<T> allowBootstrapResources() { myAllowBootstrapResources = true; return this; }

    @NotNull
    public Builder<T> setLogErrorOnMissingJar(boolean log) {myErrorOnMissingJar = log; return this; }

    /**
     * Package contents information in Jar/File loaders will be lazily retrieved / cached upon classloading.
     * Important: this option will result in much smaller initial overhead but for bulk classloading (like complete IDE start) it is less
     * efficient (in number of disk / native code accesses / CPU spent) than combination of useCache / usePersistentClasspathIndexForLocalClassDirectories.
     */
    @NotNull
    public Builder<T> useLazyClassloadingCaches(boolean pleaseBeLazy) { myLazyClassloadingCaches = pleaseBeLazy; return this; }

    /** Instantiates the configured loader class via its {@code (Builder)} constructor. */
    @NotNull
    public T get() {
      try {
        return myLoaderClass.getDeclaredConstructor(Builder.class).newInstance(this);
      }
      catch (Exception e) {
        throw new RuntimeException(e);
      }
    }
  }

  @NotNull
  public static Builder<UrlClassLoader> build() {
    return build(UrlClassLoader.class);
  }

  @NotNull
  public static <T extends UrlClassLoader> Builder<T> build(Class<T> loaderImplClass) {
    return new Builder<T>(loaderImplClass);
  }

  private final List<URL> myURLs;
  private final ClassPath myClassPath;
  private final ClassLoadingLocks myClassLoadingLocks;
  private final boolean myAllowBootstrapResources;

  /** @deprecated use {@link #build()}, left for compatibility with java.system.class.loader setting */
  @Deprecated
  public UrlClassLoader(@NotNull ClassLoader parent) {
    this(build().urls(((URLClassLoader)parent).getURLs()).parent(parent.getParent()).allowLock().useCache()
           .usePersistentClasspathIndexForLocalClassDirectories()
           .useLazyClassloadingCaches(Boolean.parseBoolean(System.getProperty("idea.lazy.classloading.caches", "false"))));
  }

  protected UrlClassLoader(@NotNull Builder<? extends UrlClassLoader> builder) {
    super(builder.myParent);
    myURLs = ContainerUtilRt.map2List(builder.myURLs, new Function<URL, URL>() {
      @Override
      public URL fun(URL url) {
        return internProtocol(url);
      }
    });
    myClassPath = createClassPath(builder);
    myAllowBootstrapResources = builder.myAllowBootstrapResources;
    // Per-class-name locks are only used when this concrete loader class was registered as parallel-capable.
    myClassLoadingLocks = ourParallelCapableLoaders != null && ourParallelCapableLoaders.contains(getClass()) ? new ClassLoadingLocks() : null;
  }

  @NotNull
  protected final ClassPath createClassPath(@NotNull Builder<? extends UrlClassLoader> builder) {
    return new ClassPath(myURLs, builder.myLockJars, builder.myUseCache, builder.myAcceptUnescaped, builder.myPreload,
                         builder.myUsePersistentClasspathIndex, builder.myCachePool, builder.myCachingCondition,
                         builder.myErrorOnMissingJar, builder.myLazyClassloadingCaches, builder.myURLsWithProtectionDomain);
  }

  /** Interns the protocol string of "file"/"jar" URLs; returns {@code null} if reconstruction fails. */
  public static URL internProtocol(@NotNull URL url) {
    try {
      final String protocol = url.getProtocol();
      if ("file".equals(protocol) || "jar".equals(protocol)) {
        return new URL(protocol.intern(), url.getHost(), url.getPort(), url.getFile());
      }
      return url;
    }
    catch (MalformedURLException e) {
      LoggerRt.getInstance(UrlClassLoader.class).error(e);
      return null;
    }
  }

  /**
   * @deprecated Adding additional urls to classloader at runtime could lead to hard-to-debug errors
   * <b>Note:</b> Used via reflection because of classLoaders incompatibility
   */
  @SuppressWarnings({"unused", "DeprecatedIsStillUsed"})
  @Deprecated
  public void addURL(@NotNull URL url) {
    getClassPath().addURL(internProtocol(url));
    myURLs.add(url);
  }

  public List<URL> getUrls() {
    return Collections.unmodifiableList(myURLs);
  }

  /** Returns true if {@code name} has been loaded by this loader itself (not by a parent). */
  public boolean hasLoadedClass(String name) {
    Class<?> aClass = findLoadedClass(name);
    return aClass != null && aClass.getClassLoader() == this;
  }

  @Override
  protected Class findClass(final String name) throws ClassNotFoundException {
    Class clazz = _findClass(name);
    if (clazz == null) {
      throw new ClassNotFoundException(name);
    }
    return clazz;
  }

  /** Like {@link #findClass} but returns {@code null} instead of throwing when the class is absent or unreadable. */
  @Nullable
  protected final Class _findClass(@NotNull String name) {
    Resource res = getClassPath().getResource(name.replace('.', '/') + CLASS_EXTENSION);
    if (res == null) {
      return null;
    }
    try {
      return defineClass(name, res);
    }
    catch (IOException e) {
      return null;
    }
  }

  private Class defineClass(String name, Resource res) throws IOException {
    int i = name.lastIndexOf('.');
    if (i != -1) {
      String pkgName = name.substring(0, i);
      // Check if package already loaded.
      Package pkg = getPackage(pkgName);
      if (pkg == null) {
        try {
          definePackage(pkgName,
                        res.getValue(Resource.Attribute.SPEC_TITLE),
                        res.getValue(Resource.Attribute.SPEC_VERSION),
                        res.getValue(Resource.Attribute.SPEC_VENDOR),
                        res.getValue(Resource.Attribute.IMPL_TITLE),
                        res.getValue(Resource.Attribute.IMPL_VERSION),
                        res.getValue(Resource.Attribute.IMPL_VENDOR),
                        null);
        }
        catch (IllegalArgumentException e) {
          // do nothing, package already defined by some other thread
        }
      }
    }

    byte[] b = res.getBytes();
    ProtectionDomain protectionDomain = res.getProtectionDomain();
    if (protectionDomain != null) {
      return _defineClass(name, b, protectionDomain);
    }
    else {
      return _defineClass(name, b);
    }
  }

  protected Class _defineClass(final String name, final byte[] b) {
    return defineClass(name, b, 0, b.length);
  }

  protected Class _defineClass(final String name, final byte[] b, @Nullable ProtectionDomain protectionDomain) {
    return defineClass(name, b, 0, b.length, protectionDomain);
  }

  @Override
  public URL findResource(String name) {
    Resource res = findResourceImpl(name);
    return res != null ? res.getURL() : null;
  }

  @Nullable
  private Resource findResourceImpl(String name) {
    String n = FileUtilRt.toCanonicalPath(name, '/', false);
    Resource resource = getClassPath().getResource(n);
    if (resource == null && n.startsWith("/")) { // compatibility with existing code, non-standard classloader behavior
      resource = getClassPath().getResource(n.substring(1));
    }
    return resource;
  }

  @Nullable
  @Override
  public InputStream getResourceAsStream(String name) {
    // Look in our own ClassPath first: it can serve the stream from an already-open ZipFile handle,
    // which is much cheaper than opening an InputStream through a URL connection the way
    // super.getResourceAsStream() does. Previously, when bootstrap resources were allowed, we
    // delegated to super unconditionally and paid that URL-stream cost even for our own resources.
    try {
      Resource res = findResourceImpl(name);
      if (res != null) {
        return res.getInputStream();
      }
    }
    catch (IOException e) {
      return null;
    }
    // Fall back to the parent/bootstrap lookup only when bootstrap resources were explicitly allowed.
    return myAllowBootstrapResources ? super.getResourceAsStream(name) : null;
  }

  @Override
  protected Enumeration<URL> findResources(String name) throws IOException {
    return getClassPath().getResources(name);
  }

  // called by a parent class on Java 7+
  @SuppressWarnings("unused")
  @NotNull
  protected Object getClassLoadingLock(String className) {
    //noinspection RedundantStringConstructorCall
    return myClassLoadingLocks != null ? myClassLoadingLocks.getOrCreateLock(className) : this;
  }

  /**
   * An interface for a pool to store internal class loader caches, that can be shared between several different class loaders,
   * if they contain the same URLs in their class paths.<p/>
   *
   * The implementation is subject to change so one shouldn't rely on it.
   *
   * @see #createCachePool()
   * @see Builder#useCache(CachePool, CachingCondition)
   */
  public interface CachePool { }

  /**
   * A condition to customize the caching policy when using {@link CachePool}. This might be needed when a class loader is used on a directory
   * that's being written into, to avoid the situation when a resource path is cached as nonexistent but then a file actually appears there,
   * and other class loaders with the same caching pool should have access to these new resources. This can happen during compilation process
   * with several module outputs.
   */
  public interface CachingCondition {
    /**
     * @return whether the internal information should be cached for files in a specific classpath component URL: inside the directory or
     * a jar.
     */
    boolean shouldCacheData(@NotNull URL url);
  }

  /**
   * @return a new pool to be able to share internal class loader caches between several different class loaders, if they contain the same URLs
   * in their class paths.
   */
  @NotNull
  public static CachePool createCachePool() {
    return new CachePoolImpl();
  }
}
|
Avoid opening InputStream via URL when calling super.getResourceAsStream
When bootstrap resources are supported delegating to super.getResourceAsStream() will cause opening InputStreams from URL. This isn't as efficient as own UrlClassLoader's capability to use existing open ZipFile handles
GitOrigin-RevId: 48fa777911b29196899275119c6ccabbebca9afc
|
platform/util-class-loader/src/com/intellij/util/lang/UrlClassLoader.java
|
Avoid opening InputStream via URL when calling super.getResourceAsStream
|
|
Java
|
apache-2.0
|
f33c85d1533e5c50db45bd636adf4120c5b997bb
| 0
|
robertwb/incubator-beam,chamikaramj/beam,robertwb/incubator-beam,chamikaramj/beam,lukecwik/incubator-beam,robertwb/incubator-beam,chamikaramj/beam,lukecwik/incubator-beam,lukecwik/incubator-beam,apache/beam,apache/beam,robertwb/incubator-beam,apache/beam,lukecwik/incubator-beam,apache/beam,apache/beam,chamikaramj/beam,robertwb/incubator-beam,robertwb/incubator-beam,lukecwik/incubator-beam,chamikaramj/beam,apache/beam,apache/beam,lukecwik/incubator-beam,chamikaramj/beam,robertwb/incubator-beam,robertwb/incubator-beam,chamikaramj/beam,apache/beam,lukecwik/incubator-beam,apache/beam,lukecwik/incubator-beam,apache/beam,robertwb/incubator-beam,apache/beam,lukecwik/incubator-beam,chamikaramj/beam,chamikaramj/beam,lukecwik/incubator-beam,chamikaramj/beam,robertwb/incubator-beam
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.beam.sdk.transforms;
import com.google.auto.value.AutoValue;
import java.io.Serializable;
import java.lang.annotation.Documented;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import javax.annotation.Nullable;
import org.apache.beam.sdk.Pipeline;
import org.apache.beam.sdk.annotations.Experimental;
import org.apache.beam.sdk.annotations.Experimental.Kind;
import org.apache.beam.sdk.options.PipelineOptions;
import org.apache.beam.sdk.state.State;
import org.apache.beam.sdk.state.StateSpec;
import org.apache.beam.sdk.state.TimeDomain;
import org.apache.beam.sdk.state.Timer;
import org.apache.beam.sdk.state.TimerSpec;
import org.apache.beam.sdk.testing.TestPipeline;
import org.apache.beam.sdk.testing.TestStream;
import org.apache.beam.sdk.transforms.display.DisplayData;
import org.apache.beam.sdk.transforms.display.HasDisplayData;
import org.apache.beam.sdk.transforms.splittabledofn.HasDefaultTracker;
import org.apache.beam.sdk.transforms.splittabledofn.HasDefaultWatermarkEstimator;
import org.apache.beam.sdk.transforms.splittabledofn.ManualWatermarkEstimator;
import org.apache.beam.sdk.transforms.splittabledofn.RestrictionTracker;
import org.apache.beam.sdk.transforms.splittabledofn.TimestampObservingWatermarkEstimator;
import org.apache.beam.sdk.transforms.splittabledofn.WatermarkEstimator;
import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
import org.apache.beam.sdk.transforms.windowing.PaneInfo;
import org.apache.beam.sdk.transforms.windowing.Window;
import org.apache.beam.sdk.values.PCollection;
import org.apache.beam.sdk.values.PCollectionView;
import org.apache.beam.sdk.values.Row;
import org.apache.beam.sdk.values.TupleTag;
import org.apache.beam.sdk.values.TypeDescriptor;
import org.apache.beam.sdk.values.WindowingStrategy;
import org.joda.time.Duration;
import org.joda.time.Instant;
/**
* The argument to {@link ParDo} providing the code to use to process elements of the input {@link
* org.apache.beam.sdk.values.PCollection}.
*
* <p>See {@link ParDo} for more explanation, examples of use, and discussion of constraints on
* {@code DoFn}s, including their serializability, lack of access to global shared mutable state,
* requirements for failure tolerance, and benefits of optimization.
*
* <p>{@link DoFn DoFns} can be tested by using {@link TestPipeline}. You can verify their
* functional correctness in a local test using the {@code DirectRunner} as well as running
* integration tests with your production runner of choice. Typically, you can generate the input
* data using {@link Create#of} or other transforms. However, if you need to test the behavior of
* {@link StartBundle} and {@link FinishBundle} with particular bundle boundaries, you can use
* {@link TestStream}.
*
* <p>Implementations must define a method annotated with {@link ProcessElement} that satisfies the
* requirements described there. See the {@link ProcessElement} for details.
*
* <p>Example usage:
*
* <pre><code>
* {@literal PCollection<String>} lines = ... ;
* {@literal PCollection<String>} words =
* {@literal lines.apply(ParDo.of(new DoFn<String, String>())} {
* {@literal @ProcessElement}
* public void processElement({@literal @}Element String element, BoundedWindow window) {
* ...
* }}));
* </code></pre>
*
* @param <InputT> the type of the (main) input elements
* @param <OutputT> the type of the (main) output elements
*/
public abstract class DoFn<InputT, OutputT> implements Serializable, HasDisplayData {
  /**
   * Information accessible while within the {@link StartBundle} method.
   *
   * <p>Exposes only {@link #getPipelineOptions()}; unlike {@link WindowedContext} it provides no
   * way to output elements.
   */
  @SuppressWarnings("ClassCanBeStatic") // Converting class to static is an API change.
  public abstract class StartBundleContext {
    /**
     * Returns the {@code PipelineOptions} specified with the {@link
     * org.apache.beam.sdk.PipelineRunner} invoking this {@code DoFn}.
     */
    public abstract PipelineOptions getPipelineOptions();
  }
  /**
   * Information accessible while within the {@link FinishBundle} method.
   *
   * <p>Because finishing a bundle is not associated with any particular input element, every output
   * here requires an explicit timestamp and window.
   */
  public abstract class FinishBundleContext {
    /**
     * Returns the {@code PipelineOptions} specified with the {@link
     * org.apache.beam.sdk.PipelineRunner} invoking this {@code DoFn}.
     */
    public abstract PipelineOptions getPipelineOptions();

    /**
     * Adds the given element to the main output {@code PCollection} at the given timestamp in the
     * given window.
     *
     * <p>Once passed to {@code output} the element should not be modified in any way.
     *
     * <p><i>Note:</i> A splittable {@link DoFn} is not allowed to output from the {@link
     * FinishBundle} method.
     */
    public abstract void output(@Nullable OutputT output, Instant timestamp, BoundedWindow window);

    /**
     * Adds the given element to the output {@code PCollection} with the given tag at the given
     * timestamp in the given window.
     *
     * <p>Once passed to {@code output} the element should not be modified in any way.
     *
     * <p><i>Note:</i> A splittable {@link DoFn} is not allowed to output from the {@link
     * FinishBundle} method.
     */
    public abstract <T> void output(
        TupleTag<T> tag, T output, Instant timestamp, BoundedWindow window);
  }
  /**
   * Information accessible to all methods in this {@link DoFn} where the context is in some window.
   *
   * <p>Base class for {@link ProcessContext}, {@link OnTimerContext}, and {@link
   * OnWindowExpirationContext}.
   */
  public abstract class WindowedContext {
    /**
     * Returns the {@code PipelineOptions} specified with the {@link
     * org.apache.beam.sdk.PipelineRunner} invoking this {@code DoFn}.
     */
    public abstract PipelineOptions getPipelineOptions();

    /**
     * Adds the given element to the main output {@code PCollection}.
     *
     * <p>Once passed to {@code output} the element should not be modified in any way.
     *
     * <p>If invoked from {@link ProcessElement}, the output element will have the same timestamp
     * and be in the same windows as the input element passed to the method annotated with
     * {@code @ProcessElement}.
     *
     * <p>If invoked from {@link StartBundle} or {@link FinishBundle}, this will attempt to use the
     * {@link org.apache.beam.sdk.transforms.windowing.WindowFn} of the input {@code PCollection} to
     * determine what windows the element should be in, throwing an exception if the {@code
     * WindowFn} attempts to access any information about the input element. The output element will
     * have a timestamp of negative infinity.
     *
     * <p><i>Note:</i> A splittable {@link DoFn} is not allowed to output from {@link StartBundle}
     * or {@link FinishBundle} methods.
     */
    public abstract void output(OutputT output);

    /**
     * Adds the given element to the main output {@code PCollection}, with the given timestamp.
     *
     * <p>Once passed to {@code outputWithTimestamp} the element should not be modified in any way.
     *
     * <p>If invoked from {@link ProcessElement}), the timestamp must not be older than the input
     * element's timestamp minus {@link DoFn#getAllowedTimestampSkew}. The output element will be in
     * the same windows as the input element.
     *
     * <p>If invoked from {@link StartBundle} or {@link FinishBundle}, this will attempt to use the
     * {@link org.apache.beam.sdk.transforms.windowing.WindowFn} of the input {@code PCollection} to
     * determine what windows the element should be in, throwing an exception if the {@code
     * WindowFn} attempts to access any information about the input element except for the
     * timestamp.
     *
     * <p><i>Note:</i> A splittable {@link DoFn} is not allowed to output from {@link StartBundle}
     * or {@link FinishBundle} methods.
     */
    public abstract void outputWithTimestamp(OutputT output, Instant timestamp);

    /**
     * Adds the given element to the output {@code PCollection} with the given tag.
     *
     * <p>Once passed to {@code output} the element should not be modified in any way.
     *
     * <p>The caller of {@code ParDo} uses {@link ParDo.SingleOutput#withOutputTags} to specify the
     * tags of outputs that it consumes. Non-consumed outputs, e.g., outputs for monitoring purposes
     * only, don't necessarily need to be specified.
     *
     * <p>The output element will have the same timestamp and be in the same windows as the input
     * element passed to {@link ProcessElement}).
     *
     * <p>If invoked from {@link StartBundle} or {@link FinishBundle}, this will attempt to use the
     * {@link org.apache.beam.sdk.transforms.windowing.WindowFn} of the input {@code PCollection} to
     * determine what windows the element should be in, throwing an exception if the {@code
     * WindowFn} attempts to access any information about the input element. The output element will
     * have a timestamp of negative infinity.
     *
     * <p><i>Note:</i> A splittable {@link DoFn} is not allowed to output from {@link StartBundle}
     * or {@link FinishBundle} methods.
     *
     * @see ParDo.SingleOutput#withOutputTags
     */
    public abstract <T> void output(TupleTag<T> tag, T output);

    /**
     * Adds the given element to the specified output {@code PCollection}, with the given timestamp.
     *
     * <p>Once passed to {@code outputWithTimestamp} the element should not be modified in any way.
     *
     * <p>If invoked from {@link ProcessElement}), the timestamp must not be older than the input
     * element's timestamp minus {@link DoFn#getAllowedTimestampSkew}. The output element will be in
     * the same windows as the input element.
     *
     * <p>If invoked from {@link StartBundle} or {@link FinishBundle}, this will attempt to use the
     * {@link org.apache.beam.sdk.transforms.windowing.WindowFn} of the input {@code PCollection} to
     * determine what windows the element should be in, throwing an exception if the {@code
     * WindowFn} attempts to access any information about the input element except for the
     * timestamp.
     *
     * <p><i>Note:</i> A splittable {@link DoFn} is not allowed to output from {@link StartBundle}
     * or {@link FinishBundle} methods.
     *
     * @see ParDo.SingleOutput#withOutputTags
     */
    public abstract <T> void outputWithTimestamp(TupleTag<T> tag, T output, Instant timestamp);
  }
  /**
   * Information accessible when running a {@link DoFn.ProcessElement} method.
   *
   * <p>Extends {@link WindowedContext} with access to the current input element, its timestamp and
   * pane, and to side inputs.
   */
  public abstract class ProcessContext extends WindowedContext {
    /**
     * Returns the input element to be processed.
     *
     * <p>The element will not be changed -- it is safe to cache, etc. without copying.
     * Implementation of {@link DoFn.ProcessElement} method should not mutate the element.
     */
    public abstract InputT element();

    /**
     * Returns the value of the side input.
     *
     * @throws IllegalArgumentException if this is not a side input
     * @see ParDo.SingleOutput#withSideInputs
     */
    public abstract <T> T sideInput(PCollectionView<T> view);

    /**
     * Returns the timestamp of the input element.
     *
     * <p>See {@link Window} for more information.
     */
    public abstract Instant timestamp();

    /**
     * Returns information about the pane within this window into which the input element has been
     * assigned.
     *
     * <p>Generally all data is in a single, uninteresting pane unless custom triggering and/or late
     * data has been explicitly requested. See {@link Window} for more information.
     */
    public abstract PaneInfo pane();
  }
  /**
   * Information accessible when running a {@link DoFn.OnTimer} method.
   *
   * <p>Note the distinction between {@link #timestamp()} (the timer's output timestamp) and {@link
   * #fireTimestamp()} (when the timer actually fired).
   */
  @Experimental(Kind.TIMERS)
  public abstract class OnTimerContext extends WindowedContext {
    /** Returns the output timestamp of the current timer. */
    public abstract Instant timestamp();

    /** Returns the firing timestamp of the current timer. */
    public abstract Instant fireTimestamp();

    /** Returns the window in which the timer is firing. */
    public abstract BoundedWindow window();

    /** Returns the time domain of the current timer. */
    public abstract TimeDomain timeDomain();
  }
  /** Information accessible while a window-expiration callback of this {@link DoFn} is running. */
  public abstract class OnWindowExpirationContext extends WindowedContext {
    /** Returns the window in which the window expiration is firing. */
    public abstract BoundedWindow window();
  }
  /**
   * Returns the allowed timestamp skew duration, which is the maximum duration that timestamps can
   * be shifted backward in {@link WindowedContext#outputWithTimestamp}.
   *
   * <p>The default value is {@code Duration.ZERO}, in which case timestamps can only be shifted
   * forward to future. For infinite skew, return {@code Duration.millis(Long.MAX_VALUE)}.
   *
   * @deprecated This method permits a {@link DoFn} to emit elements behind the watermark. These
   *     elements are considered late, and if behind the {@link Window#withAllowedLateness(Duration)
   *     allowed lateness} of a downstream {@link PCollection} may be silently dropped. See
   *     https://issues.apache.org/jira/browse/BEAM-644 for details on a replacement.
   */
  @Deprecated
  public Duration getAllowedTimestampSkew() {
    // Default: no backward skew; subclasses override to permit older output timestamps.
    return Duration.ZERO;
  }
/////////////////////////////////////////////////////////////////////////////
  /**
   * Returns a {@link TypeDescriptor} capturing what is known statically about the input type of
   * this {@code DoFn} instance's most-derived class.
   *
   * <p>See {@link #getOutputTypeDescriptor} for more discussion.
   */
  public TypeDescriptor<InputT> getInputTypeDescriptor() {
    // The anonymous subclass combined with getClass() lets TypeDescriptor resolve InputT
    // from the most-derived class's type parameters.
    return new TypeDescriptor<InputT>(getClass()) {};
  }
  /**
   * Returns a {@link TypeDescriptor} capturing what is known statically about the output type of
   * this {@code DoFn} instance's most-derived class.
   *
   * <p>In the normal case of a concrete {@code DoFn} subclass with no generic type parameters of
   * its own (including anonymous inner classes), this will be a complete non-generic type, which is
   * good for choosing a default output {@code Coder<O>} for the output {@code PCollection<O>}.
   */
  public TypeDescriptor<OutputT> getOutputTypeDescriptor() {
    // Same type-capture idiom as getInputTypeDescriptor(), applied to OutputT.
    return new TypeDescriptor<OutputT>(getClass()) {};
  }
  /** Receives values of the given type. */
  public interface OutputReceiver<T> {
    /** Outputs the given value. */
    void output(T output);

    /** Outputs the given value with an explicit timestamp. */
    void outputWithTimestamp(T output, Instant timestamp);
  }
  /** Receives tagged output for a multi-output function. */
  public interface MultiOutputReceiver {
    /** Returns an {@link OutputReceiver} for the given tag. */
    <T> OutputReceiver<T> get(TupleTag<T> tag);

    /**
     * Returns a {@link OutputReceiver} for publishing {@link Row} objects to the given tag.
     *
     * <p>The {@link PCollection} representing this tag must have a schema registered in order to
     * call this function.
     */
    @Experimental(Kind.SCHEMAS)
    <T> OutputReceiver<Row> getRowReceiver(TupleTag<T> tag);
  }
/////////////////////////////////////////////////////////////////////////////
  /**
   * Annotation for declaring and dereferencing state cells.
   *
   * <p>To declare a state cell, create a field of type {@link StateSpec} annotated with a {@link
   * StateId}. To use the cell during processing, add a parameter of the appropriate {@link State}
   * subclass to your {@link ProcessElement @ProcessElement} or {@link OnTimer @OnTimer} method, and
   * annotate it with {@link StateId}. See the following code for an example:
   *
   * <pre><code>{@literal new DoFn<KV<Key, Foo>, Baz>()} {
   *
   *  {@literal @StateId("my-state-id")}
   *  {@literal private final StateSpec<ValueState<MyState>>} myStateSpec =
   *       StateSpecs.value(new MyStateCoder());
   *
   *  {@literal @ProcessElement}
   *   public void processElement(
   *      {@literal @Element InputT element},
   *      {@literal @StateId("my-state-id") ValueState<MyState> myState}) {
   *     myState.read();
   *     myState.write(...);
   *   }
   * }
   * </code></pre>
   *
   * <p>State is subject to the following validity conditions:
   *
   * <ul>
   *   <li>Each state ID must be declared at most once.
   *   <li>Any state referenced in a parameter must be declared with the same state type.
   *   <li>State declarations must be final.
   * </ul>
   */
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
  @Target({ElementType.FIELD, ElementType.PARAMETER})
  @Experimental(Kind.STATE)
  public @interface StateId {
    /** The state ID. Must be declared at most once within a {@code DoFn}. */
    String value();
  }
/////////////////////////////////////////////////////////////////////////////
  /**
   * Annotation for declaring that a state parameter is always fetched.
   *
   * <p>A DoFn might not fetch a state value on every element, and for that reason runners may
   * choose to defer fetching state until read() is called. Annotating a state argument with this
   * parameter provides a hint to the runner that the state is always fetched. This may cause the
   * runner to prefetch all the state before calling the processElement or processTimer method,
   * improving performance. This is a performance-only hint - it does not change semantics. See the
   * following code for an example:
   *
   * <pre><code>{@literal new DoFn<KV<Key, Foo>, Baz>()} {
   *
   *  {@literal @StateId("my-state-id")}
   *  {@literal private final StateSpec<ValueState<MyState>>} myStateSpec =
   *       StateSpecs.value(new MyStateCoder());
   *
   *  {@literal @ProcessElement}
   *   public void processElement(
   *      {@literal @Element InputT element},
   *      {@literal @AlwaysFetched @StateId("my-state-id") ValueState<MyState> myState}) {
   *     myState.read();
   *     myState.write(...);
   *   }
   * }
   * </code></pre>
   *
   * <p>This can only be used on state objects that implement {@link
   * org.apache.beam.sdk.state.ReadableState}.
   *
   * @see StateId
   */
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
  @Target({ElementType.FIELD, ElementType.PARAMETER})
  @Experimental(Kind.STATE)
  public @interface AlwaysFetched {}
/**
* Annotation for declaring and dereferencing timers.
*
* <p>To declare a timer, create a field of type {@link TimerSpec} annotated with a {@link
* TimerId}. To use the cell during processing, add a parameter of the type {@link Timer} to your
* {@link ProcessElement @ProcessElement} or {@link OnTimer @OnTimer} method, and annotate it with
* {@link TimerId}. See the following code for an example:
*
* <pre><code>{@literal new DoFn<KV<Key, Foo>, Baz>()} {
* {@literal @TimerId("my-timer-id")}
* private final TimerSpec myTimer = TimerSpecs.timerForDomain(TimeDomain.EVENT_TIME);
*
* {@literal @ProcessElement}
* public void processElement(
* {@literal @Element InputT element},
* {@literal @TimerId("my-timer-id") Timer myTimer}) {
* myTimer.offset(Duration.standardSeconds(...)).setRelative();
* }
*
* {@literal @OnTimer("my-timer-id")}
* public void onMyTimer() {
* ...
* }
* }</code></pre>
*
* <p>Timers are subject to the following validity conditions:
*
* <ul>
* <li>Each timer must have a distinct id.
* <li>Any timer referenced in a parameter must be declared.
* <li>Timer declarations must be final.
* <li>All declared timers must have a corresponding callback annotated with {@link
* OnTimer @OnTimer}.
* </ul>
*/
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
  @Target({ElementType.FIELD, ElementType.PARAMETER})
  @Experimental(Kind.TIMERS)
  public @interface TimerId {
    /** The timer ID. Defaults to the empty string. */
    String value() default "";
  }
  /**
   * Parameter annotation for the {@link org.apache.beam.sdk.state.TimerMap} for a {@link
   * ProcessElement} method.
   */
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
  @Target({ElementType.FIELD, ElementType.PARAMETER})
  @Experimental(Kind.TIMERS)
  public @interface TimerFamily {
    /** The TimerMap tag ID. */
    String value();
  }
  /**
   * Parameter annotation for dereferencing the key of an input {@link
   * org.apache.beam.sdk.values.KV} pair.
   */
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
  @Target(ElementType.PARAMETER)
  public @interface Key {}
/** Annotation for specifying specific fields that are accessed in a Schema PCollection. */
@Retention(RetentionPolicy.RUNTIME)
@Target({ElementType.FIELD, ElementType.PARAMETER})
@Experimental(Kind.SCHEMAS)
public @interface FieldAccess {
String value();
}
/**
* Annotation for registering a callback for a timer.
*
* <p>See the javadoc for {@link TimerId} for use in a full example.
*
* <p>The method annotated with {@code @OnTimer} may have parameters according to the same logic
* as {@link ProcessElement}, but limited to the {@link BoundedWindow}, {@link State} subclasses,
* and {@link Timer}. State and timer parameters must be annotated with their {@link StateId} and
* {@link TimerId} respectively.
*/
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
  @Target(ElementType.METHOD)
  @Experimental(Kind.TIMERS)
  public @interface OnTimer {
    /** The ID of the timer, declared via {@link TimerId}, that this callback handles. */
    String value();
  }
/**
* Annotation for registering a callback for a timerFamily.
*
* <p>See the javadoc for {@link TimerFamily} for use in a full example.
*
* <p>The method annotated with {@code @OnTimerFamily} may have parameters according to the same
* logic as {@link ProcessElement}, but limited to the {@link BoundedWindow}, {@link State}
* subclasses, and {@link org.apache.beam.sdk.state.TimerMap}. State and timer parameters must be
   * annotated with their {@link StateId} and {@link TimerFamily} respectively.
*/
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
  @Target(ElementType.METHOD)
  @Experimental(Kind.TIMERS)
  public @interface OnTimerFamily {
    /** The timer family ID, declared via {@link TimerFamily}, that this callback handles. */
    String value();
  }
/**
* Annotation for the method to use for performing actions on window expiration. For example,
* users can use this annotation to write a method that extracts a value saved in a state before
* it gets garbage collected on window expiration.
*
* <p>The method annotated with {@code @OnWindowExpiration} may have parameters according to the
* same logic as {@link OnTimer}. See the following code for an example:
*
* <pre><code>{@literal new DoFn<KV<Key, Foo>, Baz>()} {
*
* {@literal @ProcessElement}
* public void processElement(ProcessContext c) {
* }
*
* {@literal @OnWindowExpiration}
* public void onWindowExpiration() {
* ...
* }
* }</code></pre>
*/
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
  @Target(ElementType.METHOD)
  @Experimental(Kind.STATE)
  public @interface OnWindowExpiration {} // Marker annotation: the method is invoked on window expiration.
/**
* Annotation for the method to use to prepare an instance for processing bundles of elements.
*
* <p>This is a good place to initialize transient in-memory resources, such as network
* connections. The resources can then be disposed in {@link Teardown}.
*
* <p>This is <b>not</b> a good place to perform external side-effects that later need cleanup,
* e.g. creating temporary files on distributed filesystems, starting VMs, or initiating data
* export jobs. Such logic must be instead implemented purely via {@link StartBundle}, {@link
* ProcessElement} and {@link FinishBundle} methods, references to the objects requiring cleanup
* must be passed as {@link PCollection} elements, and they must be cleaned up via regular Beam
* transforms, e.g. see the {@link Wait} transform.
*
* <p>The method annotated with this must satisfy the following constraints:
*
* <ul>
* <li>It must have zero arguments.
* </ul>
*/
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
  @Target(ElementType.METHOD)
  public @interface Setup {} // Marker annotation: the annotated method must take zero arguments.
/**
* Annotation for the method to use to prepare an instance for processing a batch of elements. The
* method annotated with this must satisfy the following constraints:
*
* <ul>
* <li>If one of the parameters is of type {@link DoFn.StartBundleContext}, then it will be
* passed a context object for the current execution.
* <li>If one of the parameters is of type {@link PipelineOptions}, then it will be passed the
* options for the current pipeline.
* <li>If one of the parameters is of type {@link BundleFinalizer}, then it will be passed a
* mechanism to register a callback that will be invoked after the runner successfully
* commits the output of this bundle. See <a
* href="https://s.apache.org/beam-finalizing-bundles">Apache Beam Portability API: How to
* Finalize Bundles</a> for further details.
* </ul>
*/
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
  @Target(ElementType.METHOD)
  public @interface StartBundle {} // Marker annotation: see the Javadoc above for the allowed parameters.
/**
* Annotation for the method to use for processing elements. A subclass of {@link DoFn} must have
* a method with this annotation.
*
* <p>If any of the arguments is a {@link RestrictionTracker} then see the specifications below
* about splittable {@link DoFn}, otherwise this method must satisfy the following constraints:
*
* <ul>
* <li>If one of its arguments is tagged with the {@link Element} annotation, then it will be
* passed the current element being processed. The argument type must match the input type
* of this DoFn exactly, or both types must have equivalent schemas registered.
* <li>If one of its arguments is tagged with the {@link Timestamp} annotation, then it will be
* passed the timestamp of the current element being processed; the argument must be of type
* {@link Instant}.
* <li>If one of its arguments is a subtype of {@link BoundedWindow}, then it will be passed the
* window of the current element. When applied by {@link ParDo} the subtype of {@link
* BoundedWindow} must match the type of windows on the input {@link PCollection}. If the
* window is not accessed a runner may perform additional optimizations.
* <li>If one of its arguments is of type {@link PaneInfo}, then it will be passed information
* about the current triggering pane.
* <li>If one of the parameters is of type {@link PipelineOptions}, then it will be passed the
* options for the current pipeline.
* <li>If one of the parameters is of type {@link OutputReceiver}, then it will be passed an
* output receiver for outputting elements to the default output.
* <li>If one of the parameters is of type {@link MultiOutputReceiver}, then it will be passed
* an output receiver for outputting to multiple tagged outputs.
* <li>If one of the parameters is of type {@link BundleFinalizer}, then it will be passed a
* mechanism to register a callback that will be invoked after the runner successfully
* commits the output of this bundle. See <a
* href="https://s.apache.org/beam-finalizing-bundles">Apache Beam Portability API: How to
* Finalize Bundles</a> for further details.
* <li>It must return {@code void}.
* </ul>
*
   * <h2>Splittable DoFns</h2>
*
* <p>A {@link DoFn} is <i>splittable</i> if its {@link ProcessElement} method has a parameter
* whose type is of {@link RestrictionTracker}. This is an advanced feature and an overwhelming
* majority of users will never need to write a splittable {@link DoFn}.
*
* <p>Not all runners support Splittable DoFn. See the <a
* href="https://beam.apache.org/documentation/runners/capability-matrix/">capability matrix</a>.
*
* <p>See <a href="https://s.apache.org/splittable-do-fn">the proposal</a> for an overview of the
* involved concepts (<i>splittable DoFn</i>, <i>restriction</i>, <i>restriction tracker</i>).
*
* <p>A splittable {@link DoFn} must obey the following constraints:
*
* <ul>
* <li>The type of restrictions used by all of these methods must be the same.
* <li>It <i>must</i> define a {@link GetInitialRestriction} method.
* <li>It <i>may</i> define a {@link GetSize} method or ensure that the {@link
* RestrictionTracker} implements {@link RestrictionTracker.HasProgress}. Poor auto-scaling
* of workers and/or splitting may result if size or progress is an inaccurate
* representation of work. See {@link GetSize} and {@link RestrictionTracker.HasProgress}
* for further details.
* <li>It <i>should</i> define a {@link SplitRestriction} method. This method enables runners to
* perform bulk splitting initially allowing for a rapid increase in parallelism. See {@link
* RestrictionTracker#trySplit} for details about splitting when the current element and
* restriction are actively being processed.
* <li>It <i>may</i> define a {@link NewTracker} method returning a subtype of {@code
* RestrictionTracker<R>} where {@code R} is the restriction type returned by {@link
* GetInitialRestriction}. This method is optional only if the restriction type returned by
* {@link GetInitialRestriction} implements {@link HasDefaultTracker}.
* <li>It <i>may</i> define a {@link GetRestrictionCoder} method.
* <li>It <i>may</i> define a {@link GetInitialWatermarkEstimatorState} method. If none is
* defined then the watermark estimator state is of type {@link Void}.
* <li>It <i>may</i> define a {@link GetWatermarkEstimatorStateCoder} method.
* <li>It <i>may</i> define a {@link NewWatermarkEstimator} method returning a subtype of {@code
* WatermarkEstimator<W>} where {@code W} is the watermark estimator state type returned by
* {@link GetInitialWatermarkEstimatorState}. This method is optional only if {@link
* GetInitialWatermarkEstimatorState} has not been defined or {@code W} implements {@link
* HasDefaultWatermarkEstimator}.
* <li>The {@link DoFn} itself <i>may</i> be annotated with {@link BoundedPerElement} or {@link
* UnboundedPerElement}, but not both at the same time. If it's not annotated with either of
* these, it's assumed to be {@link BoundedPerElement} if its {@link ProcessElement} method
* returns {@code void} and {@link UnboundedPerElement} if it returns a {@link
* ProcessContinuation}.
* <li>Timers and state must not be used.
* </ul>
*
* <p>If this DoFn is splittable, this method must satisfy the following constraints:
*
* <ul>
* <li>One of its arguments must be a {@link RestrictionTracker}. The argument must be of the
* exact type {@code RestrictionTracker<RestrictionT, PositionT>}.
* <li>If one of its arguments is tagged with the {@link Element} annotation, then it will be
* passed the current element being processed. The argument type must match the input type
* of this DoFn exactly, or both types must have equivalent schemas registered.
* <li>If one of its arguments is tagged with the {@link Restriction} annotation, then it will
* be passed the current restriction being processed; the argument must be of type {@code
* RestrictionT}.
* <li>If one of its arguments is tagged with the {@link Timestamp} annotation, then it will be
* passed the timestamp of the current element being processed; the argument must be of type
* {@link Instant}.
* <li>If one of its arguments is of the type {@link WatermarkEstimator}, then it will be passed
* the watermark estimator.
* <li>If one of its arguments is of the type {@link ManualWatermarkEstimator}, then it will be
* passed a watermark estimator that can be updated manually. This parameter can only be
* supplied if the method annotated with {@link GetInitialWatermarkEstimatorState} returns a
* sub-type of {@link ManualWatermarkEstimator}.
* <li>If one of its arguments is a subtype of {@link BoundedWindow}, then it will be passed the
* window of the current element. When applied by {@link ParDo} the subtype of {@link
* BoundedWindow} must match the type of windows on the input {@link PCollection}. If the
* window is not accessed a runner may perform additional optimizations.
* <li>If one of its arguments is of type {@link PaneInfo}, then it will be passed information
* about the current triggering pane.
* <li>If one of the parameters is of type {@link PipelineOptions}, then it will be passed the
* options for the current pipeline.
* <li>If one of the parameters is of type {@link OutputReceiver}, then it will be passed an
* output receiver for outputting elements to the default output.
* <li>If one of the parameters is of type {@link MultiOutputReceiver}, then it will be passed
* an output receiver for outputting to multiple tagged outputs.
* <li>If one of the parameters is of type {@link BundleFinalizer}, then it will be passed a
* mechanism to register a callback that will be invoked after the runner successfully
* commits the output of this bundle. See <a
* href="https://s.apache.org/beam-finalizing-bundles">Apache Beam Portability API: How to
* Finalize Bundles</a> for further details.
* <li>May return a {@link ProcessContinuation} to indicate whether there is more work to be
* done for the current element, otherwise must return {@code void}.
* </ul>
*/
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
  @Target(ElementType.METHOD)
  public @interface ProcessElement {} // Marker annotation: a DoFn subclass must have a method with this annotation.
/**
* Parameter annotation for the input element for {@link ProcessElement}, {@link
* GetInitialRestriction}, {@link GetSize}, {@link SplitRestriction}, {@link
* GetInitialWatermarkEstimatorState}, {@link NewWatermarkEstimator}, and {@link NewTracker}
* methods.
*/
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
  @Target(ElementType.PARAMETER)
  public @interface Element {} // Marker annotation: the parameter receives the current input element.
/**
* Parameter annotation for the restriction for {@link GetSize}, {@link SplitRestriction}, {@link
* GetInitialWatermarkEstimatorState}, {@link NewWatermarkEstimator}, and {@link NewTracker}
* methods. Must match the return type used on the method annotated with {@link
* GetInitialRestriction}.
*/
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
  @Target(ElementType.PARAMETER)
  public @interface Restriction {} // Marker annotation: the parameter receives the current restriction.
/**
* Parameter annotation for the input element timestamp for {@link ProcessElement}, {@link
* GetInitialRestriction}, {@link GetSize}, {@link SplitRestriction}, {@link
* GetInitialWatermarkEstimatorState}, {@link NewWatermarkEstimator}, and {@link NewTracker}
* methods.
*/
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
  @Target(ElementType.PARAMETER)
  public @interface Timestamp {} // Marker annotation: the parameter receives the current element's timestamp.
  /** Parameter annotation for the SideInput for a {@link ProcessElement} method. */
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
  @Target(ElementType.PARAMETER)
  public @interface SideInput {
    /** The tag ID of the side input to dereference. */
    String value();
  }
/**
* <b><i>Experimental - no backwards compatibility guarantees. The exact name or usage of this
* feature may change.</i></b>
*
* <p>Annotation that may be added to a {@link ProcessElement}, {@link OnTimer}, or {@link
* OnWindowExpiration} method to indicate that the runner must ensure that the observable contents
* of the input {@link PCollection} or mutable state must be stable upon retries.
*
* <p>This is important for sinks, which must ensure exactly-once semantics when writing to a
* storage medium outside of your pipeline. A general pattern for a basic sink is to write a
* {@link DoFn} that can perform an idempotent write, and annotate that it requires stable input.
* Combined, these allow the write to be freely retried until success.
*
* <p>An example of an unstable input would be anything computed using nondeterministic logic. In
* Beam, any user-defined function is permitted to be nondeterministic, and any {@link
* PCollection} is permitted to be recomputed in any manner.
*/
  @Documented
  @Experimental
  @Retention(RetentionPolicy.RUNTIME)
  @Target(ElementType.METHOD)
  public @interface RequiresStableInput {} // Marker annotation: input must be stable across retries.
/**
* <b><i>Experimental - no backwards compatibility guarantees. The exact name or usage of this
* feature may change.</i></b>
*
* <p>Annotation that may be added to a {@link ProcessElement} method to indicate that the runner
* must ensure that the observable contents of the input {@link PCollection} is sorted by time, in
* ascending order. The time ordering is defined by element's timestamp, ordering of elements with
* equal timestamps is not defined.
*
   * <p>Note that this annotation makes sense only for stateful {@code ParDo}s, because the outcome
   * of stateless functions cannot depend on the ordering.
*
* <p>This annotation respects specified <i>allowedLateness</i> defined in {@link
   * WindowingStrategy}. All data is emitted <b>after</b> the input watermark passes the element's
   * timestamp + allowedLateness. The output watermark is held back, so that the emitted data is
   * not treated as late data.
*
   * <p>The ordering requirement implies that all data that arrives later than the allowed lateness
   * will have to be dropped. This might change in the future with the introduction of retractions.
*/
  @Documented
  @Experimental
  @Retention(RetentionPolicy.RUNTIME)
  @Target(ElementType.METHOD)
  public @interface RequiresTimeSortedInput {} // Marker annotation: input must arrive sorted by event timestamp.
/**
* Annotation for the method to use to finish processing a batch of elements. The method annotated
* with this must satisfy the following constraints:
*
* <ul>
* <li>If one of the parameters is of type {@link DoFn.FinishBundleContext}, then it will be
* passed a context object for the current execution.
* <li>If one of the parameters is of type {@link PipelineOptions}, then it will be passed the
* options for the current pipeline.
* <li>If one of the parameters is of type {@link BundleFinalizer}, then it will be passed a
* mechanism to register a callback that will be invoked after the runner successfully
* commits the output of this bundle. See <a
* href="https://s.apache.org/beam-finalizing-bundles">Apache Beam Portability API: How to
* Finalize Bundles</a> for further details.
* <li>TODO(BEAM-1287): Add support for an {@link OutputReceiver} and {@link
* MultiOutputReceiver} that can output to a window.
* </ul>
*
* <p>Note that {@link FinishBundle @FinishBundle} is invoked before the runner commits the output
* while {@link BundleFinalizer.Callback bundle finalizer callbacks} are invoked after the runner
* has committed the output of a successful bundle.
*/
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
  @Target(ElementType.METHOD)
  public @interface FinishBundle {} // Marker annotation: invoked before the runner commits the bundle's output.
/**
* Annotation for the method to use to clean up this instance before it is discarded. No other
* method will be called after a call to the annotated method is made.
*
* <p>A runner will do its best to call this method on any given instance to prevent leaks of
* transient resources, however, there may be situations where this is impossible (e.g. process
* crash, hardware failure, etc.) or unnecessary (e.g. the pipeline is shutting down and the
* process is about to be killed anyway, so all transient resources will be released automatically
* by the OS). In these cases, the call may not happen. It will also not be retried, because in
* such situations the DoFn instance no longer exists, so there's no instance to retry it on.
*
* <p>Thus, all work that depends on input elements, and all externally important side effects,
* must be performed in the {@link ProcessElement} or {@link FinishBundle} methods.
*
* <p>Example things that are a good idea to do in this method:
*
* <ul>
* <li>Close a network connection that was opened in {@link Setup}
* <li>Shut down a helper process that was started in {@link Setup}
* </ul>
*
* <p>Example things that MUST NOT be done in this method:
*
* <ul>
* <li>Flushing a batch of buffered records to a database: this must be done in {@link
* FinishBundle}.
* <li>Deleting temporary files on a distributed filesystem: this must be done using the
* pipeline structure, e.g. using the {@link Wait} transform.
* </ul>
*
* <p>The method annotated with this must satisfy the following constraint:
*
* <ul>
* <li>It must have zero arguments.
* </ul>
*/
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
  @Target(ElementType.METHOD)
  public @interface Teardown {} // Marker annotation: best-effort cleanup; the method must take zero arguments.
/**
* Annotation for the method that maps an element to an initial restriction for a <a
* href="https://s.apache.org/splittable-do-fn">splittable</a> {@link DoFn}.
*
* <p>Signature: {@code RestrictionT getInitialRestriction(<arguments>);}
*
* <p>This method must satisfy the following constraints:
*
* <ul>
* <li>The return type {@code RestrictionT} defines the restriction type used within this
* splittable DoFn. All other methods that use a {@link Restriction @Restriction} parameter
* must use the same type that is used here. It is suggested to use as narrow of a return
* type definition as possible (for example prefer to use a square type over a shape type as
* a square is a type of a shape).
* <li>If one of its arguments is tagged with the {@link Element} annotation, then it will be
* passed the current element being processed; the argument must be of type {@code InputT}.
* Note that automatic conversion of {@link Row}s and {@link FieldAccess} parameters are
* currently unsupported.
* <li>If one of its arguments is tagged with the {@link Timestamp} annotation, then it will be
* passed the timestamp of the current element being processed; the argument must be of type
* {@link Instant}.
* <li>If one of its arguments is a subtype of {@link BoundedWindow}, then it will be passed the
* window of the current element. When applied by {@link ParDo} the subtype of {@link
* BoundedWindow} must match the type of windows on the input {@link PCollection}. If the
* window is not accessed a runner may perform additional optimizations.
* <li>If one of its arguments is of type {@link PaneInfo}, then it will be passed information
* about the current triggering pane.
* <li>If one of the parameters is of type {@link PipelineOptions}, then it will be passed the
* options for the current pipeline.
* </ul>
*/
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
  @Target(ElementType.METHOD)
  @Experimental(Kind.SPLITTABLE_DO_FN)
  public @interface GetInitialRestriction {} // Marker annotation: maps an element to its initial restriction.
/**
* Annotation for the method that returns the corresponding size for an element and restriction
* pair.
*
* <p>Signature: {@code double getSize(<arguments>);}
*
* <p>This method must satisfy the following constraints:
*
* <ul>
* <li>If one of its arguments is tagged with the {@link Element} annotation, then it will be
* passed the current element being processed; the argument must be of type {@code InputT}.
* Note that automatic conversion of {@link Row}s and {@link FieldAccess} parameters are
* currently unsupported.
* <li>If one of its arguments is tagged with the {@link Restriction} annotation, then it will
* be passed the current restriction being processed; the argument must be of type {@code
* RestrictionT}.
* <li>If one of its arguments is tagged with the {@link Timestamp} annotation, then it will be
* passed the timestamp of the current element being processed; the argument must be of type
* {@link Instant}.
* <li>If one of its arguments is a {@link RestrictionTracker}, then it will be passed a tracker
* that is initialized for the current {@link Restriction}. The argument must be of the
* exact type {@code RestrictionTracker<RestrictionT, PositionT>}.
* <li>If one of its arguments is a subtype of {@link BoundedWindow}, then it will be passed the
* window of the current element. When applied by {@link ParDo} the subtype of {@link
* BoundedWindow} must match the type of windows on the input {@link PCollection}. If the
* window is not accessed a runner may perform additional optimizations.
* <li>If one of its arguments is of type {@link PaneInfo}, then it will be passed information
* about the current triggering pane.
* <li>If one of the parameters is of type {@link PipelineOptions}, then it will be passed the
* options for the current pipeline.
* </ul>
*
* <p>Returns a double representing the size of the current element and restriction.
*
* <p>Splittable {@link DoFn}s should only provide this method if the default {@link
* RestrictionTracker.HasProgress} implementation within the {@link RestrictionTracker} is an
* inaccurate representation of known work.
*
   * <p>It is up to each splittable {@link DoFn} to convert between its natural representation of
* outstanding work and this representation. For example:
*
* <ul>
* <li>Block based file source (e.g. Avro): The number of bytes that will be read from the file.
* <li>Pull based queue based source (e.g. Pubsub): The local/global size available in number of
* messages or number of {@code message bytes} that have not been processed.
* <li>Key range based source (e.g. Shuffle, Bigtable, ...): Typically {@code 1.0} unless
* additional details such as the number of bytes for keys and values is known for the key
* range.
* </ul>
*/
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
  @Target(ElementType.METHOD)
  @Experimental(Kind.SPLITTABLE_DO_FN)
  public @interface GetSize {} // Marker annotation: returns a double size for an element/restriction pair.
/**
* Annotation for the method that returns the coder to use for the restriction of a <a
* href="https://s.apache.org/splittable-do-fn">splittable</a> {@link DoFn}.
*
* <p>If not defined, a coder will be inferred using standard coder inference rules and the
* pipeline's {@link Pipeline#getCoderRegistry coder registry}.
*
* <p>This method will be called only at pipeline construction time.
*
* <p>Signature: {@code Coder<RestrictionT> getRestrictionCoder();}
*/
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
  @Target(ElementType.METHOD)
  @Experimental(Kind.SPLITTABLE_DO_FN)
  public @interface GetRestrictionCoder {} // Marker annotation: called only at pipeline construction time.
/**
* Annotation for the method that splits restriction of a <a
* href="https://s.apache.org/splittable-do-fn">splittable</a> {@link DoFn} into multiple parts to
* be processed in parallel.
*
* <p>This method is used to perform bulk splitting while a restriction is not actively being
* processed while {@link RestrictionTracker#trySplit} is used to perform splitting during
* processing.
*
* <p>Signature: {@code void splitRestriction(<arguments>);}
*
* <p>This method must satisfy the following constraints:
*
* <ul>
* <li>If one of the arguments is of type {@link OutputReceiver}, then it will be passed an
* output receiver for outputting the splits. All splits must be output through this
* parameter.
* <li>If one of its arguments is tagged with the {@link Element} annotation, then it will be
* passed the current element being processed; the argument must be of type {@code InputT}.
* Note that automatic conversion of {@link Row}s and {@link FieldAccess} parameters are
* currently unsupported.
* <li>If one of its arguments is tagged with the {@link Restriction} annotation, then it will
* be passed the current restriction being processed; the argument must be of type {@code
* RestrictionT}.
* <li>If one of its arguments is tagged with the {@link Timestamp} annotation, then it will be
* passed the timestamp of the current element being processed; the argument must be of type
* {@link Instant}.
* <li>If one of its arguments is a {@link RestrictionTracker}, then it will be passed a tracker
* that is initialized for the current {@link Restriction}. The argument must be of the
* exact type {@code RestrictionTracker<RestrictionT, PositionT>}.
* <li>If one of its arguments is a subtype of {@link BoundedWindow}, then it will be passed the
* window of the current element. When applied by {@link ParDo} the subtype of {@link
* BoundedWindow} must match the type of windows on the input {@link PCollection}. If the
* window is not accessed a runner may perform additional optimizations.
* <li>If one of its arguments is of type {@link PaneInfo}, then it will be passed information
* about the current triggering pane.
* <li>If one of the parameters is of type {@link PipelineOptions}, then it will be passed the
* options for the current pipeline.
* </ul>
*/
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
  @Target(ElementType.METHOD)
  @Experimental(Kind.SPLITTABLE_DO_FN)
  public @interface SplitRestriction {} // Marker annotation: performs bulk splitting of a restriction.
/**
* Annotation for the method that creates a new {@link RestrictionTracker} for the restriction of
* a <a href="https://s.apache.org/splittable-do-fn">splittable</a> {@link DoFn}.
*
* <p>Signature: {@code MyRestrictionTracker newTracker(<optional arguments>);}
*
* <p>This method must satisfy the following constraints:
*
* <ul>
* <li>The return type must be a subtype of {@code RestrictionTracker<RestrictionT, PositionT>}.
* It is suggested to use as narrow of a return type definition as possible (for example
* prefer to use a square type over a shape type as a square is a type of a shape).
* <li>If one of its arguments is tagged with the {@link Element} annotation, then it will be
* passed the current element being processed; the argument must be of type {@code InputT}.
* Note that automatic conversion of {@link Row}s and {@link FieldAccess} parameters are
* currently unsupported.
* <li>If one of its arguments is tagged with the {@link Restriction} annotation, then it will
* be passed the current restriction being processed; the argument must be of type {@code
* RestrictionT}.
* <li>If one of its arguments is tagged with the {@link Timestamp} annotation, then it will be
* passed the timestamp of the current element being processed; the argument must be of type
* {@link Instant}.
* <li>If one of its arguments is a subtype of {@link BoundedWindow}, then it will be passed the
* window of the current element. When applied by {@link ParDo} the subtype of {@link
* BoundedWindow} must match the type of windows on the input {@link PCollection}. If the
* window is not accessed a runner may perform additional optimizations.
* <li>If one of its arguments is of type {@link PaneInfo}, then it will be passed information
* about the current triggering pane.
* <li>If one of the parameters is of type {@link PipelineOptions}, then it will be passed the
* options for the current pipeline.
* </ul>
*/
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
  @Target(ElementType.METHOD)
  @Experimental(Kind.SPLITTABLE_DO_FN)
  public @interface NewTracker {} // Marker annotation: creates a RestrictionTracker for a restriction.
/**
* Annotation for the method that maps an element and restriction to initial watermark estimator
* state for a <a href="https://s.apache.org/splittable-do-fn">splittable</a> {@link DoFn}.
*
* <p>Signature: {@code WatermarkEstimatorStateT getInitialWatermarkState(<arguments>);}
*
* <p>This method must satisfy the following constraints:
*
* <ul>
* <li>The return type {@code WatermarkEstimatorStateT} defines the watermark state type used
* within this splittable DoFn. All other methods that use a {@link
* WatermarkEstimatorState @WatermarkEstimatorState} parameter must use the same type that
* is used here. It is suggested to use as narrow of a return type definition as possible
* (for example prefer to use a square type over a shape type as a square is a type of a
* shape).
* <li>If one of its arguments is tagged with the {@link Element} annotation, then it will be
* passed the current element being processed; the argument must be of type {@code InputT}.
* Note that automatic conversion of {@link Row}s and {@link FieldAccess} parameters are
* currently unsupported.
* <li>If one of its arguments is tagged with the {@link Restriction} annotation, then it will
* be passed the current restriction being processed; the argument must be of type {@code
* RestrictionT}.
* <li>If one of its arguments is tagged with the {@link Timestamp} annotation, then it will be
* passed the timestamp of the current element being processed; the argument must be of type
* {@link Instant}.
* <li>If one of its arguments is a subtype of {@link BoundedWindow}, then it will be passed the
* window of the current element. When applied by {@link ParDo} the subtype of {@link
* BoundedWindow} must match the type of windows on the input {@link PCollection}. If the
* window is not accessed a runner may perform additional optimizations.
* <li>If one of its arguments is of type {@link PaneInfo}, then it will be passed information
* about the current triggering pane.
* <li>If one of the parameters is of type {@link PipelineOptions}, then it will be passed the
* options for the current pipeline.
* </ul>
*/
@Documented
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.METHOD)
@Experimental(Kind.SPLITTABLE_DO_FN)
public @interface GetInitialWatermarkEstimatorState {}
  /**
   * Annotation for the method that returns the coder to use for the watermark estimator state of a
   * <a href="https://s.apache.org/splittable-do-fn">splittable</a> {@link DoFn}.
   *
   * <p>If not defined, a coder will be inferred using standard coder inference rules and the
   * pipeline's {@link Pipeline#getCoderRegistry coder registry}.
   *
   * <p>This method will be called only at pipeline construction time. The annotated method takes
   * no arguments.
   *
   * <p>Signature: {@code Coder<WatermarkEstimatorStateT> getWatermarkEstimatorStateCoder();}
   */
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
  @Target(ElementType.METHOD)
  @Experimental(Kind.SPLITTABLE_DO_FN)
  public @interface GetWatermarkEstimatorStateCoder {}
  /**
   * Annotation for the method that creates a new {@link WatermarkEstimator} for the watermark state
   * of a <a href="https://s.apache.org/splittable-do-fn">splittable</a> {@link DoFn}.
   *
   * <p>Signature: {@code MyWatermarkEstimator newWatermarkEstimator(<optional arguments>);}
   *
   * <p>If the return type is a subtype of {@link TimestampObservingWatermarkEstimator} then the
   * timestamp of each element output from this DoFn is provided to the watermark estimator.
   *
   * <p>This method must satisfy the following constraints:
   *
   * <ul>
   *   <li>The return type must be a subtype of {@code
   *       WatermarkEstimator<WatermarkEstimatorStateT>}. It is suggested to use as narrow of a
   *       return type definition as possible (for example prefer to use a square type over a shape
   *       type as a square is a type of a shape).
   *   <li>If one of its arguments is tagged with the {@link WatermarkEstimatorState} annotation,
   *       then it will be passed the current watermark estimator state; the argument must be of
   *       type {@code WatermarkEstimatorStateT}.
   *   <li>If one of its arguments is tagged with the {@link Element} annotation, then it will be
   *       passed the current element being processed; the argument must be of type {@code InputT}.
   *       Note that automatic conversion of {@link Row}s and {@link FieldAccess} parameters is
   *       currently unsupported.
   *   <li>If one of its arguments is tagged with the {@link Restriction} annotation, then it will
   *       be passed the current restriction being processed; the argument must be of type {@code
   *       RestrictionT}.
   *   <li>If one of its arguments is tagged with the {@link Timestamp} annotation, then it will be
   *       passed the timestamp of the current element being processed; the argument must be of type
   *       {@link Instant}.
   *   <li>If one of its arguments is a subtype of {@link BoundedWindow}, then it will be passed the
   *       window of the current element. When applied by {@link ParDo} the subtype of {@link
   *       BoundedWindow} must match the type of windows on the input {@link PCollection}. If the
   *       window is not accessed a runner may perform additional optimizations.
   *   <li>If one of its arguments is of type {@link PaneInfo}, then it will be passed information
   *       about the current triggering pane.
   *   <li>If one of the parameters is of type {@link PipelineOptions}, then it will be passed the
   *       options for the current pipeline.
   * </ul>
   */
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
  @Target(ElementType.METHOD)
  @Experimental(Kind.SPLITTABLE_DO_FN)
  public @interface NewWatermarkEstimator {}
  /**
   * Parameter annotation for the watermark estimator state for the {@link NewWatermarkEstimator}
   * method. Must match the return type on the method annotated with {@link
   * GetInitialWatermarkEstimatorState}.
   *
   * <p>The annotated parameter must be of type {@code WatermarkEstimatorStateT}.
   */
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
  @Target(ElementType.PARAMETER)
  @Experimental(Kind.SPLITTABLE_DO_FN)
  public @interface WatermarkEstimatorState {}
  /**
   * Annotation on a <a href="https://s.apache.org/splittable-do-fn">splittable</a> {@link DoFn}
   * specifying that the {@link DoFn} performs a bounded amount of work per input element, so
   * applying it to a bounded {@link PCollection} will also produce a bounded {@link PCollection}.
   * It is an error to specify this on a non-splittable {@link DoFn}.
   */
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
  @Target(ElementType.TYPE)
  @Experimental(Kind.SPLITTABLE_DO_FN)
  public @interface BoundedPerElement {}
  /**
   * Annotation on a <a href="https://s.apache.org/splittable-do-fn">splittable</a> {@link DoFn}
   * specifying that the {@link DoFn} performs an unbounded amount of work per input element, so
   * applying it to a bounded {@link PCollection} will produce an unbounded {@link PCollection}. It
   * is an error to specify this on a non-splittable {@link DoFn}.
   */
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
  @Target(ElementType.TYPE)
  @Experimental(Kind.SPLITTABLE_DO_FN)
  public @interface UnboundedPerElement {}
  // Shared immutable "stop" instance returned by ProcessContinuation#stop().
  //
  // This can't be put into ProcessContinuation itself due to the following problem:
  // http://ternarysearch.blogspot.com/2013/07/static-initialization-deadlock.html
  private static final ProcessContinuation PROCESS_CONTINUATION_STOP =
      new AutoValue_DoFn_ProcessContinuation(false, Duration.ZERO);
  /**
   * When used as a return value of {@link ProcessElement}, indicates whether there is more work to
   * be done for the current element.
   *
   * <p>If the {@link ProcessElement} call completes because of a failed {@code tryClaim()} call on
   * the {@link RestrictionTracker}, then the call MUST return {@link #stop()}.
   */
  @Experimental(Kind.SPLITTABLE_DO_FN)
  @AutoValue
  public abstract static class ProcessContinuation {
    /**
     * Indicates that there is no more work to be done for the current element.
     *
     * <p>Returns a shared immutable instance whose {@link #resumeDelay()} is zero.
     */
    public static ProcessContinuation stop() {
      return PROCESS_CONTINUATION_STOP;
    }
    /**
     * Indicates that there is more work to be done for the current element.
     *
     * <p>The returned continuation has a zero {@link #resumeDelay()}; use {@link #withResumeDelay}
     * to request a minimum delay before the call is resumed.
     */
    public static ProcessContinuation resume() {
      return new AutoValue_DoFn_ProcessContinuation(true, Duration.ZERO);
    }
    /**
     * If false, the {@link DoFn} promises that there is no more work remaining for the current
     * element, so the runner should not resume the {@link ProcessElement} call.
     */
    public abstract boolean shouldResume();
    /**
     * A minimum duration that should elapse between the end of this {@link ProcessElement} call and
     * the {@link ProcessElement} call continuing processing of the same element. By default, zero.
     */
    public abstract Duration resumeDelay();
    /** Builder method to set the value of {@link #resumeDelay()}. */
    public ProcessContinuation withResumeDelay(Duration resumeDelay) {
      return new AutoValue_DoFn_ProcessContinuation(shouldResume(), resumeDelay);
    }
  }
  /**
   * Finalize the {@link DoFn} construction to prepare for processing. This method should be called
   * by runners before any processing methods.
   *
   * <p>This implementation is intentionally a no-op; it exists only for backwards compatibility.
   *
   * @deprecated use {@link Setup} or {@link StartBundle} instead. This method will be removed in a
   *     future release.
   */
  @Deprecated
  public final void prepareForProcessing() {}
  /**
   * {@inheritDoc}
   *
   * <p>By default, does not register any display data. Implementors may override this method to
   * provide their own display data.
   *
   * @param builder the builder with which to register display data
   */
  @Override
  public void populateDisplayData(DisplayData.Builder builder) {}
  /**
   * A parameter that is accessible during {@link StartBundle @StartBundle}, {@link
   * ProcessElement @ProcessElement} and {@link FinishBundle @FinishBundle} that allows the caller
   * to register a callback that will be invoked after the bundle has been successfully completed
   * and the runner has committed the output.
   *
   * <p>A common usage would be to perform any acknowledgements required by an external system such
   * as acking messages from a message queue since this callback is only invoked after the output of
   * the bundle has been durably persisted by the runner.
   *
   * <p>Note that a runner may make the output of the bundle available immediately to downstream
   * consumers without waiting for finalization to succeed. For pipelines that are sensitive to
   * duplicate messages, they must perform output deduplication in the pipeline.
   */
  @Experimental(Kind.PORTABILITY)
  public interface BundleFinalizer {
    /**
     * The provided function will be called after the runner successfully commits the output of a
     * successful bundle. Throwing during finalization represents that bundle finalization may have
     * failed and the runner may choose to attempt finalization again. The provided {@code
     * callbackExpiry} controls how long the finalization is valid for before it is garbage
     * collected and no longer able to be invoked.
     *
     * <p>Note that finalization is best effort and it is expected that the external system will
     * self recover state if finalization never happens or consistently fails. For example, a queue
     * based system that requires message acknowledgement would replay messages if that
     * acknowledgement was never received within the provided time bound.
     *
     * <p>See <a href="https://s.apache.org/beam-finalizing-bundles">Apache Beam Portability API:
     * How to Finalize Bundles</a> for further details.
     *
     * @param callbackExpiry When the finalization callback expires. If the runner cannot commit
     *     results and execute the callback within this duration, the callback will not be invoked.
     * @param callback The finalization callback method for the runner to invoke after processing
     *     results have been successfully committed.
     */
    void afterBundleCommit(Instant callbackExpiry, Callback callback);
    /**
     * An instance of a function that will be invoked after bundle finalization.
     *
     * <p>Note that this function should maintain all state necessary outside of a DoFn's context to
     * be able to perform bundle finalization and should not rely on mutable state stored within a
     * DoFn instance.
     */
    @FunctionalInterface
    interface Callback {
      void onBundleSuccess() throws Exception;
    }
  }
}
|
sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/DoFn.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.beam.sdk.transforms;
import com.google.auto.value.AutoValue;
import java.io.Serializable;
import java.lang.annotation.Documented;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import javax.annotation.Nullable;
import org.apache.beam.sdk.Pipeline;
import org.apache.beam.sdk.annotations.Experimental;
import org.apache.beam.sdk.annotations.Experimental.Kind;
import org.apache.beam.sdk.options.PipelineOptions;
import org.apache.beam.sdk.state.State;
import org.apache.beam.sdk.state.StateSpec;
import org.apache.beam.sdk.state.TimeDomain;
import org.apache.beam.sdk.state.Timer;
import org.apache.beam.sdk.state.TimerSpec;
import org.apache.beam.sdk.testing.TestPipeline;
import org.apache.beam.sdk.testing.TestStream;
import org.apache.beam.sdk.transforms.display.DisplayData;
import org.apache.beam.sdk.transforms.display.HasDisplayData;
import org.apache.beam.sdk.transforms.splittabledofn.HasDefaultTracker;
import org.apache.beam.sdk.transforms.splittabledofn.HasDefaultWatermarkEstimator;
import org.apache.beam.sdk.transforms.splittabledofn.ManualWatermarkEstimator;
import org.apache.beam.sdk.transforms.splittabledofn.RestrictionTracker;
import org.apache.beam.sdk.transforms.splittabledofn.TimestampObservingWatermarkEstimator;
import org.apache.beam.sdk.transforms.splittabledofn.WatermarkEstimator;
import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
import org.apache.beam.sdk.transforms.windowing.PaneInfo;
import org.apache.beam.sdk.transforms.windowing.Window;
import org.apache.beam.sdk.values.PCollection;
import org.apache.beam.sdk.values.PCollectionView;
import org.apache.beam.sdk.values.Row;
import org.apache.beam.sdk.values.TupleTag;
import org.apache.beam.sdk.values.TypeDescriptor;
import org.apache.beam.sdk.values.WindowingStrategy;
import org.joda.time.Duration;
import org.joda.time.Instant;
/**
* The argument to {@link ParDo} providing the code to use to process elements of the input {@link
* org.apache.beam.sdk.values.PCollection}.
*
* <p>See {@link ParDo} for more explanation, examples of use, and discussion of constraints on
* {@code DoFn}s, including their serializability, lack of access to global shared mutable state,
* requirements for failure tolerance, and benefits of optimization.
*
* <p>{@link DoFn DoFns} can be tested by using {@link TestPipeline}. You can verify their
* functional correctness in a local test using the {@code DirectRunner} as well as running
* integration tests with your production runner of choice. Typically, you can generate the input
* data using {@link Create#of} or other transforms. However, if you need to test the behavior of
* {@link StartBundle} and {@link FinishBundle} with particular bundle boundaries, you can use
* {@link TestStream}.
*
* <p>Implementations must define a method annotated with {@link ProcessElement} that satisfies the
* requirements described there. See the {@link ProcessElement} for details.
*
* <p>Example usage:
*
* <pre><code>
* {@literal PCollection<String>} lines = ... ;
* {@literal PCollection<String>} words =
* {@literal lines.apply(ParDo.of(new DoFn<String, String>())} {
* {@literal @ProcessElement}
* public void processElement({@literal @}Element String element, BoundedWindow window) {
* ...
* }}));
* </code></pre>
*
* @param <InputT> the type of the (main) input elements
* @param <OutputT> the type of the (main) output elements
*/
public abstract class DoFn<InputT, OutputT> implements Serializable, HasDisplayData {
  /** Information accessible while within the {@link StartBundle} method. */
  @SuppressWarnings("ClassCanBeStatic") // Converting class to static is an API change.
  public abstract class StartBundleContext {
    /**
     * Returns the {@code PipelineOptions} specified with the {@link
     * org.apache.beam.sdk.PipelineRunner} invoking this {@code DoFn}.
     *
     * @return the options for the current pipeline
     */
    public abstract PipelineOptions getPipelineOptions();
  }
  /** Information accessible while within the {@link FinishBundle} method. */
  public abstract class FinishBundleContext {
    /**
     * Returns the {@code PipelineOptions} specified with the {@link
     * org.apache.beam.sdk.PipelineRunner} invoking this {@code DoFn}.
     */
    public abstract PipelineOptions getPipelineOptions();
    /**
     * Adds the given element to the main output {@code PCollection} at the given timestamp in the
     * given window.
     *
     * <p>Once passed to {@code output} the element should not be modified in any way.
     *
     * <p><i>Note:</i> A splittable {@link DoFn} is not allowed to output from the {@link
     * FinishBundle} method.
     *
     * @param output the element to add to the main output; may be {@code null}
     */
    public abstract void output(@Nullable OutputT output, Instant timestamp, BoundedWindow window);
    /**
     * Adds the given element to the output {@code PCollection} with the given tag at the given
     * timestamp in the given window.
     *
     * <p>Once passed to {@code output} the element should not be modified in any way.
     *
     * <p><i>Note:</i> A splittable {@link DoFn} is not allowed to output from the {@link
     * FinishBundle} method.
     */
    public abstract <T> void output(
        TupleTag<T> tag, T output, Instant timestamp, BoundedWindow window);
  }
  /**
   * Information accessible to all methods in this {@link DoFn} where the context is in some window.
   */
  public abstract class WindowedContext {
    /**
     * Returns the {@code PipelineOptions} specified with the {@link
     * org.apache.beam.sdk.PipelineRunner} invoking this {@code DoFn}.
     */
    public abstract PipelineOptions getPipelineOptions();
    /**
     * Adds the given element to the main output {@code PCollection}.
     *
     * <p>Once passed to {@code output} the element should not be modified in any way.
     *
     * <p>If invoked from {@link ProcessElement}, the output element will have the same timestamp
     * and be in the same windows as the input element passed to the method annotated with
     * {@code @ProcessElement}.
     *
     * <p>If invoked from {@link StartBundle} or {@link FinishBundle}, this will attempt to use the
     * {@link org.apache.beam.sdk.transforms.windowing.WindowFn} of the input {@code PCollection} to
     * determine what windows the element should be in, throwing an exception if the {@code
     * WindowFn} attempts to access any information about the input element. The output element will
     * have a timestamp of negative infinity.
     *
     * <p><i>Note:</i> A splittable {@link DoFn} is not allowed to output from {@link StartBundle}
     * or {@link FinishBundle} methods.
     */
    public abstract void output(OutputT output);
    /**
     * Adds the given element to the main output {@code PCollection}, with the given timestamp.
     *
     * <p>Once passed to {@code outputWithTimestamp} the element should not be modified in any way.
     *
     * <p>If invoked from {@link ProcessElement}, the timestamp must not be older than the input
     * element's timestamp minus {@link DoFn#getAllowedTimestampSkew}. The output element will be in
     * the same windows as the input element.
     *
     * <p>If invoked from {@link StartBundle} or {@link FinishBundle}, this will attempt to use the
     * {@link org.apache.beam.sdk.transforms.windowing.WindowFn} of the input {@code PCollection} to
     * determine what windows the element should be in, throwing an exception if the {@code
     * WindowFn} attempts to access any information about the input element except for the
     * timestamp.
     *
     * <p><i>Note:</i> A splittable {@link DoFn} is not allowed to output from {@link StartBundle}
     * or {@link FinishBundle} methods.
     */
    public abstract void outputWithTimestamp(OutputT output, Instant timestamp);
    /**
     * Adds the given element to the output {@code PCollection} with the given tag.
     *
     * <p>Once passed to {@code output} the element should not be modified in any way.
     *
     * <p>The caller of {@code ParDo} uses {@link ParDo.SingleOutput#withOutputTags} to specify the
     * tags of outputs that it consumes. Non-consumed outputs, e.g., outputs for monitoring purposes
     * only, don't necessarily need to be specified.
     *
     * <p>The output element will have the same timestamp and be in the same windows as the input
     * element passed to {@link ProcessElement}.
     *
     * <p>If invoked from {@link StartBundle} or {@link FinishBundle}, this will attempt to use the
     * {@link org.apache.beam.sdk.transforms.windowing.WindowFn} of the input {@code PCollection} to
     * determine what windows the element should be in, throwing an exception if the {@code
     * WindowFn} attempts to access any information about the input element. The output element will
     * have a timestamp of negative infinity.
     *
     * <p><i>Note:</i> A splittable {@link DoFn} is not allowed to output from {@link StartBundle}
     * or {@link FinishBundle} methods.
     *
     * @see ParDo.SingleOutput#withOutputTags
     */
    public abstract <T> void output(TupleTag<T> tag, T output);
    /**
     * Adds the given element to the specified output {@code PCollection}, with the given timestamp.
     *
     * <p>Once passed to {@code outputWithTimestamp} the element should not be modified in any way.
     *
     * <p>If invoked from {@link ProcessElement}, the timestamp must not be older than the input
     * element's timestamp minus {@link DoFn#getAllowedTimestampSkew}. The output element will be in
     * the same windows as the input element.
     *
     * <p>If invoked from {@link StartBundle} or {@link FinishBundle}, this will attempt to use the
     * {@link org.apache.beam.sdk.transforms.windowing.WindowFn} of the input {@code PCollection} to
     * determine what windows the element should be in, throwing an exception if the {@code
     * WindowFn} attempts to access any information about the input element except for the
     * timestamp.
     *
     * <p><i>Note:</i> A splittable {@link DoFn} is not allowed to output from {@link StartBundle}
     * or {@link FinishBundle} methods.
     *
     * @see ParDo.SingleOutput#withOutputTags
     */
    public abstract <T> void outputWithTimestamp(TupleTag<T> tag, T output, Instant timestamp);
  }
  /** Information accessible when running a {@link DoFn.ProcessElement} method. */
  public abstract class ProcessContext extends WindowedContext {
    /**
     * Returns the input element to be processed.
     *
     * <p>The element will not be changed -- it is safe to cache, etc. without copying.
     * Implementations of the {@link DoFn.ProcessElement} method should not mutate the element.
     */
    public abstract InputT element();
    /**
     * Returns the value of the side input.
     *
     * @param view the view of the side input to read
     * @throws IllegalArgumentException if this is not a side input
     * @see ParDo.SingleOutput#withSideInputs
     */
    public abstract <T> T sideInput(PCollectionView<T> view);
    /**
     * Returns the timestamp of the input element.
     *
     * <p>See {@link Window} for more information.
     */
    public abstract Instant timestamp();
    /**
     * Returns information about the pane within this window into which the input element has been
     * assigned.
     *
     * <p>Generally all data is in a single, uninteresting pane unless custom triggering and/or late
     * data has been explicitly requested. See {@link Window} for more information.
     */
    public abstract PaneInfo pane();
  }
  /** Information accessible when running a {@link DoFn.OnTimer} method. */
  @Experimental(Kind.TIMERS)
  public abstract class OnTimerContext extends WindowedContext {
    /** Returns the output timestamp of the current timer. */
    public abstract Instant timestamp();
    /** Returns the firing timestamp of the current timer. */
    public abstract Instant fireTimestamp();
    /** Returns the window in which the timer is firing. */
    public abstract BoundedWindow window();
    /** Returns the time domain of the current timer. */
    public abstract TimeDomain timeDomain();
  }
  /** Information accessible when running a {@link DoFn.OnWindowExpiration} method. */
  public abstract class OnWindowExpirationContext extends WindowedContext {
    /** Returns the window in which the window expiration is firing. */
    public abstract BoundedWindow window();
  }
  /**
   * Returns the allowed timestamp skew duration, which is the maximum duration that timestamps can
   * be shifted backward in {@link WindowedContext#outputWithTimestamp}.
   *
   * <p>The default value is {@code Duration.ZERO}, in which case timestamps can only be shifted
   * forward into the future. For infinite skew, return {@code Duration.millis(Long.MAX_VALUE)}.
   *
   * @deprecated This method permits a {@link DoFn} to emit elements behind the watermark. These
   *     elements are considered late, and if behind the {@link Window#withAllowedLateness(Duration)
   *     allowed lateness} of a downstream {@link PCollection} may be silently dropped. See
   *     https://issues.apache.org/jira/browse/BEAM-644 for details on a replacement.
   */
  @Deprecated
  public Duration getAllowedTimestampSkew() {
    return Duration.ZERO;
  }
/////////////////////////////////////////////////////////////////////////////
  /**
   * Returns a {@link TypeDescriptor} capturing what is known statically about the input type of
   * this {@code DoFn} instance's most-derived class.
   *
   * <p>See {@link #getOutputTypeDescriptor} for more discussion.
   *
   * @return a {@link TypeDescriptor} for {@code InputT}
   */
  public TypeDescriptor<InputT> getInputTypeDescriptor() {
    return new TypeDescriptor<InputT>(getClass()) {};
  }
  /**
   * Returns a {@link TypeDescriptor} capturing what is known statically about the output type of
   * this {@code DoFn} instance's most-derived class.
   *
   * <p>In the normal case of a concrete {@code DoFn} subclass with no generic type parameters of
   * its own (including anonymous inner classes), this will be a complete non-generic type, which is
   * good for choosing a default output {@code Coder<O>} for the output {@code PCollection<O>}.
   *
   * @return a {@link TypeDescriptor} for {@code OutputT}
   */
  public TypeDescriptor<OutputT> getOutputTypeDescriptor() {
    return new TypeDescriptor<OutputT>(getClass()) {};
  }
  /** Receives values of the given type. */
  public interface OutputReceiver<T> {
    /** Outputs the given value. */
    void output(T output);
    /** Outputs the given value with the provided timestamp. */
    void outputWithTimestamp(T output, Instant timestamp);
  }
  /** Receives tagged output for a multi-output function. */
  public interface MultiOutputReceiver {
    /** Returns an {@link OutputReceiver} for the given tag. */
    <T> OutputReceiver<T> get(TupleTag<T> tag);
    /**
     * Returns a {@link OutputReceiver} for publishing {@link Row} objects to the given tag.
     *
     * <p>The {@link PCollection} representing this tag must have a schema registered in order to
     * call this function.
     */
    @Experimental(Kind.SCHEMAS)
    <T> OutputReceiver<Row> getRowReceiver(TupleTag<T> tag);
  }
/////////////////////////////////////////////////////////////////////////////
  /**
   * Annotation for declaring and dereferencing state cells.
   *
   * <p>To declare a state cell, create a field of type {@link StateSpec} annotated with a {@link
   * StateId}. To use the cell during processing, add a parameter of the appropriate {@link State}
   * subclass to your {@link ProcessElement @ProcessElement} or {@link OnTimer @OnTimer} method, and
   * annotate it with {@link StateId}. See the following code for an example:
   *
   * <pre><code>{@literal new DoFn<KV<Key, Foo>, Baz>()} {
   *
   *  {@literal @StateId("my-state-id")}
   *  {@literal private final StateSpec<ValueState<MyState>>} myStateSpec =
   *       StateSpecs.value(new MyStateCoder());
   *
   *  {@literal @ProcessElement}
   *   public void processElement(
   *      {@literal @Element InputT element},
   *      {@literal @StateId("my-state-id") ValueState<MyState> myState}) {
   *     myState.read();
   *     myState.write(...);
   *   }
   * }
   * </code></pre>
   *
   * <p>State is subject to the following validity conditions:
   *
   * <ul>
   *   <li>Each state ID must be declared at most once.
   *   <li>Any state referenced in a parameter must be declared with the same state type.
   *   <li>State declarations must be final.
   * </ul>
   */
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
  @Target({ElementType.FIELD, ElementType.PARAMETER})
  @Experimental(Kind.STATE)
  public @interface StateId {
    /** The state ID. Must be declared at most once within a {@link DoFn}. */
    String value();
  }
/////////////////////////////////////////////////////////////////////////////
  /**
   * Annotation for declaring that a state parameter is always fetched.
   *
   * <p>A DoFn might not fetch a state value on every element, and for that reason runners may
   * choose to defer fetching state until {@code read()} is called. Annotating a state argument with
   * this parameter provides a hint to the runner that the state is always fetched. This may cause
   * the runner to prefetch all the state before calling the {@code processElement} or {@code
   * processTimer} method, improving performance. This is a performance-only hint - it does not
   * change semantics. See the following code for an example:
   *
   * <pre><code>{@literal new DoFn<KV<Key, Foo>, Baz>()} {
   *
   *  {@literal @StateId("my-state-id")}
   *  {@literal private final StateSpec<ValueState<MyState>>} myStateSpec =
   *       StateSpecs.value(new MyStateCoder());
   *
   *  {@literal @ProcessElement}
   *   public void processElement(
   *      {@literal @Element InputT element},
   *      {@literal @AlwaysFetched @StateId("my-state-id") ValueState<MyState> myState}) {
   *     myState.read();
   *     myState.write(...);
   *   }
   * }
   * </code></pre>
   *
   * <p>This can only be used on state objects that implement {@link
   * org.apache.beam.sdk.state.ReadableState}.
   */
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
  @Target({ElementType.FIELD, ElementType.PARAMETER})
  @Experimental(Kind.STATE)
  public @interface AlwaysFetched {}
  /**
   * Annotation for declaring and dereferencing timers.
   *
   * <p>To declare a timer, create a field of type {@link TimerSpec} annotated with a {@link
   * TimerId}. To use the cell during processing, add a parameter of the type {@link Timer} to your
   * {@link ProcessElement @ProcessElement} or {@link OnTimer @OnTimer} method, and annotate it with
   * {@link TimerId}. See the following code for an example:
   *
   * <pre><code>{@literal new DoFn<KV<Key, Foo>, Baz>()} {
   *   {@literal @TimerId("my-timer-id")}
   *   private final TimerSpec myTimer = TimerSpecs.timerForDomain(TimeDomain.EVENT_TIME);
   *
   *   {@literal @ProcessElement}
   *   public void processElement(
   *      {@literal @Element InputT element},
   *      {@literal @TimerId("my-timer-id") Timer myTimer}) {
   *     myTimer.offset(Duration.standardSeconds(...)).setRelative();
   *   }
   *
   *   {@literal @OnTimer("my-timer-id")}
   *   public void onMyTimer() {
   *     ...
   *   }
   * }</code></pre>
   *
   * <p>Timers are subject to the following validity conditions:
   *
   * <ul>
   *   <li>Each timer must have a distinct id.
   *   <li>Any timer referenced in a parameter must be declared.
   *   <li>Timer declarations must be final.
   *   <li>All declared timers must have a corresponding callback annotated with {@link
   *       OnTimer @OnTimer}.
   * </ul>
   */
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
  @Target({ElementType.FIELD, ElementType.PARAMETER})
  @Experimental(Kind.TIMERS)
  public @interface TimerId {
    /** The timer ID. Defaults to the empty string. */
    String value() default "";
  }
  /** Parameter annotation for the TimerMap for a {@link ProcessElement} method. */
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
  @Target({ElementType.FIELD, ElementType.PARAMETER})
  @Experimental(Kind.TIMERS)
  public @interface TimerFamily {
    /** The TimerMap tag ID. */
    String value();
  }
  /**
   * Parameter annotation for dereferencing the input element key in a {@link
   * org.apache.beam.sdk.values.KV} pair.
   */
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
  @Target(ElementType.PARAMETER)
  public @interface Key {}
  /** Annotation for specifying specific fields that are accessed in a Schema PCollection. */
  @Retention(RetentionPolicy.RUNTIME)
  @Target({ElementType.FIELD, ElementType.PARAMETER})
  @Experimental(Kind.SCHEMAS)
  public @interface FieldAccess {
    /** The field access ID. */
    String value();
  }
  /**
   * Annotation for registering a callback for a timer.
   *
   * <p>See the javadoc for {@link TimerId} for use in a full example.
   *
   * <p>The method annotated with {@code @OnTimer} may have parameters according to the same logic
   * as {@link ProcessElement}, but limited to the {@link BoundedWindow}, {@link State} subclasses,
   * and {@link Timer}. State and timer parameters must be annotated with their {@link StateId} and
   * {@link TimerId} respectively.
   */
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
  @Target(ElementType.METHOD)
  @Experimental(Kind.TIMERS)
  public @interface OnTimer {
    /** The timer ID. */
    String value();
  }
  /**
   * Annotation for registering a callback for a timerFamily.
   *
   * <p>See the javadoc for {@link TimerFamily} for use in a full example.
   *
   * <p>The method annotated with {@code @OnTimerFamily} may have parameters according to the same
   * logic as {@link ProcessElement}, but limited to the {@link BoundedWindow}, {@link State}
   * subclasses, and {@link org.apache.beam.sdk.state.TimerMap}. State and timer parameters must be
   * annotated with their {@link StateId} and {@link TimerId} respectively.
   */
  @Documented
  @Retention(Retention.RUNTIME)
  @Target(ElementType.METHOD)
  @Experimental(Kind.TIMERS)
  public @interface OnTimerFamily {
    /** The timer family ID. */
    String value();
  }
  /**
   * Annotation for the method to use for performing actions on window expiration. For example,
   * users can use this annotation to write a method that extracts a value saved in a state before
   * it gets garbage collected on window expiration.
   *
   * <p>The method annotated with {@code @OnWindowExpiration} may have parameters according to the
   * same logic as {@link OnTimer}. See the following code for an example:
   *
   * <pre><code>{@literal new DoFn<KV<Key, Foo>, Baz>()} {
   *
   *   {@literal @ProcessElement}
   *   public void processElement(ProcessContext c) {
   *   }
   *
   *   {@literal @OnWindowExpiration}
   *   public void onWindowExpiration() {
   *     ...
   *   }
   * }</code></pre>
   *
   * @see OnTimer
   */
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
  @Target(ElementType.METHOD)
  @Experimental(Kind.STATE)
  public @interface OnWindowExpiration {}
  /**
   * Annotation for the method to use to prepare an instance for processing bundles of elements.
   *
   * <p>This is a good place to initialize transient in-memory resources, such as network
   * connections. The resources can then be disposed in {@link Teardown}.
   *
   * <p>This is <b>not</b> a good place to perform external side-effects that later need cleanup,
   * e.g. creating temporary files on distributed filesystems, starting VMs, or initiating data
   * export jobs. Such logic must be instead implemented purely via {@link StartBundle}, {@link
   * ProcessElement} and {@link FinishBundle} methods, references to the objects requiring cleanup
   * must be passed as {@link PCollection} elements, and they must be cleaned up via regular Beam
   * transforms, e.g. see the {@link Wait} transform.
   *
   * <p>The method annotated with this must satisfy the following constraints:
   *
   * <ul>
   *   <li>It must have zero arguments.
   * </ul>
   *
   * @see Teardown
   */
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
  @Target(ElementType.METHOD)
  public @interface Setup {}
  /**
   * Annotation for the method to use to prepare an instance for processing a bundle of elements.
   * The method annotated with this must satisfy the following constraints:
   *
   * <ul>
   *   <li>If one of the parameters is of type {@link DoFn.StartBundleContext}, then it will be
   *       passed a context object for the current execution.
   *   <li>If one of the parameters is of type {@link PipelineOptions}, then it will be passed the
   *       options for the current pipeline.
   *   <li>If one of the parameters is of type {@link BundleFinalizer}, then it will be passed a
   *       mechanism to register a callback that will be invoked after the runner successfully
   *       commits the output of this bundle. See <a
   *       href="https://s.apache.org/beam-finalizing-bundles">Apache Beam Portability API: How to
   *       Finalize Bundles</a> for further details.
   * </ul>
   */
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
  @Target(ElementType.METHOD)
  public @interface StartBundle {}
  /**
   * Annotation for the method to use for processing elements. A subclass of {@link DoFn} must have
   * a method with this annotation.
   *
   * <p>If any of the arguments is a {@link RestrictionTracker} then see the specifications below
   * about splittable {@link DoFn}, otherwise this method must satisfy the following constraints:
   *
   * <ul>
   *   <li>If one of its arguments is tagged with the {@link Element} annotation, then it will be
   *       passed the current element being processed; the argument type must match the input type
   *       of this DoFn.
   *   <li>If one of its arguments is tagged with the {@link Timestamp} annotation, then it will be
   *       passed the timestamp of the current element being processed; the argument must be of type
   *       {@link Instant}.
   *   <li>If one of its arguments is a subtype of {@link BoundedWindow}, then it will be passed the
   *       window of the current element. When applied by {@link ParDo} the subtype of {@link
   *       BoundedWindow} must match the type of windows on the input {@link PCollection}. If the
   *       window is not accessed a runner may perform additional optimizations.
   *   <li>If one of its arguments is of type {@link PaneInfo}, then it will be passed information
   *       about the current triggering pane.
   *   <li>If one of the parameters is of type {@link PipelineOptions}, then it will be passed the
   *       options for the current pipeline.
   *   <li>If one of the parameters is of type {@link OutputReceiver}, then it will be passed an
   *       output receiver for outputting elements to the default output.
   *   <li>If one of the parameters is of type {@link MultiOutputReceiver}, then it will be passed
   *       an output receiver for outputting to multiple tagged outputs.
   *   <li>If one of the parameters is of type {@link BundleFinalizer}, then it will be passed a
   *       mechanism to register a callback that will be invoked after the runner successfully
   *       commits the output of this bundle. See <a
   *       href="https://s.apache.org/beam-finalizing-bundles">Apache Beam Portability API: How to
   *       Finalize Bundles</a> for further details.
   *   <li>It must return {@code void}.
   * </ul>
   *
   * <h2>Splittable DoFns</h2>
   *
   * <p>A {@link DoFn} is <i>splittable</i> if its {@link ProcessElement} method has a parameter
   * whose type is of {@link RestrictionTracker}. This is an advanced feature and an overwhelming
   * majority of users will never need to write a splittable {@link DoFn}.
   *
   * <p>Not all runners support Splittable DoFn. See the <a
   * href="https://beam.apache.org/documentation/runners/capability-matrix/">capability matrix</a>.
   *
   * <p>See <a href="https://s.apache.org/splittable-do-fn">the proposal</a> for an overview of the
   * involved concepts (<i>splittable DoFn</i>, <i>restriction</i>, <i>restriction tracker</i>).
   *
   * <p>A splittable {@link DoFn} must obey the following constraints:
   *
   * <ul>
   *   <li>The type of restrictions used by all of these methods must be the same.
   *   <li>It <i>must</i> define a {@link GetInitialRestriction} method.
   *   <li>It <i>may</i> define a {@link GetSize} method or ensure that the {@link
   *       RestrictionTracker} implements {@link RestrictionTracker.HasProgress}. Poor auto-scaling
   *       of workers and/or splitting may result if size or progress is an inaccurate
   *       representation of work. See {@link GetSize} and {@link RestrictionTracker.HasProgress}
   *       for further details.
   *   <li>It <i>should</i> define a {@link SplitRestriction} method. This method enables runners to
   *       perform bulk splitting initially allowing for a rapid increase in parallelism. See {@link
   *       RestrictionTracker#trySplit} for details about splitting when the current element and
   *       restriction are actively being processed.
   *   <li>It <i>may</i> define a {@link NewTracker} method returning a subtype of {@code
   *       RestrictionTracker<R>} where {@code R} is the restriction type returned by {@link
   *       GetInitialRestriction}. This method is optional only if the restriction type returned by
   *       {@link GetInitialRestriction} implements {@link HasDefaultTracker}.
   *   <li>It <i>may</i> define a {@link GetRestrictionCoder} method.
   *   <li>It <i>may</i> define a {@link GetInitialWatermarkEstimatorState} method. If none is
   *       defined then the watermark estimator state is of type {@link Void}.
   *   <li>It <i>may</i> define a {@link GetWatermarkEstimatorStateCoder} method.
   *   <li>It <i>may</i> define a {@link NewWatermarkEstimator} method returning a subtype of {@code
   *       WatermarkEstimator<W>} where {@code W} is the watermark estimator state type returned by
   *       {@link GetInitialWatermarkEstimatorState}. This method is optional only if {@link
   *       GetInitialWatermarkEstimatorState} has not been defined or {@code W} implements {@link
   *       HasDefaultWatermarkEstimator}.
   *   <li>The {@link DoFn} itself <i>may</i> be annotated with {@link BoundedPerElement} or {@link
   *       UnboundedPerElement}, but not both at the same time. If it's not annotated with either of
   *       these, it's assumed to be {@link BoundedPerElement} if its {@link ProcessElement} method
   *       returns {@code void} and {@link UnboundedPerElement} if it returns a {@link
   *       ProcessContinuation}.
   *   <li>Timers and state must not be used.
   * </ul>
   *
   * <p>If this DoFn is splittable, this method must satisfy the following constraints:
   *
   * <ul>
   *   <li>One of its arguments must be a {@link RestrictionTracker}. The argument must be of the
   *       exact type {@code RestrictionTracker<RestrictionT, PositionT>}.
   *   <li>If one of its arguments is tagged with the {@link Element} annotation, then it will be
   *       passed the current element being processed; the argument type must match the input type
   *       of this DoFn.
   *   <li>If one of its arguments is tagged with the {@link Restriction} annotation, then it will
   *       be passed the current restriction being processed; the argument must be of type {@code
   *       RestrictionT}.
   *   <li>If one of its arguments is tagged with the {@link Timestamp} annotation, then it will be
   *       passed the timestamp of the current element being processed; the argument must be of type
   *       {@link Instant}.
   *   <li>If one of its arguments is of the type {@link WatermarkEstimator}, then it will be passed
   *       the watermark estimator.
   *   <li>If one of its arguments is of the type {@link ManualWatermarkEstimator}, then it will be
   *       passed a watermark estimator that can be updated manually. This parameter can only be
   *       supplied if the method annotated with {@link GetInitialWatermarkEstimatorState} returns a
   *       sub-type of {@link ManualWatermarkEstimator}.
   *   <li>If one of its arguments is a subtype of {@link BoundedWindow}, then it will be passed the
   *       window of the current element. When applied by {@link ParDo} the subtype of {@link
   *       BoundedWindow} must match the type of windows on the input {@link PCollection}. If the
   *       window is not accessed a runner may perform additional optimizations.
   *   <li>If one of its arguments is of type {@link PaneInfo}, then it will be passed information
   *       about the current triggering pane.
   *   <li>If one of the parameters is of type {@link PipelineOptions}, then it will be passed the
   *       options for the current pipeline.
   *   <li>If one of the parameters is of type {@link OutputReceiver}, then it will be passed an
   *       output receiver for outputting elements to the default output.
   *   <li>If one of the parameters is of type {@link MultiOutputReceiver}, then it will be passed
   *       an output receiver for outputting to multiple tagged outputs.
   *   <li>If one of the parameters is of type {@link BundleFinalizer}, then it will be passed a
   *       mechanism to register a callback that will be invoked after the runner successfully
   *       commits the output of this bundle. See <a
   *       href="https://s.apache.org/beam-finalizing-bundles">Apache Beam Portability API: How to
   *       Finalize Bundles</a> for further details.
   *   <li>May return a {@link ProcessContinuation} to indicate whether there is more work to be
   *       done for the current element, otherwise must return {@code void}.
   * </ul>
   */
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
  @Target(ElementType.METHOD)
  public @interface ProcessElement {}
  /**
   * Parameter annotation for the input element for {@link ProcessElement}, {@link
   * GetInitialRestriction}, {@link GetSize}, {@link SplitRestriction}, {@link
   * GetInitialWatermarkEstimatorState}, {@link NewWatermarkEstimator}, and {@link NewTracker}
   * methods. The annotated argument's type must match the input type of this {@link DoFn}.
   */
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
  @Target(ElementType.PARAMETER)
  public @interface Element {}
  /**
   * Parameter annotation for the restriction for {@link GetSize}, {@link SplitRestriction}, {@link
   * GetInitialWatermarkEstimatorState}, {@link NewWatermarkEstimator}, and {@link NewTracker}
   * methods. The annotated argument's type must match the restriction type returned by the method
   * annotated with {@link GetInitialRestriction}.
   */
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
  @Target(ElementType.PARAMETER)
  public @interface Restriction {}
  /**
   * Parameter annotation for the input element timestamp for {@link ProcessElement}, {@link
   * GetInitialRestriction}, {@link GetSize}, {@link SplitRestriction}, {@link
   * GetInitialWatermarkEstimatorState}, {@link NewWatermarkEstimator}, and {@link NewTracker}
   * methods. The annotated argument must be of type {@link Instant}.
   */
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
  @Target(ElementType.PARAMETER)
  public @interface Timestamp {}
  /** Parameter annotation for the side input of a {@link ProcessElement} method. */
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
  @Target(ElementType.PARAMETER)
  public @interface SideInput {
    /** The SideInput tag ID. */
    String value();
  }
  /**
   * <b><i>Experimental - no backwards compatibility guarantees. The exact name or usage of this
   * feature may change.</i></b>
   *
   * <p>Annotation that may be added to a {@link ProcessElement}, {@link OnTimer}, or {@link
   * OnWindowExpiration} method to indicate that the runner must ensure that the observable contents
   * of the input {@link PCollection} or mutable state are stable upon retries.
   *
   * <p>This is important for sinks, which must ensure exactly-once semantics when writing to a
   * storage medium outside of your pipeline. A general pattern for a basic sink is to write a
   * {@link DoFn} that can perform an idempotent write, and annotate that it requires stable input.
   * Combined, these allow the write to be freely retried until success.
   *
   * <p>An example of an unstable input would be anything computed using nondeterministic logic. In
   * Beam, any user-defined function is permitted to be nondeterministic, and any {@link
   * PCollection} is permitted to be recomputed in any manner.
   */
  @Documented
  @Experimental
  @Retention(RetentionPolicy.RUNTIME)
  @Target(ElementType.METHOD)
  public @interface RequiresStableInput {}
  /**
   * <b><i>Experimental - no backwards compatibility guarantees. The exact name or usage of this
   * feature may change.</i></b>
   *
   * <p>Annotation that may be added to a {@link ProcessElement} method to indicate that the runner
   * must ensure that the observable contents of the input {@link PCollection} are sorted by time,
   * in ascending order. The time ordering is defined by element's timestamp, ordering of elements
   * with equal timestamps is not defined.
   *
   * <p>Note that this annotation makes sense only for stateful {@code ParDo}s, because outcome of
   * stateless functions cannot depend on the ordering.
   *
   * <p>This annotation respects specified <i>allowedLateness</i> defined in {@link
   * WindowingStrategy}. All data is emitted <b>after</b> input watermark passes element's timestamp
   * + allowedLateness. The output watermark is held back, so that the emitted data is not emitted
   * as late data.
   *
   * <p>The ordering requirement implies that all data that arrives later than the allowed lateness
   * will have to be dropped. This might change in the future with introduction of retractions.
   */
  @Documented
  @Experimental
  @Retention(RetentionPolicy.RUNTIME)
  @Target(ElementType.METHOD)
  public @interface RequiresTimeSortedInput {}
  /**
   * Annotation for the method to use to finish processing a bundle of elements. The method
   * annotated with this must satisfy the following constraints:
   *
   * <ul>
   *   <li>If one of the parameters is of type {@link DoFn.FinishBundleContext}, then it will be
   *       passed a context object for the current execution.
   *   <li>If one of the parameters is of type {@link PipelineOptions}, then it will be passed the
   *       options for the current pipeline.
   *   <li>If one of the parameters is of type {@link BundleFinalizer}, then it will be passed a
   *       mechanism to register a callback that will be invoked after the runner successfully
   *       commits the output of this bundle. See <a
   *       href="https://s.apache.org/beam-finalizing-bundles">Apache Beam Portability API: How to
   *       Finalize Bundles</a> for further details.
   *   <li>TODO(BEAM-1287): Add support for an {@link OutputReceiver} and {@link
   *       MultiOutputReceiver} that can output to a window.
   * </ul>
   *
   * <p>Note that {@link FinishBundle @FinishBundle} is invoked before the runner commits the output
   * while {@link BundleFinalizer.Callback bundle finalizer callbacks} are invoked after the runner
   * has committed the output of a successful bundle.
   */
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
  @Target(ElementType.METHOD)
  public @interface FinishBundle {}
  /**
   * Annotation for the method to use to clean up this instance before it is discarded. No other
   * method on the instance will be called after the annotated method has been called.
   *
   * <p>A runner will do its best to call this method on any given instance to prevent leaks of
   * transient resources, however, there may be situations where this is impossible (e.g. process
   * crash, hardware failure, etc.) or unnecessary (e.g. the pipeline is shutting down and the
   * process is about to be killed anyway, so all transient resources will be released automatically
   * by the OS). In these cases, the call may not happen. It will also not be retried, because in
   * such situations the DoFn instance no longer exists, so there's no instance to retry it on.
   *
   * <p>Thus, all work that depends on input elements, and all externally important side effects,
   * must be performed in the {@link ProcessElement} or {@link FinishBundle} methods.
   *
   * <p>Example things that are a good idea to do in this method:
   *
   * <ul>
   *   <li>Close a network connection that was opened in {@link Setup}
   *   <li>Shut down a helper process that was started in {@link Setup}
   * </ul>
   *
   * <p>Example things that MUST NOT be done in this method:
   *
   * <ul>
   *   <li>Flushing a batch of buffered records to a database: this must be done in {@link
   *       FinishBundle}.
   *   <li>Deleting temporary files on a distributed filesystem: this must be done using the
   *       pipeline structure, e.g. using the {@link Wait} transform.
   * </ul>
   *
   * <p>The method annotated with this must satisfy the following constraint:
   *
   * <ul>
   *   <li>It must have zero arguments.
   * </ul>
   */
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
  @Target(ElementType.METHOD)
  public @interface Teardown {}
  /**
   * Annotation for the method that maps an element to an initial restriction for a <a
   * href="https://s.apache.org/splittable-do-fn">splittable</a> {@link DoFn}.
   *
   * <p>Signature: {@code RestrictionT getInitialRestriction(<arguments>);}
   *
   * <p>This method must satisfy the following constraints:
   *
   * <ul>
   *   <li>The return type {@code RestrictionT} defines the restriction type used within this
   *       splittable DoFn. All other methods that use a {@link Restriction @Restriction} parameter
   *       must use the same type that is used here. It is suggested to use as narrow of a return
   *       type definition as possible (for example, prefer to use a square type over a shape type
   *       as a square is a type of a shape).
   *   <li>If one of its arguments is tagged with the {@link Element} annotation, then it will be
   *       passed the current element being processed; the argument must be of type {@code InputT}.
   *       Note that automatic conversion of {@link Row}s and {@link FieldAccess} parameters are
   *       currently unsupported.
   *   <li>If one of its arguments is tagged with the {@link Timestamp} annotation, then it will be
   *       passed the timestamp of the current element being processed; the argument must be of type
   *       {@link Instant}.
   *   <li>If one of its arguments is a subtype of {@link BoundedWindow}, then it will be passed the
   *       window of the current element. When applied by {@link ParDo} the subtype of {@link
   *       BoundedWindow} must match the type of windows on the input {@link PCollection}. If the
   *       window is not accessed a runner may perform additional optimizations.
   *   <li>If one of its arguments is of type {@link PaneInfo}, then it will be passed information
   *       about the current triggering pane.
   *   <li>If one of the parameters is of type {@link PipelineOptions}, then it will be passed the
   *       options for the current pipeline.
   * </ul>
   */
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
  @Target(ElementType.METHOD)
  @Experimental(Kind.SPLITTABLE_DO_FN)
  public @interface GetInitialRestriction {}
  /**
   * Annotation for the method that returns the corresponding size for an element and restriction
   * pair.
   *
   * <p>Signature: {@code double getSize(<arguments>);}
   *
   * <p>This method must satisfy the following constraints:
   *
   * <ul>
   *   <li>If one of its arguments is tagged with the {@link Element} annotation, then it will be
   *       passed the current element being processed; the argument must be of type {@code InputT}.
   *       Note that automatic conversion of {@link Row}s and {@link FieldAccess} parameters are
   *       currently unsupported.
   *   <li>If one of its arguments is tagged with the {@link Restriction} annotation, then it will
   *       be passed the current restriction being processed; the argument must be of type {@code
   *       RestrictionT}.
   *   <li>If one of its arguments is tagged with the {@link Timestamp} annotation, then it will be
   *       passed the timestamp of the current element being processed; the argument must be of type
   *       {@link Instant}.
   *   <li>If one of its arguments is a {@link RestrictionTracker}, then it will be passed a tracker
   *       that is initialized for the current {@link Restriction}. The argument must be of the
   *       exact type {@code RestrictionTracker<RestrictionT, PositionT>}.
   *   <li>If one of its arguments is a subtype of {@link BoundedWindow}, then it will be passed the
   *       window of the current element. When applied by {@link ParDo} the subtype of {@link
   *       BoundedWindow} must match the type of windows on the input {@link PCollection}. If the
   *       window is not accessed a runner may perform additional optimizations.
   *   <li>If one of its arguments is of type {@link PaneInfo}, then it will be passed information
   *       about the current triggering pane.
   *   <li>If one of the parameters is of type {@link PipelineOptions}, then it will be passed the
   *       options for the current pipeline.
   * </ul>
   *
   * <p>Returns a double representing the size of the current element and restriction.
   *
   * <p>Splittable {@link DoFn}s should only provide this method if the default {@link
   * RestrictionTracker.HasProgress} implementation within the {@link RestrictionTracker} is an
   * inaccurate representation of known work.
   *
   * <p>It is up to each splittable {@link DoFn} to convert between their natural representation of
   * outstanding work and this representation. For example:
   *
   * <ul>
   *   <li>Block based file source (e.g. Avro): The number of bytes that will be read from the file.
   *   <li>Pull based queue based source (e.g. Pubsub): The local/global size available in number of
   *       messages or number of {@code message bytes} that have not been processed.
   *   <li>Key range based source (e.g. Shuffle, Bigtable, ...): Typically {@code 1.0} unless
   *       additional details such as the number of bytes for keys and values is known for the key
   *       range.
   * </ul>
   */
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
  @Target(ElementType.METHOD)
  @Experimental(Kind.SPLITTABLE_DO_FN)
  public @interface GetSize {}
  /**
   * Annotation for the method that returns the coder to use for the restriction of a <a
   * href="https://s.apache.org/splittable-do-fn">splittable</a> {@link DoFn}.
   *
   * <p>If not defined, a coder will be inferred automatically using standard coder inference rules
   * and the pipeline's {@link Pipeline#getCoderRegistry coder registry}.
   *
   * <p>This method will be called only at pipeline construction time.
   *
   * <p>Signature: {@code Coder<RestrictionT> getRestrictionCoder();}
   */
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
  @Target(ElementType.METHOD)
  @Experimental(Kind.SPLITTABLE_DO_FN)
  public @interface GetRestrictionCoder {}
  /**
   * Annotation for the method that splits restriction of a <a
   * href="https://s.apache.org/splittable-do-fn">splittable</a> {@link DoFn} into multiple parts to
   * be processed in parallel.
   *
   * <p>This method is used to perform bulk splitting while a restriction is not actively being
   * processed, whereas {@link RestrictionTracker#trySplit} is used to perform splitting during
   * processing.
   *
   * <p>Signature: {@code void splitRestriction(<arguments>);}
   *
   * <p>This method must satisfy the following constraints:
   *
   * <ul>
   *   <li>If one of the arguments is of type {@link OutputReceiver}, then it will be passed an
   *       output receiver for outputting the splits. All splits must be output through this
   *       parameter.
   *   <li>If one of its arguments is tagged with the {@link Element} annotation, then it will be
   *       passed the current element being processed; the argument must be of type {@code InputT}.
   *       Note that automatic conversion of {@link Row}s and {@link FieldAccess} parameters are
   *       currently unsupported.
   *   <li>If one of its arguments is tagged with the {@link Restriction} annotation, then it will
   *       be passed the current restriction being processed; the argument must be of type {@code
   *       RestrictionT}.
   *   <li>If one of its arguments is tagged with the {@link Timestamp} annotation, then it will be
   *       passed the timestamp of the current element being processed; the argument must be of type
   *       {@link Instant}.
   *   <li>If one of its arguments is a {@link RestrictionTracker}, then it will be passed a tracker
   *       that is initialized for the current {@link Restriction}. The argument must be of the
   *       exact type {@code RestrictionTracker<RestrictionT, PositionT>}.
   *   <li>If one of its arguments is a subtype of {@link BoundedWindow}, then it will be passed the
   *       window of the current element. When applied by {@link ParDo} the subtype of {@link
   *       BoundedWindow} must match the type of windows on the input {@link PCollection}. If the
   *       window is not accessed a runner may perform additional optimizations.
   *   <li>If one of its arguments is of type {@link PaneInfo}, then it will be passed information
   *       about the current triggering pane.
   *   <li>If one of the parameters is of type {@link PipelineOptions}, then it will be passed the
   *       options for the current pipeline.
   * </ul>
   */
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
  @Target(ElementType.METHOD)
  @Experimental(Kind.SPLITTABLE_DO_FN)
  public @interface SplitRestriction {}
  /**
   * Annotation for the method that creates a new {@link RestrictionTracker} for the restriction of
   * a <a href="https://s.apache.org/splittable-do-fn">splittable</a> {@link DoFn}.
   *
   * <p>Signature: {@code MyRestrictionTracker newTracker(<optional arguments>);}
   *
   * <p>This method must satisfy the following constraints:
   *
   * <ul>
   *   <li>The return type must be a subtype of {@code RestrictionTracker<RestrictionT, PositionT>}.
   *       It is suggested to use as narrow of a return type definition as possible (for example,
   *       prefer to use a square type over a shape type as a square is a type of a shape).
   *   <li>If one of its arguments is tagged with the {@link Element} annotation, then it will be
   *       passed the current element being processed; the argument must be of type {@code InputT}.
   *       Note that automatic conversion of {@link Row}s and {@link FieldAccess} parameters are
   *       currently unsupported.
   *   <li>If one of its arguments is tagged with the {@link Restriction} annotation, then it will
   *       be passed the current restriction being processed; the argument must be of type {@code
   *       RestrictionT}.
   *   <li>If one of its arguments is tagged with the {@link Timestamp} annotation, then it will be
   *       passed the timestamp of the current element being processed; the argument must be of type
   *       {@link Instant}.
   *   <li>If one of its arguments is a subtype of {@link BoundedWindow}, then it will be passed the
   *       window of the current element. When applied by {@link ParDo} the subtype of {@link
   *       BoundedWindow} must match the type of windows on the input {@link PCollection}. If the
   *       window is not accessed a runner may perform additional optimizations.
   *   <li>If one of its arguments is of type {@link PaneInfo}, then it will be passed information
   *       about the current triggering pane.
   *   <li>If one of the parameters is of type {@link PipelineOptions}, then it will be passed the
   *       options for the current pipeline.
   * </ul>
   */
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
  @Target(ElementType.METHOD)
  @Experimental(Kind.SPLITTABLE_DO_FN)
  public @interface NewTracker {}
  /**
   * Annotation for the method that maps an element and restriction to initial watermark estimator
   * state for a <a href="https://s.apache.org/splittable-do-fn">splittable</a> {@link DoFn}.
   *
   * <p>Signature: {@code WatermarkEstimatorStateT getInitialWatermarkState(<arguments>);}
   *
   * <p>This method must satisfy the following constraints:
   *
   * <ul>
   *   <li>The return type {@code WatermarkEstimatorStateT} defines the watermark state type used
   *       within this splittable DoFn. All other methods that use a {@link
   *       WatermarkEstimatorState @WatermarkEstimatorState} parameter must use the same type that
   *       is used here. It is suggested to use as narrow of a return type definition as possible
   *       (for example, prefer to use a square type over a shape type as a square is a type of a
   *       shape).
   *   <li>If one of its arguments is tagged with the {@link Element} annotation, then it will be
   *       passed the current element being processed; the argument must be of type {@code InputT}.
   *       Note that automatic conversion of {@link Row}s and {@link FieldAccess} parameters are
   *       currently unsupported.
   *   <li>If one of its arguments is tagged with the {@link Restriction} annotation, then it will
   *       be passed the current restriction being processed; the argument must be of type {@code
   *       RestrictionT}.
   *   <li>If one of its arguments is tagged with the {@link Timestamp} annotation, then it will be
   *       passed the timestamp of the current element being processed; the argument must be of type
   *       {@link Instant}.
   *   <li>If one of its arguments is a subtype of {@link BoundedWindow}, then it will be passed the
   *       window of the current element. When applied by {@link ParDo} the subtype of {@link
   *       BoundedWindow} must match the type of windows on the input {@link PCollection}. If the
   *       window is not accessed a runner may perform additional optimizations.
   *   <li>If one of its arguments is of type {@link PaneInfo}, then it will be passed information
   *       about the current triggering pane.
   *   <li>If one of the parameters is of type {@link PipelineOptions}, then it will be passed the
   *       options for the current pipeline.
   * </ul>
   */
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
  @Target(ElementType.METHOD)
  @Experimental(Kind.SPLITTABLE_DO_FN)
  public @interface GetInitialWatermarkEstimatorState {}
  /**
   * Annotation for the method that returns the coder to use for the watermark estimator state of a
   * <a href="https://s.apache.org/splittable-do-fn">splittable</a> {@link DoFn}.
   *
   * <p>If not defined, a coder will be inferred automatically using standard coder inference rules
   * and the pipeline's {@link Pipeline#getCoderRegistry coder registry}.
   *
   * <p>This method will be called only at pipeline construction time.
   *
   * <p>Signature: {@code Coder<WatermarkEstimatorStateT> getWatermarkEstimatorStateCoder();}
   */
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
  @Target(ElementType.METHOD)
  @Experimental(Kind.SPLITTABLE_DO_FN)
  public @interface GetWatermarkEstimatorStateCoder {}
  /**
   * Annotation for the method that creates a new {@link WatermarkEstimator} for the watermark state
   * of a <a href="https://s.apache.org/splittable-do-fn">splittable</a> {@link DoFn}.
   *
   * <p>Signature: {@code MyWatermarkEstimator newWatermarkEstimator(<optional arguments>);}
   *
   * <p>If the return type is a subtype of {@link TimestampObservingWatermarkEstimator} then the
   * timestamp of each element output from this DoFn is provided to the watermark estimator.
   *
   * <p>This method must satisfy the following constraints:
   *
   * <ul>
   *   <li>The return type must be a subtype of {@code
   *       WatermarkEstimator<WatermarkEstimatorStateT>}. It is suggested to use as narrow of a
   *       return type definition as possible (for example, prefer to use a square type over a shape
   *       type as a square is a type of a shape).
   *   <li>If one of its arguments is tagged with the {@link WatermarkEstimatorState} annotation,
   *       then it will be passed the current watermark estimator state; the argument must be of
   *       type {@code WatermarkEstimatorStateT}.
   *   <li>If one of its arguments is tagged with the {@link Element} annotation, then it will be
   *       passed the current element being processed; the argument must be of type {@code InputT}.
   *       Note that automatic conversion of {@link Row}s and {@link FieldAccess} parameters are
   *       currently unsupported.
   *   <li>If one of its arguments is tagged with the {@link Restriction} annotation, then it will
   *       be passed the current restriction being processed; the argument must be of type {@code
   *       RestrictionT}.
   *   <li>If one of its arguments is tagged with the {@link Timestamp} annotation, then it will be
   *       passed the timestamp of the current element being processed; the argument must be of type
   *       {@link Instant}.
   *   <li>If one of its arguments is a subtype of {@link BoundedWindow}, then it will be passed the
   *       window of the current element. When applied by {@link ParDo} the subtype of {@link
   *       BoundedWindow} must match the type of windows on the input {@link PCollection}. If the
   *       window is not accessed a runner may perform additional optimizations.
   *   <li>If one of its arguments is of type {@link PaneInfo}, then it will be passed information
   *       about the current triggering pane.
   *   <li>If one of the parameters is of type {@link PipelineOptions}, then it will be passed the
   *       options for the current pipeline.
   * </ul>
   */
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
  @Target(ElementType.METHOD)
  @Experimental(Kind.SPLITTABLE_DO_FN)
  public @interface NewWatermarkEstimator {}
  /**
   * Parameter annotation for the watermark estimator state for the {@link NewWatermarkEstimator}
   * method. Must match the return type of the method annotated with {@link
   * GetInitialWatermarkEstimatorState}.
   */
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
  @Target(ElementType.PARAMETER)
  @Experimental(Kind.SPLITTABLE_DO_FN)
  public @interface WatermarkEstimatorState {}
  /**
   * Annotation on a <a href="https://s.apache.org/splittable-do-fn">splittable</a> {@link DoFn}
   * specifying that the {@link DoFn} performs a bounded amount of work per input element, so
   * applying it to a bounded {@link PCollection} will also produce a bounded {@link PCollection}.
   * It is an error to specify this on a non-splittable {@link DoFn}.
   */
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
  @Target(ElementType.TYPE)
  @Experimental(Kind.SPLITTABLE_DO_FN)
  public @interface BoundedPerElement {}
  /**
   * Annotation on a <a href="https://s.apache.org/splittable-do-fn">splittable</a> {@link DoFn}
   * specifying that the {@link DoFn} performs an unbounded amount of work per input element, so
   * applying it to a bounded {@link PCollection} will produce an unbounded {@link PCollection}. It
   * is an error to specify this on a non-splittable {@link DoFn}.
   *
   * <p>See {@link BoundedPerElement} for the bounded counterpart.
   */
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
  @Target(ElementType.TYPE)
  @Experimental(Kind.SPLITTABLE_DO_FN)
  public @interface UnboundedPerElement {}
  // Shared singleton returned by ProcessContinuation.stop(). This can't be put into
  // ProcessContinuation itself due to the following problem (a static-initialization
  // deadlock involving the AutoValue-generated subclass):
  // http://ternarysearch.blogspot.com/2013/07/static-initialization-deadlock.html
  private static final ProcessContinuation PROCESS_CONTINUATION_STOP =
      new AutoValue_DoFn_ProcessContinuation(false, Duration.ZERO);
  /**
   * When used as a return value of {@link ProcessElement}, indicates whether there is more work to
   * be done for the current element.
   *
   * <p>If the {@link ProcessElement} call completes because of a failed {@code tryClaim()} call on
   * the {@link RestrictionTracker}, then the call MUST return {@link #stop()}.
   */
  @Experimental(Kind.SPLITTABLE_DO_FN)
  @AutoValue
  public abstract static class ProcessContinuation {

    /** Indicates that there is no more work to be done for the current element. */
    public static ProcessContinuation stop() {
      // Returns a shared singleton; see PROCESS_CONTINUATION_STOP for why the
      // constant lives outside this class.
      return PROCESS_CONTINUATION_STOP;
    }

    /** Indicates that there is more work to be done for the current element. */
    public static ProcessContinuation resume() {
      // shouldResume = true, with the default resume delay of zero.
      return new AutoValue_DoFn_ProcessContinuation(true, Duration.ZERO);
    }

    /**
     * If false, the {@link DoFn} promises that there is no more work remaining for the current
     * element, so the runner should not resume the {@link ProcessElement} call.
     */
    public abstract boolean shouldResume();

    /**
     * A minimum duration that should elapse between the end of this {@link ProcessElement} call and
     * the {@link ProcessElement} call continuing processing of the same element. By default, zero.
     */
    public abstract Duration resumeDelay();

    /** Builder method to set the value of {@link #resumeDelay()}. */
    public ProcessContinuation withResumeDelay(Duration resumeDelay) {
      return new AutoValue_DoFn_ProcessContinuation(shouldResume(), resumeDelay);
    }
  }
  /**
   * Finalize the {@link DoFn} construction to prepare for processing. This method should be called
   * by runners before any processing methods.
   *
   * @deprecated use {@link Setup} or {@link StartBundle} instead. This method will be removed in a
   *     future release.
   */
  @Deprecated
  public final void prepareForProcessing() {
    // Intentionally a no-op; retained only for backwards compatibility with runners
    // that still invoke it.
  }
  /**
   * {@inheritDoc}
   *
   * <p>By default, does not register any display data. Implementors may override this method to
   * provide their own display data.
   */
  @Override
  public void populateDisplayData(DisplayData.Builder builder) {
    // No-op by default; subclasses override to register display data.
  }
  /**
   * A parameter that is accessible during {@link StartBundle @StartBundle}, {@link
   * ProcessElement @ProcessElement} and {@link FinishBundle @FinishBundle} that allows the caller
   * to register a callback that will be invoked after the bundle has been successfully completed
   * and the runner has committed the output.
   *
   * <p>A common usage would be to perform any acknowledgements required by an external system such
   * as acking messages from a message queue since this callback is only invoked after the output of
   * the bundle has been durably persisted by the runner.
   *
   * <p>Note that a runner may make the output of the bundle available immediately to downstream
   * consumers without waiting for finalization to succeed. For pipelines that are sensitive to
   * duplicate messages, they must perform output deduplication in the pipeline.
   */
  @Experimental(Kind.PORTABILITY)
  public interface BundleFinalizer {

    /**
     * The provided function will be called after the runner successfully commits the output of a
     * successful bundle. Throwing during finalization represents that bundle finalization may have
     * failed and the runner may choose to attempt finalization again. The provided {@code
     * callbackExpiry} controls how long the finalization is valid for before it is garbage
     * collected and no longer able to be invoked.
     *
     * <p>Note that finalization is best effort and it is expected that the external system will
     * self recover state if finalization never happens or consistently fails. For example, a queue
     * based system that requires message acknowledgement would replay messages if that
     * acknowledgement was never received within the provided time bound.
     *
     * <p>See <a href="https://s.apache.org/beam-finalizing-bundles">Apache Beam Portability API:
     * How to Finalize Bundles</a> for further details.
     *
     * @param callbackExpiry When the finalization callback expires. If the runner cannot commit
     *     results and execute the callback within this duration, the callback will not be invoked.
     * @param callback The finalization callback method for the runner to invoke after processing
     *     results have been successfully committed.
     */
    void afterBundleCommit(Instant callbackExpiry, Callback callback);

    /**
     * An instance of a function that will be invoked after bundle finalization.
     *
     * <p>Note that this function should maintain all state necessary outside of a DoFn's context to
     * be able to perform bundle finalization and should not rely on mutable state stored within a
     * DoFn instance.
     */
    @FunctionalInterface
    interface Callback {
      void onBundleSuccess() throws Exception;
    }
  }
}
|
[BEAM-9217] Update DoFn javadoc for schema type translation (#11984)
|
sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/DoFn.java
|
[BEAM-9217] Update DoFn javadoc for schema type translation (#11984)
|
|
Java
|
apache-2.0
|
4d6278974b2a8d9f130fefd1b42ff5263e9259c2
| 0
|
avano/fabric8,jboss-fuse/fuse,gnodet/fuse,jimmidyson/fabric8,sobkowiak/fabric8,mwringe/fabric8,aslakknutsen/fabric8,dhirajsb/fabric8,gnodet/fuse,rhuss/fabric8,rhuss/fabric8,joelschuster/fuse,gashcrumb/fabric8,zmhassan/fabric8,migue/fabric8,rajdavies/fabric8,dhirajsb/fabric8,punkhorn/fabric8,rmarting/fuse,jludvice/fabric8,tadayosi/fuse,tadayosi/fuse,mwringe/fabric8,janstey/fuse-1,EricWittmann/fabric8,jludvice/fabric8,chirino/fabric8v2,opensourceconsultant/fuse,punkhorn/fuse,migue/fabric8,janstey/fabric8,christian-posta/fabric8,KurtStam/fabric8,KurtStam/fabric8,punkhorn/fuse,PhilHardwick/fabric8,hekonsek/fabric8,janstey/fabric8,sobkowiak/fabric8,jimmidyson/fabric8,gashcrumb/fabric8,jimmidyson/fabric8,zmhassan/fabric8,avano/fabric8,rmarting/fuse,jludvice/fabric8,chirino/fabric8v2,mwringe/fabric8,janstey/fuse,hekonsek/fabric8,zmhassan/fabric8,dhirajsb/fuse,EricWittmann/fabric8,gnodet/fuse,chirino/fabric8v2,cunningt/fuse,KurtStam/fabric8,migue/fabric8,punkhorn/fabric8,jonathanchristison/fabric8,chirino/fabric8,zmhassan/fabric8,janstey/fuse-1,dejanb/fuse,gnodet/fuse,hekonsek/fabric8,EricWittmann/fabric8,ffang/fuse-1,rnc/fabric8,migue/fabric8,christian-posta/fabric8,hekonsek/fabric8,rnc/fabric8,Jitendrakry/fuse,janstey/fuse,janstey/fuse,jonathanchristison/fabric8,sobkowiak/fabric8,christian-posta/fabric8,janstey/fuse,gashcrumb/fabric8,joelschuster/fuse,jludvice/fabric8,mwringe/fabric8,christian-posta/fabric8,rhuss/fabric8,KurtStam/fabric8,sobkowiak/fuse,jonathanchristison/fabric8,rnc/fabric8,rajdavies/fabric8,janstey/fuse-1,joelschuster/fuse,rhuss/fabric8,jimmidyson/fabric8,ffang/fuse-1,rnc/fabric8,ffang/fuse-1,jboss-fuse/fuse,avano/fabric8,EricWittmann/fabric8,jimmidyson/fabric8,aslakknutsen/fabric8,jonathanchristison/fabric8,chirino/fuse,aslakknutsen/fabric8,gashcrumb/fabric8,dejanb/fuse,chirino/fabric8,chirino/fabric8,dhirajsb/fabric8,sobkowiak/fabric8,rnc/fabric8,chirino/fuse,punkhorn/fabric8,punkhorn/fabric8,opensourceconsultant/fuse,PhilHardwick/fabric8,PhilHardwick/
fabric8,hekonsek/fabric8,dejanb/fuse,cunningt/fuse,PhilHardwick/fabric8,dhirajsb/fabric8,chirino/fabric8,opensourceconsultant/fuse,rajdavies/fabric8,chirino/fabric8v2,rajdavies/fabric8,jboss-fuse/fuse,Jitendrakry/fuse,janstey/fabric8,sobkowiak/fuse,rmarting/fuse,avano/fabric8,dhirajsb/fuse
|
/**
* Copyright (C) 2010, FuseSource Corp. All rights reserved.
* http://fusesource.com
*
* The software in this package is published under the terms of the
* AGPL license a copy of which has been included with this distribution
* in the license.txt file.
*/
package org.fusesource.fabric.api;
import java.io.Serializable;
/**
 * Arguments for creating a new agent via JClouds.
 *
 * <p>Plain serializable value holder describing the node to provision: the JClouds
 * provider, image/hardware/location template, instance type, the fabric group the
 * agent joins, and the provider credentials.
 */
public class CreateJCloudsAgentArguments implements CreateAgentArguments, Serializable {
    private static final long serialVersionUID = 1L;

    // Whether the agent should be started with debugging enabled.
    private boolean debugAgent;
    // JClouds node template coordinates.
    private String imageId;
    private String hardwareId;
    private String locationId;
    // Fabric group the new agent joins.
    private String group;
    // User on the provisioned node — presumably the login/run-as account; TODO confirm.
    private String user;
    // Name of the JClouds compute provider.
    private String providerName;
    private JCloudsInstanceType instanceType;
    // Provider credentials; deliberately excluded from toString() to avoid leaking secrets.
    private String identity;
    private String credential;
    private String owner;

    @Override
    public String toString() {
        // Fix: providerName and owner were missing from the original toString();
        // identity and credential are intentionally omitted because they are secrets.
        return "CreateJCloudsAgentArguments{" +
                "imageId='" + imageId + '\'' +
                ", hardwareId='" + hardwareId + '\'' +
                ", locationId='" + locationId + '\'' +
                ", group='" + group + '\'' +
                ", user='" + user + '\'' +
                ", providerName='" + providerName + '\'' +
                ", instanceType='" + instanceType + '\'' +
                ", owner='" + owner + '\'' +
                '}';
    }

    public boolean isDebugAgent() {
        return debugAgent;
    }

    public void setDebugAgent(boolean debugAgent) {
        this.debugAgent = debugAgent;
    }

    public String getImageId() {
        return imageId;
    }

    public void setImageId(String imageId) {
        this.imageId = imageId;
    }

    public String getHardwareId() {
        return hardwareId;
    }

    public void setHardwareId(String hardwareId) {
        this.hardwareId = hardwareId;
    }

    public String getLocationId() {
        return locationId;
    }

    public void setLocationId(String locationId) {
        this.locationId = locationId;
    }

    public String getGroup() {
        return group;
    }

    public void setGroup(String group) {
        this.group = group;
    }

    public String getUser() {
        return user;
    }

    public void setUser(String user) {
        this.user = user;
    }

    public String getProviderName() {
        return providerName;
    }

    public void setProviderName(String providerName) {
        this.providerName = providerName;
    }

    public JCloudsInstanceType getInstanceType() {
        return instanceType;
    }

    public void setInstanceType(JCloudsInstanceType instanceType) {
        this.instanceType = instanceType;
    }

    public String getCredential() {
        return credential;
    }

    public void setCredential(String credential) {
        this.credential = credential;
    }

    public String getIdentity() {
        return identity;
    }

    public void setIdentity(String identity) {
        this.identity = identity;
    }

    public String getOwner() {
        return owner;
    }

    public void setOwner(String owner) {
        this.owner = owner;
    }
}
|
fabric-core/src/main/scala/org/fusesource/fabric/api/CreateJCloudsAgentArguments.java
|
/**
* Copyright (C) 2010, FuseSource Corp. All rights reserved.
* http://fusesource.com
*
* The software in this package is published under the terms of the
* AGPL license a copy of which has been included with this distribution
* in the license.txt file.
*/
package org.fusesource.fabric.api;
import java.io.Serializable;
/**
 * Arguments for creating a new agent via JClouds.
 *
 * <p>Plain serializable value holder describing the node to provision: the JClouds
 * image/hardware/location template, instance type, provider name, and the fabric
 * group the agent joins.
 */
public class CreateJCloudsAgentArguments implements CreateAgentArguments, Serializable {
    private static final long serialVersionUID = 1L;

    // Whether the agent should be started with debugging enabled.
    private boolean debugAgent;
    // JClouds node template coordinates.
    private String imageId;
    private String hardwareId;
    private String locationId;
    // Fabric group the new agent joins.
    private String group;
    // User on the provisioned node — presumably the login/run-as account; TODO confirm.
    private String user;
    // Name of the JClouds compute provider.
    private String providerName;
    private JCloudsInstanceType instanceType;

    @Override
    public String toString() {
        // NOTE(review): providerName is not included here — verify whether that is intentional.
        return "CreateJCloudsAgentArguments{" +
                "imageId='" + imageId + '\'' +
                ", hardwareId='" + hardwareId + '\'' +
                ", locationId='" + locationId + '\'' +
                ", group='" + group + '\'' +
                ", user='" + user + '\'' +
                ", instanceType='" + instanceType + '\'' +
                '}';
    }

    public boolean isDebugAgent() {
        return debugAgent;
    }

    public void setDebugAgent(boolean debugAgent) {
        this.debugAgent = debugAgent;
    }

    public String getImageId() {
        return imageId;
    }

    public void setImageId(String imageId) {
        this.imageId = imageId;
    }

    public String getHardwareId() {
        return hardwareId;
    }

    public void setHardwareId(String hardwareId) {
        this.hardwareId = hardwareId;
    }

    public String getLocationId() {
        return locationId;
    }

    public void setLocationId(String locationId) {
        this.locationId = locationId;
    }

    public String getGroup() {
        return group;
    }

    public void setGroup(String group) {
        this.group = group;
    }

    public String getUser() {
        return user;
    }

    public void setUser(String user) {
        this.user = user;
    }

    public String getProviderName() {
        return providerName;
    }

    public void setProviderName(String providerName) {
        this.providerName = providerName;
    }

    public JCloudsInstanceType getInstanceType() {
        return instanceType;
    }

    public void setInstanceType(JCloudsInstanceType instanceType) {
        this.instanceType = instanceType;
    }
}
|
allow the jclouds provider to work without having prior ComputeService instances wired into osgi
|
fabric-core/src/main/scala/org/fusesource/fabric/api/CreateJCloudsAgentArguments.java
|
allow the jclouds provider to work without having prior ComputeService instances wired into osgi
|
|
Java
|
apache-2.0
|
3f242e4205de5cce987f93f58b6ef5a679d6d785
| 0
|
crate/crate,EvilMcJerkface/crate,crate/crate,crate/crate,EvilMcJerkface/crate,EvilMcJerkface/crate
|
/*
* Licensed to Crate under one or more contributor license agreements.
* See the NOTICE file distributed with this work for additional
* information regarding copyright ownership. Crate licenses this file
* to you under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may
* obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License.
*
* However, if you have executed another commercial license agreement
* with Crate these terms will supersede the license and you may use the
* software solely pursuant to the terms of the relevant commercial
* agreement.
*/
package io.crate.planner.operators;
import com.google.common.annotations.VisibleForTesting;
import io.crate.analyze.OrderBy;
import io.crate.analyze.relations.AnalyzedRelation;
import io.crate.collections.Lists2;
import io.crate.data.Row;
import io.crate.execution.dsl.phases.HashJoinPhase;
import io.crate.execution.dsl.phases.MergePhase;
import io.crate.execution.dsl.projection.EvalProjection;
import io.crate.execution.dsl.projection.builder.InputColumns;
import io.crate.execution.dsl.projection.builder.ProjectionBuilder;
import io.crate.execution.engine.join.JoinOperations;
import io.crate.execution.engine.pipeline.TopN;
import io.crate.expression.symbol.SelectSymbol;
import io.crate.expression.symbol.Symbol;
import io.crate.expression.symbol.Symbols;
import io.crate.planner.ExecutionPlan;
import io.crate.planner.PlannerContext;
import io.crate.planner.ResultDescription;
import io.crate.planner.TableStats;
import io.crate.planner.distribution.DistributionInfo;
import io.crate.planner.distribution.DistributionType;
import io.crate.planner.node.dql.join.Join;
import io.crate.planner.node.dql.join.JoinType;
import org.elasticsearch.common.collect.Tuple;
import javax.annotation.Nullable;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import static io.crate.planner.operators.LogicalPlanner.NO_LIMIT;
/**
 * Logical plan operator for an inner equi-join executed with a hash-join algorithm.
 *
 * <p>Joins the outputs of {@code lhs} and {@code rhs}; the join outputs are the
 * concatenation of both sides' outputs. The smaller side (by expected row count)
 * is moved to the right before building the execution plan.
 */
class HashJoin extends TwoInputPlan {

    private final Symbol joinCondition;
    private final TableStats tableStats;

    // Relation whose join symbols are separated out first when splitting the hash
    // symbols per side; see extractHashJoinSymbolsFromJoinSymbolsAndSplitPerSide().
    @VisibleForTesting
    final AnalyzedRelation concreteRelation;

    HashJoin(LogicalPlan lhs,
             LogicalPlan rhs,
             Symbol joinCondition,
             AnalyzedRelation concreteRelation,
             TableStats tableStats) {
        super(lhs, rhs, new ArrayList<>());
        this.concreteRelation = concreteRelation;
        this.joinCondition = joinCondition;
        // Join outputs = left outputs followed by right outputs.
        this.outputs.addAll(lhs.outputs());
        this.outputs.addAll(rhs.outputs());
        this.tableStats = tableStats;
    }

    /** Always an inner join. */
    JoinType joinType() {
        return JoinType.INNER;
    }

    Symbol joinCondition() {
        return joinCondition;
    }

    /**
     * Union of both sides' dependencies. Each side's {@code dependencies()} is
     * invoked only once in case it is computed rather than a simple property.
     */
    @Override
    public Map<LogicalPlan, SelectSymbol> dependencies() {
        Map<LogicalPlan, SelectSymbol> leftDeps = lhs.dependencies();
        Map<LogicalPlan, SelectSymbol> rightDeps = rhs.dependencies();
        HashMap<LogicalPlan, SelectSymbol> deps = new HashMap<>(leftDeps.size() + rightDeps.size());
        deps.putAll(leftDeps);
        deps.putAll(rightDeps);
        return deps;
    }

    /**
     * Builds the physical {@link Join} plan with a {@link HashJoinPhase}.
     *
     * <p>Both sides are built without limit/offset/order (those are applied above the
     * join). The smaller side is switched to the right, the join is distributed by
     * modulo distribution when possible, otherwise it runs on the handler node.
     */
    @Override
    public ExecutionPlan build(PlannerContext plannerContext,
                               ProjectionBuilder projectionBuilder,
                               int limit,
                               int offset,
                               @Nullable OrderBy order,
                               @Nullable Integer pageSizeHint,
                               Row params,
                               Map<SelectSymbol, Object> subQueryValues) {
        ExecutionPlan leftExecutionPlan = lhs.build(
            plannerContext, projectionBuilder, NO_LIMIT, 0, null, null, params, subQueryValues);
        ExecutionPlan rightExecutionPlan = rhs.build(
            plannerContext, projectionBuilder, NO_LIMIT, 0, null, null, params, subQueryValues);

        LogicalPlan leftLogicalPlan = lhs;
        LogicalPlan rightLogicalPlan = rhs;
        boolean tablesSwitched = false;
        // We move smaller table to the right side since benchmarking
        // revealed that this improves performance in most cases.
        if (lhs.numExpectedRows() < rhs.numExpectedRows()) {
            tablesSwitched = true;
            leftLogicalPlan = rhs;
            rightLogicalPlan = lhs;

            ExecutionPlan tmp = leftExecutionPlan;
            leftExecutionPlan = rightExecutionPlan;
            rightExecutionPlan = tmp;
        }

        Tuple<List<Symbol>, List<Symbol>> hashSymbols =
            extractHashJoinSymbolsFromJoinSymbolsAndSplitPerSide(tablesSwitched);

        ResultDescription leftResultDesc = leftExecutionPlan.resultDescription();
        ResultDescription rightResultDesc = rightExecutionPlan.resultDescription();
        Collection<String> joinExecutionNodes = leftResultDesc.nodeIds();

        List<Symbol> leftOutputs = leftLogicalPlan.outputs();
        List<Symbol> rightOutputs = rightLogicalPlan.outputs();
        MergePhase leftMerge = null;
        MergePhase rightMerge = null;

        // We can only run the join distributed if no remaining limit or offset must be applied on the source relations.
        // Because on distributed joins, every join is running on a slice (modulo) set of the data and so no limit/offset
        // could be applied. Limit/offset can only be applied on the whole data set after all partial rows from the
        // shards are merged
        boolean isDistributed = leftResultDesc.hasRemainingLimitOrOffset() == false
                                && rightResultDesc.hasRemainingLimitOrOffset() == false;

        if (joinExecutionNodes.size() == 1
            && joinExecutionNodes.equals(rightResultDesc.nodeIds())
            && !rightResultDesc.hasRemainingLimitOrOffset()) {
            // If the left and the right plan are executed on the same single node the mergePhase
            // should be omitted. This is the case if the left and right table have only one shards which
            // are on the same node
            leftExecutionPlan.setDistributionInfo(DistributionInfo.DEFAULT_SAME_NODE);
            rightExecutionPlan.setDistributionInfo(DistributionInfo.DEFAULT_SAME_NODE);
        } else {
            if (isDistributed) {
                // Run the join distributed by modulo distribution algorithm
                leftOutputs = setModuloDistribution(hashSymbols.v1(), leftLogicalPlan.outputs(), leftExecutionPlan);
                rightOutputs = setModuloDistribution(hashSymbols.v2(), rightLogicalPlan.outputs(), rightExecutionPlan);
            } else {
                // Run the join non-distributed on the handler node
                joinExecutionNodes = Collections.singletonList(plannerContext.handlerNode());
                leftExecutionPlan.setDistributionInfo(DistributionInfo.DEFAULT_BROADCAST);
                rightExecutionPlan.setDistributionInfo(DistributionInfo.DEFAULT_BROADCAST);
            }
            leftMerge = JoinOperations.buildMergePhaseForJoin(plannerContext, leftResultDesc, joinExecutionNodes);
            rightMerge = JoinOperations.buildMergePhaseForJoin(plannerContext, rightResultDesc, joinExecutionNodes);
        }

        List<Symbol> joinOutputs = Lists2.concat(leftOutputs, rightOutputs);
        HashJoinPhase joinPhase = new HashJoinPhase(
            plannerContext.jobId(),
            plannerContext.nextExecutionPhaseId(),
            "hash-join",
            Collections.singletonList(JoinOperations.createJoinProjection(outputs, joinOutputs)),
            leftMerge,
            rightMerge,
            leftOutputs.size(),
            rightOutputs.size(),
            joinExecutionNodes,
            InputColumns.create(joinCondition, joinOutputs),
            InputColumns.create(hashSymbols.v1(), new InputColumns.SourceSymbols(leftOutputs)),
            InputColumns.create(hashSymbols.v2(), new InputColumns.SourceSymbols(rightOutputs)),
            Symbols.typeView(leftOutputs),
            leftLogicalPlan.estimatedRowSize(),
            leftLogicalPlan.numExpectedRows());
        return new Join(
            joinPhase,
            leftExecutionPlan,
            rightExecutionPlan,
            TopN.NO_LIMIT,
            0,
            TopN.NO_LIMIT,
            outputs.size(),
            null
        );
    }

    /**
     * Extracts the equi-join symbols from the join condition and splits them into
     * a (left, right) tuple matching the (possibly switched) plan sides.
     */
    private Tuple<List<Symbol>, List<Symbol>> extractHashJoinSymbolsFromJoinSymbolsAndSplitPerSide(boolean switchedTables) {
        Map<AnalyzedRelation, List<Symbol>> hashJoinSymbols = HashJoinConditionSymbolsExtractor.extract(joinCondition);
        // First extract the symbols that belong to the concrete relation
        List<Symbol> hashJoinSymbolsForConcreteRelation = hashJoinSymbols.remove(concreteRelation);
        // All leftover extracted symbols belong to the other relation which might be a
        // "concrete" relation too but can already be a tree of relation.
        List<Symbol> hashJoinSymbolsForJoinTree =
            hashJoinSymbols.values().stream().flatMap(List::stream).collect(Collectors.toList());
        if (switchedTables) {
            return new Tuple<>(hashJoinSymbolsForConcreteRelation, hashJoinSymbolsForJoinTree);
        }
        return new Tuple<>(hashJoinSymbolsForJoinTree, hashJoinSymbolsForConcreteRelation);
    }

    /** Recreates this plan with new sources; the join condition and relation are kept. */
    @Override
    protected LogicalPlan updateSources(LogicalPlan newLeftSource, LogicalPlan newRightSource) {
        return new HashJoin(newLeftSource, newRightSource, joinCondition, concreteRelation, tableStats);
    }

    @Override
    public long numExpectedRows() {
        // We don't have any cardinality estimates, so just take the bigger table
        return Math.max(lhs.numExpectedRows(), rhs.numExpectedRows());
    }

    @Override
    public long estimatedRowSize() {
        return lhs.estimatedRowSize() + rhs.estimatedRowSize();
    }

    @Override
    public <C, R> R accept(LogicalPlanVisitor<C, R> visitor, C context) {
        return visitor.visitHashJoin(this, context);
    }

    /**
     * Configures modulo distribution on {@code executionPlan}, keyed by the first
     * join symbol. If that symbol is not a direct output, an eval projection is
     * added so the distribution column exists, appended at index
     * {@code planOutputs.size()}.
     */
    private List<Symbol> setModuloDistribution(List<Symbol> joinSymbols,
                                               List<Symbol> planOutputs,
                                               ExecutionPlan executionPlan) {
        List<Symbol> outputs = planOutputs;
        Symbol firstJoinSymbol = joinSymbols.get(0);
        int distributeBySymbolPos = planOutputs.indexOf(firstJoinSymbol);
        if (distributeBySymbolPos < 0) {
            // Looks like a function symbol, it must be evaluated BEFORE distribution
            outputs = createEvalProjectionForDistributionJoinSymbol(firstJoinSymbol, planOutputs, executionPlan);
            distributeBySymbolPos = planOutputs.size();
        }
        executionPlan.setDistributionInfo(new DistributionInfo(DistributionType.MODULO, distributeBySymbolPos));
        return outputs;
    }

    /** Appends {@code firstJoinSymbol} to the outputs via an eval projection on {@code executionPlan}. */
    private List<Symbol> createEvalProjectionForDistributionJoinSymbol(Symbol firstJoinSymbol,
                                                                       List<Symbol> outputs,
                                                                       ExecutionPlan executionPlan) {
        List<Symbol> projectionOutputs = new ArrayList<>(outputs.size() + 1);
        projectionOutputs.addAll(outputs);
        projectionOutputs.add(firstJoinSymbol);
        EvalProjection evalProjection = new EvalProjection(InputColumns.create(projectionOutputs, new InputColumns.SourceSymbols(outputs)));
        executionPlan.addProjection(evalProjection);
        return projectionOutputs;
    }
}
|
sql/src/main/java/io/crate/planner/operators/HashJoin.java
|
/*
* Licensed to Crate under one or more contributor license agreements.
* See the NOTICE file distributed with this work for additional
* information regarding copyright ownership. Crate licenses this file
* to you under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may
* obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License.
*
* However, if you have executed another commercial license agreement
* with Crate these terms will supersede the license and you may use the
* software solely pursuant to the terms of the relevant commercial
* agreement.
*/
package io.crate.planner.operators;
import com.google.common.annotations.VisibleForTesting;
import io.crate.analyze.OrderBy;
import io.crate.analyze.relations.AnalyzedRelation;
import io.crate.collections.Lists2;
import io.crate.data.Row;
import io.crate.execution.dsl.phases.HashJoinPhase;
import io.crate.execution.dsl.phases.MergePhase;
import io.crate.execution.dsl.projection.EvalProjection;
import io.crate.execution.dsl.projection.builder.InputColumns;
import io.crate.execution.dsl.projection.builder.ProjectionBuilder;
import io.crate.execution.engine.join.JoinOperations;
import io.crate.execution.engine.pipeline.TopN;
import io.crate.expression.symbol.SelectSymbol;
import io.crate.expression.symbol.Symbol;
import io.crate.expression.symbol.Symbols;
import io.crate.planner.ExecutionPlan;
import io.crate.planner.PlannerContext;
import io.crate.planner.ResultDescription;
import io.crate.planner.TableStats;
import io.crate.planner.distribution.DistributionInfo;
import io.crate.planner.distribution.DistributionType;
import io.crate.planner.node.dql.join.Join;
import io.crate.planner.node.dql.join.JoinType;
import org.elasticsearch.common.collect.Tuple;
import javax.annotation.Nullable;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import static io.crate.planner.operators.LogicalPlanner.NO_LIMIT;
class HashJoin extends TwoInputPlan {
private final Symbol joinCondition;
private final TableStats tableStats;
@VisibleForTesting
final AnalyzedRelation concreteRelation;
HashJoin(LogicalPlan lhs,
LogicalPlan rhs,
Symbol joinCondition,
AnalyzedRelation concreteRelation,
TableStats tableStats) {
super(lhs, rhs, new ArrayList<>());
this.concreteRelation = concreteRelation;
this.joinCondition = joinCondition;
this.outputs.addAll(lhs.outputs());
this.outputs.addAll(rhs.outputs());
this.tableStats = tableStats;
}
JoinType joinType() {
return JoinType.INNER;
}
Symbol joinCondition() {
return joinCondition;
}
@Override
public Map<LogicalPlan, SelectSymbol> dependencies() {
HashMap<LogicalPlan, SelectSymbol> deps = new HashMap<>(lhs.dependencies().size() + rhs.dependencies().size());
deps.putAll(lhs.dependencies());
deps.putAll(rhs.dependencies());
return deps;
}
@Override
public ExecutionPlan build(PlannerContext plannerContext,
ProjectionBuilder projectionBuilder,
int limit,
int offset,
@Nullable OrderBy order,
@Nullable Integer pageSizeHint,
Row params,
Map<SelectSymbol, Object> subQueryValues) {
ExecutionPlan leftExecutionPlan = lhs.build(
plannerContext, projectionBuilder, NO_LIMIT, 0, null, null, params, subQueryValues);
ExecutionPlan rightExecutionPlan = rhs.build(
plannerContext, projectionBuilder, NO_LIMIT, 0, null, null, params, subQueryValues);
LogicalPlan leftLogicalPlan = lhs;
LogicalPlan rightLogicalPlan = rhs;
boolean tablesSwitched = false;
// We move smaller table to the right side since benchmarking
// revealed that this improves performance in most cases.
if (lhs.numExpectedRows() < rhs.numExpectedRows()) {
tablesSwitched = true;
leftLogicalPlan = rhs;
rightLogicalPlan = lhs;
ExecutionPlan tmp = leftExecutionPlan;
leftExecutionPlan = rightExecutionPlan;
rightExecutionPlan = tmp;
}
Tuple<List<Symbol>, List<Symbol>> hashSymbols =
extractHashJoinSymbolsFromJoinSymbolsAndSplitPerSide(tablesSwitched);
ResultDescription leftResultDesc = leftExecutionPlan.resultDescription();
ResultDescription rightResultDesc = rightExecutionPlan.resultDescription();
Collection<String> joinExecutionNodes = leftResultDesc.nodeIds();
List<Symbol> leftOutputs = leftLogicalPlan.outputs();
List<Symbol> rightOutputs = rightLogicalPlan.outputs();
MergePhase leftMerge = null;
MergePhase rightMerge = null;
// We can only run the join distributed if no remaining limit or offset must be applied on the source relations.
// Because on distributed joins, every join is running on a slice (modulo) set of the data and so no limit/offset
// could be applied. Limit/offset can only be applied on the whole data set after all partial rows from the
// shards are merged
boolean isDistributed = leftResultDesc.hasRemainingLimitOrOffset() == false
&& rightResultDesc.hasRemainingLimitOrOffset() == false;
if (joinExecutionNodes.size() == 1
&& joinExecutionNodes.equals(rightResultDesc.nodeIds())
&& !rightResultDesc.hasRemainingLimitOrOffset()) {
// If the left and the right plan are executed on the same single node the mergePhase
// should be omitted. This is the case if the left and right table have only one shards which
// are on the same node
leftExecutionPlan.setDistributionInfo(DistributionInfo.DEFAULT_SAME_NODE);
rightExecutionPlan.setDistributionInfo(DistributionInfo.DEFAULT_SAME_NODE);
} else {
if (isDistributed) {
// Run the join distributed by modulo distribution algorithm
leftOutputs = setModuloDistribution(hashSymbols.v1(), leftLogicalPlan.outputs(), leftExecutionPlan);
rightOutputs = setModuloDistribution(hashSymbols.v2(), rightLogicalPlan.outputs(), rightExecutionPlan);
} else {
// Run the join non-distributed on the handler node
joinExecutionNodes = Collections.singletonList(plannerContext.handlerNode());
leftExecutionPlan.setDistributionInfo(DistributionInfo.DEFAULT_BROADCAST);
rightExecutionPlan.setDistributionInfo(DistributionInfo.DEFAULT_BROADCAST);
}
leftMerge = JoinOperations.buildMergePhaseForJoin(plannerContext, leftResultDesc, joinExecutionNodes);
rightMerge = JoinOperations.buildMergePhaseForJoin(plannerContext, rightResultDesc, joinExecutionNodes);
}
List<Symbol> joinOutputs = Lists2.concat(leftOutputs, rightOutputs);
HashJoinPhase joinPhase = new HashJoinPhase(
plannerContext.jobId(),
plannerContext.nextExecutionPhaseId(),
"hash-join",
Collections.singletonList(JoinOperations.createJoinProjection(outputs, joinOutputs)),
leftMerge,
rightMerge,
leftOutputs.size(),
rightOutputs.size(),
joinExecutionNodes,
InputColumns.create(joinCondition, joinOutputs),
InputColumns.create(hashSymbols.v1(), new InputColumns.SourceSymbols(leftOutputs)),
InputColumns.create(hashSymbols.v2(), new InputColumns.SourceSymbols(rightOutputs)),
Symbols.typeView(leftOutputs),
leftLogicalPlan.estimatedRowSize(),
leftLogicalPlan.numExpectedRows());
return new Join(
joinPhase,
leftExecutionPlan,
rightExecutionPlan,
TopN.NO_LIMIT,
0,
TopN.NO_LIMIT,
outputs.size(),
null
);
}
private Tuple<List<Symbol>, List<Symbol>> extractHashJoinSymbolsFromJoinSymbolsAndSplitPerSide(boolean switchedTables) {
Map<AnalyzedRelation, List<Symbol>> hashJoinSymbols = HashJoinConditionSymbolsExtractor.extract(joinCondition);
// First extract the symbols that belong to the concrete relation
List<Symbol> hashJoinSymbolsForConcreteRelation = hashJoinSymbols.remove(concreteRelation);
// All leftover extracted symbols belong to the other relation which might be a
// "concrete" relation too but can already be a tree of relation.
List<Symbol> hashJoinSymbolsForJoinTree =
hashJoinSymbols.values().stream().flatMap(List::stream).collect(Collectors.toList());
if (switchedTables) {
return new Tuple<>(hashJoinSymbolsForConcreteRelation, hashJoinSymbolsForJoinTree);
}
return new Tuple<>(hashJoinSymbolsForJoinTree, hashJoinSymbolsForConcreteRelation);
}
@Override
protected LogicalPlan updateSources(LogicalPlan newLeftSource, LogicalPlan newRightSource) {
return new HashJoin(newLeftSource, newRightSource, joinCondition, concreteRelation, tableStats);
}
@Override
public long numExpectedRows() {
// We don't have any cardinality estimates, so just take the bigger table
return Math.max(lhs.numExpectedRows(), rhs.numExpectedRows());
}
@Override
public long estimatedRowSize() {
return lhs.estimatedRowSize() + rhs.estimatedRowSize();
}
@Override
public <C, R> R accept(LogicalPlanVisitor<C, R> visitor, C context) {
return visitor.visitHashJoin(this, context);
}
/**
 * Configures MODULO distribution on the given execution plan, distributing by
 * the first join symbol.
 *
 * @param joinSymbols   hash-join symbols of this side; only the first is used
 * @param planOutputs   current output symbols of the execution plan
 * @param executionPlan plan to configure (mutated: distribution info and
 *                      possibly an eval projection are added)
 * @return the (possibly extended) output symbols of the plan
 */
private List<Symbol> setModuloDistribution(List<Symbol> joinSymbols,
                                           List<Symbol> planOutputs,
                                           ExecutionPlan executionPlan) {
    List<Symbol> outputs = planOutputs;
    Symbol firstJoinSymbol = joinSymbols.get(0);
    int distributeBySymbolPos = planOutputs.indexOf(firstJoinSymbol);
    if (distributeBySymbolPos < 0) {
        // Looks like a function symbol, it must be evaluated BEFORE distribution
        outputs = createEvalProjectionForDistributionJoinSymbol(firstJoinSymbol, planOutputs, executionPlan);
        // The eval projection appends the symbol to the existing outputs, so
        // its position equals the previous output count.
        distributeBySymbolPos = planOutputs.size();
    }
    executionPlan.setDistributionInfo(new DistributionInfo(DistributionType.MODULO, distributeBySymbolPos));
    return outputs;
}
/**
 * Adds an eval projection that appends {@code firstJoinSymbol} to the plan's
 * outputs so it can be evaluated before distribution.
 */
private List<Symbol> createEvalProjectionForDistributionJoinSymbol(Symbol firstJoinSymbol,
                                                                   List<Symbol> outputs,
                                                                   ExecutionPlan executionPlan) {
    // New output list = old outputs + the join symbol at the end.
    List<Symbol> extendedOutputs = new ArrayList<>(outputs.size() + 1);
    extendedOutputs.addAll(outputs);
    extendedOutputs.add(firstJoinSymbol);
    InputColumns.SourceSymbols sources = new InputColumns.SourceSymbols(outputs);
    executionPlan.addProjection(new EvalProjection(InputColumns.create(extendedOutputs, sources)));
    return extendedOutputs;
}
}
|
Avoid duplicate method calls in HashJoin.dependencies
Avoids duplicate allocations or calculations in case `dependencies` is
not just a simple property.
|
sql/src/main/java/io/crate/planner/operators/HashJoin.java
|
Avoid duplicate method calls in HashJoin.dependencies
|
|
Java
|
apache-2.0
|
353f3c408d85b6634e21da61a8fde7e9e85bcfbc
| 0
|
litesuits/android-common
|
package com.litesuits.common.data.cipher;
import com.litesuits.common.assist.Base64;
/**
* @author MaTianyu
* @date 14-7-31
*/
public class Base64Cipher extends Cipher {
    // Optional inner cipher. Encrypt applies it BEFORE Base64, so decrypt
    // must apply it AFTER Base64-decoding to stay a proper inverse.
    private Cipher cipher;

    public Base64Cipher() {
    }

    public Base64Cipher(Cipher cipher) {
        this.cipher = cipher;
    }

    /**
     * Decrypts by reversing {@link #encrypt(byte[])}: Base64-decode first,
     * then run the inner cipher's decrypt if one is configured.
     *
     * @param res Base64-encoded (and possibly inner-encrypted) bytes
     * @return the plain bytes
     */
    @Override
    public byte[] decrypt(byte[] res) {
        // Fixes: unified the two decode paths and removed a stray empty
        // statement (";;") from the original.
        res = Base64.decode(res, Base64.DEFAULT);
        if (cipher != null) {
            res = cipher.decrypt(res);
        }
        return res;
    }

    /**
     * Encrypts with the inner cipher first (if set), then Base64-encodes.
     *
     * @param res plain bytes
     * @return Base64-encoded (and possibly inner-encrypted) bytes
     */
    @Override
    public byte[] encrypt(byte[] res) {
        if (cipher != null) {
            res = cipher.encrypt(res);
        }
        return Base64.encode(res, Base64.DEFAULT);
    }
}
|
app/src/main/java/com/litesuits/common/data/cipher/Base64Cipher.java
|
package com.litesuits.common.data.cipher;
import com.litesuits.common.assist.Base64;
/**
* @author MaTianyu
* @date 14-7-31
*/
public class Base64Cipher extends Cipher {
    // Optional inner cipher. Encrypt applies it BEFORE Base64, so decrypt
    // must apply it AFTER Base64-decoding to stay a proper inverse.
    private Cipher cipher;

    public Base64Cipher() {
    }

    public Base64Cipher(Cipher cipher) {
        this.cipher = cipher;
    }

    /**
     * Decrypts by reversing {@link #encrypt(byte[])}.
     *
     * <p>BUG FIX: the original ran the inner cipher BEFORE Base64-decoding,
     * which is not the inverse of {@code encrypt} (inner cipher first, Base64
     * last) and broke round-tripping whenever an inner cipher was set.
     *
     * @param res Base64-encoded (and possibly inner-encrypted) bytes
     * @return the plain bytes
     */
    @Override
    public byte[] decrypt(byte[] res) {
        res = Base64.decode(res, Base64.DEFAULT);
        if (cipher != null) {
            res = cipher.decrypt(res);
        }
        return res;
    }

    /**
     * Encrypts with the inner cipher first (if set), then Base64-encodes.
     *
     * @param res plain bytes
     * @return Base64-encoded (and possibly inner-encrypted) bytes
     */
    @Override
    public byte[] encrypt(byte[] res) {
        if (cipher != null) {
            res = cipher.encrypt(res);
        }
        return Base64.encode(res, Base64.DEFAULT);
    }
}
|
修改多次加密失败问题,加密时Base64是最后加密,所以解密时也需要先使用Base64解密,后再使用其他解密
|
app/src/main/java/com/litesuits/common/data/cipher/Base64Cipher.java
|
修改多次加密失败问题,加密时Base64是最后加密,所以解密时也需要先使用Base64解密,后再使用其他解密
|
|
Java
|
apache-2.0
|
3d8dcf71c211f68b455f5cda7a8486101c9f5ebd
| 0
|
mikosik/jsolid
|
/**
* CSG.java
*
* Copyright 2014-2014 Michael Hoffer <info@michaelhoffer.de>. All rights
* reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY Michael Hoffer <info@michaelhoffer.de> "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL Michael Hoffer <info@michaelhoffer.de> OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* The views and conclusions contained in the software and documentation are
* those of the authors and should not be interpreted as representing official
* policies, either expressed or implied, of Michael Hoffer
* <info@michaelhoffer.de>.
*/
package eu.mihosoft.vrl.v3d;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import eu.mihosoft.vrl.v3d.ext.quickhull3d.HullUtil;
/**
* Constructive Solid Geometry (CSG).
*
* This implementation is a Java port of <a href=
* "https://github.com/evanw/csg.js/">https://github.com/evanw/csg.js/</a> with
* some additional features like polygon extrude, transformations etc. Thanks to
* the author for creating the CSG.js library.<br>
* <br>
*
* <b>Implementation Details</b>
*
* All CSG operations are implemented in terms of two functions,
* {@link Node#clipTo(eu.mihosoft.vrl.v3d.Node)} and {@link Node#invert()},
* which remove parts of a BSP tree inside another BSP tree and swap solid and
* empty space, respectively. To find the union of {@code a} and {@code b}, we
* want to remove everything in {@code a} inside {@code b} and everything in
* {@code b} inside {@code a}, then combine polygons from {@code a} and
* {@code b} into one solid:
*
* <blockquote>
*
* <pre>
* a.clipTo(b);
* b.clipTo(a);
* a.build(b.allPolygons());
* </pre>
*
* </blockquote>
*
* The only tricky part is handling overlapping coplanar polygons in both trees.
* The code above keeps both copies, but we need to keep them in one tree and
* remove them in the other tree. To remove them from {@code b} we can clip the
* inverse of {@code b} against {@code a}. The code for union now looks like
* this:
*
* <blockquote>
*
* <pre>
* a.clipTo(b);
* b.clipTo(a);
* b.invert();
* b.clipTo(a);
* b.invert();
* a.build(b.allPolygons());
* </pre>
*
* </blockquote>
*
* Subtraction and intersection naturally follow from set operations. If union
 * is {@code A | B}, difference is {@code A - B = ~(~A | B)} and intersection
* is {@code A & B =
* ~(~A | ~B)} where {@code ~} is the complement operator.
*/
public class CSG {
// Polygons making up this solid (shared, not defensively copied).
private List<Polygon> polygons;
// Fallback optimization type for instances without an explicit override.
private static OptType defaultOptType = OptType.NONE;
// Per-instance optimization override; null means "use the static default".
private OptType optType = null;
// Shared property storage (e.g. material info) for this csg's polygons.
private PropertyStorage storage;
private CSG() {
    storage = new PropertyStorage();
}
/**
 * Constructs a CSG from a list of {@link Polygon} instances.
 *
 * <p>NOTE(review): the list is stored without defensive copying; callers can
 * still mutate this csg's geometry through it.
 *
 * @param polygons
 *            polygons
 * @return a CSG instance
 */
public static CSG fromPolygons(List<Polygon> polygons) {
    CSG csg = new CSG();
    csg.polygons = polygons;
    return csg;
}
/**
 * Constructs a CSG from the specified {@link Polygon} instances.
 *
 * @param polygons
 *            polygons
 * @return a CSG instance
 */
public static CSG fromPolygons(Polygon... polygons) {
    // Arrays.asList returns a fixed-size view; the resulting csg's polygon
    // list cannot grow.
    return fromPolygons(Arrays.asList(polygons));
}
/**
 * Constructs a CSG from a list of {@link Polygon} instances, attaching the
 * given shared property storage to every polygon.
 *
 * @param storage
 *            shared storage
 * @param polygons
 *            polygons
 * @return a CSG instance
 */
public static CSG fromPolygons(PropertyStorage storage, List<Polygon> polygons) {
    CSG csg = new CSG();
    csg.polygons = polygons;
    csg.storage = storage;
    polygons.forEach(polygon -> polygon.setStorage(storage));
    return csg;
}
/**
 * Constructs a CSG from the specified {@link Polygon} instances with shared
 * property storage.
 *
 * @param storage
 *            shared storage
 * @param polygons
 *            polygons
 * @return a CSG instance
 */
public static CSG fromPolygons(PropertyStorage storage, Polygon... polygons) {
    return fromPolygons(storage, Arrays.asList(polygons));
}
/**
 * Returns a deep copy of this csg (every polygon is cloned); the
 * optimization type is carried over.
 */
@Override
public CSG clone() {
    CSG copy = new CSG();
    copy.setOptType(this.getOptType());
    // Parallelize the polygon cloning only when there are enough polygons
    // for the parallel overhead to pay off.
    Stream<Polygon> stream = polygons.size() > 200
        ? polygons.parallelStream()
        : polygons.stream();
    copy.polygons = stream.map(Polygon::clone).collect(Collectors.toList());
    return copy;
}
/**
 * Returns the polygons of this CSG.
 *
 * <p>NOTE(review): this exposes the internal mutable list; callers can
 * modify this csg's geometry through it.
 *
 * @return the polygons of this CSG
 */
public List<Polygon> getPolygons() {
    return polygons;
}
/**
 * Defines the CSG optimization type.
 *
 * @param type
 *            optimization type
 * @return this CSG (for chaining)
 */
public CSG optimization(OptType type) {
    this.setOptType(type);
    return this;
}
/**
 * Returns a new CSG solid representing the union of this csg and the
 * specified csg ({@code A | B}).
 *
 * <b>Note:</b> Neither this csg nor the specified csg are modified.
 *
 * <blockquote>
 *
 * <pre>
 * A.union(B)
 *
 * +-------+ +-------+
 * | | | |
 * | A | | |
 * | +--+----+ = | +----+
 * +----+--+ | +----+ |
 * | B | | |
 * | | | |
 * +-------+ +-------+
 * </pre>
 *
 * </blockquote>
 *
 * @param csg
 *            other csg
 *
 * @return union of this csg and the specified csg
 */
public CSG union(CSG csg) {
    // Dispatch on the configured optimization strategy.
    switch (getOptType()) {
    case CSG_BOUND:
        return _unionCSGBoundsOpt(csg);
    case POLYGON_BOUND:
        return _unionPolygonBoundsOpt(csg);
    default:
        // return _unionIntersectOpt(csg);
        return _unionNoOpt(csg);
    }
}
/**
 * Returns a csg consisting of the polygons of this csg and the specified
 * csg, WITHOUT running the csg algorithms. Fast, but only valid when the two
 * solids do not intersect.
 *
 * @param csg
 *            csg
 *
 * @return a csg consisting of the polygons of this csg and the specified csg
 */
public CSG dumbUnion(CSG csg) {
    CSG combined = this.clone();
    combined.polygons.addAll(csg.clone().polygons);
    return combined;
}
/**
 * Returns a new CSG solid representing the union of this csg and all of the
 * specified csgs, applied left to right.
 *
 * <b>Note:</b> Neither this csg nor the specified csgs are modified.
 *
 * @param csgs
 *            other csgs
 *
 * @return union of this csg and the specified csgs
 */
public CSG union(List<CSG> csgs) {
    // Fold the csgs into this one, left to right.
    return csgs.stream().reduce(this, CSG::union);
}
/**
 * Returns a new CSG solid representing the union of this csg and the
 * specified csgs.
 *
 * <b>Note:</b> Neither this csg nor the specified csgs are modified.
 *
 * @param csgs
 *            other csgs
 *
 * @return union of this csg and the specified csgs
 */
public CSG union(CSG... csgs) {
    return union(Arrays.asList(csgs));
}
/**
 * Returns the convex hull of this csg.
 *
 * @return the convex hull of this csg
 */
public CSG hull() {
    return HullUtil.hull(this, storage);
}
/**
 * Returns the convex hull of this csg together with the specified csgs.
 *
 * @param csgs
 *            csgs
 * @return the convex hull of this csg and the specified csgs
 */
public CSG hull(List<CSG> csgs) {
    // Collect cloned polygons of all inputs into one csg, then hull it.
    // No boolean union is needed: the hull of the combined point set is the
    // same either way.
    CSG combined = new CSG();
    combined.storage = storage;
    combined.optType = optType;
    combined.polygons = this.clone().polygons;
    for (CSG csg : csgs) {
        combined.polygons.addAll(csg.clone().polygons);
    }
    combined.polygons.forEach(p -> p.setStorage(storage));
    return combined.hull();
}
/**
 * Returns the convex hull of this csg and the specified csgs.
 *
 * @param csgs
 *            csgs
 * @return the convex hull of this csg and the specified csgs
 */
public CSG hull(CSG... csgs) {
    return hull(Arrays.asList(csgs));
}
// CSG_BOUND union is not implemented; falls back to the intersect-optimized
// union. NOTE(review): the warning text names OptType.NONE although the
// fallback actually used is _unionIntersectOpt — confirm which is intended.
private CSG _unionCSGBoundsOpt(CSG csg) {
    System.err.println("WARNING: using " + CSG.OptType.NONE
        + " since other optimization types missing for union operation.");
    return _unionIntersectOpt(csg);
}
/**
 * Bounds-optimized union: only polygons whose bounds intersect the other
 * csg's bounds go through the expensive BSP union; the rest are kept as-is.
 */
private CSG _unionPolygonBoundsOpt(CSG csg) {
    List<Polygon> inner = new ArrayList<>();
    List<Polygon> outer = new ArrayList<>();
    Bounds bounds = csg.getBounds();
    for (Polygon p : this.polygons) {
        (bounds.intersects(p.getBounds()) ? inner : outer).add(p);
    }
    List<Polygon> allPolygons = new ArrayList<>();
    if (inner.isEmpty()) {
        // Nothing can touch: just concatenate both polygon sets.
        allPolygons.addAll(this.polygons);
        allPolygons.addAll(csg.polygons);
    } else {
        allPolygons.addAll(outer);
        allPolygons.addAll(CSG.fromPolygons(inner)._unionNoOpt(csg).polygons);
    }
    return CSG.fromPolygons(allPolygons).optimization(getOptType());
}
/**
 * Optimizes for intersection. If the csgs do not intersect (by bounds), the
 * result is simply the concatenation of both polygon lists and no space
 * partitioning is performed.
 *
 * @param csg
 *            csg
 * @return the union of this csg and the specified csg
 */
private CSG _unionIntersectOpt(CSG csg) {
    Bounds bounds = csg.getBounds();
    boolean intersects = polygons.stream()
        .anyMatch(p -> bounds.intersects(p.getBounds()));
    if (intersects) {
        return _unionNoOpt(csg);
    }
    List<Polygon> allPolygons = new ArrayList<>(this.polygons);
    allPolygons.addAll(csg.polygons);
    return CSG.fromPolygons(allPolygons).optimization(getOptType());
}
// Unoptimized BSP union (csg.js algorithm). The clip/invert sequence removes
// the parts of each solid inside the other and deletes coplanar duplicates
// from b before merging; the statement order must not be changed.
private CSG _unionNoOpt(CSG csg) {
    Node a = new Node(this.clone().polygons);
    Node b = new Node(csg.clone().polygons);
    a.clipTo(b);
    b.clipTo(a);
    b.invert();
    b.clipTo(a);
    b.invert();
    a.build(b.allPolygons());
    return CSG.fromPolygons(a.allPolygons()).optimization(getOptType());
}
/**
 * Returns a new CSG solid representing the difference of this csg and the
 * specified csgs ({@code A - (B1 | B2 | ...)}).
 *
 * <b>Note:</b> Neither this csg nor the specified csgs are modified.
 *
 * @param csgs
 *            other csgs
 * @return difference of this csg and the specified csgs
 */
public CSG difference(List<CSG> csgs) {
    if (csgs.isEmpty()) {
        return this.clone();
    }
    // Union all subtrahends first, then subtract the combined solid once.
    CSG subtrahend = csgs.get(0).union(csgs.subList(1, csgs.size()));
    return difference(subtrahend);
}
/**
 * Returns a new CSG solid representing the difference of this csg and the
 * specified csgs.
 *
 * <b>Note:</b> Neither this csg nor the specified csgs are modified.
 *
 * @param csgs
 *            other csgs
 * @return difference of this csg and the specified csgs
 */
public CSG difference(CSG... csgs) {
    return difference(Arrays.asList(csgs));
}
/**
 * Returns a new CSG solid representing the difference of this csg and the
 * specified csg ({@code A - B}).
 *
 * <b>Note:</b> Neither this csg nor the specified csg are modified.
 *
 * <blockquote>
 *
 * <pre>
 * A.difference(B)
 *
 * +-------+ +-------+
 * | | | |
 * | A | | |
 * | +--+----+ = | +--+
 * +----+--+ | +----+
 * | B |
 * | |
 * +-------+
 * </pre>
 *
 * </blockquote>
 *
 * @param csg
 *            other csg
 * @return difference of this csg and the specified csg
 */
public CSG difference(CSG csg) {
    // Dispatch on the configured optimization strategy.
    switch (getOptType()) {
    case CSG_BOUND:
        return _differenceCSGBoundsOpt(csg);
    case POLYGON_BOUND:
        return _differencePolygonBoundsOpt(csg);
    default:
        return _differenceNoOpt(csg);
    }
}
/**
 * Bounds-optimized difference: split {@code this} into the part outside and
 * the part inside the other csg's bounding box; only the inside part needs
 * the full BSP difference.
 */
private CSG _differenceCSGBoundsOpt(CSG csg) {
    // FIX: csg.getBounds().toCSG() was computed twice; getBounds() walks
    // every vertex and toCSG() builds a new solid, so compute it once.
    CSG boundsCSG = csg.getBounds().toCSG();
    CSG outside = this._differenceNoOpt(boundsCSG); // part of this outside csg's bounds
    CSG inside = this.intersect(boundsCSG);         // part of this inside csg's bounds
    return inside._differenceNoOpt(csg)._unionIntersectOpt(outside).optimization(getOptType());
}
/**
 * Bounds-optimized difference: only polygons whose bounds intersect the
 * other csg take part in the expensive BSP difference; the rest are kept
 * unchanged.
 */
private CSG _differencePolygonBoundsOpt(CSG csg) {
    List<Polygon> inner = new ArrayList<>();
    List<Polygon> outer = new ArrayList<>();
    Bounds bounds = csg.getBounds();
    for (Polygon p : this.polygons) {
        (bounds.intersects(p.getBounds()) ? inner : outer).add(p);
    }
    List<Polygon> allPolygons = new ArrayList<>(outer);
    allPolygons.addAll(CSG.fromPolygons(inner)._differenceNoOpt(csg).polygons);
    return CSG.fromPolygons(allPolygons).optimization(getOptType());
}
// Unoptimized BSP difference: A - B = ~(~A | B). Invert a, union with b,
// invert the result (csg.js algorithm; the statement order must not change).
private CSG _differenceNoOpt(CSG csg) {
    Node a = new Node(this.clone().polygons);
    Node b = new Node(csg.clone().polygons);
    a.invert();
    a.clipTo(b);
    b.clipTo(a);
    b.invert();
    b.clipTo(a);
    b.invert();
    a.build(b.allPolygons());
    a.invert();
    CSG csgA = CSG.fromPolygons(a.allPolygons()).optimization(getOptType());
    return csgA;
}
/**
 * Returns a new CSG solid representing the intersection of this csg and the
 * specified csg ({@code A & B = ~(~A | ~B)}).
 *
 * <b>Note:</b> Neither this csg nor the specified csg are modified.
 *
 * <blockquote>
 *
 * <pre>
 * A.intersect(B)
 *
 * +-------+
 * | |
 * | A |
 * | +--+----+ = +--+
 * +----+--+ | +--+
 * | B |
 * | |
 * +-------+
 * </pre>
 *
 * </blockquote>
 *
 * @param csg
 *            other csg
 * @return intersection of this csg and the specified csg
 */
public CSG intersect(CSG csg) {
    // BSP intersection (csg.js algorithm); the clip/invert order must not
    // be changed.
    Node a = new Node(this.clone().polygons);
    Node b = new Node(csg.clone().polygons);
    a.invert();
    b.clipTo(a);
    b.invert();
    a.clipTo(b);
    b.clipTo(a);
    a.build(b.allPolygons());
    a.invert();
    return CSG.fromPolygons(a.allPolygons()).optimization(getOptType());
}
/**
 * Returns a new CSG solid representing the intersection of this csg with the
 * union of the specified csgs ({@code A & (B1 | B2 | ...)}).
 *
 * <b>Note:</b> Neither this csg nor the specified csgs are modified.
 *
 * @param csgs
 *            other csgs
 * @return intersection of this csg and the specified csgs
 */
public CSG intersect(List<CSG> csgs) {
    if (csgs.isEmpty()) {
        return this.clone();
    }
    // Union all operands first, then intersect once.
    CSG other = csgs.get(0).union(csgs.subList(1, csgs.size()));
    return intersect(other);
}
/**
 * Returns a new CSG solid representing the intersection of this csg and the
 * specified csgs.
 *
 * <b>Note:</b> Neither this csg nor the specified csgs are modified.
 *
 * @param csgs
 *            other csgs
 * @return intersection of this csg and the specified csgs
 */
public CSG intersect(CSG... csgs) {
    return intersect(Arrays.asList(csgs));
}
/**
 * Returns this csg in STL string format.
 *
 * @return this csg in STL string format
 */
public String toStlString() {
    return toStlString(new StringBuilder()).toString();
}
/**
 * Appends this csg in STL string format to the given builder.
 *
 * @param sb
 *            string builder
 *
 * @return the specified string builder
 */
public StringBuilder toStlString(StringBuilder sb) {
    sb.append("solid v3d.csg\n");
    for (Polygon p : this.polygons) {
        p.toStlString(sb);
    }
    sb.append("endsolid v3d.csg\n");
    return sb;
}
/**
 * Converts this csg to a Wavefront OBJ/MTL file pair: deduplicated vertices,
 * triangulated faces, and one generated material per distinct property
 * storage.
 */
public ObjFile toObj() {
    StringBuilder objSb = new StringBuilder();
    // NOTE(review): no newline after the mtllib statement, so "# Group" ends
    // up on the same line — confirm whether a "\n" is missing here.
    objSb.append("mtllib " + ObjFile.MTL_NAME);
    objSb.append("# Group").append("\n");
    objSb.append("g v3d.csg\n");
    // Local holder: one entry per polygon with its 1-based vertex indices
    // and assigned material name.
    class PolygonStruct {
        PropertyStorage storage;
        List<Integer> indices;
        String materialName;
        public PolygonStruct(PropertyStorage storage, List<Integer> indices, String materialName) {
            this.storage = storage;
            this.indices = indices;
            this.materialName = materialName;
        }
    }
    List<Vertex> vertices = new ArrayList<>();
    List<PolygonStruct> indices = new ArrayList<>();
    objSb.append("\n# Vertices\n");
    // Assigns each distinct property storage a numeric material id.
    Map<PropertyStorage, Integer> materialNames = new HashMap<>();
    int materialIndex = 0;
    for (Polygon p : polygons) {
        List<Integer> polyIndices = new ArrayList<>();
        // Deduplicate vertices; OBJ indices are 1-based.
        // NOTE(review): contains/indexOf on a list makes this O(n^2) in the
        // vertex count.
        p.vertices.stream().forEach((v) -> {
            if (!vertices.contains(v)) {
                vertices.add(v);
                v.toObjString(objSb);
                polyIndices.add(vertices.size());
            } else {
                polyIndices.add(vertices.indexOf(v) + 1);
            }
        });
        if (!materialNames.containsKey(p.getStorage())) {
            materialIndex++;
            materialNames.put(p.getStorage(), materialIndex);
            p.getStorage().set("material:name", materialIndex);
        }
        indices.add(new PolygonStruct(
            p.getStorage(), polyIndices,
            "material-" + materialNames.get(p.getStorage())));
    }
    objSb.append("\n# Faces").append("\n");
    for (PolygonStruct ps : indices) {
        // add mtl info
        ps.storage.getValue("material:color").ifPresent(
            (v) -> objSb.append("usemtl ").append(ps.materialName).append("\n"));
        // we triangulate the polygon to ensure
        // compatibility with 3d printer software
        List<Integer> pVerts = ps.indices;
        int index1 = pVerts.get(0);
        for (int i = 0; i < pVerts.size() - 2; i++) {
            int index2 = pVerts.get(i + 1);
            int index3 = pVerts.get(i + 2);
            objSb.append("f ").append(index1).append(" ").append(index2).append(" ").append(index3)
                .append("\n");
        }
    }
    objSb.append("\n# End Group v3d.csg").append("\n");
    // Emit the material library: one entry per storage carrying a color.
    StringBuilder mtlSb = new StringBuilder();
    materialNames.keySet().forEach(s -> {
        if (s.contains("material:color")) {
            mtlSb.append("newmtl material-").append(s.getValue("material:name").get()).append("\n");
            mtlSb.append("Kd ").append(s.getValue("material:color").get()).append("\n");
        }
    });
    return new ObjFile(objSb.toString(), mtlSb.toString());
}
/**
 * Appends this csg in OBJ string format (deduplicated vertices, triangulated
 * faces) to the given builder.
 *
 * @param sb
 *            string builder
 * @return the specified string builder
 */
public StringBuilder toObjString(StringBuilder sb) {
    sb.append("# Group").append("\n");
    sb.append("g v3d.csg\n");
    // Local holder: one entry per polygon with its 1-based vertex indices.
    class PolygonStruct {
        PropertyStorage storage;
        List<Integer> indices;
        String materialName;
        public PolygonStruct(PropertyStorage storage, List<Integer> indices, String materialName) {
            this.storage = storage;
            this.indices = indices;
            this.materialName = materialName;
        }
    }
    List<Vertex> vertices = new ArrayList<>();
    List<PolygonStruct> indices = new ArrayList<>();
    sb.append("\n# Vertices\n");
    for (Polygon p : polygons) {
        List<Integer> polyIndices = new ArrayList<>();
        // Deduplicate vertices; OBJ indices are 1-based.
        p.vertices.stream().forEach((v) -> {
            if (!vertices.contains(v)) {
                vertices.add(v);
                v.toObjString(sb);
                polyIndices.add(vertices.size());
            } else {
                polyIndices.add(vertices.indexOf(v) + 1);
            }
        });
        // BUG FIX: the polygon's index list was never added to `indices`,
        // so the "# Faces" section below was always empty. Mirrors toObj().
        indices.add(new PolygonStruct(p.getStorage(), polyIndices, ""));
    }
    sb.append("\n# Faces").append("\n");
    for (PolygonStruct ps : indices) {
        // we triangulate the polygon to ensure
        // compatibility with 3d printer software
        List<Integer> pVerts = ps.indices;
        int index1 = pVerts.get(0);
        for (int i = 0; i < pVerts.size() - 2; i++) {
            int index2 = pVerts.get(i + 1);
            int index3 = pVerts.get(i + 2);
            sb.append("f ").append(index1).append(" ").append(index2).append(" ").append(index3).append(
                "\n");
        }
    }
    sb.append("\n# End Group v3d.csg").append("\n");
    return sb;
}
/**
 * Returns this csg in OBJ string format.
 *
 * @return this csg in OBJ string format
 */
public String toObjString() {
    return toObjString(new StringBuilder()).toString();
}
// Returns the result of applying the given WeightFunction to this csg via
// Modifier. NOTE(review): the exact semantics depend on Modifier.modified,
// which is not visible here.
public CSG weighted(WeightFunction f) {
    return new Modifier(f).modified(this);
}
/**
 * Returns a transformed copy of this CSG.
 *
 * @param transform
 *            the transform to apply
 *
 * @return a transformed copy of this CSG
 */
public CSG transformed(Transform transform) {
    if (polygons.isEmpty()) {
        return clone();
    }
    List<Polygon> transformedPolygons = new ArrayList<>(polygons.size());
    for (Polygon polygon : polygons) {
        transformedPolygons.add(polygon.transformed(transform));
    }
    CSG result = CSG.fromPolygons(transformedPolygons).optimization(getOptType());
    result.storage = storage;
    return result;
}
/**
 * Returns the axis-aligned bounds of this csg; a zero-sized bounds at the
 * origin if there are no polygons.
 *
 * @return bounds of this csg
 */
public Bounds getBounds() {
    if (polygons.isEmpty()) {
        return new Bounds(Vector3d.ZERO, Vector3d.ZERO);
    }
    double minX = Double.POSITIVE_INFINITY;
    double minY = Double.POSITIVE_INFINITY;
    double minZ = Double.POSITIVE_INFINITY;
    double maxX = Double.NEGATIVE_INFINITY;
    double maxY = Double.NEGATIVE_INFINITY;
    double maxZ = Double.NEGATIVE_INFINITY;
    // Accumulate min/max over every vertex of every polygon.
    for (Polygon p : getPolygons()) {
        for (Vertex vert : p.vertices) {
            minX = Math.min(minX, vert.pos.x);
            minY = Math.min(minY, vert.pos.y);
            minZ = Math.min(minZ, vert.pos.z);
            maxX = Math.max(maxX, vert.pos.x);
            maxY = Math.max(maxY, vert.pos.y);
            maxZ = Math.max(maxZ, vert.pos.z);
        }
    }
    return new Bounds(
        new Vector3d(minX, minY, minZ),
        new Vector3d(maxX, maxY, maxZ));
}
/**
 * Returns the effective optimization type: the per-instance override when
 * set, otherwise the static default.
 *
 * @return the optType
 */
private OptType getOptType() {
    return optType != null ? optType : defaultOptType;
}
/**
 * Sets the default optimization type used by instances without an explicit
 * per-instance override.
 *
 * @param optType
 *            the optType to set
 */
public static void setDefaultOptType(OptType optType) {
    defaultOptType = optType;
}
/**
 * Sets this instance's optimization type, overriding the static default.
 *
 * @param optType
 *            the optType to set
 */
public void setOptType(OptType optType) {
    this.optType = optType;
}
// Strategies for speeding up boolean operations via bounds checks.
public static enum OptType {
    CSG_BOUND,     // clip against the other csg's bounding box first
    POLYGON_BOUND, // partition polygons by per-polygon bounds
    NONE           // always run the full BSP algorithm
}
}
|
src/eu/mihosoft/vrl/v3d/CSG.java
|
/**
* CSG.java
*
* Copyright 2014-2014 Michael Hoffer <info@michaelhoffer.de>. All rights
* reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY Michael Hoffer <info@michaelhoffer.de> "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL Michael Hoffer <info@michaelhoffer.de> OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* The views and conclusions contained in the software and documentation are
* those of the authors and should not be interpreted as representing official
* policies, either expressed or implied, of Michael Hoffer
* <info@michaelhoffer.de>.
*/
package eu.mihosoft.vrl.v3d;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import eu.mihosoft.vrl.v3d.ext.quickhull3d.HullUtil;
import javafx.scene.shape.TriangleMesh;
/**
* Constructive Solid Geometry (CSG).
*
* This implementation is a Java port of <a href=
* "https://github.com/evanw/csg.js/">https://github.com/evanw/csg.js/</a> with
* some additional features like polygon extrude, transformations etc. Thanks to
* the author for creating the CSG.js library.<br>
* <br>
*
* <b>Implementation Details</b>
*
* All CSG operations are implemented in terms of two functions,
* {@link Node#clipTo(eu.mihosoft.vrl.v3d.Node)} and {@link Node#invert()},
* which remove parts of a BSP tree inside another BSP tree and swap solid and
* empty space, respectively. To find the union of {@code a} and {@code b}, we
* want to remove everything in {@code a} inside {@code b} and everything in
* {@code b} inside {@code a}, then combine polygons from {@code a} and
* {@code b} into one solid:
*
* <blockquote>
*
* <pre>
* a.clipTo(b);
* b.clipTo(a);
* a.build(b.allPolygons());
* </pre>
*
* </blockquote>
*
* The only tricky part is handling overlapping coplanar polygons in both trees.
* The code above keeps both copies, but we need to keep them in one tree and
* remove them in the other tree. To remove them from {@code b} we can clip the
* inverse of {@code b} against {@code a}. The code for union now looks like
* this:
*
* <blockquote>
*
* <pre>
* a.clipTo(b);
* b.clipTo(a);
* b.invert();
* b.clipTo(a);
* b.invert();
* a.build(b.allPolygons());
* </pre>
*
* </blockquote>
*
* Subtraction and intersection naturally follow from set operations. If union
 * is {@code A | B}, difference is {@code A - B = ~(~A | B)} and intersection
* is {@code A & B =
* ~(~A | ~B)} where {@code ~} is the complement operator.
*/
public class CSG {
private List<Polygon> polygons;
private static OptType defaultOptType = OptType.NONE;
private OptType optType = null;
private PropertyStorage storage;
private CSG() {
storage = new PropertyStorage();
}
/**
* Constructs a CSG from a list of {@link Polygon} instances.
*
* @param polygons
* polygons
* @return a CSG instance
*/
public static CSG fromPolygons(List<Polygon> polygons) {
CSG csg = new CSG();
csg.polygons = polygons;
return csg;
}
/**
* Constructs a CSG from the specified {@link Polygon} instances.
*
* @param polygons
* polygons
* @return a CSG instance
*/
public static CSG fromPolygons(Polygon... polygons) {
return fromPolygons(Arrays.asList(polygons));
}
/**
* Constructs a CSG from a list of {@link Polygon} instances.
*
* @param storage
* shared storage
* @param polygons
* polygons
* @return a CSG instance
*/
public static CSG fromPolygons(PropertyStorage storage, List<Polygon> polygons) {
CSG csg = new CSG();
csg.polygons = polygons;
csg.storage = storage;
for (Polygon polygon : polygons) {
polygon.setStorage(storage);
}
return csg;
}
/**
* Constructs a CSG from the specified {@link Polygon} instances.
*
* @param storage
* shared storage
* @param polygons
* polygons
* @return a CSG instance
*/
public static CSG fromPolygons(PropertyStorage storage, Polygon... polygons) {
return fromPolygons(storage, Arrays.asList(polygons));
}
@Override
public CSG clone() {
CSG csg = new CSG();
csg.setOptType(this.getOptType());
// sequential code
// csg.polygons = new ArrayList<>();
// polygons.forEach((polygon) -> {
// csg.polygons.add(polygon.clone());
// });
Stream<Polygon> polygonStream;
if (polygons.size() > 200) {
polygonStream = polygons.parallelStream();
} else {
polygonStream = polygons.stream();
}
csg.polygons = polygonStream.map((Polygon p) -> p.clone()).collect(Collectors.toList());
return csg;
}
/**
*
* @return the polygons of this CSG
*/
public List<Polygon> getPolygons() {
return polygons;
}
/**
* Defines the CSg optimization type.
*
* @param type
* optimization type
* @return this CSG
*/
public CSG optimization(OptType type) {
this.setOptType(type);
return this;
}
/**
* Return a new CSG solid representing the union of this csg and the specified
* csg.
*
* <b>Note:</b> Neither this csg nor the specified csg are weighted.
*
* <blockquote>
*
* <pre>
* A.union(B)
*
* +-------+ +-------+
* | | | |
* | A | | |
* | +--+----+ = | +----+
* +----+--+ | +----+ |
* | B | | |
* | | | |
* +-------+ +-------+
* </pre>
*
* </blockquote>
*
*
* @param csg
* other csg
*
* @return union of this csg and the specified csg
*/
public CSG union(CSG csg) {
switch (getOptType()) {
case CSG_BOUND:
return _unionCSGBoundsOpt(csg);
case POLYGON_BOUND:
return _unionPolygonBoundsOpt(csg);
default:
// return _unionIntersectOpt(csg);
return _unionNoOpt(csg);
}
}
/**
 * Returns a csg made of the polygons of this csg plus the polygons of the
 * specified csg, without running any CSG algorithm.
 *
 * <p><b>WARNING:</b> intended as a fast path for solids that are known not
 * to intersect; overlapping inputs will produce invalid geometry.
 *
 * @param csg csg to merge in
 * @return a csg consisting of the polygons of both operands
 */
public CSG dumbUnion(CSG csg) {
    CSG combined = this.clone();
    combined.polygons.addAll(csg.clone().polygons);
    return combined;
}
/**
 * Returns a new CSG solid representing the union of this csg and every
 * csg in the specified list. Neither this csg nor the arguments are
 * modified; the union is folded left to right.
 *
 * @param csgs other csgs
 * @return union of this csg and the specified csgs
 */
public CSG union(List<CSG> csgs) {
    CSG accumulated = this;
    for (int i = 0; i < csgs.size(); i++) {
        accumulated = accumulated.union(csgs.get(i));
    }
    return accumulated;
}
/**
 * Returns a new CSG solid representing the union of this csg and the
 * specified csgs. Varargs convenience overload of {@link #union(List)}.
 *
 * @param csgs other csgs
 * @return union of this csg and the specified csgs
 */
public CSG union(CSG... csgs) {
    List<CSG> asList = Arrays.asList(csgs);
    return union(asList);
}
/**
* Returns the convex hull of this csg.
*
* @return the convex hull of this csg
*/
public CSG hull() {
    // Delegates to HullUtil; this csg's property storage is passed along
    // so hull polygons keep the same material/properties.
    return HullUtil.hull(this, storage);
}
/**
* Returns the convex hull of this csg and the union of the specified csgs.
*
* @param csgs
* csgs
* @return the convex hull of this csg and the specified csgs
*/
public CSG hull(List<CSG> csgs) {
    // Collect clones of all polygons (this csg plus every argument) into a
    // throwaway CSG and hull that. No boolean union is required because
    // the convex hull depends only on the combined vertex cloud.
    CSG csgsUnion = new CSG();
    csgsUnion.storage = storage;
    csgsUnion.optType = optType;
    csgsUnion.polygons = this.clone().polygons;
    csgs.stream().forEach((csg) -> {
        csgsUnion.polygons.addAll(csg.clone().polygons);
    });
    // Unify property storage so the hull result uses this csg's storage.
    csgsUnion.polygons.forEach(p -> p.setStorage(storage));
    return csgsUnion.hull();
    // CSG csgsUnion = this;
    //
    // for (CSG csg : csgs) {
    // csgsUnion = csgsUnion.union(csg);
    // }
    //
    // return csgsUnion.hull();
}
/**
 * Returns the convex hull of this csg together with the specified csgs.
 * Varargs convenience overload of {@link #hull(List)}.
 *
 * @param csgs csgs
 * @return the convex hull of this csg and the specified csgs
 */
public CSG hull(CSG... csgs) {
    List<CSG> asList = Arrays.asList(csgs);
    return hull(asList);
}
private CSG _unionCSGBoundsOpt(CSG csg) {
    // The CSG_BOUND union optimization is not implemented; warn once per
    // call and fall back to the intersection-optimized union.
    System.err.println("WARNING: using " + CSG.OptType.NONE
            + " since other optimization types missing for union operation.");
    return _unionIntersectOpt(csg);
}
private CSG _unionPolygonBoundsOpt(CSG csg) {
    // Partition this csg's polygons by whether their bounding boxes can
    // intersect the other csg ("inner") or definitely cannot ("outer").
    List<Polygon> inner = new ArrayList<>();
    List<Polygon> outer = new ArrayList<>();
    Bounds bounds = csg.getBounds();
    this.polygons.stream().forEach((p) -> {
        if (bounds.intersects(p.getBounds())) {
            inner.add(p);
        } else {
            outer.add(p);
        }
    });
    List<Polygon> allPolygons = new ArrayList<>();
    if (!inner.isEmpty()) {
        // Only the potentially-intersecting subset runs the expensive
        // BSP-based union; outer polygons are passed through unchanged.
        CSG innerCSG = CSG.fromPolygons(inner);
        allPolygons.addAll(outer);
        allPolygons.addAll(innerCSG._unionNoOpt(csg).polygons);
    } else {
        // No polygon can touch the other csg: concatenate both lists.
        allPolygons.addAll(this.polygons);
        allPolygons.addAll(csg.polygons);
    }
    return CSG.fromPolygons(allPolygons).optimization(getOptType());
}
/**
 * Union optimized for the non-intersecting case. If the bounding boxes of
 * the two csgs do not overlap, the result is simply the concatenation of
 * both polygon lists and no space partitioning is performed; otherwise
 * the full BSP union runs.
 *
 * @param csg csg
 * @return the union of this csg and the specified csg
 */
private CSG _unionIntersectOpt(CSG csg) {
    Bounds bounds = csg.getBounds();
    for (Polygon p : polygons) {
        if (bounds.intersects(p.getBounds())) {
            // Overlap detected: fall back to the full BSP union.
            // (The original allocated the result list before this check.)
            return _unionNoOpt(csg);
        }
    }
    // No polygon can intersect: concatenate, presizing the list.
    List<Polygon> allPolygons = new ArrayList<>(this.polygons.size() + csg.polygons.size());
    allPolygons.addAll(this.polygons);
    allPolygons.addAll(csg.polygons);
    return CSG.fromPolygons(allPolygons).optimization(getOptType());
}
private CSG _unionNoOpt(CSG csg) {
    // BSP-tree union: clip each solid against the other to remove interior
    // faces, with b inverted around its second clip to also remove
    // coplanar duplicates. The order of these calls is significant.
    Node a = new Node(this.clone().polygons);
    Node b = new Node(csg.clone().polygons);
    a.clipTo(b);
    b.clipTo(a);
    b.invert();
    b.clipTo(a);
    b.invert();
    a.build(b.allPolygons());
    return CSG.fromPolygons(a.allPolygons()).optimization(getOptType());
}
/**
 * Returns a new CSG solid representing the difference of this csg and the
 * specified csgs: the arguments are first unioned together, then
 * subtracted from this csg. Neither operand is modified.
 *
 * @param csgs other csgs
 * @return difference of this csg and the specified csgs
 */
public CSG difference(List<CSG> csgs) {
    if (csgs.isEmpty()) {
        return this.clone();
    }
    CSG combined = csgs.get(0);
    for (CSG other : csgs.subList(1, csgs.size())) {
        combined = combined.union(other);
    }
    return difference(combined);
}
/**
 * Returns a new CSG solid representing the difference of this csg and the
 * specified csgs. Varargs convenience overload of {@link #difference(List)}.
 *
 * @param csgs other csgs
 * @return difference of this csg and the specified csgs
 */
public CSG difference(CSG... csgs) {
    List<CSG> asList = Arrays.asList(csgs);
    return difference(asList);
}
/**
 * Returns a new CSG solid representing the difference of this csg and the
 * specified csg (this minus csg). Neither operand is modified.
 *
 * The optimization strategy configured via {@link #optimization(OptType)}
 * decides which implementation runs.
 *
 * @param csg other csg
 * @return difference of this csg and the specified csg
 */
public CSG difference(CSG csg) {
    OptType strategy = getOptType();
    if (strategy == OptType.CSG_BOUND) {
        return _differenceCSGBoundsOpt(csg);
    }
    if (strategy == OptType.POLYGON_BOUND) {
        return _differencePolygonBoundsOpt(csg);
    }
    return _differenceNoOpt(csg);
}
/**
 * Difference optimized via the other csg's bounding box: only the part of
 * this csg inside that box needs the real subtraction; the rest is
 * re-attached with the cheap non-intersecting union.
 *
 * @param csg csg to subtract
 * @return difference of this csg and the specified csg
 */
private CSG _differenceCSGBoundsOpt(CSG csg) {
    // Hoisted: the bounds CSG was previously constructed twice.
    CSG boundsCSG = csg.getBounds().toCSG();
    CSG outside = this._differenceNoOpt(boundsCSG);
    CSG inside = this.intersect(boundsCSG);
    return inside._differenceNoOpt(csg)._unionIntersectOpt(outside).optimization(getOptType());
}
private CSG _differencePolygonBoundsOpt(CSG csg) {
    // Partition this csg's polygons by whether their bounding boxes can
    // intersect the subtrahend; only that subset runs the full difference.
    List<Polygon> inner = new ArrayList<>();
    List<Polygon> outer = new ArrayList<>();
    Bounds bounds = csg.getBounds();
    this.polygons.stream().forEach((p) -> {
        if (bounds.intersects(p.getBounds())) {
            inner.add(p);
        } else {
            outer.add(p);
        }
    });
    CSG innerCSG = CSG.fromPolygons(inner);
    List<Polygon> allPolygons = new ArrayList<>();
    // Outer polygons are untouched by the subtraction; pass them through.
    allPolygons.addAll(outer);
    allPolygons.addAll(innerCSG._differenceNoOpt(csg).polygons);
    return CSG.fromPolygons(allPolygons).optimization(getOptType());
}
private CSG _differenceNoOpt(CSG csg) {
    // BSP-tree difference: computed as the union of inverted-a with b,
    // inverted back at the end (A \ B == ~(~A ∪ B)). The clip/invert
    // call order is significant.
    Node a = new Node(this.clone().polygons);
    Node b = new Node(csg.clone().polygons);
    a.invert();
    a.clipTo(b);
    b.clipTo(a);
    b.invert();
    b.clipTo(a);
    b.invert();
    a.build(b.allPolygons());
    a.invert();
    CSG csgA = CSG.fromPolygons(a.allPolygons()).optimization(getOptType());
    return csgA;
}
/**
* Return a new CSG solid representing the intersection of this csg and the
* specified csg.
*
* <b>Note:</b> Neither this csg nor the specified csg are weighted.
*
* <blockquote>
*
* <pre>
* A.intersect(B)
*
* +-------+
* | |
* | A |
* | +--+----+ = +--+
* +----+--+ | +--+
* | B |
* | |
* +-------+
* }
* </pre>
*
* </blockquote>
*
* @param csg
* other csg
* @return intersection of this csg and the specified csg
*/
public CSG intersect(CSG csg) {
    // BSP-tree intersection: computed via De Morgan on the union of the
    // complements (A ∩ B == ~(~A ∪ ~B)). The clip/invert call order is
    // significant. Note: no optimization variants exist for intersect.
    Node a = new Node(this.clone().polygons);
    Node b = new Node(csg.clone().polygons);
    a.invert();
    b.clipTo(a);
    b.invert();
    a.clipTo(b);
    b.clipTo(a);
    a.build(b.allPolygons());
    a.invert();
    return CSG.fromPolygons(a.allPolygons()).optimization(getOptType());
}
/**
 * Returns a new CSG solid representing the intersection of this csg and
 * the specified csgs: the arguments are first unioned together, then
 * intersected with this csg. Neither operand is modified.
 *
 * @param csgs other csgs
 * @return intersection of this csg and the specified csgs
 */
public CSG intersect(List<CSG> csgs) {
    if (csgs.isEmpty()) {
        return this.clone();
    }
    CSG combined = csgs.get(0);
    for (CSG other : csgs.subList(1, csgs.size())) {
        combined = combined.union(other);
    }
    return intersect(combined);
}
/**
 * Returns a new CSG solid representing the intersection of this csg and
 * the specified csgs. Varargs convenience overload of
 * {@link #intersect(List)}.
 *
 * @param csgs other csgs
 * @return intersection of this csg and the specified csgs
 */
public CSG intersect(CSG... csgs) {
    List<CSG> asList = Arrays.asList(csgs);
    return intersect(asList);
}
/**
 * Returns this csg in STL string format.
 *
 * @return this csg in STL string format
 */
public String toStlString() {
    return toStlString(new StringBuilder()).toString();
}
/**
 * Appends this csg in STL string format to the given builder.
 *
 * @param sb string builder to append to
 * @return the specified string builder
 */
public StringBuilder toStlString(StringBuilder sb) {
    sb.append("solid v3d.csg\n");
    for (Polygon polygon : this.polygons) {
        polygon.toStlString(sb);
    }
    sb.append("endsolid v3d.csg\n");
    return sb;
}
/**
 * Converts this csg to a Wavefront OBJ representation plus a companion MTL
 * material file. Vertices are deduplicated; polygons are fan-triangulated
 * for 3d-printer-software compatibility.
 *
 * @return an ObjFile holding the OBJ geometry and the MTL materials
 */
public ObjFile toObj() {
    StringBuilder objSb = new StringBuilder();
    // BUGFIX: the mtllib statement must end with a newline; previously it
    // fused with the following "# Group" comment, producing an invalid
    // mtllib reference.
    objSb.append("mtllib " + ObjFile.MTL_NAME).append("\n");
    objSb.append("# Group").append("\n");
    objSb.append("g v3d.csg\n");
    // Local value type tying a polygon's vertex indices to its material.
    class PolygonStruct {
        PropertyStorage storage;
        List<Integer> indices;
        String materialName;
        public PolygonStruct(PropertyStorage storage, List<Integer> indices, String materialName) {
            this.storage = storage;
            this.indices = indices;
            this.materialName = materialName;
        }
    }
    List<Vertex> vertices = new ArrayList<>();
    List<PolygonStruct> indices = new ArrayList<>();
    objSb.append("\n# Vertices\n");
    Map<PropertyStorage, Integer> materialNames = new HashMap<>();
    int materialIndex = 0;
    for (Polygon p : polygons) {
        List<Integer> polyIndices = new ArrayList<>();
        // NOTE(review): vertices.contains/indexOf makes deduplication
        // O(n^2); a Map<Vertex,Integer> would be faster but requires a
        // consistent Vertex.hashCode() — confirm before changing.
        p.vertices.stream().forEach((v) -> {
            if (!vertices.contains(v)) {
                vertices.add(v);
                v.toObjString(objSb);
                polyIndices.add(vertices.size()); // OBJ indices are 1-based
            } else {
                polyIndices.add(vertices.indexOf(v) + 1);
            }
        });
        // Register a material per distinct property storage.
        if (!materialNames.containsKey(p.getStorage())) {
            materialIndex++;
            materialNames.put(p.getStorage(), materialIndex);
            p.getStorage().set("material:name", materialIndex);
        }
        indices.add(new PolygonStruct(
                p.getStorage(), polyIndices,
                "material-" + materialNames.get(p.getStorage())));
    }
    objSb.append("\n# Faces").append("\n");
    for (PolygonStruct ps : indices) {
        // add mtl info (only for polygons that carry a color)
        ps.storage.getValue("material:color").ifPresent(
                (v) -> objSb.append("usemtl ").append(ps.materialName).append("\n"));
        // fan-triangulate the polygon around its first vertex
        List<Integer> pVerts = ps.indices;
        int index1 = pVerts.get(0);
        for (int i = 0; i < pVerts.size() - 2; i++) {
            int index2 = pVerts.get(i + 1);
            int index3 = pVerts.get(i + 2);
            objSb.append("f ").append(index1).append(" ").append(index2).append(" ").append(index3)
                    .append("\n");
        }
    }
    objSb.append("\n# End Group v3d.csg").append("\n");
    // Emit the MTL companion file for every material that has a color.
    StringBuilder mtlSb = new StringBuilder();
    materialNames.keySet().forEach(s -> {
        if (s.contains("material:color")) {
            mtlSb.append("newmtl material-").append(s.getValue("material:name").get()).append("\n");
            mtlSb.append("Kd ").append(s.getValue("material:color").get()).append("\n");
        }
    });
    return new ObjFile(objSb.toString(), mtlSb.toString());
}
/**
 * Appends this csg in OBJ string format to the given builder.
 *
 * @param sb string builder to append to
 * @return the specified string builder
 */
public StringBuilder toObjString(StringBuilder sb) {
    sb.append("# Group").append("\n");
    sb.append("g v3d.csg\n");
    // Local value type tying a polygon's vertex indices to its material.
    class PolygonStruct {
        PropertyStorage storage;
        List<Integer> indices;
        String materialName;
        public PolygonStruct(PropertyStorage storage, List<Integer> indices, String materialName) {
            this.storage = storage;
            this.indices = indices;
            this.materialName = materialName;
        }
    }
    List<Vertex> vertices = new ArrayList<>();
    List<PolygonStruct> indices = new ArrayList<>();
    sb.append("\n# Vertices\n");
    for (Polygon p : polygons) {
        List<Integer> polyIndices = new ArrayList<>();
        p.vertices.stream().forEach((v) -> {
            if (!vertices.contains(v)) {
                vertices.add(v);
                v.toObjString(sb);
                polyIndices.add(vertices.size()); // OBJ indices are 1-based
            } else {
                polyIndices.add(vertices.indexOf(v) + 1);
            }
        });
        // BUGFIX: the struct list was never populated, so the "# Faces"
        // loop below iterated an empty list and no faces were emitted
        // (compare toObj(), which does add to the list).
        indices.add(new PolygonStruct(p.getStorage(), polyIndices, "default"));
    }
    sb.append("\n# Faces").append("\n");
    for (PolygonStruct ps : indices) {
        // fan-triangulate the polygon around its first vertex for
        // compatibility with 3d printer software
        List<Integer> pVerts = ps.indices;
        int index1 = pVerts.get(0);
        for (int i = 0; i < pVerts.size() - 2; i++) {
            int index2 = pVerts.get(i + 1);
            int index3 = pVerts.get(i + 2);
            sb.append("f ").append(index1).append(" ").append(index2).append(" ").append(index3).append(
                    "\n");
        }
    }
    sb.append("\n# End Group v3d.csg").append("\n");
    return sb;
}
/**
 * Returns this csg in OBJ string format.
 *
 * @return this csg in OBJ string format
 */
public String toObjString() {
    return toObjString(new StringBuilder()).toString();
}
public CSG weighted(WeightFunction f) {
    // Applies the weight function via the Modifier helper; this csg is
    // not modified, a new csg is returned.
    return new Modifier(f).modified(this);
}
/**
 * Returns a transformed copy of this CSG; this csg is not modified.
 *
 * @param transform the transform to apply
 * @return a transformed copy of this CSG
 */
public CSG transformed(Transform transform) {
    if (polygons.isEmpty()) {
        return clone();
    }
    List<Polygon> transformedPolygons = new ArrayList<>(polygons.size());
    for (Polygon polygon : this.polygons) {
        transformedPolygons.add(polygon.transformed(transform));
    }
    CSG result = CSG.fromPolygons(transformedPolygons).optimization(getOptType());
    // The copy keeps this csg's property storage.
    result.storage = storage;
    return result;
}
// TODO finish experiment (20.7.2014)
public MeshContainer toJavaFXMesh() {
    // Currently just delegates to the simple triangulation; the OBJ
    // importer path below is an unfinished experiment (see TODO).
    return toJavaFXMeshSimple();
    // TODO test obj approach with multiple materials
    // try {
    // ObjImporter importer = new ObjImporter(toObj());
    //
    // List<Mesh> meshes = new ArrayList<>(importer.getMeshCollection());
    // return new MeshContainer(getBounds().getMin(), getBounds().getMax(),
    // meshes, new ArrayList<>(importer.getMaterialCollection()));
    // } catch (IOException ex) {
    // Logger.getLogger(CSG.class.getName()).log(Level.SEVERE, null, ex);
    // }
    // // we have no backup strategy for broken streams :(
    // return null;
}
/**
 * Returns the CSG as JavaFX triangle mesh.
 *
 * Polygons with more than three vertices are fan-triangulated around their
 * first vertex; polygons with fewer than three vertices are skipped. The
 * returned container also carries the mesh's bounding box.
 *
 * @return the CSG as JavaFX triangle mesh
 */
public MeshContainer toJavaFXMeshSimple() {
    TriangleMesh mesh = new TriangleMesh();
    double minX = Double.POSITIVE_INFINITY;
    double minY = Double.POSITIVE_INFINITY;
    double minZ = Double.POSITIVE_INFINITY;
    double maxX = Double.NEGATIVE_INFINITY;
    double maxY = Double.NEGATIVE_INFINITY;
    double maxZ = Double.NEGATIVE_INFINITY;
    int counter = 0;
    for (Polygon p : getPolygons()) {
        if (p.vertices.size() >= 3) {
            // JavaFX requires triangular polygons; fan-triangulate.
            Vertex firstVertex = p.vertices.get(0);
            for (int i = 0; i < p.vertices.size() - 2; i++) {
                Vertex[] triangle = {
                        firstVertex,
                        p.vertices.get(i + 1),
                        p.vertices.get(i + 2)
                };
                for (Vertex vertex : triangle) {
                    // BUGFIX: the original updated maxX/maxY/maxZ with
                    // firstVertex's coordinates when processing the second
                    // and third vertices, yielding a too-small bounding box.
                    minX = Math.min(minX, vertex.pos.x);
                    minY = Math.min(minY, vertex.pos.y);
                    minZ = Math.min(minZ, vertex.pos.z);
                    maxX = Math.max(maxX, vertex.pos.x);
                    maxY = Math.max(maxY, vertex.pos.y);
                    maxZ = Math.max(maxZ, vertex.pos.z);
                    mesh.getPoints().addAll(
                            (float) vertex.pos.x,
                            (float) vertex.pos.y,
                            (float) vertex.pos.z);
                    mesh.getTexCoords().addAll(0); // texture (not covered)
                    mesh.getTexCoords().addAll(0);
                }
                mesh.getFaces().addAll(
                        counter, // first vertex
                        0, // texture (not covered)
                        counter + 1, // second vertex
                        0, // texture (not covered)
                        counter + 2, // third vertex
                        0 // texture (not covered)
                );
                counter += 3;
            } // end for triangle
        } // end if #verts >= 3
    } // end for polygon
    return new MeshContainer(new Vector3d(minX, minY, minZ), new Vector3d(maxX, maxY, maxZ), mesh);
}
/**
 * Computes the axis-aligned bounding box of this csg.
 *
 * @return bounds of this csg (zero bounds when the csg has no polygons)
 */
public Bounds getBounds() {
    if (polygons.isEmpty()) {
        return new Bounds(Vector3d.ZERO, Vector3d.ZERO);
    }
    double loX = Double.POSITIVE_INFINITY;
    double loY = Double.POSITIVE_INFINITY;
    double loZ = Double.POSITIVE_INFINITY;
    double hiX = Double.NEGATIVE_INFINITY;
    double hiY = Double.NEGATIVE_INFINITY;
    double hiZ = Double.NEGATIVE_INFINITY;
    for (Polygon polygon : getPolygons()) {
        for (Vertex vertex : polygon.vertices) {
            if (vertex.pos.x < loX) {
                loX = vertex.pos.x;
            }
            if (vertex.pos.y < loY) {
                loY = vertex.pos.y;
            }
            if (vertex.pos.z < loZ) {
                loZ = vertex.pos.z;
            }
            if (vertex.pos.x > hiX) {
                hiX = vertex.pos.x;
            }
            if (vertex.pos.y > hiY) {
                hiY = vertex.pos.y;
            }
            if (vertex.pos.z > hiZ) {
                hiZ = vertex.pos.z;
            }
        }
    }
    return new Bounds(
            new Vector3d(loX, loY, loZ),
            new Vector3d(hiX, hiY, hiZ));
}
/**
 * @return the optType set on this instance, or the class-wide default
 *         when none has been set
 */
private OptType getOptType() {
    if (optType == null) {
        return defaultOptType;
    }
    return optType;
}
/**
* @param optType
* the optType to set
*/
public static void setDefaultOptType(OptType optType) {
    // Class-wide default used by every instance without an explicit optType.
    defaultOptType = optType;
}
/**
* @param optType
* the optType to set
*/
public void setOptType(OptType optType) {
    // Per-instance override; null falls back to the class-wide default.
    this.optType = optType;
}
public static enum OptType {
    CSG_BOUND, // optimize boolean ops using the whole csg's bounding box
    POLYGON_BOUND, // optimize using per-polygon bounding boxes
    NONE // no optimization; always run the full BSP algorithm
}
}
|
removed CSG.toJavaFXMeshSimple() toJavaFXMesh()
|
src/eu/mihosoft/vrl/v3d/CSG.java
|
removed CSG.toJavaFXMeshSimple() toJavaFXMesh()
|
|
Java
|
apache-2.0
|
ce495f7bca7b773482c82b47ee7f5c75a78fe0ff
| 0
|
Kerbores/NUTZ-ONEKEY,Kerbores/NUTZ-ONEKEY,Kerbores/NUTZ-ONEKEY
|
package club.zhcs.thunder.task;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.hyperic.sigar.Sigar;
import org.hyperic.sigar.SigarException;
import org.nutz.aop.interceptor.async.Async;
import org.nutz.dao.Cnd;
import org.nutz.dao.Dao;
import org.nutz.integration.quartz.annotation.Scheduled;
import org.nutz.ioc.impl.PropertiesProxy;
import org.nutz.ioc.loader.annotation.Inject;
import org.nutz.ioc.loader.annotation.IocBean;
import org.nutz.lang.ContinueLoop;
import org.nutz.lang.Each;
import org.nutz.lang.ExitLoop;
import org.nutz.lang.Lang;
import org.nutz.lang.LoopException;
import org.nutz.lang.Strings;
import org.nutz.log.Log;
import org.nutz.log.Logs;
import org.nutz.weixin.bean.WxTemplateData;
import org.nutz.weixin.spi.WxApi2;
import org.nutz.weixin.spi.WxResp;
import org.quartz.Job;
import org.quartz.JobExecutionContext;
import org.quartz.JobExecutionException;
import club.zhcs.thunder.bean.acl.User;
import club.zhcs.thunder.bean.apm.APMAlarm;
import club.zhcs.thunder.bean.apm.APMAlarm.Type;
import club.zhcs.thunder.biz.acl.UserService;
import club.zhcs.thunder.biz.eamil.EmailService;
import club.zhcs.titans.gather.CPUGather;
import club.zhcs.titans.gather.DISKGather;
import club.zhcs.titans.gather.MemoryGather;
import club.zhcs.titans.gather.NetInterfaceGather;
import club.zhcs.titans.utils.common.Ips;
import club.zhcs.titans.utils.common.Numbers;
import com.google.common.collect.Lists;
/**
* @author Kerbores(kerbores@gmail.com)
*
* @project app
*
* @file APMTask.java
*
* @description 告警检测业务
*
* @time 2016年3月15日 上午11:54:46
*
*/
@IocBean(name = "apmTask", fields = "dao", create = "init")
@Scheduled(cron = "*/10 * * * * ? ")
public class APMTask implements Job {
    private static Log LOG = Logs.getLog(APMTask.class);
    private Dao dao;
    @Inject
    PropertiesProxy config;
    @Inject("wxApi")
    WxApi2 api;
    @Inject
    UserService userService;
    @Inject
    EmailService emailService;
    // Users that receive alarm notifications; filled by init() from the
    // comma-separated "alarm.listener" configuration entry.
    List<User> listeners = Lists.newArrayList();
    /**
     * Ioc create-callback: resolves the configured listener names to users.
     * NOTE(review): userService.fetch may return null for an unknown name;
     * the send methods below guard against null entries.
     */
    public void init() {
        String listener = config.get("alarm.listener");
        Lang.each(listener.split(","), new Each<String>() {
            @Override
            public void invoke(int index, String lis, int length) throws ExitLoop, ContinueLoop, LoopException {
                listeners.add(userService.fetch(Cnd.where("name", "=", lis)));
            }
        });
    }
    /**
     * Default constructor required by the scheduler/ioc container.
     */
    public APMTask() {
    }
    public APMTask(Dao dao) {
        this.dao = dao;
    }
    public Dao getDao() {
        return dao;
    }
    public void setDao(Dao dao) {
        this.dao = dao;
    }
    // Host IP recorded on every alarm, resolved once at construction time.
    public String hostIp = Ips.hostIp();
    /*
     * (non-Javadoc)
     *
     * @see org.quartz.Job#execute(org.quartz.JobExecutionContext)
     */
    @Override
    public void execute(JobExecutionContext context) throws JobExecutionException {
        try {
            Sigar sigar = new Sigar();
            MemoryGather memory = MemoryGather.gather(sigar);
            // memory checks: JVM heap, physical RAM, swap
            double jvmUsage, ramUsage, swapUsage;
            if ((jvmUsage = memory.getJvm().getUsedPercent()) > config.getInt("jvm.alarm.percent")) {
                alarm(Type.MEM, "内存警告", "JVM", jvmUsage, config.getInt("jvm.alarm.percent"));
            }
            if ((ramUsage = memory.getMem().getUsedPercent()) > config.getInt("ram.alarm.percent")) {
                alarm(Type.MEM, "内存警告", "RAM", ramUsage, config.getInt("ram.alarm.percent"));
            }
            if ((swapUsage = memory.getSwap().getUsed() * 100 / memory.getSwap().getTotal()) > config.getInt("swap.alarm.percent")) {
                alarm(Type.MEM, "内存警告", "SWAP", swapUsage, config.getInt("swap.alarm.percent"));
            }
            CPUGather cpu = CPUGather.gather(sigar);
            // CPU usage = 100 - idle percentage
            // NOTE(review): Type.MEM is passed for the CPU alarm — looks
            // like a copy-paste; confirm whether APMAlarm.Type has a CPU
            // member before changing.
            double cpuUsage;
            if ((cpuUsage = 100 - (cpu.getTimer().getIdle() * 100 / cpu.getTimer().getTotal())) > config.getInt("cpu.alarm.percent")) {
                alarm(Type.MEM, "CPU警告", "CPU", cpuUsage, config.getInt("cpu.alarm.percent"));
            }
            // disk usage per device; getStat() can be null for non-local
            // devices, hence the guard
            List<DISKGather> disks = DISKGather.gather(sigar);
            for (DISKGather disk : disks) {
                if (disk.getStat() != null && disk.getStat().getUsePercent() * 100 > config.getInt("disk.alarm.percent")) {
                    alarm(Type.DISK, "磁盘警告", disk.getConfig().getDevName(), disk.getStat().getUsePercent(), config.getInt("disk.alarm.percent"));
                }
            }
            // network throughput: receive and transmit as percent of link speed
            double niUsage, noUsage;
            NetInterfaceGather ni = NetInterfaceGather.gather(sigar);
            if ((niUsage = ni.getRxbps() * 100 / ni.getStat().getSpeed()) > config.getInt("network.alarm.percent")) {
                alarm(Type.NETWORK, "流量警告", "网卡", niUsage, config.getInt("network.alarm.percent"));
            }
            if ((noUsage = ni.getTxbps() * 100 / ni.getStat().getSpeed()) > config.getInt("network.alarm.percent")) {
                alarm(Type.NETWORK, "流量警告", "网卡", noUsage, config.getInt("network.alarm.percent"));
            }
        } catch (SigarException e) {
            LOG.error(e);
        }
    }
    /**
     * Builds an alarm record, dispatches it over every channel configured
     * for the device ("EMAIL" / "SMS" / "WECHAT"), and persists it when a
     * dao is available.
     *
     * NOTE(review): the channel lookup key is device.toLowerCase() +
     * ".alarm.types" — for non-ASCII device names (e.g. "网卡") this
     * produces an unusual key; confirm the configuration actually defines
     * such entries.
     *
     * @param type alarm category
     * @param title alarm title (user-facing)
     * @param device device name the alarm refers to
     * @param usage measured usage value
     * @param alarmPoint configured threshold that was exceeded
     */
    @Async
    private void alarm(Type type, String title, String device, double usage, int alarmPoint) {
        final APMAlarm alarm = new APMAlarm();
        alarm.setType(type);
        alarm.setIp(hostIp);
        alarm.setMsg(String.format("%s:当前 %s 使用率 %f,高于预警值 %d", title, device, usage, alarmPoint));
        alarm.setTitle(title);
        alarm.setDevice(device);
        alarm.setUsage(usage);
        alarm.setAlarm(alarmPoint);
        String alarmTypes = config.get(device.toLowerCase() + ".alarm.types");
        Lang.each(alarmTypes.split(","), new Each<String>() {
            // NOTE(review): the parameter below shadows the outer method's
            // "type" parameter (Type vs String).
            @Override
            public void invoke(int index, String type, int length) throws ExitLoop, ContinueLoop, LoopException {
                if (Strings.equals(type, "EMAIL")) {// send e-mail
                    sendALarmByEmail(alarm);
                }
                if (Strings.equals(type, "SMS")) {// send SMS (not implemented)
                }
                if (Strings.equals(type, "WECHAT")) {// send WeChat message
                    sendAlarmByWechat(alarm);
                }
            }
        });
        if (dao == null) {
            LOG.debug(alarm);
        } else {
            dao.insert(alarm);
        }
    }
    /**
     * Sends the alarm by e-mail to every resolved listener; null listeners
     * (unknown configured names) are skipped.
     */
    @Async
    private void sendALarmByEmail(final APMAlarm alarm) {
        Lang.each(listeners, new Each<User>() {
            @Override
            public void invoke(int index, User user, int length) throws ExitLoop, ContinueLoop, LoopException {
                if (user == null) {
                    return;
                }
                emailService.sendAlarm(alarm, user.getEmail());
            }
        });
    }
    /**
     * Sends the alarm as a WeChat template message to every resolved
     * listener; null listeners are skipped.
     *
     * @param alarm alarm to send
     */
    @Async
    protected void sendAlarmByWechat(APMAlarm alarm) {
        final Map<String, WxTemplateData> data = new HashMap<String, WxTemplateData>();
        data.put("type", new WxTemplateData(alarm.getTitle()));
        data.put("ip", new WxTemplateData(alarm.getIp()));
        data.put("key", new WxTemplateData(alarm.getDevice()));
        data.put("usage", new WxTemplateData(Numbers.keepPrecision(alarm.getUsage() + "", 2)));
        data.put("alarm", new WxTemplateData(alarm.getAlarm() + ""));
        Lang.each(listeners, new Each<User>() {
            @Override
            public void invoke(int index, User user, int length) throws ExitLoop, ContinueLoop, LoopException {
                if (user == null) {
                    return;
                }
                WxResp resp = api.template_send(user.getOpenid(), "MnNkTihmclGa4OAFelkMwAwxUiKu41hsn2l9fHxLRdA", null, data);
                LOG.debug(resp);
            }
        });
    }
}
|
thunder/thunder-web/src/main/java/club/zhcs/thunder/task/APMTask.java
|
package club.zhcs.thunder.task;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.hyperic.sigar.Sigar;
import org.hyperic.sigar.SigarException;
import org.nutz.aop.interceptor.async.Async;
import org.nutz.dao.Cnd;
import org.nutz.dao.Dao;
import org.nutz.integration.quartz.annotation.Scheduled;
import org.nutz.ioc.impl.PropertiesProxy;
import org.nutz.ioc.loader.annotation.Inject;
import org.nutz.ioc.loader.annotation.IocBean;
import org.nutz.lang.ContinueLoop;
import org.nutz.lang.Each;
import org.nutz.lang.ExitLoop;
import org.nutz.lang.Lang;
import org.nutz.lang.LoopException;
import org.nutz.lang.Strings;
import org.nutz.log.Log;
import org.nutz.log.Logs;
import org.nutz.weixin.bean.WxTemplateData;
import org.nutz.weixin.spi.WxApi2;
import org.nutz.weixin.spi.WxResp;
import org.quartz.Job;
import org.quartz.JobExecutionContext;
import org.quartz.JobExecutionException;
import club.zhcs.thunder.bean.acl.User;
import club.zhcs.thunder.bean.apm.APMAlarm;
import club.zhcs.thunder.bean.apm.APMAlarm.Type;
import club.zhcs.thunder.biz.acl.UserService;
import club.zhcs.thunder.biz.eamil.EmailService;
import club.zhcs.titans.gather.CPUGather;
import club.zhcs.titans.gather.DISKGather;
import club.zhcs.titans.gather.MemoryGather;
import club.zhcs.titans.gather.NetInterfaceGather;
import club.zhcs.titans.utils.common.Ips;
import club.zhcs.titans.utils.common.Numbers;
import com.google.common.collect.Lists;
/**
* @author Kerbores(kerbores@gmail.com)
*
* @project app
*
* @file APMTask.java
*
* @description 告警检测业务
*
* @time 2016年3月15日 上午11:54:46
*
*/
@IocBean(name = "apmTask", fields = "dao", create = "init")
@Scheduled(cron = "*/10 * * * * ? ")
public class APMTask implements Job {
    private static Log LOG = Logs.getLog(APMTask.class);
    private Dao dao;
    @Inject
    PropertiesProxy config;
    @Inject("wxApi")
    WxApi2 api;
    @Inject
    UserService userService;
    @Inject
    EmailService emailService;
    // Users that receive alarm notifications; filled by init() from the
    // comma-separated "alarm.listener" configuration entry.
    List<User> listeners = Lists.newArrayList();
    /**
     * Ioc create-callback: resolves the configured listener names to users.
     * NOTE(review): userService.fetch may return null for an unknown name;
     * the send methods below guard against null entries.
     */
    public void init() {
        String listener = config.get("alarm.listener");
        Lang.each(listener.split(","), new Each<String>() {
            @Override
            public void invoke(int index, String lis, int length) throws ExitLoop, ContinueLoop, LoopException {
                listeners.add(userService.fetch(Cnd.where("name", "=", lis)));
            }
        });
    }
    /**
     * Default constructor required by the scheduler/ioc container.
     */
    public APMTask() {
    }
    public APMTask(Dao dao) {
        this.dao = dao;
    }
    public Dao getDao() {
        return dao;
    }
    public void setDao(Dao dao) {
        this.dao = dao;
    }
    // Host IP recorded on every alarm, resolved once at construction time.
    public String hostIp = Ips.hostIp();
    /*
     * (non-Javadoc)
     *
     * @see org.quartz.Job#execute(org.quartz.JobExecutionContext)
     */
    @Override
    public void execute(JobExecutionContext context) throws JobExecutionException {
        try {
            Sigar sigar = new Sigar();
            MemoryGather memory = MemoryGather.gather(sigar);
            // memory checks: JVM heap, physical RAM, swap
            double jvmUsage, ramUsage, swapUsage;
            if ((jvmUsage = memory.getJvm().getUsedPercent()) > config.getInt("jvm.alarm.percent")) {
                alarm(Type.MEM, "内存警告", "JVM", jvmUsage, config.getInt("jvm.alarm.percent"));
            }
            if ((ramUsage = memory.getMem().getUsedPercent()) > config.getInt("ram.alarm.percent")) {
                alarm(Type.MEM, "内存警告", "RAM", ramUsage, config.getInt("ram.alarm.percent"));
            }
            if ((swapUsage = memory.getSwap().getUsed() * 100 / memory.getSwap().getTotal()) > config.getInt("swap.alarm.percent")) {
                alarm(Type.MEM, "内存警告", "SWAP", swapUsage, config.getInt("swap.alarm.percent"));
            }
            CPUGather cpu = CPUGather.gather(sigar);
            // CPU usage = 100 - idle percentage
            // NOTE(review): Type.MEM is passed for the CPU alarm — looks
            // like a copy-paste; confirm whether APMAlarm.Type has a CPU
            // member before changing.
            double cpuUsage;
            if ((cpuUsage = 100 - (cpu.getTimer().getIdle() * 100 / cpu.getTimer().getTotal())) > config.getInt("cpu.alarm.percent")) {
                alarm(Type.MEM, "CPU警告", "CPU", cpuUsage, config.getInt("cpu.alarm.percent"));
            }
            // disk usage per device
            List<DISKGather> disks = DISKGather.gather(sigar);
            for (DISKGather disk : disks) {
                // BUGFIX: getStat() can be null for non-local devices;
                // guard before dereferencing to avoid an NPE that aborts
                // the whole check run.
                if (disk.getStat() != null && disk.getStat().getUsePercent() * 100 > config.getInt("disk.alarm.percent")) {
                    alarm(Type.DISK, "磁盘警告", disk.getConfig().getDevName(), disk.getStat().getUsePercent(), config.getInt("disk.alarm.percent"));
                }
            }
            // network throughput: receive and transmit as percent of link speed
            double niUsage, noUsage;
            NetInterfaceGather ni = NetInterfaceGather.gather(sigar);
            if ((niUsage = ni.getRxbps() * 100 / ni.getStat().getSpeed()) > config.getInt("network.alarm.percent")) {
                alarm(Type.NETWORK, "流量警告", "网卡", niUsage, config.getInt("network.alarm.percent"));
            }
            if ((noUsage = ni.getTxbps() * 100 / ni.getStat().getSpeed()) > config.getInt("network.alarm.percent")) {
                alarm(Type.NETWORK, "流量警告", "网卡", noUsage, config.getInt("network.alarm.percent"));
            }
        } catch (SigarException e) {
            LOG.error(e);
        }
    }
    /**
     * Builds an alarm record, dispatches it over every channel configured
     * for the device ("EMAIL" / "SMS" / "WECHAT"), and persists it when a
     * dao is available.
     *
     * @param type alarm category
     * @param title alarm title (user-facing)
     * @param device device name the alarm refers to
     * @param usage measured usage value
     * @param alarmPoint configured threshold that was exceeded
     */
    @Async
    private void alarm(Type type, String title, String device, double usage, int alarmPoint) {
        final APMAlarm alarm = new APMAlarm();
        alarm.setType(type);
        alarm.setIp(hostIp);
        alarm.setMsg(String.format("%s:当前 %s 使用率 %f,高于预警值 %d", title, device, usage, alarmPoint));
        alarm.setTitle(title);
        alarm.setDevice(device);
        alarm.setUsage(usage);
        alarm.setAlarm(alarmPoint);
        String alarmTypes = config.get(device.toLowerCase() + ".alarm.types");
        Lang.each(alarmTypes.split(","), new Each<String>() {
            // NOTE(review): the parameter below shadows the outer method's
            // "type" parameter (Type vs String).
            @Override
            public void invoke(int index, String type, int length) throws ExitLoop, ContinueLoop, LoopException {
                if (Strings.equals(type, "EMAIL")) {// send e-mail
                    sendALarmByEmail(alarm);
                }
                if (Strings.equals(type, "SMS")) {// send SMS (not implemented)
                }
                if (Strings.equals(type, "WECHAT")) {// send WeChat message
                    sendAlarmByWechat(alarm);
                }
            }
        });
        if (dao == null) {
            LOG.debug(alarm);
        } else {
            dao.insert(alarm);
        }
    }
    /**
     * Sends the alarm by e-mail to every resolved listener; null listeners
     * (unknown configured names) are skipped.
     */
    @Async
    private void sendALarmByEmail(final APMAlarm alarm) {
        Lang.each(listeners, new Each<User>() {
            @Override
            public void invoke(int index, User user, int length) throws ExitLoop, ContinueLoop, LoopException {
                if (user == null) {
                    return;
                }
                emailService.sendAlarm(alarm, user.getEmail());
            }
        });
    }
    /**
     * Sends the alarm as a WeChat template message to every resolved
     * listener; null listeners are skipped.
     *
     * @param alarm alarm to send
     */
    @Async
    protected void sendAlarmByWechat(APMAlarm alarm) {
        final Map<String, WxTemplateData> data = new HashMap<String, WxTemplateData>();
        data.put("type", new WxTemplateData(alarm.getTitle()));
        data.put("ip", new WxTemplateData(alarm.getIp()));
        data.put("key", new WxTemplateData(alarm.getDevice()));
        data.put("usage", new WxTemplateData(Numbers.keepPrecision(alarm.getUsage() + "", 2)));
        data.put("alarm", new WxTemplateData(alarm.getAlarm() + ""));
        Lang.each(listeners, new Each<User>() {
            @Override
            public void invoke(int index, User user, int length) throws ExitLoop, ContinueLoop, LoopException {
                if (user == null) {
                    return;
                }
                WxResp resp = api.template_send(user.getOpenid(), "MnNkTihmclGa4OAFelkMwAwxUiKu41hsn2l9fHxLRdA", null, data);
                LOG.debug(resp);
            }
        });
    }
}
|
非本地盘的问题
|
thunder/thunder-web/src/main/java/club/zhcs/thunder/task/APMTask.java
|
非本地盘的问题
|
|
Java
|
apache-2.0
|
727c1dc77e143d0bcf6ca6dda512e36767a74753
| 0
|
OlafLee/java-algorithms-implementation,boyfox/java-algorithms-implementation,psadusumilli/java-algorithms-implementation,ahadoo/java_algorithms,crashb29/java-algorithms-implementation,ynagarjuna1995/java-algorithms-implementation,xianSkyKing/java-algorithms-implementation,JeffreyWei/java-algorithms-implementation,samstealth/java-algorithms-implementation,qqqil/java-algorithms-implementation,paulodacosta/java-algorithms-implementation,designreuse/java-algorithms-implementation,Maktub714/java-algorithms-implementation,gcavallanti/java-algorithms-implementation,tarek-salah/tareksalah91-algorithms,mathewdenis/java-algorithms-implementation,cesarmarinhorj/java-algorithms-implementation,Hirama/java-algorithms-implementation,jibaro/java-algorithms-implementation,jbferraz/java-algorithms-implementation,kevindra/java-algorithms-implementation,Ved2302/java-algorithms-implementation,Ab1d/java-algorithms-implementation,vishgupta92/java-algorithms-implementation,meexplorer11/java-algorithms-implementation,wbwmartin/java-algorithms-implementation,Sudarsan-Sridharan/java-algorithms-implementation,phishman3579/java-algorithms-implementation,MENG2010/java-algorithms-implementation,lgeddam/java-algorithms-implementation,helloannali/java-algorithms-implementation,zhongyinzhang/java-algorithms-implementation,pavanmehta91/java-algorithms-implementation,gargas/java-algorithms-implementation,piyushdubey/java-algorithms-implementation,thil1212/java-algorithms-implementation,eGit/java-algorithms-implementation,murari-goswami/java-algorithms-implementation,charanthota/java-algorithms-implementation,dhilipb/java-algorithms-implementation,herowanzhu/java-algorithms-implementation,mciancia/java-algorithms-implementation,barthezzko/java-algorithms-implementation
|
package com.jwetherell.algorithms;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.Set;

import com.jwetherell.algorithms.data_structures.AVLTree;
import com.jwetherell.algorithms.data_structures.BinaryHeap;
import com.jwetherell.algorithms.data_structures.BinarySearchTree;
import com.jwetherell.algorithms.data_structures.Graph;
import com.jwetherell.algorithms.data_structures.Graph.Edge;
import com.jwetherell.algorithms.data_structures.Graph.Vertex;
import com.jwetherell.algorithms.data_structures.HashMap;
import com.jwetherell.algorithms.data_structures.LinkedList;
import com.jwetherell.algorithms.data_structures.Matrix;
import com.jwetherell.algorithms.data_structures.PatriciaTrie;
import com.jwetherell.algorithms.data_structures.Queue;
import com.jwetherell.algorithms.data_structures.RadixTree;
import com.jwetherell.algorithms.data_structures.SegmentTree;
import com.jwetherell.algorithms.data_structures.SkipList;
import com.jwetherell.algorithms.data_structures.SplayTree;
import com.jwetherell.algorithms.data_structures.Stack;
import com.jwetherell.algorithms.data_structures.SuffixTree;
import com.jwetherell.algorithms.data_structures.SuffixTrie;
import com.jwetherell.algorithms.data_structures.Treap;
import com.jwetherell.algorithms.data_structures.Trie;
import com.jwetherell.algorithms.data_structures.TrieMap;
import com.jwetherell.algorithms.graph.BellmanFord;
import com.jwetherell.algorithms.graph.CycleDetection;
import com.jwetherell.algorithms.graph.Dijkstra;
import com.jwetherell.algorithms.graph.FloydWarshall;
import com.jwetherell.algorithms.graph.Johnson;
import com.jwetherell.algorithms.graph.Prim;
import com.jwetherell.algorithms.graph.TopologicalSort;
/**
 * Driver that benchmarks and validates the project's data structures.
 * Each run generates a fresh random data set, exercises every dynamic
 * structure (add/remove in unsorted, reversed, and sorted order) while
 * optionally validating invariants and contents, then exercises the
 * static structures (graph algorithms, matrix, segment/suffix trees).
 */
public class DataStructures {
// How many full benchmark passes main() runs before declaring success.
private static final int NUMBER_OF_TESTS = 100;
// Shared RNG for data generation (unseeded, so each run differs).
private static final Random RANDOM = new Random();
// Number of elements generated per pass.
private static final int ARRAY_SIZE = 100000;
// Test data, regenerated each pass: distinct random Integers...
private static Integer[] unsorted = null;
// ...the same values in reverse insertion order...
private static Integer[] reversed = null;
// ...and the same values in ascending order.
private static Integer[] sorted = null;
// Printable dump of the generated array, shown by handleError() on failure.
private static String string = null;
private static int debug = 1; //debug level. 0=None, 1=Time and Memory (if enabled), 2=Time, Memory, data structure debug
private static boolean debugTime = true; //How much time to: add all, remove all, add all items in reverse order, remove all
private static boolean debugMemory = true; //How much memory is used by the data structure
private static boolean validateStructure = false; //Is the data structure valid (passed invariants) and proper size
private static boolean validateContents = false; //Was the item added/removed really added/removed from the structure
private static final int TESTS = 17; //Number of dynamic data structures to test
// Index of the next slot to fill in testNames/testResults (reset each pass).
private static int test = 0;
// Per-structure display names, parallel to testResults.
private static String[] testNames = new String[TESTS];
// Per-structure results: {addTime, removeTime, addSortedTime, removeSortedTime, memory}.
private static long[][] testResults = new long[TESTS][];
/**
 * Entry point: runs the full benchmark/validation suite NUMBER_OF_TESTS
 * times, stopping early at the first failing pass.
 *
 * @param args unused.
 */
public static void main(String[] args) {
    System.out.println("Starting tests.");
    boolean allPassed = true;
    int iteration = 0;
    // Stop as soon as a pass fails; otherwise run the full set of passes.
    while (iteration < NUMBER_OF_TESTS && allPassed) {
        allPassed = runTests();
        iteration++;
    }
    if (allPassed) System.out.println("Tests finished. All passed.");
    else System.err.println("Tests finished. Detected a failure.");
}
/**
 * One full benchmarking pass: regenerates the random test data, runs every
 * dynamic data-structure test (which record timings into testResults), prints
 * the aggregated results, then runs the static-structure tests.
 *
 * @return true if every test passed; false as soon as one fails (the failing
 *         structure's name is printed to stderr).
 */
private static boolean runTests() {
    test = 0;
    generateTestData();
    boolean passed = true;
    passed = testAVLTree();
    if (!passed) {
        System.err.println("AVL Tree failed.");
        return false;
    }
    passed = testBST();
    if (!passed) {
        System.err.println("BST failed.");
        return false;
    }
    passed = testHeap();
    if (!passed) {
        System.err.println("Heap failed.");
        return false;
    }
    passed = testHashMap();
    if (!passed) {
        System.err.println("Hash Map failed.");
        return false;
    }
    passed = testLinkedList();
    if (!passed) {
        System.err.println("Linked List failed.");
        return false;
    }
    passed = testPatriciaTrie();
    if (!passed) {
        System.err.println("Patricia Trie failed.");
        return false;
    }
    passed = testQueue();
    if (!passed) {
        System.err.println("Queue failed.");
        return false;
    }
    passed = testRadixTree();
    if (!passed) {
        System.err.println("Radix Tree failed.");
        return false;
    }
    // The Skip List test is currently disabled. Its failure check is
    // commented out together with the call: previously the check still ran
    // against the stale result of the preceding test.
    //passed = testSkipList();
    //if (!passed) {
    //    System.err.println("Skip List failed.");
    //    return false;
    //}
    passed = testSplayTree();
    if (!passed) {
        System.err.println("Splay Tree failed.");
        return false;
    }
    passed = testStack();
    if (!passed) {
        System.err.println("Stack failed.");
        return false;
    }
    passed = testTreap();
    if (!passed) {
        System.err.println("Treap failed.");
        return false;
    }
    passed = testTrie();
    if (!passed) {
        System.err.println("Trie failed.");
        return false;
    }
    passed = testTrieMap();
    if (!passed) {
        System.err.println("Trie Map failed.");
        return false;
    }
    if (debugTime && debugMemory) {
        String results = getTestResults(testNames,testResults);
        System.out.println(results);
    }
    //STATIC DATA STRUCTURES
    passed = testGraph();
    if (!passed) {
        System.err.println("Graph failed.");
        return false;
    }
    passed = testMatrix();
    if (!passed) {
        System.err.println("Matrix failed.");
        return false;
    }
    passed = testSegmentTree();
    if (!passed) {
        System.err.println("Segment Tree failed.");
        return false;
    }
    passed = testSuffixTree();
    if (!passed) {
        System.err.println("Suffix Tree failed.");
        return false;
    }
    passed = testSuffixTrie();
    if (!passed) {
        System.err.println("Suffix Trie failed.");
        return false;
    }
    return true;
}

/**
 * Fills {@code unsorted} with ARRAY_SIZE distinct random Integers, then
 * derives {@code reversed} (reverse order) and {@code sorted} (ascending)
 * from it, and records a printable dump in {@code string}.
 */
private static void generateTestData() {
    System.out.println("Generating data.");
    StringBuilder builder = new StringBuilder();
    builder.append("Array=");
    unsorted = new Integer[ARRAY_SIZE];
    // O(1) duplicate detection; the previous version re-scanned the filled
    // prefix for every candidate, which was O(n^2) over 100,000 elements.
    Set<Integer> seen = new HashSet<Integer>();
    for (int i=0; i<unsorted.length; i++) {
        Integer j = RANDOM.nextInt(unsorted.length*10);
        // Re-draw until the value has not been used yet (add() returns
        // false for duplicates).
        while (!seen.add(j)) {
            j = RANDOM.nextInt(unsorted.length*10);
        }
        unsorted[i] = j;
        builder.append(j).append(',');
    }
    builder.append('\n');
    string = builder.toString();
    if (debug>1) System.out.println(string);
    reversed = new Integer[ARRAY_SIZE];
    for (int i=unsorted.length-1, j=0; i>=0; i--, j++) {
        reversed[j] = unsorted[i];
    }
    sorted = Arrays.copyOf(unsorted, unsorted.length);
    Arrays.sort(sorted);
    System.out.println("Generated data.");
}
/**
 * Reports a validation failure: dumps the generated input array followed by
 * the failing structure's textual form to stderr, so the failure can be
 * reproduced from the exact data set.
 *
 * @param failedStructure the structure that failed validation; must be non-null.
 */
private static void handleError(Object failedStructure) {
    // Input data first, then the structure's own rendering of itself.
    System.err.println(string);
    System.err.println(failedStructure.toString());
}
/**
 * Benchmarks and validates the AVL tree. Three phases, each timed and
 * (optionally) memory-profiled: (1) add/remove the unsorted data in input
 * order, (2) add it in reverse order and remove in input order, (3) add and
 * remove the sorted data. Averaged results are recorded into testResults.
 * Fix: the reverse-phase contains-failure message now reads "doesn't exist."
 * like the other two phases (it previously read "doesn't exists.").
 *
 * @return true if every validation passed; false on the first failure.
 */
private static boolean testAVLTree() {
    {
        long count = 0;
        long addTime = 0L;
        long removeTime = 0L;
        long beforeAddTime = 0L;
        long afterAddTime = 0L;
        long beforeRemoveTime = 0L;
        long afterRemoveTime = 0L;
        long memory = 0L;
        long beforeMemory = 0L;
        long afterMemory = 0L;
        //AVL Tree
        if (debug>1) System.out.println("AVL Tree");
        testNames[test] = "AVL Tree";
        count++;
        if (debugMemory) beforeMemory = DataStructures.getMemoryUse();
        if (debugTime) beforeAddTime = System.currentTimeMillis();
        AVLTree<Integer> tree = new AVLTree<Integer>();
        // Phase 1a: add the unsorted data in input order, validating after
        // every insertion when enabled.
        for (int i=0; i<unsorted.length; i++) {
            int item = unsorted[i];
            tree.add(item);
            if (validateStructure && !tree.validate()) {
                System.err.println("YIKES!! AVL Tree isn't valid.");
                handleError(tree);
                return false;
            }
            if (validateStructure && !(tree.getSize()==(i+1))) {
                System.err.println("YIKES!! "+item+" caused a size mismatch.");
                handleError(tree);
                return false;
            }
            if (validateContents && !tree.contains(item)) {
                System.err.println("YIKES!! "+item+" doesn't exist.");
                handleError(tree);
                return false;
            }
        }
        if (debugTime) {
            afterAddTime = System.currentTimeMillis();
            addTime += afterAddTime-beforeAddTime;
            if (debug>0) System.out.println("AVL Tree add time = "+addTime/count+" ms");
        }
        if (debugMemory) {
            afterMemory = DataStructures.getMemoryUse();
            memory += afterMemory-beforeMemory;
            if (debug>0) System.out.println("AVL Tree memory use = "+(memory/count)+" bytes");
        }
        if (debug>1) System.out.println(tree.toString());
        if (debugTime) beforeRemoveTime = System.currentTimeMillis();
        // Phase 1b: remove everything in input order.
        for (int i=0; i<unsorted.length; i++) {
            int item = unsorted[i];
            tree.remove(item);
            if (validateStructure && !tree.validate()) {
                System.err.println("YIKES!! AVL Tree isn't valid.");
                handleError(tree);
                return false;
            }
            if (validateStructure && !(tree.getSize()==unsorted.length-(i+1))) {
                System.err.println("YIKES!! "+item+" caused a size mismatch.");
                handleError(tree);
                return false;
            }
            if (validateContents && tree.contains(item)) {
                System.err.println("YIKES!! "+item+" still exists.");
                handleError(tree);
                return false;
            }
        }
        if (debugTime) {
            afterRemoveTime = System.currentTimeMillis();
            removeTime += afterRemoveTime-beforeRemoveTime;
            if (debug>0) System.out.println("AVL Tree remove time = "+removeTime/count+" ms");
        }
        count++;
        if (debugMemory) beforeMemory = DataStructures.getMemoryUse();
        if (debugTime) beforeAddTime = System.currentTimeMillis();
        // Phase 2a: re-add the same data in reverse order; timings are
        // accumulated so the printed figures are averages over both adds.
        for (int i=unsorted.length-1; i>=0; i--) {
            int item = unsorted[i];
            tree.add(item);
            if (validateStructure && !tree.validate()) {
                System.err.println("YIKES!! AVL Tree isn't valid.");
                handleError(tree);
                return false;
            }
            if (validateStructure && !(tree.getSize()==(unsorted.length-i))) {
                System.err.println("YIKES!! "+item+" caused a size mismatch.");
                handleError(tree);
                return false;
            }
            if (validateContents && !tree.contains(item)) {
                // Message made consistent with the other phases
                // (was "doesn't exists.").
                System.err.println("YIKES!! "+item+" doesn't exist.");
                handleError(tree);
                return false;
            }
        }
        if (debugTime) {
            afterAddTime = System.currentTimeMillis();
            addTime += afterAddTime-beforeAddTime;
            if (debug>0) System.out.println("AVL Tree add time = "+addTime/count+" ms");
        }
        if (debugMemory) {
            afterMemory = DataStructures.getMemoryUse();
            memory += afterMemory-beforeMemory;
            if (debug>0) System.out.println("AVL Tree memory use = "+(memory/count)+" bytes");
        }
        if (debug>1) System.out.println(tree.toString());
        if (debugTime) beforeRemoveTime = System.currentTimeMillis();
        // Phase 2b: remove everything again, in input order.
        for (int i=0; i<unsorted.length; i++) {
            int item = unsorted[i];
            tree.remove(item);
            if (validateStructure && !tree.validate()) {
                System.err.println("YIKES!! AVL Tree isn't valid.");
                handleError(tree);
                return false;
            }
            if (validateStructure && !(tree.getSize()==(unsorted.length-(i+1)))) {
                System.err.println("YIKES!! "+item+" caused a size mismatch.");
                handleError(tree);
                return false;
            }
            if (validateContents && tree.contains(item)) {
                System.err.println("YIKES!! "+item+" still exists.");
                handleError(tree);
                return false;
            }
        }
        if (debugTime) {
            afterRemoveTime = System.currentTimeMillis();
            removeTime += afterRemoveTime-beforeRemoveTime;
            if (debug>0) System.out.println("AVL Tree remove time = "+removeTime/count+" ms");
        }
        //sorted
        // Phase 3: sorted input, timed separately (not averaged with the
        // random-order phases).
        long addSortedTime = 0L;
        long removeSortedTime = 0L;
        long beforeAddSortedTime = 0L;
        long afterAddSortedTime = 0L;
        long beforeRemoveSortedTime = 0L;
        long afterRemoveSortedTime = 0L;
        if (debugMemory) beforeMemory = DataStructures.getMemoryUse();
        if (debugTime) beforeAddSortedTime = System.currentTimeMillis();
        for (int i=0; i<sorted.length; i++) {
            int item = sorted[i];
            tree.add(item);
            if (validateStructure && !tree.validate()) {
                System.err.println("YIKES!! AVL Tree isn't valid.");
                handleError(tree);
                return false;
            }
            if (validateStructure && !(tree.getSize()==(i+1))) {
                System.err.println("YIKES!! "+item+" caused a size mismatch.");
                handleError(tree);
                return false;
            }
            if (validateContents && !tree.contains(item)) {
                System.err.println("YIKES!! "+item+" doesn't exist.");
                handleError(tree);
                return false;
            }
        }
        if (debugTime) {
            afterAddSortedTime = System.currentTimeMillis();
            addSortedTime += afterAddSortedTime-beforeAddSortedTime;
            if (debug>0) System.out.println("AVL Tree add time = "+addSortedTime+" ms");
        }
        if (debugMemory) {
            afterMemory = DataStructures.getMemoryUse();
            memory += afterMemory-beforeMemory;
            if (debug>0) System.out.println("AVL Tree memory use = "+(memory/(count+1))+" bytes");
        }
        if (debug>1) System.out.println(tree.toString());
        if (debugTime) beforeRemoveSortedTime = System.currentTimeMillis();
        // Remove in descending order; after removing sorted[i] the expected
        // size is exactly i.
        for (int i=sorted.length-1; i>=0; i--) {
            int item = sorted[i];
            tree.remove(item);
            if (validateStructure && !tree.validate()) {
                System.err.println("YIKES!! AVL Tree isn't valid.");
                handleError(tree);
                return false;
            }
            if (validateStructure && !(tree.getSize()==i)) {
                System.err.println("YIKES!! "+item+" caused a size mismatch.");
                handleError(tree);
                return false;
            }
            if (validateContents && tree.contains(item)) {
                System.err.println("YIKES!! "+item+" still exists.");
                handleError(tree);
                return false;
            }
        }
        if (debugTime) {
            afterRemoveSortedTime = System.currentTimeMillis();
            removeSortedTime += afterRemoveSortedTime-beforeRemoveSortedTime;
            if (debug>0) System.out.println("AVL Tree remove time = "+removeSortedTime+" ms");
        }
        testResults[test++] = new long[]{addTime/count,removeTime/count,addSortedTime,removeSortedTime,memory/(count+1)};
        if (debug>1) System.out.println();
    }
    return true;
}
/**
 * Benchmarks and validates the binary search tree with each of its three
 * root-selection strategies (FIRST, MIDDLE, RANDOM). The three runs were
 * previously three near-identical copies of the same ~170-line body; they
 * now share {@link #runBstTest}. This also fixes two copy-paste defects:
 * the RANDOM variant's sorted-remove summary was mislabeled "AVL Tree
 * remove time", and some contains-failure messages read "doesn't exists.".
 *
 * @return true if all three variants passed; false on the first failure.
 */
private static boolean testBST() {
    if (!runBstTest("BST (first)", "Binary search tree with first node.", BinarySearchTree.TYPE.FIRST)) return false;
    if (!runBstTest("BST (middle)", "Binary search tree with middle node.", BinarySearchTree.TYPE.MIDDLE)) return false;
    if (!runBstTest("BST (random)", "Binary search tree with random node.", BinarySearchTree.TYPE.RANDOM)) return false;
    return true;
}

/**
 * Runs one full benchmark cycle for a single BST variant: bulk-build from
 * the unsorted data and remove it, re-add the reversed data and remove it
 * (timings averaged over both rounds), then add and remove the sorted data
 * (timed separately). Records averaged results into testResults.
 *
 * @param name        display name used in log/result output, e.g. "BST (first)".
 * @param description debug banner printed when debug&gt;1.
 * @param type        root-selection strategy passed to the BST constructor.
 * @return true if every validation passed; false on the first failure.
 */
private static boolean runBstTest(String name, String description, BinarySearchTree.TYPE type) {
    long count = 0;
    long addTime = 0L;
    long removeTime = 0L;
    long beforeAddTime = 0L;
    long afterAddTime = 0L;
    long beforeRemoveTime = 0L;
    long afterRemoveTime = 0L;
    long memory = 0L;
    long beforeMemory = 0L;
    long afterMemory = 0L;
    if (debug>1) System.out.println(description);
    testNames[test] = name;
    count++;
    if (debugMemory) beforeMemory = DataStructures.getMemoryUse();
    if (debugTime) beforeAddTime = System.currentTimeMillis();
    // Bulk construction from the unsorted data; per-element validation is
    // not possible here, so only contents are checked afterwards.
    BinarySearchTree<Integer> bst = new BinarySearchTree<Integer>(unsorted,type);
    if (validateContents) {
        for (int i=0; i<unsorted.length; i++) {
            int item = unsorted[i];
            if (!bst.contains(item)) {
                System.err.println("YIKES!! "+item+" doesn't exist.");
                handleError(bst);
                return false;
            }
        }
    }
    if (debugTime) {
        afterAddTime = System.currentTimeMillis();
        addTime += afterAddTime-beforeAddTime;
        if (debug>0) System.out.println(name+" add time = "+addTime/count+" ms");
    }
    if (debugMemory) {
        afterMemory = DataStructures.getMemoryUse();
        memory += afterMemory-beforeMemory;
        if (debug>0) System.out.println(name+" memory use = "+(memory/count)+" bytes");
    }
    if (debug>1) System.out.println(bst.toString());
    if (debugTime) beforeRemoveTime = System.currentTimeMillis();
    // Round 1: remove everything in input order, validating after each step.
    for (int i=0; i<unsorted.length; i++) {
        int item = unsorted[i];
        bst.remove(item);
        if (validateStructure && !bst.validate()) {
            System.err.println("YIKES!! "+name+" isn't valid.");
            handleError(bst);
            return false;
        }
        if (validateStructure && !(bst.getSize()==(unsorted.length-(i+1)))) {
            System.err.println("YIKES!! "+item+" caused a size mismatch.");
            handleError(bst);
            return false;
        }
        if (validateContents && bst.contains(item)) {
            System.err.println("YIKES!! "+item+" still exists.");
            handleError(bst);
            return false;
        }
    }
    if (debugTime) {
        afterRemoveTime = System.currentTimeMillis();
        removeTime += afterRemoveTime-beforeRemoveTime;
        if (debug>0) System.out.println(name+" remove time = "+removeTime/count+" ms");
    }
    count++;
    if (debugMemory) beforeMemory = DataStructures.getMemoryUse();
    if (debugTime) beforeAddTime = System.currentTimeMillis();
    // Round 2: re-populate in reverse order; timings accumulate so the
    // printed figures are averages over both rounds.
    bst.addAll(reversed);
    if (validateContents) {
        for (int i=unsorted.length-1; i>=0; i--) {
            int item = unsorted[i];
            if (!bst.contains(item)) {
                System.err.println("YIKES!! "+item+" doesn't exist.");
                handleError(bst);
                return false;
            }
        }
    }
    if (debugTime) {
        afterAddTime = System.currentTimeMillis();
        addTime += afterAddTime-beforeAddTime;
        if (debug>0) System.out.println(name+" add time = "+addTime/count+" ms");
    }
    if (debugMemory) {
        afterMemory = DataStructures.getMemoryUse();
        memory += afterMemory-beforeMemory;
        if (debug>0) System.out.println(name+" memory use = "+(memory/count)+" bytes");
    }
    if (debug>1) System.out.println(bst.toString());
    if (debugTime) beforeRemoveTime = System.currentTimeMillis();
    for (int i=0; i<unsorted.length; i++) {
        int item = unsorted[i];
        bst.remove(item);
        if (validateStructure && !bst.validate()) {
            System.err.println("YIKES!! "+name+" isn't valid.");
            handleError(bst);
            return false;
        }
        if (validateStructure && !(bst.getSize()==(unsorted.length-(i+1)))) {
            System.err.println("YIKES!! "+item+" caused a size mismatch.");
            handleError(bst);
            return false;
        }
        if (validateContents && bst.contains(item)) {
            System.err.println("YIKES!! "+item+" still exists.");
            handleError(bst);
            return false;
        }
    }
    if (debugTime) {
        afterRemoveTime = System.currentTimeMillis();
        removeTime += afterRemoveTime-beforeRemoveTime;
        if (debug>0) System.out.println(name+" remove time = "+removeTime/count+" ms");
    }
    //sorted
    // Round 3: sorted input, timed separately (not averaged with the
    // random-order rounds).
    long addSortedTime = 0L;
    long removeSortedTime = 0L;
    long beforeAddSortedTime = 0L;
    long afterAddSortedTime = 0L;
    long beforeRemoveSortedTime = 0L;
    long afterRemoveSortedTime = 0L;
    if (debugMemory) beforeMemory = DataStructures.getMemoryUse();
    if (debugTime) beforeAddSortedTime = System.currentTimeMillis();
    bst.addAll(sorted);
    if (validateContents) {
        for (int i=sorted.length-1; i>=0; i--) {
            int item = sorted[i];
            if (!bst.contains(item)) {
                System.err.println("YIKES!! "+item+" doesn't exist.");
                handleError(bst);
                return false;
            }
        }
    }
    if (debugTime) {
        afterAddSortedTime = System.currentTimeMillis();
        addSortedTime += afterAddSortedTime-beforeAddSortedTime;
        if (debug>0) System.out.println(name+" add time = "+addSortedTime+" ms");
    }
    if (debugMemory) {
        afterMemory = DataStructures.getMemoryUse();
        memory += afterMemory-beforeMemory;
        if (debug>0) System.out.println(name+" memory use = "+(memory/(count+1))+" bytes");
    }
    if (debug>1) System.out.println(bst.toString());
    if (debugTime) beforeRemoveSortedTime = System.currentTimeMillis();
    // Remove in descending order; after removing sorted[i] the expected
    // size is exactly i.
    for (int i=sorted.length-1; i>=0; i--) {
        int item = sorted[i];
        bst.remove(item);
        if (validateStructure && !bst.validate()) {
            System.err.println("YIKES!! "+name+" isn't valid.");
            handleError(bst);
            return false;
        }
        if (validateStructure && !(bst.getSize()==i)) {
            System.err.println("YIKES!! "+item+" caused a size mismatch.");
            handleError(bst);
            return false;
        }
        if (validateContents && bst.contains(item)) {
            System.err.println("YIKES!! "+item+" still exists.");
            handleError(bst);
            return false;
        }
    }
    if (debugTime) {
        afterRemoveSortedTime = System.currentTimeMillis();
        removeSortedTime += afterRemoveSortedTime-beforeRemoveSortedTime;
        // Was mislabeled "AVL Tree remove time" for the RANDOM variant.
        if (debug>0) System.out.println(name+" remove time = "+removeSortedTime+" ms");
    }
    testResults[test++] = new long[]{addTime/count,removeTime/count,addSortedTime,removeSortedTime,memory/(count+1)};
    if (debug>1) System.out.println();
    return true;
}
private static boolean testGraph() {
{
// UNDIRECTED GRAPH
if (debug>1) System.out.println("Undirected Graph.");
List<Vertex<Integer>> verticies = new ArrayList<Vertex<Integer>>();
Graph.Vertex<Integer> v1 = new Graph.Vertex<Integer>(1);
verticies.add(v1);
Graph.Vertex<Integer> v2 = new Graph.Vertex<Integer>(2);
verticies.add(v2);
Graph.Vertex<Integer> v3 = new Graph.Vertex<Integer>(3);
verticies.add(v3);
Graph.Vertex<Integer> v4 = new Graph.Vertex<Integer>(4);
verticies.add(v4);
Graph.Vertex<Integer> v5 = new Graph.Vertex<Integer>(5);
verticies.add(v5);
Graph.Vertex<Integer> v6 = new Graph.Vertex<Integer>(6);
verticies.add(v6);
List<Edge<Integer>> edges = new ArrayList<Edge<Integer>>();
Graph.Edge<Integer> e1_2 = new Graph.Edge<Integer>(7, v1, v2);
edges.add(e1_2);
Graph.Edge<Integer> e1_3 = new Graph.Edge<Integer>(9, v1, v3);
edges.add(e1_3);
Graph.Edge<Integer> e1_6 = new Graph.Edge<Integer>(14, v1, v6);
edges.add(e1_6);
Graph.Edge<Integer> e2_3 = new Graph.Edge<Integer>(10, v2, v3);
edges.add(e2_3);
Graph.Edge<Integer> e2_4 = new Graph.Edge<Integer>(15, v2, v4);
edges.add(e2_4);
Graph.Edge<Integer> e3_4 = new Graph.Edge<Integer>(11, v3, v4);
edges.add(e3_4);
Graph.Edge<Integer> e3_6 = new Graph.Edge<Integer>(2, v3, v6);
edges.add(e3_6);
Graph.Edge<Integer> e5_6 = new Graph.Edge<Integer>(9, v5, v6);
edges.add(e5_6);
Graph.Edge<Integer> e4_5 = new Graph.Edge<Integer>(6, v4, v5);
edges.add(e4_5);
Graph<Integer> undirected = new Graph<Integer>(verticies,edges);
if (debug>1) System.out.println(undirected.toString());
Graph.Vertex<Integer> start = v1;
if (debug>1) System.out.println("Dijstra's shortest paths of the undirected graph from "+start.getValue());
Map<Graph.Vertex<Integer>, Graph.CostPathPair<Integer>> map1 = Dijkstra.getShortestPaths(undirected, start);
if (debug>1) System.out.println(getPathMapString(start,map1));
Graph.Vertex<Integer> end = v5;
if (debug>1) System.out.println("Dijstra's shortest path of the undirected graph from "+start.getValue()+" to "+end.getValue());
Graph.CostPathPair<Integer> pair1 = Dijkstra.getShortestPath(undirected, start, end);
if (debug>1) {
if (pair1!=null) System.out.println(pair1.toString());
else System.out.println("No path from "+start.getValue()+" to "+end.getValue());
}
start = v1;
if (debug>1) System.out.println("Bellman-Ford's shortest paths of the undirected graph from "+start.getValue());
Map<Graph.Vertex<Integer>, Graph.CostPathPair<Integer>> map2 = BellmanFord.getShortestPaths(undirected, start);
if (debug>1) System.out.println(getPathMapString(start,map2));
end = v5;
if (debug>1) System.out.println("Bellman-Ford's shortest path of the undirected graph from "+start.getValue()+" to "+end.getValue());
Graph.CostPathPair<Integer> pair2 = BellmanFord.getShortestPath(undirected, start, end);
if (debug>1) {
if (pair2!=null) System.out.println(pair2.toString());
else System.out.println("No path from "+start.getValue()+" to "+end.getValue());
}
if (debug>1) System.out.println("Prim's minimum spanning tree of the undirected graph from "+start.getValue());
Graph.CostPathPair<Integer> pair = Prim.getMinimumSpanningTree(undirected, start);
if (debug>1) System.out.println(pair.toString());
if (debug>1) System.out.println();
}
{
// DIRECTED GRAPH
if (debug>1) System.out.println("Directed Graph.");
List<Vertex<Integer>> verticies = new ArrayList<Vertex<Integer>>();
Graph.Vertex<Integer> v1 = new Graph.Vertex<Integer>(1);
verticies.add(v1);
Graph.Vertex<Integer> v2 = new Graph.Vertex<Integer>(2);
verticies.add(v2);
Graph.Vertex<Integer> v3 = new Graph.Vertex<Integer>(3);
verticies.add(v3);
Graph.Vertex<Integer> v4 = new Graph.Vertex<Integer>(4);
verticies.add(v4);
Graph.Vertex<Integer> v5 = new Graph.Vertex<Integer>(5);
verticies.add(v5);
Graph.Vertex<Integer> v6 = new Graph.Vertex<Integer>(6);
verticies.add(v6);
Graph.Vertex<Integer> v7 = new Graph.Vertex<Integer>(7);
verticies.add(v7);
List<Edge<Integer>> edges = new ArrayList<Edge<Integer>>();
Graph.Edge<Integer> e1_2 = new Graph.Edge<Integer>(7, v1, v2);
edges.add(e1_2);
Graph.Edge<Integer> e1_3 = new Graph.Edge<Integer>(9, v1, v3);
edges.add(e1_3);
Graph.Edge<Integer> e1_6 = new Graph.Edge<Integer>(14, v1, v6);
edges.add(e1_6);
Graph.Edge<Integer> e2_3 = new Graph.Edge<Integer>(10, v2, v3);
edges.add(e2_3);
Graph.Edge<Integer> e2_4 = new Graph.Edge<Integer>(15, v2, v4);
edges.add(e2_4);
Graph.Edge<Integer> e3_4 = new Graph.Edge<Integer>(11, v3, v4);
edges.add(e3_4);
Graph.Edge<Integer> e3_6 = new Graph.Edge<Integer>(2, v3, v6);
edges.add(e3_6);
Graph.Edge<Integer> e6_5 = new Graph.Edge<Integer>(9, v6, v5);
edges.add(e6_5);
Graph.Edge<Integer> e4_5 = new Graph.Edge<Integer>(6, v4, v5);
edges.add(e4_5);
Graph.Edge<Integer> e4_7 = new Graph.Edge<Integer>(16, v4, v7);
edges.add(e4_7);
Graph<Integer> directed = new Graph<Integer>(Graph.TYPE.DIRECTED,verticies,edges);
if (debug>1) System.out.println(directed.toString());
Graph.Vertex<Integer> start = v1;
if (debug>1) System.out.println("Dijstra's shortest paths of the directed graph from "+start.getValue());
Map<Graph.Vertex<Integer>, Graph.CostPathPair<Integer>> map = Dijkstra.getShortestPaths(directed, start);
if (debug>1) System.out.println(getPathMapString(start,map));
Graph.Vertex<Integer> end = v5;
if (debug>1) System.out.println("Dijstra's shortest path of the directed graph from "+start.getValue()+" to "+end.getValue());
Graph.CostPathPair<Integer> pair = Dijkstra.getShortestPath(directed, start, end);
if (debug>1) {
if (pair!=null) System.out.println(pair.toString());
else System.out.println("No path from "+start.getValue()+" to "+end.getValue());
}
start = v1;
if (debug>1) System.out.println("Bellman-Ford's shortest paths of the undirected graph from "+start.getValue());
Map<Graph.Vertex<Integer>, Graph.CostPathPair<Integer>> map2 = BellmanFord.getShortestPaths(directed, start);
if (debug>1) System.out.println(getPathMapString(start,map2));
end = v5;
if (debug>1) System.out.println("Bellman-Ford's shortest path of the undirected graph from "+start.getValue()+" to "+end.getValue());
Graph.CostPathPair<Integer> pair2 = BellmanFord.getShortestPath(directed, start, end);
if (debug>1) {
if (pair2!=null) System.out.println(pair2.toString());
else System.out.println("No path from "+start.getValue()+" to "+end.getValue());
}
if (debug>1) System.out.println();
}
{
// DIRECTED GRAPH (WITH NEGATIVE WEIGHTS)
if (debug>1) System.out.println("Undirected Graph with Negative Weights.");
List<Vertex<Integer>> verticies = new ArrayList<Vertex<Integer>>();
Graph.Vertex<Integer> v1 = new Graph.Vertex<Integer>(1);
verticies.add(v1);
Graph.Vertex<Integer> v2 = new Graph.Vertex<Integer>(2);
verticies.add(v2);
Graph.Vertex<Integer> v3 = new Graph.Vertex<Integer>(3);
verticies.add(v3);
Graph.Vertex<Integer> v4 = new Graph.Vertex<Integer>(4);
verticies.add(v4);
List<Edge<Integer>> edges = new ArrayList<Edge<Integer>>();
Graph.Edge<Integer> e1_4 = new Graph.Edge<Integer>(2, v1, v4);
edges.add(e1_4);
Graph.Edge<Integer> e2_1 = new Graph.Edge<Integer>(6, v2, v1);
edges.add(e2_1);
Graph.Edge<Integer> e2_3 = new Graph.Edge<Integer>(3, v2, v3);
edges.add(e2_3);
Graph.Edge<Integer> e3_1 = new Graph.Edge<Integer>(4, v3, v1);
edges.add(e3_1);
Graph.Edge<Integer> e3_4 = new Graph.Edge<Integer>(5, v3, v4);
edges.add(e3_4);
Graph.Edge<Integer> e4_2 = new Graph.Edge<Integer>(-7, v4, v2);
edges.add(e4_2);
Graph.Edge<Integer> e4_3 = new Graph.Edge<Integer>(-3, v4, v3);
edges.add(e4_3);
Graph<Integer> directed = new Graph<Integer>(Graph.TYPE.DIRECTED,verticies,edges);
if (debug>1) System.out.println(directed.toString());
Graph.Vertex<Integer> start = v1;
if (debug>1) System.out.println("Bellman-Ford's shortest paths of the directed graph from "+start.getValue());
Map<Graph.Vertex<Integer>, Graph.CostPathPair<Integer>> map2 = BellmanFord.getShortestPaths(directed, start);
if (debug>1) System.out.println(getPathMapString(start,map2));
Graph.Vertex<Integer> end = v3;
if (debug>1) System.out.println("Bellman-Ford's shortest path of the directed graph from "+start.getValue()+" to "+end.getValue());
Graph.CostPathPair<Integer> pair2 = BellmanFord.getShortestPath(directed, start, end);
if (debug>1) {
if (pair2!=null) System.out.println(pair2.toString());
else System.out.println("No path from "+start.getValue()+" to "+end.getValue());
}
if (debug>1) System.out.println("Johnson's all-pairs shortest path of the directed graph.");
Map<Vertex<Integer>, Map<Vertex<Integer>, Set<Edge<Integer>>>> paths = Johnson.getAllPairsShortestPaths(directed);
if (debug>1) {
if (paths==null) System.out.println("Directed graph contains a negative weight cycle.");
else System.out.println(getPathMapString(paths));
}
if (debug>1) System.out.println("Floyd-Warshall's all-pairs shortest path weights of the directed graph.");
Map<Vertex<Integer>, Map<Vertex<Integer>, Integer>> pathWeights = FloydWarshall.getAllPairsShortestPaths(directed);
if (debug>1) System.out.println(getWeightMapString(pathWeights));
if (debug>1) System.out.println();
}
{
// UNDIRECTED GRAPH
if (debug>1) System.out.println("Undirected Graph cycle check.");
List<Vertex<Integer>> cycledVerticies = new ArrayList<Vertex<Integer>>();
Graph.Vertex<Integer> cv1 = new Graph.Vertex<Integer>(1);
cycledVerticies.add(cv1);
Graph.Vertex<Integer> cv2 = new Graph.Vertex<Integer>(2);
cycledVerticies.add(cv2);
Graph.Vertex<Integer> cv3 = new Graph.Vertex<Integer>(3);
cycledVerticies.add(cv3);
Graph.Vertex<Integer> cv4 = new Graph.Vertex<Integer>(4);
cycledVerticies.add(cv4);
Graph.Vertex<Integer> cv5 = new Graph.Vertex<Integer>(5);
cycledVerticies.add(cv5);
Graph.Vertex<Integer> cv6 = new Graph.Vertex<Integer>(6);
cycledVerticies.add(cv6);
List<Edge<Integer>> cycledEdges = new ArrayList<Edge<Integer>>();
Graph.Edge<Integer> ce1_2 = new Graph.Edge<Integer>(7, cv1, cv2);
cycledEdges.add(ce1_2);
Graph.Edge<Integer> ce2_4 = new Graph.Edge<Integer>(15, cv2, cv4);
cycledEdges.add(ce2_4);
Graph.Edge<Integer> ce3_4 = new Graph.Edge<Integer>(11, cv3, cv4);
cycledEdges.add(ce3_4);
Graph.Edge<Integer> ce3_6 = new Graph.Edge<Integer>(2, cv3, cv6);
cycledEdges.add(ce3_6);
Graph.Edge<Integer> ce5_6 = new Graph.Edge<Integer>(9, cv5, cv6);
cycledEdges.add(ce5_6);
Graph.Edge<Integer> ce4_5 = new Graph.Edge<Integer>(6, cv4, cv5);
cycledEdges.add(ce4_5);
Graph<Integer> undirectedWithCycle = new Graph<Integer>(cycledVerticies,cycledEdges);
if (debug>1) System.out.println(undirectedWithCycle.toString());
if (debug>1) {
System.out.println("Cycle detection of the undirected graph.");
boolean result = CycleDetection.detect(undirectedWithCycle);
System.out.println("result="+result);
System.out.println();
}
List<Vertex<Integer>> verticies = new ArrayList<Vertex<Integer>>();
Graph.Vertex<Integer> v1 = new Graph.Vertex<Integer>(1);
verticies.add(v1);
Graph.Vertex<Integer> v2 = new Graph.Vertex<Integer>(2);
verticies.add(v2);
Graph.Vertex<Integer> v3 = new Graph.Vertex<Integer>(3);
verticies.add(v3);
Graph.Vertex<Integer> v4 = new Graph.Vertex<Integer>(4);
verticies.add(v4);
Graph.Vertex<Integer> v5 = new Graph.Vertex<Integer>(5);
verticies.add(v5);
Graph.Vertex<Integer> v6 = new Graph.Vertex<Integer>(6);
verticies.add(v6);
List<Edge<Integer>> edges = new ArrayList<Edge<Integer>>();
Graph.Edge<Integer> e1_2 = new Graph.Edge<Integer>(7, v1, v2);
edges.add(e1_2);
Graph.Edge<Integer> e2_4 = new Graph.Edge<Integer>(15, v2, v4);
edges.add(e2_4);
Graph.Edge<Integer> e3_4 = new Graph.Edge<Integer>(11, v3, v4);
edges.add(e3_4);
Graph.Edge<Integer> e3_6 = new Graph.Edge<Integer>(2, v3, v6);
edges.add(e3_6);
Graph.Edge<Integer> e4_5 = new Graph.Edge<Integer>(6, v4, v5);
edges.add(e4_5);
Graph<Integer> undirectedWithoutCycle = new Graph<Integer>(verticies,edges);
if (debug>1) System.out.println(undirectedWithoutCycle.toString());
if (debug>1) {
System.out.println("Cycle detection of the undirected graph.");
boolean result = CycleDetection.detect(undirectedWithoutCycle);
System.out.println("result="+result);
System.out.println();
}
}
{
// DIRECTED GRAPH
if (debug>1) System.out.println("Directed Graph topological sort.");
List<Vertex<Integer>> verticies = new ArrayList<Vertex<Integer>>();
Graph.Vertex<Integer> cv1 = new Graph.Vertex<Integer>(1);
verticies.add(cv1);
Graph.Vertex<Integer> cv2 = new Graph.Vertex<Integer>(2);
verticies.add(cv2);
Graph.Vertex<Integer> cv3 = new Graph.Vertex<Integer>(3);
verticies.add(cv3);
Graph.Vertex<Integer> cv4 = new Graph.Vertex<Integer>(4);
verticies.add(cv4);
Graph.Vertex<Integer> cv5 = new Graph.Vertex<Integer>(5);
verticies.add(cv5);
Graph.Vertex<Integer> cv6 = new Graph.Vertex<Integer>(6);
verticies.add(cv6);
List<Edge<Integer>> edges = new ArrayList<Edge<Integer>>();
Graph.Edge<Integer> ce1_2 = new Graph.Edge<Integer>(1, cv1, cv2);
edges.add(ce1_2);
Graph.Edge<Integer> ce2_4 = new Graph.Edge<Integer>(2, cv2, cv4);
edges.add(ce2_4);
Graph.Edge<Integer> ce4_3 = new Graph.Edge<Integer>(3, cv4, cv3);
edges.add(ce4_3);
Graph.Edge<Integer> ce3_6 = new Graph.Edge<Integer>(4, cv3, cv6);
edges.add(ce3_6);
Graph.Edge<Integer> ce5_6 = new Graph.Edge<Integer>(5, cv5, cv6);
edges.add(ce5_6);
Graph.Edge<Integer> ce4_5 = new Graph.Edge<Integer>(6, cv4, cv5);
edges.add(ce4_5);
Graph<Integer> directed = new Graph<Integer>(Graph.TYPE.DIRECTED,verticies,edges);
if (debug>1) System.out.println(directed.toString());
if (debug>1) System.out.println("Topological sort of the directed graph.");
List<Graph.Vertex<Integer>> results = TopologicalSort.sort(directed);
if (debug>1) {
System.out.println("result="+results);
System.out.println();
}
}
return true;
}
/**
 * Exercises {@code BinaryHeap} in both MIN and MAX configurations.
 * <p>
 * Each configuration runs three passes: (1) insert the {@code unsorted}
 * array in order and drain the heap, (2) insert the same data in reverse
 * order and drain again, (3) insert the {@code sorted} array and drain.
 * After every add/remove, optional structure/size/contents checks are run
 * (gated by {@code validateStructure}/{@code validateContents}); the first
 * failure prints a diagnostic, calls {@code handleError}, and returns false.
 * <p>
 * NOTE(review): this method reads and mutates class-level static state
 * ({@code testNames}, {@code test}, {@code testResults}, {@code unsorted},
 * {@code sorted}, {@code debug*} flags) — it is not reentrant and must run
 * single-threaded in the harness's fixed order.
 *
 * @return true if all heap operations validated; false on the first failure.
 */
private static boolean testHeap() {
{
// Accumulators for the MIN-heap runs. 'count' is the number of unsorted
// passes completed; averaged timings/memory are divided by it when reported.
long count = 0;
long addTime = 0L;
long removeTime = 0L;
long beforeAddTime = 0L;
long afterAddTime = 0L;
long beforeRemoveTime = 0L;
long afterRemoveTime = 0L;
long memory = 0L;
long beforeMemory = 0L;
long afterMemory = 0L;
// MIN-HEAP
if (debug>1) System.out.println("Min-Heap.");
testNames[test] = "Min-heap";
count++;
if (debugMemory) beforeMemory = DataStructures.getMemoryUse();
BinaryHeap<Integer> minHeap = new BinaryHeap<Integer>(BinaryHeap.TYPE.MIN);
if (debugTime) beforeAddTime = System.currentTimeMillis();
// Pass 1: insert unsorted values in array order, validating heap
// invariants, expected size, and membership after each add.
for (int i=0; i<unsorted.length; i++) {
int item = unsorted[i];
minHeap.add(item);
if (validateStructure && !minHeap.validate()) {
System.err.println("YIKES!! Min-Heap isn't valid.");
handleError(minHeap);
return false;
}
if (validateStructure && !(minHeap.getSize()==i+1)) {
System.err.println("YIKES!! "+item+" caused a size mismatch.");
handleError(minHeap);
return false;
}
if (validateContents && !minHeap.contains(item)) {
System.err.println("YIKES!! "+item+" doesn't exists.");
handleError(minHeap);
return false;
}
}
if (debugTime) {
afterAddTime = System.currentTimeMillis();
addTime += afterAddTime-beforeAddTime;
if (debug>0) System.out.println("Min-Heap add time = "+addTime/count+" ms");
}
if (debugMemory) {
afterMemory = DataStructures.getMemoryUse();
memory += afterMemory-beforeMemory;
if (debug>0) System.out.println("Min-Heap memory use = "+(memory/count)+" bytes");
}
if (debug>1) System.out.println(minHeap.toString());
if (debugTime) beforeRemoveTime = System.currentTimeMillis();
// Drain the heap completely, validating after each root removal.
for (int i=0; i<unsorted.length; i++) {
int item = minHeap.removeRoot();
if (validateStructure && !minHeap.validate()) {
System.err.println("YIKES!! Min-Heap isn't valid.");
handleError(minHeap);
return false;
}
if (validateStructure && !(minHeap.getSize()==unsorted.length-(i+1))) {
System.err.println("YIKES!! "+item+" caused a size mismatch.");
handleError(minHeap);
return false;
}
if (validateContents && minHeap.contains(item)) {
System.err.println("YIKES!! "+item+" still exists.");
handleError(minHeap);
return false;
}
}
// The heap must be empty once every item has been removed.
if (validateStructure && minHeap.getRootValue()!=null) {
System.err.println("YIKES!! Min-Heap isn't empty.");
handleError(minHeap);
return false;
}
if (debugTime) {
afterRemoveTime = System.currentTimeMillis();
removeTime += afterRemoveTime-beforeRemoveTime;
if (debug>0) System.out.println("Min-Heap remove time = "+removeTime/count+" ms");
}
// Pass 2: same data, inserted in reverse array order, then drained again.
count++;
if (debugMemory) beforeMemory = DataStructures.getMemoryUse();
if (debugTime) beforeAddTime = System.currentTimeMillis();
for (int i=unsorted.length-1; i>=0; i--) {
int item = unsorted[i];
minHeap.add(item);
if (validateStructure && !minHeap.validate()) {
System.err.println("YIKES!! Min-Heap isn't valid.");
handleError(minHeap);
return false;
}
if (validateStructure && !(minHeap.getSize()==unsorted.length-i)) {
System.err.println("YIKES!! "+item+" caused a size mismatch.");
handleError(minHeap);
return false;
}
if (validateContents && !minHeap.contains(item)) {
System.err.println("YIKES!! "+item+" doesn't exists.");
handleError(minHeap);
return false;
}
}
if (debugTime) {
afterAddTime = System.currentTimeMillis();
addTime += afterAddTime-beforeAddTime;
if (debug>0) System.out.println("Min-Heap add time = "+addTime/count+" ms");
}
if (debugMemory) {
afterMemory = DataStructures.getMemoryUse();
memory += afterMemory-beforeMemory;
if (debug>0) System.out.println("Min-Heap memory use = "+(memory/count)+" bytes");
}
if (debug>1) System.out.println(minHeap.toString());
if (debugTime) beforeRemoveTime = System.currentTimeMillis();
for (int i=0; i<unsorted.length; i++) {
int item = minHeap.removeRoot();
if (validateStructure && !minHeap.validate()) {
System.err.println("YIKES!! Min-Heap isn't valid.");
handleError(minHeap);
return false;
}
if (validateStructure && !(minHeap.getSize()==unsorted.length-(i+1))) {
System.err.println("YIKES!! "+item+" caused a size mismatch.");
handleError(minHeap);
return false;
}
if (validateContents && minHeap.contains(item)) {
System.err.println("YIKES!! "+item+" still exists.");
handleError(minHeap);
return false;
}
}
if (validateStructure && minHeap.getRootValue()!=null) {
System.err.println("YIKES!! Min-Heap isn't empty.");
handleError(minHeap);
return false;
}
if (debugTime) {
afterRemoveTime = System.currentTimeMillis();
removeTime += afterRemoveTime-beforeRemoveTime;
if (debug>0) System.out.println("Min-Heap remove time = "+removeTime/count+" ms");
}
//sorted
// Pass 3: already-sorted input, timed into separate accumulators so the
// sorted-input numbers are reported raw rather than averaged with the
// unsorted passes above.
long addSortedTime = 0L;
long removeSortedTime = 0L;
long beforeAddSortedTime = 0L;
long afterAddSortedTime = 0L;
long beforeRemoveSortedTime = 0L;
long afterRemoveSortedTime = 0L;
if (debugMemory) beforeMemory = DataStructures.getMemoryUse();
if (debugTime) beforeAddSortedTime = System.currentTimeMillis();
for (int i=0; i<sorted.length; i++) {
int item = sorted[i];
minHeap.add(item);
if (validateStructure && !minHeap.validate()) {
System.err.println("YIKES!! Min-Heap isn't valid.");
handleError(minHeap);
return false;
}
if (validateStructure && !(minHeap.getSize()==(i+1))) {
System.err.println("YIKES!! "+item+" caused a size mismatch.");
handleError(minHeap);
return false;
}
if (validateContents && !minHeap.contains(item)) {
System.err.println("YIKES!! "+item+" doesn't exist.");
handleError(minHeap);
return false;
}
}
if (debugTime) {
afterAddSortedTime = System.currentTimeMillis();
addSortedTime += afterAddSortedTime-beforeAddSortedTime;
if (debug>0) System.out.println("Min-Heap add time = "+addSortedTime+" ms");
}
if (debugMemory) {
afterMemory = DataStructures.getMemoryUse();
memory += afterMemory-beforeMemory;
// memory now spans three passes, hence the (count+1) divisor here.
if (debug>0) System.out.println("Min-Heap memory use = "+(memory/(count+1))+" bytes");
}
if (debug>1) System.out.println(minHeap.toString());
if (debugTime) beforeRemoveSortedTime = System.currentTimeMillis();
for (int i=sorted.length-1; i>=0; i--) {
int item = minHeap.removeRoot();
if (validateStructure && !minHeap.validate()) {
System.err.println("YIKES!! Min-Heap isn't valid.");
handleError(minHeap);
return false;
}
if (validateStructure && !(minHeap.getSize()==i)) {
System.err.println("YIKES!! "+item+" caused a size mismatch.");
handleError(minHeap);
return false;
}
if (validateContents && minHeap.contains(item)) {
System.err.println("YIKES!! "+item+" still exists.");
handleError(minHeap);
return false;
}
}
if (debugTime) {
afterRemoveSortedTime = System.currentTimeMillis();
removeSortedTime += afterRemoveSortedTime-beforeRemoveSortedTime;
if (debug>0) System.out.println("Min-Heap remove time = "+removeSortedTime+" ms");
}
// Record: averaged unsorted add/remove, raw sorted add/remove, average memory.
testResults[test++] = new long[]{addTime/count,removeTime/count,addSortedTime,removeSortedTime,memory/(count+1)};
if (debug>1) System.out.println();
}
{
// Repeat the same three passes for a MAX-heap with fresh accumulators.
long count = 0;
long addTime = 0L;
long removeTime = 0L;
long beforeAddTime = 0L;
long afterAddTime = 0L;
long beforeRemoveTime = 0L;
long afterRemoveTime = 0L;
long memory = 0L;
long beforeMemory = 0L;
long afterMemory = 0L;
// MAX-HEAP
if (debug>1) System.out.println("Max-Heap.");
testNames[test] = "Max-heap";
count++;
if (debugMemory) beforeMemory = DataStructures.getMemoryUse();
if (debugTime) beforeAddTime = System.currentTimeMillis();
BinaryHeap<Integer> maxHeap = new BinaryHeap<Integer>(BinaryHeap.TYPE.MAX);
for (int i=0; i<unsorted.length; i++) {
int item = unsorted[i];
maxHeap.add(item);
if (validateStructure && !maxHeap.validate()) {
System.err.println("YIKES!! Max-Heap isn't valid.");
handleError(maxHeap);
return false;
}
if (validateStructure && !(maxHeap.getSize()==i+1)) {
System.err.println("YIKES!! "+item+" caused a size mismatch.");
handleError(maxHeap);
return false;
}
if (validateContents && !maxHeap.contains(item)) {
System.err.println("YIKES!! "+item+" doesn't exists.");
handleError(maxHeap);
return false;
}
}
if (debugTime) {
afterAddTime = System.currentTimeMillis();
addTime += afterAddTime-beforeAddTime;
if (debug>0) System.out.println("Max-Heap add time = "+addTime/count+" ms");
}
if (debugMemory) {
afterMemory = DataStructures.getMemoryUse();
memory += afterMemory-beforeMemory;
if (debug>0) System.out.println("Max-Heap memory use = "+(memory/count)+" bytes");
}
if (debug>1) System.out.println(maxHeap.toString());
if (debugTime) beforeRemoveTime = System.currentTimeMillis();
for (int i=0; i<unsorted.length; i++) {
int item = maxHeap.removeRoot();
if (validateStructure && !maxHeap.validate()) {
System.err.println("YIKES!! Max-Heap isn't valid.");
handleError(maxHeap);
return false;
}
if (validateStructure && !(maxHeap.getSize()==unsorted.length-(i+1))) {
System.err.println("YIKES!! "+item+" caused a size mismatch.");
handleError(maxHeap);
return false;
}
if (validateContents && maxHeap.contains(item)) {
System.err.println("YIKES!! "+item+" still exists.");
handleError(maxHeap);
return false;
}
}
if (validateStructure && maxHeap.getRootValue()!=null) {
System.err.println("YIKES!! Max-Heap isn't empty.");
handleError(maxHeap);
return false;
}
if (debugTime) {
afterRemoveTime = System.currentTimeMillis();
removeTime += afterRemoveTime-beforeRemoveTime;
if (debug>0) System.out.println("Max-Heap remove time = "+removeTime/count+" ms");
}
// Pass 2: reverse-order insertion.
count++;
if (debugMemory) beforeMemory = DataStructures.getMemoryUse();
if (debugTime) beforeAddTime = System.currentTimeMillis();
for (int i=unsorted.length-1; i>=0; i--) {
int item = unsorted[i];
maxHeap.add(item);
if (validateStructure && !maxHeap.validate()) {
System.err.println("YIKES!! Max-Heap isn't valid.");
handleError(maxHeap);
return false;
}
if (validateStructure && !(maxHeap.getSize()==unsorted.length-i)) {
System.err.println("YIKES!! "+item+" caused a size mismatch.");
handleError(maxHeap);
return false;
}
if (validateContents && !maxHeap.contains(item)) {
System.err.println("YIKES!! "+item+" doesn't exists.");
handleError(maxHeap);
return false;
}
}
if (debugTime) {
afterAddTime = System.currentTimeMillis();
addTime += afterAddTime-beforeAddTime;
if (debug>0) System.out.println("Max-Heap add time = "+addTime/count+" ms");
}
if (debugMemory) {
afterMemory = DataStructures.getMemoryUse();
memory += afterMemory-beforeMemory;
if (debug>0) System.out.println("Max-Heap memory use = "+(memory/count)+" bytes");
}
if (debug>1) System.out.println(maxHeap.toString());
if (debugTime) beforeRemoveTime = System.currentTimeMillis();
for (int i=0; i<unsorted.length; i++) {
int item = maxHeap.removeRoot();
if (validateStructure && !maxHeap.validate()) {
System.err.println("YIKES!! Max-Heap isn't valid.");
handleError(maxHeap);
return false;
}
if (validateStructure && !(maxHeap.getSize()==unsorted.length-(i+1))) {
System.err.println("YIKES!! "+item+" caused a size mismatch.");
handleError(maxHeap);
return false;
}
if (validateContents && maxHeap.contains(item)) {
System.err.println("YIKES!! "+item+" still exists.");
handleError(maxHeap);
return false;
}
}
if (validateStructure && maxHeap.getRootValue()!=null) {
System.err.println("YIKES!! Max-Heap isn't empty.");
handleError(maxHeap);
return false;
}
if (debugTime) {
afterRemoveTime = System.currentTimeMillis();
removeTime += afterRemoveTime-beforeRemoveTime;
if (debug>0) System.out.println("Max-Heap remove time = "+removeTime/count+" ms");
}
//sorted
// Pass 3: sorted input with its own (non-averaged) timing accumulators.
long addSortedTime = 0L;
long removeSortedTime = 0L;
long beforeAddSortedTime = 0L;
long afterAddSortedTime = 0L;
long beforeRemoveSortedTime = 0L;
long afterRemoveSortedTime = 0L;
if (debugMemory) beforeMemory = DataStructures.getMemoryUse();
if (debugTime) beforeAddSortedTime = System.currentTimeMillis();
for (int i=0; i<sorted.length; i++) {
int item = sorted[i];
maxHeap.add(item);
if (validateStructure && !maxHeap.validate()) {
System.err.println("YIKES!! Max-Heap isn't valid.");
handleError(maxHeap);
return false;
}
if (validateStructure && !(maxHeap.getSize()==(i+1))) {
System.err.println("YIKES!! "+item+" caused a size mismatch.");
handleError(maxHeap);
return false;
}
if (validateContents && !maxHeap.contains(item)) {
System.err.println("YIKES!! "+item+" doesn't exist.");
handleError(maxHeap);
return false;
}
}
if (debugTime) {
afterAddSortedTime = System.currentTimeMillis();
addSortedTime += afterAddSortedTime-beforeAddSortedTime;
if (debug>0) System.out.println("Max-Heap add time = "+addSortedTime+" ms");
}
if (debugMemory) {
afterMemory = DataStructures.getMemoryUse();
memory += afterMemory-beforeMemory;
if (debug>0) System.out.println("Max-Heap memory use = "+(memory/(count+1))+" bytes");
}
if (debug>1) System.out.println(maxHeap.toString());
if (debugTime) beforeRemoveSortedTime = System.currentTimeMillis();
for (int i=sorted.length-1; i>=0; i--) {
int item = maxHeap.removeRoot();
if (validateStructure && !maxHeap.validate()) {
System.err.println("YIKES!! Max-Heap isn't valid.");
handleError(maxHeap);
return false;
}
if (validateStructure && !(maxHeap.getSize()==i)) {
System.err.println("YIKES!! "+item+" caused a size mismatch.");
handleError(maxHeap);
return false;
}
if (validateContents && maxHeap.contains(item)) {
System.err.println("YIKES!! "+item+" still exists.");
handleError(maxHeap);
return false;
}
}
if (debugTime) {
afterRemoveSortedTime = System.currentTimeMillis();
removeSortedTime += afterRemoveSortedTime-beforeRemoveSortedTime;
if (debug>0) System.out.println("Max-Heap remove time = "+removeSortedTime+" ms");
}
// Record results and advance the shared test index.
testResults[test++] = new long[]{addTime/count,removeTime/count,addSortedTime,removeSortedTime,memory/(count+1)};
if (debug>1) System.out.println();
}
return true;
}
/**
 * Exercises the project's {@code HashMap} implementation with
 * Integer-&gt;String entries.
 * <p>
 * Three passes mirror the other structure tests: insert the {@code unsorted}
 * array in order and remove everything, insert in reverse order and remove,
 * then insert the {@code sorted} array and remove. Size and membership are
 * checked after each put/remove (gated by {@code validateStructure} and
 * {@code validateContents}); the first failure prints a diagnostic, calls
 * {@code handleError}, and returns false.
 * <p>
 * NOTE(review): reads/mutates class-level static state ({@code testNames},
 * {@code test}, {@code testResults}, {@code unsorted}, {@code sorted},
 * {@code debug*} flags) — not reentrant.
 *
 * @return true if all map operations validated; false on the first failure.
 */
private static boolean testHashMap() {
// Passed to the HashMap constructor below; despite the name "key" this is
// presumably a capacity/size hint for the custom HashMap — TODO confirm
// against the HashMap(int) constructor.
int key = unsorted.length/2;
{
// Timing/memory accumulators; unsorted-pass numbers are averaged by 'count'.
long count = 0;
long addTime = 0L;
long removeTime = 0L;
long beforeAddTime = 0L;
long afterAddTime = 0L;
long beforeRemoveTime = 0L;
long afterRemoveTime = 0L;
long memory = 0L;
long beforeMemory = 0L;
long afterMemory = 0L;
// Hash Map
if (debug>1) System.out.println("Hash Map.");
testNames[test] = "Hash Map";
count++;
if (debugMemory) beforeMemory = DataStructures.getMemoryUse();
if (debugTime) beforeAddTime = System.currentTimeMillis();
HashMap<Integer,String> hash = new HashMap<Integer,String>(key);
// Pass 1: put unsorted keys in array order (value = String form of the key),
// validating size and membership after each put.
for (int i=0; i<unsorted.length; i++) {
int item = unsorted[i];
String string = String.valueOf(item);
hash.put(item, string);
if (validateStructure && !(hash.getSize()==(i+1))) {
System.err.println("YIKES!! "+item+" caused a size mismatch.");
handleError(hash);
return false;
}
if (validateContents && !hash.contains(item)) {
System.err.println("YIKES!! "+item+" doesn't exists.");
handleError(hash);
return false;
}
}
if (debugTime) {
afterAddTime = System.currentTimeMillis();
addTime += afterAddTime-beforeAddTime;
if (debug>0) System.out.println("Hash Map add time = "+addTime/count+" ms");
}
if (debugMemory) {
afterMemory = DataStructures.getMemoryUse();
memory += afterMemory-beforeMemory;
if (debug>0) System.out.println("Hash Map memory use = "+(memory/count)+" bytes");
}
if (debug>1) System.out.println(hash.toString());
if (debugTime) beforeRemoveTime = System.currentTimeMillis();
// Remove every key again, validating shrinking size and absence.
for (int i=0; i<unsorted.length; i++) {
int item = unsorted[i];
hash.remove(item);
if (validateStructure && !(hash.getSize()==(unsorted.length-(i+1)))) {
System.err.println("YIKES!! "+item+" caused a size mismatch.");
handleError(hash);
return false;
}
if (validateContents && hash.contains(item)) {
System.err.println("YIKES!! "+item+" still exists.");
handleError(hash);
return false;
}
}
if (debugTime) {
afterRemoveTime = System.currentTimeMillis();
removeTime += afterRemoveTime-beforeRemoveTime;
if (debug>0) System.out.println("Hash Map remove time = "+removeTime/count+" ms");
}
// Pass 2: same keys inserted in reverse order, then removed in reverse order.
count++;
if (debugMemory) beforeMemory = DataStructures.getMemoryUse();
if (debugTime) beforeAddTime = System.currentTimeMillis();
for (int i=unsorted.length-1; i>=0; i--) {
int item = unsorted[i];
String string = String.valueOf(item);
hash.put(item,string);
if (validateStructure && !(hash.getSize()==(unsorted.length-i))) {
System.err.println("YIKES!! "+item+" caused a size mismatch.");
handleError(hash);
return false;
}
if (validateContents && !hash.contains(item)) {
System.err.println("YIKES!! "+item+" doesn't exists.");
handleError(hash);
return false;
}
}
if (debugTime) {
afterAddTime = System.currentTimeMillis();
addTime += afterAddTime-beforeAddTime;
if (debug>0) System.out.println("Hash Map add time = "+addTime/count+" ms");
}
if (debugMemory) {
afterMemory = DataStructures.getMemoryUse();
memory += afterMemory-beforeMemory;
if (debug>0) System.out.println("Hash Map memory use = "+(memory/count)+" bytes");
}
if (debug>1) System.out.println(hash.toString());
if (debugTime) beforeRemoveTime = System.currentTimeMillis();
for (int i=unsorted.length-1; i>=0; i--) {
int item = unsorted[i];
hash.remove(item);
if (validateStructure && !(hash.getSize()==i)) {
System.err.println("YIKES!! "+item+" caused a size mismatch.");
handleError(hash);
return false;
}
if (validateContents && hash.contains(item)) {
System.err.println("YIKES!! "+item+" still exists.");
handleError(hash);
return false;
}
}
if (debugTime) {
afterRemoveTime = System.currentTimeMillis();
removeTime += afterRemoveTime-beforeRemoveTime;
if (debug>0) System.out.println("Hash Map remove time = "+removeTime/count+" ms");
}
//sorted
// Pass 3: already-sorted keys, timed into separate (non-averaged) accumulators.
long addSortedTime = 0L;
long removeSortedTime = 0L;
long beforeAddSortedTime = 0L;
long afterAddSortedTime = 0L;
long beforeRemoveSortedTime = 0L;
long afterRemoveSortedTime = 0L;
if (debugMemory) beforeMemory = DataStructures.getMemoryUse();
if (debugTime) beforeAddSortedTime = System.currentTimeMillis();
for (int i=0; i<sorted.length; i++) {
int item = sorted[i];
String string = String.valueOf(item);
hash.put(item,string);
if (validateStructure && !(hash.getSize()==(i+1))) {
System.err.println("YIKES!! "+item+" caused a size mismatch.");
handleError(hash);
return false;
}
if (validateContents && !hash.contains(item)) {
System.err.println("YIKES!! "+item+" doesn't exist.");
handleError(hash);
return false;
}
}
if (debugTime) {
afterAddSortedTime = System.currentTimeMillis();
addSortedTime += afterAddSortedTime-beforeAddSortedTime;
if (debug>0) System.out.println("Hash Map add time = "+addSortedTime+" ms");
}
if (debugMemory) {
afterMemory = DataStructures.getMemoryUse();
memory += afterMemory-beforeMemory;
// memory spans three passes, hence the (count+1) divisor.
if (debug>0) System.out.println("Hash Map memory use = "+(memory/(count+1))+" bytes");
}
if (debug>1) System.out.println(hash.toString());
if (debugTime) beforeRemoveSortedTime = System.currentTimeMillis();
for (int i=sorted.length-1; i>=0; i--) {
int item = sorted[i];
hash.remove(item);
if (validateStructure && !(hash.getSize()==i)) {
System.err.println("YIKES!! "+item+" caused a size mismatch.");
handleError(hash);
return false;
}
if (validateContents && hash.contains(item)) {
System.err.println("YIKES!! "+item+" still exists.");
handleError(hash);
return false;
}
}
if (debugTime) {
afterRemoveSortedTime = System.currentTimeMillis();
removeSortedTime += afterRemoveSortedTime-beforeRemoveSortedTime;
if (debug>0) System.out.println("Hash Map remove time = "+removeSortedTime+" ms");
}
// Record averaged unsorted timings, raw sorted timings, and average memory;
// advance the shared test index.
testResults[test++] = new long[]{addTime/count,removeTime/count,addSortedTime,removeSortedTime,memory/(count+1)};
if (debug>1) System.out.println();
}
return true;
}
private static boolean testLinkedList() {
{
long count = 0;
long addTime = 0L;
long removeTime = 0L;
long beforeAddTime = 0L;
long afterAddTime = 0L;
long beforeRemoveTime = 0L;
long afterRemoveTime = 0L;
long memory = 0L;
long beforeMemory = 0L;
long afterMemory = 0L;
// Linked List
if (debug>1) System.out.println("Linked List.");
testNames[test] = "Linked List";
count++;
if (debugMemory) beforeMemory = DataStructures.getMemoryUse();
if (debugTime) beforeAddTime = System.currentTimeMillis();
LinkedList<Integer> list = new LinkedList<Integer>();
for (int i=0; i<unsorted.length; i++) {
int item = unsorted[i];
list.add(item);
if (validateContents && !list.contains(item)) {
System.err.println("YIKES!! "+item+" doesn't exists.");
handleError(list);
return false;
}
if (validateStructure && !(list.getSize()==i+1)) {
System.err.println("YIKES!! "+item+" caused a size mismatch.");
handleError(list);
return false;
}
}
if (debugTime) {
afterAddTime = System.currentTimeMillis();
addTime += afterAddTime-beforeAddTime;
if (debug>0) System.out.println("Linked List add time = "+addTime/count+" ms");
}
if (debugMemory) {
afterMemory = DataStructures.getMemoryUse();
memory += afterMemory-beforeMemory;
if (debug>0) System.out.println("Linked List memory use = "+(memory/count)+" bytes");
}
if (debug>1) System.out.println(list.toString());
if (debugTime) beforeRemoveTime = System.currentTimeMillis();
for (int i=0; i<unsorted.length; i++) {
int item = unsorted[i];
list.remove(item);
if (validateContents && list.contains(item)) {
System.err.println("YIKES!! "+item+" still exists.");
handleError(list);
return false;
}
if (validateStructure && !(list.getSize()==unsorted.length-(i+1))) {
System.err.println("YIKES!! "+item+" caused a size mismatch.");
handleError(list);
return false;
}
}
if (debugTime) {
afterRemoveTime = System.currentTimeMillis();
removeTime += afterRemoveTime-beforeRemoveTime;
if (debug>0) System.out.println("Linked List remove time = "+removeTime/count+" ms");
}
count++;
if (debugMemory) beforeMemory = DataStructures.getMemoryUse();
if (debugTime) beforeAddTime = System.currentTimeMillis();
for (int i=unsorted.length-1; i>=0; i--) {
int item = unsorted[i];
list.add(item);
if (validateContents && !list.contains(item)) {
System.err.println("YIKES!! "+item+" doesn't exists.");
handleError(list);
return false;
}
if (validateStructure && !(list.getSize()==unsorted.length-i)) {
System.err.println("YIKES!! "+item+" caused a size mismatch.");
handleError(list);
return false;
}
}
if (debugTime) {
afterAddTime = System.currentTimeMillis();
addTime += afterAddTime-beforeAddTime;
if (debug>0) System.out.println("Linked List add time = "+addTime/count+" ms");
}
if (debugMemory) {
afterMemory = DataStructures.getMemoryUse();
memory += afterMemory-beforeMemory;
if (debug>0) System.out.println("Linked List memory use = "+(memory/count)+" bytes");
}
if (debug>1) System.out.println(list.toString());
if (debugTime) beforeRemoveTime = System.currentTimeMillis();
for (int i=0; i<unsorted.length; i++) {
int item = unsorted[i];
list.remove(item);
if (validateContents && list.contains(item)) {
System.err.println("YIKES!! "+item+" still exists.");
handleError(list);
return false;
}
if (validateStructure && !(list.getSize()==unsorted.length-(i+1))) {
System.err.println("YIKES!! "+item+" caused a size mismatch.");
handleError(list);
return false;
}
}
if (debugTime) {
afterRemoveTime = System.currentTimeMillis();
removeTime += afterRemoveTime-beforeRemoveTime;
if (debug>0) System.out.println("Linked List remove time = "+removeTime/count+" ms");
}
//sorted
long addSortedTime = 0L;
long removeSortedTime = 0L;
long beforeAddSortedTime = 0L;
long afterAddSortedTime = 0L;
long beforeRemoveSortedTime = 0L;
long afterRemoveSortedTime = 0L;
if (debugMemory) beforeMemory = DataStructures.getMemoryUse();
if (debugTime) beforeAddSortedTime = System.currentTimeMillis();
for (int i=0; i<sorted.length; i++) {
int item = sorted[i];
list.add(item);
if (validateStructure && !(list.getSize()==(i+1))) {
System.err.println("YIKES!! "+item+" caused a size mismatch.");
handleError(list);
return false;
}
if (validateContents && !list.contains(item)) {
System.err.println("YIKES!! "+item+" doesn't exist.");
handleError(list);
return false;
}
}
if (debugTime) {
afterAddSortedTime = System.currentTimeMillis();
addSortedTime += afterAddSortedTime-beforeAddSortedTime;
if (debug>0) System.out.println("Linked List add time = "+addSortedTime+" ms");
}
if (debugMemory) {
afterMemory = DataStructures.getMemoryUse();
memory += afterMemory-beforeMemory;
if (debug>0) System.out.println("Linked List memory use = "+(memory/(count+1))+" bytes");
}
if (debug>1) System.out.println(list.toString());
if (debugTime) beforeRemoveSortedTime = System.currentTimeMillis();
for (int i=sorted.length-1; i>=0; i--) {
int item = sorted[i];
list.remove(item);
if (validateStructure && !(list.getSize()==i)) {
System.err.println("YIKES!! "+item+" caused a size mismatch.");
handleError(list);
return false;
}
if (validateContents && list.contains(item)) {
System.err.println("YIKES!! "+item+" still exists.");
handleError(list);
return false;
}
}
if (debugTime) {
afterRemoveSortedTime = System.currentTimeMillis();
removeSortedTime += afterRemoveSortedTime-beforeRemoveSortedTime;
if (debug>0) System.out.println("Linked List remove time = "+removeSortedTime+" ms");
}
testResults[test++] = new long[]{addTime/count,removeTime/count,addSortedTime,removeSortedTime,memory/(count+1)};
if (debug>1) System.out.println();
}
return true;
}
/**
 * Exercises the Matrix structure: multiplication of non-square operands,
 * self-subtraction, self-addition, and a square multiplication.
 * Results are only printed (when debug > 1), never asserted, so this test
 * can only fail by throwing.
 *
 * @return always true
 */
private static boolean testMatrix() {
    {
        // MATRIX
        if (debug > 1) System.out.println("Matrix.");

        // 4x3 left operand, populated from a literal value table.
        Integer[][] values1 = { { 14, 9, 3 }, { 2, 11, 15 }, { 0, 12, 17 }, { 5, 2, 3 } };
        Matrix<Integer> matrix1 = new Matrix<Integer>(4, 3);
        for (int row = 0; row < values1.length; row++) {
            for (int col = 0; col < values1[row].length; col++) {
                matrix1.set(row, col, values1[row][col]);
            }
        }

        // 3x2 right operand.
        Integer[][] values2 = { { 12, 25 }, { 9, 10 }, { 8, 5 } };
        Matrix<Integer> matrix2 = new Matrix<Integer>(3, 2);
        for (int row = 0; row < values2.length; row++) {
            for (int col = 0; col < values2[row].length; col++) {
                matrix2.set(row, col, values2[row][col]);
            }
        }

        if (debug > 1) System.out.println("Matrix multiplication.");
        Matrix<Integer> matrix3 = matrix1.multiply(matrix2);
        if (debug > 1) System.out.println(matrix3);

        // 2x2 matrix filled with a running counter 0..3.
        int rows = 2;
        int cols = 2;
        int counter = 0;
        Matrix<Integer> matrix4 = new Matrix<Integer>(rows, cols);
        for (int row = 0; row < rows; row++) {
            for (int col = 0; col < cols; col++) {
                matrix4.set(row, col, counter++);
            }
        }

        if (debug > 1) System.out.println("Matrix subtraction.");
        Matrix<Integer> matrix5 = matrix4.subtract(matrix4);
        if (debug > 1) System.out.println(matrix5);

        if (debug > 1) System.out.println("Matrix addition.");
        Matrix<Integer> matrix6 = matrix4.add(matrix4);
        if (debug > 1) System.out.println(matrix6);

        // Two identical 2x2 operands for the final multiplication check.
        Integer[][] values3 = { { 1, 2 }, { 3, 4 } };
        Matrix<Integer> matrix7 = new Matrix<Integer>(2, 2);
        Matrix<Integer> matrix8 = new Matrix<Integer>(2, 2);
        for (int row = 0; row < values3.length; row++) {
            for (int col = 0; col < values3[row].length; col++) {
                matrix7.set(row, col, values3[row][col]);
                matrix8.set(row, col, values3[row][col]);
            }
        }

        if (debug > 1) System.out.println("Matrix multiplication.");
        Matrix<Integer> matrix9 = matrix7.multiply(matrix8);
        if (debug > 1) System.out.println(matrix9);
    }
    return true;
}
/**
 * Benchmarks and validates the PatriciaTrie using the string form of each
 * value in the global 'unsorted' and 'sorted' arrays.  Three passes are run
 * (unsorted in given order, unsorted in reverse order, sorted); after every
 * add/remove the trie's size and membership are checked.  Unsorted timings
 * are averaged over the passes, sorted timings are reported raw, and results
 * go into the global testNames/testResults tables.
 *
 * Fixed: sorted-pass log labels previously read "Patricia Tree" instead of
 * "Patricia Trie", and one diagnostic read "doesn't exists."
 *
 * @return true on success, false (after handleError) on any mismatch
 */
private static boolean testPatriciaTrie() {
    {
        // Accumulators for averaged timing/memory across the unsorted passes.
        long count = 0;
        long addTime = 0L;
        long removeTime = 0L;
        long beforeAddTime = 0L;
        long afterAddTime = 0L;
        long beforeRemoveTime = 0L;
        long afterRemoveTime = 0L;
        long memory = 0L;
        long beforeMemory = 0L;
        long afterMemory = 0L;
        //Patricia Trie
        if (debug>1) System.out.println("Patricia Trie.");
        testNames[test] = "Patricia Trie";
        // Pass 1: insert the unsorted values in the order given.
        count++;
        if (debugMemory) beforeMemory = DataStructures.getMemoryUse();
        if (debugTime) beforeAddTime = System.currentTimeMillis();
        PatriciaTrie<String> trie = new PatriciaTrie<String>();
        for (int i=0; i<unsorted.length; i++) {
            int item = unsorted[i];
            String string = String.valueOf(item);
            trie.add(string);
            if (validateStructure && !(trie.getSize()==(i+1))) {
                System.err.println("YIKES!! "+item+" caused a size mismatch.");
                handleError(trie);
                return false;
            }
            if (validateContents && !trie.contains(string)) {
                System.err.println("YIKES!! "+string+" doesn't exist.");
                handleError(trie);
                return false;
            }
        }
        if (debugTime) {
            afterAddTime = System.currentTimeMillis();
            addTime += afterAddTime-beforeAddTime;
            if (debug>0) System.out.println("Patricia Trie add time = "+addTime/count+" ms");
        }
        if (debugMemory) {
            afterMemory = DataStructures.getMemoryUse();
            memory += afterMemory-beforeMemory;
            if (debug>0) System.out.println("Patricia Trie memory use = "+(memory/count)+" bytes");
        }
        if (debug>1) System.out.println(trie.toString());
        // Remove the same keys in insertion order.
        if (debugTime) beforeRemoveTime = System.currentTimeMillis();
        for (int i=0; i<unsorted.length; i++) {
            int item = unsorted[i];
            String string = String.valueOf(item);
            trie.remove(string);
            if (validateStructure && !(trie.getSize()==unsorted.length-(i+1))) {
                System.err.println("YIKES!! "+item+" caused a size mismatch.");
                handleError(trie);
                return false;
            }
            if (validateContents && trie.contains(string)) {
                System.err.println("YIKES!! "+string+" still exists.");
                handleError(trie);
                return false;
            }
        }
        if (debugTime) {
            afterRemoveTime = System.currentTimeMillis();
            removeTime += afterRemoveTime-beforeRemoveTime;
            if (debug>0) System.out.println("Patricia Trie remove time = "+removeTime/count+" ms");
        }
        // Pass 2: insert the unsorted values in reverse order.
        count++;
        if (debugMemory) beforeMemory = DataStructures.getMemoryUse();
        if (debugTime) beforeAddTime = System.currentTimeMillis();
        for (int i=unsorted.length-1; i>=0; i--) {
            int item = unsorted[i];
            String string = String.valueOf(item);
            trie.add(string);
            if (validateStructure && !(trie.getSize()==(unsorted.length-i))) {
                System.err.println("YIKES!! "+string+" caused a size mismatch.");
                handleError(trie);
                return false;
            }
            if (validateContents && !trie.contains(string)) {
                System.err.println("YIKES!! "+string+" doesn't exist.");
                handleError(trie);
                return false;
            }
        }
        if (debugTime) {
            afterAddTime = System.currentTimeMillis();
            addTime += afterAddTime-beforeAddTime;
            if (debug>0) System.out.println("Patricia Trie add time = "+addTime/count+" ms");
        }
        if (debugMemory) {
            afterMemory = DataStructures.getMemoryUse();
            memory += afterMemory-beforeMemory;
            if (debug>0) System.out.println("Patricia Trie memory use = "+(memory/count)+" bytes");
        }
        if (debug>1) System.out.println(trie.toString());
        if (debugTime) beforeRemoveTime = System.currentTimeMillis();
        for (int i=0; i<unsorted.length; i++) {
            int item = unsorted[i];
            String string = String.valueOf(item);
            trie.remove(string);
            if (validateStructure && !(trie.getSize()==(unsorted.length-(i+1)))) {
                System.err.println("YIKES!! "+string+" caused a size mismatch.");
                handleError(trie);
                return false;
            }
            if (validateContents && trie.contains(string)) {
                System.err.println("YIKES!! "+string+" still exists.");
                handleError(trie);
                return false;
            }
        }
        if (debugTime) {
            afterRemoveTime = System.currentTimeMillis();
            removeTime += afterRemoveTime-beforeRemoveTime;
            if (debug>0) System.out.println("Patricia Trie remove time = "+removeTime/count+" ms");
        }
        //sorted
        // Pass 3: sorted input; timings here are reported raw, not averaged.
        long addSortedTime = 0L;
        long removeSortedTime = 0L;
        long beforeAddSortedTime = 0L;
        long afterAddSortedTime = 0L;
        long beforeRemoveSortedTime = 0L;
        long afterRemoveSortedTime = 0L;
        if (debugMemory) beforeMemory = DataStructures.getMemoryUse();
        if (debugTime) beforeAddSortedTime = System.currentTimeMillis();
        for (int i=0; i<sorted.length; i++) {
            int item = sorted[i];
            String string = String.valueOf(item);
            trie.add(string);
            if (validateStructure && !(trie.getSize()==(i+1))) {
                System.err.println("YIKES!! "+item+" caused a size mismatch.");
                handleError(trie);
                return false;
            }
            if (validateContents && !trie.contains(string)) {
                System.err.println("YIKES!! "+item+" doesn't exist.");
                handleError(trie);
                return false;
            }
        }
        if (debugTime) {
            afterAddSortedTime = System.currentTimeMillis();
            addSortedTime += afterAddSortedTime-beforeAddSortedTime;
            if (debug>0) System.out.println("Patricia Trie add time = "+addSortedTime+" ms");
        }
        if (debugMemory) {
            afterMemory = DataStructures.getMemoryUse();
            memory += afterMemory-beforeMemory;
            if (debug>0) System.out.println("Patricia Trie memory use = "+(memory/(count+1))+" bytes");
        }
        if (debug>1) System.out.println(trie.toString());
        // Remove the sorted keys from the high end down.
        if (debugTime) beforeRemoveSortedTime = System.currentTimeMillis();
        for (int i=sorted.length-1; i>=0; i--) {
            int item = sorted[i];
            String string = String.valueOf(item);
            trie.remove(string);
            if (validateStructure && !(trie.getSize()==i)) {
                System.err.println("YIKES!! "+item+" caused a size mismatch.");
                handleError(trie);
                return false;
            }
            if (validateContents && trie.contains(string)) {
                System.err.println("YIKES!! "+item+" still exists.");
                handleError(trie);
                return false;
            }
        }
        if (debugTime) {
            afterRemoveSortedTime = System.currentTimeMillis();
            removeSortedTime += afterRemoveSortedTime-beforeRemoveSortedTime;
            if (debug>0) System.out.println("Patricia Trie remove time = "+removeSortedTime+" ms");
        }
        // Record: averaged unsorted add/remove, raw sorted add/remove, averaged memory.
        testResults[test++] = new long[]{addTime/count,removeTime/count,addSortedTime,removeSortedTime,memory/(count+1)};
        if (debug>1) System.out.println();
    }
    return true;
}
/**
 * Benchmarks and validates the Queue: enqueues/dequeues the global 'unsorted'
 * values (given order, then reverse order) and then the global 'sorted'
 * values, checking size and membership after every operation.  Unsorted
 * timings are averaged over the passes, sorted timings are reported raw, and
 * results are accumulated into the global testNames/testResults tables.
 *
 * @return true on success, false (after handleError) on any mismatch
 */
private static boolean testQueue() {
{
// Accumulators for averaged timing/memory across the unsorted passes.
long count = 0;
long addTime = 0L;
long removeTime = 0L;
long beforeAddTime = 0L;
long afterAddTime = 0L;
long beforeRemoveTime = 0L;
long afterRemoveTime = 0L;
long memory = 0L;
long beforeMemory = 0L;
long afterMemory = 0L;
// Queue
if (debug>1) System.out.println("Queue.");
testNames[test] = "Queue";
// Pass 1: enqueue the unsorted values in the order given.
count++;
if (debugMemory) beforeMemory = DataStructures.getMemoryUse();
if (debugTime) beforeAddTime = System.currentTimeMillis();
Queue<Integer> queue = new Queue<Integer>();
for (int i=0; i<unsorted.length; i++) {
int item = unsorted[i];
queue.enqueue(item);
if (validateStructure && !(queue.getSize()==i+1)) {
System.err.println("YIKES!! "+item+" caused a size mismatch.");
handleError(queue);
return false;
}
if (validateContents && !queue.contains(item)) {
System.err.println("YIKES!! "+item+" doesn't exist.");
handleError(queue);
return false;
}
}
if (debugTime) {
afterAddTime = System.currentTimeMillis();
addTime += afterAddTime-beforeAddTime;
if (debug>0) System.out.println("Queue add time = "+addTime/count+" ms");
}
if (debugMemory) {
afterMemory = DataStructures.getMemoryUse();
memory += afterMemory-beforeMemory;
if (debug>0) System.out.println("Queue memory use = "+(memory/count)+" bytes");
}
if (debug>1) System.out.println(queue.toString());
// Drain the queue.  Snapshot the size first: getSize() shrinks as we dequeue.
if (debugTime) beforeRemoveTime = System.currentTimeMillis();
int size = queue.getSize();
for (int i=0; i<size; i++) {
int item = queue.dequeue();
if (validateStructure && !(queue.getSize()==unsorted.length-(i+1))) {
System.err.println("YIKES!! "+item+" caused a size mismatch.");
handleError(queue);
return false;
}
if (validateContents && queue.contains(item)) {
System.err.println("YIKES!! "+item+" still exist.");
handleError(queue);
return false;
}
}
if (debugTime) {
afterRemoveTime = System.currentTimeMillis();
removeTime += afterRemoveTime-beforeRemoveTime;
if (debug>0) System.out.println("Queue remove time = "+removeTime/count+" ms");
}
// Pass 2: enqueue the same values in reverse order.
count++;
if (debugMemory) beforeMemory = DataStructures.getMemoryUse();
if (debugTime) beforeAddTime = System.currentTimeMillis();
for (int i=unsorted.length-1; i>=0; i--) {
int item = unsorted[i];
queue.enqueue(item);
if (validateStructure && !(queue.getSize()==unsorted.length-i)) {
System.err.println("YIKES!! "+item+" caused a size mismatch.");
handleError(queue);
return false;
}
if (validateContents && !queue.contains(item)) {
System.err.println("YIKES!! "+item+" doesn't exist.");
handleError(queue);
return false;
}
}
if (debugTime) {
afterAddTime = System.currentTimeMillis();
addTime += afterAddTime-beforeAddTime;
if (debug>0) System.out.println("Queue add time = "+addTime/count+" ms");
}
if (debugMemory) {
afterMemory = DataStructures.getMemoryUse();
memory += afterMemory-beforeMemory;
if (debug>0) System.out.println("Queue memory use = "+(memory/count)+" bytes");
}
if (debug>1) System.out.println(queue.toString());
// Drain again; this time the loop bound is unsorted.length directly.
if (debugTime) beforeRemoveTime = System.currentTimeMillis();
for (int i=0; i<unsorted.length; i++) {
int item = queue.dequeue();
if (validateStructure && !(queue.getSize()==unsorted.length-(i+1))) {
System.err.println("YIKES!! "+item+" caused a size mismatch.");
handleError(queue);
return false;
}
if (validateContents && queue.contains(item)) {
System.err.println("YIKES!! "+item+" still exist.");
handleError(queue);
return false;
}
}
if (debugTime) {
afterRemoveTime = System.currentTimeMillis();
removeTime += afterRemoveTime-beforeRemoveTime;
if (debug>0) System.out.println("Queue remove time = "+removeTime/count+" ms");
}
//sorted
// Pass 3: sorted input; these timings are reported raw, not averaged.
long addSortedTime = 0L;
long removeSortedTime = 0L;
long beforeAddSortedTime = 0L;
long afterAddSortedTime = 0L;
long beforeRemoveSortedTime = 0L;
long afterRemoveSortedTime = 0L;
if (debugMemory) beforeMemory = DataStructures.getMemoryUse();
if (debugTime) beforeAddSortedTime = System.currentTimeMillis();
for (int i=0; i<sorted.length; i++) {
int item = sorted[i];
queue.enqueue(item);
if (validateStructure && !(queue.getSize()==(i+1))) {
System.err.println("YIKES!! "+item+" caused a size mismatch.");
handleError(queue);
return false;
}
if (validateContents && !queue.contains(item)) {
System.err.println("YIKES!! "+item+" doesn't exist.");
handleError(queue);
return false;
}
}
if (debugTime) {
afterAddSortedTime = System.currentTimeMillis();
addSortedTime += afterAddSortedTime-beforeAddSortedTime;
if (debug>0) System.out.println("Queue add time = "+addSortedTime+" ms");
}
if (debugMemory) {
afterMemory = DataStructures.getMemoryUse();
memory += afterMemory-beforeMemory;
if (debug>0) System.out.println("Queue memory use = "+(memory/(count+1))+" bytes");
}
if (debug>1) System.out.println(queue.toString());
if (debugTime) beforeRemoveSortedTime = System.currentTimeMillis();
for (int i=sorted.length-1; i>=0; i--) {
int item = queue.dequeue();
if (validateStructure && !(queue.getSize()==i)) {
System.err.println("YIKES!! "+item+" caused a size mismatch.");
handleError(queue);
return false;
}
if (validateContents && queue.contains(item)) {
System.err.println("YIKES!! "+item+" still exists.");
handleError(queue);
return false;
}
}
if (debugTime) {
afterRemoveSortedTime = System.currentTimeMillis();
removeSortedTime += afterRemoveSortedTime-beforeRemoveSortedTime;
if (debug>0) System.out.println("Queue remove time = "+removeSortedTime+" ms");
}
// Record: averaged unsorted add/remove, raw sorted add/remove, averaged memory.
testResults[test++] = new long[]{addTime/count,removeTime/count,addSortedTime,removeSortedTime,memory/(count+1)};
if (debug>1) System.out.println();
}
return true;
}
/**
 * Benchmarks and validates the RadixTree used as a map keyed by the string
 * form of each value in the global 'unsorted' and 'sorted' arrays.  Three
 * passes are run (unsorted in given order, unsorted in reverse, sorted);
 * after every put/remove the tree's size and key membership are checked.
 * Unsorted timings are averaged, sorted timings reported raw; results go
 * into the global testNames/testResults tables.
 *
 * @return true on success, false (after handleError) on any mismatch
 */
private static boolean testRadixTree() {
{
// Accumulators for averaged timing/memory across the unsorted passes.
long count = 0;
long addTime = 0L;
long removeTime = 0L;
long beforeAddTime = 0L;
long afterAddTime = 0L;
long beforeRemoveTime = 0L;
long afterRemoveTime = 0L;
long memory = 0L;
long beforeMemory = 0L;
long afterMemory = 0L;
//Radix Tree (map)
if (debug>1) System.out.println("Radix Tree (map).");
testNames[test] = "Radix Tree (map)";
// Pass 1: insert the unsorted keys in the order given (value = index).
count++;
if (debugMemory) beforeMemory = DataStructures.getMemoryUse();
if (debugTime) beforeAddTime = System.currentTimeMillis();
RadixTree<String,Integer> tree = new RadixTree<String,Integer>();
for (int i=0; i<unsorted.length; i++) {
int item = unsorted[i];
String string = String.valueOf(item);
tree.put(string, i);
if (validateStructure && !(tree.getSize()==(i+1))) {
System.err.println("YIKES!! "+item+" caused a size mismatch.");
handleError(tree);
return false;
}
if (validateContents && !tree.contains(string)) {
System.err.println("YIKES!! "+string+" doesn't exist.");
handleError(tree);
return false;
}
}
if (debugTime) {
afterAddTime = System.currentTimeMillis();
addTime += afterAddTime-beforeAddTime;
if (debug>0) System.out.println("Radix Tree add time = "+addTime/count+" ms");
}
if (debugMemory) {
afterMemory = DataStructures.getMemoryUse();
memory += afterMemory-beforeMemory;
if (debug>0) System.out.println("Radix Tree memory use = "+(memory/count)+" bytes");
}
if (debug>1) System.out.println(tree.toString());
// Remove the same keys in insertion order.
if (debugTime) beforeRemoveTime = System.currentTimeMillis();
for (int i=0; i<unsorted.length; i++) {
int item = unsorted[i];
String string = String.valueOf(item);
tree.remove(string);
if (validateStructure && !(tree.getSize()==(unsorted.length-(i+1)))) {
System.err.println("YIKES!! "+item+" caused a size mismatch.");
handleError(tree);
return false;
}
if (validateContents && tree.contains(string)) {
System.err.println("YIKES!! "+string+" still exists.");
handleError(tree);
return false;
}
}
if (debugTime) {
afterRemoveTime = System.currentTimeMillis();
removeTime += afterRemoveTime-beforeRemoveTime;
if (debug>0) System.out.println("Radix Tree remove time = "+removeTime/count+" ms");
}
// Pass 2: insert the unsorted keys in reverse order.
count++;
if (debugMemory) beforeMemory = DataStructures.getMemoryUse();
if (debugTime) beforeAddTime = System.currentTimeMillis();
for (int i=unsorted.length-1; i>=0; i--) {
int item = unsorted[i];
String string = String.valueOf(item);
tree.put(string, i);
if (validateStructure && !(tree.getSize()==(unsorted.length-i))) {
System.err.println("YIKES!! "+item+" caused a size mismatch.");
handleError(tree);
return false;
}
if (validateContents && !tree.contains(string)) {
System.err.println("YIKES!! "+string+" doesn't exist.");
handleError(tree);
return false;
}
}
if (debugTime) {
afterAddTime = System.currentTimeMillis();
addTime += afterAddTime-beforeAddTime;
if (debug>0) System.out.println("Radix Tree add time = "+addTime/count+" ms");
}
if (debugMemory) {
afterMemory = DataStructures.getMemoryUse();
memory += afterMemory-beforeMemory;
if (debug>0) System.out.println("Radix Tree memory use = "+(memory/count)+" bytes");
}
if (debug>1) System.out.println(tree.toString());
if (debugTime) beforeRemoveTime = System.currentTimeMillis();
for (int i=0; i<unsorted.length; i++) {
int item = unsorted[i];
String string = String.valueOf(item);
tree.remove(string);
if (validateStructure && !(tree.getSize()==(unsorted.length-(i+1)))) {
System.err.println("YIKES!! "+item+" caused a size mismatch.");
handleError(tree);
return false;
}
if (validateContents && tree.contains(string)) {
System.err.println("YIKES!! "+string+" still exists.");
handleError(tree);
return false;
}
}
if (debugTime) {
afterRemoveTime = System.currentTimeMillis();
removeTime += afterRemoveTime-beforeRemoveTime;
if (debug>0) System.out.println("Radix Tree remove time = "+removeTime/count+" ms");
}
//sorted
// Pass 3: sorted input; these timings are reported raw, not averaged.
long addSortedTime = 0L;
long removeSortedTime = 0L;
long beforeAddSortedTime = 0L;
long afterAddSortedTime = 0L;
long beforeRemoveSortedTime = 0L;
long afterRemoveSortedTime = 0L;
if (debugMemory) beforeMemory = DataStructures.getMemoryUse();
if (debugTime) beforeAddSortedTime = System.currentTimeMillis();
for (int i=0; i<sorted.length; i++) {
int item = sorted[i];
String string = String.valueOf(item);
// NOTE(review): this pass stores 'item' as the value while the unsorted
// passes stored the index 'i'; only size/contains are checked, so the
// difference looks benign -- confirm it is intentional.
tree.put(string,item);
if (validateStructure && !(tree.getSize()==(i+1))) {
System.err.println("YIKES!! "+item+" caused a size mismatch.");
handleError(tree);
return false;
}
if (validateContents && !tree.contains(string)) {
System.err.println("YIKES!! "+item+" doesn't exist.");
handleError(tree);
return false;
}
}
if (debugTime) {
afterAddSortedTime = System.currentTimeMillis();
addSortedTime += afterAddSortedTime-beforeAddSortedTime;
if (debug>0) System.out.println("Radix Tree add time = "+addSortedTime+" ms");
}
if (debugMemory) {
afterMemory = DataStructures.getMemoryUse();
memory += afterMemory-beforeMemory;
if (debug>0) System.out.println("Radix Tree memory use = "+(memory/(count+1))+" bytes");
}
if (debug>1) System.out.println(tree.toString());
// Remove the sorted keys from the high end down.
if (debugTime) beforeRemoveSortedTime = System.currentTimeMillis();
for (int i=sorted.length-1; i>=0; i--) {
int item = sorted[i];
String string = String.valueOf(item);
tree.remove(string);
if (validateStructure && !(tree.getSize()==i)) {
System.err.println("YIKES!! "+item+" caused a size mismatch.");
handleError(tree);
return false;
}
if (validateContents && tree.contains(string)) {
System.err.println("YIKES!! "+item+" still exists.");
handleError(tree);
return false;
}
}
if (debugTime) {
afterRemoveSortedTime = System.currentTimeMillis();
removeSortedTime += afterRemoveSortedTime-beforeRemoveSortedTime;
if (debug>0) System.out.println("Radix Tree remove time = "+removeSortedTime+" ms");
}
// Record: averaged unsorted add/remove, raw sorted add/remove, averaged memory.
testResults[test++] = new long[]{addTime/count,removeTime/count,addSortedTime,removeSortedTime,memory/(count+1)};
if (debug>1) System.out.println();
}
return true;
}
/**
 * Smoke-tests the SegmentTree: builds a tree over four segments, queries
 * quadrant occupancy over index ranges, applies point updates, and
 * re-queries.  Results are only printed (when debug > 1), never asserted,
 * so this test can only fail by throwing.
 *
 * NOTE(review): the Segment constructor appears to take
 * (index, quad1, quad2, quad3, quad4) occupancy counts, and the inline
 * comments use zero-based point/quadrant names -- confirm against the
 * SegmentTree class.
 *
 * @return always true
 */
private static boolean testSegmentTree() {
{
//Segment tree
if (debug>1) System.out.println("Segment Tree.");
// One segment per point; each starts with a single quadrant occupied.
SegmentTree.Segment[] segments = new SegmentTree.Segment[4];
segments[0] = new SegmentTree.Segment(0,1,0,0,0); //first point in the 0th quadrant
segments[1] = new SegmentTree.Segment(1,0,1,0,0); //second point in the 1st quadrant
segments[2] = new SegmentTree.Segment(2,0,0,1,0); //third point in the 2nd quadrant
segments[3] = new SegmentTree.Segment(3,0,0,0,1); //fourth point in the 3rd quadrant
SegmentTree tree = new SegmentTree(segments);
// Query the full index range [0,3]; presumably one point per quadrant.
SegmentTree.Query query = tree.query(0, 3);
if (debug>1) System.out.println(query.quad1+" "+query.quad2+" "+query.quad3+" "+query.quad4);
// Apply quadrant-count deltas at individual point indices, then re-query.
tree.update(1, 0, -1, 1, 0); //Move the first point from quadrant one to quadrant two
tree.update(2, 0, 1, -1, 0); //Move the second point from quadrant two to quadrant one
tree.update(3, 1, 0, 0, -1); //Move the third point from quadrant third to quadrant zero
query = tree.query(2, 3);
if (debug>1) System.out.println(query.quad1+" "+query.quad2+" "+query.quad3+" "+query.quad4);
tree.update(0, -1, 1, 0, 0); //Move the zeroth point from quadrant zero to quadrant one
tree.update(1, 0, 0, -1, 1); //Move the first point from quadrant three to quadrant four
query = tree.query(0, 2);
if (debug>1) System.out.println(query.quad1+" "+query.quad2+" "+query.quad3+" "+query.quad4);
if (debug>1) System.out.println();
}
return true;
}
/**
 * Benchmarks and validates the SkipList.  Three passes are run: the global
 * 'unsorted' values inserted in given order, the same values inserted in
 * reverse order, and finally the global 'sorted' values.  After every add or
 * remove the list's size and membership are checked.  Unsorted timings are
 * averaged over the passes, sorted timings are reported raw, and results are
 * accumulated into the global testNames/testResults tables.
 *
 * @return true on success, false (after handleError) on any mismatch
 */
private static boolean testSkipList() {
    {
        // Accumulators for averaged timing/memory across the unsorted passes.
        long passes = 0;
        long addMillis = 0L;
        long removeMillis = 0L;
        long addStart = 0L;
        long addEnd = 0L;
        long removeStart = 0L;
        long removeEnd = 0L;
        long memoryUsed = 0L;
        long memStart = 0L;
        long memEnd = 0L;

        // SkipList
        if (debug > 1) System.out.println("Skip List.");
        testNames[test] = "Skip List";

        // Pass 1: insert the unsorted values in the order given.
        passes++;
        if (debugMemory) memStart = DataStructures.getMemoryUse();
        if (debugTime) addStart = System.currentTimeMillis();
        SkipList<Integer> skipList = new SkipList<Integer>();
        for (int idx = 0; idx < unsorted.length; idx++) {
            int value = unsorted[idx];
            skipList.add(value);
            if (validateStructure && skipList.getSize() != idx + 1) {
                System.err.println("YIKES!! " + value + " caused a size mismatch.");
                handleError(skipList);
                return false;
            }
            if (validateContents && !skipList.contains(value)) {
                System.err.println("YIKES!! " + value + " doesn't exist.");
                handleError(skipList);
                return false;
            }
        }
        if (debugTime) {
            addEnd = System.currentTimeMillis();
            addMillis += addEnd - addStart;
            if (debug > 0) System.out.println("Skip List add time = " + addMillis / passes + " ms");
        }
        if (debugMemory) {
            memEnd = DataStructures.getMemoryUse();
            memoryUsed += memEnd - memStart;
            if (debug > 0) System.out.println("Skip List memory use = " + (memoryUsed / passes) + " bytes");
        }
        if (debug > 1) System.out.println(skipList.toString());

        // Remove the same values in insertion order.
        if (debugTime) removeStart = System.currentTimeMillis();
        for (int idx = 0; idx < unsorted.length; idx++) {
            int value = unsorted[idx];
            skipList.remove(value);
            if (validateStructure && skipList.getSize() != unsorted.length - (idx + 1)) {
                System.err.println("YIKES!! " + value + " caused a size mismatch.");
                handleError(skipList);
                return false;
            }
            if (validateContents && skipList.contains(value)) {
                System.err.println("YIKES!! " + value + " still exist.");
                handleError(skipList);
                return false;
            }
        }
        if (debugTime) {
            removeEnd = System.currentTimeMillis();
            removeMillis += removeEnd - removeStart;
            if (debug > 0) System.out.println("Skip List remove time = " + removeMillis / passes + " ms");
        }

        // Pass 2: insert the unsorted values in reverse order.
        passes++;
        if (debugMemory) memStart = DataStructures.getMemoryUse();
        if (debugTime) addStart = System.currentTimeMillis();
        for (int idx = unsorted.length - 1; idx >= 0; idx--) {
            int value = unsorted[idx];
            skipList.add(value);
            if (validateStructure && skipList.getSize() != unsorted.length - idx) {
                System.err.println("YIKES!! " + value + " caused a size mismatch.");
                handleError(skipList);
                return false;
            }
            if (validateContents && !skipList.contains(value)) {
                System.err.println("YIKES!! " + value + " doesn't exist.");
                handleError(skipList);
                return false;
            }
        }
        if (debugTime) {
            addEnd = System.currentTimeMillis();
            addMillis += addEnd - addStart;
            if (debug > 0) System.out.println("Skip List add time = " + addMillis / passes + " ms");
        }
        if (debugMemory) {
            memEnd = DataStructures.getMemoryUse();
            memoryUsed += memEnd - memStart;
            if (debug > 0) System.out.println("Skip List memory use = " + (memoryUsed / passes) + " bytes");
        }
        if (debug > 1) System.out.println(skipList.toString());

        if (debugTime) removeStart = System.currentTimeMillis();
        for (int idx = 0; idx < unsorted.length; idx++) {
            int value = unsorted[idx];
            skipList.remove(value);
            if (validateStructure && skipList.getSize() != unsorted.length - (idx + 1)) {
                System.err.println("YIKES!! " + value + " caused a size mismatch.");
                handleError(skipList);
                return false;
            }
            if (validateContents && skipList.contains(value)) {
                System.err.println("YIKES!! " + value + " still exists.");
                handleError(skipList);
                return false;
            }
        }
        if (debugTime) {
            removeEnd = System.currentTimeMillis();
            removeMillis += removeEnd - removeStart;
            if (debug > 0) System.out.println("Skip List remove time = " + removeMillis / passes + " ms");
        }

        // Pass 3: sorted input; these timings are reported raw, not averaged.
        long sortedAddMillis = 0L;
        long sortedRemoveMillis = 0L;
        long sortedAddStart = 0L;
        long sortedAddEnd = 0L;
        long sortedRemoveStart = 0L;
        long sortedRemoveEnd = 0L;
        if (debugMemory) memStart = DataStructures.getMemoryUse();
        if (debugTime) sortedAddStart = System.currentTimeMillis();
        for (int idx = 0; idx < sorted.length; idx++) {
            int value = sorted[idx];
            skipList.add(value);
            if (validateStructure && skipList.getSize() != idx + 1) {
                System.err.println("YIKES!! " + value + " caused a size mismatch.");
                handleError(skipList);
                return false;
            }
            if (validateContents && !skipList.contains(value)) {
                System.err.println("YIKES!! " + value + " doesn't exist.");
                handleError(skipList);
                return false;
            }
        }
        if (debugTime) {
            sortedAddEnd = System.currentTimeMillis();
            sortedAddMillis += sortedAddEnd - sortedAddStart;
            if (debug > 0) System.out.println("Skip List add time = " + sortedAddMillis + " ms");
        }
        if (debugMemory) {
            memEnd = DataStructures.getMemoryUse();
            memoryUsed += memEnd - memStart;
            if (debug > 0) System.out.println("Skip List memory use = " + (memoryUsed / (passes + 1)) + " bytes");
        }
        if (debug > 1) System.out.println(skipList.toString());

        // Remove the sorted values from the high end down.
        if (debugTime) sortedRemoveStart = System.currentTimeMillis();
        for (int idx = sorted.length - 1; idx >= 0; idx--) {
            int value = sorted[idx];
            skipList.remove(value);
            if (validateStructure && skipList.getSize() != idx) {
                System.err.println("YIKES!! " + value + " caused a size mismatch.");
                handleError(skipList);
                return false;
            }
            if (validateContents && skipList.contains(value)) {
                System.err.println("YIKES!! " + value + " still exists.");
                handleError(skipList);
                return false;
            }
        }
        if (debugTime) {
            sortedRemoveEnd = System.currentTimeMillis();
            sortedRemoveMillis += sortedRemoveEnd - sortedRemoveStart;
            if (debug > 0) System.out.println("Skip List remove time = " + sortedRemoveMillis + " ms");
        }

        // Record: averaged unsorted add/remove, raw sorted add/remove, averaged memory.
        testResults[test++] = new long[]{ addMillis / passes, removeMillis / passes,
                sortedAddMillis, sortedRemoveMillis, memoryUsed / (passes + 1) };
        if (debug > 1) System.out.println();
    }
    return true;
}
/**
 * Benchmarks and validates the SplayTree: adds/removes the global 'unsorted'
 * values (given order, then reverse order) and then the global 'sorted'
 * values, checking size and membership after every operation.  Unsorted
 * timings are averaged over the passes, sorted timings are reported raw, and
 * results go into the global testNames/testResults tables.
 *
 * Fixed: two diagnostics previously read "doesn't exists." while the sorted
 * pass correctly said "doesn't exist."
 *
 * @return true on success, false (after handleError) on any mismatch
 */
private static boolean testSplayTree() {
    {
        // Accumulators for averaged timing/memory across the unsorted passes.
        long count = 0;
        long addTime = 0L;
        long removeTime = 0L;
        long beforeAddTime = 0L;
        long afterAddTime = 0L;
        long beforeRemoveTime = 0L;
        long afterRemoveTime = 0L;
        long memory = 0L;
        long beforeMemory = 0L;
        long afterMemory = 0L;
        //Splay Tree
        if (debug>1) System.out.println("Splay Tree.");
        testNames[test] = "Splay Tree";
        // Pass 1: insert the unsorted values in the order given.
        count++;
        if (debugMemory) beforeMemory = DataStructures.getMemoryUse();
        if (debugTime) beforeAddTime = System.currentTimeMillis();
        SplayTree<Integer> splay = new SplayTree<Integer>();
        for (int i=0; i<unsorted.length; i++) {
            int item = unsorted[i];
            splay.add(item);
            if (validateStructure && !(splay.getSize()==(i+1))) {
                System.err.println("YIKES!! "+item+" caused a size mismatch.");
                handleError(splay);
                return false;
            }
            if (validateContents && !splay.contains(item)) {
                System.err.println("YIKES!! "+item+" doesn't exist.");
                handleError(splay);
                return false;
            }
        }
        if (debugTime) {
            afterAddTime = System.currentTimeMillis();
            addTime += afterAddTime-beforeAddTime;
            if (debug>0) System.out.println("Splay Tree add time = "+addTime/count+" ms");
        }
        if (debugMemory) {
            afterMemory = DataStructures.getMemoryUse();
            memory += afterMemory-beforeMemory;
            if (debug>0) System.out.println("Splay Tree memory use = "+(memory/count)+" bytes");
        }
        if (debug>1) System.out.println(splay.toString());
        // Remove the same values in insertion order.
        if (debugTime) beforeRemoveTime = System.currentTimeMillis();
        for (int i=0; i<unsorted.length; i++) {
            int item = unsorted[i];
            splay.remove(item);
            if (validateStructure && !(splay.getSize()==((unsorted.length-1)-i))) {
                System.err.println("YIKES!! "+item+" caused a size mismatch.");
                handleError(splay);
                return false;
            }
            if (validateContents && splay.contains(item)) {
                System.err.println("YIKES!! "+item+" still exists.");
                handleError(splay);
                return false;
            }
        }
        if (debugTime) {
            afterRemoveTime = System.currentTimeMillis();
            removeTime += afterRemoveTime-beforeRemoveTime;
            if (debug>0) System.out.println("Splay Tree remove time = "+removeTime/count+" ms");
        }
        // Pass 2: insert the unsorted values in reverse order.
        count++;
        if (debugMemory) beforeMemory = DataStructures.getMemoryUse();
        if (debugTime) beforeAddTime = System.currentTimeMillis();
        for (int i=unsorted.length-1; i>=0; i--) {
            int item = unsorted[i];
            splay.add(item);
            if (validateStructure && !(splay.getSize()==(unsorted.length-i))) {
                System.err.println("YIKES!! "+item+" caused a size mismatch.");
                handleError(splay);
                return false;
            }
            if (validateContents && !splay.contains(item)) {
                System.err.println("YIKES!! "+item+" doesn't exist.");
                handleError(splay);
                return false;
            }
        }
        if (debugTime) {
            afterAddTime = System.currentTimeMillis();
            addTime += afterAddTime-beforeAddTime;
            if (debug>0) System.out.println("Splay Tree add time = "+addTime/count+" ms");
        }
        if (debugMemory) {
            afterMemory = DataStructures.getMemoryUse();
            memory += afterMemory-beforeMemory;
            if (debug>0) System.out.println("Splay Tree memory use = "+(memory/count)+" bytes");
        }
        if (debug>1) System.out.println(splay.toString());
        if (debugTime) beforeRemoveTime = System.currentTimeMillis();
        for (int i=0; i<unsorted.length; i++) {
            int item = unsorted[i];
            splay.remove(item);
            if (validateStructure && !(splay.getSize()==((unsorted.length-1)-i))) {
                System.err.println("YIKES!! "+item+" caused a size mismatch.");
                handleError(splay);
                return false;
            }
            if (validateContents && splay.contains(item)) {
                System.err.println("YIKES!! "+item+" still exists.");
                handleError(splay);
                return false;
            }
        }
        if (debugTime) {
            afterRemoveTime = System.currentTimeMillis();
            removeTime += afterRemoveTime-beforeRemoveTime;
            if (debug>0) System.out.println("Splay Tree remove time = "+removeTime/count+" ms");
        }
        //sorted
        // Pass 3: sorted input; these timings are reported raw, not averaged.
        long addSortedTime = 0L;
        long removeSortedTime = 0L;
        long beforeAddSortedTime = 0L;
        long afterAddSortedTime = 0L;
        long beforeRemoveSortedTime = 0L;
        long afterRemoveSortedTime = 0L;
        if (debugMemory) beforeMemory = DataStructures.getMemoryUse();
        if (debugTime) beforeAddSortedTime = System.currentTimeMillis();
        for (int i=0; i<sorted.length; i++) {
            int item = sorted[i];
            splay.add(item);
            if (validateStructure && !(splay.getSize()==(i+1))) {
                System.err.println("YIKES!! "+item+" caused a size mismatch.");
                handleError(splay);
                return false;
            }
            if (validateContents && !splay.contains(item)) {
                System.err.println("YIKES!! "+item+" doesn't exist.");
                handleError(splay);
                return false;
            }
        }
        if (debugTime) {
            afterAddSortedTime = System.currentTimeMillis();
            addSortedTime += afterAddSortedTime-beforeAddSortedTime;
            if (debug>0) System.out.println("Splay Tree add time = "+addSortedTime+" ms");
        }
        if (debugMemory) {
            afterMemory = DataStructures.getMemoryUse();
            memory += afterMemory-beforeMemory;
            if (debug>0) System.out.println("Splay Tree memory use = "+(memory/(count+1))+" bytes");
        }
        if (debug>1) System.out.println(splay.toString());
        // Remove the sorted values from the high end down.
        if (debugTime) beforeRemoveSortedTime = System.currentTimeMillis();
        for (int i=sorted.length-1; i>=0; i--) {
            int item = sorted[i];
            splay.remove(item);
            if (validateStructure && !(splay.getSize()==i)) {
                System.err.println("YIKES!! "+item+" caused a size mismatch.");
                handleError(splay);
                return false;
            }
            if (validateContents && splay.contains(item)) {
                System.err.println("YIKES!! "+item+" still exists.");
                handleError(splay);
                return false;
            }
        }
        if (debugTime) {
            afterRemoveSortedTime = System.currentTimeMillis();
            removeSortedTime += afterRemoveSortedTime-beforeRemoveSortedTime;
            if (debug>0) System.out.println("Splay Tree remove time = "+removeSortedTime+" ms");
        }
        // Record: averaged unsorted add/remove, raw sorted add/remove, averaged memory.
        testResults[test++] = new long[]{addTime/count,removeTime/count,addSortedTime,removeSortedTime,memory/(count+1)};
        if (debug>1) System.out.println();
    }
    return true;
}
/**
 * Benchmarks and validates the project's Stack implementation.
 * Three passes are run: (1) push the shared 'unsorted' array in order and
 * pop everything, (2) push it in reverse order and pop everything, and
 * (3) push/pop the shared 'sorted' array. When validateStructure /
 * validateContents are enabled, size and membership are checked after
 * every operation. Average timings and memory use are recorded into the
 * shared testNames/testResults tables under the "Stack" entry.
 *
 * @return true if every push/pop left the stack consistent; false on the
 *         first detected mismatch (after dumping the structure via handleError).
 */
private static boolean testStack() {
{
long count = 0;
long addTime = 0L;
long removeTime = 0L;
long beforeAddTime = 0L;
long afterAddTime = 0L;
long beforeRemoveTime = 0L;
long afterRemoveTime = 0L;
long memory = 0L;
long beforeMemory = 0L;
long afterMemory = 0L;
// Stack
if (debug>1) System.out.println("Stack.");
testNames[test] = "Stack";
// Pass 1: push the unsorted items in array order.
count++;
if (debugMemory) beforeMemory = DataStructures.getMemoryUse();
if (debugTime) beforeAddTime = System.currentTimeMillis();
Stack<Integer> stack = new Stack<Integer>();
for (int i=0; i<unsorted.length; i++) {
int item = unsorted[i];
stack.push(item);
if (validateStructure && !(stack.getSize()==i+1)) {
System.err.println("YIKES!! "+item+" caused a size mismatch.");
handleError(stack);
return false;
}
if (validateContents && !stack.contains(item)) {
System.err.println("YIKES!! "+item+" doesn't exists.");
handleError(stack);
return false;
}
}
if (debugTime) {
afterAddTime = System.currentTimeMillis();
addTime += afterAddTime-beforeAddTime;
if (debug>0) System.out.println("Stack add time = "+addTime/count+" ms");
}
if (debugMemory) {
afterMemory = DataStructures.getMemoryUse();
memory += afterMemory-beforeMemory;
if (debug>0) System.out.println("Stack memory use = "+(memory/count)+" bytes");
}
if (debug>1) System.out.println(stack.toString());
// Pop everything back off; size is snapshotted because pop() shrinks it.
if (debugTime) beforeRemoveTime = System.currentTimeMillis();
int size = stack.getSize();
for (int i=0; i<size; i++) {
int item = stack.pop();
if (validateStructure && !(stack.getSize()==unsorted.length-(i+1))) {
System.err.println("YIKES!! "+item+" caused a size mismatch.");
handleError(stack);
return false;
}
if (validateContents && stack.contains(item)) {
System.err.println("YIKES!! "+item+" still exists.");
handleError(stack);
return false;
}
}
if (debugTime) {
afterRemoveTime = System.currentTimeMillis();
removeTime += afterRemoveTime-beforeRemoveTime;
if (debug>0) System.out.println("Stack remove time = "+removeTime/count+" ms");
}
// Pass 2: push the same items in reverse array order, then pop them all.
count++;
if (debugMemory) beforeMemory = DataStructures.getMemoryUse();
if (debugTime) beforeAddTime = System.currentTimeMillis();
for (int i=unsorted.length-1; i>=0; i--) {
int item = unsorted[i];
stack.push(item);
if (validateStructure && !(stack.getSize()==unsorted.length-i)) {
System.err.println("YIKES!! "+item+" caused a size mismatch.");
handleError(stack);
return false;
}
if (validateContents && !stack.contains(item)) {
System.err.println("YIKES!! "+item+" doesn't exists.");
handleError(stack);
return false;
}
}
if (debugTime) {
afterAddTime = System.currentTimeMillis();
addTime += afterAddTime-beforeAddTime;
if (debug>0) System.out.println("Stack add time = "+addTime/count+" ms");
}
if (debugMemory) {
afterMemory = DataStructures.getMemoryUse();
memory += afterMemory-beforeMemory;
if (debug>0) System.out.println("Stack memory use = "+(memory/count)+" bytes");
}
if (debug>1) System.out.println(stack.toString());
if (debugTime) beforeRemoveTime = System.currentTimeMillis();
for (int i=0; i<unsorted.length; i++) {
int item = stack.pop();
if (validateStructure && !(stack.getSize()==unsorted.length-(i+1))) {
System.err.println("YIKES!! "+item+" caused a size mismatch.");
handleError(stack);
return false;
}
if (validateContents && stack.contains(item)) {
System.err.println("YIKES!! "+item+" still exists.");
handleError(stack);
return false;
}
}
if (debugTime) {
afterRemoveTime = System.currentTimeMillis();
removeTime += afterRemoveTime-beforeRemoveTime;
if (debug>0) System.out.println("Stack remove time = "+removeTime/count+" ms");
}
// Pass 3 (sorted data): timings kept separate from the unsorted averages.
//sorted
long addSortedTime = 0L;
long removeSortedTime = 0L;
long beforeAddSortedTime = 0L;
long afterAddSortedTime = 0L;
long beforeRemoveSortedTime = 0L;
long afterRemoveSortedTime = 0L;
if (debugMemory) beforeMemory = DataStructures.getMemoryUse();
if (debugTime) beforeAddSortedTime = System.currentTimeMillis();
for (int i=0; i<sorted.length; i++) {
int item = sorted[i];
stack.push(item);
if (validateStructure && !(stack.getSize()==(i+1))) {
System.err.println("YIKES!! "+item+" caused a size mismatch.");
handleError(stack);
return false;
}
if (validateContents && !stack.contains(item)) {
System.err.println("YIKES!! "+item+" doesn't exist.");
handleError(stack);
return false;
}
}
if (debugTime) {
afterAddSortedTime = System.currentTimeMillis();
addSortedTime += afterAddSortedTime-beforeAddSortedTime;
if (debug>0) System.out.println("Stack add time = "+addSortedTime+" ms");
}
if (debugMemory) {
afterMemory = DataStructures.getMemoryUse();
memory += afterMemory-beforeMemory;
if (debug>0) System.out.println("Stack memory use = "+(memory/(count+1))+" bytes");
}
if (debug>1) System.out.println(stack.toString());
if (debugTime) beforeRemoveSortedTime = System.currentTimeMillis();
for (int i=sorted.length-1; i>=0; i--) {
int item = stack.pop();
if (validateStructure && !(stack.getSize()==i)) {
System.err.println("YIKES!! "+item+" caused a size mismatch.");
handleError(stack);
return false;
}
if (validateContents && stack.contains(item)) {
System.err.println("YIKES!! "+item+" still exists.");
handleError(stack);
return false;
}
}
if (debugTime) {
afterRemoveSortedTime = System.currentTimeMillis();
removeSortedTime += afterRemoveSortedTime-beforeRemoveSortedTime;
if (debug>0) System.out.println("Stack remove time = "+removeSortedTime+" ms");
}
// Row layout: {avg add, avg remove, sorted add, sorted remove, avg memory}.
testResults[test++] = new long[]{addTime/count,removeTime/count,addSortedTime,removeSortedTime,memory/(count+1)};
if (debug>1) System.out.println();
}
return true;
}
/**
 * Smoke-tests the SuffixTree over the word "bookkeeper": the full word and
 * an interior substring must be found, and a non-substring must be rejected.
 *
 * @return true when all three substring checks behave as expected; false on
 *         the first failure (after dumping the structure via handleError).
 */
private static boolean testSuffixTree() {
    if (debug > 1) System.out.println("Suffix Tree.");
    final String bookkeeper = "bookkeeper";
    final SuffixTree<String> suffixTree = new SuffixTree<String>(bookkeeper);
    if (debug > 1) {
        System.out.println(suffixTree.toString());
        System.out.println(suffixTree.getSuffixes());
    }
    // The complete source string is trivially a substring of itself.
    if (!suffixTree.doesSubStringExist(bookkeeper)) {
        System.err.println("YIKES!! " + bookkeeper + " doesn't exists.");
        handleError(suffixTree);
        return false;
    }
    // "booker" is not a contiguous substring and must be rejected.
    final String failed = "booker";
    if (suffixTree.doesSubStringExist(failed)) {
        System.err.println("YIKES!! " + failed + " exists.");
        handleError(suffixTree);
        return false;
    }
    // "kkee" is an interior substring and must be found.
    final String pass = "kkee";
    if (!suffixTree.doesSubStringExist(pass)) {
        System.err.println("YIKES!! " + pass + " doesn't exists.");
        handleError(suffixTree);
        return false;
    }
    if (debug > 1) System.out.println();
    return true;
}
/**
 * Smoke-tests the SuffixTrie over the word "bookkeeper": the full word and
 * an interior substring must be found, and a non-substring must be rejected.
 *
 * @return true when all three substring checks behave as expected; false on
 *         the first failure (after dumping the structure via handleError).
 */
private static boolean testSuffixTrie() {
    if (debug > 1) System.out.println("Suffix Trie.");
    final String bookkeeper = "bookkeeper";
    final SuffixTrie<String> suffixTrie = new SuffixTrie<String>(bookkeeper);
    if (debug > 1) {
        System.out.println(suffixTrie.toString());
        System.out.println(suffixTrie.getSuffixes());
    }
    // The complete source string is trivially a substring of itself.
    if (!suffixTrie.doesSubStringExist(bookkeeper)) {
        System.err.println("YIKES!! " + bookkeeper + " doesn't exists.");
        handleError(suffixTrie);
        return false;
    }
    // "booker" is not a contiguous substring and must be rejected.
    final String failed = "booker";
    if (suffixTrie.doesSubStringExist(failed)) {
        System.err.println("YIKES!! " + failed + " exists.");
        handleError(suffixTrie);
        return false;
    }
    // "kkee" is an interior substring and must be found.
    final String pass = "kkee";
    if (!suffixTrie.doesSubStringExist(pass)) {
        System.err.println("YIKES!! " + pass + " doesn't exists.");
        handleError(suffixTrie);
        return false;
    }
    if (debug > 1) System.out.println();
    return true;
}
/**
 * Benchmarks and validates the Treap implementation.
 * Three passes are run: (1) add the shared 'unsorted' array in order and
 * remove everything, (2) add it in reverse order and remove everything, and
 * (3) add/remove the shared 'sorted' array. When validateStructure /
 * validateContents are enabled, size and membership are checked after every
 * operation. Results are stored in the shared testNames/testResults tables
 * under the "Treap" entry.
 *
 * Fix: the size-mismatch error messages previously carried stray debugging
 * suffixes ("size mismatch.1" .. "size mismatch.4"); they now match the
 * message used by every other test method in this file.
 *
 * @return true if every add/remove left the treap consistent; false on the
 *         first detected mismatch (after dumping the structure via handleError).
 */
private static boolean testTreap() {
    long count = 0;
    long addTime = 0L;
    long removeTime = 0L;
    long beforeAddTime = 0L;
    long afterAddTime = 0L;
    long beforeRemoveTime = 0L;
    long afterRemoveTime = 0L;
    long memory = 0L;
    long beforeMemory = 0L;
    long afterMemory = 0L;
    //Treap
    if (debug > 1) System.out.println("Treap.");
    testNames[test] = "Treap";
    // Pass 1: add the unsorted items in array order, then remove them all.
    count++;
    if (debugMemory) beforeMemory = DataStructures.getMemoryUse();
    if (debugTime) beforeAddTime = System.currentTimeMillis();
    Treap<Integer> treap = new Treap<Integer>();
    for (int i = 0; i < unsorted.length; i++) {
        int item = unsorted[i];
        treap.add(item);
        if (validateStructure && !(treap.getSize() == i + 1)) {
            System.err.println("YIKES!! " + item + " caused a size mismatch.");
            handleError(treap);
            return false;
        }
        if (validateContents && !treap.contains(item)) {
            System.err.println("YIKES!! " + item + " doesn't exists.");
            handleError(treap);
            return false;
        }
    }
    if (debugTime) {
        afterAddTime = System.currentTimeMillis();
        addTime += afterAddTime - beforeAddTime;
        if (debug > 0) System.out.println("Treap add time = " + addTime / count + " ms");
    }
    if (debugMemory) {
        afterMemory = DataStructures.getMemoryUse();
        memory += afterMemory - beforeMemory;
        if (debug > 0) System.out.println("Treap memory use = " + (memory / count) + " bytes");
    }
    if (debug > 1) System.out.println(treap.toString());
    if (debugTime) beforeRemoveTime = System.currentTimeMillis();
    for (int i = 0; i < unsorted.length; i++) {
        int item = unsorted[i];
        treap.remove(item);
        if (validateStructure && !(treap.getSize() == unsorted.length - (i + 1))) {
            System.err.println("YIKES!! " + item + " caused a size mismatch.");
            handleError(treap);
            return false;
        }
        if (validateContents && treap.contains(item)) {
            System.err.println("YIKES!! " + item + " still exists.");
            handleError(treap);
            return false;
        }
    }
    if (debugTime) {
        afterRemoveTime = System.currentTimeMillis();
        removeTime += afterRemoveTime - beforeRemoveTime;
        if (debug > 0) System.out.println("Treap remove time = " + removeTime / count + " ms");
    }
    // Pass 2: add the same items in reverse array order, then remove them all.
    count++;
    if (debugMemory) beforeMemory = DataStructures.getMemoryUse();
    if (debugTime) beforeAddTime = System.currentTimeMillis();
    for (int i = unsorted.length - 1; i >= 0; i--) {
        int item = unsorted[i];
        treap.add(item);
        if (validateStructure && !(treap.getSize() == unsorted.length - i)) {
            System.err.println("YIKES!! " + item + " caused a size mismatch.");
            handleError(treap);
            return false;
        }
        if (validateContents && !treap.contains(item)) {
            System.err.println("YIKES!! " + item + " doesn't exists.");
            handleError(treap);
            return false;
        }
    }
    if (debugTime) {
        afterAddTime = System.currentTimeMillis();
        addTime += afterAddTime - beforeAddTime;
        if (debug > 0) System.out.println("Treap add time = " + addTime / count + " ms");
    }
    if (debugMemory) {
        afterMemory = DataStructures.getMemoryUse();
        memory += afterMemory - beforeMemory;
        if (debug > 0) System.out.println("Treap memory use = " + (memory / count) + " bytes");
    }
    if (debug > 1) System.out.println(treap.toString());
    if (debugTime) beforeRemoveTime = System.currentTimeMillis();
    for (int i = 0; i < unsorted.length; i++) {
        int item = unsorted[i];
        treap.remove(item);
        if (validateStructure && !(treap.getSize() == unsorted.length - (i + 1))) {
            System.err.println("YIKES!! " + item + " caused a size mismatch.");
            handleError(treap);
            return false;
        }
        if (validateContents && treap.contains(item)) {
            System.err.println("YIKES!! " + item + " still exists.");
            handleError(treap);
            return false;
        }
    }
    if (debugTime) {
        afterRemoveTime = System.currentTimeMillis();
        removeTime += afterRemoveTime - beforeRemoveTime;
        if (debug > 0) System.out.println("Treap remove time = " + removeTime / count + " ms");
    }
    // Pass 3 (sorted data): timings kept separate from the unsorted averages.
    //sorted
    long addSortedTime = 0L;
    long removeSortedTime = 0L;
    long beforeAddSortedTime = 0L;
    long afterAddSortedTime = 0L;
    long beforeRemoveSortedTime = 0L;
    long afterRemoveSortedTime = 0L;
    if (debugMemory) beforeMemory = DataStructures.getMemoryUse();
    if (debugTime) beforeAddSortedTime = System.currentTimeMillis();
    for (int i = 0; i < sorted.length; i++) {
        int item = sorted[i];
        treap.add(item);
        if (validateStructure && !(treap.getSize() == (i + 1))) {
            System.err.println("YIKES!! " + item + " caused a size mismatch.");
            handleError(treap);
            return false;
        }
        if (validateContents && !treap.contains(item)) {
            System.err.println("YIKES!! " + item + " doesn't exist.");
            handleError(treap);
            return false;
        }
    }
    if (debugTime) {
        afterAddSortedTime = System.currentTimeMillis();
        addSortedTime += afterAddSortedTime - beforeAddSortedTime;
        if (debug > 0) System.out.println("Treap add time = " + addSortedTime + " ms");
    }
    if (debugMemory) {
        afterMemory = DataStructures.getMemoryUse();
        memory += afterMemory - beforeMemory;
        if (debug > 0) System.out.println("Treap memory use = " + (memory / (count + 1)) + " bytes");
    }
    if (debug > 1) System.out.println(treap.toString());
    if (debugTime) beforeRemoveSortedTime = System.currentTimeMillis();
    for (int i = sorted.length - 1; i >= 0; i--) {
        int item = sorted[i];
        treap.remove(item);
        if (validateStructure && !(treap.getSize() == i)) {
            System.err.println("YIKES!! " + item + " caused a size mismatch.");
            handleError(treap);
            return false;
        }
        if (validateContents && treap.contains(item)) {
            System.err.println("YIKES!! " + item + " still exists.");
            handleError(treap);
            return false;
        }
    }
    if (debugTime) {
        afterRemoveSortedTime = System.currentTimeMillis();
        removeSortedTime += afterRemoveSortedTime - beforeRemoveSortedTime;
        if (debug > 0) System.out.println("Treap remove time = " + removeSortedTime + " ms");
    }
    // Row layout: {avg add, avg remove, sorted add, sorted remove, avg memory}.
    testResults[test++] = new long[]{addTime / count, removeTime / count, addSortedTime, removeSortedTime, memory / (count + 1)};
    if (debug > 1) System.out.println();
    return true;
}
/**
 * Benchmarks and validates the Trie implementation. Items from the shared
 * 'unsorted' and 'sorted' arrays are converted to their decimal String form
 * before insertion (the trie stores strings). Three passes are run: forward
 * insertion, reverse insertion, and sorted insertion, each followed by a
 * full removal. When validateStructure / validateContents are enabled, size
 * and membership are checked after every operation. Results are recorded in
 * the shared testNames/testResults tables under the "Trie" entry.
 *
 * @return true if every add/remove left the trie consistent; false on the
 *         first detected mismatch (after dumping the structure via handleError).
 */
private static boolean testTrie() {
{
long count = 0;
long addTime = 0L;
long removeTime = 0L;
long beforeAddTime = 0L;
long afterAddTime = 0L;
long beforeRemoveTime = 0L;
long afterRemoveTime = 0L;
long memory = 0L;
long beforeMemory = 0L;
long afterMemory = 0L;
//Trie.
if (debug>1) System.out.println("Trie.");
testNames[test] = "Trie";
// Pass 1: add the unsorted items (as strings) in array order.
count++;
if (debugMemory) beforeMemory = DataStructures.getMemoryUse();
if (debugTime) beforeAddTime = System.currentTimeMillis();
Trie<String> trie = new Trie<String>();
for (int i=0; i<unsorted.length; i++) {
int item = unsorted[i];
String string = String.valueOf(item);
trie.add(string);
if (validateStructure && !(trie.getSize()==i+1)) {
System.err.println("YIKES!! "+item+" caused a size mismatch.");
handleError(trie);
return false;
}
if (validateContents && !trie.contains(string)) {
System.err.println("YIKES!! "+string+" doesn't exist.");
handleError(trie);
return false;
}
}
if (debugTime) {
afterAddTime = System.currentTimeMillis();
addTime += afterAddTime-beforeAddTime;
if (debug>0) System.out.println("Trie add time = "+addTime/count+" ms");
}
if (debugMemory) {
afterMemory = DataStructures.getMemoryUse();
memory += afterMemory-beforeMemory;
if (debug>0) System.out.println("Trie memory use = "+(memory/count)+" bytes");
}
if (debug>1) System.out.println(trie.toString());
if (debugTime) beforeRemoveTime = System.currentTimeMillis();
for (int i=0; i<unsorted.length; i++) {
int item = unsorted[i];
String string = String.valueOf(item);
trie.remove(string);
if (validateStructure && !(trie.getSize()==unsorted.length-(i+1))) {
System.err.println("YIKES!! "+item+" caused a size mismatch.");
handleError(trie);
return false;
}
if (validateContents && trie.contains(string)) {
System.err.println("YIKES!! "+string+" still exists.");
handleError(trie);
return false;
}
}
if (debugTime) {
afterRemoveTime = System.currentTimeMillis();
removeTime += afterRemoveTime-beforeRemoveTime;
if (debug>0) System.out.println("Trie remove time = "+removeTime/count+" ms");
}
// Pass 2: add the same items in reverse array order, then remove them all.
count++;
if (debugMemory) beforeMemory = DataStructures.getMemoryUse();
if (debugTime) beforeAddTime = System.currentTimeMillis();
for (int i=unsorted.length-1; i>=0; i--) {
int item = unsorted[i];
String string = String.valueOf(item);
trie.add(string);
if (validateStructure && !(trie.getSize()==unsorted.length-i)) {
System.err.println("YIKES!! "+item+" caused a size mismatch.");
handleError(trie);
return false;
}
if (validateContents && !trie.contains(string)) {
System.err.println("YIKES!! "+string+" doesn't exists.");
handleError(trie);
return false;
}
}
if (debugTime) {
afterAddTime = System.currentTimeMillis();
addTime += afterAddTime-beforeAddTime;
if (debug>0) System.out.println("Trie add time = "+addTime/count+" ms");
}
if (debugMemory) {
afterMemory = DataStructures.getMemoryUse();
memory += afterMemory-beforeMemory;
if (debug>0) System.out.println("Trie memory use = "+(memory/count)+" bytes");
}
if (debug>1) System.out.println(trie.toString());
if (debugTime) beforeRemoveTime = System.currentTimeMillis();
for (int i=0; i<unsorted.length; i++) {
int item = unsorted[i];
String string = String.valueOf(item);
trie.remove(string);
if (validateStructure && !(trie.getSize()==unsorted.length-(i+1))) {
System.err.println("YIKES!! "+item+" caused a size mismatch.");
handleError(trie);
return false;
}
if (validateContents && trie.contains(string)) {
System.err.println("YIKES!! "+string+" still exists.");
handleError(trie);
return false;
}
}
if (debugTime) {
afterRemoveTime = System.currentTimeMillis();
removeTime += afterRemoveTime-beforeRemoveTime;
if (debug>0) System.out.println("Trie remove time = "+removeTime/count+" ms");
}
// Pass 3 (sorted data): timings kept separate from the unsorted averages.
//sorted
long addSortedTime = 0L;
long removeSortedTime = 0L;
long beforeAddSortedTime = 0L;
long afterAddSortedTime = 0L;
long beforeRemoveSortedTime = 0L;
long afterRemoveSortedTime = 0L;
if (debugMemory) beforeMemory = DataStructures.getMemoryUse();
if (debugTime) beforeAddSortedTime = System.currentTimeMillis();
for (int i=0; i<sorted.length; i++) {
int item = sorted[i];
String string = String.valueOf(item);
trie.add(string);
if (validateStructure && !(trie.getSize()==(i+1))) {
System.err.println("YIKES!! "+item+" caused a size mismatch.");
handleError(trie);
return false;
}
if (validateContents && !trie.contains(string)) {
System.err.println("YIKES!! "+item+" doesn't exist.");
handleError(trie);
return false;
}
}
if (debugTime) {
afterAddSortedTime = System.currentTimeMillis();
addSortedTime += afterAddSortedTime-beforeAddSortedTime;
if (debug>0) System.out.println("Trie add time = "+addSortedTime+" ms");
}
if (debugMemory) {
afterMemory = DataStructures.getMemoryUse();
memory += afterMemory-beforeMemory;
if (debug>0) System.out.println("Trie memory use = "+(memory/(count+1))+" bytes");
}
if (debug>1) System.out.println(trie.toString());
if (debugTime) beforeRemoveSortedTime = System.currentTimeMillis();
for (int i=sorted.length-1; i>=0; i--) {
int item = sorted[i];
String string = String.valueOf(item);
trie.remove(string);
if (validateStructure && !(trie.getSize()==i)) {
System.err.println("YIKES!! "+item+" caused a size mismatch.");
handleError(trie);
return false;
}
if (validateContents && trie.contains(string)) {
System.err.println("YIKES!! "+item+" still exists.");
handleError(trie);
return false;
}
}
if (debugTime) {
afterRemoveSortedTime = System.currentTimeMillis();
removeSortedTime += afterRemoveSortedTime-beforeRemoveSortedTime;
if (debug>0) System.out.println("Trie remove time = "+removeSortedTime+" ms");
}
// Row layout: {avg add, avg remove, sorted add, sorted remove, avg memory}.
testResults[test++] = new long[]{addTime/count,removeTime/count,addSortedTime,removeSortedTime,memory/(count+1)};
if (debug>1) System.out.println();
}
return true;
}
/**
 * Benchmarks and validates the TrieMap implementation. Keys are the decimal
 * String form of items from the shared 'unsorted' and 'sorted' arrays; the
 * mapped values are the loop index (unsorted passes) or the item itself
 * (sorted pass). Three passes are run: forward insertion, reverse insertion,
 * and sorted insertion, each followed by a full removal. When
 * validateStructure / validateContents are enabled, size and key membership
 * are checked after every operation. Results are recorded in the shared
 * testNames/testResults tables under the "Trie Map" entry.
 *
 * @return true if every put/remove left the map consistent; false on the
 *         first detected mismatch (after dumping the structure via handleError).
 */
private static boolean testTrieMap() {
{
long count = 0;
long addTime = 0L;
long removeTime = 0L;
long beforeAddTime = 0L;
long afterAddTime = 0L;
long beforeRemoveTime = 0L;
long afterRemoveTime = 0L;
long memory = 0L;
long beforeMemory = 0L;
long afterMemory = 0L;
//Trie Map
if (debug>1) System.out.println("Trie Map.");
testNames[test] = "Trie Map";
// Pass 1: put the unsorted items (as string keys) in array order.
count++;
if (debugMemory) beforeMemory = DataStructures.getMemoryUse();
if (debugTime) beforeAddTime = System.currentTimeMillis();
TrieMap<String,Integer> trieMap = new TrieMap<String,Integer>();
for (int i=0; i<unsorted.length; i++) {
int item = unsorted[i];
String string = String.valueOf(item);
trieMap.put(string, i);
if (validateStructure && !(trieMap.getSize()==i+1)) {
System.err.println("YIKES!! "+item+" caused a size mismatch.");
handleError(trieMap);
return false;
}
if (validateContents && !trieMap.contains(string)) {
System.err.println("YIKES!! "+string+" doesn't exist.");
handleError(trieMap);
return false;
}
}
if (debugTime) {
afterAddTime = System.currentTimeMillis();
addTime += afterAddTime-beforeAddTime;
if (debug>0) System.out.println("Trie Map add time = "+addTime/count+" ms");
}
if (debugMemory) {
afterMemory = DataStructures.getMemoryUse();
memory += afterMemory-beforeMemory;
if (debug>0) System.out.println("Trie Map memory use = "+(memory/count)+" bytes");
}
if (debug>1) System.out.println(trieMap.toString());
if (debugTime) beforeRemoveTime = System.currentTimeMillis();
for (int i=0; i<unsorted.length; i++) {
int item = unsorted[i];
String string = String.valueOf(item);
trieMap.remove(string);
if (validateStructure && !(trieMap.getSize()==(unsorted.length-(i+1)))) {
System.err.println("YIKES!! "+item+" caused a size mismatch.");
handleError(trieMap);
return false;
}
if (validateContents && trieMap.contains(string)) {
System.err.println("YIKES!! "+string+" still exists.");
handleError(trieMap);
return false;
}
}
if (debugTime) {
afterRemoveTime = System.currentTimeMillis();
removeTime += afterRemoveTime-beforeRemoveTime;
if (debug>0) System.out.println("Trie Map remove time = "+removeTime/count+" ms");
}
// Pass 2: put the same items in reverse array order, then remove them all.
count++;
if (debugMemory) beforeMemory = DataStructures.getMemoryUse();
if (debugTime) beforeAddTime = System.currentTimeMillis();
for (int i=unsorted.length-1; i>=0; i--) {
int item = unsorted[i];
String string = String.valueOf(item);
trieMap.put(string, i);
if (validateStructure && !(trieMap.getSize()==(unsorted.length-i))) {
System.err.println("YIKES!! "+item+" caused a size mismatch.");
handleError(trieMap);
return false;
}
if (validateContents && !trieMap.contains(string)) {
System.err.println("YIKES!! "+string+" doesn't exist.");
handleError(trieMap);
return false;
}
}
if (debugTime) {
afterAddTime = System.currentTimeMillis();
addTime += afterAddTime-beforeAddTime;
if (debug>0) System.out.println("Trie Map add time = "+addTime/count+" ms");
}
if (debugMemory) {
afterMemory = DataStructures.getMemoryUse();
memory += afterMemory-beforeMemory;
if (debug>0) System.out.println("Trie Map memory use = "+(memory/count)+" bytes");
}
if (debug>1) System.out.println(trieMap.toString());
if (debugTime) beforeRemoveTime = System.currentTimeMillis();
for (int i=0; i<unsorted.length; i++) {
int item = unsorted[i];
String string = String.valueOf(item);
trieMap.remove(string);
if (validateStructure && !(trieMap.getSize()==(unsorted.length-(i+1)))) {
System.err.println("YIKES!! "+item+" caused a size mismatch.");
handleError(trieMap);
return false;
}
if (validateContents && trieMap.contains(string)) {
System.err.println("YIKES!! "+string+" still exists.");
handleError(trieMap);
return false;
}
}
if (debugTime) {
afterRemoveTime = System.currentTimeMillis();
removeTime += afterRemoveTime-beforeRemoveTime;
if (debug>0) System.out.println("Trie Map remove time = "+removeTime/count+" ms");
}
// Pass 3 (sorted data): timings kept separate from the unsorted averages.
//sorted
long addSortedTime = 0L;
long removeSortedTime = 0L;
long beforeAddSortedTime = 0L;
long afterAddSortedTime = 0L;
long beforeRemoveSortedTime = 0L;
long afterRemoveSortedTime = 0L;
if (debugMemory) beforeMemory = DataStructures.getMemoryUse();
if (debugTime) beforeAddSortedTime = System.currentTimeMillis();
for (int i=0; i<sorted.length; i++) {
int item = sorted[i];
String string = String.valueOf(item);
trieMap.put(string,item);
if (validateStructure && !(trieMap.getSize()==(i+1))) {
System.err.println("YIKES!! "+item+" caused a size mismatch.");
handleError(trieMap);
return false;
}
if (validateContents && !trieMap.contains(string)) {
System.err.println("YIKES!! "+item+" doesn't exist.");
handleError(trieMap);
return false;
}
}
if (debugTime) {
afterAddSortedTime = System.currentTimeMillis();
addSortedTime += afterAddSortedTime-beforeAddSortedTime;
if (debug>0) System.out.println("Trie Map add time = "+addSortedTime+" ms");
}
if (debugMemory) {
afterMemory = DataStructures.getMemoryUse();
memory += afterMemory-beforeMemory;
if (debug>0) System.out.println("Trie Map memory use = "+(memory/(count+1))+" bytes");
}
if (debug>1) System.out.println(trieMap.toString());
if (debugTime) beforeRemoveSortedTime = System.currentTimeMillis();
for (int i=sorted.length-1; i>=0; i--) {
int item = sorted[i];
String string = String.valueOf(item);
trieMap.remove(string);
if (validateStructure && !(trieMap.getSize()==i)) {
System.err.println("YIKES!! "+item+" caused a size mismatch.");
handleError(trieMap);
return false;
}
if (validateContents && trieMap.contains(string)) {
System.err.println("YIKES!! "+item+" still exists.");
handleError(trieMap);
return false;
}
}
if (debugTime) {
afterRemoveSortedTime = System.currentTimeMillis();
removeSortedTime += afterRemoveSortedTime-beforeRemoveSortedTime;
if (debug>0) System.out.println("Trie Map remove time = "+removeSortedTime+" ms");
}
// Row layout: {avg add, avg remove, sorted add, sorted remove, avg memory}.
testResults[test++] = new long[]{addTime/count,removeTime/count,addSortedTime,removeSortedTime,memory/(count+1)};
if (debug>1) System.out.println();
}
return true;
}
/**
 * Renders the collected benchmark results as a tab-separated text table.
 * One header row is followed by one row per recorded test: padded name,
 * average add/remove times (unsorted), sorted add/remove times, and memory
 * use. Entries whose name or result row is null are skipped. The repeated
 * inline time/size humanization has been factored into formatTime/formatSize;
 * the produced output is unchanged.
 *
 * @param names   per-test display names (parallel to results), entries may be null
 * @param results per-test rows {avg add, avg remove, sorted add, sorted remove, memory}
 * @return the formatted table, ending with a trailing newline per data row
 */
private static final String getTestResults(String[] names, long[][] results) {
    StringBuilder resultsBuilder = new StringBuilder();
    resultsBuilder.append("Data Structure ").append("\t");
    resultsBuilder.append("Add time").append("\t").append("Remove time").append("\t");
    resultsBuilder.append("Sorted add time").append("\t").append("Sorted Remove time").append("\t");
    resultsBuilder.append("Size");
    resultsBuilder.append("\n");
    for (int i = 0; i < TESTS; i++) {
        String name = names[i];
        long[] result = results[i];
        if (name != null && result != null) {
            // Left-justify the name in a 20-character column (longer names pass through).
            name = String.format("%-20s", name);
            resultsBuilder.append(name).append("\t");
            resultsBuilder.append(formatTime(result[0])).append("\t\t");
            resultsBuilder.append(formatTime(result[1])).append("\t\t");
            // sorted timings
            resultsBuilder.append(formatTime(result[2])).append("\t\t");
            resultsBuilder.append(formatTime(result[3])).append("\t\t\t");
            resultsBuilder.append(formatSize(result[4]));
            resultsBuilder.append("\n");
        }
    }
    return resultsBuilder.toString();
}

/**
 * Humanizes a millisecond duration: "N mins" above one minute, "N secs"
 * above one second, otherwise "N ms" (integer division, matching the
 * original inline formatting).
 */
private static String formatTime(long time) {
    final long SECOND = 1000;
    final long MINUTES = 60 * SECOND;
    if (time > MINUTES) return (time / MINUTES) + " mins";
    if (time > SECOND) return (time / SECOND) + " secs";
    return time + " ms";
}

/**
 * Humanizes a byte count using decimal units (1 KB = 1000 bytes, matching
 * the original inline formatting): "N MB", "N KB" or "N Bytes".
 */
private static String formatSize(long size) {
    final int KB = 1000;
    final int MB = 1000 * KB;
    if (size > MB) return (size / MB) + " MB";
    if (size > KB) return (size / KB) + " KB";
    return size + " Bytes";
}
/**
 * Builds a human-readable dump of shortest-path results: one "From ... to
 * vertex=..." header per destination, followed by the cost/path pair when
 * one exists.
 *
 * @param start the source vertex all paths originate from
 * @param map   destination vertex -> cost/path pair (values may be null)
 * @return the concatenated description, one entry per map key
 */
private static final String getPathMapString(Graph.Vertex<Integer> start, Map<Graph.Vertex<Integer>, Graph.CostPathPair<Integer>> map) {
    final StringBuilder output = new StringBuilder();
    for (Map.Entry<Graph.Vertex<Integer>, Graph.CostPathPair<Integer>> entry : map.entrySet()) {
        output.append("From ").append(start.getValue()).append(" to vertex=").append(entry.getKey().getValue()).append("\n");
        final Graph.CostPathPair<Integer> pair = entry.getValue();
        if (pair != null) output.append(pair.toString()).append("\n");
    }
    return output.toString();
}
/**
 * Builds a human-readable dump of all-pairs path results: one "From=... to=..."
 * header per (source, destination) pair, followed by the edge set of that path.
 *
 * @param paths source vertex -> (destination vertex -> path edge set)
 * @return the concatenated description for every pair in the nested maps
 */
private static final String getPathMapString(Map<Vertex<Integer>, Map<Vertex<Integer>, Set<Edge<Integer>>>> paths) {
    final StringBuilder output = new StringBuilder();
    for (Map.Entry<Vertex<Integer>, Map<Vertex<Integer>, Set<Edge<Integer>>>> source : paths.entrySet()) {
        for (Map.Entry<Vertex<Integer>, Set<Edge<Integer>>> target : source.getValue().entrySet()) {
            output.append("From=").append(source.getKey().getValue()).append(" to=").append(target.getKey().getValue()).append("\n");
            output.append(target.getValue()).append("\n");
        }
    }
    return output.toString();
}
/**
 * Builds a human-readable dump of all-pairs path weights: one "From=... to=..."
 * header per (source, destination) pair, followed by the weight value.
 *
 * @param paths source vertex -> (destination vertex -> path weight)
 * @return the concatenated description for every pair in the nested maps
 */
private static final String getWeightMapString(Map<Vertex<Integer>, Map<Vertex<Integer>, Integer>> paths) {
    final StringBuilder output = new StringBuilder();
    for (Map.Entry<Vertex<Integer>, Map<Vertex<Integer>, Integer>> source : paths.entrySet()) {
        for (Map.Entry<Vertex<Integer>, Integer> target : source.getValue().entrySet()) {
            output.append("From=").append(source.getKey().getValue()).append(" to=").append(target.getKey().getValue()).append("\n");
            output.append(target.getValue()).append("\n");
        }
    }
    return output.toString();
}
/**
 * Estimates the heap currently in use (total minus free), forcing garbage
 * collection before each Runtime query so the reading is as stable as a
 * GC-based estimate can be.
 *
 * @return approximate number of heap bytes in use
 */
private static final long getMemoryUse() {
    final Runtime runtime = Runtime.getRuntime();
    putOutTheGarbage();
    final long totalMemory = runtime.totalMemory();
    putOutTheGarbage();
    final long freeMemory = runtime.freeMemory();
    return totalMemory - freeMemory;
}
/**
 * Runs two full garbage-collection passes back to back, since a single
 * System.gc() request is only advisory.
 */
private static final void putOutTheGarbage() {
    for (int pass = 0; pass < 2; pass++) {
        collectGarbage();
    }
}
// Pause between GC / finalization requests, giving the VM time to act on them.
private static final long fSLEEP_INTERVAL = 50;

/**
 * Requests garbage collection and finalization, sleeping briefly after each
 * request so the VM has a chance to honor it (both calls are advisory).
 *
 * Fix: if the sleep is interrupted, the thread's interrupt status is now
 * restored via Thread.currentThread().interrupt() instead of being silently
 * swallowed, so callers can still observe the interruption.
 */
private static final void collectGarbage() {
    try {
        System.gc();
        Thread.sleep(fSLEEP_INTERVAL);
        System.runFinalization();
        Thread.sleep(fSLEEP_INTERVAL);
    } catch (InterruptedException ex) {
        // Re-assert the interrupt flag before reporting; previously it was lost.
        Thread.currentThread().interrupt();
        ex.printStackTrace();
    }
}
}
|
src/com/jwetherell/algorithms/DataStructures.java
|
package com.jwetherell.algorithms;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.Set;
import com.jwetherell.algorithms.data_structures.AVLTree;
import com.jwetherell.algorithms.data_structures.BinaryHeap;
import com.jwetherell.algorithms.data_structures.BinarySearchTree;
import com.jwetherell.algorithms.data_structures.Graph;
import com.jwetherell.algorithms.data_structures.Graph.Edge;
import com.jwetherell.algorithms.data_structures.Graph.Vertex;
import com.jwetherell.algorithms.data_structures.HashMap;
import com.jwetherell.algorithms.data_structures.LinkedList;
import com.jwetherell.algorithms.data_structures.Matrix;
import com.jwetherell.algorithms.data_structures.PatriciaTrie;
import com.jwetherell.algorithms.data_structures.Queue;
import com.jwetherell.algorithms.data_structures.RadixTree;
import com.jwetherell.algorithms.data_structures.SegmentTree;
import com.jwetherell.algorithms.data_structures.SkipList;
import com.jwetherell.algorithms.data_structures.SplayTree;
import com.jwetherell.algorithms.data_structures.Stack;
import com.jwetherell.algorithms.data_structures.SuffixTree;
import com.jwetherell.algorithms.data_structures.SuffixTrie;
import com.jwetherell.algorithms.data_structures.Treap;
import com.jwetherell.algorithms.data_structures.Trie;
import com.jwetherell.algorithms.data_structures.TrieMap;
import com.jwetherell.algorithms.graph.BellmanFord;
import com.jwetherell.algorithms.graph.CycleDetection;
import com.jwetherell.algorithms.graph.Dijkstra;
import com.jwetherell.algorithms.graph.FloydWarshall;
import com.jwetherell.algorithms.graph.Johnson;
import com.jwetherell.algorithms.graph.Prim;
import com.jwetherell.algorithms.graph.TopologicalSort;
public class DataStructures {
private static final int NUMBER_OF_TESTS = 100;
private static final Random RANDOM = new Random();
private static final int ARRAY_SIZE = 1000;
private static Integer[] unsorted = null;
private static Integer[] reversed = null;
private static Integer[] sorted = null;
private static String string = null;
private static int debug = 1; //debug level. 0=None, 1=Time and Memory (if enabled), 2=Time, Memory, data structure debug
private static boolean debugTime = true; //How much time to: add all, remove all, add all items in reverse order, remove all
private static boolean debugMemory = true; //How much memory is used by the data structure
private static boolean validateStructure = false; //Is the data structure valid (passed invariants) and proper size
private static boolean validateContents = false; //Was the item added/removed really added/removed from the structure
private static final int TESTS = 17; //Number of dynamic data structures to test
private static int test = 0;
private static String[] testNames = new String[TESTS];
private static long[][] testResults = new long[TESTS][];
/**
 * Entry point: executes the full data structure test suite
 * {@code NUMBER_OF_TESTS} times, stopping early on the first failure.
 *
 * @param args unused command line arguments
 */
public static void main(String[] args) {
    System.out.println("Starting tests.");
    boolean passed = true;
    // Stop iterating as soon as a run reports a failure.
    for (int run = 0; run < NUMBER_OF_TESTS && passed; run++) {
        passed = runTests();
    }
    if (passed) {
        System.out.println("Tests finished. All passed.");
    } else {
        System.err.println("Tests finished. Detected a failure.");
    }
}
/**
 * Generates a fresh data set (an array of unique random integers plus
 * reversed and sorted copies of it) and runs every data structure test once.
 *
 * @return true if every test passed, false as soon as any test fails
 */
private static boolean runTests() {
    test = 0;
    System.out.println("Generating data.");
    StringBuilder builder = new StringBuilder();
    builder.append("Array=");
    unsorted = new Integer[ARRAY_SIZE];
    // Track values already chosen so the array contains no duplicates; the
    // set makes each uniqueness check O(1) instead of re-scanning the whole
    // prefix of the array for every candidate (previously O(n^2) overall).
    Set<Integer> seen = new HashSet<Integer>();
    for (int i = 0; i < unsorted.length; i++) {
        Integer j = RANDOM.nextInt(unsorted.length * 10);
        while (!seen.add(j)) {
            // Duplicate; draw again until we find an unused value.
            j = RANDOM.nextInt(unsorted.length * 10);
        }
        unsorted[i] = j;
        builder.append(j).append(',');
    }
    builder.append('\n');
    string = builder.toString();
    if (debug > 1) System.out.println(string);
    reversed = new Integer[ARRAY_SIZE];
    for (int i = unsorted.length - 1, j = 0; i >= 0; i--, j++) {
        reversed[j] = unsorted[i];
    }
    sorted = Arrays.copyOf(unsorted, unsorted.length);
    Arrays.sort(sorted);
    System.out.println("Generated data.");
    // DYNAMIC DATA STRUCTURES
    if (!testAVLTree()) {
        System.err.println("AVL Tree failed.");
        return false;
    }
    if (!testBST()) {
        System.err.println("BST failed.");
        return false;
    }
    if (!testHeap()) {
        System.err.println("Heap failed.");
        return false;
    }
    if (!testHashMap()) {
        System.err.println("Hash Map failed.");
        return false;
    }
    if (!testLinkedList()) {
        System.err.println("Linked List failed.");
        return false;
    }
    if (!testPatriciaTrie()) {
        System.err.println("Patricia Trie failed.");
        return false;
    }
    if (!testQueue()) {
        System.err.println("Queue failed.");
        return false;
    }
    if (!testRadixTree()) {
        System.err.println("Radix Tree failed.");
        return false;
    }
    if (!testSkipList()) {
        System.err.println("Skip List failed.");
        return false;
    }
    if (!testSplayTree()) {
        System.err.println("Splay Tree failed.");
        return false;
    }
    if (!testStack()) {
        System.err.println("Stack failed.");
        return false;
    }
    if (!testTreap()) {
        System.err.println("Treap failed.");
        return false;
    }
    if (!testTrie()) {
        System.err.println("Trie failed.");
        return false;
    }
    if (!testTrieMap()) {
        System.err.println("Trie Map failed.");
        return false;
    }
    // Timing/memory summary only makes sense when both were collected.
    if (debugTime && debugMemory) {
        String results = getTestResults(testNames, testResults);
        System.out.println(results);
    }
    // STATIC DATA STRUCTURES
    if (!testGraph()) {
        System.err.println("Graph failed.");
        return false;
    }
    if (!testMatrix()) {
        System.err.println("Matrix failed.");
        return false;
    }
    if (!testSegmentTree()) {
        System.err.println("Segment Tree failed.");
        return false;
    }
    if (!testSuffixTree()) {
        System.err.println("Suffix Tree failed.");
        return false;
    }
    if (!testSuffixTrie()) {
        System.err.println("Suffix Trie failed.");
        return false;
    }
    return true;
}
/**
 * Dumps the generated input data followed by the failing structure's state
 * to stderr so a failure can be reproduced and inspected.
 *
 * @param obj the data structure that failed validation
 */
private static void handleError(Object obj) {
    // Print the input first so the failure can be reproduced, then the
    // structure's own rendering of its state.
    final String state = obj.toString();
    System.err.println(string);
    System.err.println(state);
}
/**
 * Exercises the AVL tree through three add/remove passes (random order,
 * reverse order, sorted order) while optionally validating structure and
 * contents, and records timing/memory results.
 *
 * <p>Bug fix: the final remove-time measurement previously subtracted
 * {@code beforeAddTime} instead of {@code beforeRemoveTime}, corrupting the
 * reported remove time.
 *
 * @return true if every operation and validation succeeded
 */
private static boolean testAVLTree() {
    long count = 0;
    long addTime = 0L;
    long removeTime = 0L;
    long beforeAddTime = 0L;
    long afterAddTime = 0L;
    long beforeRemoveTime = 0L;
    long afterRemoveTime = 0L;
    long memory = 0L;
    long beforeMemory = 0L;
    long afterMemory = 0L;
    if (debug > 1) System.out.println("AVL Tree");
    testNames[test] = "AVL Tree";

    // Pass 1: insert the random array in order, then remove in the same order.
    count++;
    if (debugMemory) beforeMemory = DataStructures.getMemoryUse();
    if (debugTime) beforeAddTime = System.currentTimeMillis();
    AVLTree<Integer> tree = new AVLTree<Integer>();
    if (!avlAddAll(tree, unsorted, false)) return false;
    if (debugTime) {
        afterAddTime = System.currentTimeMillis();
        addTime += afterAddTime - beforeAddTime;
        if (debug > 0) System.out.println("AVL Tree add time = " + addTime / count + " ms");
    }
    if (debugMemory) {
        afterMemory = DataStructures.getMemoryUse();
        memory += afterMemory - beforeMemory;
        if (debug > 0) System.out.println("AVL Tree memory use = " + (memory / count) + " bytes");
    }
    if (debug > 1) System.out.println(tree.toString());
    if (debugTime) beforeRemoveTime = System.currentTimeMillis();
    if (!avlRemoveAll(tree, unsorted, false)) return false;
    if (debugTime) {
        afterRemoveTime = System.currentTimeMillis();
        removeTime += afterRemoveTime - beforeRemoveTime;
        if (debug > 0) System.out.println("AVL Tree remove time = " + removeTime / count + " ms");
    }

    // Pass 2: insert in reverse order, remove in the original order.
    count++;
    if (debugMemory) beforeMemory = DataStructures.getMemoryUse();
    if (debugTime) beforeAddTime = System.currentTimeMillis();
    if (!avlAddAll(tree, unsorted, true)) return false;
    if (debugTime) {
        afterAddTime = System.currentTimeMillis();
        addTime += afterAddTime - beforeAddTime;
        if (debug > 0) System.out.println("AVL Tree add time = " + addTime / count + " ms");
    }
    if (debugMemory) {
        afterMemory = DataStructures.getMemoryUse();
        memory += afterMemory - beforeMemory;
        if (debug > 0) System.out.println("AVL Tree memory use = " + (memory / count) + " bytes");
    }
    if (debug > 1) System.out.println(tree.toString());
    if (debugTime) beforeRemoveTime = System.currentTimeMillis();
    if (!avlRemoveAll(tree, unsorted, false)) return false;
    if (debugTime) {
        afterRemoveTime = System.currentTimeMillis();
        removeTime += afterRemoveTime - beforeRemoveTime;
        if (debug > 0) System.out.println("AVL Tree remove time = " + removeTime / count + " ms");
    }

    // Pass 3: insert in sorted order (worst case for unbalanced trees),
    // remove in descending order.
    count++;
    if (debugMemory) beforeMemory = DataStructures.getMemoryUse();
    if (debugTime) beforeAddTime = System.currentTimeMillis();
    if (!avlAddAll(tree, sorted, false)) return false;
    if (debugTime) {
        afterAddTime = System.currentTimeMillis();
        addTime += afterAddTime - beforeAddTime;
        if (debug > 0) System.out.println("AVL Tree add time = " + addTime / count + " ms");
    }
    if (debugMemory) {
        afterMemory = DataStructures.getMemoryUse();
        memory += afterMemory - beforeMemory;
        if (debug > 0) System.out.println("AVL Tree memory use = " + (memory / count) + " bytes");
    }
    if (debug > 1) System.out.println(tree.toString());
    if (debugTime) beforeRemoveTime = System.currentTimeMillis();
    if (!avlRemoveAll(tree, sorted, true)) return false;
    if (debugTime) {
        afterRemoveTime = System.currentTimeMillis();
        // FIX: was "afterRemoveTime - beforeAddTime", which mixed the add and
        // remove timing windows and inflated/deflated the remove time.
        removeTime += afterRemoveTime - beforeRemoveTime;
        if (debug > 0) System.out.println("AVL Tree remove time = " + removeTime / count + " ms");
    }
    testResults[test++] = new long[]{addTime / count, removeTime / count, memory};
    if (debug > 1) System.out.println();
    return true;
}

/**
 * Adds every value of {@code items} to {@code tree}, validating the tree's
 * invariants, size, and contents after each insertion when enabled.
 *
 * @param tree       tree under test
 * @param items      values to insert
 * @param descending true to insert from the last element to the first
 * @return true if all insertions and validations succeeded
 */
private static boolean avlAddAll(AVLTree<Integer> tree, Integer[] items, boolean descending) {
    for (int k = 0; k < items.length; k++) {
        int item = items[descending ? items.length - 1 - k : k];
        tree.add(item);
        if (validateStructure && !tree.validate()) {
            System.err.println("YIKES!! AVL Tree isn't valid.");
            handleError(tree);
            return false;
        }
        // Regardless of direction, k+1 items have been inserted so far.
        if (validateStructure && !(tree.getSize() == (k + 1))) {
            System.err.println("YIKES!! " + item + " caused a size mismatch.");
            handleError(tree);
            return false;
        }
        if (validateContents && !tree.contains(item)) {
            System.err.println("YIKES!! " + item + " doesn't exist.");
            handleError(tree);
            return false;
        }
    }
    return true;
}

/**
 * Removes every value of {@code items} from {@code tree}, validating the
 * tree's invariants, size, and contents after each removal when enabled.
 *
 * @param tree       tree under test
 * @param items      values to remove
 * @param descending true to remove from the last element to the first
 * @return true if all removals and validations succeeded
 */
private static boolean avlRemoveAll(AVLTree<Integer> tree, Integer[] items, boolean descending) {
    for (int k = 0; k < items.length; k++) {
        int item = items[descending ? items.length - 1 - k : k];
        tree.remove(item);
        if (validateStructure && !tree.validate()) {
            System.err.println("YIKES!! AVL Tree isn't valid.");
            handleError(tree);
            return false;
        }
        // Regardless of direction, k+1 items have been removed so far.
        if (validateStructure && !(tree.getSize() == (items.length - (k + 1)))) {
            System.err.println("YIKES!! " + item + " caused a size mismatch.");
            handleError(tree);
            return false;
        }
        if (validateContents && tree.contains(item)) {
            System.err.println("YIKES!! " + item + " still exists.");
            handleError(tree);
            return false;
        }
    }
    return true;
}
/**
 * Exercises the binary search tree with each of its three replacement-node
 * strategies (FIRST, MIDDLE, RANDOM). The three runs are identical except
 * for the strategy and labels, so the logic lives in one helper.
 *
 * <p>Bug fix: each run's final remove-time measurement previously
 * subtracted {@code beforeAddTime} instead of {@code beforeRemoveTime},
 * corrupting the reported remove time.
 *
 * @return true if every operation and validation succeeded
 */
private static boolean testBST() {
    return testBstVariant("Binary search tree with first node.", "BST (first)", BinarySearchTree.TYPE.FIRST)
        && testBstVariant("Binary search tree with middle node.", "BST (middle)", BinarySearchTree.TYPE.MIDDLE)
        && testBstVariant("Binary search tree with random node.", "BST (random)", BinarySearchTree.TYPE.RANDOM);
}

/**
 * Runs the full BST benchmark for one replacement-node strategy: bulk
 * construction from the random array, then re-adds in reverse and sorted
 * order, removing everything after each pass, with optional validation and
 * timing/memory bookkeeping.
 *
 * @param description debug banner printed when debug > 1
 * @param name        label used for test results and log messages
 * @param type        replacement-node strategy under test
 * @return true if every operation and validation succeeded
 */
private static boolean testBstVariant(String description, String name, BinarySearchTree.TYPE type) {
    long count = 0;
    long addTime = 0L;
    long removeTime = 0L;
    long beforeAddTime = 0L;
    long afterAddTime = 0L;
    long beforeRemoveTime = 0L;
    long afterRemoveTime = 0L;
    long memory = 0L;
    long beforeMemory = 0L;
    long afterMemory = 0L;
    if (debug > 1) System.out.println(description);
    testNames[test] = name;

    // Pass 1: bulk-construct from the random array, remove in the same order.
    count++;
    if (debugMemory) beforeMemory = DataStructures.getMemoryUse();
    if (debugTime) beforeAddTime = System.currentTimeMillis();
    BinarySearchTree<Integer> bst = new BinarySearchTree<Integer>(unsorted, type);
    if (validateContents && !bstContainsAll(bst, unsorted)) return false;
    if (debugTime) {
        afterAddTime = System.currentTimeMillis();
        addTime += afterAddTime - beforeAddTime;
        if (debug > 0) System.out.println(name + " add time = " + addTime / count + " ms");
    }
    if (debugMemory) {
        afterMemory = DataStructures.getMemoryUse();
        memory += afterMemory - beforeMemory;
        if (debug > 0) System.out.println(name + " memory use = " + (memory / count) + " bytes");
    }
    if (debug > 1) System.out.println(bst.toString());
    if (debugTime) beforeRemoveTime = System.currentTimeMillis();
    if (!bstRemoveAll(bst, name, unsorted, false)) return false;
    if (debugTime) {
        afterRemoveTime = System.currentTimeMillis();
        removeTime += afterRemoveTime - beforeRemoveTime;
        if (debug > 0) System.out.println(name + " remove time = " + removeTime / count + " ms");
    }

    // Pass 2: re-add everything in reverse order, remove in original order.
    count++;
    if (debugMemory) beforeMemory = DataStructures.getMemoryUse();
    if (debugTime) beforeAddTime = System.currentTimeMillis();
    bst.addAll(reversed);
    if (validateContents && !bstContainsAll(bst, unsorted)) return false;
    if (debugTime) {
        afterAddTime = System.currentTimeMillis();
        addTime += afterAddTime - beforeAddTime;
        if (debug > 0) System.out.println(name + " add time = " + addTime / count + " ms");
    }
    if (debugMemory) {
        afterMemory = DataStructures.getMemoryUse();
        memory += afterMemory - beforeMemory;
        if (debug > 0) System.out.println(name + " memory use = " + (memory / count) + " bytes");
    }
    if (debug > 1) System.out.println(bst.toString());
    if (debugTime) beforeRemoveTime = System.currentTimeMillis();
    if (!bstRemoveAll(bst, name, unsorted, false)) return false;
    if (debugTime) {
        afterRemoveTime = System.currentTimeMillis();
        removeTime += afterRemoveTime - beforeRemoveTime;
        if (debug > 0) System.out.println(name + " remove time = " + removeTime / count + " ms");
    }

    // Pass 3: re-add in sorted order (degenerate shape for a plain BST),
    // remove in descending order.
    count++;
    if (debugMemory) beforeMemory = DataStructures.getMemoryUse();
    if (debugTime) beforeAddTime = System.currentTimeMillis();
    bst.addAll(sorted);
    if (validateContents && !bstContainsAll(bst, sorted)) return false;
    if (debugTime) {
        afterAddTime = System.currentTimeMillis();
        addTime += afterAddTime - beforeAddTime;
        if (debug > 0) System.out.println(name + " add time = " + addTime / count + " ms");
    }
    if (debugMemory) {
        afterMemory = DataStructures.getMemoryUse();
        memory += afterMemory - beforeMemory;
        if (debug > 0) System.out.println(name + " memory use = " + (memory / count) + " bytes");
    }
    if (debug > 1) System.out.println(bst.toString());
    if (debugTime) beforeRemoveTime = System.currentTimeMillis();
    if (!bstRemoveAll(bst, name, sorted, true)) return false;
    if (debugTime) {
        afterRemoveTime = System.currentTimeMillis();
        // FIX: was "afterRemoveTime - beforeAddTime" in all three variants,
        // which mixed the add and remove timing windows.
        removeTime += afterRemoveTime - beforeRemoveTime;
        if (debug > 0) System.out.println(name + " remove time = " + removeTime / count + " ms");
    }
    testResults[test++] = new long[]{addTime / count, removeTime / count, memory};
    if (debug > 1) System.out.println();
    return true;
}

/**
 * Verifies that every value of {@code items} is present in {@code bst}.
 *
 * @param bst   tree under test
 * @param items values that must all be contained
 * @return true if all values were found
 */
private static boolean bstContainsAll(BinarySearchTree<Integer> bst, Integer[] items) {
    for (int i = 0; i < items.length; i++) {
        int item = items[i];
        if (!bst.contains(item)) {
            System.err.println("YIKES!! " + item + " doesn't exist.");
            handleError(bst);
            return false;
        }
    }
    return true;
}

/**
 * Removes every value of {@code items} from {@code bst}, validating the
 * tree's invariants, size, and contents after each removal when enabled.
 *
 * @param bst        tree under test
 * @param name       label used in failure messages
 * @param items      values to remove
 * @param descending true to remove from the last element to the first
 * @return true if all removals and validations succeeded
 */
private static boolean bstRemoveAll(BinarySearchTree<Integer> bst, String name, Integer[] items, boolean descending) {
    for (int k = 0; k < items.length; k++) {
        int item = items[descending ? items.length - 1 - k : k];
        bst.remove(item);
        if (validateStructure && !bst.validate()) {
            System.err.println("YIKES!! " + name + " isn't valid.");
            handleError(bst);
            return false;
        }
        // Regardless of direction, k+1 items have been removed so far.
        if (validateStructure && !(bst.getSize() == (items.length - (k + 1)))) {
            System.err.println("YIKES!! " + item + " caused a size mismatch.");
            handleError(bst);
            return false;
        }
        if (validateContents && bst.contains(item)) {
            System.err.println("YIKES!! " + item + " still exists.");
            handleError(bst);
            return false;
        }
    }
    return true;
}
private static boolean testGraph() {
{
// UNDIRECTED GRAPH
if (debug>1) System.out.println("Undirected Graph.");
List<Vertex<Integer>> verticies = new ArrayList<Vertex<Integer>>();
Graph.Vertex<Integer> v1 = new Graph.Vertex<Integer>(1);
verticies.add(v1);
Graph.Vertex<Integer> v2 = new Graph.Vertex<Integer>(2);
verticies.add(v2);
Graph.Vertex<Integer> v3 = new Graph.Vertex<Integer>(3);
verticies.add(v3);
Graph.Vertex<Integer> v4 = new Graph.Vertex<Integer>(4);
verticies.add(v4);
Graph.Vertex<Integer> v5 = new Graph.Vertex<Integer>(5);
verticies.add(v5);
Graph.Vertex<Integer> v6 = new Graph.Vertex<Integer>(6);
verticies.add(v6);
List<Edge<Integer>> edges = new ArrayList<Edge<Integer>>();
Graph.Edge<Integer> e1_2 = new Graph.Edge<Integer>(7, v1, v2);
edges.add(e1_2);
Graph.Edge<Integer> e1_3 = new Graph.Edge<Integer>(9, v1, v3);
edges.add(e1_3);
Graph.Edge<Integer> e1_6 = new Graph.Edge<Integer>(14, v1, v6);
edges.add(e1_6);
Graph.Edge<Integer> e2_3 = new Graph.Edge<Integer>(10, v2, v3);
edges.add(e2_3);
Graph.Edge<Integer> e2_4 = new Graph.Edge<Integer>(15, v2, v4);
edges.add(e2_4);
Graph.Edge<Integer> e3_4 = new Graph.Edge<Integer>(11, v3, v4);
edges.add(e3_4);
Graph.Edge<Integer> e3_6 = new Graph.Edge<Integer>(2, v3, v6);
edges.add(e3_6);
Graph.Edge<Integer> e5_6 = new Graph.Edge<Integer>(9, v5, v6);
edges.add(e5_6);
Graph.Edge<Integer> e4_5 = new Graph.Edge<Integer>(6, v4, v5);
edges.add(e4_5);
Graph<Integer> undirected = new Graph<Integer>(verticies,edges);
if (debug>1) System.out.println(undirected.toString());
Graph.Vertex<Integer> start = v1;
if (debug>1) System.out.println("Dijstra's shortest paths of the undirected graph from "+start.getValue());
Map<Graph.Vertex<Integer>, Graph.CostPathPair<Integer>> map1 = Dijkstra.getShortestPaths(undirected, start);
if (debug>1) System.out.println(getPathMapString(start,map1));
Graph.Vertex<Integer> end = v5;
if (debug>1) System.out.println("Dijstra's shortest path of the undirected graph from "+start.getValue()+" to "+end.getValue());
Graph.CostPathPair<Integer> pair1 = Dijkstra.getShortestPath(undirected, start, end);
if (debug>1) {
if (pair1!=null) System.out.println(pair1.toString());
else System.out.println("No path from "+start.getValue()+" to "+end.getValue());
}
start = v1;
if (debug>1) System.out.println("Bellman-Ford's shortest paths of the undirected graph from "+start.getValue());
Map<Graph.Vertex<Integer>, Graph.CostPathPair<Integer>> map2 = BellmanFord.getShortestPaths(undirected, start);
if (debug>1) System.out.println(getPathMapString(start,map2));
end = v5;
if (debug>1) System.out.println("Bellman-Ford's shortest path of the undirected graph from "+start.getValue()+" to "+end.getValue());
Graph.CostPathPair<Integer> pair2 = BellmanFord.getShortestPath(undirected, start, end);
if (debug>1) {
if (pair2!=null) System.out.println(pair2.toString());
else System.out.println("No path from "+start.getValue()+" to "+end.getValue());
}
if (debug>1) System.out.println("Prim's minimum spanning tree of the undirected graph from "+start.getValue());
Graph.CostPathPair<Integer> pair = Prim.getMinimumSpanningTree(undirected, start);
if (debug>1) System.out.println(pair.toString());
if (debug>1) System.out.println();
}
{
// DIRECTED GRAPH
if (debug>1) System.out.println("Directed Graph.");
List<Vertex<Integer>> verticies = new ArrayList<Vertex<Integer>>();
Graph.Vertex<Integer> v1 = new Graph.Vertex<Integer>(1);
verticies.add(v1);
Graph.Vertex<Integer> v2 = new Graph.Vertex<Integer>(2);
verticies.add(v2);
Graph.Vertex<Integer> v3 = new Graph.Vertex<Integer>(3);
verticies.add(v3);
Graph.Vertex<Integer> v4 = new Graph.Vertex<Integer>(4);
verticies.add(v4);
Graph.Vertex<Integer> v5 = new Graph.Vertex<Integer>(5);
verticies.add(v5);
Graph.Vertex<Integer> v6 = new Graph.Vertex<Integer>(6);
verticies.add(v6);
Graph.Vertex<Integer> v7 = new Graph.Vertex<Integer>(7);
verticies.add(v7);
List<Edge<Integer>> edges = new ArrayList<Edge<Integer>>();
Graph.Edge<Integer> e1_2 = new Graph.Edge<Integer>(7, v1, v2);
edges.add(e1_2);
Graph.Edge<Integer> e1_3 = new Graph.Edge<Integer>(9, v1, v3);
edges.add(e1_3);
Graph.Edge<Integer> e1_6 = new Graph.Edge<Integer>(14, v1, v6);
edges.add(e1_6);
Graph.Edge<Integer> e2_3 = new Graph.Edge<Integer>(10, v2, v3);
edges.add(e2_3);
Graph.Edge<Integer> e2_4 = new Graph.Edge<Integer>(15, v2, v4);
edges.add(e2_4);
Graph.Edge<Integer> e3_4 = new Graph.Edge<Integer>(11, v3, v4);
edges.add(e3_4);
Graph.Edge<Integer> e3_6 = new Graph.Edge<Integer>(2, v3, v6);
edges.add(e3_6);
Graph.Edge<Integer> e6_5 = new Graph.Edge<Integer>(9, v6, v5);
edges.add(e6_5);
Graph.Edge<Integer> e4_5 = new Graph.Edge<Integer>(6, v4, v5);
edges.add(e4_5);
Graph.Edge<Integer> e4_7 = new Graph.Edge<Integer>(16, v4, v7);
edges.add(e4_7);
Graph<Integer> directed = new Graph<Integer>(Graph.TYPE.DIRECTED,verticies,edges);
if (debug>1) System.out.println(directed.toString());
Graph.Vertex<Integer> start = v1;
if (debug>1) System.out.println("Dijstra's shortest paths of the directed graph from "+start.getValue());
Map<Graph.Vertex<Integer>, Graph.CostPathPair<Integer>> map = Dijkstra.getShortestPaths(directed, start);
if (debug>1) System.out.println(getPathMapString(start,map));
Graph.Vertex<Integer> end = v5;
if (debug>1) System.out.println("Dijstra's shortest path of the directed graph from "+start.getValue()+" to "+end.getValue());
Graph.CostPathPair<Integer> pair = Dijkstra.getShortestPath(directed, start, end);
if (debug>1) {
if (pair!=null) System.out.println(pair.toString());
else System.out.println("No path from "+start.getValue()+" to "+end.getValue());
}
start = v1;
if (debug>1) System.out.println("Bellman-Ford's shortest paths of the undirected graph from "+start.getValue());
Map<Graph.Vertex<Integer>, Graph.CostPathPair<Integer>> map2 = BellmanFord.getShortestPaths(directed, start);
if (debug>1) System.out.println(getPathMapString(start,map2));
end = v5;
if (debug>1) System.out.println("Bellman-Ford's shortest path of the undirected graph from "+start.getValue()+" to "+end.getValue());
Graph.CostPathPair<Integer> pair2 = BellmanFord.getShortestPath(directed, start, end);
if (debug>1) {
if (pair2!=null) System.out.println(pair2.toString());
else System.out.println("No path from "+start.getValue()+" to "+end.getValue());
}
if (debug>1) System.out.println();
}
{
// DIRECTED GRAPH (WITH NEGATIVE WEIGHTS)
if (debug>1) System.out.println("Undirected Graph with Negative Weights.");
List<Vertex<Integer>> verticies = new ArrayList<Vertex<Integer>>();
Graph.Vertex<Integer> v1 = new Graph.Vertex<Integer>(1);
verticies.add(v1);
Graph.Vertex<Integer> v2 = new Graph.Vertex<Integer>(2);
verticies.add(v2);
Graph.Vertex<Integer> v3 = new Graph.Vertex<Integer>(3);
verticies.add(v3);
Graph.Vertex<Integer> v4 = new Graph.Vertex<Integer>(4);
verticies.add(v4);
List<Edge<Integer>> edges = new ArrayList<Edge<Integer>>();
Graph.Edge<Integer> e1_4 = new Graph.Edge<Integer>(2, v1, v4);
edges.add(e1_4);
Graph.Edge<Integer> e2_1 = new Graph.Edge<Integer>(6, v2, v1);
edges.add(e2_1);
Graph.Edge<Integer> e2_3 = new Graph.Edge<Integer>(3, v2, v3);
edges.add(e2_3);
Graph.Edge<Integer> e3_1 = new Graph.Edge<Integer>(4, v3, v1);
edges.add(e3_1);
Graph.Edge<Integer> e3_4 = new Graph.Edge<Integer>(5, v3, v4);
edges.add(e3_4);
Graph.Edge<Integer> e4_2 = new Graph.Edge<Integer>(-7, v4, v2);
edges.add(e4_2);
Graph.Edge<Integer> e4_3 = new Graph.Edge<Integer>(-3, v4, v3);
edges.add(e4_3);
Graph<Integer> directed = new Graph<Integer>(Graph.TYPE.DIRECTED,verticies,edges);
if (debug>1) System.out.println(directed.toString());
Graph.Vertex<Integer> start = v1;
if (debug>1) System.out.println("Bellman-Ford's shortest paths of the directed graph from "+start.getValue());
Map<Graph.Vertex<Integer>, Graph.CostPathPair<Integer>> map2 = BellmanFord.getShortestPaths(directed, start);
if (debug>1) System.out.println(getPathMapString(start,map2));
Graph.Vertex<Integer> end = v3;
if (debug>1) System.out.println("Bellman-Ford's shortest path of the directed graph from "+start.getValue()+" to "+end.getValue());
Graph.CostPathPair<Integer> pair2 = BellmanFord.getShortestPath(directed, start, end);
if (debug>1) {
if (pair2!=null) System.out.println(pair2.toString());
else System.out.println("No path from "+start.getValue()+" to "+end.getValue());
}
if (debug>1) System.out.println("Johnson's all-pairs shortest path of the directed graph.");
Map<Vertex<Integer>, Map<Vertex<Integer>, Set<Edge<Integer>>>> paths = Johnson.getAllPairsShortestPaths(directed);
if (debug>1) {
if (paths==null) System.out.println("Directed graph contains a negative weight cycle.");
else System.out.println(getPathMapString(paths));
}
if (debug>1) System.out.println("Floyd-Warshall's all-pairs shortest path weights of the directed graph.");
Map<Vertex<Integer>, Map<Vertex<Integer>, Integer>> pathWeights = FloydWarshall.getAllPairsShortestPaths(directed);
if (debug>1) System.out.println(getWeightMapString(pathWeights));
if (debug>1) System.out.println();
}
{
// UNDIRECTED GRAPH
if (debug>1) System.out.println("Undirected Graph cycle check.");
List<Vertex<Integer>> cycledVerticies = new ArrayList<Vertex<Integer>>();
Graph.Vertex<Integer> cv1 = new Graph.Vertex<Integer>(1);
cycledVerticies.add(cv1);
Graph.Vertex<Integer> cv2 = new Graph.Vertex<Integer>(2);
cycledVerticies.add(cv2);
Graph.Vertex<Integer> cv3 = new Graph.Vertex<Integer>(3);
cycledVerticies.add(cv3);
Graph.Vertex<Integer> cv4 = new Graph.Vertex<Integer>(4);
cycledVerticies.add(cv4);
Graph.Vertex<Integer> cv5 = new Graph.Vertex<Integer>(5);
cycledVerticies.add(cv5);
Graph.Vertex<Integer> cv6 = new Graph.Vertex<Integer>(6);
cycledVerticies.add(cv6);
List<Edge<Integer>> cycledEdges = new ArrayList<Edge<Integer>>();
Graph.Edge<Integer> ce1_2 = new Graph.Edge<Integer>(7, cv1, cv2);
cycledEdges.add(ce1_2);
Graph.Edge<Integer> ce2_4 = new Graph.Edge<Integer>(15, cv2, cv4);
cycledEdges.add(ce2_4);
Graph.Edge<Integer> ce3_4 = new Graph.Edge<Integer>(11, cv3, cv4);
cycledEdges.add(ce3_4);
Graph.Edge<Integer> ce3_6 = new Graph.Edge<Integer>(2, cv3, cv6);
cycledEdges.add(ce3_6);
Graph.Edge<Integer> ce5_6 = new Graph.Edge<Integer>(9, cv5, cv6);
cycledEdges.add(ce5_6);
Graph.Edge<Integer> ce4_5 = new Graph.Edge<Integer>(6, cv4, cv5);
cycledEdges.add(ce4_5);
Graph<Integer> undirectedWithCycle = new Graph<Integer>(cycledVerticies,cycledEdges);
if (debug>1) System.out.println(undirectedWithCycle.toString());
if (debug>1) {
System.out.println("Cycle detection of the undirected graph.");
boolean result = CycleDetection.detect(undirectedWithCycle);
System.out.println("result="+result);
System.out.println();
}
List<Vertex<Integer>> verticies = new ArrayList<Vertex<Integer>>();
Graph.Vertex<Integer> v1 = new Graph.Vertex<Integer>(1);
verticies.add(v1);
Graph.Vertex<Integer> v2 = new Graph.Vertex<Integer>(2);
verticies.add(v2);
Graph.Vertex<Integer> v3 = new Graph.Vertex<Integer>(3);
verticies.add(v3);
Graph.Vertex<Integer> v4 = new Graph.Vertex<Integer>(4);
verticies.add(v4);
Graph.Vertex<Integer> v5 = new Graph.Vertex<Integer>(5);
verticies.add(v5);
Graph.Vertex<Integer> v6 = new Graph.Vertex<Integer>(6);
verticies.add(v6);
List<Edge<Integer>> edges = new ArrayList<Edge<Integer>>();
Graph.Edge<Integer> e1_2 = new Graph.Edge<Integer>(7, v1, v2);
edges.add(e1_2);
Graph.Edge<Integer> e2_4 = new Graph.Edge<Integer>(15, v2, v4);
edges.add(e2_4);
Graph.Edge<Integer> e3_4 = new Graph.Edge<Integer>(11, v3, v4);
edges.add(e3_4);
Graph.Edge<Integer> e3_6 = new Graph.Edge<Integer>(2, v3, v6);
edges.add(e3_6);
Graph.Edge<Integer> e4_5 = new Graph.Edge<Integer>(6, v4, v5);
edges.add(e4_5);
Graph<Integer> undirectedWithoutCycle = new Graph<Integer>(verticies,edges);
if (debug>1) System.out.println(undirectedWithoutCycle.toString());
if (debug>1) {
System.out.println("Cycle detection of the undirected graph.");
boolean result = CycleDetection.detect(undirectedWithoutCycle);
System.out.println("result="+result);
System.out.println();
}
}
{
// DIRECTED GRAPH
if (debug>1) System.out.println("Directed Graph topological sort.");
List<Vertex<Integer>> verticies = new ArrayList<Vertex<Integer>>();
Graph.Vertex<Integer> cv1 = new Graph.Vertex<Integer>(1);
verticies.add(cv1);
Graph.Vertex<Integer> cv2 = new Graph.Vertex<Integer>(2);
verticies.add(cv2);
Graph.Vertex<Integer> cv3 = new Graph.Vertex<Integer>(3);
verticies.add(cv3);
Graph.Vertex<Integer> cv4 = new Graph.Vertex<Integer>(4);
verticies.add(cv4);
Graph.Vertex<Integer> cv5 = new Graph.Vertex<Integer>(5);
verticies.add(cv5);
Graph.Vertex<Integer> cv6 = new Graph.Vertex<Integer>(6);
verticies.add(cv6);
List<Edge<Integer>> edges = new ArrayList<Edge<Integer>>();
Graph.Edge<Integer> ce1_2 = new Graph.Edge<Integer>(1, cv1, cv2);
edges.add(ce1_2);
Graph.Edge<Integer> ce2_4 = new Graph.Edge<Integer>(2, cv2, cv4);
edges.add(ce2_4);
Graph.Edge<Integer> ce4_3 = new Graph.Edge<Integer>(3, cv4, cv3);
edges.add(ce4_3);
Graph.Edge<Integer> ce3_6 = new Graph.Edge<Integer>(4, cv3, cv6);
edges.add(ce3_6);
Graph.Edge<Integer> ce5_6 = new Graph.Edge<Integer>(5, cv5, cv6);
edges.add(ce5_6);
Graph.Edge<Integer> ce4_5 = new Graph.Edge<Integer>(6, cv4, cv5);
edges.add(ce4_5);
Graph<Integer> directed = new Graph<Integer>(Graph.TYPE.DIRECTED,verticies,edges);
if (debug>1) System.out.println(directed.toString());
if (debug>1) System.out.println("Topological sort of the directed graph.");
List<Graph.Vertex<Integer>> results = TopologicalSort.sort(directed);
if (debug>1) {
System.out.println("result="+results);
System.out.println();
}
}
return true;
}
/**
 * Exercises the project's BinaryHeap in both MIN and MAX configurations.
 * Each heap is filled and drained three times (unsorted ascending order,
 * unsorted descending order, sorted order), optionally validating heap
 * structure, size and contents after every add/remove, and recording
 * average add/remove timings and memory use into testResults.
 *
 * @return true if every validation passes; false on the first failure
 *         (after reporting via handleError).
 */
private static boolean testHeap() {
    {
        long count = 0;
        long addTime = 0L;
        long removeTime = 0L;
        long beforeAddTime = 0L;
        long afterAddTime = 0L;
        long beforeRemoveTime = 0L;
        long afterRemoveTime = 0L;
        long memory = 0L;
        long beforeMemory = 0L;
        long afterMemory = 0L;
        // MIN-HEAP
        if (debug>1) System.out.println("Min-Heap.");
        testNames[test] = "Min-heap";
        // Pass 1: add in unsorted order, then drain via removeRoot.
        count++;
        if (debugMemory) beforeMemory = DataStructures.getMemoryUse();
        BinaryHeap<Integer> minHeap = new BinaryHeap<Integer>(BinaryHeap.TYPE.MIN);
        if (debugTime) beforeAddTime = System.currentTimeMillis();
        for (int i=0; i<unsorted.length; i++) {
            int item = unsorted[i];
            minHeap.add(item);
            if (validateStructure && !minHeap.validate()) {
                System.err.println("YIKES!! Heap isn't valid.");
                handleError(minHeap);
                return false;
            }
            if (validateStructure && !(minHeap.getSize()==i+1)) {
                System.err.println("YIKES!! "+item+" caused a size mismatch.");
                handleError(minHeap);
                return false;
            }
            if (validateContents && !minHeap.contains(item)) {
                System.err.println("YIKES!! "+item+" doesn't exists.");
                handleError(minHeap);
                return false;
            }
        }
        if (debugTime) {
            afterAddTime = System.currentTimeMillis();
            addTime += afterAddTime-beforeAddTime;
            if (debug>0) System.out.println("Min-Heap add time = "+addTime/count+" ms");
        }
        if (debugMemory) {
            afterMemory = DataStructures.getMemoryUse();
            memory += afterMemory-beforeMemory;
            if (debug>0) System.out.println("Min-Heap memory use = "+(memory/count)+" bytes");
        }
        if (debug>1) System.out.println(minHeap.toString());
        if (debugTime) beforeRemoveTime = System.currentTimeMillis();
        for (int i=0; i<unsorted.length; i++) {
            int item = minHeap.removeRoot();
            if (validateStructure && !minHeap.validate()) {
                System.err.println("YIKES!! Min-Heap isn't valid.");
                handleError(minHeap);
                return false;
            }
            if (validateStructure && !(minHeap.getSize()==unsorted.length-(i+1))) {
                System.err.println("YIKES!! "+item+" caused a size mismatch.");
                handleError(minHeap);
                return false;
            }
            if (validateContents && minHeap.contains(item)) {
                System.err.println("YIKES!! "+item+" still exists.");
                handleError(minHeap);
                return false;
            }
        }
        if (validateStructure && minHeap.getRootValue()!=null) {
            System.err.println("YIKES!! Min-Heap isn't empty.");
            handleError(minHeap);
            return false;
        }
        if (debugTime) {
            afterRemoveTime = System.currentTimeMillis();
            removeTime += afterRemoveTime-beforeRemoveTime;
            if (debug>0) System.out.println("Min-Heap remove time = "+removeTime/count+" ms");
        }
        // Pass 2: add in reverse (descending) order, then drain.
        count++;
        if (debugMemory) beforeMemory = DataStructures.getMemoryUse();
        if (debugTime) beforeAddTime = System.currentTimeMillis();
        for (int i=unsorted.length-1; i>=0; i--) {
            int item = unsorted[i];
            minHeap.add(item);
            if (validateStructure && !minHeap.validate()) {
                System.err.println("YIKES!! Min-Heap isn't valid.");
                handleError(minHeap);
                return false;
            }
            if (validateStructure && !(minHeap.getSize()==unsorted.length-i)) {
                System.err.println("YIKES!! "+item+" caused a size mismatch.");
                handleError(minHeap);
                return false;
            }
            if (validateContents && !minHeap.contains(item)) {
                System.err.println("YIKES!! "+item+" doesn't exists.");
                handleError(minHeap);
                return false;
            }
        }
        if (debugTime) {
            afterAddTime = System.currentTimeMillis();
            addTime += afterAddTime-beforeAddTime;
            if (debug>0) System.out.println("Min-Heap add time = "+addTime/count+" ms");
        }
        if (debugMemory) {
            afterMemory = DataStructures.getMemoryUse();
            memory += afterMemory-beforeMemory;
            if (debug>0) System.out.println("Min-Heap memory use = "+(memory/count)+" bytes");
        }
        if (debug>1) System.out.println(minHeap.toString());
        if (debugTime) beforeRemoveTime = System.currentTimeMillis();
        for (int i=0; i<unsorted.length; i++) {
            int item = minHeap.removeRoot();
            if (validateStructure && !minHeap.validate()) {
                System.err.println("YIKES!! Min-Heap isn't valid.");
                handleError(minHeap);
                return false;
            }
            if (validateStructure && !(minHeap.getSize()==unsorted.length-(i+1))) {
                System.err.println("YIKES!! "+item+" caused a size mismatch.");
                handleError(minHeap);
                return false;
            }
            if (validateContents && minHeap.contains(item)) {
                System.err.println("YIKES!! "+item+" still exists.");
                handleError(minHeap);
                return false;
            }
        }
        if (validateStructure && minHeap.getRootValue()!=null) {
            System.err.println("YIKES!! Min-Heap isn't empty.");
            handleError(minHeap);
            return false;
        }
        if (debugTime) {
            afterRemoveTime = System.currentTimeMillis();
            removeTime += afterRemoveTime-beforeRemoveTime;
            if (debug>0) System.out.println("Min-Heap remove time = "+removeTime/count+" ms");
        }
        // Pass 3: add in sorted order, then drain.
        count++;
        if (debugMemory) beforeMemory = DataStructures.getMemoryUse();
        if (debugTime) beforeAddTime = System.currentTimeMillis();
        for (int i=0; i<sorted.length; i++) {
            int item = sorted[i];
            minHeap.add(item);
            if (validateStructure && !minHeap.validate()) {
                System.err.println("YIKES!! Min-Heap isn't valid.");
                handleError(minHeap);
                return false;
            }
            if (validateStructure && !(minHeap.getSize()==(i+1))) {
                System.err.println("YIKES!! "+item+" caused a size mismatch.");
                handleError(minHeap);
                return false;
            }
            if (validateContents && !minHeap.contains(item)) {
                System.err.println("YIKES!! "+item+" doesn't exist.");
                handleError(minHeap);
                return false;
            }
        }
        if (debugTime) {
            afterAddTime = System.currentTimeMillis();
            addTime += afterAddTime-beforeAddTime;
            if (debug>0) System.out.println("Min-Heap add time = "+addTime/count+" ms");
        }
        if (debugMemory) {
            afterMemory = DataStructures.getMemoryUse();
            memory += afterMemory-beforeMemory;
            if (debug>0) System.out.println("Min-Heap memory use = "+(memory/count)+" bytes");
        }
        if (debug>1) System.out.println(minHeap.toString());
        if (debugTime) beforeRemoveTime = System.currentTimeMillis();
        for (int i=sorted.length-1; i>=0; i--) {
            int item = minHeap.removeRoot();
            if (validateStructure && !minHeap.validate()) {
                System.err.println("YIKES!! Min-Heap isn't valid.");
                handleError(minHeap);
                return false;
            }
            if (validateStructure && !(minHeap.getSize()==i)) {
                System.err.println("YIKES!! "+item+" caused a size mismatch.");
                handleError(minHeap);
                return false;
            }
            if (validateContents && minHeap.contains(item)) {
                System.err.println("YIKES!! "+item+" still exists.");
                handleError(minHeap);
                return false;
            }
        }
        if (debugTime) {
            afterRemoveTime = System.currentTimeMillis();
            // BUG FIX: was 'afterRemoveTime-beforeAddTime', which measured from the
            // start of the add phase and inflated the reported remove time.
            removeTime += afterRemoveTime-beforeRemoveTime;
            if (debug>0) System.out.println("Min-Heap remove time = "+removeTime/count+" ms");
        }
        testResults[test++] = new long[]{addTime/count,removeTime/count,memory};
        if (debug>1) System.out.println();
    }
    {
        long count = 0;
        long addTime = 0L;
        long removeTime = 0L;
        long beforeAddTime = 0L;
        long afterAddTime = 0L;
        long beforeRemoveTime = 0L;
        long afterRemoveTime = 0L;
        long memory = 0L;
        long beforeMemory = 0L;
        long afterMemory = 0L;
        // MAX-HEAP (comment previously said MIN-HEAP)
        if (debug>1) System.out.println("Max-Heap.");
        testNames[test] = "Max-heap";
        // Pass 1: add in unsorted order, then drain via removeRoot.
        count++;
        if (debugMemory) beforeMemory = DataStructures.getMemoryUse();
        if (debugTime) beforeAddTime = System.currentTimeMillis();
        BinaryHeap<Integer> maxHeap = new BinaryHeap<Integer>(BinaryHeap.TYPE.MAX);
        for (int i=0; i<unsorted.length; i++) {
            int item = unsorted[i];
            maxHeap.add(item);
            if (validateStructure && !maxHeap.validate()) {
                System.err.println("YIKES!! Max-Heap isn't valid.");
                handleError(maxHeap);
                return false;
            }
            if (validateStructure && !(maxHeap.getSize()==i+1)) {
                System.err.println("YIKES!! "+item+" caused a size mismatch.");
                handleError(maxHeap);
                return false;
            }
            if (validateContents && !maxHeap.contains(item)) {
                System.err.println("YIKES!! "+item+" doesn't exists.");
                handleError(maxHeap);
                return false;
            }
        }
        if (debugTime) {
            afterAddTime = System.currentTimeMillis();
            addTime += afterAddTime-beforeAddTime;
            if (debug>0) System.out.println("Max-Heap add time = "+addTime/count+" ms");
        }
        if (debugMemory) {
            afterMemory = DataStructures.getMemoryUse();
            memory += afterMemory-beforeMemory;
            if (debug>0) System.out.println("Max-Heap memory use = "+(memory/count)+" bytes");
        }
        if (debug>1) System.out.println(maxHeap.toString());
        if (debugTime) beforeRemoveTime = System.currentTimeMillis();
        for (int i=0; i<unsorted.length; i++) {
            int item = maxHeap.removeRoot();
            if (validateStructure && !maxHeap.validate()) {
                System.err.println("YIKES!! Max-Heap isn't valid.");
                handleError(maxHeap);
                return false;
            }
            if (validateStructure && !(maxHeap.getSize()==unsorted.length-(i+1))) {
                System.err.println("YIKES!! "+item+" caused a size mismatch.");
                handleError(maxHeap);
                return false;
            }
            if (validateContents && maxHeap.contains(item)) {
                System.err.println("YIKES!! "+item+" still exists.");
                handleError(maxHeap);
                return false;
            }
        }
        if (validateStructure && maxHeap.getRootValue()!=null) {
            System.err.println("YIKES!! Max-Heap isn't empty.");
            handleError(maxHeap);
            return false;
        }
        if (debugTime) {
            afterRemoveTime = System.currentTimeMillis();
            removeTime += afterRemoveTime-beforeRemoveTime;
            if (debug>0) System.out.println("Max-Heap remove time = "+removeTime/count+" ms");
        }
        // Pass 2: add in reverse (descending) order, then drain.
        count++;
        if (debugMemory) beforeMemory = DataStructures.getMemoryUse();
        if (debugTime) beforeAddTime = System.currentTimeMillis();
        for (int i=unsorted.length-1; i>=0; i--) {
            int item = unsorted[i];
            maxHeap.add(item);
            if (validateStructure && !maxHeap.validate()) {
                System.err.println("YIKES!! Max-Heap isn't valid.");
                handleError(maxHeap);
                return false;
            }
            if (validateStructure && !(maxHeap.getSize()==unsorted.length-i)) {
                System.err.println("YIKES!! "+item+" caused a size mismatch.");
                handleError(maxHeap);
                return false;
            }
            if (validateContents && !maxHeap.contains(item)) {
                System.err.println("YIKES!! "+item+" doesn't exists.");
                handleError(maxHeap);
                return false;
            }
        }
        if (debugTime) {
            afterAddTime = System.currentTimeMillis();
            addTime += afterAddTime-beforeAddTime;
            if (debug>0) System.out.println("Max-Heap add time = "+addTime/count+" ms");
        }
        if (debugMemory) {
            afterMemory = DataStructures.getMemoryUse();
            memory += afterMemory-beforeMemory;
            if (debug>0) System.out.println("Max-Heap memory use = "+(memory/count)+" bytes");
        }
        if (debug>1) System.out.println(maxHeap.toString());
        if (debugTime) beforeRemoveTime = System.currentTimeMillis();
        for (int i=0; i<unsorted.length; i++) {
            int item = maxHeap.removeRoot();
            if (validateStructure && !maxHeap.validate()) {
                System.err.println("YIKES!! Max-Heap isn't valid.");
                handleError(maxHeap);
                return false;
            }
            if (validateStructure && !(maxHeap.getSize()==unsorted.length-(i+1))) {
                System.err.println("YIKES!! "+item+" caused a size mismatch.");
                handleError(maxHeap);
                return false;
            }
            if (validateContents && maxHeap.contains(item)) {
                System.err.println("YIKES!! "+item+" still exists.");
                handleError(maxHeap);
                return false;
            }
        }
        if (validateStructure && maxHeap.getRootValue()!=null) {
            System.err.println("YIKES!! Max-Heap isn't empty.");
            handleError(maxHeap);
            return false;
        }
        if (debugTime) {
            afterRemoveTime = System.currentTimeMillis();
            removeTime += afterRemoveTime-beforeRemoveTime;
            if (debug>0) System.out.println("Max-Heap remove time = "+removeTime/count+" ms");
        }
        // Pass 3: add in sorted order, then drain.
        count++;
        if (debugMemory) beforeMemory = DataStructures.getMemoryUse();
        if (debugTime) beforeAddTime = System.currentTimeMillis();
        for (int i=0; i<sorted.length; i++) {
            int item = sorted[i];
            maxHeap.add(item);
            if (validateStructure && !maxHeap.validate()) {
                System.err.println("YIKES!! Max-Heap isn't valid.");
                handleError(maxHeap);
                return false;
            }
            if (validateStructure && !(maxHeap.getSize()==(i+1))) {
                System.err.println("YIKES!! "+item+" caused a size mismatch.");
                handleError(maxHeap);
                return false;
            }
            if (validateContents && !maxHeap.contains(item)) {
                System.err.println("YIKES!! "+item+" doesn't exist.");
                handleError(maxHeap);
                return false;
            }
        }
        if (debugTime) {
            afterAddTime = System.currentTimeMillis();
            addTime += afterAddTime-beforeAddTime;
            if (debug>0) System.out.println("Max-Heap add time = "+addTime/count+" ms");
        }
        if (debugMemory) {
            afterMemory = DataStructures.getMemoryUse();
            memory += afterMemory-beforeMemory;
            if (debug>0) System.out.println("Max-Heap memory use = "+(memory/count)+" bytes");
        }
        if (debug>1) System.out.println(maxHeap.toString());
        if (debugTime) beforeRemoveTime = System.currentTimeMillis();
        for (int i=sorted.length-1; i>=0; i--) {
            int item = maxHeap.removeRoot();
            if (validateStructure && !maxHeap.validate()) {
                System.err.println("YIKES!! Max-Heap isn't valid.");
                handleError(maxHeap);
                return false;
            }
            if (validateStructure && !(maxHeap.getSize()==i)) {
                System.err.println("YIKES!! "+item+" caused a size mismatch.");
                handleError(maxHeap);
                return false;
            }
            if (validateContents && maxHeap.contains(item)) {
                System.err.println("YIKES!! "+item+" still exists.");
                handleError(maxHeap);
                return false;
            }
        }
        if (debugTime) {
            afterRemoveTime = System.currentTimeMillis();
            // BUG FIX: was 'afterRemoveTime-beforeAddTime', which measured from the
            // start of the add phase and inflated the reported remove time.
            removeTime += afterRemoveTime-beforeRemoveTime;
            if (debug>0) System.out.println("Max-Heap remove time = "+removeTime/count+" ms");
        }
        testResults[test++] = new long[]{addTime/count,removeTime/count,memory};
        if (debug>1) System.out.println();
    }
    return true;
}
/**
 * Exercises the project's HashMap with three fill/drain passes (unsorted
 * ascending, unsorted descending, sorted), validating size and contents
 * after each put/remove when enabled, and recording average add/remove
 * timings and memory use into testResults.
 *
 * @return true if every validation passes; false on the first failure
 *         (after reporting via handleError).
 */
private static boolean testHashMap() {
    // Initial capacity hint for the map under test.
    int key = unsorted.length/2;
    {
        long count = 0;
        long addTime = 0L;
        long removeTime = 0L;
        long beforeAddTime = 0L;
        long afterAddTime = 0L;
        long beforeRemoveTime = 0L;
        long afterRemoveTime = 0L;
        long memory = 0L;
        long beforeMemory = 0L;
        long afterMemory = 0L;
        // Hash Map
        if (debug>1) System.out.println("Hash Map.");
        testNames[test] = "Hash Map";
        // Pass 1: put in unsorted order, remove in the same order.
        count++;
        if (debugMemory) beforeMemory = DataStructures.getMemoryUse();
        if (debugTime) beforeAddTime = System.currentTimeMillis();
        HashMap<Integer,Integer> hash = new HashMap<Integer,Integer>(key);
        for (int i=0; i<unsorted.length; i++) {
            int item = unsorted[i];
            hash.put(item, item);
            if (validateStructure && !(hash.getSize()==(i+1))) {
                System.err.println("YIKES!! "+item+" caused a size mismatch.");
                handleError(hash);
                return false;
            }
            if (validateContents && !hash.contains(item)) {
                System.err.println("YIKES!! "+item+" doesn't exists.");
                handleError(hash);
                return false;
            }
        }
        if (debugTime) {
            afterAddTime = System.currentTimeMillis();
            addTime += afterAddTime-beforeAddTime;
            if (debug>0) System.out.println("Hash Map add time = "+addTime/count+" ms");
        }
        if (debugMemory) {
            afterMemory = DataStructures.getMemoryUse();
            memory += afterMemory-beforeMemory;
            if (debug>0) System.out.println("Hash Map memory use = "+(memory/count)+" bytes");
        }
        if (debug>1) System.out.println(hash.toString());
        if (debugTime) beforeRemoveTime = System.currentTimeMillis();
        for (int i=0; i<unsorted.length; i++) {
            int item = unsorted[i];
            hash.remove(item);
            if (validateStructure && !(hash.getSize()==(unsorted.length-(i+1)))) {
                System.err.println("YIKES!! "+item+" caused a size mismatch.");
                handleError(hash);
                return false;
            }
            if (validateContents && hash.contains(item)) {
                System.err.println("YIKES!! "+item+" still exists.");
                handleError(hash);
                return false;
            }
        }
        if (debugTime) {
            afterRemoveTime = System.currentTimeMillis();
            removeTime += afterRemoveTime-beforeRemoveTime;
            if (debug>0) System.out.println("Hash Map remove time = "+removeTime/count+" ms");
        }
        // Pass 2: put in reverse order, remove in reverse order.
        count++;
        if (debugMemory) beforeMemory = DataStructures.getMemoryUse();
        if (debugTime) beforeAddTime = System.currentTimeMillis();
        for (int i=unsorted.length-1; i>=0; i--) {
            int item = unsorted[i];
            hash.put(item,item);
            if (validateStructure && !(hash.getSize()==(unsorted.length-i))) {
                System.err.println("YIKES!! "+item+" caused a size mismatch.");
                handleError(hash);
                return false;
            }
            if (validateContents && !hash.contains(item)) {
                System.err.println("YIKES!! "+item+" doesn't exists.");
                handleError(hash);
                return false;
            }
        }
        if (debugTime) {
            afterAddTime = System.currentTimeMillis();
            addTime += afterAddTime-beforeAddTime;
            if (debug>0) System.out.println("Hash Map add time = "+addTime/count+" ms");
        }
        if (debugMemory) {
            afterMemory = DataStructures.getMemoryUse();
            memory += afterMemory-beforeMemory;
            if (debug>0) System.out.println("Hash Map memory use = "+(memory/count)+" bytes");
        }
        if (debug>1) System.out.println(hash.toString());
        if (debugTime) beforeRemoveTime = System.currentTimeMillis();
        for (int i=unsorted.length-1; i>=0; i--) {
            int item = unsorted[i];
            hash.remove(item);
            if (validateStructure && !(hash.getSize()==i)) {
                System.err.println("YIKES!! "+item+" caused a size mismatch.");
                handleError(hash);
                return false;
            }
            if (validateContents && hash.contains(item)) {
                System.err.println("YIKES!! "+item+" still exists.");
                handleError(hash);
                return false;
            }
        }
        if (debugTime) {
            afterRemoveTime = System.currentTimeMillis();
            removeTime += afterRemoveTime-beforeRemoveTime;
            if (debug>0) System.out.println("Hash Map remove time = "+removeTime/count+" ms");
        }
        // Pass 3: put in sorted order, remove in reverse sorted order.
        count++;
        if (debugMemory) beforeMemory = DataStructures.getMemoryUse();
        if (debugTime) beforeAddTime = System.currentTimeMillis();
        for (int i=0; i<sorted.length; i++) {
            int item = sorted[i];
            hash.put(item,item);
            if (validateStructure && !(hash.getSize()==(i+1))) {
                System.err.println("YIKES!! "+item+" caused a size mismatch.");
                handleError(hash);
                return false;
            }
            if (validateContents && !hash.contains(item)) {
                System.err.println("YIKES!! "+item+" doesn't exist.");
                handleError(hash);
                return false;
            }
        }
        if (debugTime) {
            afterAddTime = System.currentTimeMillis();
            addTime += afterAddTime-beforeAddTime;
            if (debug>0) System.out.println("Hash Map add time = "+addTime/count+" ms");
        }
        if (debugMemory) {
            afterMemory = DataStructures.getMemoryUse();
            memory += afterMemory-beforeMemory;
            if (debug>0) System.out.println("Hash Map memory use = "+(memory/count)+" bytes");
        }
        if (debug>1) System.out.println(hash.toString());
        if (debugTime) beforeRemoveTime = System.currentTimeMillis();
        for (int i=sorted.length-1; i>=0; i--) {
            int item = sorted[i];
            hash.remove(item);
            if (validateStructure && !(hash.getSize()==i)) {
                System.err.println("YIKES!! "+item+" caused a size mismatch.");
                handleError(hash);
                return false;
            }
            if (validateContents && hash.contains(item)) {
                System.err.println("YIKES!! "+item+" still exists.");
                handleError(hash);
                return false;
            }
        }
        if (debugTime) {
            afterRemoveTime = System.currentTimeMillis();
            // BUG FIX: was 'afterRemoveTime-beforeAddTime', which measured from the
            // start of the add phase and inflated the reported remove time.
            removeTime += afterRemoveTime-beforeRemoveTime;
            if (debug>0) System.out.println("Hash Map remove time = "+removeTime/count+" ms");
        }
        testResults[test++] = new long[]{addTime/count,removeTime/count,memory};
        if (debug>1) System.out.println();
    }
    return true;
}
/**
 * Exercises the project's LinkedList with three fill/drain passes
 * (unsorted ascending, unsorted descending, sorted), validating contents
 * and size after each add/remove when enabled, and recording average
 * add/remove timings and memory use into testResults.
 *
 * @return true if every validation passes; false on the first failure
 *         (after reporting via handleError).
 */
private static boolean testLinkedList() {
    {
        long count = 0;
        long addTime = 0L;
        long removeTime = 0L;
        long beforeAddTime = 0L;
        long afterAddTime = 0L;
        long beforeRemoveTime = 0L;
        long afterRemoveTime = 0L;
        long memory = 0L;
        long beforeMemory = 0L;
        long afterMemory = 0L;
        // Linked List
        if (debug>1) System.out.println("Linked List.");
        testNames[test] = "Linked List";
        // Pass 1: add in unsorted order, remove in the same order.
        count++;
        if (debugMemory) beforeMemory = DataStructures.getMemoryUse();
        if (debugTime) beforeAddTime = System.currentTimeMillis();
        LinkedList<Integer> list = new LinkedList<Integer>();
        for (int i=0; i<unsorted.length; i++) {
            int item = unsorted[i];
            list.add(item);
            if (validateContents && !list.contains(item)) {
                System.err.println("YIKES!! "+item+" doesn't exists.");
                handleError(list);
                return false;
            }
            if (validateStructure && !(list.getSize()==i+1)) {
                System.err.println("YIKES!! "+item+" caused a size mismatch.");
                handleError(list);
                return false;
            }
        }
        if (debugTime) {
            afterAddTime = System.currentTimeMillis();
            addTime += afterAddTime-beforeAddTime;
            if (debug>0) System.out.println("Linked List add time = "+addTime/count+" ms");
        }
        if (debugMemory) {
            afterMemory = DataStructures.getMemoryUse();
            memory += afterMemory-beforeMemory;
            if (debug>0) System.out.println("Linked List memory use = "+(memory/count)+" bytes");
        }
        if (debug>1) System.out.println(list.toString());
        if (debugTime) beforeRemoveTime = System.currentTimeMillis();
        for (int i=0; i<unsorted.length; i++) {
            int item = unsorted[i];
            list.remove(item);
            if (validateContents && list.contains(item)) {
                System.err.println("YIKES!! "+item+" still exists.");
                handleError(list);
                return false;
            }
            if (validateStructure && !(list.getSize()==unsorted.length-(i+1))) {
                System.err.println("YIKES!! "+item+" caused a size mismatch.");
                handleError(list);
                return false;
            }
        }
        if (debugTime) {
            afterRemoveTime = System.currentTimeMillis();
            removeTime += afterRemoveTime-beforeRemoveTime;
            if (debug>0) System.out.println("Linked List remove time = "+removeTime/count+" ms");
        }
        // Pass 2: add in reverse order, remove in original order.
        count++;
        if (debugMemory) beforeMemory = DataStructures.getMemoryUse();
        if (debugTime) beforeAddTime = System.currentTimeMillis();
        for (int i=unsorted.length-1; i>=0; i--) {
            int item = unsorted[i];
            list.add(item);
            if (validateContents && !list.contains(item)) {
                System.err.println("YIKES!! "+item+" doesn't exists.");
                handleError(list);
                return false;
            }
            if (validateStructure && !(list.getSize()==unsorted.length-i)) {
                System.err.println("YIKES!! "+item+" caused a size mismatch.");
                handleError(list);
                return false;
            }
        }
        if (debugTime) {
            afterAddTime = System.currentTimeMillis();
            addTime += afterAddTime-beforeAddTime;
            if (debug>0) System.out.println("Linked List add time = "+addTime/count+" ms");
        }
        if (debugMemory) {
            afterMemory = DataStructures.getMemoryUse();
            memory += afterMemory-beforeMemory;
            if (debug>0) System.out.println("Linked List memory use = "+(memory/count)+" bytes");
        }
        if (debug>1) System.out.println(list.toString());
        if (debugTime) beforeRemoveTime = System.currentTimeMillis();
        for (int i=0; i<unsorted.length; i++) {
            int item = unsorted[i];
            list.remove(item);
            if (validateContents && list.contains(item)) {
                System.err.println("YIKES!! "+item+" still exists.");
                handleError(list);
                return false;
            }
            if (validateStructure && !(list.getSize()==unsorted.length-(i+1))) {
                System.err.println("YIKES!! "+item+" caused a size mismatch.");
                handleError(list);
                return false;
            }
        }
        if (debugTime) {
            afterRemoveTime = System.currentTimeMillis();
            removeTime += afterRemoveTime-beforeRemoveTime;
            if (debug>0) System.out.println("Linked List remove time = "+removeTime/count+" ms");
        }
        // Pass 3: add in sorted order, remove in reverse sorted order.
        count++;
        if (debugMemory) beforeMemory = DataStructures.getMemoryUse();
        if (debugTime) beforeAddTime = System.currentTimeMillis();
        for (int i=0; i<sorted.length; i++) {
            int item = sorted[i];
            list.add(item);
            if (validateStructure && !(list.getSize()==(i+1))) {
                System.err.println("YIKES!! "+item+" caused a size mismatch.");
                handleError(list);
                return false;
            }
            if (validateContents && !list.contains(item)) {
                System.err.println("YIKES!! "+item+" doesn't exist.");
                handleError(list);
                return false;
            }
        }
        if (debugTime) {
            afterAddTime = System.currentTimeMillis();
            addTime += afterAddTime-beforeAddTime;
            if (debug>0) System.out.println("Linked List add time = "+addTime/count+" ms");
        }
        if (debugMemory) {
            afterMemory = DataStructures.getMemoryUse();
            memory += afterMemory-beforeMemory;
            if (debug>0) System.out.println("Linked List memory use = "+(memory/count)+" bytes");
        }
        if (debug>1) System.out.println(list.toString());
        if (debugTime) beforeRemoveTime = System.currentTimeMillis();
        for (int i=sorted.length-1; i>=0; i--) {
            int item = sorted[i];
            list.remove(item);
            if (validateStructure && !(list.getSize()==i)) {
                System.err.println("YIKES!! "+item+" caused a size mismatch.");
                handleError(list);
                return false;
            }
            if (validateContents && list.contains(item)) {
                System.err.println("YIKES!! "+item+" still exists.");
                handleError(list);
                return false;
            }
        }
        if (debugTime) {
            afterRemoveTime = System.currentTimeMillis();
            // BUG FIX: was 'afterRemoveTime-beforeAddTime', which measured from the
            // start of the add phase and inflated the reported remove time.
            removeTime += afterRemoveTime-beforeRemoveTime;
            if (debug>0) System.out.println("Linked List remove time = "+removeTime/count+" ms");
        }
        testResults[test++] = new long[]{addTime/count,removeTime/count,memory};
        if (debug>1) System.out.println();
    }
    return true;
}
/**
 * Smoke-tests the Matrix type: a rectangular (4x3 by 3x2) multiplication,
 * then subtraction, addition, and multiplication of small square matrices.
 * Results are only printed when debugging is enabled; the method always
 * reports success.
 */
private static boolean testMatrix() {
    {
        // MATRIX
        if (debug>1) System.out.println("Matrix.");
        // 4x3 operand, populated row-by-row from a literal table.
        int[][] table1 = { {14, 9, 3}, {2, 11, 15}, {0, 12, 17}, {5, 2, 3} };
        Matrix<Integer> matrix1 = new Matrix<Integer>(4,3);
        for (int r=0; r<4; r++) {
            for (int c=0; c<3; c++) {
                matrix1.set(r, c, table1[r][c]);
            }
        }
        // 3x2 operand.
        int[][] table2 = { {12, 25}, {9, 10}, {8, 5} };
        Matrix<Integer> matrix2 = new Matrix<Integer>(3,2);
        for (int r=0; r<3; r++) {
            for (int c=0; c<2; c++) {
                matrix2.set(r, c, table2[r][c]);
            }
        }
        if (debug>1) System.out.println("Matrix multiplication.");
        Matrix<Integer> matrix3 = matrix1.multiply(matrix2);
        if (debug>1) System.out.println(matrix3);
        // 2x2 matrix holding 0..3 in row-major order.
        int rows = 2;
        int cols = 2;
        Matrix<Integer> matrix4 = new Matrix<Integer>(rows,cols);
        for (int r=0; r<rows; r++) {
            for (int c=0; c<cols; c++) {
                matrix4.set(r, c, r*cols + c);
            }
        }
        if (debug>1) System.out.println("Matrix subtraction.");
        Matrix<Integer> matrix5 = matrix4.subtract(matrix4);
        if (debug>1) System.out.println(matrix5);
        if (debug>1) System.out.println("Matrix addition.");
        Matrix<Integer> matrix6 = matrix4.add(matrix4);
        if (debug>1) System.out.println(matrix6);
        // Two identical 2x2 matrices holding 1..4 in row-major order.
        Matrix<Integer> matrix7 = new Matrix<Integer>(2,2);
        Matrix<Integer> matrix8 = new Matrix<Integer>(2,2);
        for (int r=0; r<2; r++) {
            for (int c=0; c<2; c++) {
                int value = r*2 + c + 1;
                matrix7.set(r, c, value);
                matrix8.set(r, c, value);
            }
        }
        if (debug>1) System.out.println("Matrix multiplication.");
        Matrix<Integer> matrix9 = matrix7.multiply(matrix8);
        if (debug>1) System.out.println(matrix9);
    }
    return true;
}
/**
 * Benchmarks and validates the PatriciaTrie over three passes:
 * unsorted forward add/remove, unsorted reverse add / forward remove,
 * and sorted add / reverse remove. Records average add/remove time and
 * memory use in testResults. Returns false (after handleError) on any
 * size or containment mismatch.
 */
private static boolean testPatriciaTrie() {
    {
        long count = 0;
        long addTime = 0L;
        long removeTime = 0L;
        long beforeAddTime = 0L;
        long afterAddTime = 0L;
        long beforeRemoveTime = 0L;
        long afterRemoveTime = 0L;
        long memory = 0L;
        long beforeMemory = 0L;
        long afterMemory = 0L;
        //Patricia Trie
        if (debug>1) System.out.println("Patricia Trie.");
        testNames[test] = "Patricia Trie";
        // Pass 1: add unsorted values in order, then remove them in the same order.
        count++;
        if (debugMemory) beforeMemory = DataStructures.getMemoryUse();
        if (debugTime) beforeAddTime = System.currentTimeMillis();
        PatriciaTrie<String> trie = new PatriciaTrie<String>();
        for (int i=0; i<unsorted.length; i++) {
            int item = unsorted[i];
            String string = String.valueOf(item);
            trie.add(string);
            if (validateStructure && !(trie.getSize()==(i+1))) {
                System.err.println("YIKES!! "+item+" caused a size mismatch.");
                handleError(trie);
                return false;
            }
            if (validateContents && !trie.contains(string)) {
                System.err.println("YIKES!! "+string+" doesn't exist.");
                handleError(trie);
                return false;
            }
        }
        if (debugTime) {
            afterAddTime = System.currentTimeMillis();
            addTime += afterAddTime-beforeAddTime;
            if (debug>0) System.out.println("Patricia Trie add time = "+addTime/count+" ms");
        }
        if (debugMemory) {
            afterMemory = DataStructures.getMemoryUse();
            memory += afterMemory-beforeMemory;
            if (debug>0) System.out.println("Patricia Trie memory use = "+(memory/count)+" bytes");
        }
        if (debug>1) System.out.println(trie.toString());
        if (debugTime) beforeRemoveTime = System.currentTimeMillis();
        for (int i=0; i<unsorted.length; i++) {
            int item = unsorted[i];
            String string = String.valueOf(item);
            trie.remove(string);
            if (validateStructure && !(trie.getSize()==unsorted.length-(i+1))) {
                System.err.println("YIKES!! "+item+" caused a size mismatch.");
                handleError(trie);
                return false;
            }
            if (validateContents && trie.contains(string)) {
                System.err.println("YIKES!! "+string+" still exists.");
                handleError(trie);
                return false;
            }
        }
        if (debugTime) {
            afterRemoveTime = System.currentTimeMillis();
            removeTime += afterRemoveTime-beforeRemoveTime;
            if (debug>0) System.out.println("Patricia Trie remove time = "+removeTime/count+" ms");
        }
        // Pass 2: add unsorted values in reverse order, then remove forward.
        count++;
        if (debugMemory) beforeMemory = DataStructures.getMemoryUse();
        if (debugTime) beforeAddTime = System.currentTimeMillis();
        for (int i=unsorted.length-1; i>=0; i--) {
            int item = unsorted[i];
            String string = String.valueOf(item);
            trie.add(string);
            if (validateStructure && !(trie.getSize()==(unsorted.length-i))) {
                System.err.println("YIKES!! "+string+" caused a size mismatch.");
                handleError(trie);
                return false;
            }
            if (validateContents && !trie.contains(string)) {
                System.err.println("YIKES!! "+string+" doesn't exists.");
                handleError(trie);
                return false;
            }
        }
        if (debugTime) {
            afterAddTime = System.currentTimeMillis();
            addTime += afterAddTime-beforeAddTime;
            if (debug>0) System.out.println("Patricia Trie add time = "+addTime/count+" ms");
        }
        if (debugMemory) {
            afterMemory = DataStructures.getMemoryUse();
            memory += afterMemory-beforeMemory;
            if (debug>0) System.out.println("Patricia Trie memory use = "+(memory/count)+" bytes");
        }
        if (debug>1) System.out.println(trie.toString());
        if (debugTime) beforeRemoveTime = System.currentTimeMillis();
        for (int i=0; i<unsorted.length; i++) {
            int item = unsorted[i];
            String string = String.valueOf(item);
            trie.remove(string);
            if (validateStructure && !(trie.getSize()==(unsorted.length-(i+1)))) {
                System.err.println("YIKES!! "+string+" caused a size mismatch.");
                handleError(trie);
                return false;
            }
            if (validateContents && trie.contains(string)) {
                System.err.println("YIKES!! "+string+" still exists.");
                handleError(trie);
                return false;
            }
        }
        if (debugTime) {
            afterRemoveTime = System.currentTimeMillis();
            removeTime += afterRemoveTime-beforeRemoveTime;
            if (debug>0) System.out.println("Patricia Trie remove time = "+removeTime/count+" ms");
        }
        // Pass 3: add sorted values forward, then remove in reverse order.
        count++;
        if (debugMemory) beforeMemory = DataStructures.getMemoryUse();
        if (debugTime) beforeAddTime = System.currentTimeMillis();
        for (int i=0; i<sorted.length; i++) {
            int item = sorted[i];
            String string = String.valueOf(item);
            trie.add(string);
            if (validateStructure && !(trie.getSize()==(i+1))) {
                System.err.println("YIKES!! "+item+" caused a size mismatch.");
                handleError(trie);
                return false;
            }
            if (validateContents && !trie.contains(string)) {
                System.err.println("YIKES!! "+item+" doesn't exist.");
                handleError(trie);
                return false;
            }
        }
        if (debugTime) {
            afterAddTime = System.currentTimeMillis();
            addTime += afterAddTime-beforeAddTime;
            if (debug>0) System.out.println("Patricia Trie add time = "+addTime/count+" ms");
        }
        if (debugMemory) {
            afterMemory = DataStructures.getMemoryUse();
            memory += afterMemory-beforeMemory;
            if (debug>0) System.out.println("Patricia Trie memory use = "+(memory/count)+" bytes");
        }
        if (debug>1) System.out.println(trie.toString());
        if (debugTime) beforeRemoveTime = System.currentTimeMillis();
        for (int i=sorted.length-1; i>=0; i--) {
            int item = sorted[i];
            String string = String.valueOf(item);
            trie.remove(string);
            if (validateStructure && !(trie.getSize()==i)) {
                System.err.println("YIKES!! "+item+" caused a size mismatch.");
                handleError(trie);
                return false;
            }
            if (validateContents && trie.contains(string)) {
                System.err.println("YIKES!! "+item+" still exists.");
                handleError(trie);
                return false;
            }
        }
        if (debugTime) {
            afterRemoveTime = System.currentTimeMillis();
            // FIX: was subtracting beforeAddTime, inflating the reported remove time.
            removeTime += afterRemoveTime-beforeRemoveTime;
            // FIX: label said "add time" for the remove measurement.
            if (debug>0) System.out.println("Patricia Trie remove time = "+removeTime/count+" ms");
        }
        testResults[test++] = new long[]{addTime/count,removeTime/count,memory};
        if (debug>1) System.out.println();
    }
    return true;
}
/**
 * Benchmarks and validates the Queue over three passes:
 * unsorted forward enqueue/dequeue, unsorted reverse enqueue / dequeue,
 * and sorted enqueue / dequeue. Records average add/remove time and memory
 * use in testResults. Returns false (after handleError) on any size or
 * containment mismatch.
 */
private static boolean testQueue() {
    {
        long count = 0;
        long addTime = 0L;
        long removeTime = 0L;
        long beforeAddTime = 0L;
        long afterAddTime = 0L;
        long beforeRemoveTime = 0L;
        long afterRemoveTime = 0L;
        long memory = 0L;
        long beforeMemory = 0L;
        long afterMemory = 0L;
        // Queue
        if (debug>1) System.out.println("Queue.");
        testNames[test] = "Queue";
        // Pass 1: enqueue unsorted values in order, then dequeue them all.
        count++;
        if (debugMemory) beforeMemory = DataStructures.getMemoryUse();
        if (debugTime) beforeAddTime = System.currentTimeMillis();
        Queue<Integer> queue = new Queue<Integer>();
        for (int i=0; i<unsorted.length; i++) {
            int item = unsorted[i];
            queue.enqueue(item);
            if (validateStructure && !(queue.getSize()==i+1)) {
                System.err.println("YIKES!! "+item+" caused a size mismatch.");
                handleError(queue);
                return false;
            }
            if (validateContents && !queue.contains(item)) {
                System.err.println("YIKES!! "+item+" doesn't exist.");
                handleError(queue);
                return false;
            }
        }
        if (debugTime) {
            afterAddTime = System.currentTimeMillis();
            addTime += afterAddTime-beforeAddTime;
            if (debug>0) System.out.println("Queue add time = "+addTime/count+" ms");
        }
        if (debugMemory) {
            afterMemory = DataStructures.getMemoryUse();
            memory += afterMemory-beforeMemory;
            if (debug>0) System.out.println("Queue memory use = "+(memory/count)+" bytes");
        }
        if (debug>1) System.out.println(queue.toString());
        if (debugTime) beforeRemoveTime = System.currentTimeMillis();
        int size = queue.getSize();
        for (int i=0; i<size; i++) {
            int item = queue.dequeue();
            if (validateStructure && !(queue.getSize()==unsorted.length-(i+1))) {
                System.err.println("YIKES!! "+item+" caused a size mismatch.");
                handleError(queue);
                return false;
            }
            if (validateContents && queue.contains(item)) {
                System.err.println("YIKES!! "+item+" still exist.");
                handleError(queue);
                return false;
            }
        }
        if (debugTime) {
            afterRemoveTime = System.currentTimeMillis();
            removeTime += afterRemoveTime-beforeRemoveTime;
            if (debug>0) System.out.println("Queue remove time = "+removeTime/count+" ms");
        }
        // Pass 2: enqueue unsorted values in reverse order, then dequeue.
        count++;
        if (debugMemory) beforeMemory = DataStructures.getMemoryUse();
        if (debugTime) beforeAddTime = System.currentTimeMillis();
        for (int i=unsorted.length-1; i>=0; i--) {
            int item = unsorted[i];
            queue.enqueue(item);
            if (validateStructure && !(queue.getSize()==unsorted.length-i)) {
                System.err.println("YIKES!! "+item+" caused a size mismatch.");
                handleError(queue);
                return false;
            }
            if (validateContents && !queue.contains(item)) {
                System.err.println("YIKES!! "+item+" doesn't exist.");
                handleError(queue);
                return false;
            }
        }
        if (debugTime) {
            afterAddTime = System.currentTimeMillis();
            addTime += afterAddTime-beforeAddTime;
            if (debug>0) System.out.println("Queue add time = "+addTime/count+" ms");
        }
        if (debugMemory) {
            afterMemory = DataStructures.getMemoryUse();
            memory += afterMemory-beforeMemory;
            if (debug>0) System.out.println("Queue memory use = "+(memory/count)+" bytes");
        }
        if (debug>1) System.out.println(queue.toString());
        if (debugTime) beforeRemoveTime = System.currentTimeMillis();
        for (int i=0; i<unsorted.length; i++) {
            int item = queue.dequeue();
            if (validateStructure && !(queue.getSize()==unsorted.length-(i+1))) {
                System.err.println("YIKES!! "+item+" caused a size mismatch.");
                handleError(queue);
                return false;
            }
            if (validateContents && queue.contains(item)) {
                System.err.println("YIKES!! "+item+" still exist.");
                handleError(queue);
                return false;
            }
        }
        if (debugTime) {
            afterRemoveTime = System.currentTimeMillis();
            removeTime += afterRemoveTime-beforeRemoveTime;
            if (debug>0) System.out.println("Queue remove time = "+removeTime/count+" ms");
        }
        // Pass 3: enqueue sorted values, then dequeue them all.
        count++;
        if (debugMemory) beforeMemory = DataStructures.getMemoryUse();
        if (debugTime) beforeAddTime = System.currentTimeMillis();
        for (int i=0; i<sorted.length; i++) {
            int item = sorted[i];
            queue.enqueue(item);
            if (validateStructure && !(queue.getSize()==(i+1))) {
                System.err.println("YIKES!! "+item+" caused a size mismatch.");
                handleError(queue);
                return false;
            }
            if (validateContents && !queue.contains(item)) {
                System.err.println("YIKES!! "+item+" doesn't exist.");
                handleError(queue);
                return false;
            }
        }
        if (debugTime) {
            afterAddTime = System.currentTimeMillis();
            addTime += afterAddTime-beforeAddTime;
            if (debug>0) System.out.println("Queue add time = "+addTime/count+" ms");
        }
        if (debugMemory) {
            afterMemory = DataStructures.getMemoryUse();
            memory += afterMemory-beforeMemory;
            if (debug>0) System.out.println("Queue memory use = "+(memory/count)+" bytes");
        }
        if (debug>1) System.out.println(queue.toString());
        if (debugTime) beforeRemoveTime = System.currentTimeMillis();
        for (int i=sorted.length-1; i>=0; i--) {
            int item = queue.dequeue();
            if (validateStructure && !(queue.getSize()==i)) {
                System.err.println("YIKES!! "+item+" caused a size mismatch.");
                handleError(queue);
                return false;
            }
            if (validateContents && queue.contains(item)) {
                System.err.println("YIKES!! "+item+" still exists.");
                handleError(queue);
                return false;
            }
        }
        if (debugTime) {
            afterRemoveTime = System.currentTimeMillis();
            // FIX: was subtracting beforeAddTime, inflating the reported remove time.
            removeTime += afterRemoveTime-beforeRemoveTime;
            if (debug>0) System.out.println("Queue remove time = "+removeTime/count+" ms");
        }
        testResults[test++] = new long[]{addTime/count,removeTime/count,memory};
        if (debug>1) System.out.println();
    }
    return true;
}
/**
 * Benchmarks and validates the RadixTree (map) over three passes:
 * unsorted forward put/remove, unsorted reverse put / forward remove,
 * and sorted put / reverse remove. Records average add/remove time and
 * memory use in testResults. Returns false (after handleError) on any
 * size or containment mismatch.
 */
private static boolean testRadixTree() {
    {
        long count = 0;
        long addTime = 0L;
        long removeTime = 0L;
        long beforeAddTime = 0L;
        long afterAddTime = 0L;
        long beforeRemoveTime = 0L;
        long afterRemoveTime = 0L;
        long memory = 0L;
        long beforeMemory = 0L;
        long afterMemory = 0L;
        //Radix Tree (map)
        if (debug>1) System.out.println("Radix Tree (map).");
        testNames[test] = "Radix Tree (map)";
        // Pass 1: put unsorted keys in order, then remove them in the same order.
        count++;
        if (debugMemory) beforeMemory = DataStructures.getMemoryUse();
        if (debugTime) beforeAddTime = System.currentTimeMillis();
        RadixTree<String,Integer> tree = new RadixTree<String,Integer>();
        for (int i=0; i<unsorted.length; i++) {
            int item = unsorted[i];
            String string = String.valueOf(item);
            tree.put(string, i);
            if (validateStructure && !(tree.getSize()==(i+1))) {
                System.err.println("YIKES!! "+item+" caused a size mismatch.");
                handleError(tree);
                return false;
            }
            if (validateContents && !tree.contains(string)) {
                System.err.println("YIKES!! "+string+" doesn't exist.");
                handleError(tree);
                return false;
            }
        }
        if (debugTime) {
            afterAddTime = System.currentTimeMillis();
            addTime += afterAddTime-beforeAddTime;
            if (debug>0) System.out.println("Radix Tree add time = "+addTime/count+" ms");
        }
        if (debugMemory) {
            afterMemory = DataStructures.getMemoryUse();
            memory += afterMemory-beforeMemory;
            if (debug>0) System.out.println("Radix Tree memory use = "+(memory/count)+" bytes");
        }
        if (debug>1) System.out.println(tree.toString());
        if (debugTime) beforeRemoveTime = System.currentTimeMillis();
        for (int i=0; i<unsorted.length; i++) {
            int item = unsorted[i];
            String string = String.valueOf(item);
            tree.remove(string);
            if (validateStructure && !(tree.getSize()==(unsorted.length-(i+1)))) {
                System.err.println("YIKES!! "+item+" caused a size mismatch.");
                handleError(tree);
                return false;
            }
            if (validateContents && tree.contains(string)) {
                System.err.println("YIKES!! "+string+" still exists.");
                handleError(tree);
                return false;
            }
        }
        if (debugTime) {
            afterRemoveTime = System.currentTimeMillis();
            removeTime += afterRemoveTime-beforeRemoveTime;
            if (debug>0) System.out.println("Radix Tree remove time = "+removeTime/count+" ms");
        }
        // Pass 2: put unsorted keys in reverse order, then remove forward.
        count++;
        if (debugMemory) beforeMemory = DataStructures.getMemoryUse();
        if (debugTime) beforeAddTime = System.currentTimeMillis();
        for (int i=unsorted.length-1; i>=0; i--) {
            int item = unsorted[i];
            String string = String.valueOf(item);
            tree.put(string, i);
            if (validateStructure && !(tree.getSize()==(unsorted.length-i))) {
                System.err.println("YIKES!! "+item+" caused a size mismatch.");
                handleError(tree);
                return false;
            }
            if (validateContents && !tree.contains(string)) {
                System.err.println("YIKES!! "+string+" doesn't exist.");
                handleError(tree);
                return false;
            }
        }
        if (debugTime) {
            afterAddTime = System.currentTimeMillis();
            addTime += afterAddTime-beforeAddTime;
            if (debug>0) System.out.println("Radix Tree add time = "+addTime/count+" ms");
        }
        if (debugMemory) {
            afterMemory = DataStructures.getMemoryUse();
            memory += afterMemory-beforeMemory;
            if (debug>0) System.out.println("Radix Tree memory use = "+(memory/count)+" bytes");
        }
        if (debug>1) System.out.println(tree.toString());
        if (debugTime) beforeRemoveTime = System.currentTimeMillis();
        for (int i=0; i<unsorted.length; i++) {
            int item = unsorted[i];
            String string = String.valueOf(item);
            tree.remove(string);
            if (validateStructure && !(tree.getSize()==(unsorted.length-(i+1)))) {
                System.err.println("YIKES!! "+item+" caused a size mismatch.");
                handleError(tree);
                return false;
            }
            if (validateContents && tree.contains(string)) {
                System.err.println("YIKES!! "+string+" still exists.");
                handleError(tree);
                return false;
            }
        }
        if (debugTime) {
            afterRemoveTime = System.currentTimeMillis();
            removeTime += afterRemoveTime-beforeRemoveTime;
            if (debug>0) System.out.println("Radix Tree remove time = "+removeTime/count+" ms");
        }
        // Pass 3: put sorted keys forward, then remove in reverse order.
        count++;
        if (debugMemory) beforeMemory = DataStructures.getMemoryUse();
        if (debugTime) beforeAddTime = System.currentTimeMillis();
        for (int i=0; i<sorted.length; i++) {
            int item = sorted[i];
            String string = String.valueOf(item);
            tree.put(string,i);
            if (validateStructure && !(tree.getSize()==(i+1))) {
                System.err.println("YIKES!! "+item+" caused a size mismatch.");
                handleError(tree);
                return false;
            }
            if (validateContents && !tree.contains(string)) {
                System.err.println("YIKES!! "+item+" doesn't exist.");
                handleError(tree);
                return false;
            }
        }
        if (debugTime) {
            afterAddTime = System.currentTimeMillis();
            addTime += afterAddTime-beforeAddTime;
            if (debug>0) System.out.println("Radix Tree add time = "+addTime/count+" ms");
        }
        if (debugMemory) {
            afterMemory = DataStructures.getMemoryUse();
            memory += afterMemory-beforeMemory;
            if (debug>0) System.out.println("Radix Tree memory use = "+(memory/count)+" bytes");
        }
        if (debug>1) System.out.println(tree.toString());
        if (debugTime) beforeRemoveTime = System.currentTimeMillis();
        for (int i=sorted.length-1; i>=0; i--) {
            int item = sorted[i];
            String string = String.valueOf(item);
            tree.remove(string);
            if (validateStructure && !(tree.getSize()==i)) {
                System.err.println("YIKES!! "+item+" caused a size mismatch.");
                handleError(tree);
                return false;
            }
            if (validateContents && tree.contains(string)) {
                System.err.println("YIKES!! "+item+" still exists.");
                handleError(tree);
                return false;
            }
        }
        if (debugTime) {
            afterRemoveTime = System.currentTimeMillis();
            // FIX: was subtracting beforeAddTime, inflating the reported remove time.
            removeTime += afterRemoveTime-beforeRemoveTime;
            if (debug>0) System.out.println("Radix Tree remove time = "+removeTime/count+" ms");
        }
        testResults[test++] = new long[]{addTime/count,removeTime/count,memory};
        if (debug>1) System.out.println();
    }
    return true;
}
/**
 * Smoke-tests the SegmentTree: builds four segments (one point per quadrant),
 * runs range queries, and applies updates that shift points between quadrants.
 * Always returns true; output is only emitted when debug&gt;1.
 */
private static boolean testSegmentTree() {
    {
        //Segment tree
        if (debug>1) System.out.println("Segment Tree.");
        // Segment i carries a single point in quadrant i (one-hot flags).
        SegmentTree.Segment[] segments = new SegmentTree.Segment[4];
        for (int i=0; i<segments.length; i++) {
            segments[i] = new SegmentTree.Segment(i,
                                                  (i==0)?1:0,
                                                  (i==1)?1:0,
                                                  (i==2)?1:0,
                                                  (i==3)?1:0);
        }
        SegmentTree tree = new SegmentTree(segments);
        SegmentTree.Query result = tree.query(0, 3);
        if (debug>1) System.out.println(result.quad1+" "+result.quad2+" "+result.quad3+" "+result.quad4);
        tree.update(1, 0, -1, 1, 0); //Move the first point from quadrant one to quadrant two
        tree.update(2, 0, 1, -1, 0); //Move the second point from quadrant two to quadrant one
        tree.update(3, 1, 0, 0, -1); //Move the third point from quadrant third to quadrant zero
        result = tree.query(2, 3);
        if (debug>1) System.out.println(result.quad1+" "+result.quad2+" "+result.quad3+" "+result.quad4);
        tree.update(0, -1, 1, 0, 0); //Move the zeroth point from quadrant zero to quadrant one
        tree.update(1, 0, 0, -1, 1); //Move the first point from quadrant three to quadrant four
        result = tree.query(0, 2);
        if (debug>1) System.out.println(result.quad1+" "+result.quad2+" "+result.quad3+" "+result.quad4);
        if (debug>1) System.out.println();
    }
    return true;
}
/**
 * Benchmarks and validates the SkipList over three passes:
 * unsorted forward add/remove, unsorted reverse add / forward remove,
 * and sorted add / reverse remove. Records average add/remove time and
 * memory use in testResults. Returns false (after handleError) on any
 * size or containment mismatch.
 */
private static boolean testSkipList() {
    {
        long count = 0;
        long addTime = 0L;
        long removeTime = 0L;
        long beforeAddTime = 0L;
        long afterAddTime = 0L;
        long beforeRemoveTime = 0L;
        long afterRemoveTime = 0L;
        long memory = 0L;
        long beforeMemory = 0L;
        long afterMemory = 0L;
        // SkipList
        if (debug>1) System.out.println("Skip List.");
        testNames[test] = "Skip List";
        // Pass 1: add unsorted values in order, then remove them in the same order.
        count++;
        if (debugMemory) beforeMemory = DataStructures.getMemoryUse();
        if (debugTime) beforeAddTime = System.currentTimeMillis();
        SkipList<Integer> list = new SkipList<Integer>();
        for (int i=0; i<unsorted.length; i++) {
            int item = unsorted[i];
            list.add(item);
            if (validateStructure && !(list.getSize()==(i+1))) {
                System.err.println("YIKES!! "+item+" caused a size mismatch.");
                handleError(list);
                return false;
            }
            if (validateContents && !list.contains(item)) {
                System.err.println("YIKES!! "+item+" doesn't exist.");
                handleError(list);
                return false;
            }
        }
        if (debugTime) {
            afterAddTime = System.currentTimeMillis();
            addTime += afterAddTime-beforeAddTime;
            if (debug>0) System.out.println("Skip List add time = "+addTime/count+" ms");
        }
        if (debugMemory) {
            afterMemory = DataStructures.getMemoryUse();
            memory += afterMemory-beforeMemory;
            if (debug>0) System.out.println("Skip List memory use = "+(memory/count)+" bytes");
        }
        if (debug>1) System.out.println(list.toString());
        if (debugTime) beforeRemoveTime = System.currentTimeMillis();
        for (int i=0; i<unsorted.length; i++) {
            int item = unsorted[i];
            list.remove(item);
            if (validateStructure && !(list.getSize()==unsorted.length-(i+1))) {
                System.err.println("YIKES!! "+item+" caused a size mismatch.");
                handleError(list);
                return false;
            }
            if (validateContents && list.contains(item)) {
                System.err.println("YIKES!! "+item+" still exist.");
                handleError(list);
                return false;
            }
        }
        if (debugTime) {
            afterRemoveTime = System.currentTimeMillis();
            removeTime += afterRemoveTime-beforeRemoveTime;
            if (debug>0) System.out.println("Skip List remove time = "+removeTime/count+" ms");
        }
        // Pass 2: add unsorted values in reverse order, then remove forward.
        count++;
        if (debugMemory) beforeMemory = DataStructures.getMemoryUse();
        if (debugTime) beforeAddTime = System.currentTimeMillis();
        for (int i=unsorted.length-1; i>=0; i--) {
            int item = unsorted[i];
            list.add(item);
            if (validateStructure && !(list.getSize()==unsorted.length-i)) {
                System.err.println("YIKES!! "+item+" caused a size mismatch.");
                handleError(list);
                return false;
            }
            if (validateContents && !list.contains(item)) {
                System.err.println("YIKES!! "+item+" doesn't exist.");
                handleError(list);
                return false;
            }
        }
        if (debugTime) {
            afterAddTime = System.currentTimeMillis();
            addTime += afterAddTime-beforeAddTime;
            if (debug>0) System.out.println("Skip List add time = "+addTime/count+" ms");
        }
        if (debugMemory) {
            afterMemory = DataStructures.getMemoryUse();
            memory += afterMemory-beforeMemory;
            if (debug>0) System.out.println("Skip List memory use = "+(memory/count)+" bytes");
        }
        if (debug>1) System.out.println(list.toString());
        if (debugTime) beforeRemoveTime = System.currentTimeMillis();
        for (int i=0; i<unsorted.length; i++) {
            int item = unsorted[i];
            list.remove(item);
            if (validateStructure && !(list.getSize()==unsorted.length-(i+1))) {
                System.err.println("YIKES!! "+item+" caused a size mismatch.");
                handleError(list);
                return false;
            }
            if (validateContents && list.contains(item)) {
                System.err.println("YIKES!! "+item+" still exists.");
                handleError(list);
                return false;
            }
        }
        if (debugTime) {
            afterRemoveTime = System.currentTimeMillis();
            removeTime += afterRemoveTime-beforeRemoveTime;
            if (debug>0) System.out.println("Skip List remove time = "+removeTime/count+" ms");
        }
        // Pass 3: add sorted values forward, then remove in reverse order.
        count++;
        if (debugMemory) beforeMemory = DataStructures.getMemoryUse();
        if (debugTime) beforeAddTime = System.currentTimeMillis();
        for (int i=0; i<sorted.length; i++) {
            int item = sorted[i];
            list.add(item);
            if (validateStructure && !(list.getSize()==(i+1))) {
                System.err.println("YIKES!! "+item+" caused a size mismatch.");
                handleError(list);
                return false;
            }
            if (validateContents && !list.contains(item)) {
                System.err.println("YIKES!! "+item+" doesn't exist.");
                handleError(list);
                return false;
            }
        }
        if (debugTime) {
            afterAddTime = System.currentTimeMillis();
            addTime += afterAddTime-beforeAddTime;
            if (debug>0) System.out.println("Skip List add time = "+addTime/count+" ms");
        }
        if (debugMemory) {
            afterMemory = DataStructures.getMemoryUse();
            memory += afterMemory-beforeMemory;
            if (debug>0) System.out.println("Skip List memory use = "+(memory/count)+" bytes");
        }
        if (debug>1) System.out.println(list.toString());
        if (debugTime) beforeRemoveTime = System.currentTimeMillis();
        for (int i=sorted.length-1; i>=0; i--) {
            int item = sorted[i];
            list.remove(item);
            if (validateStructure && !(list.getSize()==i)) {
                System.err.println("YIKES!! "+item+" caused a size mismatch.");
                handleError(list);
                return false;
            }
            if (validateContents && list.contains(item)) {
                System.err.println("YIKES!! "+item+" still exists.");
                handleError(list);
                return false;
            }
        }
        if (debugTime) {
            afterRemoveTime = System.currentTimeMillis();
            // FIX: was subtracting beforeAddTime, inflating the reported remove time.
            removeTime += afterRemoveTime-beforeRemoveTime;
            if (debug>0) System.out.println("Skip List remove time = "+removeTime/count+" ms");
        }
        testResults[test++] = new long[]{addTime/count,removeTime/count,memory};
        if (debug>1) System.out.println();
    }
    return true;
}
/**
 * Benchmarks and validates the SplayTree over three passes:
 * unsorted forward add/remove, unsorted reverse add / forward remove,
 * and sorted add / reverse remove (with structural validation).
 * Records average add/remove time and memory use in testResults.
 * Returns false (after handleError) on any size, containment, or
 * validation failure.
 */
private static boolean testSplayTree() {
    {
        long count = 0;
        long addTime = 0L;
        long removeTime = 0L;
        long beforeAddTime = 0L;
        long afterAddTime = 0L;
        long beforeRemoveTime = 0L;
        long afterRemoveTime = 0L;
        long memory = 0L;
        long beforeMemory = 0L;
        long afterMemory = 0L;
        //Splay Tree
        if (debug>1) System.out.println("Splay Tree.");
        testNames[test] = "Splay Tree";
        // Pass 1: add unsorted values in order, then remove them in the same order.
        count++;
        if (debugMemory) beforeMemory = DataStructures.getMemoryUse();
        if (debugTime) beforeAddTime = System.currentTimeMillis();
        SplayTree<Integer> splay = new SplayTree<Integer>();
        for (int i=0; i<unsorted.length; i++) {
            int item = unsorted[i];
            splay.add(item);
            if (validateStructure && !(splay.getSize()==(i+1))) {
                System.err.println("YIKES!! "+item+" caused a size mismatch.");
                handleError(splay);
                return false;
            }
            if (validateContents && !splay.contains(item)) {
                System.err.println("YIKES!! "+item+" doesn't exists.");
                handleError(splay);
                return false;
            }
        }
        if (debugTime) {
            afterAddTime = System.currentTimeMillis();
            addTime += afterAddTime-beforeAddTime;
            if (debug>0) System.out.println("Splay Tree add time = "+addTime/count+" ms");
        }
        if (debugMemory) {
            afterMemory = DataStructures.getMemoryUse();
            memory += afterMemory-beforeMemory;
            if (debug>0) System.out.println("Splay Tree memory use = "+(memory/count)+" bytes");
        }
        if (debug>1) System.out.println(splay.toString());
        if (debugTime) beforeRemoveTime = System.currentTimeMillis();
        for (int i=0; i<unsorted.length; i++) {
            int item = unsorted[i];
            splay.remove(item);
            if (validateStructure && !(splay.getSize()==((unsorted.length-1)-i))) {
                System.err.println("YIKES!! "+item+" caused a size mismatch.");
                handleError(splay);
                return false;
            }
            if (validateContents && splay.contains(item)) {
                System.err.println("YIKES!! "+item+" still exists.");
                handleError(splay);
                return false;
            }
        }
        if (debugTime) {
            afterRemoveTime = System.currentTimeMillis();
            removeTime += afterRemoveTime-beforeRemoveTime;
            if (debug>0) System.out.println("Splay Tree remove time = "+removeTime/count+" ms");
        }
        // Pass 2: add unsorted values in reverse order, then remove forward.
        count++;
        if (debugMemory) beforeMemory = DataStructures.getMemoryUse();
        if (debugTime) beforeAddTime = System.currentTimeMillis();
        for (int i=unsorted.length-1; i>=0; i--) {
            int item = unsorted[i];
            splay.add(item);
            if (validateStructure && !(splay.getSize()==(unsorted.length-i))) {
                System.err.println("YIKES!! "+item+" caused a size mismatch.");
                handleError(splay);
                return false;
            }
            if (validateContents && !splay.contains(item)) {
                System.err.println("YIKES!! "+item+" doesn't exists.");
                handleError(splay);
                return false;
            }
        }
        if (debugTime) {
            afterAddTime = System.currentTimeMillis();
            addTime += afterAddTime-beforeAddTime;
            if (debug>0) System.out.println("Splay Tree add time = "+addTime/count+" ms");
        }
        if (debugMemory) {
            afterMemory = DataStructures.getMemoryUse();
            memory += afterMemory-beforeMemory;
            if (debug>0) System.out.println("Splay Tree memory use = "+(memory/count)+" bytes");
        }
        if (debug>1) System.out.println(splay.toString());
        if (debugTime) beforeRemoveTime = System.currentTimeMillis();
        for (int i=0; i<unsorted.length; i++) {
            int item = unsorted[i];
            splay.remove(item);
            if (validateStructure && !(splay.getSize()==((unsorted.length-1)-i))) {
                System.err.println("YIKES!! "+item+" caused a size mismatch.");
                handleError(splay);
                return false;
            }
            if (validateContents && splay.contains(item)) {
                System.err.println("YIKES!! "+item+" still exists.");
                handleError(splay);
                return false;
            }
        }
        if (debugTime) {
            afterRemoveTime = System.currentTimeMillis();
            removeTime += afterRemoveTime-beforeRemoveTime;
            if (debug>0) System.out.println("Splay Tree remove time = "+removeTime/count+" ms");
        }
        // Pass 3: add sorted values forward, then remove in reverse order,
        // additionally validating the tree structure after each removal.
        count++;
        if (debugMemory) beforeMemory = DataStructures.getMemoryUse();
        if (debugTime) beforeAddTime = System.currentTimeMillis();
        for (int i=0; i<sorted.length; i++) {
            int item = sorted[i];
            splay.add(item);
            if (validateStructure && !(splay.getSize()==(i+1))) {
                System.err.println("YIKES!! "+item+" caused a size mismatch.");
                handleError(splay);
                return false;
            }
            if (validateContents && !splay.contains(item)) {
                System.err.println("YIKES!! "+item+" doesn't exist.");
                handleError(splay);
                return false;
            }
        }
        if (debugTime) {
            afterAddTime = System.currentTimeMillis();
            addTime += afterAddTime-beforeAddTime;
            if (debug>0) System.out.println("Splay Tree add time = "+addTime/count+" ms");
        }
        if (debugMemory) {
            afterMemory = DataStructures.getMemoryUse();
            memory += afterMemory-beforeMemory;
            if (debug>0) System.out.println("Splay Tree memory use = "+(memory/count)+" bytes");
        }
        if (debug>1) System.out.println(splay.toString());
        if (debugTime) beforeRemoveTime = System.currentTimeMillis();
        for (int i=sorted.length-1; i>=0; i--) {
            int item = sorted[i];
            splay.remove(item);
            if (validateStructure && !splay.validate()) {
                System.err.println("YIKES!! Splay Tree isn't valid.");
                handleError(splay);
                return false;
            }
            if (validateStructure && !(splay.getSize()==i)) {
                System.err.println("YIKES!! "+item+" caused a size mismatch.");
                handleError(splay);
                return false;
            }
            if (validateContents && splay.contains(item)) {
                System.err.println("YIKES!! "+item+" still exists.");
                handleError(splay);
                return false;
            }
        }
        if (debugTime) {
            afterRemoveTime = System.currentTimeMillis();
            // FIX: was subtracting beforeAddTime, inflating the reported remove time.
            removeTime += afterRemoveTime-beforeRemoveTime;
            if (debug>0) System.out.println("Splay Tree remove time = "+removeTime/count+" ms");
        }
        testResults[test++] = new long[]{addTime/count,removeTime/count,memory};
        if (debug>1) System.out.println();
    }
    return true;
}
/**
 * Benchmarks and validates the Stack over three passes:
 * unsorted forward push/pop, unsorted reverse push / pop,
 * and sorted push / pop. Records average add/remove time and memory
 * use in testResults. Returns false (after handleError) on any size
 * or containment mismatch.
 */
private static boolean testStack() {
    {
        long count = 0;
        long addTime = 0L;
        long removeTime = 0L;
        long beforeAddTime = 0L;
        long afterAddTime = 0L;
        long beforeRemoveTime = 0L;
        long afterRemoveTime = 0L;
        long memory = 0L;
        long beforeMemory = 0L;
        long afterMemory = 0L;
        // Stack
        if (debug>1) System.out.println("Stack.");
        testNames[test] = "Stack";
        // Pass 1: push unsorted values in order, then pop them all.
        count++;
        if (debugMemory) beforeMemory = DataStructures.getMemoryUse();
        if (debugTime) beforeAddTime = System.currentTimeMillis();
        Stack<Integer> stack = new Stack<Integer>();
        for (int i=0; i<unsorted.length; i++) {
            int item = unsorted[i];
            stack.push(item);
            if (validateStructure && !(stack.getSize()==i+1)) {
                System.err.println("YIKES!! "+item+" caused a size mismatch.");
                handleError(stack);
                return false;
            }
            if (validateContents && !stack.contains(item)) {
                System.err.println("YIKES!! "+item+" doesn't exists.");
                handleError(stack);
                return false;
            }
        }
        if (debugTime) {
            afterAddTime = System.currentTimeMillis();
            addTime += afterAddTime-beforeAddTime;
            if (debug>0) System.out.println("Stack add time = "+addTime/count+" ms");
        }
        if (debugMemory) {
            afterMemory = DataStructures.getMemoryUse();
            memory += afterMemory-beforeMemory;
            if (debug>0) System.out.println("Stack memory use = "+(memory/count)+" bytes");
        }
        if (debug>1) System.out.println(stack.toString());
        if (debugTime) beforeRemoveTime = System.currentTimeMillis();
        int size = stack.getSize();
        for (int i=0; i<size; i++) {
            int item = stack.pop();
            if (validateStructure && !(stack.getSize()==unsorted.length-(i+1))) {
                System.err.println("YIKES!! "+item+" caused a size mismatch.");
                handleError(stack);
                return false;
            }
            if (validateContents && stack.contains(item)) {
                System.err.println("YIKES!! "+item+" still exists.");
                handleError(stack);
                return false;
            }
        }
        if (debugTime) {
            afterRemoveTime = System.currentTimeMillis();
            removeTime += afterRemoveTime-beforeRemoveTime;
            if (debug>0) System.out.println("Stack remove time = "+removeTime/count+" ms");
        }
        // Pass 2: push unsorted values in reverse order, then pop them all.
        count++;
        if (debugMemory) beforeMemory = DataStructures.getMemoryUse();
        if (debugTime) beforeAddTime = System.currentTimeMillis();
        for (int i=unsorted.length-1; i>=0; i--) {
            int item = unsorted[i];
            stack.push(item);
            if (validateStructure && !(stack.getSize()==unsorted.length-i)) {
                System.err.println("YIKES!! "+item+" caused a size mismatch.");
                handleError(stack);
                return false;
            }
            if (validateContents && !stack.contains(item)) {
                System.err.println("YIKES!! "+item+" doesn't exists.");
                handleError(stack);
                return false;
            }
        }
        if (debugTime) {
            afterAddTime = System.currentTimeMillis();
            addTime += afterAddTime-beforeAddTime;
            if (debug>0) System.out.println("Stack add time = "+addTime/count+" ms");
        }
        if (debugMemory) {
            afterMemory = DataStructures.getMemoryUse();
            memory += afterMemory-beforeMemory;
            if (debug>0) System.out.println("Stack memory use = "+(memory/count)+" bytes");
        }
        if (debug>1) System.out.println(stack.toString());
        if (debugTime) beforeRemoveTime = System.currentTimeMillis();
        for (int i=0; i<unsorted.length; i++) {
            int item = stack.pop();
            if (validateStructure && !(stack.getSize()==unsorted.length-(i+1))) {
                System.err.println("YIKES!! "+item+" caused a size mismatch.");
                handleError(stack);
                return false;
            }
            if (validateContents && stack.contains(item)) {
                System.err.println("YIKES!! "+item+" still exists.");
                handleError(stack);
                return false;
            }
        }
        if (debugTime) {
            afterRemoveTime = System.currentTimeMillis();
            removeTime += afterRemoveTime-beforeRemoveTime;
            if (debug>0) System.out.println("Stack remove time = "+removeTime/count+" ms");
        }
        // Pass 3: push sorted values, then pop them all.
        count++;
        if (debugMemory) beforeMemory = DataStructures.getMemoryUse();
        if (debugTime) beforeAddTime = System.currentTimeMillis();
        for (int i=0; i<sorted.length; i++) {
            int item = sorted[i];
            stack.push(item);
            if (validateStructure && !(stack.getSize()==(i+1))) {
                System.err.println("YIKES!! "+item+" caused a size mismatch.");
                handleError(stack);
                return false;
            }
            if (validateContents && !stack.contains(item)) {
                System.err.println("YIKES!! "+item+" doesn't exist.");
                handleError(stack);
                return false;
            }
        }
        if (debugTime) {
            afterAddTime = System.currentTimeMillis();
            addTime += afterAddTime-beforeAddTime;
            if (debug>0) System.out.println("Stack add time = "+addTime/count+" ms");
        }
        if (debugMemory) {
            afterMemory = DataStructures.getMemoryUse();
            memory += afterMemory-beforeMemory;
            if (debug>0) System.out.println("Stack memory use = "+(memory/count)+" bytes");
        }
        if (debug>1) System.out.println(stack.toString());
        if (debugTime) beforeRemoveTime = System.currentTimeMillis();
        for (int i=sorted.length-1; i>=0; i--) {
            int item = stack.pop();
            if (validateStructure && !(stack.getSize()==i)) {
                System.err.println("YIKES!! "+item+" caused a size mismatch.");
                handleError(stack);
                return false;
            }
            if (validateContents && stack.contains(item)) {
                System.err.println("YIKES!! "+item+" still exists.");
                handleError(stack);
                return false;
            }
        }
        if (debugTime) {
            afterRemoveTime = System.currentTimeMillis();
            // FIX: was subtracting beforeAddTime, inflating the reported remove time.
            removeTime += afterRemoveTime-beforeRemoveTime;
            if (debug>0) System.out.println("Stack remove time = "+removeTime/count+" ms");
        }
        testResults[test++] = new long[]{addTime/count,removeTime/count,memory};
        if (debug>1) System.out.println();
    }
    return true;
}
/**
 * Smoke-tests the SuffixTree: the full input and a known substring must be
 * found, and a non-substring must be rejected. Returns false on any failure.
 */
private static boolean testSuffixTree() {
    {
        //Suffix Tree
        if (debug>1) System.out.println("Suffix Tree.");
        final String bookkeeper = "bookkeeper";
        final SuffixTree<String> tree = new SuffixTree<String>(bookkeeper);
        if (debug>1) System.out.println(tree.toString());
        if (debug>1) System.out.println(tree.getSuffixes());
        // The entire input string is trivially a substring of itself.
        if (!tree.doesSubStringExist(bookkeeper)) {
            System.err.println("YIKES!! "+bookkeeper+" doesn't exists.");
            handleError(tree);
            return false;
        }
        // "booker" is not a substring of "bookkeeper" and must be rejected.
        final String failed = "booker";
        if (tree.doesSubStringExist(failed)) {
            System.err.println("YIKES!! "+failed+" exists.");
            handleError(tree);
            return false;
        }
        // "kkee" is a genuine substring and must be found.
        final String pass = "kkee";
        if (!tree.doesSubStringExist(pass)) {
            System.err.println("YIKES!! "+pass+" doesn't exists.");
            handleError(tree);
            return false;
        }
        if (debug>1) System.out.println();
    }
    return true;
}
/**
 * Smoke-tests the SuffixTrie: the full input and a known substring must be
 * found, and a non-substring must be rejected. Returns false on any failure.
 */
private static boolean testSuffixTrie() {
    {
        //Suffix Trie
        if (debug>1) System.out.println("Suffix Trie.");
        final String bookkeeper = "bookkeeper";
        final SuffixTrie<String> trie = new SuffixTrie<String>(bookkeeper);
        if (debug>1) System.out.println(trie.toString());
        if (debug>1) System.out.println(trie.getSuffixes());
        // The entire input string is trivially a substring of itself.
        if (!trie.doesSubStringExist(bookkeeper)) {
            System.err.println("YIKES!! "+bookkeeper+" doesn't exists.");
            handleError(trie);
            return false;
        }
        // "booker" is not a substring of "bookkeeper" and must be rejected.
        final String failed = "booker";
        if (trie.doesSubStringExist(failed)) {
            System.err.println("YIKES!! "+failed+" exists.");
            handleError(trie);
            return false;
        }
        // "kkee" is a genuine substring and must be found.
        final String pass = "kkee";
        if (!trie.doesSubStringExist(pass)) {
            System.err.println("YIKES!! "+pass+" doesn't exists.");
            handleError(trie);
            return false;
        }
        if (debug>1) System.out.println();
    }
    return true;
}
/**
 * Benchmarks and validates the Treap over three passes: unsorted add/remove,
 * reverse-unsorted add / unsorted remove, and sorted add / reverse remove.
 * Records averaged add/remove times and memory use in testResults.
 * Returns false (after handleError) on any structural or content failure.
 */
private static boolean testTreap() {
    {
        long count = 0;
        long addTime = 0L;
        long removeTime = 0L;
        long beforeAddTime = 0L;
        long afterAddTime = 0L;
        long beforeRemoveTime = 0L;
        long afterRemoveTime = 0L;
        long memory = 0L;
        long beforeMemory = 0L;
        long afterMemory = 0L;
        //Treap
        if (debug>1) System.out.println("Treap.");
        testNames[test] = "Treap";
        // Pass 1: add in unsorted order, then remove in the same order.
        count++;
        if (debugMemory) beforeMemory = DataStructures.getMemoryUse();
        if (debugTime) beforeAddTime = System.currentTimeMillis();
        Treap<Integer> treap = new Treap<Integer>();
        for (int i=0; i<unsorted.length; i++) {
            int item = unsorted[i];
            treap.add(item);
            if (validateStructure && !(treap.getSize()==i+1)) {
                System.err.println("YIKES!! "+item+" caused a size mismatch.1");
                handleError(treap);
                return false;
            }
            if (validateContents && !treap.contains(item)) {
                System.err.println("YIKES!! "+item+" doesn't exists.");
                handleError(treap);
                return false;
            }
        }
        if (debugTime) {
            afterAddTime = System.currentTimeMillis();
            addTime += afterAddTime-beforeAddTime;
            if (debug>0) System.out.println("Treap add time = "+addTime/count+" ms");
        }
        if (debugMemory) {
            afterMemory = DataStructures.getMemoryUse();
            memory += afterMemory-beforeMemory;
            if (debug>0) System.out.println("Treap memory use = "+(memory/count)+" bytes");
        }
        if (debug>1) System.out.println(treap.toString());
        if (debugTime) beforeRemoveTime = System.currentTimeMillis();
        for (int i=0; i<unsorted.length; i++) {
            int item = unsorted[i];
            treap.remove(item);
            if (validateStructure && !(treap.getSize()==unsorted.length-(i+1))) {
                System.err.println("YIKES!! "+item+" caused a size mismatch.2");
                handleError(treap);
                return false;
            }
            if (validateContents && treap.contains(item)) {
                System.err.println("YIKES!! "+item+" still exists.");
                handleError(treap);
                return false;
            }
        }
        if (debugTime) {
            afterRemoveTime = System.currentTimeMillis();
            removeTime += afterRemoveTime-beforeRemoveTime;
            if (debug>0) System.out.println("Treap remove time = "+removeTime/count+" ms");
        }
        // Pass 2: add in reverse-unsorted order, remove in unsorted order.
        count++;
        if (debugMemory) beforeMemory = DataStructures.getMemoryUse();
        if (debugTime) beforeAddTime = System.currentTimeMillis();
        for (int i=unsorted.length-1; i>=0; i--) {
            int item = unsorted[i];
            treap.add(item);
            if (validateStructure && !(treap.getSize()==unsorted.length-i)) {
                System.err.println("YIKES!! "+item+" caused a size mismatch.3");
                handleError(treap);
                return false;
            }
            if (validateContents && !treap.contains(item)) {
                System.err.println("YIKES!! "+item+" doesn't exists.");
                handleError(treap);
                return false;
            }
        }
        if (debugTime) {
            afterAddTime = System.currentTimeMillis();
            addTime += afterAddTime-beforeAddTime;
            if (debug>0) System.out.println("Treap add time = "+addTime/count+" ms");
        }
        if (debugMemory) {
            afterMemory = DataStructures.getMemoryUse();
            memory += afterMemory-beforeMemory;
            if (debug>0) System.out.println("Treap memory use = "+(memory/count)+" bytes");
        }
        if (debug>1) System.out.println(treap.toString());
        if (debugTime) beforeRemoveTime = System.currentTimeMillis();
        for (int i=0; i<unsorted.length; i++) {
            int item = unsorted[i];
            treap.remove(item);
            if (validateStructure && !(treap.getSize()==unsorted.length-(i+1))) {
                System.err.println("YIKES!! "+item+" caused a size mismatch.4");
                handleError(treap);
                return false;
            }
            if (validateContents && treap.contains(item)) {
                System.err.println("YIKES!! "+item+" still exists.");
                handleError(treap);
                return false;
            }
        }
        if (debugTime) {
            afterRemoveTime = System.currentTimeMillis();
            removeTime += afterRemoveTime-beforeRemoveTime;
            if (debug>0) System.out.println("Treap remove time = "+removeTime/count+" ms");
        }
        // Pass 3: add in sorted order, remove in reverse-sorted order.
        count++;
        if (debugMemory) beforeMemory = DataStructures.getMemoryUse();
        if (debugTime) beforeAddTime = System.currentTimeMillis();
        for (int i=0; i<sorted.length; i++) {
            int item = sorted[i];
            treap.add(item);
            if (validateStructure && !(treap.getSize()==(i+1))) {
                System.err.println("YIKES!! "+item+" caused a size mismatch.");
                handleError(treap);
                return false;
            }
            if (validateContents && !treap.contains(item)) {
                System.err.println("YIKES!! "+item+" doesn't exist.");
                handleError(treap);
                return false;
            }
        }
        if (debugTime) {
            afterAddTime = System.currentTimeMillis();
            addTime += afterAddTime-beforeAddTime;
            if (debug>0) System.out.println("Treap add time = "+addTime/count+" ms");
        }
        if (debugMemory) {
            afterMemory = DataStructures.getMemoryUse();
            memory += afterMemory-beforeMemory;
            if (debug>0) System.out.println("Treap memory use = "+(memory/count)+" bytes");
        }
        if (debug>1) System.out.println(treap.toString());
        if (debugTime) beforeRemoveTime = System.currentTimeMillis();
        for (int i=sorted.length-1; i>=0; i--) {
            int item = sorted[i];
            treap.remove(item);
            if (validateStructure && !treap.validate()) {
                System.err.println("YIKES!! Treap isn't valid.");
                handleError(treap);
                return false;
            }
            if (validateStructure && !(treap.getSize()==i)) {
                System.err.println("YIKES!! "+item+" caused a size mismatch.");
                handleError(treap);
                return false;
            }
            if (validateContents && treap.contains(item)) {
                System.err.println("YIKES!! "+item+" still exists.");
                handleError(treap);
                return false;
            }
        }
        if (debugTime) {
            afterRemoveTime = System.currentTimeMillis();
            // BUG FIX: previously subtracted beforeAddTime, which includes the add
            // phase and inflates the sorted-removal timing; use beforeRemoveTime.
            removeTime += afterRemoveTime-beforeRemoveTime;
            if (debug>0) System.out.println("Treap remove time = "+removeTime/count+" ms");
        }
        testResults[test++] = new long[]{addTime/count,removeTime/count,memory};
        if (debug>1) System.out.println();
    }
    return true;
}
/**
 * Benchmarks and validates the Trie (keys are the decimal string forms of the
 * test integers) over three passes: unsorted add/remove, reverse-unsorted add /
 * unsorted remove, and sorted add / reverse remove. Records averaged timings
 * and memory use in testResults. Returns false on any failure.
 */
private static boolean testTrie() {
    {
        long count = 0;
        long addTime = 0L;
        long removeTime = 0L;
        long beforeAddTime = 0L;
        long afterAddTime = 0L;
        long beforeRemoveTime = 0L;
        long afterRemoveTime = 0L;
        long memory = 0L;
        long beforeMemory = 0L;
        long afterMemory = 0L;
        //Trie.
        if (debug>1) System.out.println("Trie.");
        testNames[test] = "Trie";
        // Pass 1: add in unsorted order, then remove in the same order.
        count++;
        if (debugMemory) beforeMemory = DataStructures.getMemoryUse();
        if (debugTime) beforeAddTime = System.currentTimeMillis();
        Trie<String> trie = new Trie<String>();
        for (int i=0; i<unsorted.length; i++) {
            int item = unsorted[i];
            String string = String.valueOf(item);
            trie.add(string);
            if (validateStructure && !(trie.getSize()==i+1)) {
                System.err.println("YIKES!! "+item+" caused a size mismatch.");
                handleError(trie);
                return false;
            }
            if (validateContents && !trie.contains(string)) {
                System.err.println("YIKES!! "+string+" doesn't exist.");
                handleError(trie);
                return false;
            }
        }
        if (debugTime) {
            afterAddTime = System.currentTimeMillis();
            addTime += afterAddTime-beforeAddTime;
            if (debug>0) System.out.println("Trie add time = "+addTime/count+" ms");
        }
        if (debugMemory) {
            afterMemory = DataStructures.getMemoryUse();
            memory += afterMemory-beforeMemory;
            if (debug>0) System.out.println("Trie memory use = "+(memory/count)+" bytes");
        }
        if (debug>1) System.out.println(trie.toString());
        if (debugTime) beforeRemoveTime = System.currentTimeMillis();
        for (int i=0; i<unsorted.length; i++) {
            int item = unsorted[i];
            String string = String.valueOf(item);
            trie.remove(string);
            if (validateStructure && !(trie.getSize()==unsorted.length-(i+1))) {
                System.err.println("YIKES!! "+item+" caused a size mismatch.");
                handleError(trie);
                return false;
            }
            if (validateContents && trie.contains(string)) {
                System.err.println("YIKES!! "+string+" still exists.");
                handleError(trie);
                return false;
            }
        }
        if (debugTime) {
            afterRemoveTime = System.currentTimeMillis();
            removeTime += afterRemoveTime-beforeRemoveTime;
            if (debug>0) System.out.println("Trie remove time = "+removeTime/count+" ms");
        }
        // Pass 2: add in reverse-unsorted order, remove in unsorted order.
        count++;
        if (debugMemory) beforeMemory = DataStructures.getMemoryUse();
        if (debugTime) beforeAddTime = System.currentTimeMillis();
        for (int i=unsorted.length-1; i>=0; i--) {
            int item = unsorted[i];
            String string = String.valueOf(item);
            trie.add(string);
            if (validateStructure && !(trie.getSize()==unsorted.length-i)) {
                System.err.println("YIKES!! "+item+" caused a size mismatch.");
                handleError(trie);
                return false;
            }
            if (validateContents && !trie.contains(string)) {
                System.err.println("YIKES!! "+string+" doesn't exists.");
                handleError(trie);
                return false;
            }
        }
        if (debugTime) {
            afterAddTime = System.currentTimeMillis();
            addTime += afterAddTime-beforeAddTime;
            if (debug>0) System.out.println("Trie add time = "+addTime/count+" ms");
        }
        if (debugMemory) {
            afterMemory = DataStructures.getMemoryUse();
            memory += afterMemory-beforeMemory;
            if (debug>0) System.out.println("Trie memory use = "+(memory/count)+" bytes");
        }
        if (debug>1) System.out.println(trie.toString());
        if (debugTime) beforeRemoveTime = System.currentTimeMillis();
        for (int i=0; i<unsorted.length; i++) {
            int item = unsorted[i];
            String string = String.valueOf(item);
            trie.remove(string);
            if (validateStructure && !(trie.getSize()==unsorted.length-(i+1))) {
                System.err.println("YIKES!! "+item+" caused a size mismatch.");
                handleError(trie);
                return false;
            }
            if (validateContents && trie.contains(string)) {
                System.err.println("YIKES!! "+string+" still exists.");
                handleError(trie);
                return false;
            }
        }
        if (debugTime) {
            afterRemoveTime = System.currentTimeMillis();
            removeTime += afterRemoveTime-beforeRemoveTime;
            if (debug>0) System.out.println("Trie remove time = "+removeTime/count+" ms");
        }
        // Pass 3: add in sorted order, remove in reverse-sorted order.
        count++;
        if (debugMemory) beforeMemory = DataStructures.getMemoryUse();
        if (debugTime) beforeAddTime = System.currentTimeMillis();
        for (int i=0; i<sorted.length; i++) {
            int item = sorted[i];
            String string = String.valueOf(item);
            trie.add(string);
            if (validateStructure && !(trie.getSize()==(i+1))) {
                System.err.println("YIKES!! "+item+" caused a size mismatch.");
                handleError(trie);
                return false;
            }
            if (validateContents && !trie.contains(string)) {
                System.err.println("YIKES!! "+item+" doesn't exist.");
                handleError(trie);
                return false;
            }
        }
        if (debugTime) {
            afterAddTime = System.currentTimeMillis();
            addTime += afterAddTime-beforeAddTime;
            if (debug>0) System.out.println("Trie add time = "+addTime/count+" ms");
        }
        if (debugMemory) {
            afterMemory = DataStructures.getMemoryUse();
            memory += afterMemory-beforeMemory;
            if (debug>0) System.out.println("Trie memory use = "+(memory/count)+" bytes");
        }
        if (debug>1) System.out.println(trie.toString());
        if (debugTime) beforeRemoveTime = System.currentTimeMillis();
        for (int i=sorted.length-1; i>=0; i--) {
            int item = sorted[i];
            String string = String.valueOf(item);
            trie.remove(string);
            if (validateStructure && !(trie.getSize()==i)) {
                System.err.println("YIKES!! "+item+" caused a size mismatch.");
                handleError(trie);
                return false;
            }
            if (validateContents && trie.contains(string)) {
                System.err.println("YIKES!! "+item+" still exists.");
                handleError(trie);
                return false;
            }
        }
        if (debugTime) {
            afterRemoveTime = System.currentTimeMillis();
            // BUG FIX: previously subtracted beforeAddTime, which includes the add
            // phase and inflates the sorted-removal timing; use beforeRemoveTime.
            removeTime += afterRemoveTime-beforeRemoveTime;
            if (debug>0) System.out.println("Trie remove time = "+removeTime/count+" ms");
        }
        testResults[test++] = new long[]{addTime/count,removeTime/count,memory};
        if (debug>1) System.out.println();
    }
    return true;
}
/**
 * Benchmarks and validates the TrieMap (keys are the decimal string forms of
 * the test integers, values are loop indices) over three passes: unsorted
 * put/remove, reverse-unsorted put / unsorted remove, and sorted put / reverse
 * remove. Records averaged timings and memory use in testResults.
 * Returns false on any failure.
 */
private static boolean testTrieMap() {
    {
        long count = 0;
        long addTime = 0L;
        long removeTime = 0L;
        long beforeAddTime = 0L;
        long afterAddTime = 0L;
        long beforeRemoveTime = 0L;
        long afterRemoveTime = 0L;
        long memory = 0L;
        long beforeMemory = 0L;
        long afterMemory = 0L;
        //Trie Map
        if (debug>1) System.out.println("Trie Map.");
        testNames[test] = "Trie Map";
        // Pass 1: put in unsorted order, then remove in the same order.
        count++;
        if (debugMemory) beforeMemory = DataStructures.getMemoryUse();
        if (debugTime) beforeAddTime = System.currentTimeMillis();
        TrieMap<String,Integer> trieMap = new TrieMap<String,Integer>();
        for (int i=0; i<unsorted.length; i++) {
            int item = unsorted[i];
            String string = String.valueOf(item);
            trieMap.put(string, i);
            if (validateStructure && !(trieMap.getSize()==i+1)) {
                System.err.println("YIKES!! "+item+" caused a size mismatch.");
                handleError(trieMap);
                return false;
            }
            if (validateContents && !trieMap.contains(string)) {
                System.err.println("YIKES!! "+string+" doesn't exist.");
                handleError(trieMap);
                return false;
            }
        }
        if (debugTime) {
            afterAddTime = System.currentTimeMillis();
            addTime += afterAddTime-beforeAddTime;
            if (debug>0) System.out.println("Trie Map add time = "+addTime/count+" ms");
        }
        if (debugMemory) {
            afterMemory = DataStructures.getMemoryUse();
            memory += afterMemory-beforeMemory;
            if (debug>0) System.out.println("Trie Map memory use = "+(memory/count)+" bytes");
        }
        if (debug>1) System.out.println(trieMap.toString());
        if (debugTime) beforeRemoveTime = System.currentTimeMillis();
        for (int i=0; i<unsorted.length; i++) {
            int item = unsorted[i];
            String string = String.valueOf(item);
            trieMap.remove(string);
            if (validateStructure && !(trieMap.getSize()==(unsorted.length-(i+1)))) {
                System.err.println("YIKES!! "+item+" caused a size mismatch.");
                handleError(trieMap);
                return false;
            }
            if (validateContents && trieMap.contains(string)) {
                System.err.println("YIKES!! "+string+" still exists.");
                handleError(trieMap);
                return false;
            }
        }
        if (debugTime) {
            afterRemoveTime = System.currentTimeMillis();
            removeTime += afterRemoveTime-beforeRemoveTime;
            if (debug>0) System.out.println("Trie Map remove time = "+removeTime/count+" ms");
        }
        // Pass 2: put in reverse-unsorted order, remove in unsorted order.
        count++;
        if (debugMemory) beforeMemory = DataStructures.getMemoryUse();
        if (debugTime) beforeAddTime = System.currentTimeMillis();
        for (int i=unsorted.length-1; i>=0; i--) {
            int item = unsorted[i];
            String string = String.valueOf(item);
            trieMap.put(string, i);
            if (validateStructure && !(trieMap.getSize()==(unsorted.length-i))) {
                System.err.println("YIKES!! "+item+" caused a size mismatch.");
                handleError(trieMap);
                return false;
            }
            if (validateContents && !trieMap.contains(string)) {
                System.err.println("YIKES!! "+string+" doesn't exist.");
                handleError(trieMap);
                return false;
            }
        }
        if (debugTime) {
            afterAddTime = System.currentTimeMillis();
            addTime += afterAddTime-beforeAddTime;
            if (debug>0) System.out.println("Trie Map add time = "+addTime/count+" ms");
        }
        if (debugMemory) {
            afterMemory = DataStructures.getMemoryUse();
            memory += afterMemory-beforeMemory;
            if (debug>0) System.out.println("Trie Map memory use = "+(memory/count)+" bytes");
        }
        if (debug>1) System.out.println(trieMap.toString());
        if (debugTime) beforeRemoveTime = System.currentTimeMillis();
        for (int i=0; i<unsorted.length; i++) {
            int item = unsorted[i];
            String string = String.valueOf(item);
            trieMap.remove(string);
            if (validateStructure && !(trieMap.getSize()==(unsorted.length-(i+1)))) {
                System.err.println("YIKES!! "+item+" caused a size mismatch.");
                handleError(trieMap);
                return false;
            }
            if (validateContents && trieMap.contains(string)) {
                System.err.println("YIKES!! "+string+" still exists.");
                handleError(trieMap);
                return false;
            }
        }
        if (debugTime) {
            afterRemoveTime = System.currentTimeMillis();
            removeTime += afterRemoveTime-beforeRemoveTime;
            if (debug>0) System.out.println("Trie Map remove time = "+removeTime/count+" ms");
        }
        // Pass 3: put in sorted order, remove in reverse-sorted order.
        count++;
        if (debugMemory) beforeMemory = DataStructures.getMemoryUse();
        if (debugTime) beforeAddTime = System.currentTimeMillis();
        for (int i=0; i<sorted.length; i++) {
            int item = sorted[i];
            String string = String.valueOf(item);
            trieMap.put(string,i);
            if (validateStructure && !(trieMap.getSize()==(i+1))) {
                System.err.println("YIKES!! "+item+" caused a size mismatch.");
                handleError(trieMap);
                return false;
            }
            if (validateContents && !trieMap.contains(string)) {
                System.err.println("YIKES!! "+item+" doesn't exist.");
                handleError(trieMap);
                return false;
            }
        }
        if (debugTime) {
            afterAddTime = System.currentTimeMillis();
            addTime += afterAddTime-beforeAddTime;
            if (debug>0) System.out.println("Trie Map add time = "+addTime/count+" ms");
        }
        if (debugMemory) {
            afterMemory = DataStructures.getMemoryUse();
            memory += afterMemory-beforeMemory;
            if (debug>0) System.out.println("Trie Map memory use = "+(memory/count)+" bytes");
        }
        if (debug>1) System.out.println(trieMap.toString());
        if (debugTime) beforeRemoveTime = System.currentTimeMillis();
        for (int i=sorted.length-1; i>=0; i--) {
            int item = sorted[i];
            String string = String.valueOf(item);
            trieMap.remove(string);
            if (validateStructure && !(trieMap.getSize()==i)) {
                System.err.println("YIKES!! "+item+" caused a size mismatch.");
                handleError(trieMap);
                return false;
            }
            if (validateContents && trieMap.contains(string)) {
                System.err.println("YIKES!! "+item+" still exists.");
                handleError(trieMap);
                return false;
            }
        }
        if (debugTime) {
            afterRemoveTime = System.currentTimeMillis();
            // BUG FIX: previously subtracted beforeAddTime, which includes the add
            // phase and inflates the sorted-removal timing; use beforeRemoveTime.
            removeTime += afterRemoveTime-beforeRemoveTime;
            if (debug>0) System.out.println("Trie Map remove time = "+removeTime/count+" ms");
        }
        testResults[test++] = new long[]{addTime/count,removeTime/count,memory};
        if (debug>1) System.out.println();
    }
    return true;
}
/**
 * Renders the benchmark result table as a tab-separated string.
 *
 * @param names   per-test display names (entries may be null for unused slots)
 * @param results per-test {addTime ms, removeTime ms, memory bytes} triples
 * @return a human-readable table with one row per non-null test entry
 */
private static final String getTestResults(String[] names, long[][] results) {
    StringBuilder resultsBuilder = new StringBuilder();
    int KB = 1000;
    int MB = 1000*KB;
    long SECOND = 1000;
    long MINUTES = 60*SECOND;
    resultsBuilder.append("Data Structure ").append("\t");
    resultsBuilder.append("Add time").append("\t").append("Remove time").append("\t").append("Size");
    resultsBuilder.append("\n");
    for (int i=0; i<TESTS; i++) {
        String name = names[i];
        long[] result = results[i];
        if (name!=null && result!=null) {
            // Left-justify the name in a 20-char column; names of 20+ chars pass
            // through unchanged (same behavior as the old manual padding loop).
            name = String.format("%-20s", name);
            resultsBuilder.append(name).append("\t");
            // Scale the raw byte count to the largest fitting unit.
            long size = result[2];
            String sizeString = null;
            if (size>MB) {
                size = size/MB;
                sizeString = size+" MB";
            } else if (size>KB) {
                size = size/KB;
                sizeString = size+" KB";
            } else {
                sizeString = size+" Bytes";
            }
            // Scale the add time (ms) to minutes/seconds when large.
            long addTime = result[0];
            String addTimeString = null;
            if (addTime>MINUTES) {
                addTime = addTime/MINUTES;
                addTimeString = addTime+" mins";
            } else if (addTime>SECOND) {
                addTime = addTime/SECOND;
                addTimeString = addTime+" secs";
            } else {
                addTimeString = addTime+" ms";
            }
            // Scale the remove time (ms) the same way.
            long removeTime = result[1];
            String removeTimeString = null;
            if (removeTime>MINUTES) {
                removeTime = removeTime/MINUTES;
                removeTimeString = removeTime+" mins";
            } else if (removeTime>SECOND) {
                removeTime = removeTime/SECOND;
                removeTimeString = removeTime+" secs";
            } else {
                removeTimeString = removeTime+" ms";
            }
            resultsBuilder.append(addTimeString).append("\t\t");
            resultsBuilder.append(removeTimeString).append("\t\t");
            resultsBuilder.append(sizeString);
            resultsBuilder.append("\n");
        }
    }
    return resultsBuilder.toString();
}
/**
 * Formats the single-source shortest-path map (e.g. Dijkstra output) as text.
 *
 * @param start source vertex all paths originate from
 * @param map   destination vertex -> cost/path pair (value may be null)
 * @return one "From .. to vertex=.." line per destination, with the pair when present
 */
private static final String getPathMapString(Graph.Vertex<Integer> start, Map<Graph.Vertex<Integer>, Graph.CostPathPair<Integer>> map) {
    StringBuilder builder = new StringBuilder();
    // Iterate entries directly instead of keySet()+get() to avoid a second lookup.
    for (Map.Entry<Graph.Vertex<Integer>, Graph.CostPathPair<Integer>> entry : map.entrySet()) {
        Graph.Vertex<Integer> v = entry.getKey();
        Graph.CostPathPair<Integer> pair = entry.getValue();
        builder.append("From ").append(start.getValue()).append(" to vertex=").append(v.getValue()).append("\n");
        if (pair!=null) builder.append(pair.toString()).append("\n");
    }
    return builder.toString();
}
/**
 * Formats an all-pairs path map (e.g. Johnson's algorithm output) as text.
 *
 * @param paths source vertex -> (destination vertex -> edge set of the path)
 * @return one "From=.. to=.." line followed by its edge set, per pair
 */
private static final String getPathMapString(Map<Vertex<Integer>, Map<Vertex<Integer>, Set<Edge<Integer>>>> paths) {
    StringBuilder builder = new StringBuilder();
    // Iterate entries directly instead of keySet()+get() to avoid second lookups.
    for (Map.Entry<Vertex<Integer>, Map<Vertex<Integer>, Set<Edge<Integer>>>> outer : paths.entrySet()) {
        Graph.Vertex<Integer> v = outer.getKey();
        for (Map.Entry<Vertex<Integer>, Set<Edge<Integer>>> inner : outer.getValue().entrySet()) {
            Graph.Vertex<Integer> v2 = inner.getKey();
            builder.append("From=").append(v.getValue()).append(" to=").append(v2.getValue()).append("\n");
            Set<Graph.Edge<Integer>> path = inner.getValue();
            builder.append(path).append("\n");
        }
    }
    return builder.toString();
}
/**
 * Formats an all-pairs weight map (e.g. Floyd-Warshall output) as text.
 *
 * @param paths source vertex -> (destination vertex -> path weight)
 * @return one "From=.. to=.." line followed by the weight, per pair
 */
private static final String getWeightMapString(Map<Vertex<Integer>, Map<Vertex<Integer>, Integer>> paths) {
    StringBuilder builder = new StringBuilder();
    // Iterate entries directly instead of keySet()+get() to avoid second lookups.
    for (Map.Entry<Vertex<Integer>, Map<Vertex<Integer>, Integer>> outer : paths.entrySet()) {
        Graph.Vertex<Integer> v = outer.getKey();
        for (Map.Entry<Vertex<Integer>, Integer> inner : outer.getValue().entrySet()) {
            Graph.Vertex<Integer> v2 = inner.getKey();
            builder.append("From=").append(v.getValue()).append(" to=").append(v2.getValue()).append("\n");
            Integer weight = inner.getValue();
            builder.append(weight).append("\n");
        }
    }
    return builder.toString();
}
/**
 * Samples the JVM's currently-used heap (total minus free), forcing garbage
 * collection before each reading so the figure approximates live objects only.
 */
private static final long getMemoryUse() {
    putOutTheGarbage();
    final long total = Runtime.getRuntime().totalMemory();
    putOutTheGarbage();
    final long free = Runtime.getRuntime().freeMemory();
    return total - free;
}
/**
 * Runs two garbage-collection passes to give the collector a better chance
 * to settle before a memory measurement is taken.
 */
private static final void putOutTheGarbage() {
    for (int pass = 0; pass < 2; pass++) {
        collectGarbage();
    }
}
// Pause (ms) after each GC/finalization request to let the collector run.
private static final long fSLEEP_INTERVAL = 50;
/**
 * Requests a garbage collection and finalization pass, sleeping briefly after
 * each request to give the JVM time to act on it. Best-effort only: the JVM
 * is free to ignore System.gc().
 */
private static final void collectGarbage() {
    try {
        System.gc();
        Thread.sleep(fSLEEP_INTERVAL);
        System.runFinalization();
        Thread.sleep(fSLEEP_INTERVAL);
    } catch (InterruptedException ex) {
        // FIX: restore the interrupt status so callers up the stack can still
        // observe the interruption instead of it being silently swallowed.
        Thread.currentThread().interrupt();
        ex.printStackTrace();
    }
}
}
|
Seperated the sorted from the unsorted test timing
git-svn-id: 1708c3ad9ff9eb03a5ee6f7d8258d0b3769e07ec@198 032fbc0f-8cab-eb90-e552-f08422b9a96a
|
src/com/jwetherell/algorithms/DataStructures.java
|
Seperated the sorted from the unsorted test timing
|
|
Java
|
apache-2.0
|
3512749ca57f4f067e73e12fd93f9b230d17b5f7
| 0
|
multi-os-engine/moe-plugin-gradle,multi-os-engine/moe-plugin-gradle
|
/*
Copyright (C) 2016 Migeran
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.moe.gradle.tasks;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import com.google.gson.JsonParser;
import org.apache.commons.io.output.NullOutputStream;
import org.apache.commons.lang3.tuple.ImmutablePair;
import org.apache.tools.ant.taskdefs.condition.Os;
import org.gradle.api.DefaultTask;
import org.gradle.api.GradleException;
import org.gradle.api.Project;
import org.gradle.api.Task;
import org.gradle.api.logging.Logger;
import org.gradle.api.logging.Logging;
import org.gradle.api.tasks.JavaExec;
import org.gradle.api.tasks.SourceSet;
import org.gradle.api.tasks.SourceSetContainer;
import org.gradle.api.tasks.TaskContainer;
import org.gradle.process.ExecSpec;
import org.gradle.process.JavaExecSpec;
import org.moe.common.utils.SimCtl;
import org.moe.gradle.AbstractMoePlugin;
import org.moe.gradle.MoePlugin;
import org.moe.gradle.anns.IgnoreUnused;
import org.moe.gradle.anns.NotNull;
import org.moe.gradle.anns.Nullable;
import org.moe.gradle.tasks.Launchers.DeviceLauncherBuilder.InstallMode;
import org.moe.gradle.utils.FileUtils;
import org.moe.gradle.utils.JUnitTestCollector;
import org.moe.gradle.utils.Mode;
import org.moe.gradle.utils.Require;
import org.moe.gradle.utils.StreamToLogForwarder;
import org.moe.gradle.utils.TaskUtils;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
import java.util.stream.StreamSupport;
public class Launchers {
private static final Logger LOG = Logging.getLogger(Launchers.class);
// Project properties selecting target devices/simulators by UDID.
private static final String MOE_LAUNCHER_DEVICE_UDID_PROPERTY = "moe.launcher.devices";
private static final String MOE_LAUNCHER_SIMULATOR_UDID_PROPERTY = "moe.launcher.simulators";
// Project property carrying a comma-separated list of launcher options.
private static final String MOE_LAUNCHER_OPTIONS_PROPERTY = "moe.launcher.options";
// Option keys accepted inside the 'moe.launcher.options' property value.
private static final String MOE_LAUNCHER_CONFIG_OPTION = "config";
private static final String MOE_LAUNCHER_NO_WAIT_DEVICE_OPTION = "no-wait-device";
private static final String MOE_LAUNCHER_NO_BUILD_OPTION = "no-build";
private static final String MOE_LAUNCHER_NO_LAUNCH_OPTION = "no-launch";
private static final String MOE_LAUNCHER_INSTALL_ON_TARGET_OPTION = "install-on-target";
private static final String MOE_LAUNCHER_NO_INSTALL_ON_TARGET_OPTION = "no-install-on-target";
private static final String MOE_LAUNCHER_DEBUG_OPTION = "debug";
private static final String MOE_LAUNCHER_ENV_OPTION = "env";
private static final String MOE_LAUNCHER_VMARG_OPTION = "vmarg";
private static final String MOE_LAUNCHER_ARG_OPTION = "arg";
private static final String MOE_LAUNCHER_PROXY_OPTION = "proxy";
private static final String MOE_LAUNCHER_RAW_TEST_OUTPUT_OPTION = "raw-test-output";
private static class Options {
boolean build = true;
boolean launch = true;
boolean installOnTarget = true;
boolean waitForDevice = true;
Mode mode = Mode.RELEASE;
Port debug;
final Map<String, String> envs = new HashMap<>();
final List<String> vmargs = new ArrayList<>();
final List<String> args = new ArrayList<>();
final List<Port> proxies = new ArrayList<>();
boolean rawTestOutput = false;
void read(@NotNull Project project) {
Require.nonNull(project);
if (!project.hasProperty(MOE_LAUNCHER_OPTIONS_PROPERTY)) {
return;
}
final List<String> list = getUnescapedValues((String) project.property(MOE_LAUNCHER_OPTIONS_PROPERTY));
for (String optline : list) {
String key, value;
final int sep = optline.indexOf(":");
if (sep == -1) {
key = optline;
value = null;
} else {
key = optline.substring(0, sep);
value = optline.substring(sep + 1);
}
if (MOE_LAUNCHER_CONFIG_OPTION.equals(key)) {
if (!Mode.validateName(value)) {
throw new GradleException("Launcher option '" + MOE_LAUNCHER_CONFIG_OPTION + "' requires one " +
"of the following values: debug, release. Defaults to release. Usage: '" +
MOE_LAUNCHER_CONFIG_OPTION + ":debug'");
}
mode = Mode.getForName(value);
} else if (MOE_LAUNCHER_NO_WAIT_DEVICE_OPTION.equals(key)) {
if (value != null) {
project.getLogger().warn("Ignoring value for launcher option: '" + key + "'");
}
waitForDevice = false;
} else if (MOE_LAUNCHER_NO_BUILD_OPTION.equals(key)) {
if (value != null) {
project.getLogger().warn("Ignoring value for launcher option: '" + key + "'");
}
build = false;
installOnTarget = true;
} else if (MOE_LAUNCHER_NO_LAUNCH_OPTION.equals(key)) {
if (value != null) {
project.getLogger().warn("Ignoring value for launcher option: '" + key + "'");
}
launch = false;
installOnTarget = false;
} else if (MOE_LAUNCHER_INSTALL_ON_TARGET_OPTION.equals(key)) {
if (value != null) {
project.getLogger().warn("Ignoring value for launcher option: '" + key + "'");
}
installOnTarget = true;
} else if (MOE_LAUNCHER_NO_INSTALL_ON_TARGET_OPTION.equals(key)) {
if (value != null) {
project.getLogger().warn("Ignoring value for launcher option: '" + key + "'");
}
installOnTarget = false;
} else if (MOE_LAUNCHER_DEBUG_OPTION.equals(key)) {
if (value == null) {
throw new GradleException("Launcher option '" + MOE_LAUNCHER_DEBUG_OPTION + "' requires a " +
"value in one of the following formats: <local> or <local>:<remote>. Usage: '" +
MOE_LAUNCHER_DEBUG_OPTION + ":5005'");
}
if (value.indexOf(':') != -1) {
debug = new Port(
Integer.parseInt(value.substring(0, value.indexOf(':'))),
Integer.parseInt(value.substring(value.indexOf(':') + 1)));
} else {
debug = new Port(Integer.parseInt(value), null);
}
} else if (MOE_LAUNCHER_ENV_OPTION.equals(key)) {
if (value == null) {
throw new GradleException("Launcher option '" + MOE_LAUNCHER_ENV_OPTION + "' requires a value. " +
"Usage: '" + MOE_LAUNCHER_ENV_OPTION + ":key=value'");
}
final int kvsep = value.indexOf("=");
if (kvsep != -1) {
envs.put(value.substring(0, kvsep), value.substring(kvsep + 1));
} else {
envs.put(value, "");
}
} else if (MOE_LAUNCHER_VMARG_OPTION.equals(key)) {
if (value == null) {
throw new GradleException("Launcher option '" + MOE_LAUNCHER_VMARG_OPTION + "' requires a value. " +
"Usage: '" + MOE_LAUNCHER_VMARG_OPTION + ":-Xhello'");
}
vmargs.add(value);
} else if (MOE_LAUNCHER_ARG_OPTION.equals(key)) {
if (value == null) {
throw new GradleException("Launcher option '" + MOE_LAUNCHER_ARG_OPTION + "' requires a value. " +
"Usage: '" + MOE_LAUNCHER_ARG_OPTION + ":helloworld'");
}
args.add(value);
} else if (MOE_LAUNCHER_PROXY_OPTION.equals(key)) {
if (value == null) {
throw new GradleException("Launcher option '" + MOE_LAUNCHER_PROXY_OPTION + "' requires a " +
"value in one of the following formats: <local> or <local>:<remote>. Usage: '" +
MOE_LAUNCHER_PROXY_OPTION + ":8080'");
}
final Port port;
if (value.indexOf(':') != -1) {
port = new Port(
Integer.parseInt(value.substring(0, value.indexOf(':'))),
Integer.parseInt(value.substring(value.indexOf(':') + 1)));
} else {
port = new Port(Integer.parseInt(value), null);
}
proxies.add(port);
} else if (MOE_LAUNCHER_RAW_TEST_OUTPUT_OPTION.equals(key)) {
if (value != null) {
project.getLogger().warn("Ignoring value for launcher option: '" + key + "'");
}
rawTestOutput = true;
} else {
project.getLogger().warn("Unknown launcher option: '" + key + "'");
}
}
}
/**
 * Splits a comma-separated option string into its segments, honoring
 * backslash escapes: a backslash makes the following character literal
 * (so {@code "a\,b"} yields the single segment {@code "a,b"}) and is
 * itself dropped. Empty segments between commas are kept, but a trailing
 * empty segment is omitted.
 *
 * @param value raw option string, may be null
 * @return list of unescaped segments; empty when {@code value} is null
 */
private static List<String> getUnescapedValues(final String value) {
    final List<String> result = new ArrayList<>();
    if (value == null) {
        return result;
    }
    final StringBuilder current = new StringBuilder(value.length());
    boolean pendingEscape = false;
    for (final char ch : value.toCharArray()) {
        if (pendingEscape) {
            // Previous char was a backslash: take this char literally.
            current.append(ch);
            pendingEscape = false;
        } else if (ch == '\\') {
            pendingEscape = true;
        } else if (ch == ',') {
            result.add(current.toString());
            current.setLength(0);
        } else {
            current.append(ch);
        }
    }
    if (current.length() > 0) {
        result.add(current.toString());
    }
    return result;
}
}
/**
 * Immutable local/remote port pair used for debug and proxy settings.
 * The remote side is optional; when absent, only the local port is
 * rendered by {@link #toString()}.
 */
private static class Port {

    /** Port on the host side. */
    final int local;

    /** Port on the target side, or null when not specified. */
    @Nullable
    final Integer remote;

    Port(int local, @Nullable Integer remote) {
        this.local = local;
        this.remote = remote;
    }

    /** Renders "local" or "local:remote" — the format the launcher CLI consumes. */
    @Override
    public String toString() {
        if (remote == null) {
            return Integer.toString(local);
        }
        return local + ":" + remote;
    }
}
/**
 * Builder for the command line of the MOE iOS device launcher jar.
 * Launch settings are collected via chainable setters and translated into
 * CLI arguments on a Gradle {@link JavaExecSpec} by {@link #build}.
 */
static class DeviceLauncherBuilder {

    // Option names understood by the ios-device launcher jar.
    // @formatter:off
    private static final String UDID_ARG = "--udid";
    private static final String APP_PATH_ARG = "--app-path";
    private static final String LIST_ARG = "--list";
    private static final String WAIT_FOR_DEVICE_ARG = "--wait-for-device";
    private static final String LAUNCH_ARG_ARG = "--launch-arg";
    private static final String ENV_ARG = "--env";
    private static final String PROXY_PORT_ARG = "--proxy-port";
    private static final String DEBUG_ARG = "--debug";
    private static final String INSTALL_MODE_ARG = "--install-mode";
    private static final String NATIVE_DEBUG_ARG = "--native-debug";
    private static final String OUTPUT_ARG = "--output";
    private static final String OUTPUT_FILE_ARG = "--output-file";
    // @formatter:on

    /**
     * Install/run strategy for the launcher. {@link #toString()} returns the
     * exact lowercase spelling the launcher CLI expects, so values can be
     * concatenated directly into arguments.
     */
    enum InstallMode {
        RUN_ONLY, INSTALL, UPGRADE, INSTALL_ONLY, UPGRADE_ONLY;

        @Override
        public String toString() {
            switch (this) {
                case RUN_ONLY:
                    return "runonly";
                case INSTALL:
                    return "install";
                case UPGRADE:
                    return "upgrade";
                case INSTALL_ONLY:
                    return "installonly";
                case UPGRADE_ONLY:
                    return "upgradeonly";
                default:
                    // Unreachable unless a constant is added without a case.
                    throw new IllegalStateException();
            }
        }
    }

    // Target device UDID; null omits --udid (first available device).
    private String udid;
    // Path of the built .app bundle to install/run.
    private File appPath;
    // When true, only list connected devices instead of launching.
    private boolean list;
    // When true, block until a device is attached.
    private boolean waitForDevice;
    // Arguments forwarded to the launched application.
    private final List<String> launchArgs = new ArrayList<>();
    // Environment variables for the launched application.
    private final Map<String, String> envVars = new HashMap<>();
    // Local(/remote) port pairs passed via --proxy-port.
    private final List<Port> proxyPorts = new ArrayList<>();
    // Java debug port pair; null = debugging disabled.
    private Port debug;
    // Install strategy; null keeps the launcher's default.
    private InstallMode installMode;
    // Native debug port; null = disabled.
    private Integer nativeDebug;
    // Output port; null = disabled.
    private Integer output;
    // Output capture file; null = disabled.
    private File outputFile;

    /** Sets the target device UDID; null omits the --udid argument. */
    private DeviceLauncherBuilder setUDID(@Nullable String udid) {
        this.udid = udid;
        return this;
    }

    /** Sets the path of the application bundle to deploy. */
    private DeviceLauncherBuilder setAppPath(@Nullable File appPath) {
        this.appPath = appPath;
        return this;
    }

    /** When true, the launcher only lists connected devices. */
    private DeviceLauncherBuilder setList(boolean list) {
        this.list = list;
        return this;
    }

    /** When true, the launcher waits for a device to be attached. */
    private DeviceLauncherBuilder setWaitForDevice(boolean waitForDevice) {
        this.waitForDevice = waitForDevice;
        return this;
    }

    /** Appends one argument to forward to the launched application. */
    @IgnoreUnused
    private DeviceLauncherBuilder addLaunchArgs(@NotNull String arg) {
        launchArgs.add(Require.nonNull(arg));
        return this;
    }

    /** Sets an environment variable for the app; a null value removes the key. */
    @IgnoreUnused
    private DeviceLauncherBuilder putEnvVar(@NotNull String key, @Nullable String value) {
        if (value == null) {
            envVars.remove(Require.nonNull(key));
        } else {
            envVars.put(Require.nonNull(key), value);
        }
        return this;
    }

    /** Adds a local(/remote) proxy port pair. */
    @IgnoreUnused
    private DeviceLauncherBuilder addProxyPort(int local, @Nullable Integer remote) {
        proxyPorts.add(new Port(local, remote));
        return this;
    }

    /** Enables Java debugging on the given local(/remote) ports. */
    private DeviceLauncherBuilder setDebug(int local, @Nullable Integer remote) {
        debug = new Port(local, remote);
        return this;
    }

    /** Sets the install strategy; null keeps the launcher's default. */
    @IgnoreUnused
    private DeviceLauncherBuilder setInstallMode(@Nullable InstallMode mode) {
        this.installMode = mode;
        return this;
    }

    /** Sets the native debug port; null disables it. */
    @IgnoreUnused
    private DeviceLauncherBuilder setNativeDebug(@Nullable Integer local) {
        nativeDebug = local;
        return this;
    }

    /** Sets the output port; null disables it. */
    @IgnoreUnused
    private DeviceLauncherBuilder setOutput(@Nullable Integer local) {
        output = local;
        return this;
    }

    /** Sets the file to capture output into; null disables it. */
    @IgnoreUnused
    private DeviceLauncherBuilder setOutputFile(@Nullable File file) {
        outputFile = file;
        return this;
    }

    /**
     * Applies the collected settings to the given exec spec. Working dir is
     * the SDK tools directory; the JavaExec "main class" slot is set to
     * {@code -jar} so the launcher jar path, added as the first program
     * argument, is what actually gets executed.
     */
    private void build(@NotNull MoePlugin plugin, @NotNull JavaExecSpec exec) {
        Require.nonNull(plugin);
        Require.nonNull(exec);
        exec.setWorkingDir(plugin.getSDK().getToolsDir().getAbsolutePath());
        exec.setMain("-jar");
        exec.args(plugin.getSDK().getiOSDeviceJar().getAbsolutePath());
        if (udid != null) {
            exec.args(UDID_ARG + "=" + udid);
        }
        if (appPath != null) {
            exec.args(APP_PATH_ARG + "=" + appPath);
        }
        if (list) {
            exec.args(LIST_ARG);
        }
        if (waitForDevice) {
            exec.args(WAIT_FOR_DEVICE_ARG);
        }
        launchArgs.forEach(arg -> exec.args(LAUNCH_ARG_ARG + "=" + arg));
        envVars.forEach((k, v) -> exec.args(ENV_ARG + "=" + k + "=" + v));
        proxyPorts.forEach(port -> exec.args(PROXY_PORT_ARG + "=" + port));
        if (debug != null) {
            exec.args(DEBUG_ARG + "=" + debug);
        }
        if (installMode != null) {
            exec.args(INSTALL_MODE_ARG + "=" + installMode);
        }
        if (nativeDebug != null) {
            exec.args(NATIVE_DEBUG_ARG + "=" + nativeDebug);
        }
        if (output != null) {
            exec.args(OUTPUT_ARG + "=" + output);
        }
        if (outputFile != null) {
            exec.args(OUTPUT_FILE_ARG + "=" + outputFile.getAbsolutePath());
        }
    }
}
/**
 * Builder for the command line of the MOE simulator launcher executable
 * (simlauncher). A reduced variant of {@code DeviceLauncherBuilder}: no
 * proxy ports, install modes or output redirection options are emitted.
 */
private static class SimulatorLauncherBuilder {

    // Option names understood by the simlauncher executable.
    // @formatter:off
    private static final String UDID_ARG = "--udid";
    private static final String APP_PATH_ARG = "--app-path";
    private static final String LAUNCH_ARG_ARG = "--launch-arg";
    private static final String ENV_ARG = "--env";
    private static final String DEBUG_ARG = "--debug";
    // @formatter:on

    // Target simulator UDID; null omits --udid.
    private String udid;
    // Path of the built .app bundle to run.
    private File appPath;
    // Arguments forwarded to the launched application.
    private final List<String> launchArgs = new ArrayList<>();
    // Environment variables for the launched application.
    private final Map<String, String> envVars = new HashMap<>();
    // Java debug port; only the local side is emitted in build().
    private Port debug;

    /** Sets the target simulator UDID; null omits the --udid argument. */
    private SimulatorLauncherBuilder setUDID(@Nullable String udid) {
        this.udid = udid;
        return this;
    }

    /** Sets the path of the application bundle to run. */
    private SimulatorLauncherBuilder setAppPath(@Nullable File appPath) {
        this.appPath = appPath;
        return this;
    }

    /** Appends one argument to forward to the launched application. */
    @IgnoreUnused
    private SimulatorLauncherBuilder addLaunchArgs(@NotNull String arg) {
        launchArgs.add(Require.nonNull(arg));
        return this;
    }

    /** Sets an environment variable for the app; a null value removes the key. */
    @IgnoreUnused
    private SimulatorLauncherBuilder putEnvVar(@NotNull String key, @Nullable String value) {
        if (value == null) {
            envVars.remove(Require.nonNull(key));
        } else {
            envVars.put(Require.nonNull(key), value);
        }
        return this;
    }

    /** Enables Java debugging on the given local port. */
    private SimulatorLauncherBuilder setDebug(int local) {
        debug = new Port(local, null);
        return this;
    }

    /**
     * Applies the collected settings to the given exec spec. Working dir is
     * the SDK tools directory; the executable is the SDK's simlauncher
     * binary. Only the local debug port is passed.
     */
    private void build(@NotNull MoePlugin plugin, @NotNull ExecSpec exec) {
        Require.nonNull(plugin);
        Require.nonNull(exec);
        exec.setWorkingDir(plugin.getSDK().getToolsDir().getAbsolutePath());
        exec.setExecutable(plugin.getSDK().getSimlauncherExec());
        if (udid != null) {
            exec.args(UDID_ARG + "=" + udid);
        }
        if (appPath != null) {
            exec.args(APP_PATH_ARG + "=" + appPath);
        }
        launchArgs.forEach(arg -> exec.args(LAUNCH_ARG_ARG + "=" + arg));
        envVars.forEach((k, v) -> exec.args(ENV_ARG + "=" + k + "=" + v));
        if (debug != null) {
            exec.args(DEBUG_ARG + "=" + debug.local);
        }
    }
}
/**
 * Registers the launcher tasks on the plugin's project: moeListDevices,
 * moeListSimulators, moeLaunch and moeTest. Target devices/simulators come
 * from the moe.launcher.devices / moe.launcher.simulators project
 * properties (comma-separated UDIDs); behavior is further tuned via the
 * moe.launcher.options property (parsed by {@code Options}).
 */
public static void addTasks(@NotNull MoePlugin plugin) {
    Require.nonNull(plugin);
    final Project project = plugin.getProject();
    final TaskContainer tasks = project.getTasks();
    // Collect devices
    final List<String> devices = new ArrayList<>();
    if (project.hasProperty(MOE_LAUNCHER_DEVICE_UDID_PROPERTY)) {
        Arrays.stream(((String) project.property(MOE_LAUNCHER_DEVICE_UDID_PROPERTY)).split(Pattern.quote(",")))
                .map(String::trim)
                .forEach(devices::add);
    }
    // Collect simulators
    final List<String> simulators = new ArrayList<>();
    if (project.hasProperty(MOE_LAUNCHER_SIMULATOR_UDID_PROPERTY)) {
        Arrays.stream(((String) project.property(MOE_LAUNCHER_SIMULATOR_UDID_PROPERTY)).split(Pattern.quote(",")))
                .map(String::trim)
                .forEach(simulators::add);
    }
    // If there were no devices & simulators specified, add null to devices to indicate first available device
    if (devices.size() == 0 && simulators.size() == 0) {
        devices.add(null);
    }
    // Options
    final Options options = new Options();
    options.read(project);
    { // List connected devices
        // Launcher stdout is captured into a buffer and echoed via a QUIET
        // log line after the task ran; stderr is discarded and the exit
        // value is ignored so an empty list does not fail the build.
        final ByteArrayOutputStream baos = new ByteArrayOutputStream();
        tasks.create("moeListDevices", JavaExec.class, exec -> {
            exec.setGroup(AbstractMoePlugin.MOE);
            exec.setDescription("Lists all connected devices.");
            new DeviceLauncherBuilder().setList(true).build(plugin, exec);
            exec.setIgnoreExitValue(true);
            exec.setStandardOutput(baos);
            exec.setErrorOutput(new NullOutputStream());
        }).getActions().add(task -> LOG.quiet("\n" + baos.toString().trim() + "\n"));
    }
    { // List available simulators
        // Simulators can only be queried on macOS when xcrun is available;
        // otherwise an empty list is printed.
        tasks.create("moeListSimulators", DefaultTask.class, exec -> {
            exec.setGroup(AbstractMoePlugin.MOE);
            exec.setDescription("Lists all simulators.");
        }).getActions().add(task -> {
            final String list;
            if (Os.isFamily(Os.FAMILY_MAC) && TaskUtils.checkExec(project, "which", "xcrun")) {
                LOG.info("Initializing");
                try {
                    list = SimCtl.getDevices().stream()
                            .map(d -> "- " + d)
                            .collect(Collectors.joining("\n"));
                } catch (Throwable t) {
                    throw new GradleException("Failed to get list of simulators", t);
                }
            } else {
                list = "";
            }
            LOG.quiet("\nAvailable Simulators:\n" + list.trim() + "\n");
        });
    }
    final SourceSetContainer sourceSets = plugin.getJavaConvention().getSourceSets();
    { // Launch an application on a device or simulator
        final SourceSet sourceSet = sourceSets.getByName(SourceSet.MAIN_SOURCE_SET_NAME);
        // Get XcodeBuild task for device (only when devices are requested)
        final XcodeBuild xcodeBuildDev;
        if (devices.size() > 0) {
            xcodeBuildDev = plugin.getTaskBy(XcodeBuild.class, sourceSet, options.mode,
                    plugin.getExtension().getPlatformType());
        } else {
            xcodeBuildDev = null;
        }
        // Get XcodeBuild task for simulator (only when simulators are requested)
        final XcodeBuild xcodeBuildSim;
        if (simulators.size() > 0) {
            xcodeBuildSim = plugin.getTaskBy(XcodeBuild.class, sourceSet, options.mode,
                    plugin.getExtension().getPlatformType().simulatorPlatform);
        } else {
            xcodeBuildSim = null;
        }
        // Create task
        final Task launch = tasks.create("moeLaunch", task -> {
            task.setGroup(AbstractMoePlugin.MOE);
            task.setDescription("Build and run the MOE application on a device or simulator.");
            setupDevicesAndSimulators(plugin, project, devices, simulators, options, xcodeBuildDev, xcodeBuildSim, task, false);
        });
        // Xcode builds are hard dependencies only when building is enabled
        // ('no-build' not set); ordering is always enforced so a manually
        // requested build still runs first.
        if (options.build && xcodeBuildDev != null) {
            launch.dependsOn(xcodeBuildDev);
        }
        if (xcodeBuildDev != null) {
            launch.mustRunAfter(xcodeBuildDev);
        }
        if (options.build && xcodeBuildSim != null) {
            launch.dependsOn(xcodeBuildSim);
        }
        if (xcodeBuildSim != null) {
            launch.mustRunAfter(xcodeBuildSim);
        }
    }
    { // Launch tests on a device or simulator (same wiring, test source set)
        final SourceSet sourceSet = sourceSets.getByName(SourceSet.TEST_SOURCE_SET_NAME);
        // Get XcodeBuild task for device
        final XcodeBuild xcodeBuildDev;
        if (devices.size() > 0) {
            xcodeBuildDev = plugin.getTaskBy(XcodeBuild.class, sourceSet, options.mode, plugin.getExtension().getPlatformType());
        } else {
            xcodeBuildDev = null;
        }
        // Get XcodeBuild task for simulator
        final XcodeBuild xcodeBuildSim;
        if (simulators.size() > 0) {
            xcodeBuildSim = plugin.getTaskBy(XcodeBuild.class, sourceSet, options.mode, plugin.getExtension().getPlatformType().simulatorPlatform);
        } else {
            xcodeBuildSim = null;
        }
        // Create task
        final Task launch = tasks.create("moeTest", task -> {
            task.setGroup(AbstractMoePlugin.MOE);
            task.setDescription("Build and run tests on a device or simulator.");
            setupDevicesAndSimulators(plugin, project, devices, simulators, options, xcodeBuildDev, xcodeBuildSim, task, true);
        });
        if (options.build && xcodeBuildDev != null) {
            launch.dependsOn(xcodeBuildDev);
        }
        if (xcodeBuildDev != null) {
            launch.mustRunAfter(xcodeBuildDev);
        }
        if (options.build && xcodeBuildSim != null) {
            launch.dependsOn(xcodeBuildSim);
        }
        if (xcodeBuildSim != null) {
            launch.mustRunAfter(xcodeBuildSim);
        }
    }
}
/**
 * OutputStream that feeds complete lines of process output into a
 * {@link JUnitTestCollector}. Bytes are buffered until a '\n' is seen;
 * the newline itself is consumed and the buffered bytes (decoded with the
 * platform default charset) are handed to the collector as one line.
 */
private static class JUnitTestCollectorWriter extends OutputStream {

    private final JUnitTestCollector testCollector;
    private final ByteArrayOutputStream buffer = new ByteArrayOutputStream();

    JUnitTestCollectorWriter(@NotNull JUnitTestCollector testCollector) {
        this.testCollector = testCollector;
    }

    @Override
    public synchronized void write(int b) throws IOException {
        if (b != '\n') {
            buffer.write(b);
            return;
        }
        flushLine();
    }

    @Override
    public synchronized void write(byte[] b, int off, int len) throws IOException {
        if (b == null) {
            throw new NullPointerException();
        }
        if ((off < 0) || (off > b.length) || (len < 0) ||
                ((off + len) > b.length) || ((off + len) < 0)) {
            throw new IndexOutOfBoundsException();
        }
        if (len == 0) {
            return;
        }
        int segmentStart = 0;
        for (int i = 0; i < len; ++i) {
            if (b[off + i] == '\n') {
                // Complete line: everything buffered so far plus this segment.
                buffer.write(b, off + segmentStart, i - segmentStart);
                flushLine();
                segmentStart = i + 1;
            }
        }
        // Keep any trailing partial line for the next write.
        buffer.write(b, off + segmentStart, len - segmentStart);
    }

    @Override
    public synchronized void close() throws IOException {
        // Emit a final, unterminated line if one is pending.
        if (buffer.size() > 0) {
            flushLine();
            buffer.close();
        }
        super.close();
    }

    // Hands the buffered bytes to the collector as one line and clears the buffer.
    private void flushLine() {
        testCollector.appendLine(buffer.toString());
        buffer.reset();
    }
}
/**
 * Appends one launch action per requested device and simulator to the given
 * task, plus (for test runs) a final action that fails the build when any
 * test failed. Xcode build settings are resolved lazily inside the actions
 * so they see the freshly built or cached values at execution time.
 *
 * NOTE(review): xcodeBuildDev/xcodeBuildSim are dereferenced without a null
 * check inside the actions; this relies on the caller passing a non-null
 * XcodeBuild whenever the corresponding target list is non-empty (addTasks
 * guarantees this) — confirm before calling from elsewhere.
 *
 * @param test when true, collect JUnit-style output and write reports
 */
private static void setupDevicesAndSimulators(@NotNull MoePlugin plugin, @NotNull Project project,
@NotNull List<String> devices, @NotNull List<String> simulators,
@NotNull Options options,
@Nullable XcodeBuild xcodeBuildDev, @Nullable XcodeBuild xcodeBuildSim,
@NotNull Task task, boolean test) {
    Require.nonNull(plugin);
    Require.nonNull(project);
    Require.nonNull(devices);
    Require.nonNull(simulators);
    Require.nonNull(options);
    Require.nonNull(task);
    // Failures + errors across all targets, accumulated by the actions below.
    final AtomicInteger numFailedTests = new AtomicInteger();
    final File testOutputDir = project.getBuildDir().toPath().resolve(Paths.get(MoePlugin.MOE, "reports")).toFile();
    if (test) {
        // Start every test run with a clean report directory.
        try {
            FileUtils.deleteFileOrFolder(testOutputDir);
        } catch (IOException e) {
            throw new GradleException(e.getMessage(), e);
        }
        if (!testOutputDir.exists() && !testOutputDir.mkdirs()) {
            throw new GradleException("Failed to create directory " + testOutputDir);
        }
        if (testOutputDir.exists() && !testOutputDir.isDirectory()) {
            throw new GradleException("Expected directory at " + testOutputDir);
        }
    }
    for (String udid : devices) {
        // Nothing to do on the device unless we launch or install.
        if (!options.launch && !options.installOnTarget) {
            continue;
        }
        task.getActions().add(t -> {
            // Get proper Xcode settings: fresh when the build task did work
            // this run, cached values otherwise.
            final Map<String, String> settings;
            if (xcodeBuildDev.getDidWork()) {
                settings = xcodeBuildDev.getXcodeBuildSettings();
            } else {
                settings = xcodeBuildDev.getCachedXcodeBuildSettings();
            }
            // Get app path; test bundles ("...Tests.xctest") map back to the
            // host application's .app bundle.
            String productName = settings.get("FULL_PRODUCT_NAME");
            if (settings.get("FULL_PRODUCT_NAME").endsWith("Tests.xctest")) {
                productName = productName.replace("Tests.xctest", ".app");
            }
            final File appPath = new File(settings.get("BUILT_PRODUCTS_DIR"), productName);
            // Collect JUnit output only for launched test runs without raw output.
            final JUnitTestCollector testCollector;
            if (test && !options.rawTestOutput && options.launch) {
                testCollector = new JUnitTestCollector();
            } else {
                testCollector = null;
            }
            TaskUtils.javaexec(project, exec -> {
                // Create device launcher
                final DeviceLauncherBuilder builder = new DeviceLauncherBuilder().setWaitForDevice(options.waitForDevice);
                if (udid != null) {
                    builder.setUDID(udid);
                }
                if (options.debug != null) {
                    builder.setDebug(options.debug.local, options.debug.remote);
                }
                // install-only and run-only are explicit modes; install+launch
                // falls through to the launcher's default behavior.
                if (options.installOnTarget && !options.launch) {
                    builder.setInstallMode(InstallMode.UPGRADE_ONLY);
                } else if (!options.installOnTarget && options.launch) {
                    builder.setInstallMode(InstallMode.RUN_ONLY);
                }
                options.envs.forEach(builder::putEnvVar);
                options.vmargs.forEach(builder::addLaunchArgs);
                // "-args" marks the boundary between VM args and program args.
                builder.addLaunchArgs("-args");
                options.args.forEach(builder::addLaunchArgs);
                options.proxies.forEach(p -> builder.addProxyPort(p.local, p.remote));
                builder.setAppPath(appPath)
                        .build(plugin, exec);
                if (testCollector != null) {
                    // Feed both output streams into the same JUnit collector.
                    final JUnitTestCollectorWriter writer = new JUnitTestCollectorWriter(testCollector);
                    exec.setStandardOutput(writer);
                    exec.setErrorOutput(writer);
                } else {
                    exec.setStandardOutput(new StreamToLogForwarder(LOG, false));
                    exec.setErrorOutput(new StreamToLogForwarder(LOG, true));
                }
            });
            if (testCollector != null) {
                numFailedTests.getAndAdd(testCollector.getNumFailures() + testCollector.getNumErrors());
                writeJUnitReport(udid == null ? "unknown-device" : udid, testCollector, testOutputDir);
            }
        });
    }
    for (String udid : simulators) {
        // Simulators only support launching; no separate install step.
        if (!options.launch) {
            continue;
        }
        task.getActions().add(t -> {
            // Get proper Xcode settings (same strategy as the device loop).
            final Map<String, String> settings;
            if (xcodeBuildSim.getDidWork()) {
                settings = xcodeBuildSim.getXcodeBuildSettings();
            } else {
                settings = xcodeBuildSim.getCachedXcodeBuildSettings();
            }
            // Get app path
            String productName = settings.get("FULL_PRODUCT_NAME");
            if (settings.get("FULL_PRODUCT_NAME").endsWith("Tests.xctest")) {
                productName = productName.replace("Tests.xctest", ".app");
            }
            final File appPath = new File(settings.get("BUILT_PRODUCTS_DIR"), productName);
            final JUnitTestCollector testCollector;
            if (test && !options.rawTestOutput) {
                testCollector = new JUnitTestCollector();
            } else {
                testCollector = null;
            }
            TaskUtils.exec(project, exec -> {
                // Create simulator launcher
                final SimulatorLauncherBuilder builder = new SimulatorLauncherBuilder();
                if (udid != null) {
                    builder.setUDID(udid);
                }
                if (options.debug != null) {
                    builder.setDebug(options.debug.local);
                }
                options.envs.forEach(builder::putEnvVar);
                options.vmargs.forEach(builder::addLaunchArgs);
                // "-args" marks the boundary between VM args and program args.
                builder.addLaunchArgs("-args");
                options.args.forEach(builder::addLaunchArgs);
                builder.setAppPath(appPath)
                        .build(plugin, exec);
                if (testCollector != null) {
                    final JUnitTestCollectorWriter writer = new JUnitTestCollectorWriter(testCollector);
                    exec.setStandardOutput(writer);
                    exec.setErrorOutput(writer);
                } else {
                    exec.setStandardOutput(new StreamToLogForwarder(LOG, false));
                    exec.setErrorOutput(new StreamToLogForwarder(LOG, true));
                }
            });
            if (testCollector != null) {
                numFailedTests.getAndAdd(testCollector.getNumFailures() + testCollector.getNumErrors());
                writeJUnitReport(udid == null ? "unknown-simulator" : udid, testCollector, testOutputDir);
            }
        });
    }
    if (test) {
        // Fail last so every target gets a chance to run and report.
        task.getActions().add(t -> {
            if (numFailedTests.get() > 0) {
                throw new GradleException(numFailedTests.get() + " tests failed on all targets combined, reports can be found here: " + testOutputDir);
            }
        });
    }
}
/**
 * Writes the collected test results for one target as XML, HTML and plain
 * text reports under {@code out}. Each write failure is logged and does not
 * prevent the remaining reports from being written.
 *
 * @param udid device/simulator identifier, used as the report file name
 * @param testCollector collector holding the parsed test output
 * @param out root report directory; per-format subdirectories are prepared first
 */
private static void writeJUnitReport(@NotNull String udid, @NotNull JUnitTestCollector testCollector,
@NotNull File out) {
    Require.nonNull(udid);
    Require.nonNull(testCollector);
    Require.nonNull(out);
    // Prepare all target directories up front so a directory failure aborts
    // before any report file is written.
    final File xmlDir = prepareOutputSubdir(out, "xml");
    final File htmlDir = prepareOutputSubdir(out, "html");
    final File txtDir = prepareOutputSubdir(out, "txt");
    // XML report
    try {
        FileUtils.write(new File(xmlDir, udid + ".xml"), testCollector.getXMLReport());
    } catch (GradleException ex) {
        LOG.error(ex.getMessage(), ex.getCause());
    }
    // HTML report
    try {
        FileUtils.write(new File(htmlDir, udid + ".html"), testCollector.getHTMLReport(udid));
    } catch (GradleException ex) {
        LOG.error(ex.getMessage(), ex.getCause());
    }
    // Plain text report (complete collected input)
    try {
        FileUtils.write(new File(txtDir, udid + ".txt"), testCollector.getCompleteInput());
    } catch (GradleException ex) {
        LOG.error(ex.getMessage(), ex.getCause());
    }
}
/**
 * Ensures that the report subdirectory {@code parent/name} exists and is a
 * directory, creating it when necessary.
 *
 * @param parent root report directory
 * @param name   subdirectory name (e.g. "xml", "html", "txt")
 * @return the prepared subdirectory
 * @throws GradleException when the path cannot be used as a directory
 */
private static File prepareOutputSubdir(@NotNull File parent, String name) {
    final File dir = new File(parent, name);
    // mkdir returning false is fine when the directory already exists;
    // the explicit checks below catch real failures.
    if (!dir.mkdir()) {
        LOG.info("mkdir failed for " + dir);
    }
    if (!dir.exists()) {
        throw new GradleException("Directory doesn't exist at " + dir);
    }
    if (!dir.isDirectory()) {
        throw new GradleException("Expected directory at " + dir);
    }
    return dir;
}
}
|
src/main/java/org/moe/gradle/tasks/Launchers.java
|
/*
Copyright (C) 2016 Migeran
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.moe.gradle.tasks;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import com.google.gson.JsonParser;
import org.apache.commons.io.output.NullOutputStream;
import org.apache.commons.lang3.tuple.ImmutablePair;
import org.apache.tools.ant.taskdefs.condition.Os;
import org.gradle.api.DefaultTask;
import org.gradle.api.GradleException;
import org.gradle.api.Project;
import org.gradle.api.Task;
import org.gradle.api.logging.Logger;
import org.gradle.api.logging.Logging;
import org.gradle.api.tasks.JavaExec;
import org.gradle.api.tasks.SourceSet;
import org.gradle.api.tasks.SourceSetContainer;
import org.gradle.api.tasks.TaskContainer;
import org.gradle.process.ExecSpec;
import org.gradle.process.JavaExecSpec;
import org.moe.common.utils.SimCtl;
import org.moe.gradle.AbstractMoePlugin;
import org.moe.gradle.MoePlugin;
import org.moe.gradle.anns.IgnoreUnused;
import org.moe.gradle.anns.NotNull;
import org.moe.gradle.anns.Nullable;
import org.moe.gradle.tasks.Launchers.DeviceLauncherBuilder.InstallMode;
import org.moe.gradle.utils.FileUtils;
import org.moe.gradle.utils.JUnitTestCollector;
import org.moe.gradle.utils.Mode;
import org.moe.gradle.utils.Require;
import org.moe.gradle.utils.StreamToLogForwarder;
import org.moe.gradle.utils.TaskUtils;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
import java.util.stream.StreamSupport;
public class Launchers {
private static final Logger LOG = Logging.getLogger(Launchers.class);
private static final String MOE_LAUNCHER_DEVICE_UDID_PROPERTY = "moe.launcher.devices";
private static final String MOE_LAUNCHER_SIMULATOR_UDID_PROPERTY = "moe.launcher.simulators";
private static final String MOE_LAUNCHER_OPTIONS_PROPERTY = "moe.launcher.options";
private static final String MOE_LAUNCHER_CONFIG_OPTION = "config";
private static final String MOE_LAUNCHER_NO_WAIT_DEVICE_OPTION = "no-wait-device";
private static final String MOE_LAUNCHER_NO_BUILD_OPTION = "no-build";
private static final String MOE_LAUNCHER_NO_LAUNCH_OPTION = "no-launch";
private static final String MOE_LAUNCHER_INSTALL_ON_TARGET_OPTION = "install-on-target";
private static final String MOE_LAUNCHER_NO_INSTALL_ON_TARGET_OPTION = "no-install-on-target";
private static final String MOE_LAUNCHER_DEBUG_OPTION = "debug";
private static final String MOE_LAUNCHER_ENV_OPTION = "env";
private static final String MOE_LAUNCHER_VMARG_OPTION = "vmarg";
private static final String MOE_LAUNCHER_ARG_OPTION = "arg";
private static final String MOE_LAUNCHER_PROXY_OPTION = "proxy";
private static final String MOE_LAUNCHER_RAW_TEST_OUTPUT_OPTION = "raw-test-output";
/**
 * Parsed launcher options from the 'moe.launcher.options' project property.
 * The property value is a comma-separated list of entries (commas can be
 * escaped with a backslash); each entry is either a flag ("no-build") or a
 * key:value pair ("debug:5005").
 */
private static class Options {
    // Build the Xcode project before launching (disabled by 'no-build').
    boolean build = true;
    // Launch the app on the target (disabled by 'no-launch').
    boolean launch = true;
    // Install the app on the target ('install-on-target' / 'no-install-on-target').
    boolean installOnTarget = true;
    // Wait for a device to be attached (disabled by 'no-wait-device').
    boolean waitForDevice = true;
    // Build configuration selected by 'config'; defaults to release.
    Mode mode = Mode.RELEASE;
    // Java debug port pair from 'debug', or null when debugging is disabled.
    Port debug;
    // Environment variables from repeated 'env:key=value' entries.
    final Map<String, String> envs = new HashMap<>();
    // VM arguments from repeated 'vmarg' entries.
    final List<String> vmargs = new ArrayList<>();
    // Program arguments from repeated 'arg' entries.
    final List<String> args = new ArrayList<>();
    // Proxy port pairs from repeated 'proxy' entries.
    final List<Port> proxies = new ArrayList<>();
    // Pass test output through unparsed ('raw-test-output').
    boolean rawTestOutput = false;

    /**
     * Reads and parses the options property of the given project. A no-op
     * when the property is absent. Unknown keys log a warning; malformed
     * values throw a GradleException with a usage hint.
     */
    void read(@NotNull Project project) {
        Require.nonNull(project);
        if (!project.hasProperty(MOE_LAUNCHER_OPTIONS_PROPERTY)) {
            return;
        }
        final List<String> list = getUnescapedValues((String) project.property(MOE_LAUNCHER_OPTIONS_PROPERTY));
        for (String optline : list) {
            // Split each entry into key[:value].
            String key, value;
            final int sep = optline.indexOf(":");
            if (sep == -1) {
                key = optline;
                value = null;
            } else {
                key = optline.substring(0, sep);
                value = optline.substring(sep + 1);
            }
            if (MOE_LAUNCHER_CONFIG_OPTION.equals(key)) {
                if (!Mode.validateName(value)) {
                    throw new GradleException("Launcher option '" + MOE_LAUNCHER_CONFIG_OPTION + "' requires one " +
                            "of the following values: debug, release. Defaults to release. Usage: '" +
                            MOE_LAUNCHER_CONFIG_OPTION + ":debug'");
                }
                mode = Mode.getForName(value);
            } else if (MOE_LAUNCHER_NO_WAIT_DEVICE_OPTION.equals(key)) {
                if (value != null) {
                    project.getLogger().warn("Ignoring value for launcher option: '" + key + "'");
                }
                waitForDevice = false;
            } else if (MOE_LAUNCHER_NO_BUILD_OPTION.equals(key)) {
                if (value != null) {
                    project.getLogger().warn("Ignoring value for launcher option: '" + key + "'");
                }
                build = false;
                // NOTE(review): 'no-build' also re-enables installOnTarget,
                // presumably to deploy the previously built artifact —
                // confirm against the documented option semantics.
                installOnTarget = true;
            } else if (MOE_LAUNCHER_NO_LAUNCH_OPTION.equals(key)) {
                if (value != null) {
                    project.getLogger().warn("Ignoring value for launcher option: '" + key + "'");
                }
                // 'no-launch' also disables installation.
                launch = false;
                installOnTarget = false;
            } else if (MOE_LAUNCHER_INSTALL_ON_TARGET_OPTION.equals(key)) {
                if (value != null) {
                    project.getLogger().warn("Ignoring value for launcher option: '" + key + "'");
                }
                installOnTarget = true;
            } else if (MOE_LAUNCHER_NO_INSTALL_ON_TARGET_OPTION.equals(key)) {
                if (value != null) {
                    project.getLogger().warn("Ignoring value for launcher option: '" + key + "'");
                }
                installOnTarget = false;
            } else if (MOE_LAUNCHER_DEBUG_OPTION.equals(key)) {
                if (value == null) {
                    throw new GradleException("Launcher option '" + MOE_LAUNCHER_DEBUG_OPTION + "' requires a " +
                            "value in one of the following formats: <local> or <local>:<remote>. Usage: '" +
                            MOE_LAUNCHER_DEBUG_OPTION + ":5005'");
                }
                // Accepts "<local>" or "<local>:<remote>".
                if (value.indexOf(':') != -1) {
                    debug = new Port(
                            Integer.parseInt(value.substring(0, value.indexOf(':'))),
                            Integer.parseInt(value.substring(value.indexOf(':') + 1)));
                } else {
                    debug = new Port(Integer.parseInt(value), null);
                }
            } else if (MOE_LAUNCHER_ENV_OPTION.equals(key)) {
                if (value == null) {
                    throw new GradleException("Launcher option '" + MOE_LAUNCHER_ENV_OPTION + "' requires a value. " +
                            "Usage: '" + MOE_LAUNCHER_ENV_OPTION + ":key=value'");
                }
                // "key=value" pairs; a bare key maps to the empty string.
                final int kvsep = value.indexOf("=");
                if (kvsep != -1) {
                    envs.put(value.substring(0, kvsep), value.substring(kvsep + 1));
                } else {
                    envs.put(value, "");
                }
            } else if (MOE_LAUNCHER_VMARG_OPTION.equals(key)) {
                if (value == null) {
                    throw new GradleException("Launcher option '" + MOE_LAUNCHER_VMARG_OPTION + "' requires a value. " +
                            "Usage: '" + MOE_LAUNCHER_VMARG_OPTION + ":-Xhello'");
                }
                vmargs.add(value);
            } else if (MOE_LAUNCHER_ARG_OPTION.equals(key)) {
                if (value == null) {
                    throw new GradleException("Launcher option '" + MOE_LAUNCHER_ARG_OPTION + "' requires a value. " +
                            "Usage: '" + MOE_LAUNCHER_ARG_OPTION + ":helloworld'");
                }
                args.add(value);
            } else if (MOE_LAUNCHER_PROXY_OPTION.equals(key)) {
                if (value == null) {
                    throw new GradleException("Launcher option '" + MOE_LAUNCHER_PROXY_OPTION + "' requires a " +
                            "value in one of the following formats: <local> or <local>:<remote>. Usage: '" +
                            MOE_LAUNCHER_PROXY_OPTION + ":8080'");
                }
                // Accepts "<local>" or "<local>:<remote>".
                final Port port;
                if (value.indexOf(':') != -1) {
                    port = new Port(
                            Integer.parseInt(value.substring(0, value.indexOf(':'))),
                            Integer.parseInt(value.substring(value.indexOf(':') + 1)));
                } else {
                    port = new Port(Integer.parseInt(value), null);
                }
                proxies.add(port);
            } else if (MOE_LAUNCHER_RAW_TEST_OUTPUT_OPTION.equals(key)) {
                if (value != null) {
                    project.getLogger().warn("Ignoring value for launcher option: '" + key + "'");
                }
                rawTestOutput = true;
            } else {
                project.getLogger().warn("Unknown launcher option: '" + key + "'");
            }
        }
    }

    /**
     * Splits a comma-separated option string into its segments, honoring
     * backslash escapes (the backslash makes the next character literal and
     * is itself dropped). Empty segments between commas are kept; a
     * trailing empty segment is omitted.
     *
     * @param value raw option string, may be null
     * @return list of unescaped segments; empty when {@code value} is null
     */
    private static List<String> getUnescapedValues(final String value) {
        if (value == null) {
            return new ArrayList<>();
        }
        final List<String> opts = new ArrayList<>();
        final int length = value.length();
        final StringBuilder builder = new StringBuilder(length);
        boolean escaped = false;
        for (int idx = 0; idx < length; ++idx) {
            final char c = value.charAt(idx);
            if (escaped) {
                // Previous char was a backslash: take this char literally.
                escaped = false;
                builder.append(c);
            } else {
                if (c == '\\') {
                    escaped = true;
                } else if (c == ',') {
                    opts.add(builder.toString());
                    builder.replace(0, builder.length(), "");
                } else {
                    builder.append(c);
                }
            }
        }
        if (builder.length() > 0) {
            opts.add(builder.toString());
        }
        return opts;
    }
}
/**
 * Immutable local/remote port pair used for debug and proxy settings.
 * The remote side is optional; when absent, only the local port is
 * rendered by {@link #toString()}.
 */
private static class Port {

    /** Port on the host side. */
    final int local;

    /** Port on the target side, or null when not specified. */
    @Nullable
    final Integer remote;

    Port(int local, @Nullable Integer remote) {
        this.local = local;
        this.remote = remote;
    }

    /** Renders "local" or "local:remote" — the format the launcher CLI consumes. */
    @Override
    public String toString() {
        if (remote == null) {
            return Integer.toString(local);
        }
        return local + ":" + remote;
    }
}
/**
 * Builder for the command line of the MOE iOS device launcher jar.
 * Launch settings are collected via chainable setters and translated into
 * CLI arguments on a Gradle {@link JavaExecSpec} by {@link #build}.
 */
static class DeviceLauncherBuilder {

    // Option names understood by the ios-device launcher jar.
    // @formatter:off
    private static final String UDID_ARG = "--udid";
    private static final String APP_PATH_ARG = "--app-path";
    private static final String LIST_ARG = "--list";
    private static final String WAIT_FOR_DEVICE_ARG = "--wait-for-device";
    private static final String LAUNCH_ARG_ARG = "--launch-arg";
    private static final String ENV_ARG = "--env";
    private static final String PROXY_PORT_ARG = "--proxy-port";
    private static final String DEBUG_ARG = "--debug";
    private static final String INSTALL_MODE_ARG = "--install-mode";
    private static final String NATIVE_DEBUG_ARG = "--native-debug";
    private static final String OUTPUT_ARG = "--output";
    private static final String OUTPUT_FILE_ARG = "--output-file";
    // @formatter:on

    /**
     * Install/run strategy for the launcher. {@link #toString()} returns the
     * exact lowercase spelling the launcher CLI expects, so values can be
     * concatenated directly into arguments.
     */
    enum InstallMode {
        RUN_ONLY, INSTALL, UPGRADE, INSTALL_ONLY, UPGRADE_ONLY;

        @Override
        public String toString() {
            switch (this) {
                case RUN_ONLY:
                    return "runonly";
                case INSTALL:
                    return "install";
                case UPGRADE:
                    return "upgrade";
                case INSTALL_ONLY:
                    return "installonly";
                case UPGRADE_ONLY:
                    return "upgradeonly";
                default:
                    // Unreachable unless a constant is added without a case.
                    throw new IllegalStateException();
            }
        }
    }

    // Target device UDID; null omits --udid (first available device).
    private String udid;
    // Path of the built .app bundle to install/run.
    private File appPath;
    // When true, only list connected devices instead of launching.
    private boolean list;
    // When true, block until a device is attached.
    private boolean waitForDevice;
    // Arguments forwarded to the launched application.
    private final List<String> launchArgs = new ArrayList<>();
    // Environment variables for the launched application.
    private final Map<String, String> envVars = new HashMap<>();
    // Local(/remote) port pairs passed via --proxy-port.
    private final List<Port> proxyPorts = new ArrayList<>();
    // Java debug port pair; null = debugging disabled.
    private Port debug;
    // Install strategy; null keeps the launcher's default.
    private InstallMode installMode;
    // Native debug port; null = disabled.
    private Integer nativeDebug;
    // Output port; null = disabled.
    private Integer output;
    // Output capture file; null = disabled.
    private File outputFile;

    /** Sets the target device UDID; null omits the --udid argument. */
    private DeviceLauncherBuilder setUDID(@Nullable String udid) {
        this.udid = udid;
        return this;
    }

    /** Sets the path of the application bundle to deploy. */
    private DeviceLauncherBuilder setAppPath(@Nullable File appPath) {
        this.appPath = appPath;
        return this;
    }

    /** When true, the launcher only lists connected devices. */
    private DeviceLauncherBuilder setList(boolean list) {
        this.list = list;
        return this;
    }

    /** When true, the launcher waits for a device to be attached. */
    private DeviceLauncherBuilder setWaitForDevice(boolean waitForDevice) {
        this.waitForDevice = waitForDevice;
        return this;
    }

    /** Appends one argument to forward to the launched application. */
    @IgnoreUnused
    private DeviceLauncherBuilder addLaunchArgs(@NotNull String arg) {
        launchArgs.add(Require.nonNull(arg));
        return this;
    }

    /** Sets an environment variable for the app; a null value removes the key. */
    @IgnoreUnused
    private DeviceLauncherBuilder putEnvVar(@NotNull String key, @Nullable String value) {
        if (value == null) {
            envVars.remove(Require.nonNull(key));
        } else {
            envVars.put(Require.nonNull(key), value);
        }
        return this;
    }

    /** Adds a local(/remote) proxy port pair. */
    @IgnoreUnused
    private DeviceLauncherBuilder addProxyPort(int local, @Nullable Integer remote) {
        proxyPorts.add(new Port(local, remote));
        return this;
    }

    /** Enables Java debugging on the given local(/remote) ports. */
    private DeviceLauncherBuilder setDebug(int local, @Nullable Integer remote) {
        debug = new Port(local, remote);
        return this;
    }

    /** Sets the install strategy; null keeps the launcher's default. */
    @IgnoreUnused
    private DeviceLauncherBuilder setInstallMode(@Nullable InstallMode mode) {
        this.installMode = mode;
        return this;
    }

    /** Sets the native debug port; null disables it. */
    @IgnoreUnused
    private DeviceLauncherBuilder setNativeDebug(@Nullable Integer local) {
        nativeDebug = local;
        return this;
    }

    /** Sets the output port; null disables it. */
    @IgnoreUnused
    private DeviceLauncherBuilder setOutput(@Nullable Integer local) {
        output = local;
        return this;
    }

    /** Sets the file to capture output into; null disables it. */
    @IgnoreUnused
    private DeviceLauncherBuilder setOutputFile(@Nullable File file) {
        outputFile = file;
        return this;
    }

    /**
     * Applies the collected settings to the given exec spec. Working dir is
     * the SDK tools directory; the JavaExec "main class" slot is set to
     * {@code -jar} so the launcher jar path, added as the first program
     * argument, is what actually gets executed.
     */
    private void build(@NotNull MoePlugin plugin, @NotNull JavaExecSpec exec) {
        Require.nonNull(plugin);
        Require.nonNull(exec);
        exec.setWorkingDir(plugin.getSDK().getToolsDir().getAbsolutePath());
        exec.setMain("-jar");
        exec.args(plugin.getSDK().getiOSDeviceJar().getAbsolutePath());
        if (udid != null) {
            exec.args(UDID_ARG + "=" + udid);
        }
        if (appPath != null) {
            exec.args(APP_PATH_ARG + "=" + appPath);
        }
        if (list) {
            exec.args(LIST_ARG);
        }
        if (waitForDevice) {
            exec.args(WAIT_FOR_DEVICE_ARG);
        }
        launchArgs.forEach(arg -> exec.args(LAUNCH_ARG_ARG + "=" + arg));
        envVars.forEach((k, v) -> exec.args(ENV_ARG + "=" + k + "=" + v));
        proxyPorts.forEach(port -> exec.args(PROXY_PORT_ARG + "=" + port));
        if (debug != null) {
            exec.args(DEBUG_ARG + "=" + debug);
        }
        if (installMode != null) {
            exec.args(INSTALL_MODE_ARG + "=" + installMode);
        }
        if (nativeDebug != null) {
            exec.args(NATIVE_DEBUG_ARG + "=" + nativeDebug);
        }
        if (output != null) {
            exec.args(OUTPUT_ARG + "=" + output);
        }
        if (outputFile != null) {
            exec.args(OUTPUT_FILE_ARG + "=" + outputFile.getAbsolutePath());
        }
    }
}
/**
 * Builder that assembles the command line for the MOE simulator launcher
 * executable and applies it to an {@link ExecSpec}.
 */
private static class SimulatorLauncherBuilder {

    // @formatter:off
    private static final String UDID_ARG = "--udid";
    private static final String APP_PATH_ARG = "--app-path";
    private static final String LAUNCH_ARG_ARG = "--launch-arg";
    private static final String ENV_ARG = "--env";
    private static final String DEBUG_ARG = "--debug";
    // @formatter:on

    private String udid;
    private File appPath;
    private final List<String> launchArgs = new ArrayList<>();
    private final Map<String, String> envVars = new HashMap<>();
    private Port debug;

    // Target simulator UDID; null lets the launcher pick a simulator.
    private SimulatorLauncherBuilder setUDID(@Nullable String udid) {
        this.udid = udid;
        return this;
    }

    // Path to the built .app bundle to install and run.
    private SimulatorLauncherBuilder setAppPath(@Nullable File appPath) {
        this.appPath = appPath;
        return this;
    }

    @IgnoreUnused
    private SimulatorLauncherBuilder addLaunchArgs(@NotNull String arg) {
        launchArgs.add(Require.nonNull(arg));
        return this;
    }

    /**
     * Sets an environment variable for the launched app; a null value removes
     * a previously added variable instead of storing a null mapping.
     */
    @IgnoreUnused
    private SimulatorLauncherBuilder putEnvVar(@NotNull String key, @Nullable String value) {
        Require.nonNull(key);
        if (value != null) {
            envVars.put(key, value);
        } else {
            envVars.remove(key);
        }
        return this;
    }

    // Simulators only support a local debug port, no remote counterpart.
    private SimulatorLauncherBuilder setDebug(int local) {
        debug = new Port(local, null);
        return this;
    }

    /** Applies the collected options as command-line arguments on the exec spec. */
    private void build(@NotNull MoePlugin plugin, @NotNull ExecSpec exec) {
        Require.nonNull(plugin);
        Require.nonNull(exec);
        exec.setWorkingDir(plugin.getSDK().getToolsDir().getAbsolutePath());
        exec.setExecutable(plugin.getSDK().getSimlauncherExec());
        if (udid != null) {
            exec.args(UDID_ARG + "=" + udid);
        }
        if (appPath != null) {
            exec.args(APP_PATH_ARG + "=" + appPath);
        }
        launchArgs.forEach(launchArg -> exec.args(LAUNCH_ARG_ARG + "=" + launchArg));
        envVars.forEach((key, value) -> exec.args(ENV_ARG + "=" + key + "=" + value));
        if (debug != null) {
            exec.args(DEBUG_ARG + "=" + debug.local);
        }
    }
}
/**
 * Registers the launcher-related Gradle tasks (moeListDevices,
 * moeListSimulators, moeLaunch, moeTest) on the plugin's project.
 *
 * <p>Target selection comes from the comma-separated UDID project properties;
 * when neither property is set, a single {@code null} device entry means
 * "first available device".
 *
 * @param plugin the MOE plugin instance, must not be null
 */
public static void addTasks(@NotNull MoePlugin plugin) {
    Require.nonNull(plugin);
    final Project project = plugin.getProject();
    final TaskContainer tasks = project.getTasks();
    // Collect devices from the comma-separated UDID property (values trimmed).
    final List<String> devices = new ArrayList<>();
    if (project.hasProperty(MOE_LAUNCHER_DEVICE_UDID_PROPERTY)) {
        Arrays.stream(((String) project.property(MOE_LAUNCHER_DEVICE_UDID_PROPERTY)).split(Pattern.quote(",")))
                .map(String::trim)
                .forEach(devices::add);
    }
    // Collect simulators the same way.
    final List<String> simulators = new ArrayList<>();
    if (project.hasProperty(MOE_LAUNCHER_SIMULATOR_UDID_PROPERTY)) {
        Arrays.stream(((String) project.property(MOE_LAUNCHER_SIMULATOR_UDID_PROPERTY)).split(Pattern.quote(",")))
                .map(String::trim)
                .forEach(simulators::add);
    }
    // If there were no devices & simulators specified, add null to devices to indicate first available device
    if (devices.size() == 0 && simulators.size() == 0) {
        devices.add(null);
    }
    // Options read from project properties.
    final Options options = new Options();
    options.read(project);
    { // List connected devices
        // The launcher jar's output is captured and echoed once the task ran.
        final ByteArrayOutputStream baos = new ByteArrayOutputStream();
        tasks.create("moeListDevices", JavaExec.class, exec -> {
            exec.setGroup(AbstractMoePlugin.MOE);
            exec.setDescription("Lists all connected devices.");
            new DeviceLauncherBuilder().setList(true).build(plugin, exec);
            exec.setIgnoreExitValue(true);
            exec.setStandardOutput(baos);
            exec.setErrorOutput(new NullOutputStream());
        }).getActions().add(task -> LOG.quiet("\n" + baos.toString().trim() + "\n"));
    }
    { // List available simulators
        tasks.create("moeListSimulators", DefaultTask.class, exec -> {
            exec.setGroup(AbstractMoePlugin.MOE);
            exec.setDescription("Lists all simulators.");
        }).getActions().add(task -> {
            final String list;
            // Simulators can only be enumerated on macOS with xcrun available.
            if (Os.isFamily(Os.FAMILY_MAC) && TaskUtils.checkExec(project, "which", "xcrun")) {
                LOG.info("Initializing");
                try {
                    list = SimCtl.getDevices().stream()
                            .map(d -> "- " + d)
                            .collect(Collectors.joining("\n"));
                } catch (Throwable t) {
                    throw new GradleException("Failed to get list of simulators", t);
                }
            } else {
                list = "";
            }
            LOG.quiet("\nAvailable Simulators:\n" + list.trim() + "\n");
        });
    }
    final SourceSetContainer sourceSets = plugin.getJavaConvention().getSourceSets();
    { // Launch an application on a device or simulator
        final SourceSet sourceSet = sourceSets.getByName(SourceSet.MAIN_SOURCE_SET_NAME);
        // Get XcodeBuild task for device (only needed when devices are targeted).
        final XcodeBuild xcodeBuildDev;
        if (devices.size() > 0) {
            xcodeBuildDev = plugin.getTaskBy(XcodeBuild.class, sourceSet, options.mode,
                    plugin.getExtension().getPlatformType());
        } else {
            xcodeBuildDev = null;
        }
        // Get XcodeBuild task for simulator (only needed when simulators are targeted).
        final XcodeBuild xcodeBuildSim;
        if (simulators.size() > 0) {
            xcodeBuildSim = plugin.getTaskBy(XcodeBuild.class, sourceSet, options.mode,
                    plugin.getExtension().getPlatformType().simulatorPlatform);
        } else {
            xcodeBuildSim = null;
        }
        // Create task
        final Task launch = tasks.create("moeLaunch", task -> {
            task.setGroup(AbstractMoePlugin.MOE);
            task.setDescription("Build and run the MOE application on a device or simulator.");
            setupDevicesAndSimulators(plugin, project, devices, simulators, options, xcodeBuildDev, xcodeBuildSim, task, false);
        });
        // dependsOn only when a build was requested; mustRunAfter orders the
        // tasks even when the build is triggered some other way.
        if (options.build && xcodeBuildDev != null) {
            launch.dependsOn(xcodeBuildDev);
        }
        if (xcodeBuildDev != null) {
            launch.mustRunAfter(xcodeBuildDev);
        }
        if (options.build && xcodeBuildSim != null) {
            launch.dependsOn(xcodeBuildSim);
        }
        if (xcodeBuildSim != null) {
            launch.mustRunAfter(xcodeBuildSim);
        }
    }
    { // Launch tests on a device or simulator (mirrors the block above for the test source set)
        final SourceSet sourceSet = sourceSets.getByName(SourceSet.TEST_SOURCE_SET_NAME);
        // Get XcodeBuild task for device
        final XcodeBuild xcodeBuildDev;
        if (devices.size() > 0) {
            xcodeBuildDev = plugin.getTaskBy(XcodeBuild.class, sourceSet, options.mode, plugin.getExtension().getPlatformType());
        } else {
            xcodeBuildDev = null;
        }
        // Get XcodeBuild task for simulator
        final XcodeBuild xcodeBuildSim;
        if (simulators.size() > 0) {
            xcodeBuildSim = plugin.getTaskBy(XcodeBuild.class, sourceSet, options.mode, plugin.getExtension().getPlatformType().simulatorPlatform);
        } else {
            xcodeBuildSim = null;
        }
        // Create task
        final Task launch = tasks.create("moeTest", task -> {
            task.setGroup(AbstractMoePlugin.MOE);
            task.setDescription("Build and run tests on a device or simulator.");
            setupDevicesAndSimulators(plugin, project, devices, simulators, options, xcodeBuildDev, xcodeBuildSim, task, true);
        });
        if (options.build && xcodeBuildDev != null) {
            launch.dependsOn(xcodeBuildDev);
        }
        if (xcodeBuildDev != null) {
            launch.mustRunAfter(xcodeBuildDev);
        }
        if (options.build && xcodeBuildSim != null) {
            launch.dependsOn(xcodeBuildSim);
        }
        if (xcodeBuildSim != null) {
            launch.mustRunAfter(xcodeBuildSim);
        }
    }
}
/**
 * OutputStream adapter that splits the launched process's output into lines
 * and forwards each complete line (without the '\n') to a
 * {@link JUnitTestCollector}.
 *
 * <p>NOTE(review): buffer.toString() decodes with the platform default
 * charset — confirm this matches the launcher's output encoding.
 */
private static class JUnitTestCollectorWriter extends OutputStream {
    private final JUnitTestCollector testCollector;
    // Holds the bytes of the current, not-yet-terminated line.
    private final ByteArrayOutputStream buffer = new ByteArrayOutputStream();
    JUnitTestCollectorWriter(@NotNull JUnitTestCollector testCollector) {
        this.testCollector = testCollector;
    }
    /** Buffers a byte; on '\n' the buffered line is emitted and the buffer reset. */
    @Override
    public synchronized void write(int b) throws IOException {
        if (b == '\n') {
            testCollector.appendLine(buffer.toString());
            buffer.reset();
        } else {
            buffer.write(b);
        }
    }
    /**
     * Bulk write with the same bounds contract as
     * {@link OutputStream#write(byte[], int, int)}. Scans the range for
     * newlines, emitting one collector line per '\n' and buffering any
     * trailing partial line for the next write/close.
     */
    @Override
    public synchronized void write(byte[] b, int off, int len) throws IOException {
        if (b == null) {
            throw new NullPointerException();
        } else if ((off < 0) || (off > b.length) || (len < 0) ||
                ((off + len) > b.length) || ((off + len) < 0)) {
            throw new IndexOutOfBoundsException();
        } else if (len == 0) {
            return;
        }
        // Index (relative to off) of the first byte after the last emitted line.
        int start = 0;
        for (int i = start; i < len; ++i) {
            final byte d = b[off + i];
            if (d == '\n') {
                buffer.write(b, off + start, i - start);
                testCollector.appendLine(buffer.toString());
                buffer.reset();
                start = i + 1;
            }
        }
        // Keep the unterminated remainder buffered.
        buffer.write(b, off + start, len - start);
    }
    /** Flushes any final unterminated line to the collector before closing. */
    @Override
    public synchronized void close() throws IOException {
        if (buffer.size() > 0) {
            testCollector.appendLine(buffer.toString());
            buffer.reset();
            buffer.close();
        }
        super.close();
    }
}
/**
 * Adds one task action per targeted device and simulator which installs
 * and/or launches the app (or test bundle), optionally collecting JUnit-style
 * results, and a final action that fails the task when any test failed.
 *
 * <p>NOTE(review): {@code xcodeBuildDev}/{@code xcodeBuildSim} are only
 * dereferenced inside the corresponding target loops; the caller passes a
 * non-null task whenever the matching target list is non-empty — confirm
 * this invariant holds for all call sites.
 *
 * @param plugin        the MOE plugin, must not be null
 * @param project       the Gradle project, must not be null
 * @param devices       device UDIDs (a single null entry means "first available")
 * @param simulators    simulator UDIDs
 * @param options       launch options read from project properties
 * @param xcodeBuildDev XcodeBuild task for device targets, may be null
 * @param xcodeBuildSim XcodeBuild task for simulator targets, may be null
 * @param task          the task to attach the actions to, must not be null
 * @param test          whether this is a test run (collects reports)
 */
private static void setupDevicesAndSimulators(@NotNull MoePlugin plugin, @NotNull Project project,
                                              @NotNull List<String> devices, @NotNull List<String> simulators,
                                              @NotNull Options options,
                                              @Nullable XcodeBuild xcodeBuildDev, @Nullable XcodeBuild xcodeBuildSim,
                                              @NotNull Task task, boolean test) {
    Require.nonNull(plugin);
    Require.nonNull(project);
    Require.nonNull(devices);
    Require.nonNull(simulators);
    Require.nonNull(options);
    Require.nonNull(task);
    // Accumulates failures across all targets; checked in the final action.
    final AtomicInteger numFailedTests = new AtomicInteger();
    final File testOutputDir = project.getBuildDir().toPath().resolve(Paths.get(MoePlugin.MOE, "reports")).toFile();
    if (test) {
        // Start with a clean report directory for this run.
        try {
            FileUtils.deleteFileOrFolder(testOutputDir);
        } catch (IOException e) {
            throw new GradleException(e.getMessage(), e);
        }
        if (!testOutputDir.exists() && !testOutputDir.mkdirs()) {
            throw new GradleException("Failed to create directory " + testOutputDir);
        }
        if (testOutputDir.exists() && !testOutputDir.isDirectory()) {
            throw new GradleException("Expected directory at " + testOutputDir);
        }
    }
    for (String udid : devices) {
        // Nothing to do on devices unless we install and/or launch.
        if (!options.launch && !options.installOnTarget) {
            continue;
        }
        task.getActions().add(t -> {
            // Get proper Xcode settings: live settings when the build ran,
            // cached settings otherwise.
            final Map<String, String> settings;
            if (xcodeBuildDev.getDidWork()) {
                settings = xcodeBuildDev.getXcodeBuildSettings();
            } else {
                settings = xcodeBuildDev.getCachedXcodeBuildSettings();
            }
            // Get app path
            final File appPath = new File(settings.get("BUILT_PRODUCTS_DIR"), settings.get("FULL_PRODUCT_NAME"));
            // Only collect structured test output when launching tests and raw
            // output was not requested.
            final JUnitTestCollector testCollector;
            if (test && !options.rawTestOutput && options.launch) {
                testCollector = new JUnitTestCollector();
            } else {
                testCollector = null;
            }
            TaskUtils.javaexec(project, exec -> {
                // Create device launcher
                final DeviceLauncherBuilder builder = new DeviceLauncherBuilder().setWaitForDevice(options.waitForDevice);
                if (udid != null) {
                    builder.setUDID(udid);
                }
                if (options.debug != null) {
                    builder.setDebug(options.debug.local, options.debug.remote);
                }
                // Pick the install mode matching the requested install/launch combination.
                if (options.installOnTarget && !options.launch) {
                    builder.setInstallMode(InstallMode.UPGRADE_ONLY);
                } else if (!options.installOnTarget && options.launch) {
                    builder.setInstallMode(InstallMode.RUN_ONLY);
                }
                options.envs.forEach(builder::putEnvVar);
                // VM args come first, then "-args" separates the app arguments.
                options.vmargs.forEach(builder::addLaunchArgs);
                builder.addLaunchArgs("-args");
                options.args.forEach(builder::addLaunchArgs);
                options.proxies.forEach(p -> builder.addProxyPort(p.local, p.remote));
                builder.setAppPath(appPath)
                        .build(plugin, exec);
                // Route process output either into the test collector or the log.
                if (testCollector != null) {
                    final JUnitTestCollectorWriter writer = new JUnitTestCollectorWriter(testCollector);
                    exec.setStandardOutput(writer);
                    exec.setErrorOutput(writer);
                } else {
                    exec.setStandardOutput(new StreamToLogForwarder(LOG, false));
                    exec.setErrorOutput(new StreamToLogForwarder(LOG, true));
                }
            });
            if (testCollector != null) {
                numFailedTests.getAndAdd(testCollector.getNumFailures() + testCollector.getNumErrors());
                writeJUnitReport(udid == null ? "unknown-device" : udid, testCollector, testOutputDir);
            }
        });
    }
    for (String udid : simulators) {
        // Simulators only support launching, not install-only.
        if (!options.launch) {
            continue;
        }
        task.getActions().add(t -> {
            // Get proper Xcode settings
            final Map<String, String> settings;
            if (xcodeBuildSim.getDidWork()) {
                settings = xcodeBuildSim.getXcodeBuildSettings();
            } else {
                settings = xcodeBuildSim.getCachedXcodeBuildSettings();
            }
            // Get app path
            final File appPath = new File(settings.get("BUILT_PRODUCTS_DIR"), settings.get("FULL_PRODUCT_NAME"));
            final JUnitTestCollector testCollector;
            if (test && !options.rawTestOutput) {
                testCollector = new JUnitTestCollector();
            } else {
                testCollector = null;
            }
            TaskUtils.exec(project, exec -> {
                // Create simulator launcher
                final SimulatorLauncherBuilder builder = new SimulatorLauncherBuilder();
                if (udid != null) {
                    builder.setUDID(udid);
                }
                if (options.debug != null) {
                    builder.setDebug(options.debug.local);
                }
                options.envs.forEach(builder::putEnvVar);
                options.vmargs.forEach(builder::addLaunchArgs);
                builder.addLaunchArgs("-args");
                options.args.forEach(builder::addLaunchArgs);
                builder.setAppPath(appPath)
                        .build(plugin, exec);
                if (testCollector != null) {
                    final JUnitTestCollectorWriter writer = new JUnitTestCollectorWriter(testCollector);
                    exec.setStandardOutput(writer);
                    exec.setErrorOutput(writer);
                } else {
                    exec.setStandardOutput(new StreamToLogForwarder(LOG, false));
                    exec.setErrorOutput(new StreamToLogForwarder(LOG, true));
                }
            });
            if (testCollector != null) {
                numFailedTests.getAndAdd(testCollector.getNumFailures() + testCollector.getNumErrors());
                writeJUnitReport(udid == null ? "unknown-simulator" : udid, testCollector, testOutputDir);
            }
        });
    }
    if (test) {
        // Final action: fail the task if any target reported test failures.
        task.getActions().add(t -> {
            if (numFailedTests.get() > 0) {
                throw new GradleException(numFailedTests.get() + " tests failed on all targets combined, reports can be found here: " + testOutputDir);
            }
        });
    }
}
/**
 * Writes the collected test results for a single target into the xml, html,
 * and txt report subdirectories. A failure to write one format is logged and
 * does not prevent the other formats from being attempted.
 *
 * @param udid          target identifier used as the report file base name
 * @param testCollector collected results, must not be null
 * @param out           parent report directory, must not be null
 */
private static void writeJUnitReport(@NotNull String udid, @NotNull JUnitTestCollector testCollector,
                                     @NotNull File out) {
    Require.nonNull(udid);
    Require.nonNull(testCollector);
    Require.nonNull(out);
    final File out_xml = prepareOutputSubdir(out, "xml");
    final File out_html = prepareOutputSubdir(out, "html");
    final File out_txt = prepareOutputSubdir(out, "txt");
    writeReport(new File(out_xml, udid + ".xml"), testCollector::getXMLReport);
    writeReport(new File(out_html, udid + ".html"), () -> testCollector.getHTMLReport(udid));
    writeReport(new File(out_txt, udid + ".txt"), testCollector::getCompleteInput);
}

/**
 * Writes a single report file, logging (but not propagating) failures so one
 * broken report format cannot abort the others. Report generation happens
 * inside the try so a failing generator is also caught, matching the
 * previous inline behavior.
 */
private static void writeReport(@NotNull File file, @NotNull java.util.function.Supplier<String> content) {
    try {
        FileUtils.write(file, content.get());
    } catch (GradleException ex) {
        LOG.error(ex.getMessage(), ex.getCause());
    }
}
/**
 * Ensures the named subdirectory exists under {@code parent} and returns it.
 *
 * @param parent parent directory, must not be null
 * @param name   subdirectory name
 * @return the (now existing) subdirectory
 * @throws GradleException if the directory is missing afterwards, or the
 *                         path exists but is not a directory
 */
private static File prepareOutputSubdir(@NotNull File parent, String name) {
    final File out = new File(parent, name);
    // mkdirs() also creates a missing parent chain (mkdir() would fail there);
    // it returns false when the directory already exists, which the checks
    // below tolerate.
    if (!out.mkdirs()) {
        LOG.info("mkdir failed for " + out);
    }
    if (!out.exists()) {
        throw new GradleException("Directory doesn't exist at " + out);
    }
    if (!out.isDirectory()) {
        throw new GradleException("Expected directory at " + out);
    }
    return out;
}
}
|
Added xctest product handling
|
src/main/java/org/moe/gradle/tasks/Launchers.java
|
Added xctest product handling
|
|
Java
|
mit
|
3777f4f7c94baf84ac2140d6abde215bb0b28b14
| 0
|
MineLittlePony/MineLittlePony,MineLittlePony/MineLittlePony
|
package com.minelittlepony.model.gear;
import com.minelittlepony.model.BodyPart;
import com.minelittlepony.model.capabilities.IModel;
import com.minelittlepony.model.capabilities.IModelPegasus;
import com.minelittlepony.pony.data.PonyWearable;
import com.minelittlepony.render.model.PlaneRenderer;
import net.minecraft.client.renderer.GlStateManager;
import net.minecraft.entity.Entity;
import net.minecraft.util.ResourceLocation;
import net.minecraft.util.math.MathHelper;
/**
 * Gear piece that renders a pair of saddle bags and a connecting strap over a
 * pony's back. The bags are lowered slightly while the model is flying with
 * open wings.
 */
public class SaddleBags extends AbstractGear {

    private PlaneRenderer leftBag;
    private PlaneRenderer rightBag;
    private PlaneRenderer strap;

    // When true the bags hang lower; set from wing state in setLivingAnimations.
    private boolean hangLow = false;
    // Vertical render offset, interpolated towards its target in renderPart.
    float dropAmount = 0;

    private IModel model;

    @Override
    public void init(float yOffset, float stretch) {
        leftBag = new PlaneRenderer(this, 56, 19);
        rightBag = new PlaneRenderer(this, 56, 19);
        strap = new PlaneRenderer(this, 56, 19);

        float y = -0.5F;
        int x = 4;
        int z = -1;

        // Strap over the back plus its hanging side pieces (the child planes).
        strap.offset(-x, y + 0.2F, z + 3).around(0, 4, 4)
                .tex(56, 31).top(0, 0, 0, 8, 1, stretch)
                .top(0, 0, 1, 8, 1, stretch)
                .south(0, 0, 2, 8, 1, stretch)
                .north(0, 0, 0, 8, 1, stretch)
                .child(0).offset(0, -3, -0.305F).tex(56, 31)
                .west( 4.0002F, 0, 0, 1, 3, stretch) // 0.0001 is there
                .west( 4.0002F, -1, 0, 1, 3, stretch) // otherwise straps
                .west(-4.0002F, 0, 0, 1, 3, stretch) // clip into the body
                .west(-4.0002F, -1, 0, 1, 3, stretch)
                .rotateAngleX = ROTATE_270;

        // Left bag: front/back faces, two side faces, and a lid child plane.
        leftBag.offset(x, y, z).around(0, 4, 4)
                .tex(56, 25).south(0, 0, 0, 3, 6, stretch)
                .tex(59, 25).south(0, 0, 8, 3, 6, stretch)
                .tex(56, 19) .west(3, 0, 0, 6, 8, stretch)
                .west(0, 0, 0, 6, 8, stretch)
                .child(0).offset(z, y, -x).tex(56, 16)
                .top(0, 0, -3, 8, 3, stretch)
                .tex(56, 22).flipZ().bottom(0, 6, -3, 8, 3, stretch)
                .rotateAngleY = ROTATE_270;

        x += 3;

        // Right bag: mirrored (flip()) copy of the left bag on the other side.
        rightBag.offset(-x, y, z).around(0, 4, 4).flip()
                .tex(56, 25).south(0, 0, 0, 3, 6, stretch)
                .tex(59, 25).south(0, 0, 8, 3, 6, stretch)
                .tex(56, 19).west(3, 0, 0, 6, 8, stretch)
                .west(0, 0, 0, 6, 8, stretch)
                .child(0).offset(z, y, x).tex(56, 16)
                .flipZ().top(0, 0, -3, 8, 3, stretch)
                .tex(56, 22).flipZ().bottom(0, 6, -3, 8, 3, stretch)
                .rotateAngleY = ROTATE_270;
    }

    @Override
    public void setLivingAnimations(IModel model, Entity entity) {
        this.model = model;
        // Bags hang low only for flying-capable pegasus models with open wings.
        hangLow = false;
        if (model instanceof IModelPegasus) {
            hangLow = model.canFly() && ((IModelPegasus)model).wingsAreOpen();
        }
    }

    @Override
    public void setRotationAndAngles(boolean rainboom, float move, float swing, float bodySwing, float ticks) {
        // Sway the bags with the walk cycle; swing^16 sharpens the peak.
        float pi = PI * (float) Math.pow(swing, 16);
        float mve = move * 0.6662f;
        float srt = swing / 10;

        bodySwing = MathHelper.cos(mve + pi) * srt;

        leftBag.rotateAngleX = bodySwing;
        rightBag.rotateAngleX = bodySwing;

        // While flying, roll the bags with the wing flap instead.
        if (model instanceof IModelPegasus && model.isFlying()) {
            bodySwing = ((IModelPegasus)model).getWingRotationFactor(ticks) - ROTATE_270;
            bodySwing /= 10;
        }

        leftBag.rotateAngleZ = bodySwing;
        rightBag.rotateAngleZ = -bodySwing;

        dropAmount = hangLow ? 0.15F : 0;
    }

    // NOTE(review): name breaks camelCase (setHangingLow) but is part of the
    // public interface — keep until callers can be migrated.
    public void sethangingLow(boolean veryLow) {
        hangLow = veryLow;
    }

    @Override
    public void renderPart(float scale) {
        // Smoothly interpolate the vertical drop so the bags don't snap.
        dropAmount = model.getMetadata().getInterpolator().interpolate("dropAmount", dropAmount, 3);

        GlStateManager.pushMatrix();
        GlStateManager.translate(0, dropAmount, 0);
        leftBag.render(scale);
        rightBag.render(scale);
        GlStateManager.popMatrix();

        // The strap stays attached to the body, so it is not translated.
        strap.render(scale);
    }

    @Override
    public boolean canRender(IModel model, Entity entity) {
        return model.isWearing(PonyWearable.SADDLE_BAGS);
    }

    @Override
    public BodyPart getGearLocation() {
        return BodyPart.BODY;
    }

    @Override
    public ResourceLocation getTexture(Entity entity) {
        // use the default
        return null;
    }
}
|
src/main/java/com/minelittlepony/model/gear/SaddleBags.java
|
package com.minelittlepony.model.gear;
import com.minelittlepony.model.BodyPart;
import com.minelittlepony.model.capabilities.IModel;
import com.minelittlepony.model.capabilities.IModelPegasus;
import com.minelittlepony.pony.data.PonyWearable;
import com.minelittlepony.render.model.PlaneRenderer;
import net.minecraft.client.renderer.GlStateManager;
import net.minecraft.entity.Entity;
import net.minecraft.util.ResourceLocation;
import net.minecraft.util.math.MathHelper;
/**
 * Gear piece that renders a pair of saddle bags and a connecting strap over a
 * pony's back. The bags are lowered slightly while the model is flying with
 * open wings.
 */
public class SaddleBags extends AbstractGear {

    private PlaneRenderer leftBag;
    private PlaneRenderer rightBag;
    private PlaneRenderer strap;

    // When true the bags hang lower; set from wing state in setLivingAnimations.
    private boolean hangLow = false;
    // Vertical render offset, interpolated towards its target in renderPart.
    float dropAmount = 0;

    private IModel model;

    @Override
    public void init(float yOffset, float stretch) {
        leftBag = new PlaneRenderer(this, 56, 19);
        rightBag = new PlaneRenderer(this, 56, 19);
        strap = new PlaneRenderer(this, 56, 19);

        float y = -0.5F;
        int x = 4;
        int z = -1;

        // Strap over the back plus its hanging side pieces (the child planes).
        strap.offset(-x, y + 0.2F, z + 3).around(0, 4, 4)
                .tex(56, 31).top(0, 0, 0, 8, 1, stretch)
                .top(0, 0, 1, 8, 1, stretch)
                .south(0, 0, 2, 8, 1, stretch)
                .north(0, 0, 0, 8, 1, stretch)
                .child(0).offset(0, -3, -0.305F).tex(56, 31)
                .west( 4.0002F, 0, 0, 1, 3, stretch) // 0.0001 is there
                .west( 4.0002F, -1, 0, 1, 3, stretch) // otherwise straps
                .west(-4.0002F, 0, 0, 1, 3, stretch) // clip into the body
                .west(-4.0002F, -1, 0, 1, 3, stretch)
                .rotateAngleX = ROTATE_270;

        // Left bag: front/back faces, two side faces, and a lid child plane.
        leftBag.offset(x, y, z).around(0, 4, 4)
                .tex(56, 25).south(0, 0, 0, 3, 6, stretch)
                .tex(59, 25).south(0, 0, 8, 3, 6, stretch)
                .tex(56, 19) .west(3, 0, 0, 6, 8, stretch)
                .west(0, 0, 0, 6, 8, stretch)
                .child(0).offset(z, y, -x).tex(56, 16)
                .top(0, 0, -3, 8, 3, stretch)
                .tex(56, 22).flipZ().bottom(0, 6, -3, 8, 3, stretch)
                .rotateAngleY = ROTATE_270;

        x += 3;

        // Right bag: mirrored copy of the left bag on the other side.
        rightBag.offset(-x, y, z).around(0, 4, 4)
                .tex(56, 25).flip().south(0, 0, 0, 3, 6, stretch)
                .tex(59, 25).south(0, 0, 8, 3, 6, stretch)
                .tex(56, 19).west(3, 0, 0, 6, 8, stretch)
                .west(0, 0, 0, 6, 8, stretch)
                .child(0).offset(z, y, x).tex(56, 16)
                .flipZ().top(0, 0, -3, 8, 3, stretch)
                .tex(56, 22).flipZ().bottom(0, 6, -3, 8, 3, stretch)
                .rotateAngleY = ROTATE_270;
    }

    @Override
    public void setLivingAnimations(IModel model, Entity entity) {
        this.model = model;
        // Bags hang low only for flying-capable pegasus models with open wings.
        hangLow = false;
        if (model instanceof IModelPegasus) {
            hangLow = model.canFly() && ((IModelPegasus)model).wingsAreOpen();
        }
    }

    @Override
    public void setRotationAndAngles(boolean rainboom, float move, float swing, float bodySwing, float ticks) {
        // Sway the bags with the walk cycle; swing^16 sharpens the peak.
        float pi = PI * (float) Math.pow(swing, 16);
        float mve = move * 0.6662f;
        float srt = swing / 10;

        bodySwing = MathHelper.cos(mve + pi) * srt;

        leftBag.rotateAngleX = bodySwing;
        rightBag.rotateAngleX = bodySwing;

        // While flying, roll the bags with the wing flap instead.
        if (model instanceof IModelPegasus && model.isFlying()) {
            bodySwing = ((IModelPegasus)model).getWingRotationFactor(ticks) - ROTATE_270;
            bodySwing /= 10;
        }

        leftBag.rotateAngleZ = bodySwing;
        rightBag.rotateAngleZ = -bodySwing;

        dropAmount = hangLow ? 0.15F : 0;
    }

    // NOTE(review): name breaks camelCase (setHangingLow) but is part of the
    // public interface — keep until callers can be migrated.
    public void sethangingLow(boolean veryLow) {
        hangLow = veryLow;
    }

    @Override
    public void renderPart(float scale) {
        // Smoothly interpolate the vertical drop so the bags don't snap.
        dropAmount = model.getMetadata().getInterpolator().interpolate("dropAmount", dropAmount, 3);

        GlStateManager.pushMatrix();
        GlStateManager.translate(0, dropAmount, 0);
        leftBag.render(scale);
        rightBag.render(scale);
        GlStateManager.popMatrix();

        // The strap stays attached to the body, so it is not translated.
        strap.render(scale);
    }

    @Override
    public boolean canRender(IModel model, Entity entity) {
        return model.isWearing(PonyWearable.SADDLE_BAGS);
    }

    @Override
    public BodyPart getGearLocation() {
        return BodyPart.BODY;
    }

    @Override
    public ResourceLocation getTexture(Entity entity) {
        // use the default
        return null;
    }
}
|
As far as I understand, .tex() and .flip() can change places
|
src/main/java/com/minelittlepony/model/gear/SaddleBags.java
|
As far as I understand, .tex() and .flip() can change places
|
|
Java
|
mit
|
a0d268699532bf611e5f7fc1168c5bf5bbe9bf19
| 0
|
punkbrwstr/pinto,punkbrwstr/pinto,punkbrwstr/pinto
|
package tech.pinto;
import java.util.Map;
import java.util.function.Supplier;
import com.google.common.collect.ImmutableMap;
import tech.pinto.function.FunctionFactory;
import tech.pinto.function.FunctionHelp;
import tech.pinto.function.intermediate.Clear;
import tech.pinto.function.intermediate.Copy;
import tech.pinto.function.intermediate.Cross;
import tech.pinto.function.intermediate.DoubleCollectors;
import tech.pinto.function.intermediate.BinaryOperator;
import tech.pinto.function.intermediate.Expanding;
import tech.pinto.function.intermediate.Fill;
import tech.pinto.function.intermediate.Label;
import tech.pinto.function.intermediate.Reverse;
import tech.pinto.function.intermediate.Roll;
import tech.pinto.function.intermediate.Rolling;
import tech.pinto.function.intermediate.RollingCorrelation;
import tech.pinto.function.intermediate.UnaryOperator;
import tech.pinto.function.supplier.MoonPhase;
import tech.pinto.function.supplier.Yahoo;
import tech.pinto.function.terminal.Delete;
import tech.pinto.function.terminal.Evaluate;
import tech.pinto.function.terminal.Execute;
import tech.pinto.function.terminal.Export;
import tech.pinto.function.terminal.Help;
import tech.pinto.function.terminal.Save;
public class StandardVocabulary implements Vocabulary {
private final Map<String,FunctionFactory> commands =
new ImmutableMap.Builder<String, FunctionFactory>()
.put("eval", (c,i,s,a) -> new Evaluate(i,a))
.put("export", (c,i,s,a) -> new Export(i,a))
.put("def", (c,i,s,a) -> new Save(c,s,a))
.put("del", (c,i,s,a) -> new Delete(c,i,a))
.put("help", (c,i,s,a) -> new Help(c,this,i,a))
.put("exec", (c,i,s,a) -> new Execute(c,this,i,a))
.put("+", (c,i,s,a) -> new BinaryOperator("+",i, (x,y) -> x + y))
.put("-", (c,i,s,a) -> new BinaryOperator("-",i, (x,y) -> x - y))
.put("*", (c,i,s,a) -> new BinaryOperator("*",i, (x,y) -> x * y))
.put("/", (c,i,s,a) -> new BinaryOperator("/",i, (x,y) -> x / y))
.put("%", (c,i,s,a) -> new BinaryOperator("%",i, (x,y) -> x % y))
.put("==", (c,i,s,a) -> new BinaryOperator("==",i, (x,y) -> x == y ? 1.0 : 0.0))
.put("!=", (c,i,s,a) -> new BinaryOperator("!=",i, (x,y) -> x != y ? 1.0 : 0.0))
.put(">", (c,i,s,a) -> new BinaryOperator(">",i, (x,y) -> x > y ? 1.0 : 0.0))
.put("<", (c,i,s,a) -> new BinaryOperator("<",i, (x,y) -> x < y ? 1.0 : 0.0))
.put(">=", (c,i,s,a) -> new BinaryOperator(">=",i, (x,y) -> x >= y ? 1.0 : 0.0))
.put("<=", (c,i,s,a) -> new BinaryOperator("<=",i, (x,y) -> x <= y ? 1.0 : 0.0))
.put("abs", (c,i,s,a) -> new UnaryOperator("abs",i, x -> Math.abs(x)))
.put("neg", (c,i,s,a) -> new UnaryOperator("neg",i, x -> x * -1d))
.put("inv", (c,i,s,a) -> new UnaryOperator("inv",i, x -> 1.0 / x))
.put("log", (c,i,s,a) -> new UnaryOperator("log",i, x -> Math.log(x)))
.put("exp", (c,i,s,a) -> new UnaryOperator("exp",i, x -> Math.exp(x)))
.put("acgbConvert", (c,j,s,a) -> new UnaryOperator("acgbConvert",j,
quote -> {
double TERM = 10, RATE = 6, price = 0;
for (int i = 0; i < TERM * 2; i++) {
price += RATE / 2 / Math.pow(1 + (100 - quote) / 2 / 100, i + 1);
}
price += 100 / Math.pow(1 + (100 - quote) / 2 / 100, TERM * 2);
return price; }))
.put("moon", (c,i,s,a) -> new MoonPhase())
.put("label", (c,i,s,a) -> new Label(i,a))
.put("rev", (c,i,s,a) -> new Reverse(i))
.put("copy", (c,i,s,a) -> new Copy(i,a))
.put("roll", (c,i,s,a) -> new Roll(i,a))
.put("clear", (c,i,s,a) -> new Clear(i,a))
.put("yhoo", (c,i,s,a) -> new Yahoo(c,i,a))
.put("last", (c,i,s,a) -> new Rolling("last",i,DoubleCollectors.last, false, a))
.put("r_lag", (c,i,s,a) -> new Rolling("r_lag",i,DoubleCollectors.first,false, a))
.put("r_chg", (c,i,s,a) -> new Rolling("r_chg",i,DoubleCollectors.change,false, a))
.put("e_chg", (c,i,s,a) -> new Expanding("e_chg",i,DoubleCollectors.change, a))
.put("r_chgpct", (c,i,s,a) -> new Rolling("r_chgpct",i,DoubleCollectors.changepct,false, a))
.put("e_chgpct", (c,i,s,a) -> new Expanding("e_chgpct",i,DoubleCollectors.changepct, a))
.put("r_chglog", (c,i,s,a) -> new Rolling("r_chglog",i,DoubleCollectors.changelog,false, a))
.put("e_chglog", (c,i,s,a) -> new Expanding("e_chglog",i,DoubleCollectors.changelog, a))
.put("r_mean", (c,i,s,a) -> new Rolling("r_mean",i,DoubleCollectors.average, true, a))
.put("e_mean", (c,i,s,a) -> new Expanding("e_mean",i,DoubleCollectors.average, a))
.put("x_mean", (c,i,s,a) -> new Cross("x_mean",i,DoubleCollectors.average, a))
.put("r_max", (c,i,s,a) -> new Rolling("r_max",i,DoubleCollectors.max, true, a))
.put("e_max", (c,i,s,a) -> new Expanding("e_max",i,DoubleCollectors.max, a))
.put("x_max", (c,i,s,a) -> new Cross("x_max",i,DoubleCollectors.max, a))
.put("r_min", (c,i,s,a) -> new Rolling("r_max",i,DoubleCollectors.min, true, a))
.put("e_min", (c,i,s,a) -> new Expanding("e_max",i,DoubleCollectors.min, a))
.put("x_min", (c,i,s,a) -> new Cross("x_max",i,DoubleCollectors.min, a))
.put("r_sum", (c,i,s,a) -> new Rolling("r_sum",i,DoubleCollectors.sum, true, a))
.put("e_sum", (c,i,s,a) -> new Expanding("e_sum",i,DoubleCollectors.sum, a))
.put("x_sum", (c,i,s,a) -> new Cross("x_sum",i,DoubleCollectors.sum, a))
.put("r_geomean", (c,i,s,a) -> new Rolling("r_geomean",i,DoubleCollectors.geomean, true, a))
.put("e_geomean", (c,i,s,a) -> new Expanding("e_geomean",i,DoubleCollectors.geomean, a))
.put("x_geomean", (c,i,s,a) -> new Cross("x_geomean",i,DoubleCollectors.geomean, a))
.put("r_var", (c,i,s,a) -> new Rolling("r_var",i,DoubleCollectors.var, true,a))
.put("e_var", (c,i,s,a) -> new Expanding("e_var",i,DoubleCollectors.var, a))
.put("x_var", (c,i,s,a) -> new Cross("x_var",i,DoubleCollectors.var,a))
.put("r_varp", (c,i,s,a) -> new Rolling("r_varp",i,DoubleCollectors.varp, true, a))
.put("e_varp", (c,i,s,a) -> new Expanding("e_varp",i,DoubleCollectors.varp, a))
.put("x_varp", (c,i,s,a) -> new Cross("x_varp",i,DoubleCollectors.varp, a))
.put("r_std", (c,i,s,a) -> new Rolling("r_std",i,DoubleCollectors.stdev, true, a))
.put("e_std", (c,i,s,a) -> new Expanding("e_std",i,DoubleCollectors.stdev, a))
.put("x_std", (c,i,s,a) -> new Cross("x_std",i,DoubleCollectors.stdev, a))
.put("r_stdp", (c,i,s,a) -> new Rolling("r_stdp",i,DoubleCollectors.stdevp, true, a))
.put("e_stdp", (c,i,s,a) -> new Expanding("e_stdp",i,DoubleCollectors.stdevp, a))
.put("x_stdp", (c,i,s,a) -> new Cross("x_stdp",i,DoubleCollectors.stdevp, a))
.put("r_zscore", (c,i,s,a) -> new Rolling("r_zscore",i,DoubleCollectors.zscore, true, a))
.put("e_zscore", (c,i,s,a) -> new Expanding("e_zscore",i,DoubleCollectors.zscore, a))
.put("x_zscore", (c,i,s,a) -> new Cross("x_zscore",i,DoubleCollectors.zscore, a))
.put("r_zscorep", (c,i,s,a) -> new Rolling("r_zscorep",i,DoubleCollectors.zscorep, true, a))
.put("e_zscorep", (c,i,s,a) -> new Expanding("e_zscorep",i,DoubleCollectors.zscorep, a))
.put("x_zscorep", (c,i,s,a) -> new Cross("x_zscorep",i,DoubleCollectors.zscorep, a))
.put("fill", (c,i,s,a) -> new Fill(i,a))
.put("correl", (c,i,s,a) -> new RollingCorrelation(i,a))
.build();
private final Map<String,Supplier<FunctionHelp>> commandHelp =
new ImmutableMap.Builder<String, Supplier<FunctionHelp>>()
/* terminal commands */
.put("eval", Evaluate.getHelp())
.put("export", Export.getHelp())
.put("def", Save.getHelp())
.put("help", Help.getHelp())
.put("del", Delete.getHelp())
/* stack manipulation commands */
.put("label", Label.getHelp())
.put("copy", Copy.getHelp())
.put("roll", Roll.getHelp())
.put("clear", Clear.getHelp())
/* initial data commands */
.put("yhoo", Yahoo.getHelp())
.put("moon", MoonPhase.getHelp())
/* rolling window commands */
.put("chg",Rolling.getHelp("chg", "change"))
.put("chg_pct",Rolling.getHelp("chg_pct", "change in percent"))
.put("chg_log",Rolling.getHelp("chg_log", "log change"))
.put("r_mean",Rolling.getHelp("r_mean", "mean"))
.put("r_max",Rolling.getHelp("r_max", "maximum"))
.put("r_min",Rolling.getHelp("r_min", "minimum"))
.put("r_sum",Rolling.getHelp("r_sum", "sum"))
.put("r_geomean",Rolling.getHelp("r_geomean", "geometric mean"))
.put("r_var",Rolling.getHelp("r_var", "sample variance"))
.put("r_varp",Rolling.getHelp("r_varp", "variance"))
.put("r_std",Rolling.getHelp("r_std", "sample standard deviation"))
.put("r_zscorep",Rolling.getHelp("r_zscorep", "z-score"))
.put("r_zscore",Rolling.getHelp("r_zscore", "sample z-score"))
.put("r_stdp",Rolling.getHelp("r_stdp", "standard deviation"))
.put("correl",Rolling.getHelp("correl", "average correlation"))
/* cross commands */
.put("x_mean",Cross.getHelp("x_mean", "mean"))
.put("x_max",Cross.getHelp("x_max", "maximum"))
.put("x_min",Cross.getHelp("x_min", "minimum"))
.put("x_sum",Cross.getHelp("x_sum", "sum"))
.put("x_geomean",Cross.getHelp("x_geomean", "geometric mean"))
.put("x_var",Cross.getHelp("x_var", "sample variance"))
.put("x_varp",Cross.getHelp("x_varp", "variance"))
.put("x_std",Cross.getHelp("x_std", "sample standard deviation"))
.put("x_zscorep",Cross.getHelp("x_zscorep", "z-score"))
.put("x_zscore",Cross.getHelp("x_zscore", "sample z-score"))
.put("x_stdp",Cross.getHelp("r_mean", "mean"))
/* expanding commands */
.put("e_mean",Rolling.getHelp("e_mean", "mean"))
.put("e_max",Expanding.getHelp("e_max", "maximum"))
.put("e_min",Expanding.getHelp("e_min", "minimum"))
.put("e_sum",Expanding.getHelp("e_sum", "sum"))
.put("e_geomean",Expanding.getHelp("e_geomean", "geometric mean"))
.put("e_var",Expanding.getHelp("e_var", "sample variance"))
.put("e_varp",Expanding.getHelp("e_varp", "variance"))
.put("e_std",Expanding.getHelp("e_std", "sample standard deviation"))
.put("e_zscorep",Expanding.getHelp("e_zscorep", "z-score"))
.put("e_zscore",Expanding.getHelp("e_zscore", "sample z-score"))
.put("e_stdp",Expanding.getHelp("e_stdp", "standard deviation"))
/* other commands */
.put("fill",Fill.getHelp())
/* binary operators */
.put("+",BinaryOperator.getHelp("+", "addition"))
.put("-",BinaryOperator.getHelp("-", "subtraction"))
.put("/",BinaryOperator.getHelp("/", "division"))
.put("*",BinaryOperator.getHelp("*", "multiplication"))
.put("%",BinaryOperator.getHelp("%", "modulo"))
.put("==",BinaryOperator.getHelp("==", "equals"))
.put("!=",BinaryOperator.getHelp("!=", "not equals"))
.put(">",BinaryOperator.getHelp(">", "greater than"))
.put("<",BinaryOperator.getHelp("<", "less than"))
.put(">=",BinaryOperator.getHelp(">=", "greater than or equal to"))
.put("<=",BinaryOperator.getHelp("<=", "less than or equal to"))
.put("abs",UnaryOperator.getHelp("abs", "absolute value"))
.put("neg",UnaryOperator.getHelp("neg", "negation"))
.put("inv",UnaryOperator.getHelp("inv", "inverse"))
.put("log",UnaryOperator.getHelp("log", "natural log"))
.build();
// Default constructor; no initialization required here — the command and
// help maps visible above are built in field initializers.
public StandardVocabulary() {
}
/**
 * @return map from command name to the factory that instantiates the function
 */
@Override
public Map<String, FunctionFactory> getCommandMap() {
return commands;
}
/**
 * @return map from command name to a supplier of that command's help text
 */
@Override
public Map<String, Supplier<FunctionHelp>> getCommandHelpMap() {
return commandHelp;
}
}
|
pinto-lang/src/main/java/tech/pinto/StandardVocabulary.java
|
package tech.pinto;
import java.util.Map;
import java.util.function.Supplier;
import com.google.common.collect.ImmutableMap;
import tech.pinto.function.FunctionFactory;
import tech.pinto.function.FunctionHelp;
import tech.pinto.function.intermediate.Clear;
import tech.pinto.function.intermediate.Copy;
import tech.pinto.function.intermediate.Cross;
import tech.pinto.function.intermediate.DoubleCollectors;
import tech.pinto.function.intermediate.BinaryOperator;
import tech.pinto.function.intermediate.Expanding;
import tech.pinto.function.intermediate.Fill;
import tech.pinto.function.intermediate.Label;
import tech.pinto.function.intermediate.Reverse;
import tech.pinto.function.intermediate.Roll;
import tech.pinto.function.intermediate.Rolling;
import tech.pinto.function.intermediate.RollingCorrelation;
import tech.pinto.function.intermediate.UnaryOperator;
import tech.pinto.function.supplier.MoonPhase;
import tech.pinto.function.supplier.Yahoo;
import tech.pinto.function.terminal.Delete;
import tech.pinto.function.terminal.Evaluate;
import tech.pinto.function.terminal.Execute;
import tech.pinto.function.terminal.Export;
import tech.pinto.function.terminal.Help;
import tech.pinto.function.terminal.Save;
/**
 * The standard, built-in vocabulary of Pinto commands.
 * <p>
 * {@code commands} maps each command name to a {@link FunctionFactory} that
 * instantiates the function; {@code commandHelp} maps each command name to a
 * supplier of its {@link FunctionHelp}.  Factory lambdas take four arguments
 * {@code (c, i, s, a)} — NOTE(review): presumably cache, inputs, saved state
 * and arguments; confirm against {@link FunctionFactory}.
 */
public class StandardVocabulary implements Vocabulary {

    /** Command name -> factory that instantiates the function. */
    private final Map<String, FunctionFactory> commands =
        new ImmutableMap.Builder<String, FunctionFactory>()
            .put("eval", (c,i,s,a) -> new Evaluate(i,a))
            .put("export", (c,i,s,a) -> new Export(i,a))
            .put("def", (c,i,s,a) -> new Save(c,s,a))
            .put("del", (c,i,s,a) -> new Delete(c,i,a))
            .put("help", (c,i,s,a) -> new Help(c,this,i,a))
            .put("exec", (c,i,s,a) -> new Execute(c,this,i,a))
            .put("+", (c,i,s,a) -> new BinaryOperator("+",i, (x,y) -> x + y))
            .put("-", (c,i,s,a) -> new BinaryOperator("-",i, (x,y) -> x - y))
            .put("*", (c,i,s,a) -> new BinaryOperator("*",i, (x,y) -> x * y))
            .put("/", (c,i,s,a) -> new BinaryOperator("/",i, (x,y) -> x / y))
            .put("%", (c,i,s,a) -> new BinaryOperator("%",i, (x,y) -> x % y))
            .put("==", (c,i,s,a) -> new BinaryOperator("==",i, (x,y) -> x == y ? 1.0 : 0.0))
            .put("!=", (c,i,s,a) -> new BinaryOperator("!=",i, (x,y) -> x != y ? 1.0 : 0.0))
            .put(">", (c,i,s,a) -> new BinaryOperator(">",i, (x,y) -> x > y ? 1.0 : 0.0))
            .put("<", (c,i,s,a) -> new BinaryOperator("<",i, (x,y) -> x < y ? 1.0 : 0.0))
            .put(">=", (c,i,s,a) -> new BinaryOperator(">=",i, (x,y) -> x >= y ? 1.0 : 0.0))
            .put("<=", (c,i,s,a) -> new BinaryOperator("<=",i, (x,y) -> x <= y ? 1.0 : 0.0))
            .put("abs", (c,i,s,a) -> new UnaryOperator("abs",i, x -> Math.abs(x)))
            .put("neg", (c,i,s,a) -> new UnaryOperator("neg",i, x -> x * -1d))
            .put("inv", (c,i,s,a) -> new UnaryOperator("inv",i, x -> 1.0 / x))
            .put("log", (c,i,s,a) -> new UnaryOperator("log",i, x -> Math.log(x)))
            // Converts an ACGB bond futures quote (100 - yield) to a price by
            // discounting semi-annual coupons over a fixed 10y term / 6% rate.
            .put("acgbConvert", (c,j,s,a) -> new UnaryOperator("acgbConvert",j,
                quote -> {
                    double TERM = 10, RATE = 6, price = 0;
                    for (int i = 0; i < TERM * 2; i++) {
                        price += RATE / 2 / Math.pow(1 + (100 - quote) / 2 / 100, i + 1);
                    }
                    price += 100 / Math.pow(1 + (100 - quote) / 2 / 100, TERM * 2);
                    return price; }))
            .put("moon", (c,i,s,a) -> new MoonPhase())
            .put("label", (c,i,s,a) -> new Label(i,a))
            .put("rev", (c,i,s,a) -> new Reverse(i))
            .put("copy", (c,i,s,a) -> new Copy(i,a))
            .put("roll", (c,i,s,a) -> new Roll(i,a))
            .put("clear", (c,i,s,a) -> new Clear(i,a))
            .put("yhoo", (c,i,s,a) -> new Yahoo(c,i,a))
            .put("last", (c,i,s,a) -> new Rolling("last",i,DoubleCollectors.last, false, a))
            .put("r_lag", (c,i,s,a) -> new Rolling("r_lag",i,DoubleCollectors.first,false, a))
            .put("r_chg", (c,i,s,a) -> new Rolling("r_chg",i,DoubleCollectors.change,false, a))
            .put("e_chg", (c,i,s,a) -> new Expanding("e_chg",i,DoubleCollectors.change, a))
            .put("r_chgpct", (c,i,s,a) -> new Rolling("r_chgpct",i,DoubleCollectors.changepct,false, a))
            .put("e_chgpct", (c,i,s,a) -> new Expanding("e_chgpct",i,DoubleCollectors.changepct, a))
            .put("r_chglog", (c,i,s,a) -> new Rolling("r_chglog",i,DoubleCollectors.changelog,false, a))
            .put("e_chglog", (c,i,s,a) -> new Expanding("e_chglog",i,DoubleCollectors.changelog, a))
            .put("r_mean", (c,i,s,a) -> new Rolling("r_mean",i,DoubleCollectors.average, true, a))
            .put("e_mean", (c,i,s,a) -> new Expanding("e_mean",i,DoubleCollectors.average, a))
            .put("x_mean", (c,i,s,a) -> new Cross("x_mean",i,DoubleCollectors.average, a))
            .put("r_max", (c,i,s,a) -> new Rolling("r_max",i,DoubleCollectors.max, true, a))
            .put("e_max", (c,i,s,a) -> new Expanding("e_max",i,DoubleCollectors.max, a))
            .put("x_max", (c,i,s,a) -> new Cross("x_max",i,DoubleCollectors.max, a))
            // BUG FIX: the min functions were constructed with the "*_max"
            // names (copy-paste error); their names now match their keys.
            .put("r_min", (c,i,s,a) -> new Rolling("r_min",i,DoubleCollectors.min, true, a))
            .put("e_min", (c,i,s,a) -> new Expanding("e_min",i,DoubleCollectors.min, a))
            .put("x_min", (c,i,s,a) -> new Cross("x_min",i,DoubleCollectors.min, a))
            .put("r_sum", (c,i,s,a) -> new Rolling("r_sum",i,DoubleCollectors.sum, true, a))
            .put("e_sum", (c,i,s,a) -> new Expanding("e_sum",i,DoubleCollectors.sum, a))
            .put("x_sum", (c,i,s,a) -> new Cross("x_sum",i,DoubleCollectors.sum, a))
            .put("r_geomean", (c,i,s,a) -> new Rolling("r_geomean",i,DoubleCollectors.geomean, true, a))
            .put("e_geomean", (c,i,s,a) -> new Expanding("e_geomean",i,DoubleCollectors.geomean, a))
            .put("x_geomean", (c,i,s,a) -> new Cross("x_geomean",i,DoubleCollectors.geomean, a))
            .put("r_var", (c,i,s,a) -> new Rolling("r_var",i,DoubleCollectors.var, true,a))
            .put("e_var", (c,i,s,a) -> new Expanding("e_var",i,DoubleCollectors.var, a))
            .put("x_var", (c,i,s,a) -> new Cross("x_var",i,DoubleCollectors.var,a))
            .put("r_varp", (c,i,s,a) -> new Rolling("r_varp",i,DoubleCollectors.varp, true, a))
            .put("e_varp", (c,i,s,a) -> new Expanding("e_varp",i,DoubleCollectors.varp, a))
            .put("x_varp", (c,i,s,a) -> new Cross("x_varp",i,DoubleCollectors.varp, a))
            .put("r_std", (c,i,s,a) -> new Rolling("r_std",i,DoubleCollectors.stdev, true, a))
            .put("e_std", (c,i,s,a) -> new Expanding("e_std",i,DoubleCollectors.stdev, a))
            .put("x_std", (c,i,s,a) -> new Cross("x_std",i,DoubleCollectors.stdev, a))
            .put("r_stdp", (c,i,s,a) -> new Rolling("r_stdp",i,DoubleCollectors.stdevp, true, a))
            .put("e_stdp", (c,i,s,a) -> new Expanding("e_stdp",i,DoubleCollectors.stdevp, a))
            .put("x_stdp", (c,i,s,a) -> new Cross("x_stdp",i,DoubleCollectors.stdevp, a))
            .put("r_zscore", (c,i,s,a) -> new Rolling("r_zscore",i,DoubleCollectors.zscore, true, a))
            .put("e_zscore", (c,i,s,a) -> new Expanding("e_zscore",i,DoubleCollectors.zscore, a))
            .put("x_zscore", (c,i,s,a) -> new Cross("x_zscore",i,DoubleCollectors.zscore, a))
            .put("r_zscorep", (c,i,s,a) -> new Rolling("r_zscorep",i,DoubleCollectors.zscorep, true, a))
            .put("e_zscorep", (c,i,s,a) -> new Expanding("e_zscorep",i,DoubleCollectors.zscorep, a))
            .put("x_zscorep", (c,i,s,a) -> new Cross("x_zscorep",i,DoubleCollectors.zscorep, a))
            .put("fill", (c,i,s,a) -> new Fill(i,a))
            .put("correl", (c,i,s,a) -> new RollingCorrelation(i,a))
            .build();

    /** Command name -> supplier of the help text shown by the help command. */
    private final Map<String, Supplier<FunctionHelp>> commandHelp =
        new ImmutableMap.Builder<String, Supplier<FunctionHelp>>()
            /* terminal commands */
            .put("eval", Evaluate.getHelp())
            .put("export", Export.getHelp())
            .put("def", Save.getHelp())
            .put("help", Help.getHelp())
            .put("del", Delete.getHelp())
            /* stack manipulation commands */
            .put("label", Label.getHelp())
            .put("copy", Copy.getHelp())
            .put("roll", Roll.getHelp())
            .put("clear", Clear.getHelp())
            /* initial data commands */
            .put("yhoo", Yahoo.getHelp())
            .put("moon", MoonPhase.getHelp())
            /* rolling window commands */
            // BUG FIX: help was keyed under "chg"/"chg_pct"/"chg_log", which
            // are not registered commands; keys now match "r_chg" etc. above.
            .put("r_chg",Rolling.getHelp("r_chg", "change"))
            .put("r_chgpct",Rolling.getHelp("r_chgpct", "change in percent"))
            .put("r_chglog",Rolling.getHelp("r_chglog", "log change"))
            .put("r_mean",Rolling.getHelp("r_mean", "mean"))
            .put("r_max",Rolling.getHelp("r_max", "maximum"))
            .put("r_min",Rolling.getHelp("r_min", "minimum"))
            .put("r_sum",Rolling.getHelp("r_sum", "sum"))
            .put("r_geomean",Rolling.getHelp("r_geomean", "geometric mean"))
            .put("r_var",Rolling.getHelp("r_var", "sample variance"))
            .put("r_varp",Rolling.getHelp("r_varp", "variance"))
            .put("r_std",Rolling.getHelp("r_std", "sample standard deviation"))
            .put("r_zscorep",Rolling.getHelp("r_zscorep", "z-score"))
            .put("r_zscore",Rolling.getHelp("r_zscore", "sample z-score"))
            .put("r_stdp",Rolling.getHelp("r_stdp", "standard deviation"))
            .put("correl",Rolling.getHelp("correl", "average correlation"))
            /* cross commands */
            .put("x_mean",Cross.getHelp("x_mean", "mean"))
            .put("x_max",Cross.getHelp("x_max", "maximum"))
            .put("x_min",Cross.getHelp("x_min", "minimum"))
            .put("x_sum",Cross.getHelp("x_sum", "sum"))
            .put("x_geomean",Cross.getHelp("x_geomean", "geometric mean"))
            .put("x_var",Cross.getHelp("x_var", "sample variance"))
            .put("x_varp",Cross.getHelp("x_varp", "variance"))
            .put("x_std",Cross.getHelp("x_std", "sample standard deviation"))
            .put("x_zscorep",Cross.getHelp("x_zscorep", "z-score"))
            .put("x_zscore",Cross.getHelp("x_zscore", "sample z-score"))
            // BUG FIX: was Cross.getHelp("r_mean", "mean") — wrong name/desc.
            .put("x_stdp",Cross.getHelp("x_stdp", "standard deviation"))
            /* expanding commands */
            // BUG FIX: was Rolling.getHelp — e_mean is an Expanding command.
            .put("e_mean",Expanding.getHelp("e_mean", "mean"))
            .put("e_max",Expanding.getHelp("e_max", "maximum"))
            .put("e_min",Expanding.getHelp("e_min", "minimum"))
            .put("e_sum",Expanding.getHelp("e_sum", "sum"))
            .put("e_geomean",Expanding.getHelp("e_geomean", "geometric mean"))
            .put("e_var",Expanding.getHelp("e_var", "sample variance"))
            .put("e_varp",Expanding.getHelp("e_varp", "variance"))
            .put("e_std",Expanding.getHelp("e_std", "sample standard deviation"))
            .put("e_zscorep",Expanding.getHelp("e_zscorep", "z-score"))
            .put("e_zscore",Expanding.getHelp("e_zscore", "sample z-score"))
            .put("e_stdp",Expanding.getHelp("e_stdp", "standard deviation"))
            /* other commands */
            .put("fill",Fill.getHelp())
            /* binary operators */
            .put("+",BinaryOperator.getHelp("+", "addition"))
            .put("-",BinaryOperator.getHelp("-", "subtraction"))
            .put("/",BinaryOperator.getHelp("/", "division"))
            .put("*",BinaryOperator.getHelp("*", "multiplication"))
            .put("%",BinaryOperator.getHelp("%", "modulo"))
            .put("==",BinaryOperator.getHelp("==", "equals"))
            .put("!=",BinaryOperator.getHelp("!=", "not equals"))
            .put(">",BinaryOperator.getHelp(">", "greater than"))
            .put("<",BinaryOperator.getHelp("<", "less than"))
            .put(">=",BinaryOperator.getHelp(">=", "greater than or equal to"))
            .put("<=",BinaryOperator.getHelp("<=", "less than or equal to"))
            /* unary operators */
            .put("abs",UnaryOperator.getHelp("abs", "absolute value"))
            .put("neg",UnaryOperator.getHelp("neg", "negation"))
            .put("inv",UnaryOperator.getHelp("inv", "inverse"))
            .put("log",UnaryOperator.getHelp("log", "natural log"))
            .build();

    /** No initialization required; the maps above are built in field initializers. */
    public StandardVocabulary() {
    }

    /** @return map from command name to the factory that instantiates it */
    @Override
    public Map<String, FunctionFactory> getCommandMap() {
        return commands;
    }

    /** @return map from command name to a supplier of that command's help text */
    @Override
    public Map<String, Supplier<FunctionHelp>> getCommandHelpMap() {
        return commandHelp;
    }
}
|
Adding exponential to vocab.
|
pinto-lang/src/main/java/tech/pinto/StandardVocabulary.java
|
Adding exponential to vocab.
|
|
Java
|
mit
|
69284478fcafeff4f1a886a5f3eb96202c80dca6
| 0
|
InventivetalentDev/CompactNPCLib,InventivetalentDev/CompactNPCLib
|
package org.inventivetalent.npclib.registry;
import com.google.common.base.Strings;
import com.google.common.collect.Maps;
import javassist.ClassPool;
import org.bukkit.Bukkit;
import org.bukkit.Location;
import org.bukkit.plugin.Plugin;
import org.inventivetalent.mcwrapper.auth.GameProfileWrapper;
import org.inventivetalent.npclib.ClassGenerator;
import org.inventivetalent.npclib.NPCLib;
import org.inventivetalent.npclib.NPCType;
import org.inventivetalent.npclib.Reflection;
import org.inventivetalent.npclib.annotation.NPCInfo;
import org.inventivetalent.npclib.entity.NPCEntity;
import org.inventivetalent.npclib.entity.living.human.EntityPlayer;
import org.inventivetalent.npclib.npc.NPCAbstract;
import org.inventivetalent.npclib.npc.living.human.NPCHumanAbstract;
import org.inventivetalent.reflection.minecraft.Minecraft;
import org.inventivetalent.reflection.resolver.ConstructorResolver;
import org.inventivetalent.reflection.resolver.FieldResolver;
import org.inventivetalent.reflection.resolver.MethodResolver;
import java.lang.reflect.Constructor;
import java.util.*;
import static com.google.common.base.Preconditions.checkNotNull;
/**
 * Per-plugin registry that creates, tracks and removes NPC entities.
 * <p>
 * At most one registry may exist per plugin name; all live registries are
 * kept in a static map so they can be found and destroyed later.
 */
public class NPCRegistry implements Iterable<NPCAbstract> {

    // Cache of generated entity classes, keyed by NPC metadata; shared by all registries.
    static final Map<NPCInfo, Class> generatedClasses = new HashMap<>();
    // All live registries, keyed by plugin name.
    private static final Map<String, NPCRegistry> registryMap = new HashMap<>();

    private final Plugin plugin;
    // NPCs currently managed by this registry, keyed by entity UUID.
    private final Map<UUID, NPCAbstract> npcMap = Maps.newHashMap();

    /**
     * Creates and registers a registry for the given plugin.
     *
     * @throws IllegalArgumentException if a registry for this plugin name already exists
     */
    public NPCRegistry(Plugin plugin) {
        this.plugin = plugin;
        if (registryMap.containsKey(plugin.getName())) {
            throw new IllegalArgumentException("Registry for '" + plugin.getName() + "' already exists");
        }
        registryMap.put(plugin.getName(), this);
    }

    /**
     * Destroys this registry and removes it from the static registry map.
     *
     * @param removeNpcs whether to despawn and remove all NPCs owned by this registry
     * @throws IllegalStateException if the registry was already destroyed
     */
    public void destroy(boolean removeNpcs) {
        if (!registryMap.containsKey(plugin.getName())) {
            throw new IllegalStateException("Already destroyed");
        }
        if (removeNpcs) {
            // BUG FIX: iterate a snapshot of the keys. removeNpc() removes the
            // entry from npcMap, so iterating the live keySet() view would
            // throw a ConcurrentModificationException.
            for (UUID uuid : new ArrayList<>(npcMap.keySet())) {
                removeNpc(uuid);
            }
        }
        registryMap.remove(plugin.getName());
    }

    /** Destroys this registry, despawning and removing all of its NPCs. */
    public void destroy() {
        destroy(true);
    }

    /**
     * Injects the specified NPC classes, so the entities can be loaded properly by the server
     *
     * @param classes classes to inject
     */
    public static void injectClasses(Class... classes) {
        for (Class clazz : classes) {
            if (clazz == null) { continue; }
            getOrGenerateClass(NPCInfo.of(clazz));
        }
    }

    // Returns the cached generated entity class for the given NPC metadata,
    // generating it (and, unless id == -1, injecting it into EntityTypes) on first use.
    static Class<?> getOrGenerateClass(NPCInfo npcType) {
        if (generatedClasses.containsKey(npcType)) {
            return generatedClasses.get(npcType);
        }
        ClassPool classPool = ClassPool.getDefault();
        try {
            Class generated = ClassGenerator.generateEntityClass(classPool, npcType);
            generatedClasses.put(npcType, generated);
            if (npcType.getId() != -1) {
                injectEntity(generated, npcType.getId(), npcType.getNPCClassName());
            }// -1 -> special entity, don't inject
            return generated;
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }

    // Registers the generated class with the NMS EntityTypes registry so the
    // server can load it. Judging from the put() calls on the obfuscated
    // fields: "c" maps name -> class, "d" maps class -> name, and "f" maps
    // class -> numeric entity id.
    static void injectEntity(Class<?> clazz, int id, String name) {
        Class EntityTypes = Reflection.nmsClassResolver.resolveSilent("EntityTypes");
        FieldResolver fieldResolver = new FieldResolver(EntityTypes);
        ((Map) fieldResolver.resolveWrapper("c").get(null)).put(name, clazz);
        ((Map) fieldResolver.resolveWrapper("d").get(null)).put(clazz, name);
        ((Map) fieldResolver.resolveWrapper("f").get(null)).put(clazz, Integer.valueOf(id));
        NPCLib.logger.info("Injected " + clazz.getSimpleName() + " as " + name + " with id " + id);
    }

    /**
     * Creates and spawns the specified NPC Entity
     *
     * @param location {@link Location} to spawn the entity at
     * @param npcClass NPC-Class to spawn
     * @param <T>      a NPC class extending {@link NPCAbstract}
     * @return the spawned NPC Entity
     */
    public <T extends NPCAbstract> T spawnNPC(Location location, Class<T> npcClass) {
        checkNotNull(location);
        checkNotNull(npcClass);
        try {
            NPCInfo npcInfo = NPCInfo.of(npcClass);
            NPCEntity npcEntity = createEntity(location, npcInfo);
            return wrapAndInitEntity(npcEntity, location, npcInfo, npcClass);
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * Creates and spawns the specified NPC Type
     *
     * @param location {@link Location} to spawn the entity at
     * @param npcType  type of the NPC
     * @return the spawned NPC Entity
     */
    public NPCAbstract spawnNPC(Location location, NPCType npcType) {
        return spawnNPC(location, checkNotNull(npcType).getNpcClass());
    }

    /**
     * Creates and spawns a player NPC entity
     *
     * @param location    {@link Location} to spawn the entity at
     * @param npcClass    NPC-Class to spawn
     * @param gameProfile {@link GameProfileWrapper} to use for the player
     * @param <T>         a NPC class extending {@link NPCHumanAbstract}
     * @return the spawned NPC entity
     */
    public <T extends NPCHumanAbstract> T spawnPlayerNPC(Location location, Class<T> npcClass, GameProfileWrapper gameProfile) {
        checkNotNull(location);
        checkNotNull(npcClass);
        checkNotNull(gameProfile);
        try {
            NPCInfo npcInfo = NPCInfo.of(npcClass);
            NPCEntity npcEntity = createPlayerEntity(location, npcInfo, gameProfile);
            return wrapAndInitEntity(npcEntity, location, npcInfo, npcClass);
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * Creates and spawns a player NPC entity
     *
     * @param location {@link Location} to spawn the entity at
     * @param npcClass NPC-Class to spawn
     * @param uuid     {@link UUID} of the player
     * @param name     Name of the player
     * @param <T>      a NPC class extending {@link NPCHumanAbstract}
     * @return the spawned NPC entity
     */
    public <T extends NPCHumanAbstract> T spawnPlayerNPC(Location location, Class<T> npcClass, UUID uuid, String name) {
        if (uuid == null && Strings.isNullOrEmpty(name)) {
            throw new IllegalArgumentException("UUID and Name cannot both be empty");
        }
        // NOTE(review): the guard above permits a null uuid when a name is
        // given, but checkNotNull(uuid) below rejects that case with an NPE —
        // confirm whether GameProfileWrapper actually requires a non-null UUID.
        return spawnPlayerNPC(location, npcClass, new GameProfileWrapper(checkNotNull(uuid), name));
    }

    /** Removes the given NPC from this registry and despawns it. */
    public <T extends NPCAbstract> T removeNpc(T npc) {
        npcMap.remove(checkNotNull(npc).getUniqueId());
        npc.despawn();
        return npc;
    }

    /**
     * Removes the NPC with the given UUID from this registry and despawns it.
     *
     * @return the removed NPC, or {@code null} if none was registered under that UUID
     */
    public NPCAbstract removeNpc(UUID uuid) {
        NPCAbstract npc = npcMap.remove(checkNotNull(uuid));
        if (npc != null) { npc.despawn(); }
        return npc;
    }

    /** @return a snapshot copy of the NPCs currently managed by this registry */
    public Collection<NPCAbstract> getNpcs() {
        return new ArrayList<>(npcMap.values());
    }

    // Constructs a non-player NPC entity via the generated class's
    // (World) constructor. EntityPlayer needs extra constructor arguments,
    // so it is rejected here and handled by createPlayerEntity instead.
    protected <T extends NPCEntity> T createEntity(Location location, NPCInfo npcInfo) {
        if ("EntityPlayer".equals(npcInfo.getNms())) { throw new IllegalArgumentException("cannot construct EntityPlayer using #createEntity"); }
        Class clazz = getOrGenerateClass(npcInfo);
        try {
            //noinspection unchecked
            Constructor constructor = clazz.getConstructor(Reflection.nmsClassResolver.resolve("World"));
            //noinspection unchecked
            return (T) constructor.newInstance(Minecraft.getHandle(location.getWorld()));
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }

    // Constructs a player NPC entity via the generated class's
    // (MinecraftServer, WorldServer, GameProfile, PlayerInteractManager) constructor.
    protected EntityPlayer createPlayerEntity(Location location, NPCInfo npcInfo, GameProfileWrapper gameProfile) {
        Class clazz = getOrGenerateClass(npcInfo);
        try {
            Object minecraftServer = new MethodResolver(Bukkit.getServer().getClass()).resolveWrapper("getServer").invoke(Bukkit.getServer());
            Object worldServer = Minecraft.getHandle(location.getWorld());
            Object interactManager = new ConstructorResolver(Reflection.nmsClassResolver.resolve("PlayerInteractManager")).resolve(new Class[] { Reflection.nmsClassResolver.resolve("World") }).newInstance(worldServer);
            //noinspection unchecked
            Constructor constructor = clazz.getConstructor(Reflection.nmsClassResolver.resolve("MinecraftServer"), Reflection.nmsClassResolver.resolve("WorldServer"), gameProfile.getHandle().getClass(), Reflection.nmsClassResolver.resolve("PlayerInteractManager"));
            return (EntityPlayer) constructor.newInstance(minecraftServer, worldServer, gameProfile.getHandle(), interactManager);
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }

    // Wraps the raw entity in its NPC abstraction, registers it in npcMap and spawns it.
    protected <T extends NPCAbstract> T wrapAndInitEntity(NPCEntity entity, Location location, NPCInfo npcInfo, Class<T> npcClass) throws Exception {
        //		NPCAbstract npcAbstract = (NPCAbstract) new ConstructorResolver(npcClass).resolveFirstConstructorSilent().newInstance(entity);
        NPCAbstract npcAbstract = entity.getNPC();
        entity.setNpcInfo(npcInfo);
        npcAbstract.postInit(this.plugin, location);
        // Register NPC
        npcMap.put(npcAbstract.getUniqueId(), npcAbstract);
        npcAbstract.spawn();
        //noinspection unchecked
        return (T) npcAbstract;
    }

    @Override
    public Iterator<NPCAbstract> iterator() {
        return npcMap.values().iterator();
    }
}
|
API/src/main/java/org/inventivetalent/npclib/registry/NPCRegistry.java
|
package org.inventivetalent.npclib.registry;
import com.google.common.base.Strings;
import com.google.common.collect.Maps;
import javassist.ClassPool;
import lombok.AccessLevel;
import lombok.Getter;
import lombok.RequiredArgsConstructor;
import org.bukkit.Bukkit;
import org.bukkit.Location;
import org.bukkit.plugin.Plugin;
import org.inventivetalent.mcwrapper.auth.GameProfileWrapper;
import org.inventivetalent.npclib.ClassGenerator;
import org.inventivetalent.npclib.NPCLib;
import org.inventivetalent.npclib.NPCType;
import org.inventivetalent.npclib.Reflection;
import org.inventivetalent.npclib.annotation.NPCInfo;
import org.inventivetalent.npclib.entity.NPCEntity;
import org.inventivetalent.npclib.entity.living.human.EntityPlayer;
import org.inventivetalent.npclib.npc.NPCAbstract;
import org.inventivetalent.npclib.npc.living.human.NPCHumanAbstract;
import org.inventivetalent.reflection.minecraft.Minecraft;
import org.inventivetalent.reflection.resolver.ConstructorResolver;
import org.inventivetalent.reflection.resolver.FieldResolver;
import org.inventivetalent.reflection.resolver.MethodResolver;
import java.lang.reflect.Constructor;
import java.util.*;
import static com.google.common.base.Preconditions.checkNotNull;
/**
 * Registry that creates, tracks and removes NPC entities for a single plugin.
 * Lombok's {@code @RequiredArgsConstructor} generates the public
 * {@code NPCRegistry(Plugin)} constructor for the final {@code plugin} field.
 */
@RequiredArgsConstructor(access = AccessLevel.PUBLIC)
public class NPCRegistry implements Iterable<NPCAbstract> {
// Cache of generated entity classes, keyed by NPC metadata; shared by all registries.
static final Map<NPCInfo, Class> generatedClasses = new HashMap<>();
// Owning plugin; lombok @Getter generates getPlugin().
@Getter final Plugin plugin;
// NPCs currently managed by this registry, keyed by entity UUID.
private final Map<UUID, NPCAbstract> npcMap = Maps.newHashMap();
/**
 * Injects the specified NPC classes, so the entities can be loaded properly by the server
 *
 * @param classes classes to inject
 */
public static void injectClasses(Class... classes) {
for (Class clazz : classes) {
if (clazz == null) { continue; }
getOrGenerateClass(NPCInfo.of(clazz));
}
}
// Returns the cached generated entity class for the given NPC metadata,
// generating it (and, unless id == -1, injecting it) on first use.
static Class<?> getOrGenerateClass(NPCInfo npcType) {
if (generatedClasses.containsKey(npcType)) {
return generatedClasses.get(npcType);
}
ClassPool classPool = ClassPool.getDefault();
try {
Class generated = ClassGenerator.generateEntityClass(classPool, npcType);
generatedClasses.put(npcType, generated);
if (npcType.getId() != -1) {
injectEntity(generated, npcType.getId(), npcType.getNPCClassName());
}// -1 -> special entity, don't inject
return generated;
} catch (Exception e) {
throw new RuntimeException(e);
}
}
// Registers the generated class with the NMS EntityTypes registry so the
// server can load it. Judging from the put() calls on the obfuscated fields:
// "c" maps name -> class, "d" maps class -> name, "f" maps class -> numeric id.
static void injectEntity(Class<?> clazz, int id, String name) {
Class EntityTypes = Reflection.nmsClassResolver.resolveSilent("EntityTypes");
FieldResolver fieldResolver = new FieldResolver(EntityTypes);
((Map) fieldResolver.resolveWrapper("c").get(null)).put(name, clazz);
((Map) fieldResolver.resolveWrapper("d").get(null)).put(clazz, name);
((Map) fieldResolver.resolveWrapper("f").get(null)).put(clazz, Integer.valueOf(id));
NPCLib.logger.info("Injected " + clazz.getSimpleName() + " as " + name + " with id " + id);
}
/**
 * Creates and spawns the specified NPC Entity
 *
 * @param location {@link Location} to spawn the entity at
 * @param npcClass NPC-Class to spawn
 * @param <T> a NPC class extending {@link NPCAbstract}
 * @return the spawned NPC Entity
 */
public <T extends NPCAbstract> T spawnNPC(Location location, Class<T> npcClass) {
checkNotNull(location);
checkNotNull(npcClass);
try {
NPCInfo npcInfo = NPCInfo.of(npcClass);
NPCEntity npcEntity = createEntity(location, npcInfo);
return wrapAndInitEntity(npcEntity, location, npcInfo, npcClass);
} catch (Exception e) {
throw new RuntimeException(e);
}
}
/**
 * Creates and spawns the specified NPC Type
 *
 * @param location {@link Location} to spawn the entity at
 * @param npcType type of the NPC
 * @return the spawned NPC Entity
 */
public NPCAbstract spawnNPC(Location location, NPCType npcType) {
return spawnNPC(location, checkNotNull(npcType).getNpcClass());
}
/**
 * Creates and spawns a player NPC entity
 *
 * @param location {@link Location} to spawn the entity at
 * @param npcClass NPC-Class to spawn
 * @param gameProfile {@link GameProfileWrapper} to use for the player
 * @param <T> a NPC class extending {@link NPCHumanAbstract}
 * @return the spawned NPC entity
 */
public <T extends NPCHumanAbstract> T spawnPlayerNPC(Location location, Class<T> npcClass, GameProfileWrapper gameProfile) {
checkNotNull(location);
checkNotNull(npcClass);
checkNotNull(gameProfile);
try {
NPCInfo npcInfo = NPCInfo.of(npcClass);
NPCEntity npcEntity = createPlayerEntity(location, npcInfo, gameProfile);
return wrapAndInitEntity(npcEntity, location, npcInfo, npcClass);
} catch (Exception e) {
throw new RuntimeException(e);
}
}
/**
 * Creates and spawns a player NPC entity
 *
 * @param location {@link Location} to spawn the entity at
 * @param npcClass NPC-Class to spawn
 * @param uuid {@link UUID} of the player
 * @param name Name of the player
 * @param <T> a NPC class extending {@link NPCHumanAbstract}
 * @return the spawned NPC entity
 */
public <T extends NPCHumanAbstract> T spawnPlayerNPC(Location location, Class<T> npcClass, UUID uuid, String name) {
if (uuid == null && Strings.isNullOrEmpty(name)) {
throw new IllegalArgumentException("UUID and Name cannot both be empty");
}
// NOTE(review): the guard above permits a null uuid when a name is given,
// but checkNotNull(uuid) below rejects that case with an NPE — confirm intent.
return spawnPlayerNPC(location, npcClass, new GameProfileWrapper(checkNotNull(uuid), name));
}
// Removes the given NPC from this registry and despawns it.
public <T extends NPCAbstract> T removeNpc(T npc) {
npcMap.remove(checkNotNull(npc).getUniqueId());
npc.despawn();
return npc;
}
// Removes the NPC registered under the given UUID and despawns it;
// returns null if no NPC was registered under that UUID.
public NPCAbstract removeNpc(UUID uuid) {
NPCAbstract npc = npcMap.remove(checkNotNull(uuid));
if (npc != null) { npc.despawn(); }
return npc;
}
// Returns a snapshot copy of the NPCs currently managed by this registry.
public Collection<NPCAbstract> getNpcs() {
return new ArrayList<>(npcMap.values());
}
// Constructs a non-player NPC entity via the generated class's (World)
// constructor. EntityPlayer needs extra arguments and is rejected here.
protected <T extends NPCEntity> T createEntity(Location location, NPCInfo npcInfo) {
if ("EntityPlayer".equals(npcInfo.getNms())) { throw new IllegalArgumentException("cannot construct EntityPlayer using #createEntity"); }
Class clazz = getOrGenerateClass(npcInfo);
try {
//noinspection unchecked
Constructor constructor = clazz.getConstructor(Reflection.nmsClassResolver.resolve("World"));
//noinspection unchecked
return (T) constructor.newInstance(Minecraft.getHandle(location.getWorld()));
} catch (Exception e) {
throw new RuntimeException(e);
}
}
// Constructs a player NPC entity via the generated class's
// (MinecraftServer, WorldServer, GameProfile, PlayerInteractManager) constructor.
protected EntityPlayer createPlayerEntity(Location location, NPCInfo npcInfo, GameProfileWrapper gameProfile) {
Class clazz = getOrGenerateClass(npcInfo);
try {
Object minecraftServer = new MethodResolver(Bukkit.getServer().getClass()).resolveWrapper("getServer").invoke(Bukkit.getServer());
Object worldServer = Minecraft.getHandle(location.getWorld());
Object interactManager = new ConstructorResolver(Reflection.nmsClassResolver.resolve("PlayerInteractManager")).resolve(new Class[] { Reflection.nmsClassResolver.resolve("World") }).newInstance(worldServer);
//noinspection unchecked
Constructor constructor = clazz.getConstructor(Reflection.nmsClassResolver.resolve("MinecraftServer"), Reflection.nmsClassResolver.resolve("WorldServer"), gameProfile.getHandle().getClass(), Reflection.nmsClassResolver.resolve("PlayerInteractManager"));
return (EntityPlayer) constructor.newInstance(minecraftServer, worldServer, gameProfile.getHandle(), interactManager);
} catch (Exception e) {
throw new RuntimeException(e);
}
}
// Wraps the raw entity in its NPC abstraction, registers it in npcMap and spawns it.
protected <T extends NPCAbstract> T wrapAndInitEntity(NPCEntity entity, Location location, NPCInfo npcInfo, Class<T> npcClass) throws Exception {
// NPCAbstract npcAbstract = (NPCAbstract) new ConstructorResolver(npcClass).resolveFirstConstructorSilent().newInstance(entity);
NPCAbstract npcAbstract = entity.getNPC();
entity.setNpcInfo(npcInfo);
npcAbstract.postInit(this.plugin, location);
// Register NPC
npcMap.put(npcAbstract.getUniqueId(), npcAbstract);
npcAbstract.spawn();
//noinspection unchecked
return (T) npcAbstract;
}
@Override
public Iterator<NPCAbstract> iterator() {
return npcMap.values().iterator();
}
}
|
store all registries | add #destroy
|
API/src/main/java/org/inventivetalent/npclib/registry/NPCRegistry.java
|
store all registries | add #destroy
|
|
Java
|
mit
|
0cf462a7abdb0bcac9d494030a2218f47a118c6f
| 0
|
mercadopago/px-android,mercadopago/px-android,mercadopago/px-android
|
package com.mercadopago.core;
import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import com.google.gson.Gson;
import com.google.gson.reflect.TypeToken;
import com.mercadopago.BankDealsActivity;
import com.mercadopago.CallForAuthorizeActivity;
import com.mercadopago.CardVaultActivity;
import com.mercadopago.CheckoutActivity;
import com.mercadopago.CongratsActivity;
import com.mercadopago.CustomerCardsActivity;
import com.mercadopago.GuessingCardActivity;
import com.mercadopago.InstallmentsActivity;
import com.mercadopago.InstructionsActivity;
import com.mercadopago.IssuersActivity;
import com.mercadopago.PaymentMethodsActivity;
import com.mercadopago.PaymentResultActivity;
import com.mercadopago.PaymentVaultActivity;
import com.mercadopago.PendingActivity;
import com.mercadopago.RejectionActivity;
import com.mercadopago.VaultActivity;
import com.mercadopago.adapters.ErrorHandlingCallAdapter;
import com.mercadopago.callbacks.Callback;
import com.mercadopago.model.BankDeal;
import com.mercadopago.model.Card;
import com.mercadopago.model.CardToken;
import com.mercadopago.model.CheckoutPreference;
import com.mercadopago.model.DecorationPreference;
import com.mercadopago.model.IdentificationType;
import com.mercadopago.model.Installment;
import com.mercadopago.model.Instruction;
import com.mercadopago.model.Issuer;
import com.mercadopago.model.PayerCost;
import com.mercadopago.model.Payment;
import com.mercadopago.model.PaymentIntent;
import com.mercadopago.model.PaymentMethod;
import com.mercadopago.model.PaymentMethodSearch;
import com.mercadopago.model.PaymentPreference;
import com.mercadopago.model.SavedCardToken;
import com.mercadopago.model.Site;
import com.mercadopago.model.Token;
import com.mercadopago.mptracker.MPTracker;
import com.mercadopago.services.BankDealService;
import com.mercadopago.services.GatewayService;
import com.mercadopago.services.IdentificationService;
import com.mercadopago.services.PaymentService;
import com.mercadopago.util.HttpClientUtil;
import com.mercadopago.util.JsonUtil;
import java.lang.reflect.Type;
import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.List;
import retrofit2.Retrofit;
import retrofit2.converter.gson.GsonConverterFactory;
public class MercadoPago {
public static final String KEY_TYPE_PUBLIC = "public_key";
public static final String KEY_TYPE_PRIVATE = "private_key";
public static final int CUSTOMER_CARDS_REQUEST_CODE = 0;
public static final int PAYMENT_METHODS_REQUEST_CODE = 1;
public static final int INSTALLMENTS_REQUEST_CODE = 2;
public static final int ISSUERS_REQUEST_CODE = 3;
public static final int NEW_CARD_REQUEST_CODE = 4;
public static final int RESULT_REQUEST_CODE = 5;
public static final int VAULT_REQUEST_CODE = 6;
public static final int CALL_FOR_AUTHORIZE_REQUEST_CODE = 7;
public static final int PENDING_REQUEST_CODE = 8;
public static final int REJECTION_REQUEST_CODE = 9;
public static final int PAYMENT_VAULT_REQUEST_CODE = 10;
public static final int BANK_DEALS_REQUEST_CODE = 11;
public static final int CHECKOUT_REQUEST_CODE = 12;
public static final int GUESSING_CARD_REQUEST_CODE = 13;
public static final int INSTRUCTIONS_REQUEST_CODE = 14;
public static final int CARD_VAULT_REQUEST_CODE = 15;
public static final int CONGRATS_REQUEST_CODE = 16;
public static final int BIN_LENGTH = 6;
private static final String MP_API_BASE_URL = "https://api.mercadopago.com";
private String mKey = null;
private String mKeyType = null;
private Context mContext = null;
Retrofit mRetrofit;
/**
 * Builds the client from a {@code Builder}: captures context, key and key
 * type, then constructs the shared Retrofit instance used by most calls.
 */
private MercadoPago(Builder builder) {
this.mContext = builder.mContext;
this.mKey = builder.mKey;
this.mKeyType = builder.mKeyType;
// NOTE(review): JVM-wide side effect — disables HTTP keep-alive for every
// HttpURLConnection in the process, not just this client; confirm intended.
System.setProperty("http.keepAlive", "false");
mRetrofit = new Retrofit.Builder()
.baseUrl(MP_API_BASE_URL)
.addConverterFactory(GsonConverterFactory.create(JsonUtil.getInstance().getGson()))
// Client values (10, 20, 20) — presumably connect/read/write timeouts;
// confirm against HttpClientUtil.getClient.
.client(HttpClientUtil.getClient(this.mContext, 10, 20, 20))
.addCallAdapterFactory(new ErrorHandlingCallAdapter.ErrorHandlingCallAdapterFactory())
.build();
}
/**
 * Fetches a checkout preference by id via the payment service.
 *
 * @param checkoutPreferenceId id of the preference to retrieve
 * @param callback             invoked asynchronously with the {@link CheckoutPreference} or an error
 * @throws RuntimeException if this instance was not configured with a public key type
 */
public void getPreference(String checkoutPreferenceId, Callback<CheckoutPreference> callback) {
    // Constant-first equals(): if mKeyType was never set, this now fails with
    // the intended "Unsupported key type" RuntimeException instead of an NPE.
    if (KEY_TYPE_PUBLIC.equals(this.mKeyType)) {
        MPTracker.getInstance().trackEvent("NO_SCREEN", "GET_PREFERENCE", "1", mKeyType, "MLA", "1.0", mContext);
        PaymentService service = mRetrofit.create(PaymentService.class);
        service.getPreference(checkoutPreferenceId, this.mKey).enqueue(callback);
    } else {
        throw new RuntimeException("Unsupported key type for this method");
    }
}
/**
 * Creates a payment from the given intent, asynchronously. Public-key instances only.
 * Uses a dedicated Retrofit adapter with longer read/write timeouts (40s) than the
 * shared client, and sends the payment intent's transaction id alongside the body.
 *
 * @param paymentIntent payment data to submit
 * @param callback      receives the created Payment or an error
 * @throws RuntimeException if this instance was not built with a public key
 */
public void createPayment(final PaymentIntent paymentIntent, final Callback<Payment> callback) {
    if (!this.mKeyType.equals(KEY_TYPE_PUBLIC)) {
        throw new RuntimeException("Unsupported key type for this method");
    }
    MPTracker.getInstance().trackEvent("NO_SCREEN", "CREATE_PAYMENT", "1", mKeyType, "MLA", "1.0", mContext);
    Retrofit longTimeoutAdapter = new Retrofit.Builder()
            .baseUrl(MP_API_BASE_URL)
            .addConverterFactory(GsonConverterFactory.create(JsonUtil.getInstance().getGson()))
            .client(HttpClientUtil.getClient(this.mContext, 10, 40, 40))
            .addCallAdapterFactory(new ErrorHandlingCallAdapter.ErrorHandlingCallAdapterFactory())
            .build();
    PaymentService paymentService = longTimeoutAdapter.create(PaymentService.class);
    paymentService.createPayment(String.valueOf(paymentIntent.getTransactionId()), paymentIntent).enqueue(callback);
}
/**
 * Tokenizes a saved card, asynchronously. Public-key instances only.
 * Attaches device information to the token request before sending.
 *
 * @param savedCardToken saved-card data to tokenize
 * @param callback       receives the Token or an error
 * @throws RuntimeException if this instance was not built with a public key
 */
public void createToken(final SavedCardToken savedCardToken, final Callback<Token> callback) {
    if (!this.mKeyType.equals(KEY_TYPE_PUBLIC)) {
        throw new RuntimeException("Unsupported key type for this method");
    }
    MPTracker.getInstance().trackEvent("NO_SCREEN","CREATE_SAVED_TOKEN","1", mKeyType, "MLA", "1.0", mContext);
    savedCardToken.setDevice(mContext);
    GatewayService gatewayService = mRetrofit.create(GatewayService.class);
    gatewayService.getToken(this.mKey, savedCardToken).enqueue(callback);
}
/**
 * Tokenizes raw card data, asynchronously. Public-key instances only.
 * Attaches device information to the token request before sending.
 *
 * @param cardToken card data to tokenize
 * @param callback  receives the Token or an error
 * @throws RuntimeException if this instance was not built with a public key
 */
public void createToken(final CardToken cardToken, final Callback<Token> callback) {
    if (!this.mKeyType.equals(KEY_TYPE_PUBLIC)) {
        throw new RuntimeException("Unsupported key type for this method");
    }
    MPTracker.getInstance().trackEvent("NO_SCREEN","CREATE_CARD_TOKEN","1", mKeyType, "MLA", "1.0", mContext);
    cardToken.setDevice(mContext);
    GatewayService gatewayService = mRetrofit.create(GatewayService.class);
    gatewayService.getToken(this.mKey, cardToken).enqueue(callback);
}
/**
 * Fetches the bank deals available for the device's current locale, asynchronously.
 * Public-key instances only.
 *
 * @param callback receives the list of BankDeal or an error
 * @throws RuntimeException if this instance was not built with a public key
 */
public void getBankDeals(final Callback<List<BankDeal>> callback) {
    if (!this.mKeyType.equals(KEY_TYPE_PUBLIC)) {
        throw new RuntimeException("Unsupported key type for this method");
    }
    MPTracker.getInstance().trackEvent("NO_SCREEN","GET_BANK_DEALS","1", mKeyType, "MLA", "1.0", mContext);
    String deviceLocale = mContext.getResources().getConfiguration().locale.toString();
    BankDealService bankDealService = mRetrofit.create(BankDealService.class);
    bankDealService.getBankDeals(this.mKey, deviceLocale).enqueue(callback);
}
// Fetches the identification types (e.g. document kinds) asynchronously.
// Unlike the other endpoints this one works with BOTH key types: the service takes
// two key parameters and the credential is passed in a different position depending
// on whether it is a public or a private key.
public void getIdentificationTypes(Callback<List<IdentificationType>> callback) {
IdentificationService service = mRetrofit.create(IdentificationService.class);
if (this.mKeyType.equals(KEY_TYPE_PUBLIC)) {
MPTracker.getInstance().trackEvent("NO_SCREEN","GET_IDENTIFICATION_TYPES","1", mKeyType, "MLA", "1.0", mContext);
service.getIdentificationTypes(this.mKey, null).enqueue(callback);
} else {
// Private key goes in the second slot; note no tracking event fires on this path —
// TODO confirm that asymmetry is intentional.
service.getIdentificationTypes(null, this.mKey).enqueue(callback);
}
}
/**
 * Fetches installment options for a card bin / amount / issuer / payment method,
 * localized to the device locale. Public-key instances only.
 *
 * @param bin             first card digits identifying the card
 * @param amount          purchase amount
 * @param issuerId        card issuer id
 * @param paymentMethodId payment method id
 * @param callback        receives the list of Installment or an error
 * @throws RuntimeException if this instance was not built with a public key
 */
public void getInstallments(String bin, BigDecimal amount, Long issuerId, String paymentMethodId, Callback<List<Installment>> callback) {
    if (!this.mKeyType.equals(KEY_TYPE_PUBLIC)) {
        throw new RuntimeException("Unsupported key type for this method");
    }
    MPTracker.getInstance().trackEvent("NO_SCREEN", "GET_INSTALLMENTS", "1", mKeyType, "MLA", "1.0", mContext);
    String deviceLocale = mContext.getResources().getConfiguration().locale.toString();
    PaymentService installmentsService = mRetrofit.create(PaymentService.class);
    installmentsService.getInstallments(this.mKey, bin, amount, issuerId, paymentMethodId, deviceLocale).enqueue(callback);
}
/**
 * Fetches the card issuers valid for a payment method and bin, asynchronously.
 * Public-key instances only.
 *
 * @param paymentMethodId payment method id
 * @param bin             first card digits identifying the card
 * @param callback        receives the list of Issuer or an error
 * @throws RuntimeException if this instance was not built with a public key
 */
public void getIssuers(String paymentMethodId, String bin, final Callback<List<Issuer>> callback) {
    if (!this.mKeyType.equals(KEY_TYPE_PUBLIC)) {
        throw new RuntimeException("Unsupported key type for this method");
    }
    MPTracker.getInstance().trackEvent("NO_SCREEN", "GET_ISSUERS", "1", mKeyType, "MLA", "1.0", mContext);
    PaymentService issuersService = mRetrofit.create(PaymentService.class);
    issuersService.getIssuers(this.mKey, paymentMethodId, bin).enqueue(callback);
}
/**
 * Fetches all available payment methods, asynchronously. Public-key instances only.
 *
 * @param callback receives the list of PaymentMethod or an error
 * @throws RuntimeException if this instance was not built with a public key
 */
public void getPaymentMethods(final Callback<List<PaymentMethod>> callback) {
    if (!this.mKeyType.equals(KEY_TYPE_PUBLIC)) {
        throw new RuntimeException("Unsupported key type for this method");
    }
    MPTracker.getInstance().trackEvent("NO_SCREEN","GET_PAYMENT_METHODS","1", mKeyType, "MLA", "1.0", mContext);
    PaymentService paymentMethodsService = mRetrofit.create(PaymentService.class);
    paymentMethodsService.getPaymentMethods(this.mKey).enqueue(callback);
}
/**
 * Searches payment methods for an amount, excluding the given payment types and
 * payment method ids (each sent as a comma-separated query parameter).
 * Public-key instances only.
 *
 * Bug fixed: the original join decided whether to append a comma by comparing each
 * element (via equals) against the LAST element of the list, so any element equal
 * to the last one — e.g. a duplicated id — silently dropped its separator and
 * produced a malformed exclusion list. The join is now index-based.
 *
 * @param amount                 purchase amount
 * @param excludedPaymentTypes   payment type ids to exclude; may be null
 * @param excludedPaymentMethods payment method ids to exclude; may be null
 * @param callback               receives the PaymentMethodSearch or an error
 * @throws RuntimeException if this instance was not built with a public key
 */
public void getPaymentMethodSearch(BigDecimal amount, List<String> excludedPaymentTypes, List<String> excludedPaymentMethods, final Callback<PaymentMethodSearch> callback) {
    if (this.mKeyType.equals(KEY_TYPE_PUBLIC)) {
        MPTracker.getInstance().trackEvent("NO_SCREEN","GET_PAYMENT_METHOD_SEARCH","1", mKeyType, "MLA", "1.0", mContext);
        PaymentService service = mRetrofit.create(PaymentService.class);
        String excludedPaymentTypesAppended = joinWithCommas(excludedPaymentTypes);
        String excludedPaymentMethodsAppended = joinWithCommas(excludedPaymentMethods);
        service.getPaymentMethodSearch(this.mKey, amount, excludedPaymentTypesAppended, excludedPaymentMethodsAppended).enqueue(callback);
    } else {
        throw new RuntimeException("Unsupported key type for this method");
    }
}

// Joins the elements with "," preserving order; returns "" for a null or empty list.
private static String joinWithCommas(List<String> values) {
    StringBuilder joined = new StringBuilder();
    if (values != null) {
        for (int i = 0; i < values.size(); i++) {
            joined.append(values.get(i));
            if (i < values.size() - 1) {
                joined.append(",");
            }
        }
    }
    return joined.toString();
}
/**
 * Fetches the off-line payment instructions for a payment, asynchronously.
 * Public-key instances only.
 *
 * @param paymentId     id of the payment
 * @param paymentTypeId type of the payment
 * @param callback      receives the Instruction or an error
 * @throws RuntimeException if this instance was not built with a public key
 */
public void getInstructions(Long paymentId, String paymentTypeId, final Callback<Instruction> callback) {
    if (!this.mKeyType.equals(KEY_TYPE_PUBLIC)) {
        throw new RuntimeException("Unsupported key type for this method");
    }
    MPTracker.getInstance().trackEvent("NO_SCREEN","GET_INSTRUCTIONS","1", mKeyType, "MLA", "1.0", mContext);
    PaymentService instructionsService = mRetrofit.create(PaymentService.class);
    instructionsService.getInstruction(paymentId, this.mKey, paymentTypeId).enqueue(callback);
}
/**
 * Filters the payment methods that accept the given bin (first {@value #BIN_LENGTH}
 * digits of a card number).
 *
 * @param bin            exactly BIN_LENGTH card digits
 * @param paymentMethods candidates to filter
 * @return the payment methods valid for the bin (possibly empty)
 * @throws RuntimeException if the bin does not have exactly BIN_LENGTH digits
 */
public static List<PaymentMethod> getValidPaymentMethodsForBin(String bin, List<PaymentMethod> paymentMethods){
    if (bin.length() != BIN_LENGTH) {
        throw new RuntimeException("Invalid bin: " + BIN_LENGTH + " digits needed, " + bin.length() + " found");
    }
    List<PaymentMethod> matchingMethods = new ArrayList<>();
    for (PaymentMethod candidate : paymentMethods) {
        if (candidate.isValidForBin(bin)) {
            matchingMethods.add(candidate);
        }
    }
    return matchingMethods;
}
// Static helpers used by StartActivityBuilder to launch the SDK's activities.
// Launches BankDealsActivity; the result is delivered with BANK_DEALS_REQUEST_CODE.
private static void startBankDealsActivity(Activity activity, String publicKey, DecorationPreference decorationPreference) {
    Intent intent = new Intent(activity, BankDealsActivity.class);
    intent.putExtra("merchantPublicKey", publicKey);
    intent.putExtra("decorationPreference", JsonUtil.getInstance().toJson(decorationPreference));
    activity.startActivityForResult(intent, BANK_DEALS_REQUEST_CODE);
}
// Launches the full checkout flow; result delivered with CHECKOUT_REQUEST_CODE.
// Complex extras are serialized to JSON via the shared JsonUtil Gson instance.
private static void startCheckoutActivity(Activity activity, String merchantPublicKey, String checkoutPreferenceId, Boolean showBankDeals, DecorationPreference decorationPreference) {
Intent checkoutIntent = new Intent(activity, CheckoutActivity.class);
checkoutIntent.putExtra("merchantPublicKey", merchantPublicKey);
checkoutIntent.putExtra("checkoutPreferenceId", checkoutPreferenceId);
checkoutIntent.putExtra("showBankDeals", showBankDeals);
checkoutIntent.putExtra("decorationPreference", JsonUtil.getInstance().toJson(decorationPreference));
activity.startActivityForResult(checkoutIntent, CHECKOUT_REQUEST_CODE);
}
// Shows the payment result screen for a finished payment (RESULT_REQUEST_CODE).
private static void startPaymentResultActivity(Activity activity, String merchantPublicKey, Payment payment, PaymentMethod paymentMethod) {
Intent resultIntent = new Intent(activity, PaymentResultActivity.class);
resultIntent.putExtra("merchantPublicKey", merchantPublicKey);
resultIntent.putExtra("payment", JsonUtil.getInstance().toJson(payment));
resultIntent.putExtra("paymentMethod", JsonUtil.getInstance().toJson(paymentMethod));
activity.startActivityForResult(resultIntent, RESULT_REQUEST_CODE);
}
// Shows the "congrats" (approved payment) screen (CONGRATS_REQUEST_CODE).
private static void startCongratsActivity(Activity activity, String merchantPublicKey, Payment payment, PaymentMethod paymentMethod) {
Intent congratsIntent = new Intent(activity, CongratsActivity.class);
congratsIntent.putExtra("merchantPublicKey", merchantPublicKey);
congratsIntent.putExtra("payment", JsonUtil.getInstance().toJson(payment));
congratsIntent.putExtra("paymentMethod", JsonUtil.getInstance().toJson(paymentMethod));
activity.startActivityForResult(congratsIntent, CONGRATS_REQUEST_CODE);
}
// Shows the "call for authorize" screen for payments the issuer must approve by phone.
private static void startCallForAuthorizeActivity(Activity activity, String merchantPublicKey, Payment payment, PaymentMethod paymentMethod) {
Intent callForAuthorizeIntent = new Intent(activity, CallForAuthorizeActivity.class);
callForAuthorizeIntent.putExtra("merchantPublicKey", merchantPublicKey);
callForAuthorizeIntent.putExtra("payment", JsonUtil.getInstance().toJson(payment));
callForAuthorizeIntent.putExtra("paymentMethod", JsonUtil.getInstance().toJson(paymentMethod));
activity.startActivityForResult(callForAuthorizeIntent, CALL_FOR_AUTHORIZE_REQUEST_CODE);
}
// Shows the pending-payment screen; note no paymentMethod extra on this one.
private static void startPendingActivity(Activity activity, String merchantPublicKey, Payment payment) {
Intent pendingIntent = new Intent(activity, PendingActivity.class);
pendingIntent.putExtra("merchantPublicKey", merchantPublicKey);
pendingIntent.putExtra("payment", JsonUtil.getInstance().toJson(payment));
activity.startActivityForResult(pendingIntent, PENDING_REQUEST_CODE);
}
// Shows the rejected-payment screen (REJECTION_REQUEST_CODE).
private static void startRejectionActivity(Activity activity, String merchantPublicKey, Payment payment, PaymentMethod paymentMethod) {
Intent rejectionIntent = new Intent(activity, RejectionActivity.class);
rejectionIntent.putExtra("merchantPublicKey", merchantPublicKey);
rejectionIntent.putExtra("payment", JsonUtil.getInstance().toJson(payment));
rejectionIntent.putExtra("paymentMethod", JsonUtil.getInstance().toJson(paymentMethod));
activity.startActivityForResult(rejectionIntent, REJECTION_REQUEST_CODE);
}
// Shows the off-line payment instructions screen (INSTRUCTIONS_REQUEST_CODE).
private static void startInstructionsActivity(Activity activity, String merchantPublicKey, Payment payment, PaymentMethod paymentMethod) {
Intent instructionIntent = new Intent(activity, InstructionsActivity.class);
instructionIntent.putExtra("merchantPublicKey", merchantPublicKey);
instructionIntent.putExtra("payment", JsonUtil.getInstance().toJson(payment));
instructionIntent.putExtra("paymentMethod", JsonUtil.getInstance().toJson(paymentMethod));
activity.startActivityForResult(instructionIntent, INSTRUCTIONS_REQUEST_CODE);
}
// Shows the customer's saved cards. Unlike every sibling, this validates its arguments
// itself and serializes with a plain `new Gson()` instead of JsonUtil — NOTE(review):
// confirm the receiving activity parses with a compatible Gson configuration.
// CUSTOMER_CARDS_REQUEST_CODE is declared elsewhere in this class.
private static void startCustomerCardsActivity(Activity activity, List<Card> cards) {
if ((activity == null) || (cards == null)) {
throw new RuntimeException("Invalid parameters");
}
Intent paymentMethodsIntent = new Intent(activity, CustomerCardsActivity.class);
Gson gson = new Gson();
paymentMethodsIntent.putExtra("cards", gson.toJson(cards));
activity.startActivityForResult(paymentMethodsIntent, CUSTOMER_CARDS_REQUEST_CODE);
}
// Launches installment selection. Amount travels as a String to avoid BigDecimal
// serialization issues in the Intent; only added when non-null.
private static void startInstallmentsActivity(Activity activity, BigDecimal amount, Site site,
Token token, String publicKey, List<PayerCost> payerCosts,
PaymentPreference paymentPreference, Issuer issuer,
PaymentMethod paymentMethod, DecorationPreference decorationPreference) {
Intent intent = new Intent(activity, InstallmentsActivity.class);
if(amount != null) {
intent.putExtra("amount", amount.toString());
}
intent.putExtra("paymentMethod", JsonUtil.getInstance().toJson(paymentMethod));
intent.putExtra("token", JsonUtil.getInstance().toJson(token));
intent.putExtra("publicKey", publicKey);
intent.putExtra("issuer", JsonUtil.getInstance().toJson(issuer));
intent.putExtra("site", JsonUtil.getInstance().toJson(site));
intent.putExtra("payerCosts", JsonUtil.getInstance().toJson(payerCosts));
intent.putExtra("paymentPreference", JsonUtil.getInstance().toJson(paymentPreference));
intent.putExtra("decorationPreference", JsonUtil.getInstance().toJson(decorationPreference));
activity.startActivityForResult(intent, INSTALLMENTS_REQUEST_CODE);
}
// Launches issuer selection (ISSUERS_REQUEST_CODE).
private static void startIssuersActivity(Activity activity, String publicKey,
PaymentMethod paymentMethod, Token token,
List<Issuer> issuers, DecorationPreference decorationPreference) {
Intent intent = new Intent(activity, IssuersActivity.class);
intent.putExtra("paymentMethod", JsonUtil.getInstance().toJson(paymentMethod));
intent.putExtra("token", JsonUtil.getInstance().toJson(token));
intent.putExtra("publicKey", publicKey);
intent.putExtra("issuers", JsonUtil.getInstance().toJson(issuers));
intent.putExtra("decorationPreference", JsonUtil.getInstance().toJson(decorationPreference));
activity.startActivityForResult(intent, ISSUERS_REQUEST_CODE);
}
/**
 * Launches the card-guessing form (GUESSING_CARD_REQUEST_CODE). Boolean flags are
 * only attached when non-null so the activity can fall back to its own defaults.
 *
 * Bug fixed: a second, unconditional putExtra("showBankDeals", ...) followed the
 * null-guarded one, defeating the guard by writing the extra (possibly null) in
 * every case; the duplicate has been removed.
 */
private static void startGuessingCardActivity(Activity activity, String key, Boolean requireSecurityCode,
                                              Boolean requireIssuer, Boolean showBankDeals,
                                              PaymentPreference paymentPreference, DecorationPreference decorationPreference,
                                              Token token, List<PaymentMethod> paymentMethodList) {
    Intent guessingCardIntent = new Intent(activity, GuessingCardActivity.class);
    guessingCardIntent.putExtra("publicKey", key);
    if (requireSecurityCode != null) {
        guessingCardIntent.putExtra("requireSecurityCode", requireSecurityCode);
    }
    if (requireIssuer != null) {
        guessingCardIntent.putExtra("requireIssuer", requireIssuer);
    }
    if (showBankDeals != null) {
        guessingCardIntent.putExtra("showBankDeals", showBankDeals);
    }
    guessingCardIntent.putExtra("paymentPreference", JsonUtil.getInstance().toJson(paymentPreference));
    guessingCardIntent.putExtra("token", JsonUtil.getInstance().toJson(token));
    guessingCardIntent.putExtra("paymentMethodList", JsonUtil.getInstance().toJson(paymentMethodList));
    guessingCardIntent.putExtra("decorationPreference", JsonUtil.getInstance().toJson(decorationPreference));
    activity.startActivityForResult(guessingCardIntent, GUESSING_CARD_REQUEST_CODE);
}
// Launches the card vault flow (CARD_VAULT_REQUEST_CODE). Amount is sent as a String
// and only when non-null; other complex extras are JSON-serialized via JsonUtil.
private static void startCardVaultActivity(Activity activity, String key, BigDecimal amount, Site site, Boolean installmentsEnabled,
PaymentPreference paymentPreference, DecorationPreference decorationPreference,
Token token, List<PaymentMethod> paymentMethodList) {
Intent cardVaultIntent = new Intent(activity, CardVaultActivity.class);
cardVaultIntent.putExtra("publicKey", key);
if(amount != null) {
cardVaultIntent.putExtra("amount", amount.toString());
}
cardVaultIntent.putExtra("site", JsonUtil.getInstance().toJson(site));
cardVaultIntent.putExtra("installmentsEnabled", installmentsEnabled);
cardVaultIntent.putExtra("paymentPreference", JsonUtil.getInstance().toJson(paymentPreference));
cardVaultIntent.putExtra("token", JsonUtil.getInstance().toJson(token));
cardVaultIntent.putExtra("paymentMethodList", JsonUtil.getInstance().toJson(paymentMethodList));
cardVaultIntent.putExtra("decorationPreference", JsonUtil.getInstance().toJson(decorationPreference));
activity.startActivityForResult(cardVaultIntent, CARD_VAULT_REQUEST_CODE);
}
// Launches payment method selection (PAYMENT_METHODS_REQUEST_CODE).
private static void startPaymentMethodsActivity(Activity activity, String merchantPublicKey, Boolean showBankDeals, PaymentPreference paymentPreference, DecorationPreference decorationPreference) {
Intent paymentMethodsIntent = new Intent(activity, PaymentMethodsActivity.class);
paymentMethodsIntent.putExtra("merchantPublicKey", merchantPublicKey);
paymentMethodsIntent.putExtra("showBankDeals", showBankDeals);
paymentMethodsIntent.putExtra("paymentPreference", JsonUtil.getInstance().toJson(paymentPreference));
paymentMethodsIntent.putExtra("decorationPreference", JsonUtil.getInstance().toJson(decorationPreference));
activity.startActivityForResult(paymentMethodsIntent, PAYMENT_METHODS_REQUEST_CODE);
}
// Launches the payment vault (payment method search) flow (PAYMENT_VAULT_REQUEST_CODE).
// NOTE(review): amount.toString() is called unconditionally here, unlike the null-guarded
// siblings — the caller validates amount != null before reaching this point.
private static void startPaymentVaultActivity(Activity activity, String merchantPublicKey, String merchantBaseUrl,
String merchantGetCustomerUri, String merchantAccessToken, BigDecimal amount,
Site site, Boolean installmentsEnabled, Boolean showBankDeals, PaymentPreference paymentPreference,
DecorationPreference decorationPreference, PaymentMethodSearch paymentMethodSearch) {
Intent vaultIntent = new Intent(activity, PaymentVaultActivity.class);
vaultIntent.putExtra("merchantPublicKey", merchantPublicKey);
vaultIntent.putExtra("merchantBaseUrl", merchantBaseUrl);
vaultIntent.putExtra("merchantGetCustomerUri", merchantGetCustomerUri);
vaultIntent.putExtra("merchantAccessToken", merchantAccessToken);
vaultIntent.putExtra("amount", amount.toString());
vaultIntent.putExtra("site", JsonUtil.getInstance().toJson(site));
vaultIntent.putExtra("installmentsEnabled", installmentsEnabled);
vaultIntent.putExtra("showBankDeals", showBankDeals);
vaultIntent.putExtra("paymentMethodSearch", JsonUtil.getInstance().toJson(paymentMethodSearch));
vaultIntent.putExtra("paymentPreference", JsonUtil.getInstance().toJson(paymentPreference));
vaultIntent.putExtra("decorationPreference", JsonUtil.getInstance().toJson(decorationPreference));
activity.startActivityForResult(vaultIntent, PAYMENT_VAULT_REQUEST_CODE);
}
// Deprecated manual card entry screen (NEW_CARD_REQUEST_CODE); kept for backward
// compatibility with older integrations.
private static void startNewCardActivity(Activity activity, String keyType, String key, PaymentMethod paymentMethod, Boolean requireSecurityCode) {
Intent newCardIntent = new Intent(activity, com.mercadopago.NewCardActivity.class);
newCardIntent.putExtra("keyType", keyType);
newCardIntent.putExtra("key", key);
newCardIntent.putExtra("paymentMethod", JsonUtil.getInstance().toJson(paymentMethod));
if (requireSecurityCode != null) {
newCardIntent.putExtra("requireSecurityCode", requireSecurityCode);
}
activity.startActivityForResult(newCardIntent, NEW_CARD_REQUEST_CODE);
}
// Launches the legacy vault flow (VAULT_REQUEST_CODE); supportedPaymentTypes is
// serialized as a JSON string list via putListExtra.
private static void startVaultActivity(Activity activity, String merchantPublicKey, String merchantBaseUrl, String merchantGetCustomerUri, String merchantAccessToken, BigDecimal amount, Site site, List<String> supportedPaymentTypes, Boolean showBankDeals) {
Intent vaultIntent = new Intent(activity, VaultActivity.class);
vaultIntent.putExtra("merchantPublicKey", merchantPublicKey);
vaultIntent.putExtra("merchantBaseUrl", merchantBaseUrl);
vaultIntent.putExtra("merchantGetCustomerUri", merchantGetCustomerUri);
vaultIntent.putExtra("merchantAccessToken", merchantAccessToken);
vaultIntent.putExtra("site", JsonUtil.getInstance().toJson(site));
vaultIntent.putExtra("amount", amount.toString());
putListExtra(vaultIntent, "supportedPaymentTypes", supportedPaymentTypes);
vaultIntent.putExtra("showBankDeals", showBankDeals);
activity.startActivityForResult(vaultIntent, VAULT_REQUEST_CODE);
}
// Serializes a string list to JSON and attaches it to the intent under listName.
// Null lists are skipped so the extra stays absent rather than becoming "null".
private static void putListExtra(Intent intent, String listName, List<String> list) {
    if (list == null) {
        return;
    }
    Type stringListType = new TypeToken<List<String>>(){}.getType();
    intent.putExtra(listName, new Gson().toJson(list, stringListType));
}
/**
 * Fluent builder for {@link MercadoPago}. Requires a non-null context and a key
 * with a valid type (public or private); {@link #build()} enforces both.
 *
 * Bug fixed: setPublicKey assigned mKeyType twice; the duplicate line was removed.
 */
public static class Builder {
    private Context mContext;
    private String mKey;
    private String mKeyType;

    public Builder() {
        mContext = null;
        mKey = null;
    }

    // Android context used by the SDK's HTTP client and trackers; must be non-null.
    public Builder setContext(Context context) {
        if (context == null) throw new IllegalArgumentException("context is null");
        this.mContext = context;
        return this;
    }

    // Sets the credential together with an explicit key type.
    public Builder setKey(String key, String keyType) {
        this.mKey = key;
        this.mKeyType = keyType;
        return this;
    }

    // Convenience for a private-key credential.
    public Builder setPrivateKey(String key) {
        this.mKey = key;
        this.mKeyType = MercadoPago.KEY_TYPE_PRIVATE;
        return this;
    }

    // Convenience for a public-key credential.
    public Builder setPublicKey(String key) {
        this.mKey = key;
        this.mKeyType = MercadoPago.KEY_TYPE_PUBLIC;
        return this;
    }

    /**
     * Validates the configured state and creates the SDK instance.
     *
     * @throws IllegalStateException    if context, key, or key type is missing
     * @throws IllegalArgumentException if the key type is not public or private
     */
    public MercadoPago build() {
        if (this.mContext == null) throw new IllegalStateException("context is null");
        if (this.mKey == null) throw new IllegalStateException("key is null");
        if (this.mKeyType == null) throw new IllegalStateException("key type is null");
        if ((!this.mKeyType.equals(MercadoPago.KEY_TYPE_PRIVATE)) &&
                (!this.mKeyType.equals(MercadoPago.KEY_TYPE_PUBLIC))) throw new IllegalArgumentException("invalid key type");
        return new MercadoPago(this);
    }
}
public static class StartActivityBuilder {
// State collected by the fluent setters below; each startXxxActivity method validates
// only the subset it needs before launching.
private Activity mActivity;
private BigDecimal mAmount;
private List<Card> mCards;
private String mCheckoutPreferenceId;
private String mKey;
private String mKeyType;
private String mMerchantAccessToken;
private String mMerchantBaseUrl;
private String mMerchantGetCustomerUri;
private List<PayerCost> mPayerCosts;
private List<Issuer> mIssuers;
private Payment mPayment;
private PaymentMethod mPaymentMethod;
private List<PaymentMethod> mPaymentMethodList;
private Boolean mRequireIssuer;
private Boolean mRequireSecurityCode;
private Boolean mShowBankDeals;
private PaymentMethodSearch mPaymentMethodSearch;
private PaymentPreference mPaymentPreference;
private Token mToken;
private Issuer mIssuer;
private Site mSite;
private DecorationPreference mDecorationPreference;
private Boolean mInstallmentsEnabled;
private List<String> mSupportedPaymentTypes;
// Defaults to a public key type, since most launch methods require it.
public StartActivityBuilder() {
mActivity = null;
mKey = null;
mKeyType = KEY_TYPE_PUBLIC;
}
/**
 * Sets the activity used to launch SDK screens; must be non-null.
 * Bug fixed: the error message wrongly said "context is null" for a null activity.
 *
 * @throws IllegalArgumentException if activity is null
 */
public StartActivityBuilder setActivity(Activity activity) {
    if (activity == null) throw new IllegalArgumentException("activity is null");
    this.mActivity = activity;
    return this;
}
// --- Fluent setters; each stores its value and returns this for chaining. ---
public StartActivityBuilder setIssuer(Issuer issuer) {
this.mIssuer = issuer;
return this;
}
public StartActivityBuilder setAmount(BigDecimal amount) {
this.mAmount = amount;
return this;
}
public StartActivityBuilder setCards(List<Card> cards) {
this.mCards = cards;
return this;
}
public StartActivityBuilder setCheckoutPreferenceId(String checkoutPreferenceId) {
this.mCheckoutPreferenceId = checkoutPreferenceId;
return this;
}
// Also forces the key type to public (the only type most launch methods accept).
public StartActivityBuilder setPublicKey(String key) {
this.mKey = key;
this.mKeyType = MercadoPago.KEY_TYPE_PUBLIC;
return this;
}
public StartActivityBuilder setMerchantAccessToken(String merchantAccessToken) {
this.mMerchantAccessToken = merchantAccessToken;
return this;
}
public StartActivityBuilder setMerchantBaseUrl(String merchantBaseUrl) {
this.mMerchantBaseUrl = merchantBaseUrl;
return this;
}
public StartActivityBuilder setMerchantGetCustomerUri(String merchantGetCustomerUri) {
this.mMerchantGetCustomerUri = merchantGetCustomerUri;
return this;
}
public StartActivityBuilder setPayerCosts(List<PayerCost> payerCosts) {
this.mPayerCosts = payerCosts;
return this;
}
public StartActivityBuilder setIssuers(List<Issuer> issuers) {
this.mIssuers = issuers;
return this;
}
public StartActivityBuilder setPayment(Payment payment) {
this.mPayment = payment;
return this;
}
public StartActivityBuilder setPaymentMethod(PaymentMethod paymentMethod) {
this.mPaymentMethod = paymentMethod;
return this;
}
public StartActivityBuilder setSupportedPaymentMethods(List<PaymentMethod> paymentMethodList) {
this.mPaymentMethodList = paymentMethodList;
return this;
}
public StartActivityBuilder setRequireSecurityCode(Boolean requireSecurityCode) {
this.mRequireSecurityCode = requireSecurityCode;
return this;
}
public StartActivityBuilder setRequireIssuer(Boolean requireIssuer) {
this.mRequireIssuer = requireIssuer;
return this;
}
// NOTE(review): takes primitive boolean while sibling flags take Boolean —
// confirm whether a nullable overload is needed for consistency.
public StartActivityBuilder setShowBankDeals(boolean showBankDeals) {
this.mShowBankDeals = showBankDeals;
return this;
}
public StartActivityBuilder setPaymentMethodSearch(PaymentMethodSearch paymentMethodSearch) {
this.mPaymentMethodSearch = paymentMethodSearch;
return this;
}
public StartActivityBuilder setPaymentPreference(PaymentPreference paymentPreference) {
this.mPaymentPreference = paymentPreference;
return this;
}
public StartActivityBuilder setToken(Token token) {
this.mToken = token;
return this;
}
public StartActivityBuilder setSite(Site site) {
this.mSite = site;
return this;
}
public StartActivityBuilder setInstallmentsEnabled(Boolean installmentsEnabled) {
this.mInstallmentsEnabled = installmentsEnabled;
return this;
}
public StartActivityBuilder setDecorationPreference(DecorationPreference decorationPreference) {
this.mDecorationPreference = decorationPreference;
return this;
}
// Used only by the deprecated startVaultActivity flow.
@Deprecated
public StartActivityBuilder setSupportedPaymentTypes(List<String> supportedPaymentTypes) {
this.mSupportedPaymentTypes = supportedPaymentTypes;
return this;
}
// Validates required state, then delegates to MercadoPago.startBankDealsActivity.
// All public-key-only launchers follow the same pattern: IllegalStateException for
// missing state, RuntimeException for a non-public key type.
public void startBankDealsActivity() {
if (this.mActivity == null) throw new IllegalStateException("activity is null");
if (this.mKey == null) throw new IllegalStateException("key is null");
if (this.mKeyType == null) throw new IllegalStateException("key type is null");
if (this.mKeyType.equals(KEY_TYPE_PUBLIC)) {
MercadoPago.startBankDealsActivity(this.mActivity, this.mKey, this.mDecorationPreference);
} else {
throw new RuntimeException("Unsupported key type for this method");
}
}
// Launches the full checkout flow; requires a checkout preference id.
public void startCheckoutActivity() {
if (this.mActivity == null) throw new IllegalStateException("activity is null");
if (this.mCheckoutPreferenceId == null) throw new IllegalStateException("checkout preference id is null");
if (this.mKey == null) throw new IllegalStateException("key is null");
if (this.mKeyType == null) throw new IllegalStateException("key type is null");
if (this.mKeyType.equals(KEY_TYPE_PUBLIC)) {
MercadoPago.startCheckoutActivity(this.mActivity, this.mKey,
this.mCheckoutPreferenceId, this.mShowBankDeals, this.mDecorationPreference);
} else {
throw new RuntimeException("Unsupported key type for this method");
}
}
// Launches the payment result screen; requires payment and payment method.
public void startPaymentResultActivity() {
if (this.mActivity == null) throw new IllegalStateException("activity is null");
if (this.mPayment == null) throw new IllegalStateException("payment is null");
if (this.mPaymentMethod == null) throw new IllegalStateException("payment method is null");
if (this.mKey == null) throw new IllegalStateException("key is null");
if (this.mKeyType == null) throw new IllegalStateException("key type is null");
if (this.mKeyType.equals(KEY_TYPE_PUBLIC)) {
MercadoPago.startPaymentResultActivity(this.mActivity, this.mKey, this.mPayment, this.mPaymentMethod);
} else {
throw new RuntimeException("Unsupported key type for this method");
}
}
// Launches the approved-payment ("congrats") screen.
public void startCongratsActivity() {
if (this.mActivity == null) throw new IllegalStateException("activity is null");
if (this.mPayment == null) throw new IllegalStateException("payment is null");
if (this.mPaymentMethod == null) throw new IllegalStateException("payment method is null");
if (this.mKey == null) throw new IllegalStateException("key is null");
if (this.mKeyType == null) throw new IllegalStateException("key type is null");
if (this.mKeyType.equals(KEY_TYPE_PUBLIC)) {
MercadoPago.startCongratsActivity(this.mActivity, this.mKey, this.mPayment, this.mPaymentMethod);
} else {
throw new RuntimeException("Unsupported key type for this method");
}
}
// Launches the call-for-authorize screen; requires payment and payment method.
public void startCallForAuthorizeActivity() {
if (this.mActivity == null) throw new IllegalStateException("activity is null");
if (this.mPayment == null) throw new IllegalStateException("payment is null");
if (this.mPaymentMethod == null) throw new IllegalStateException("payment method is null");
if (this.mKey == null) throw new IllegalStateException("key is null");
if (this.mKeyType == null) throw new IllegalStateException("key type is null");
if (this.mKeyType.equals(KEY_TYPE_PUBLIC)) {
MercadoPago.startCallForAuthorizeActivity(this.mActivity, this.mKey, this.mPayment, this.mPaymentMethod);
} else {
throw new RuntimeException("Unsupported key type for this method");
}
}
// Launches the pending-payment screen; payment method is not required here.
public void startPendingActivity() {
if (this.mActivity == null) throw new IllegalStateException("activity is null");
if (this.mPayment == null) throw new IllegalStateException("payment is null");
if (this.mKey == null) throw new IllegalStateException("key is null");
if (this.mKeyType == null) throw new IllegalStateException("key type is null");
if (this.mKeyType.equals(KEY_TYPE_PUBLIC)) {
MercadoPago.startPendingActivity(this.mActivity, this.mKey, this.mPayment);
} else {
throw new RuntimeException("Unsupported key type for this method");
}
}
// Launches the rejected-payment screen.
public void startRejectionActivity() {
if (this.mActivity == null) throw new IllegalStateException("activity is null");
if (this.mPayment == null) throw new IllegalStateException("payment is null");
if (this.mPaymentMethod == null) throw new IllegalStateException("payment method is null");
if (this.mKey == null) throw new IllegalStateException("key is null");
if (this.mKeyType == null) throw new IllegalStateException("key type is null");
if (this.mKeyType.equals(KEY_TYPE_PUBLIC)) {
MercadoPago.startRejectionActivity(this.mActivity, this.mKey, this.mPayment, this.mPaymentMethod);
} else {
throw new RuntimeException("Unsupported key type for this method");
}
}
// Launches the off-line payment instructions screen.
public void startInstructionsActivity() {
if (this.mActivity == null) throw new IllegalStateException("activity is null");
if (this.mPayment == null) throw new IllegalStateException("payment is null");
if (this.mPaymentMethod == null) throw new IllegalStateException("payment method is null");
if (this.mKey == null) throw new IllegalStateException("key is null");
if (this.mKeyType == null) throw new IllegalStateException("key type is null");
if (this.mKeyType.equals(KEY_TYPE_PUBLIC)) {
MercadoPago.startInstructionsActivity(this.mActivity, this.mKey, this.mPayment, this.mPaymentMethod);
} else {
throw new RuntimeException("Unsupported key type for this method");
}
}
// Shows the customer's saved cards; no key required for this screen.
public void startCustomerCardsActivity() {
if (this.mActivity == null) throw new IllegalStateException("activity is null");
if (this.mCards == null) throw new IllegalStateException("cards is null");
MercadoPago.startCustomerCardsActivity(this.mActivity, this.mCards);
}
// Launches installment selection. Key, issuer, and payment method are only required
// when payer costs were NOT pre-fetched (the activity must query them itself).
public void startInstallmentsActivity() {
if (this.mActivity == null) throw new IllegalStateException("activity is null");
if (this.mSite == null) throw new IllegalStateException("site is null");
if (this.mAmount == null) throw new IllegalStateException("amount is null");
if(mPayerCosts == null) {
if (this.mKey == null) throw new IllegalStateException("key is null");
if (this.mIssuer == null) throw new IllegalStateException("issuer is null");
if (this.mPaymentMethod == null) throw new IllegalStateException("payment method is null");
}
MercadoPago.startInstallmentsActivity(mActivity, mAmount, mSite, mToken,
mKey, mPayerCosts, mPaymentPreference, mIssuer, mPaymentMethod, mDecorationPreference);
}
// Launches issuer selection; issuers list may be null (the activity fetches them).
public void startIssuersActivity() {
if (this.mActivity == null) throw new IllegalStateException("activity is null");
if (this.mKey == null) throw new IllegalStateException("key is null");
if (this.mPaymentMethod == null) throw new IllegalStateException("payment method is null");
MercadoPago.startIssuersActivity(this.mActivity, this.mKey, this.mPaymentMethod,
this.mToken, this.mIssuers, this.mDecorationPreference);
}
// Launches the card-guessing form; optional flags are forwarded as-is.
public void startGuessingCardActivity() {
if (this.mActivity == null) throw new IllegalStateException("activity is null");
if (this.mKey == null) throw new IllegalStateException("key is null");
if (this.mKeyType == null) throw new IllegalStateException("key type is null");
MercadoPago.startGuessingCardActivity(this.mActivity, this.mKey, this.mRequireSecurityCode,
this.mRequireIssuer, this.mShowBankDeals, this.mPaymentPreference, this.mDecorationPreference,
this.mToken, this.mPaymentMethodList);
}
// Launches the card vault flow; amount and site become mandatory only when
// installments are enabled.
public void startCardVaultActivity() {
if (this.mActivity == null) throw new IllegalStateException("activity is null");
if (this.mKey == null) throw new IllegalStateException("key is null");
if (this.mInstallmentsEnabled != null && this.mInstallmentsEnabled) {
if (this.mAmount == null) throw new IllegalStateException("amount is null");
if (this.mSite == null) throw new IllegalStateException("site is null");
}
MercadoPago.startCardVaultActivity(this.mActivity, this.mKey, this.mAmount, this.mSite, this.mInstallmentsEnabled,
this.mPaymentPreference, this.mDecorationPreference, this.mToken, this.mPaymentMethodList);
}
// Launches payment method selection (public key required).
public void startPaymentMethodsActivity() {
if (this.mActivity == null) throw new IllegalStateException("activity is null");
if (this.mKey == null) throw new IllegalStateException("key is null");
if (this.mKeyType == null) throw new IllegalStateException("key type is null");
if (this.mKeyType.equals(KEY_TYPE_PUBLIC)) {
MercadoPago.startPaymentMethodsActivity(this.mActivity, this.mKey,
this.mShowBankDeals, this.mPaymentPreference, this.mDecorationPreference);
} else {
throw new RuntimeException("Unsupported key type for this method");
}
}
public void startPaymentVaultActivity() {
if (this.mActivity == null) throw new IllegalStateException("activity is null");
if (this.mAmount == null) throw new IllegalStateException("amount is null");
if (this.mSite == null) throw new IllegalStateException("site is null");
if (this.mKey == null) throw new IllegalStateException("key is null");
if (this.mKeyType == null) throw new IllegalStateException("key type is null");
if (this.mKeyType.equals(KEY_TYPE_PUBLIC)) {
MercadoPago.startPaymentVaultActivity(this.mActivity, this.mKey, this.mMerchantBaseUrl,
this.mMerchantGetCustomerUri, this.mMerchantAccessToken,
this.mAmount, this.mSite, this.mInstallmentsEnabled, this.mShowBankDeals,
this.mPaymentPreference, this.mDecorationPreference, this.mPaymentMethodSearch);
} else {
throw new RuntimeException("Unsupported key type for this method");
}
}
@Deprecated
public void startNewCardActivity() {
if (this.mActivity == null) throw new IllegalStateException("activity is null");
if (this.mKey == null) throw new IllegalStateException("key is null");
if (this.mKeyType == null) throw new IllegalStateException("key type is null");
if (this.mPaymentMethod == null) throw new IllegalStateException("payment method is null");
MercadoPago.startNewCardActivity(this.mActivity, this.mKeyType, this.mKey,
this.mPaymentMethod, this.mRequireSecurityCode);
}
/**
 * Launches VaultActivity (legacy vault flow).
 * Requires activity, amount, key, key type and site; only supported with a public key.
 *
 * @throws IllegalStateException if a required builder field was not set
 * @throws RuntimeException if the configured key is not a public key
 */
public void startVaultActivity() {
    if (this.mActivity == null) throw new IllegalStateException("activity is null");
    if (this.mAmount == null) throw new IllegalStateException("amount is null");
    if (this.mKey == null) throw new IllegalStateException("key is null");
    if (this.mKeyType == null) throw new IllegalStateException("key type is null");
    // Bug fix: this check previously threw with the message "key type is null"
    // when the *site* was missing, which misled callers during debugging.
    if (this.mSite == null) throw new IllegalStateException("site is null");
    if (this.mKeyType.equals(KEY_TYPE_PUBLIC)) {
        MercadoPago.startVaultActivity(this.mActivity, this.mKey, this.mMerchantBaseUrl,
                this.mMerchantGetCustomerUri, this.mMerchantAccessToken,
                this.mAmount, this.mSite, this.mSupportedPaymentTypes, this.mShowBankDeals);
    } else {
        throw new RuntimeException("Unsupported key type for this method");
    }
}
}
}
|
sdk/src/main/java/com/mercadopago/core/MercadoPago.java
|
package com.mercadopago.core;
import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import com.google.gson.Gson;
import com.google.gson.reflect.TypeToken;
import com.mercadopago.BankDealsActivity;
import com.mercadopago.CallForAuthorizeActivity;
import com.mercadopago.CardVaultActivity;
import com.mercadopago.CheckoutActivity;
import com.mercadopago.CongratsActivity;
import com.mercadopago.CustomerCardsActivity;
import com.mercadopago.GuessingCardActivity;
import com.mercadopago.InstallmentsActivity;
import com.mercadopago.InstructionsActivity;
import com.mercadopago.IssuersActivity;
import com.mercadopago.PaymentMethodsActivity;
import com.mercadopago.PaymentResultActivity;
import com.mercadopago.PaymentVaultActivity;
import com.mercadopago.PendingActivity;
import com.mercadopago.RejectionActivity;
import com.mercadopago.VaultActivity;
import com.mercadopago.adapters.ErrorHandlingCallAdapter;
import com.mercadopago.callbacks.Callback;
import com.mercadopago.model.BankDeal;
import com.mercadopago.model.Card;
import com.mercadopago.model.CardToken;
import com.mercadopago.model.CheckoutPreference;
import com.mercadopago.model.DecorationPreference;
import com.mercadopago.model.IdentificationType;
import com.mercadopago.model.Installment;
import com.mercadopago.model.Instruction;
import com.mercadopago.model.Issuer;
import com.mercadopago.model.PayerCost;
import com.mercadopago.model.Payment;
import com.mercadopago.model.PaymentIntent;
import com.mercadopago.model.PaymentMethod;
import com.mercadopago.model.PaymentMethodSearch;
import com.mercadopago.model.PaymentPreference;
import com.mercadopago.model.SavedCardToken;
import com.mercadopago.model.Site;
import com.mercadopago.model.Token;
import com.mercadopago.mptracker.MPTracker;
import com.mercadopago.services.BankDealService;
import com.mercadopago.services.GatewayService;
import com.mercadopago.services.IdentificationService;
import com.mercadopago.services.PaymentService;
import com.mercadopago.util.HttpClientUtil;
import com.mercadopago.util.JsonUtil;
import java.lang.reflect.Type;
import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.List;
import retrofit2.Retrofit;
import retrofit2.converter.gson.GsonConverterFactory;
/**
 * Entry point to the MercadoPago SDK: wraps the REST API services behind a
 * shared Retrofit client and exposes static helpers that launch the SDK's
 * activities. Instances are created through {@link Builder}.
 */
public class MercadoPago {
// Key types accepted by Builder.setKey / validated in Builder.build().
public static final String KEY_TYPE_PUBLIC = "public_key";
public static final String KEY_TYPE_PRIVATE = "private_key";
// Request codes used with startActivityForResult for each SDK screen.
public static final int CUSTOMER_CARDS_REQUEST_CODE = 0;
public static final int PAYMENT_METHODS_REQUEST_CODE = 1;
public static final int INSTALLMENTS_REQUEST_CODE = 2;
public static final int ISSUERS_REQUEST_CODE = 3;
public static final int NEW_CARD_REQUEST_CODE = 4;
public static final int RESULT_REQUEST_CODE = 5;
public static final int VAULT_REQUEST_CODE = 6;
public static final int CALL_FOR_AUTHORIZE_REQUEST_CODE = 7;
public static final int PENDING_REQUEST_CODE = 8;
public static final int REJECTION_REQUEST_CODE = 9;
public static final int PAYMENT_VAULT_REQUEST_CODE = 10;
public static final int BANK_DEALS_REQUEST_CODE = 11;
public static final int CHECKOUT_REQUEST_CODE = 12;
public static final int GUESSING_CARD_REQUEST_CODE = 13;
public static final int INSTRUCTIONS_REQUEST_CODE = 14;
public static final int CARD_VAULT_REQUEST_CODE = 15;
public static final int CONGRATS_REQUEST_CODE = 16;
// Number of leading card digits that make up a complete BIN.
public static final int BIN_LENGTH = 6;
private static final String MP_API_BASE_URL = "https://api.mercadopago.com";
// Credentials and context captured from the Builder.
private String mKey = null;
private String mKeyType = null;
private Context mContext = null;
// Shared Retrofit instance, configured once in the constructor.
Retrofit mRetrofit;
// Private: instances are built via Builder. Configures the shared Retrofit
// client for the MercadoPago API with Gson conversion and error-handling
// call adaption.
private MercadoPago(Builder builder) {
this.mContext = builder.mContext;
this.mKey = builder.mKey;
this.mKeyType = builder.mKeyType;
// NOTE(review): disables HTTP keep-alive process-wide — presumably to avoid
// stale pooled connections; the original intent is not documented here.
System.setProperty("http.keepAlive", "false");
mRetrofit = new Retrofit.Builder()
.baseUrl(MP_API_BASE_URL)
.addConverterFactory(GsonConverterFactory.create(JsonUtil.getInstance().getGson()))
// Timeout arguments (10, 20, 20) — presumably connect/read/write seconds;
// see HttpClientUtil.getClient for the exact semantics.
.client(HttpClientUtil.getClient(this.mContext, 10, 20, 20))
.addCallAdapterFactory(new ErrorHandlingCallAdapter.ErrorHandlingCallAdapterFactory())
.build();
}
/**
 * Fetches a checkout preference by id and delivers it through the callback.
 * Only available when this instance was built with a public key.
 */
public void getPreference(String checkoutPreferenceId, Callback<CheckoutPreference> callback) {
    if (!this.mKeyType.equals(KEY_TYPE_PUBLIC)) {
        throw new RuntimeException("Unsupported key type for this method");
    }
    MPTracker.getInstance().trackEvent("NO_SCREEN", "GET_PREFERENCE", "1", mKeyType, "MLA", "1.0", mContext);
    PaymentService preferenceService = mRetrofit.create(PaymentService.class);
    preferenceService.getPreference(checkoutPreferenceId, this.mKey).enqueue(callback);
}
// Creates a payment from the given intent. Public key only.
public void createPayment(final PaymentIntent paymentIntent, final Callback<Payment> callback) {
if (this.mKeyType.equals(KEY_TYPE_PUBLIC)) {
MPTracker.getInstance().trackEvent("NO_SCREEN", "CREATE_PAYMENT", "1", mKeyType, "MLA", "1.0", mContext);
// Payments use their own Retrofit with longer timeout arguments (10, 40, 40)
// than the shared mRetrofit client (10, 20, 20).
Retrofit paymentsRetrofitAdapter = new Retrofit.Builder()
.baseUrl(MP_API_BASE_URL)
.addConverterFactory(GsonConverterFactory.create(JsonUtil.getInstance().getGson()))
.client(HttpClientUtil.getClient(this.mContext, 10, 40, 40))
.addCallAdapterFactory(new ErrorHandlingCallAdapter.ErrorHandlingCallAdapterFactory())
.build();
PaymentService service = paymentsRetrofitAdapter.create(PaymentService.class);
// The transaction id is sent as a separate string parameter alongside the
// intent body — presumably an idempotency key; confirm against PaymentService.
service.createPayment(String.valueOf(paymentIntent.getTransactionId()), paymentIntent).enqueue(callback);
} else {
throw new RuntimeException("Unsupported key type for this method");
}
}
// Tokenizes a previously saved card. Public key only.
public void createToken(final SavedCardToken savedCardToken, final Callback<Token> callback) {
if (this.mKeyType.equals(KEY_TYPE_PUBLIC)) {
MPTracker.getInstance().trackEvent("NO_SCREEN","CREATE_SAVED_TOKEN","1", mKeyType, "MLA", "1.0", mContext);
// Attach device information before sending the token request to the gateway.
savedCardToken.setDevice(mContext);
GatewayService service = mRetrofit.create(GatewayService.class);
service.getToken(this.mKey, savedCardToken).enqueue(callback);
} else {
throw new RuntimeException("Unsupported key type for this method");
}
}
// Tokenizes freshly entered card data. Public key only.
public void createToken(final CardToken cardToken, final Callback<Token> callback) {
if (this.mKeyType.equals(KEY_TYPE_PUBLIC)) {
MPTracker.getInstance().trackEvent("NO_SCREEN","CREATE_CARD_TOKEN","1", mKeyType, "MLA", "1.0", mContext);
cardToken.setDevice(mContext);
GatewayService service = mRetrofit.create(GatewayService.class);
service.getToken(this.mKey, cardToken).enqueue(callback);
} else {
throw new RuntimeException("Unsupported key type for this method");
}
}
// Fetches current bank deals, localized with the device locale. Public key only.
public void getBankDeals(final Callback<List<BankDeal>> callback) {
if (this.mKeyType.equals(KEY_TYPE_PUBLIC)) {
MPTracker.getInstance().trackEvent("NO_SCREEN","GET_BANK_DEALS","1", mKeyType, "MLA", "1.0", mContext);
BankDealService service = mRetrofit.create(BankDealService.class);
service.getBankDeals(this.mKey, mContext.getResources().getConfiguration().locale.toString()).enqueue(callback);
} else {
throw new RuntimeException("Unsupported key type for this method");
}
}
// Fetches identification types; works with either key type. The key is passed
// in the first parameter slot for public keys and the second for private keys,
// matching the service's two credential parameters.
public void getIdentificationTypes(Callback<List<IdentificationType>> callback) {
IdentificationService service = mRetrofit.create(IdentificationService.class);
if (this.mKeyType.equals(KEY_TYPE_PUBLIC)) {
MPTracker.getInstance().trackEvent("NO_SCREEN","GET_IDENTIFICATION_TYPES","1", mKeyType, "MLA", "1.0", mContext);
service.getIdentificationTypes(this.mKey, null).enqueue(callback);
} else {
service.getIdentificationTypes(null, this.mKey).enqueue(callback);
}
}
// Fetches installment options for a BIN / amount / issuer / payment-method
// combination, localized with the device locale. Public key only.
public void getInstallments(String bin, BigDecimal amount, Long issuerId, String paymentMethodId, Callback<List<Installment>> callback) {
if (this.mKeyType.equals(KEY_TYPE_PUBLIC)) {
MPTracker.getInstance().trackEvent("NO_SCREEN", "GET_INSTALLMENTS", "1", mKeyType, "MLA", "1.0", mContext);
PaymentService service = mRetrofit.create(PaymentService.class);
service.getInstallments(this.mKey, bin, amount, issuerId, paymentMethodId,
mContext.getResources().getConfiguration().locale.toString()).enqueue(callback);
} else {
throw new RuntimeException("Unsupported key type for this method");
}
}
// Fetches the card issuers available for a payment method and BIN. Public key only.
public void getIssuers(String paymentMethodId, String bin, final Callback<List<Issuer>> callback) {
if (this.mKeyType.equals(KEY_TYPE_PUBLIC)) {
MPTracker.getInstance().trackEvent("NO_SCREEN", "GET_ISSUERS", "1", mKeyType, "MLA", "1.0", mContext);
PaymentService service = mRetrofit.create(PaymentService.class);
service.getIssuers(this.mKey, paymentMethodId, bin).enqueue(callback);
} else {
throw new RuntimeException("Unsupported key type for this method");
}
}
/**
 * Retrieves the full list of payment methods for the configured public key.
 */
public void getPaymentMethods(final Callback<List<PaymentMethod>> callback) {
    if (!this.mKeyType.equals(KEY_TYPE_PUBLIC)) {
        throw new RuntimeException("Unsupported key type for this method");
    }
    MPTracker.getInstance().trackEvent("NO_SCREEN","GET_PAYMENT_METHODS","1", mKeyType, "MLA", "1.0", mContext);
    PaymentService paymentMethodsService = mRetrofit.create(PaymentService.class);
    paymentMethodsService.getPaymentMethods(this.mKey).enqueue(callback);
}
/**
 * Searches the payment methods applicable to the given amount, excluding the
 * given payment type ids and payment method ids. Public key only.
 *
 * @param amount transaction amount used to filter applicable methods
 * @param excludedPaymentTypes payment type ids to exclude (may be null)
 * @param excludedPaymentMethods payment method ids to exclude (may be null)
 * @param callback receives the search result
 */
public void getPaymentMethodSearch(BigDecimal amount, List<String> excludedPaymentTypes, List<String> excludedPaymentMethods, final Callback<PaymentMethodSearch> callback) {
    if (this.mKeyType.equals(KEY_TYPE_PUBLIC)) {
        MPTracker.getInstance().trackEvent("NO_SCREEN","GET_PAYMENT_METHOD_SEARCH","1", mKeyType, "MLA", "1.0", mContext);
        PaymentService service = mRetrofit.create(PaymentService.class);
        // Bug fix: the previous separator logic compared every element against the
        // *last element by value*, so lists containing duplicates of the final
        // entry lost their commas (e.g. ["a","b","a"] became "ab,a").
        String excludedPaymentTypesAppended = joinWithCommas(excludedPaymentTypes);
        String excludedPaymentMethodsAppended = joinWithCommas(excludedPaymentMethods);
        service.getPaymentMethodSearch(this.mKey, amount, excludedPaymentTypesAppended, excludedPaymentMethodsAppended).enqueue(callback);
    } else {
        throw new RuntimeException("Unsupported key type for this method");
    }
}
// Joins the given values with commas; returns "" for null or empty input.
private static String joinWithCommas(List<String> values) {
    StringBuilder stringBuilder = new StringBuilder();
    if (values != null) {
        for (int i = 0; i < values.size(); i++) {
            if (i > 0) {
                stringBuilder.append(",");
            }
            stringBuilder.append(values.get(i));
        }
    }
    return stringBuilder.toString();
}
/**
 * Retrieves payment instructions for an existing payment (e.g. how to complete
 * an offline payment). Public key only.
 */
public void getInstructions(Long paymentId, String paymentTypeId, final Callback<Instruction> callback) {
    if (!this.mKeyType.equals(KEY_TYPE_PUBLIC)) {
        throw new RuntimeException("Unsupported key type for this method");
    }
    MPTracker.getInstance().trackEvent("NO_SCREEN","GET_INSTRUCTIONS","1", mKeyType, "MLA", "1.0", mContext);
    PaymentService instructionsService = mRetrofit.create(PaymentService.class);
    instructionsService.getInstruction(paymentId, this.mKey, paymentTypeId).enqueue(callback);
}
/**
 * Filters the given payment methods down to those valid for a complete BIN.
 *
 * @param bin the first BIN_LENGTH digits of a card number
 * @param paymentMethods candidate payment methods
 * @return the subset of payment methods that accept the BIN
 * @throws RuntimeException if the BIN does not have exactly BIN_LENGTH digits
 */
public static List<PaymentMethod> getValidPaymentMethodsForBin(String bin, List<PaymentMethod> paymentMethods){
    if (bin.length() != BIN_LENGTH) {
        throw new RuntimeException("Invalid bin: " + BIN_LENGTH + " digits needed, " + bin.length() + " found");
    }
    List<PaymentMethod> matchingMethods = new ArrayList<>();
    for (PaymentMethod candidate : paymentMethods) {
        if (candidate.isValidForBin(bin)) {
            matchingMethods.add(candidate);
        }
    }
    return matchingMethods;
}
// * Static methods for StartActivityBuilder implementation
// Each helper builds an Intent for one SDK screen, serializing complex
// parameters to JSON via JsonUtil, then starts it with its request code.
private static void startBankDealsActivity(Activity activity, String publicKey, DecorationPreference decorationPreference) {
Intent bankDealsIntent = new Intent(activity, BankDealsActivity.class);
bankDealsIntent.putExtra("merchantPublicKey", publicKey);
bankDealsIntent.putExtra("decorationPreference", JsonUtil.getInstance().toJson(decorationPreference));
activity.startActivityForResult(bankDealsIntent, BANK_DEALS_REQUEST_CODE);
}
private static void startCheckoutActivity(Activity activity, String merchantPublicKey, String checkoutPreferenceId, Boolean showBankDeals, DecorationPreference decorationPreference) {
Intent checkoutIntent = new Intent(activity, CheckoutActivity.class);
checkoutIntent.putExtra("merchantPublicKey", merchantPublicKey);
checkoutIntent.putExtra("checkoutPreferenceId", checkoutPreferenceId);
checkoutIntent.putExtra("showBankDeals", showBankDeals);
checkoutIntent.putExtra("decorationPreference", JsonUtil.getInstance().toJson(decorationPreference));
activity.startActivityForResult(checkoutIntent, CHECKOUT_REQUEST_CODE);
}
private static void startPaymentResultActivity(Activity activity, String merchantPublicKey, Payment payment, PaymentMethod paymentMethod) {
Intent resultIntent = new Intent(activity, PaymentResultActivity.class);
resultIntent.putExtra("merchantPublicKey", merchantPublicKey);
resultIntent.putExtra("payment", JsonUtil.getInstance().toJson(payment));
resultIntent.putExtra("paymentMethod", JsonUtil.getInstance().toJson(paymentMethod));
activity.startActivityForResult(resultIntent, RESULT_REQUEST_CODE);
}
private static void startCongratsActivity(Activity activity, String merchantPublicKey, Payment payment, PaymentMethod paymentMethod) {
Intent congratsIntent = new Intent(activity, CongratsActivity.class);
congratsIntent.putExtra("merchantPublicKey", merchantPublicKey);
congratsIntent.putExtra("payment", JsonUtil.getInstance().toJson(payment));
congratsIntent.putExtra("paymentMethod", JsonUtil.getInstance().toJson(paymentMethod));
activity.startActivityForResult(congratsIntent, CONGRATS_REQUEST_CODE);
}
private static void startCallForAuthorizeActivity(Activity activity, String merchantPublicKey, Payment payment, PaymentMethod paymentMethod) {
Intent callForAuthorizeIntent = new Intent(activity, CallForAuthorizeActivity.class);
callForAuthorizeIntent.putExtra("merchantPublicKey", merchantPublicKey);
callForAuthorizeIntent.putExtra("payment", JsonUtil.getInstance().toJson(payment));
callForAuthorizeIntent.putExtra("paymentMethod", JsonUtil.getInstance().toJson(paymentMethod));
activity.startActivityForResult(callForAuthorizeIntent, CALL_FOR_AUTHORIZE_REQUEST_CODE);
}
// Pending screen only needs the payment, not the payment method.
private static void startPendingActivity(Activity activity, String merchantPublicKey, Payment payment) {
Intent pendingIntent = new Intent(activity, PendingActivity.class);
pendingIntent.putExtra("merchantPublicKey", merchantPublicKey);
pendingIntent.putExtra("payment", JsonUtil.getInstance().toJson(payment));
activity.startActivityForResult(pendingIntent, PENDING_REQUEST_CODE);
}
private static void startRejectionActivity(Activity activity, String merchantPublicKey, Payment payment, PaymentMethod paymentMethod) {
Intent rejectionIntent = new Intent(activity, RejectionActivity.class);
rejectionIntent.putExtra("merchantPublicKey", merchantPublicKey);
rejectionIntent.putExtra("payment", JsonUtil.getInstance().toJson(payment));
rejectionIntent.putExtra("paymentMethod", JsonUtil.getInstance().toJson(paymentMethod));
activity.startActivityForResult(rejectionIntent, REJECTION_REQUEST_CODE);
}
private static void startInstructionsActivity(Activity activity, String merchantPublicKey, Payment payment, PaymentMethod paymentMethod) {
Intent instructionIntent = new Intent(activity, InstructionsActivity.class);
instructionIntent.putExtra("merchantPublicKey", merchantPublicKey);
instructionIntent.putExtra("payment", JsonUtil.getInstance().toJson(payment));
instructionIntent.putExtra("paymentMethod", JsonUtil.getInstance().toJson(paymentMethod));
activity.startActivityForResult(instructionIntent, INSTRUCTIONS_REQUEST_CODE);
}
// Opens CustomerCardsActivity with the given saved cards serialized as JSON.
private static void startCustomerCardsActivity(Activity activity, List<Card> cards) {
    if (activity == null || cards == null) {
        throw new RuntimeException("Invalid parameters");
    }
    Intent customerCardsIntent = new Intent(activity, CustomerCardsActivity.class);
    customerCardsIntent.putExtra("cards", new Gson().toJson(cards));
    activity.startActivityForResult(customerCardsIntent, CUSTOMER_CARDS_REQUEST_CODE);
}
// Opens InstallmentsActivity with the payer-cost selection context.
private static void startInstallmentsActivity(Activity activity, BigDecimal amount, Site site,
Token token, String publicKey, List<PayerCost> payerCosts,
PaymentPreference paymentPreference, Issuer issuer,
PaymentMethod paymentMethod, DecorationPreference decorationPreference) {
Intent intent = new Intent(activity, InstallmentsActivity.class);
// Amount is optional; only forwarded when present.
if(amount != null) {
intent.putExtra("amount", amount.toString());
}
intent.putExtra("paymentMethod", JsonUtil.getInstance().toJson(paymentMethod));
intent.putExtra("token", JsonUtil.getInstance().toJson(token));
intent.putExtra("publicKey", publicKey);
intent.putExtra("issuer", JsonUtil.getInstance().toJson(issuer));
intent.putExtra("site", JsonUtil.getInstance().toJson(site));
intent.putExtra("payerCosts", JsonUtil.getInstance().toJson(payerCosts));
intent.putExtra("paymentPreference", JsonUtil.getInstance().toJson(paymentPreference));
intent.putExtra("decorationPreference", JsonUtil.getInstance().toJson(decorationPreference));
activity.startActivityForResult(intent, INSTALLMENTS_REQUEST_CODE);
}
// Opens IssuersActivity for bank/issuer selection.
private static void startIssuersActivity(Activity activity, String publicKey,
PaymentMethod paymentMethod, Token token,
List<Issuer> issuers, DecorationPreference decorationPreference) {
Intent intent = new Intent(activity, IssuersActivity.class);
intent.putExtra("paymentMethod", JsonUtil.getInstance().toJson(paymentMethod));
intent.putExtra("token", JsonUtil.getInstance().toJson(token));
intent.putExtra("publicKey", publicKey);
intent.putExtra("issuers", JsonUtil.getInstance().toJson(issuers));
intent.putExtra("decorationPreference", JsonUtil.getInstance().toJson(decorationPreference));
activity.startActivityForResult(intent, ISSUERS_REQUEST_CODE);
}
// Opens GuessingCardActivity with the given configuration. Optional Boolean
// flags are only forwarded when non-null so the activity's own defaults apply.
private static void startGuessingCardActivity(Activity activity, String key, Boolean requireSecurityCode,
                                              Boolean requireIssuer, Boolean showBankDeals,
                                              PaymentPreference paymentPreference, DecorationPreference decorationPreference,
                                              Token token, List<PaymentMethod> paymentMethodList) {
    Intent guessingCardIntent = new Intent(activity, GuessingCardActivity.class);
    guessingCardIntent.putExtra("publicKey", key);
    if (requireSecurityCode != null) {
        guessingCardIntent.putExtra("requireSecurityCode", requireSecurityCode);
    }
    if (requireIssuer != null) {
        guessingCardIntent.putExtra("requireIssuer", requireIssuer);
    }
    if (showBankDeals != null) {
        guessingCardIntent.putExtra("showBankDeals", showBankDeals);
    }
    // Bug fix: removed a second, unconditional putExtra("showBankDeals", ...) that
    // followed the guarded one and overwrote the extra with a possibly-null value,
    // defeating the null guard above.
    guessingCardIntent.putExtra("paymentPreference", JsonUtil.getInstance().toJson(paymentPreference));
    guessingCardIntent.putExtra("token", JsonUtil.getInstance().toJson(token));
    guessingCardIntent.putExtra("paymentMethodList", JsonUtil.getInstance().toJson(paymentMethodList));
    guessingCardIntent.putExtra("decorationPreference", JsonUtil.getInstance().toJson(decorationPreference));
    activity.startActivityForResult(guessingCardIntent, GUESSING_CARD_REQUEST_CODE);
}
// Opens CardVaultActivity, the combined card-entry + installments flow.
private static void startCardVaultActivity(Activity activity, String key, BigDecimal amount, Site site, Boolean installmentsEnabled,
PaymentPreference paymentPreference, DecorationPreference decorationPreference,
Token token, List<PaymentMethod> paymentMethodList) {
Intent cardVaultIntent = new Intent(activity, CardVaultActivity.class);
cardVaultIntent.putExtra("publicKey", key);
// Amount is optional; only forwarded when present.
if(amount != null) {
cardVaultIntent.putExtra("amount", amount.toString());
}
cardVaultIntent.putExtra("site", JsonUtil.getInstance().toJson(site));
cardVaultIntent.putExtra("installmentsEnabled", installmentsEnabled);
cardVaultIntent.putExtra("paymentPreference", JsonUtil.getInstance().toJson(paymentPreference));
cardVaultIntent.putExtra("token", JsonUtil.getInstance().toJson(token));
cardVaultIntent.putExtra("paymentMethodList", JsonUtil.getInstance().toJson(paymentMethodList));
cardVaultIntent.putExtra("decorationPreference", JsonUtil.getInstance().toJson(decorationPreference));
activity.startActivityForResult(cardVaultIntent, CARD_VAULT_REQUEST_CODE);
}
// Opens PaymentMethodsActivity, the flat payment-method list screen.
private static void startPaymentMethodsActivity(Activity activity, String merchantPublicKey, Boolean showBankDeals, PaymentPreference paymentPreference, DecorationPreference decorationPreference) {
Intent paymentMethodsIntent = new Intent(activity, PaymentMethodsActivity.class);
paymentMethodsIntent.putExtra("merchantPublicKey", merchantPublicKey);
paymentMethodsIntent.putExtra("showBankDeals", showBankDeals);
paymentMethodsIntent.putExtra("paymentPreference", JsonUtil.getInstance().toJson(paymentPreference));
paymentMethodsIntent.putExtra("decorationPreference", JsonUtil.getInstance().toJson(decorationPreference));
activity.startActivityForResult(paymentMethodsIntent, PAYMENT_METHODS_REQUEST_CODE);
}
// Opens PaymentVaultActivity (payment-method search/selection).
// NOTE(review): amount.toString() is called unguarded here; callers (the
// StartActivityBuilder) validate amount != null before reaching this point.
private static void startPaymentVaultActivity(Activity activity, String merchantPublicKey, String merchantBaseUrl,
String merchantGetCustomerUri, String merchantAccessToken, BigDecimal amount,
Site site, Boolean installmentsEnabled, Boolean showBankDeals, PaymentPreference paymentPreference,
DecorationPreference decorationPreference, PaymentMethodSearch paymentMethodSearch) {
Intent vaultIntent = new Intent(activity, PaymentVaultActivity.class);
vaultIntent.putExtra("merchantPublicKey", merchantPublicKey);
vaultIntent.putExtra("merchantBaseUrl", merchantBaseUrl);
vaultIntent.putExtra("merchantGetCustomerUri", merchantGetCustomerUri);
vaultIntent.putExtra("merchantAccessToken", merchantAccessToken);
vaultIntent.putExtra("amount", amount.toString());
vaultIntent.putExtra("site", JsonUtil.getInstance().toJson(site));
vaultIntent.putExtra("installmentsEnabled", installmentsEnabled);
vaultIntent.putExtra("showBankDeals", showBankDeals);
vaultIntent.putExtra("paymentMethodSearch", JsonUtil.getInstance().toJson(paymentMethodSearch));
vaultIntent.putExtra("paymentPreference", JsonUtil.getInstance().toJson(paymentPreference));
vaultIntent.putExtra("decorationPreference", JsonUtil.getInstance().toJson(decorationPreference));
activity.startActivityForResult(vaultIntent, PAYMENT_VAULT_REQUEST_CODE);
}
// Opens the legacy NewCardActivity flow.
private static void startNewCardActivity(Activity activity, String keyType, String key, PaymentMethod paymentMethod, Boolean requireSecurityCode) {
Intent newCardIntent = new Intent(activity, com.mercadopago.NewCardActivity.class);
newCardIntent.putExtra("keyType", keyType);
newCardIntent.putExtra("key", key);
newCardIntent.putExtra("paymentMethod", JsonUtil.getInstance().toJson(paymentMethod));
// Only forwarded when non-null so the activity's default applies.
if (requireSecurityCode != null) {
newCardIntent.putExtra("requireSecurityCode", requireSecurityCode);
}
activity.startActivityForResult(newCardIntent, NEW_CARD_REQUEST_CODE);
}
// Opens the legacy VaultActivity flow.
// NOTE(review): amount.toString() is called unguarded; the StartActivityBuilder
// validates amount != null before reaching this point.
private static void startVaultActivity(Activity activity, String merchantPublicKey, String merchantBaseUrl, String merchantGetCustomerUri, String merchantAccessToken, BigDecimal amount, Site site, List<String> supportedPaymentTypes, Boolean showBankDeals) {
Intent vaultIntent = new Intent(activity, VaultActivity.class);
vaultIntent.putExtra("merchantPublicKey", merchantPublicKey);
vaultIntent.putExtra("merchantBaseUrl", merchantBaseUrl);
vaultIntent.putExtra("merchantGetCustomerUri", merchantGetCustomerUri);
vaultIntent.putExtra("merchantAccessToken", merchantAccessToken);
vaultIntent.putExtra("site", JsonUtil.getInstance().toJson(site));
vaultIntent.putExtra("amount", amount.toString());
putListExtra(vaultIntent, "supportedPaymentTypes", supportedPaymentTypes);
vaultIntent.putExtra("showBankDeals", showBankDeals);
activity.startActivityForResult(vaultIntent, VAULT_REQUEST_CODE);
}
// Serializes the list as JSON and stores it under listName; no-op for null lists.
private static void putListExtra(Intent intent, String listName, List<String> list) {
    if (list == null) {
        return;
    }
    Type stringListType = new TypeToken<List<String>>(){}.getType();
    intent.putExtra(listName, new Gson().toJson(list, stringListType));
}
/**
 * Builder for {@link MercadoPago} API clients. A context plus a public or
 * private key must be configured before {@link #build()} succeeds.
 */
public static class Builder {
    private Context mContext;
    private String mKey;
    private String mKeyType;

    public Builder() {
        mContext = null;
        mKey = null;
    }

    public Builder setContext(Context context) {
        if (context == null) throw new IllegalArgumentException("context is null");
        this.mContext = context;
        return this;
    }

    // Sets the key together with an explicit type (KEY_TYPE_PUBLIC or KEY_TYPE_PRIVATE).
    public Builder setKey(String key, String keyType) {
        this.mKey = key;
        this.mKeyType = keyType;
        return this;
    }

    public Builder setPrivateKey(String key) {
        this.mKey = key;
        this.mKeyType = MercadoPago.KEY_TYPE_PRIVATE;
        return this;
    }

    public Builder setPublicKey(String key) {
        this.mKey = key;
        // Cleanup: the key type was previously assigned twice in a row here.
        this.mKeyType = MercadoPago.KEY_TYPE_PUBLIC;
        return this;
    }

    /**
     * Validates the configured fields and creates the client.
     *
     * @throws IllegalStateException if context, key or key type is missing
     * @throws IllegalArgumentException if the key type is not a known constant
     */
    public MercadoPago build() {
        if (this.mContext == null) throw new IllegalStateException("context is null");
        if (this.mKey == null) throw new IllegalStateException("key is null");
        if (this.mKeyType == null) throw new IllegalStateException("key type is null");
        if ((!this.mKeyType.equals(MercadoPago.KEY_TYPE_PRIVATE)) &&
                (!this.mKeyType.equals(MercadoPago.KEY_TYPE_PUBLIC))) throw new IllegalArgumentException("invalid key type");
        return new MercadoPago(this);
    }
}
/**
 * Fluent builder used to launch the SDK's activities. Populate the fields the
 * target screen needs via the setters, then call one of the start* methods.
 */
public static class StartActivityBuilder {
// Fields collected by the setters; each start* method validates the subset it requires.
private Activity mActivity;
private BigDecimal mAmount;
private List<Card> mCards;
private String mCheckoutPreferenceId;
private String mKey;
private String mKeyType;
private String mMerchantAccessToken;
private String mMerchantBaseUrl;
private String mMerchantGetCustomerUri;
private List<PayerCost> mPayerCosts;
private List<Issuer> mIssuers;
private Payment mPayment;
private PaymentMethod mPaymentMethod;
private List<PaymentMethod> mPaymentMethodList;
private Boolean mRequireIssuer;
private Boolean mRequireSecurityCode;
private Boolean mShowBankDeals;
private PaymentMethodSearch mPaymentMethodSearch;
private PaymentPreference mPaymentPreference;
private Token mToken;
private Issuer mIssuer;
private Site mSite;
private DecorationPreference mDecorationPreference;
private Boolean mInstallmentsEnabled;
private List<String> mSupportedPaymentTypes;
public StartActivityBuilder() {
mActivity = null;
mKey = null;
// Defaults to public key type; most start* methods only support public keys.
mKeyType = KEY_TYPE_PUBLIC;
}
// Sets the activity used to launch SDK screens.
public StartActivityBuilder setActivity(Activity activity) {
    // Bug fix: the exception message previously read "context is null" even
    // though the missing argument is the activity.
    if (activity == null) throw new IllegalArgumentException("activity is null");
    this.mActivity = activity;
    return this;
}
// Simple fluent setters: each stores its argument and returns this builder.
// Validation of which fields are actually required happens in the start* methods.
public StartActivityBuilder setIssuer(Issuer issuer) {
this.mIssuer = issuer;
return this;
}
public StartActivityBuilder setAmount(BigDecimal amount) {
this.mAmount = amount;
return this;
}
public StartActivityBuilder setCards(List<Card> cards) {
this.mCards = cards;
return this;
}
public StartActivityBuilder setCheckoutPreferenceId(String checkoutPreferenceId) {
this.mCheckoutPreferenceId = checkoutPreferenceId;
return this;
}
// Also forces the key type to public; start* methods reject other key types.
public StartActivityBuilder setPublicKey(String key) {
this.mKey = key;
this.mKeyType = MercadoPago.KEY_TYPE_PUBLIC;
return this;
}
public StartActivityBuilder setMerchantAccessToken(String merchantAccessToken) {
this.mMerchantAccessToken = merchantAccessToken;
return this;
}
public StartActivityBuilder setMerchantBaseUrl(String merchantBaseUrl) {
this.mMerchantBaseUrl = merchantBaseUrl;
return this;
}
public StartActivityBuilder setMerchantGetCustomerUri(String merchantGetCustomerUri) {
this.mMerchantGetCustomerUri = merchantGetCustomerUri;
return this;
}
public StartActivityBuilder setPayerCosts(List<PayerCost> payerCosts) {
this.mPayerCosts = payerCosts;
return this;
}
public StartActivityBuilder setIssuers(List<Issuer> issuers) {
this.mIssuers = issuers;
return this;
}
public StartActivityBuilder setPayment(Payment payment) {
this.mPayment = payment;
return this;
}
public StartActivityBuilder setPaymentMethod(PaymentMethod paymentMethod) {
this.mPaymentMethod = paymentMethod;
return this;
}
// Deprecated name for providing the payment-method list.
@Deprecated
public StartActivityBuilder setSupportedPaymentMethods(List<PaymentMethod> paymentMethodList) {
this.mPaymentMethodList = paymentMethodList;
return this;
}
public StartActivityBuilder setRequireSecurityCode(Boolean requireSecurityCode) {
this.mRequireSecurityCode = requireSecurityCode;
return this;
}
public StartActivityBuilder setRequireIssuer(Boolean requireIssuer) {
this.mRequireIssuer = requireIssuer;
return this;
}
public StartActivityBuilder setShowBankDeals(boolean showBankDeals) {
this.mShowBankDeals = showBankDeals;
return this;
}
public StartActivityBuilder setPaymentMethodSearch(PaymentMethodSearch paymentMethodSearch) {
this.mPaymentMethodSearch = paymentMethodSearch;
return this;
}
public StartActivityBuilder setPaymentPreference(PaymentPreference paymentPreference) {
this.mPaymentPreference = paymentPreference;
return this;
}
public StartActivityBuilder setToken(Token token) {
this.mToken = token;
return this;
}
public StartActivityBuilder setSite(Site site) {
this.mSite = site;
return this;
}
public StartActivityBuilder setInstallmentsEnabled(Boolean installmentsEnabled) {
this.mInstallmentsEnabled = installmentsEnabled;
return this;
}
public StartActivityBuilder setDecorationPreference(DecorationPreference decorationPreference) {
this.mDecorationPreference = decorationPreference;
return this;
}
public StartActivityBuilder setSupportedPaymentTypes(List<String> supportedPaymentTypes) {
this.mSupportedPaymentTypes = supportedPaymentTypes;
return this;
}
// Launches BankDealsActivity; requires activity, key and key type; public key only.
public void startBankDealsActivity() {
    if (this.mActivity == null) throw new IllegalStateException("activity is null");
    if (this.mKey == null) throw new IllegalStateException("key is null");
    if (this.mKeyType == null) throw new IllegalStateException("key type is null");
    if (!this.mKeyType.equals(KEY_TYPE_PUBLIC)) {
        throw new RuntimeException("Unsupported key type for this method");
    }
    MercadoPago.startBankDealsActivity(this.mActivity, this.mKey, this.mDecorationPreference);
}
public void startCheckoutActivity() {
if (this.mActivity == null) throw new IllegalStateException("activity is null");
if (this.mCheckoutPreferenceId == null) throw new IllegalStateException("checkout preference id is null");
if (this.mKey == null) throw new IllegalStateException("key is null");
if (this.mKeyType == null) throw new IllegalStateException("key type is null");
if (this.mKeyType.equals(KEY_TYPE_PUBLIC)) {
MercadoPago.startCheckoutActivity(this.mActivity, this.mKey,
this.mCheckoutPreferenceId, this.mShowBankDeals, this.mDecorationPreference);
} else {
throw new RuntimeException("Unsupported key type for this method");
}
}
/**
 * Opens the payment result screen for a completed payment.
 * Requires an activity, a payment, its payment method and a public key.
 *
 * @throws IllegalStateException if a required field has not been set
 * @throws RuntimeException if the configured key is not a public key
 */
public void startPaymentResultActivity() {
    if (mActivity == null) {
        throw new IllegalStateException("activity is null");
    }
    if (mPayment == null) {
        throw new IllegalStateException("payment is null");
    }
    if (mPaymentMethod == null) {
        throw new IllegalStateException("payment method is null");
    }
    if (mKey == null) {
        throw new IllegalStateException("key is null");
    }
    if (mKeyType == null) {
        throw new IllegalStateException("key type is null");
    }
    if (!mKeyType.equals(KEY_TYPE_PUBLIC)) {
        throw new RuntimeException("Unsupported key type for this method");
    }
    MercadoPago.startPaymentResultActivity(mActivity, mKey, mPayment, mPaymentMethod);
}
/**
 * Opens the congratulations (approved payment) screen.
 * Requires an activity, a payment, its payment method and a public key.
 *
 * @throws IllegalStateException if a required field has not been set
 * @throws RuntimeException if the configured key is not a public key
 */
public void startCongratsActivity() {
    if (mActivity == null) {
        throw new IllegalStateException("activity is null");
    }
    if (mPayment == null) {
        throw new IllegalStateException("payment is null");
    }
    if (mPaymentMethod == null) {
        throw new IllegalStateException("payment method is null");
    }
    if (mKey == null) {
        throw new IllegalStateException("key is null");
    }
    if (mKeyType == null) {
        throw new IllegalStateException("key type is null");
    }
    if (!mKeyType.equals(KEY_TYPE_PUBLIC)) {
        throw new RuntimeException("Unsupported key type for this method");
    }
    MercadoPago.startCongratsActivity(mActivity, mKey, mPayment, mPaymentMethod);
}
/**
 * Opens the "call for authorization" screen for a rejected payment that can
 * be authorized by phone. Requires an activity, a payment, its payment
 * method and a public key.
 *
 * @throws IllegalStateException if a required field has not been set
 * @throws RuntimeException if the configured key is not a public key
 */
public void startCallForAuthorizeActivity() {
    if (mActivity == null) {
        throw new IllegalStateException("activity is null");
    }
    if (mPayment == null) {
        throw new IllegalStateException("payment is null");
    }
    if (mPaymentMethod == null) {
        throw new IllegalStateException("payment method is null");
    }
    if (mKey == null) {
        throw new IllegalStateException("key is null");
    }
    if (mKeyType == null) {
        throw new IllegalStateException("key type is null");
    }
    if (!mKeyType.equals(KEY_TYPE_PUBLIC)) {
        throw new RuntimeException("Unsupported key type for this method");
    }
    MercadoPago.startCallForAuthorizeActivity(mActivity, mKey, mPayment, mPaymentMethod);
}
/**
 * Opens the pending-payment screen. Requires an activity, a payment and a
 * public key (no payment method is needed for this screen).
 *
 * @throws IllegalStateException if a required field has not been set
 * @throws RuntimeException if the configured key is not a public key
 */
public void startPendingActivity() {
    if (mActivity == null) {
        throw new IllegalStateException("activity is null");
    }
    if (mPayment == null) {
        throw new IllegalStateException("payment is null");
    }
    if (mKey == null) {
        throw new IllegalStateException("key is null");
    }
    if (mKeyType == null) {
        throw new IllegalStateException("key type is null");
    }
    if (!mKeyType.equals(KEY_TYPE_PUBLIC)) {
        throw new RuntimeException("Unsupported key type for this method");
    }
    MercadoPago.startPendingActivity(mActivity, mKey, mPayment);
}
/**
 * Opens the rejected-payment screen. Requires an activity, a payment, its
 * payment method and a public key.
 *
 * @throws IllegalStateException if a required field has not been set
 * @throws RuntimeException if the configured key is not a public key
 */
public void startRejectionActivity() {
    if (mActivity == null) {
        throw new IllegalStateException("activity is null");
    }
    if (mPayment == null) {
        throw new IllegalStateException("payment is null");
    }
    if (mPaymentMethod == null) {
        throw new IllegalStateException("payment method is null");
    }
    if (mKey == null) {
        throw new IllegalStateException("key is null");
    }
    if (mKeyType == null) {
        throw new IllegalStateException("key type is null");
    }
    if (!mKeyType.equals(KEY_TYPE_PUBLIC)) {
        throw new RuntimeException("Unsupported key type for this method");
    }
    MercadoPago.startRejectionActivity(mActivity, mKey, mPayment, mPaymentMethod);
}
/**
 * Opens the payment-instructions screen (e.g. offline payment methods).
 * Requires an activity, a payment, its payment method and a public key.
 *
 * @throws IllegalStateException if a required field has not been set
 * @throws RuntimeException if the configured key is not a public key
 */
public void startInstructionsActivity() {
    if (mActivity == null) {
        throw new IllegalStateException("activity is null");
    }
    if (mPayment == null) {
        throw new IllegalStateException("payment is null");
    }
    if (mPaymentMethod == null) {
        throw new IllegalStateException("payment method is null");
    }
    if (mKey == null) {
        throw new IllegalStateException("key is null");
    }
    if (mKeyType == null) {
        throw new IllegalStateException("key type is null");
    }
    if (!mKeyType.equals(KEY_TYPE_PUBLIC)) {
        throw new RuntimeException("Unsupported key type for this method");
    }
    MercadoPago.startInstructionsActivity(mActivity, mKey, mPayment, mPaymentMethod);
}
/**
 * Opens the saved customer cards screen.
 * Only an activity and the card list are required; no key is checked.
 *
 * @throws IllegalStateException if the activity or the card list is missing
 */
public void startCustomerCardsActivity() {
    if (mActivity == null) {
        throw new IllegalStateException("activity is null");
    }
    if (mCards == null) {
        throw new IllegalStateException("cards is null");
    }
    MercadoPago.startCustomerCardsActivity(mActivity, mCards);
}
/**
 * Opens the installments selection screen.
 * Always requires an activity, a site and an amount. When no payer costs
 * were supplied, the activity has to fetch them itself, which additionally
 * requires a key, an issuer and a payment method.
 *
 * @throws IllegalStateException if a required field has not been set
 */
public void startInstallmentsActivity() {
    if (mActivity == null) {
        throw new IllegalStateException("activity is null");
    }
    if (mSite == null) {
        throw new IllegalStateException("site is null");
    }
    if (mAmount == null) {
        throw new IllegalStateException("amount is null");
    }
    if (mPayerCosts == null) {
        // Payer costs must be fetched remotely: validate the fetch inputs.
        if (mKey == null) {
            throw new IllegalStateException("key is null");
        }
        if (mIssuer == null) {
            throw new IllegalStateException("issuer is null");
        }
        if (mPaymentMethod == null) {
            throw new IllegalStateException("payment method is null");
        }
    }
    MercadoPago.startInstallmentsActivity(mActivity, mAmount, mSite, mToken,
            mKey, mPayerCosts, mPaymentPreference, mIssuer, mPaymentMethod, mDecorationPreference);
}
/**
 * Opens the card issuer selection screen.
 * Requires an activity, a key and a payment method; issuers and token are
 * passed along as-is (may be null).
 *
 * @throws IllegalStateException if a required field has not been set
 */
public void startIssuersActivity() {
    if (mActivity == null) {
        throw new IllegalStateException("activity is null");
    }
    if (mKey == null) {
        throw new IllegalStateException("key is null");
    }
    if (mPaymentMethod == null) {
        throw new IllegalStateException("payment method is null");
    }
    MercadoPago.startIssuersActivity(mActivity, mKey, mPaymentMethod,
            mToken, mIssuers, mDecorationPreference);
}
/**
 * Opens the card form ("guessing card") screen.
 * Requires an activity, a key and a key type. Note: unlike most flows, the
 * key type's value is not validated here, only its presence.
 *
 * @throws IllegalStateException if a required field has not been set
 */
public void startGuessingCardActivity() {
    if (mActivity == null) {
        throw new IllegalStateException("activity is null");
    }
    if (mKey == null) {
        throw new IllegalStateException("key is null");
    }
    if (mKeyType == null) {
        throw new IllegalStateException("key type is null");
    }
    MercadoPago.startGuessingCardActivity(mActivity, mKey, mRequireSecurityCode,
            mRequireIssuer, mShowBankDeals, mPaymentPreference, mDecorationPreference,
            mToken, mPaymentMethodList);
}
/**
 * Opens the card vault screen.
 * Requires an activity and a key; when installments are enabled the amount
 * and site are also required (they drive the installments lookup).
 *
 * @throws IllegalStateException if a required field has not been set
 */
public void startCardVaultActivity() {
    if (mActivity == null) {
        throw new IllegalStateException("activity is null");
    }
    if (mKey == null) {
        throw new IllegalStateException("key is null");
    }
    if (Boolean.TRUE.equals(mInstallmentsEnabled)) {
        if (mAmount == null) {
            throw new IllegalStateException("amount is null");
        }
        if (mSite == null) {
            throw new IllegalStateException("site is null");
        }
    }
    MercadoPago.startCardVaultActivity(mActivity, mKey, mAmount, mSite, mInstallmentsEnabled,
            mPaymentPreference, mDecorationPreference, mToken, mPaymentMethodList);
}
/**
 * Opens the payment method selection screen.
 * Requires an activity and a public key.
 *
 * @throws IllegalStateException if a required field has not been set
 * @throws RuntimeException if the configured key is not a public key
 */
public void startPaymentMethodsActivity() {
    if (mActivity == null) {
        throw new IllegalStateException("activity is null");
    }
    if (mKey == null) {
        throw new IllegalStateException("key is null");
    }
    if (mKeyType == null) {
        throw new IllegalStateException("key type is null");
    }
    if (!mKeyType.equals(KEY_TYPE_PUBLIC)) {
        throw new RuntimeException("Unsupported key type for this method");
    }
    MercadoPago.startPaymentMethodsActivity(mActivity, mKey,
            mShowBankDeals, mPaymentPreference, mDecorationPreference);
}
/**
 * Opens the payment vault (payment option selection) screen.
 * Requires an activity, an amount, a site and a public key. Merchant server
 * settings and pre-fetched search results are forwarded as-is.
 *
 * @throws IllegalStateException if a required field has not been set
 * @throws RuntimeException if the configured key is not a public key
 */
public void startPaymentVaultActivity() {
    if (mActivity == null) {
        throw new IllegalStateException("activity is null");
    }
    if (mAmount == null) {
        throw new IllegalStateException("amount is null");
    }
    if (mSite == null) {
        throw new IllegalStateException("site is null");
    }
    if (mKey == null) {
        throw new IllegalStateException("key is null");
    }
    if (mKeyType == null) {
        throw new IllegalStateException("key type is null");
    }
    if (!mKeyType.equals(KEY_TYPE_PUBLIC)) {
        throw new RuntimeException("Unsupported key type for this method");
    }
    MercadoPago.startPaymentVaultActivity(mActivity, mKey, mMerchantBaseUrl,
            mMerchantGetCustomerUri, mMerchantAccessToken,
            mAmount, mSite, mInstallmentsEnabled, mShowBankDeals,
            mPaymentPreference, mDecorationPreference, mPaymentMethodSearch);
}
/**
 * Opens the legacy new-card form.
 * Requires an activity, a key, a key type and a payment method. The key
 * type value is forwarded, not validated.
 *
 * @deprecated legacy flow kept for backward compatibility.
 * @throws IllegalStateException if a required field has not been set
 */
@Deprecated
public void startNewCardActivity() {
    if (mActivity == null) {
        throw new IllegalStateException("activity is null");
    }
    if (mKey == null) {
        throw new IllegalStateException("key is null");
    }
    if (mKeyType == null) {
        throw new IllegalStateException("key type is null");
    }
    if (mPaymentMethod == null) {
        throw new IllegalStateException("payment method is null");
    }
    MercadoPago.startNewCardActivity(mActivity, mKeyType, mKey,
            mPaymentMethod, mRequireSecurityCode);
}
/**
 * Opens the legacy vault screen.
 * Requires an activity, an amount, a site and a public key.
 *
 * @throws IllegalStateException if a required field has not been set
 * @throws RuntimeException if the configured key is not a public key
 */
public void startVaultActivity() {
    if (this.mActivity == null) throw new IllegalStateException("activity is null");
    if (this.mAmount == null) throw new IllegalStateException("amount is null");
    if (this.mKey == null) throw new IllegalStateException("key is null");
    if (this.mKeyType == null) throw new IllegalStateException("key type is null");
    // Bug fix: a missing site previously reported "key type is null".
    if (this.mSite == null) throw new IllegalStateException("site is null");
    if (this.mKeyType.equals(KEY_TYPE_PUBLIC)) {
        MercadoPago.startVaultActivity(this.mActivity, this.mKey, this.mMerchantBaseUrl,
                this.mMerchantGetCustomerUri, this.mMerchantAccessToken,
                this.mAmount, this.mSite, this.mSupportedPaymentTypes, this.mShowBankDeals);
    } else {
        throw new RuntimeException("Unsupported key type for this method");
    }
}
}
}
|
deprecated setSupportedPaymentTypes in MercadoPago class
|
sdk/src/main/java/com/mercadopago/core/MercadoPago.java
|
deprecated setSupportedPaymentTypes in MercadoPago class
|
|
Java
|
mit
|
d4bf8b0b6dba0eeff58e0d60f136ee850bd0e355
| 0
|
flutterflies/WaterlessRedstone
|
/**
* Created by Ashrynn Macke | Flutterflies on 11/28/2015.
*
* Main Plugin class
*/
package net.flutterflies.waterless;
import org.bukkit.Bukkit;
import org.bukkit.Material;
import org.bukkit.configuration.file.FileConfiguration;
import org.bukkit.plugin.java.JavaPlugin;
import java.util.ArrayList;
import java.util.List;
/**
 * Main plugin class: loads the configured material names and registers the
 * listener that keeps those materials from being washed away by water.
 */
public class Waterless extends JavaPlugin
{
    // Plugin configuration; loaded on enable, released on disable.
    FileConfiguration config;
    // Materials (redstone components, minecart rails) protected from water.
    List<Material> waterlessMats = new ArrayList<Material>();

    @Override
    public void onEnable()
    {
        config = this.getConfig();
        // Bukkit's getStringList never returns null — it returns an empty
        // list for a missing path — so check isEmpty() instead of null
        // (the old null checks made the "nothing to enable" branch dead).
        List<String> stringList = config.getStringList("redstone");
        if(!stringList.isEmpty())
        {
            waterlessMats.addAll(makeList(stringList));
            getLogger().info("Enabling Redstone blocks");
        }
        else
        {
            getLogger().info("No Redstone blocks to enable");
        }
        stringList = config.getStringList("minecart");
        if(!stringList.isEmpty())
        {
            waterlessMats.addAll(makeList(stringList));
            getLogger().info("Enabling Minecart rails.");
        }
        else
        {
            getLogger().info("No Minecart rails to enable.");
        }
        Bukkit.getPluginManager().registerEvents(new WaterListener(waterlessMats), this);
    }

    @Override
    public void onDisable()
    {
        config = null;
        Bukkit.getScheduler().cancelTasks(this);
    }

    /**
     * Converts configured material names into Material values.
     * Unknown names (Material.getMaterial returns null) are skipped so the
     * resulting list never contains null entries.
     *
     * @param list material names from the configuration
     * @return the corresponding materials, unknown names omitted
     */
    public List<Material> makeList(List<String> list)
    {
        List<Material> materialList = new ArrayList<Material>();
        for(String string : list)
        {
            if(string != null)
            {
                Material material = Material.getMaterial(string);
                if(material != null)
                {
                    materialList.add(material);
                }
            }
        }
        return materialList;
    }
}
|
src/main/java/net/flutterflies/waterless/Waterless.java
|
/**
* Created by Ashrynn Macke | Flutterflies on 11/28/2015.
*
* Main Plugin class
*/
package net.flutterflies.waterless;
import org.bukkit.Bukkit;
import org.bukkit.Material;
import org.bukkit.configuration.file.FileConfiguration;
import org.bukkit.plugin.java.JavaPlugin;
import java.util.ArrayList;
import java.util.List;
/**
 * Main plugin class: loads the configured material names and registers the
 * listener that keeps those materials from being washed away by water.
 */
public class Waterless extends JavaPlugin
{
    // Plugin configuration; loaded on enable, released on disable.
    FileConfiguration config;
    // Bug fix: this list was never initialized, so the addAll calls in
    // onEnable threw a NullPointerException on plugin startup.
    List<Material> waterlessMats = new ArrayList<Material>();

    @Override
    public void onEnable()
    {
        config = this.getConfig();
        // Bukkit's getStringList never returns null — it returns an empty
        // list for a missing path — so check isEmpty() instead of null.
        List<String> stringList = config.getStringList("redstone");
        if(!stringList.isEmpty())
        {
            waterlessMats.addAll(makeList(stringList));
            getLogger().info("Enabling Redstone blocks");
        }
        else
        {
            getLogger().info("No Redstone blocks to enable");
        }
        stringList = config.getStringList("minecart");
        if(!stringList.isEmpty())
        {
            waterlessMats.addAll(makeList(stringList));
            getLogger().info("Enabling Minecart rails.");
        }
        else
        {
            getLogger().info("No Minecart rails to enable.");
        }
        Bukkit.getPluginManager().registerEvents(new WaterListener(waterlessMats), this);
    }

    @Override
    public void onDisable()
    {
        config = null;
        Bukkit.getScheduler().cancelTasks(this);
    }

    /**
     * Converts configured material names into Material values.
     * Unknown names (Material.getMaterial returns null) are skipped so the
     * resulting list never contains null entries.
     *
     * @param list material names from the configuration
     * @return the corresponding materials, unknown names omitted
     */
    public List<Material> makeList(List<String> list)
    {
        List<Material> materialList = new ArrayList<Material>();
        for(String string : list)
        {
            if(string != null)
            {
                Material material = Material.getMaterial(string);
                if(material != null)
                {
                    materialList.add(material);
                }
            }
        }
        return materialList;
    }
}
|
fixed config
|
src/main/java/net/flutterflies/waterless/Waterless.java
|
fixed config
|
|
Java
|
mit
|
46a9092a19dab3c32a7972b6498003dcc93cccf9
| 0
|
CS2103JAN2017-W14-B2/main,CS2103JAN2017-W14-B2/main
|
package seedu.taskboss.model.task;
import java.util.Comparator;
import java.util.Date;
import java.util.Iterator;
import java.util.List;
import javafx.collections.FXCollections;
import javafx.collections.ObservableList;
import seedu.taskboss.commons.core.UnmodifiableObservableList;
import seedu.taskboss.commons.exceptions.DuplicateDataException;
import seedu.taskboss.commons.exceptions.IllegalValueException;
import seedu.taskboss.commons.util.CollectionUtil;
import seedu.taskboss.logic.commands.RenameCategoryCommand;
import seedu.taskboss.logic.commands.SortCommand;
import seedu.taskboss.logic.commands.exceptions.CommandException;
import seedu.taskboss.model.category.Category;
import seedu.taskboss.model.category.UniqueCategoryList;
import seedu.taskboss.model.category.UniqueCategoryList.DuplicateCategoryException;
/**
 * A list of tasks that enforces uniqueness between its elements and does not allow nulls.
 *
 * Supports a minimal set of list operations.
 *
 * @see Task#equals(Object)
 * @see CollectionUtil#elementsAreUnique(Collection)
 */
public class UniqueTaskList implements Iterable<Task> {
    // Backing observable list; UI components bind to this via asObservableList().
    private final ObservableList<Task> internalList = FXCollections.observableArrayList();
    //@@author A0143157J
    // Supported sort keys for sort(SortBy).
    public enum SortBy {
        START_DATE_TIME, END_DATE_TIME, PRIORITY_LEVEL
    }
    /**
     * Sorts tasks based on the specified sort type.
     * Start and end dates are sorted in ascending order,
     * whereas priority level is sorted in descending order
     * (i.e tasks with high priority will be listed on top)
     * @throws IllegalValueException if {@code sortType} is not a recognized sort key
     */
    public void sort(SortBy sortType) throws IllegalValueException {
        Comparator<ReadOnlyTask> taskCmp = null;
        switch(sortType) {
        case START_DATE_TIME:
            taskCmp = new Comparator<ReadOnlyTask>() {
                @Override
                public int compare(ReadOnlyTask task1, ReadOnlyTask task2) {
                    Date startDate1 = task1.getStartDateTime().getDate();
                    Date startDate2 = task2.getStartDateTime().getDate();
                    return compareDateTime(startDate1, startDate2);
                }
            };
            break;
        case END_DATE_TIME:
            taskCmp = new Comparator<ReadOnlyTask> () {
                @Override
                public int compare(ReadOnlyTask task1, ReadOnlyTask task2) {
                    Date endDate1 = task1.getEndDateTime().getDate();
                    Date endDate2 = task2.getEndDateTime().getDate();
                    return compareDateTime(endDate1, endDate2);
                }
            };
            break;
        case PRIORITY_LEVEL:
            taskCmp = new Comparator<ReadOnlyTask> () {
                @Override
                public int compare(ReadOnlyTask task1, ReadOnlyTask task2) {
                    String priorityLevel1 = task1.getPriorityLevel().toString();
                    String priorityLevel2 = task2.getPriorityLevel().toString();
                    return comparePriorityLevel(priorityLevel1, priorityLevel2);
                }
            };
            break;
        default:
            // Unreachable for current enum values; kept as a guard for new keys.
            throw new IllegalValueException(SortCommand.MESSAGE_USAGE);
        }
        FXCollections.sort(internalList, taskCmp);
    }
    /**
     * Compares {@code date1} with {@code date2}.
     * Earlier date will take precedence.
     * A null value is seen as having a lower precedence than a non-null date
     * (i.e. tasks without a date sort after tasks with one).
     */
    private int compareDateTime(Date date1, Date date2) {
        if (date1 == null &&
                date2 == null) {
            return 0;
        } else if (date1 == null) {
            return 1;
        } else if (date2 == null) {
            return -1;
        } else {
            return date1.compareTo(date2);
        }
    }
    /**
     * Compares {@code priorityLevel1} with {@code priorityLevel2}.
     * High priority will take precedence; any two non-high levels are
     * considered equal for sorting purposes.
     */
    private int comparePriorityLevel(String priorityLevel1, String priorityLevel2) {
        if (priorityLevel1.equals(priorityLevel2)) {
            return 0;
        } else if (priorityLevel1.equals(PriorityLevel.PRIORITY_HIGH_VALUE)) {
            return -1;
        } else if (priorityLevel2.equals(PriorityLevel.PRIORITY_HIGH_VALUE)) {
            return 1;
        } else {
            return 0;
        }
    }
    //@@author
    /**
     * Returns true if the list contains an equivalent task as the given argument.
     */
    public boolean contains(ReadOnlyTask toCheck) {
        assert toCheck != null;
        return internalList.contains(toCheck);
    }
    /**
     * Adds a task to the list.
     * Duplicates are allowed only for tasks marked with the "done" category.
     *
     * @throws DuplicateTaskException if the task to add is a duplicate of an existing task in the list.
     */
    public void add(Task toAdd) throws DuplicateTaskException {
        assert toAdd != null;
        if (contains(toAdd) && !toAdd.getCategories().contains(Category.done)) {
            throw new DuplicateTaskException();
        }
        internalList.add(toAdd);
    }
    /**
     * Updates the task in the list at position {@code index} with {@code editedTask}.
     *
     * @throws DuplicateTaskException if updating the task's details causes the task to be equivalent to
     *      another existing task in the list.
     * @throws IndexOutOfBoundsException if {@code index} < 0 or >= the size of the list.
     */
    public void updateTask(int index, ReadOnlyTask editedTask) throws DuplicateTaskException {
        assert editedTask != null;
        Task taskToUpdate = internalList.get(index);
        if (!taskToUpdate.equals(editedTask) && internalList.contains(editedTask) &&
                !editedTask.getCategories().contains(Category.done)) {
            throw new DuplicateTaskException();
        }
        taskToUpdate.resetData(editedTask);
        // TODO: The code below is just a workaround to notify observers of the updated task.
        // The right way is to implement observable properties in the Task class.
        // Then, TaskCard should then bind its text labels to those observable properties.
        internalList.set(index, taskToUpdate);
    }
    /**
     * Removes the equivalent task from the list.
     *
     * @throws TaskNotFoundException if no such task could be found in the list.
     */
    public boolean remove(ReadOnlyTask toRemove) throws TaskNotFoundException {
        assert toRemove != null;
        final boolean taskFoundAndDeleted = internalList.remove(toRemove);
        if (!taskFoundAndDeleted) {
            throw new TaskNotFoundException();
        }
        return taskFoundAndDeleted;
    }
    //@@author A0143157J
    /**
     * Renames a certain category for all tasks in this category.
     * The found-flag is threaded through every task so a match in any task
     * counts, even if later tasks do not carry the category.
     * @throws IllegalValueException
     * @throws CommandException if {@code oldCategory} appears in no task
     */
    public void renameCategory(Category oldCategory, Category newCategory) throws IllegalValueException,
            CommandException {
        assert oldCategory != null;
        boolean isOldCategoryFound = false;
        for (Task task : this) {
            UniqueCategoryList targetCategoryList = task.getCategories();
            UniqueCategoryList newCategoryList = new UniqueCategoryList();
            try {
                isOldCategoryFound = initNewCategoryList(oldCategory, newCategory, isOldCategoryFound,
                        targetCategoryList, newCategoryList);
            } catch (DuplicateCategoryException dce) {
                // NOTE(review): rethrowing a fresh exception drops the original
                // stack trace; consider rethrowing dce directly.
                throw new DuplicateCategoryException();
            }
            task.setCategories(newCategoryList);
        }
        errorDoesNotExistDetect(oldCategory, isOldCategoryFound);
    }
    /**
     * Initialises {@code newCategoryList} and returns true if {@code oldCategory}
     * is found in {@code targetCategoryList} or was already found in a previous
     * task (via {@code isOldCategoryFound}).
     * @throws IllegalValueException, DuplicateCategoryException
     */
    private boolean initNewCategoryList(Category oldCategory, Category newCategory, boolean isOldCategoryFound,
            UniqueCategoryList targetCategoryList, UniqueCategoryList newCategoryList)
            throws IllegalValueException, DuplicateCategoryException {
        boolean isFound = isOldCategoryFound;
        for (Category category : targetCategoryList) {
            if (category.equals(oldCategory)) {
                isFound = true;
                newCategoryList.add(newCategory);
            } else {
                newCategoryList.add(category);
            }
        }
        return isFound;
    }
    //@@author A0144904H
    /**
     * detects the category does not exist error
     * @param oldCategory the category that was searched for
     * @param isFound whether the category was found in any task
     * @throws CommandException if the category was not found
     */
    private void errorDoesNotExistDetect(Category oldCategory, boolean isFound) throws CommandException {
        if (!isFound) {
            throw new CommandException(oldCategory.toString()
                    + " " + RenameCategoryCommand.MESSAGE_DOES_NOT_EXIST_CATEGORY);
        }
    }
    //@@author
    // Replaces the entire contents of this list with another list's tasks.
    public void setTasks(UniqueTaskList replacement) {
        this.internalList.setAll(replacement.internalList);
    }
    // Replaces the contents with copies of the given read-only tasks.
    public void setTasks(List<? extends ReadOnlyTask> tasks) throws DuplicateTaskException {
        final UniqueTaskList replacement = new UniqueTaskList();
        for (final ReadOnlyTask task : tasks) {
            replacement.add(new Task(task));
        }
        setTasks(replacement);
    }
    // Read-only view over the backing list for UI binding.
    public UnmodifiableObservableList<Task> asObservableList() {
        return new UnmodifiableObservableList<>(internalList);
    }
    @Override
    public Iterator<Task> iterator() {
        return internalList.iterator();
    }
    @Override
    public boolean equals(Object other) {
        return other == this // short circuit if same object
                || (other instanceof UniqueTaskList // instanceof handles nulls
                && this.internalList.equals(
                        ((UniqueTaskList) other).internalList));
    }
    @Override
    public int hashCode() {
        return internalList.hashCode();
    }
    /**
     * Signals that an operation would have violated the 'no duplicates' property of the list.
     */
    public static class DuplicateTaskException extends DuplicateDataException {
        protected DuplicateTaskException() {
            super("Operation would result in duplicate tasks");
        }
    }
    /**
     * Signals that an operation targeting a specified task in the list would fail because
     * there is no such matching task in the list.
     */
    public static class TaskNotFoundException extends Exception {}
}
|
src/main/java/seedu/taskboss/model/task/UniqueTaskList.java
|
package seedu.taskboss.model.task;
import java.util.Comparator;
import java.util.Date;
import java.util.Iterator;
import java.util.List;
import javafx.collections.FXCollections;
import javafx.collections.ObservableList;
import seedu.taskboss.commons.core.UnmodifiableObservableList;
import seedu.taskboss.commons.exceptions.DuplicateDataException;
import seedu.taskboss.commons.exceptions.IllegalValueException;
import seedu.taskboss.commons.util.CollectionUtil;
import seedu.taskboss.logic.commands.RenameCategoryCommand;
import seedu.taskboss.logic.commands.SortCommand;
import seedu.taskboss.logic.commands.exceptions.CommandException;
import seedu.taskboss.model.category.Category;
import seedu.taskboss.model.category.UniqueCategoryList;
import seedu.taskboss.model.category.UniqueCategoryList.DuplicateCategoryException;
/**
 * A list of tasks that enforces uniqueness between its elements and does not allow nulls.
 *
 * Supports a minimal set of list operations.
 *
 * @see Task#equals(Object)
 * @see CollectionUtil#elementsAreUnique(Collection)
 */
public class UniqueTaskList implements Iterable<Task> {
    // Backing observable list; UI components bind to this via asObservableList().
    private final ObservableList<Task> internalList = FXCollections.observableArrayList();
    //@@author A0143157J
    // Supported sort keys for sort(SortBy).
    public enum SortBy {
        START_DATE_TIME, END_DATE_TIME, PRIORITY_LEVEL
    }
    /**
     * Sorts tasks based on the specified sort type.
     * Start and end dates are sorted in ascending order,
     * whereas priority level is sorted in descending order
     * (i.e tasks with high priority will be listed on top)
     * @throws IllegalValueException if {@code sortType} is not a recognized sort key
     */
    public void sort(SortBy sortType) throws IllegalValueException {
        Comparator<ReadOnlyTask> taskCmp = null;
        switch(sortType) {
        case START_DATE_TIME:
            taskCmp = new Comparator<ReadOnlyTask>() {
                @Override
                public int compare(ReadOnlyTask task1, ReadOnlyTask task2) {
                    Date startDate1 = task1.getStartDateTime().getDate();
                    Date startDate2 = task2.getStartDateTime().getDate();
                    return compareDateTime(startDate1, startDate2);
                }
            };
            break;
        case END_DATE_TIME:
            taskCmp = new Comparator<ReadOnlyTask> () {
                @Override
                public int compare(ReadOnlyTask task1, ReadOnlyTask task2) {
                    Date endDate1 = task1.getEndDateTime().getDate();
                    Date endDate2 = task2.getEndDateTime().getDate();
                    return compareDateTime(endDate1, endDate2);
                }
            };
            break;
        case PRIORITY_LEVEL:
            taskCmp = new Comparator<ReadOnlyTask> () {
                @Override
                public int compare(ReadOnlyTask task1, ReadOnlyTask task2) {
                    String priorityLevel1 = task1.getPriorityLevel().toString();
                    String priorityLevel2 = task2.getPriorityLevel().toString();
                    return comparePriorityLevel(priorityLevel1, priorityLevel2);
                }
            };
            break;
        default:
            // Unreachable for current enum values; kept as a guard for new keys.
            throw new IllegalValueException(SortCommand.MESSAGE_USAGE);
        }
        FXCollections.sort(internalList, taskCmp);
    }
    /**
     * Compares {@code date1} with {@code date2}.
     * Earlier date will take precedence.
     * A null value is seen as having a lower precedence than a non-null date.
     */
    private int compareDateTime(Date date1, Date date2) {
        if (date1 == null &&
                date2 == null) {
            return 0;
        } else if (date1 == null) {
            return 1;
        } else if (date2 == null) {
            return -1;
        } else {
            return date1.compareTo(date2);
        }
    }
    /**
     * Compares {@code priorityLevel1} with {@code priorityLevel2}.
     * High priority will take precedence; any two non-high levels are
     * considered equal for sorting purposes.
     */
    private int comparePriorityLevel(String priorityLevel1, String priorityLevel2) {
        if (priorityLevel1.equals(priorityLevel2)) {
            return 0;
        } else if (priorityLevel1.equals(PriorityLevel.PRIORITY_HIGH_VALUE)) {
            return -1;
        } else if (priorityLevel2.equals(PriorityLevel.PRIORITY_HIGH_VALUE)) {
            return 1;
        } else {
            return 0;
        }
    }
    //@@author
    /**
     * Returns true if the list contains an equivalent task as the given argument.
     */
    public boolean contains(ReadOnlyTask toCheck) {
        assert toCheck != null;
        return internalList.contains(toCheck);
    }
    /**
     * Adds a task to the list.
     * Duplicates are allowed only for tasks marked with the "done" category.
     *
     * @throws DuplicateTaskException if the task to add is a duplicate of an existing task in the list.
     */
    public void add(Task toAdd) throws DuplicateTaskException {
        assert toAdd != null;
        if (contains(toAdd) && !toAdd.getCategories().contains(Category.done)) {
            throw new DuplicateTaskException();
        }
        internalList.add(toAdd);
    }
    /**
     * Updates the task in the list at position {@code index} with {@code editedTask}.
     *
     * @throws DuplicateTaskException if updating the task's details causes the task to be equivalent to
     *      another existing task in the list.
     * @throws IndexOutOfBoundsException if {@code index} < 0 or >= the size of the list.
     */
    public void updateTask(int index, ReadOnlyTask editedTask) throws DuplicateTaskException {
        assert editedTask != null;
        Task taskToUpdate = internalList.get(index);
        if (!taskToUpdate.equals(editedTask) && internalList.contains(editedTask) &&
                !editedTask.getCategories().contains(Category.done)) {
            throw new DuplicateTaskException();
        }
        taskToUpdate.resetData(editedTask);
        // TODO: The code below is just a workaround to notify observers of the updated task.
        // The right way is to implement observable properties in the Task class.
        // Then, TaskCard should then bind its text labels to those observable properties.
        internalList.set(index, taskToUpdate);
    }
    /**
     * Removes the equivalent task from the list.
     *
     * @throws TaskNotFoundException if no such task could be found in the list.
     */
    public boolean remove(ReadOnlyTask toRemove) throws TaskNotFoundException {
        assert toRemove != null;
        final boolean taskFoundAndDeleted = internalList.remove(toRemove);
        if (!taskFoundAndDeleted) {
            throw new TaskNotFoundException();
        }
        return taskFoundAndDeleted;
    }
    //@@author A0143157J
    /**
     * Renames a certain category for all tasks in this category.
     * Bug fix: the found-flag is now threaded through every task, so a match
     * in an earlier task is no longer discarded when a later task does not
     * carry the category (previously this caused a spurious
     * "category does not exist" CommandException).
     * @throws IllegalValueException
     * @throws CommandException if {@code oldCategory} appears in no task
     */
    public void renameCategory(Category oldCategory, Category newCategory) throws IllegalValueException,
            CommandException {
        assert oldCategory != null;
        boolean isOldCategoryFound = false;
        for (Task task : this) {
            UniqueCategoryList targetCategoryList = task.getCategories();
            UniqueCategoryList newCategoryList = new UniqueCategoryList();
            try {
                isOldCategoryFound = initNewCategoryList(oldCategory, newCategory, isOldCategoryFound,
                        targetCategoryList, newCategoryList);
            } catch (DuplicateCategoryException dce) {
                throw new DuplicateCategoryException();
            }
            task.setCategories(newCategoryList);
        }
        errorDoesNotExistDetect(oldCategory, isOldCategoryFound);
    }
    /**
     * Initialises {@code newCategoryList} and returns true if {@code oldCategory}
     * is found in {@code targetCategoryList} or was already found in a previous
     * task (via {@code isOldCategoryFound}).
     * @throws IllegalValueException, DuplicateCategoryException
     */
    private boolean initNewCategoryList(Category oldCategory, Category newCategory, boolean isOldCategoryFound,
            UniqueCategoryList targetCategoryList, UniqueCategoryList newCategoryList)
            throws IllegalValueException, DuplicateCategoryException {
        // Seed with the accumulated result so earlier matches are preserved.
        boolean isFound = isOldCategoryFound;
        for (Category category : targetCategoryList) {
            if (category.equals(oldCategory)) {
                isFound = true;
                newCategoryList.add(newCategory);
            } else {
                newCategoryList.add(category);
            }
        }
        return isFound;
    }
    //@@author A0144904H
    /**
     * detects the category does not exist error
     * @param oldCategory the category that was searched for
     * @param isFound whether the category was found in any task
     * @throws CommandException if the category was not found
     */
    private void errorDoesNotExistDetect(Category oldCategory, boolean isFound) throws CommandException {
        if (!isFound) {
            throw new CommandException(oldCategory.toString()
                    + " " + RenameCategoryCommand.MESSAGE_DOES_NOT_EXIST_CATEGORY);
        }
    }
    //@@author
    // Replaces the entire contents of this list with another list's tasks.
    public void setTasks(UniqueTaskList replacement) {
        this.internalList.setAll(replacement.internalList);
    }
    // Replaces the contents with copies of the given read-only tasks.
    public void setTasks(List<? extends ReadOnlyTask> tasks) throws DuplicateTaskException {
        final UniqueTaskList replacement = new UniqueTaskList();
        for (final ReadOnlyTask task : tasks) {
            replacement.add(new Task(task));
        }
        setTasks(replacement);
    }
    // Read-only view over the backing list for UI binding.
    public UnmodifiableObservableList<Task> asObservableList() {
        return new UnmodifiableObservableList<>(internalList);
    }
    @Override
    public Iterator<Task> iterator() {
        return internalList.iterator();
    }
    @Override
    public boolean equals(Object other) {
        return other == this // short circuit if same object
                || (other instanceof UniqueTaskList // instanceof handles nulls
                && this.internalList.equals(
                        ((UniqueTaskList) other).internalList));
    }
    @Override
    public int hashCode() {
        return internalList.hashCode();
    }
    /**
     * Signals that an operation would have violated the 'no duplicates' property of the list.
     */
    public static class DuplicateTaskException extends DuplicateDataException {
        protected DuplicateTaskException() {
            super("Operation would result in duplicate tasks");
        }
    }
    /**
     * Signals that an operation targeting a specified task in the list would fail because
     * there is no such matching task in the list.
     */
    public static class TaskNotFoundException extends Exception {}
}
|
Fix RenameCategoryCommandTest
|
src/main/java/seedu/taskboss/model/task/UniqueTaskList.java
|
Fix RenameCategoryCommandTest
|
|
Java
|
mit
|
3f160e8041f9b78dc774dd42bde56bf98fd11f1f
| 0
|
mairdl/jabref,mairdl/jabref,jhshinn/jabref,Mr-DLib/jabref,bartsch-dev/jabref,tobiasdiez/jabref,oscargus/jabref,jhshinn/jabref,JabRef/jabref,zellerdev/jabref,shitikanth/jabref,tobiasdiez/jabref,JabRef/jabref,bartsch-dev/jabref,tschechlovdev/jabref,JabRef/jabref,sauliusg/jabref,motokito/jabref,sauliusg/jabref,zellerdev/jabref,grimes2/jabref,jhshinn/jabref,jhshinn/jabref,mairdl/jabref,tschechlovdev/jabref,grimes2/jabref,tschechlovdev/jabref,zellerdev/jabref,Mr-DLib/jabref,JabRef/jabref,sauliusg/jabref,motokito/jabref,grimes2/jabref,mredaelli/jabref,grimes2/jabref,ayanai1/jabref,shitikanth/jabref,shitikanth/jabref,Siedlerchr/jabref,motokito/jabref,sauliusg/jabref,Mr-DLib/jabref,obraliar/jabref,jhshinn/jabref,ayanai1/jabref,tobiasdiez/jabref,ayanai1/jabref,mredaelli/jabref,mredaelli/jabref,obraliar/jabref,mairdl/jabref,oscargus/jabref,motokito/jabref,Braunch/jabref,ayanai1/jabref,oscargus/jabref,Braunch/jabref,bartsch-dev/jabref,Braunch/jabref,bartsch-dev/jabref,mredaelli/jabref,Braunch/jabref,obraliar/jabref,grimes2/jabref,Siedlerchr/jabref,ayanai1/jabref,obraliar/jabref,oscargus/jabref,Mr-DLib/jabref,zellerdev/jabref,oscargus/jabref,tschechlovdev/jabref,bartsch-dev/jabref,Mr-DLib/jabref,zellerdev/jabref,tschechlovdev/jabref,mredaelli/jabref,motokito/jabref,mairdl/jabref,Braunch/jabref,Siedlerchr/jabref,Siedlerchr/jabref,shitikanth/jabref,tobiasdiez/jabref,obraliar/jabref,shitikanth/jabref
|
/* Copyright (C) 2003-2015 JabRef contributors.
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License along
with this program; if not, write to the Free Software Foundation, Inc.,
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*/
package net.sf.jabref.importer;
import java.awt.event.ActionEvent;
import java.io.File;
import java.util.Arrays;
import java.util.Set;
import java.util.SortedSet;
import java.util.TreeSet;
import javax.swing.AbstractAction;
import javax.swing.Action;
import javax.swing.JFileChooser;
import javax.swing.JOptionPane;
import javax.swing.filechooser.FileFilter;
import net.sf.jabref.Globals;
import net.sf.jabref.JabRefPreferences;
import net.sf.jabref.gui.JabRefFrame;
import net.sf.jabref.gui.actions.MnemonicAwareAction;
import net.sf.jabref.gui.keyboard.KeyBinding;
import net.sf.jabref.importer.fileformat.ImportFormat;
import net.sf.jabref.logic.l10n.Localization;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
public class ImportFormats {
private static final Log LOGGER = LogFactory.getLog(ImportFormats.class);
private static JFileChooser createImportFileChooser(String currentDir) {
SortedSet<ImportFormat> importers = Globals.IMPORT_FORMAT_READER.getImportFormats();
String lastUsedFormat = Globals.prefs.get(JabRefPreferences.LAST_USED_IMPORT);
FileFilter defaultFilter = null;
JFileChooser fc = new JFileChooser(currentDir);
Set<ImportFileFilter> filters = new TreeSet<>();
for (ImportFormat format : importers) {
ImportFileFilter filter = new ImportFileFilter(format);
filters.add(filter);
if (format.getFormatName().equals(lastUsedFormat)) {
defaultFilter = filter;
}
}
for (ImportFileFilter filter : filters) {
fc.addChoosableFileFilter(filter);
}
if (defaultFilter == null) {
fc.setFileFilter(fc.getAcceptAllFileFilter());
} else {
fc.setFileFilter(defaultFilter);
}
return fc;
}
/**
* Create an AbstractAction for performing an Import operation.
* @param frame The JabRefFrame of this JabRef instance.
* @param openInNew Indicate whether the action should open into a new database or
* into the currently open one.
* @return The action.
*/
public static AbstractAction getImportAction(JabRefFrame frame, boolean openInNew) {
class ImportAction extends MnemonicAwareAction {
private final JabRefFrame frame;
private final boolean openInNew;
public ImportAction(JabRefFrame frame, boolean openInNew) {
this.frame = frame;
this.openInNew = openInNew;
putValue(Action.NAME, openInNew ? Localization.menuTitle("Import into new database") : Localization
.menuTitle("Import into current database"));
putValue(Action.ACCELERATOR_KEY,
openInNew ? Globals.getKeyPrefs().getKey(KeyBinding.IMPORT_INTO_NEW_DATABASE) : Globals.getKeyPrefs().getKey(KeyBinding.IMPORT_INTO_CURRENT_DATABASE));
}
@Override
public void actionPerformed(ActionEvent e) {
JFileChooser fileChooser = createImportFileChooser(Globals.prefs.get(JabRefPreferences.IMPORT_WORKING_DIRECTORY));
int result = fileChooser.showOpenDialog(frame);
if (result != JFileChooser.APPROVE_OPTION) {
return;
}
File file = fileChooser.getSelectedFile();
if (file == null) {
return;
}
FileFilter ff = fileChooser.getFileFilter();
ImportFormat format = null;
if (ff instanceof ImportFileFilter) {
format = ((ImportFileFilter) ff).getImportFormat();
}
try {
if (!file.exists()) {
// Warn that the file doesn't exists:
JOptionPane.showMessageDialog(frame,
Localization.lang("File not found") +
": '" + file.getName() + "'.",
Localization.lang("Import"), JOptionPane.ERROR_MESSAGE);
return;
}
ImportMenuItem imi = new ImportMenuItem(frame, openInNew, format);
imi.automatedImport(Arrays.asList(file.getAbsolutePath()));
// Make sure we remember which filter was used, to set the default
// for next time:
if (format == null) {
Globals.prefs.put(JabRefPreferences.LAST_USED_IMPORT, "__all");
} else {
Globals.prefs.put(JabRefPreferences.LAST_USED_IMPORT, format.getFormatName());
}
Globals.prefs.put(JabRefPreferences.IMPORT_WORKING_DIRECTORY, file.getParent());
} catch (Exception ex) {
LOGGER.warn("Problem with import format", ex);
}
}
}
return new ImportAction(frame, openInNew);
}
}
|
src/main/java/net/sf/jabref/importer/ImportFormats.java
|
/* Copyright (C) 2003-2015 JabRef contributors.
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License along
with this program; if not, write to the Free Software Foundation, Inc.,
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*/
package net.sf.jabref.importer;
import java.awt.event.ActionEvent;
import java.io.File;
import java.util.Arrays;
import java.util.Set;
import java.util.SortedSet;
import java.util.TreeSet;
import javax.swing.AbstractAction;
import javax.swing.Action;
import javax.swing.JFileChooser;
import javax.swing.JOptionPane;
import javax.swing.filechooser.FileFilter;
import net.sf.jabref.Globals;
import net.sf.jabref.JabRefPreferences;
import net.sf.jabref.gui.JabRefFrame;
import net.sf.jabref.gui.actions.MnemonicAwareAction;
import net.sf.jabref.gui.keyboard.KeyBinding;
import net.sf.jabref.importer.fileformat.ImportFormat;
import net.sf.jabref.logic.l10n.Localization;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
public class ImportFormats {

    private static final Log LOGGER = LogFactory.getLog(ImportFormats.class);

    /**
     * Creates a file chooser for import: one choosable filter per registered
     * import format, pre-selecting the filter of the format used last time
     * (or the accept-all filter when none is remembered).
     *
     * @param currentDir directory the chooser initially shows
     * @return the configured chooser
     */
    private static JFileChooser createImportFileChooser(String currentDir) {
        SortedSet<ImportFormat> importers = Globals.IMPORT_FORMAT_READER.getImportFormats();
        String lastUsedFormat = Globals.prefs.get(JabRefPreferences.LAST_USED_IMPORT);
        FileFilter defaultFilter = null;
        JFileChooser fc = new JFileChooser(currentDir);
        // TreeSet keeps the filters in their natural (sorted) order.
        Set<ImportFileFilter> filters = new TreeSet<>();
        for (ImportFormat format : importers) {
            ImportFileFilter filter = new ImportFileFilter(format);
            filters.add(filter);
            if (format.getFormatName().equals(lastUsedFormat)) {
                defaultFilter = filter;
            }
        }
        for (ImportFileFilter filter : filters) {
            fc.addChoosableFileFilter(filter);
        }
        if (defaultFilter == null) {
            fc.setFileFilter(fc.getAcceptAllFileFilter());
        } else {
            fc.setFileFilter(defaultFilter);
        }
        return fc;
    }

    /**
     * Create an AbstractAction for performing an Import operation.
     *
     * @param frame The JabRefFrame of this JabRef instance.
     * @param openInNew Indicate whether the action should open into a new database or
     *        into the currently open one.
     * @return The action.
     */
    public static AbstractAction getImportAction(JabRefFrame frame, boolean openInNew) {

        class ImportAction extends MnemonicAwareAction {

            private final JabRefFrame frame;
            private final boolean openInNew;

            public ImportAction(JabRefFrame frame, boolean openInNew) {
                this.frame = frame;
                this.openInNew = openInNew;
                putValue(Action.NAME, openInNew ? Localization.menuTitle("Import into new database") : Localization
                        .menuTitle("Import into current database"));
                putValue(Action.ACCELERATOR_KEY,
                        openInNew ? Globals.getKeyPrefs().getKey(KeyBinding.IMPORT_INTO_NEW_DATABASE) : Globals.getKeyPrefs().getKey(KeyBinding.IMPORT_INTO_CURRENT_DATABASE));
            }

            @Override
            public void actionPerformed(ActionEvent e) {
                JFileChooser fileChooser = createImportFileChooser(Globals.prefs.get(JabRefPreferences.IMPORT_WORKING_DIRECTORY));
                // BUG FIX (#1268): only import when the user actually confirmed the dialog.
                // Previously the return value of showOpenDialog was ignored, so cancelling
                // the dialog still triggered an import of the (stale) selected file.
                int result = fileChooser.showOpenDialog(frame);
                if (result != JFileChooser.APPROVE_OPTION) {
                    return;
                }
                File file = fileChooser.getSelectedFile();
                if (file == null) {
                    return;
                }
                // Derive the import format from the chosen filter; null means "autodetect".
                FileFilter ff = fileChooser.getFileFilter();
                ImportFormat format = null;
                if (ff instanceof ImportFileFilter) {
                    format = ((ImportFileFilter) ff).getImportFormat();
                }
                try {
                    if (!file.exists()) {
                        // Warn that the file doesn't exist:
                        JOptionPane.showMessageDialog(frame,
                                Localization.lang("File not found") +
                                        ": '" + file.getName() + "'.",
                                Localization.lang("Import"), JOptionPane.ERROR_MESSAGE);
                        return;
                    }
                    ImportMenuItem imi = new ImportMenuItem(frame, openInNew, format);
                    imi.automatedImport(Arrays.asList(file.getAbsolutePath()));
                    // Make sure we remember which filter was used, to set the default
                    // for next time:
                    if (format == null) {
                        Globals.prefs.put(JabRefPreferences.LAST_USED_IMPORT, "__all");
                    } else {
                        Globals.prefs.put(JabRefPreferences.LAST_USED_IMPORT, format.getFormatName());
                    }
                    Globals.prefs.put(JabRefPreferences.IMPORT_WORKING_DIRECTORY, file.getParent());
                } catch (Exception ex) {
                    LOGGER.warn("Problem with import format", ex);
                }
            }
        }

        return new ImportAction(frame, openInNew);
    }
}
|
Fix #1268 Canceling an import into current/new database still triggers an import
|
src/main/java/net/sf/jabref/importer/ImportFormats.java
|
Fix #1268 Canceling an import into current/new database still triggers an import
|
|
Java
|
mit
|
173c9722fef9411fb54850aee4d0c7ce50a37ea7
| 0
|
tlaplus/tlaplus,tlaplus/tlaplus,lemmy/tlaplus,lemmy/tlaplus,lemmy/tlaplus,lemmy/tlaplus,tlaplus/tlaplus,tlaplus/tlaplus
|
// Copyright (c) 2003 Compaq Corporation. All rights reserved.
// Portions Copyright (c) 2003 Microsoft Corporation. All rights reserved.
// Last modified on Mon 30 Apr 2007 at 13:18:34 PST by lamport
// modified on Fri Mar 2 23:46:22 PST 2001 by yuanyu
package tlc2.tool.distributed;
import java.io.EOFException;
import java.net.URI;
import java.rmi.RemoteException;
import java.util.concurrent.BrokenBarrierException;
import java.util.concurrent.CyclicBarrier;
import tlc2.TLCGlobals;
import tlc2.output.EC;
import tlc2.output.MP;
import tlc2.tool.TLCState;
import tlc2.tool.TLCStateVec;
import tlc2.tool.WorkerException;
import tlc2.tool.distributed.selector.IBlockSelector;
import tlc2.tool.queue.StateQueue;
import tlc2.util.BitVector;
import tlc2.util.IdThread;
import tlc2.util.LongVec;
import util.ToolIO;
/**
 * Server-side thread that drives one remote {@code TLCWorkerRMI} worker:
 * it pulls blocks of unexplored states from the server's state queue, ships
 * them to the worker over RMI, and feeds the computed successor states and
 * fingerprints back into the server's data structures.
 */
public class TLCServerThread extends IdThread {
    // Statistics: number of states received from / sent to the remote worker.
    private int receivedStates, sentStates;
    // Start barrier shared by all server threads; may be null (start immediately).
    private final CyclicBarrier barrier;
    // Strategy deciding how many states to hand to the worker per exchange.
    private final IBlockSelector selector;

    public TLCServerThread(int id, TLCWorkerRMI worker, TLCServer tlc) {
        this(id, worker, tlc, null, null);
    }

    public TLCServerThread(int id, TLCWorkerRMI worker, TLCServer tlc, CyclicBarrier aBarrier, IBlockSelector aSelector) {
        super(id);
        this.setWorker(worker);
        this.tlcServer = tlc;
        this.barrier = aBarrier;
        this.selector = aSelector;
    }

    private TLCWorkerRMI worker;
    private TLCServer tlcServer;
    private URI uri;
    /**
     * Current unit of work or null
     */
    private TLCState[] states;

    public final TLCWorkerRMI getWorker() {
        return this.worker;
    }

    /**
     * Wraps the given worker in a smart proxy, caches its URI and renames this
     * thread after the worker so log output identifies the remote end.
     */
    public final void setWorker(TLCWorkerRMI worker) {
        this.worker = new TLCWorkerSmartProxy(worker);
        try {
            this.uri = worker.getURI();
        } catch (RemoteException e) {
            //TODO handle more gracefully
            e.printStackTrace();
        }
        // update thread name
        setName("TLCServerThread-[" + uri.toASCIIString() + "]");
    }

    /**
     * This method gets a state from the queue, generates all the possible next
     * states of the state, checks the invariants, and updates the state set and
     * state queue.
     */
    public void run() {
        waitOnBarrier();
        TLCGlobals.incNumWorkers(1);
        TLCStateVec[] newStates = null;
        LongVec[] newFps = null;
        final StateQueue stateQueue = this.tlcServer.stateQueue;
        try {
            START: while (true) {
                // blocks until more states available or all work is done
                states = selector.getBlocks(stateQueue, worker);
                if (states == null) {
                    // null signals that model checking is complete: mark the
                    // server done and wake anybody waiting on it.
                    synchronized (this.tlcServer) {
                        this.tlcServer.setDone();
                        this.tlcServer.notify();
                    }
                    stateQueue.finishAll();
                    return;
                }
                // without initial states no need to bother workers
                if (states.length == 0) {
                    continue;
                }
                // count statistics
                sentStates += states.length;
                // real work happens here:
                // worker computes next states for states
                boolean workDone = false;
                while (!workDone) {
                    try {
                        final Object[] res = this.worker.getNextStates(states);
                        newStates = (TLCStateVec[]) res[0];
                        receivedStates += newStates[0].size();
                        newFps = (LongVec[]) res[1];
                        workDone = true;
                    } catch (RemoteException e) {
                        ToolIO.err.println(e.getMessage());
                        // non recoverable errors
                        final Throwable cause = e.getCause();
                        // NOTE(review): an EOFException with a null message is taken as
                        // "block too large for the RMI transfer" — confirm this heuristic
                        // against the RMI implementation in use.
                        if (cause instanceof EOFException && cause.getMessage() == null) {
                            ToolIO.err.println("Limiting max block size to: " + states.length / 2);
                            // states[] exceeds maximum transferable size
                            // (add states back to queue and retry)
                            stateQueue.sEnqueue(states);
                            // half the maximum size and use it as a limit from now on
                            selector.setMaxTXSize(states.length / 2);
                            // go back to beginning
                            continue START;
                        } else {
                            // Try to move this thread to another worker; if no worker
                            // is available, the remote end is considered lost.
                            if (!this.tlcServer.reassignWorker(this)) {
                                handleRemoteWorkerLost(stateQueue);
                                return;
                            }
                        }
                    } catch (NullPointerException e) {
                        ToolIO.err.println(e.getMessage());
                        if (!this.tlcServer.reassignWorker(this)) {
                            handleRemoteWorkerLost(stateQueue);
                            return;
                        }
                    }
                }
                // add fingerprints to fingerprint manager (delegates to
                // corresponding fingerprint server)
                // TODO why isn't this done by workers directly?
                BitVector[] visited = this.tlcServer.fpSetManager
                        .putBlock(newFps);
                // recreate newly computed states and add them to queue
                for (int i = 0; i < visited.length; i++) {
                    BitVector.Iter iter = new BitVector.Iter(visited[i]);
                    int index;
                    while ((index = iter.next()) != -1) {
                        TLCState state = newStates[i].elementAt(index);
                        // write state id and state fp to .st file for
                        // checkpointing
                        long fp = newFps[i].elementAt(index);
                        state.uid = this.tlcServer.trace.writeState(state,
                                fp);
                        // add state to state queue for further processing
                        stateQueue.sEnqueue(state);
                    }
                }
            }
        } catch (Throwable e) {
            // An error occurred (possibly a violated invariant reported by the
            // worker); record it and print the error trace when states are known.
            TLCState state1 = null, state2 = null;
            if (e instanceof WorkerException) {
                state1 = ((WorkerException) e).state1;
                state2 = ((WorkerException) e).state2;
            }
            if (this.tlcServer.setErrState(state1, true)) {
                MP.printError(EC.GENERAL, e);
                if (state1 != null) {
                    try {
                        ToolIO.out
                                .println("\nThe behavior up to this point is:");
                        this.tlcServer.trace.printTrace(state1,
                                state2);
                    } catch (Exception e1) {
                        ToolIO.err.println(e1.getMessage());
                    }
                }
                stateQueue.finishAll();
                synchronized (this.tlcServer) {
                    this.tlcServer.notify();
                }
            }
        }
    }

    /**
     * Handles the case of a disconnected remote worker: returns the worker's
     * unfinished block to the state queue so other workers can pick it up, and
     * decrements the global worker count so the server no longer waits for
     * this departed thread.
     * @param stateQueue queue the unfinished states are returned to
     */
    private void handleRemoteWorkerLost(final StateQueue stateQueue) {
        stateQueue.sEnqueue(states);
        TLCGlobals.incNumWorkers(-1);
        MP.printMessage(EC.TLC_DISTRIBUTED_WORKER_DEREGISTERED, getUri().toString());
    }

    /**
     * Causes this thread to wait for all other worker threads before it starts
     * computing states. The barrier may be null in which case threads start
     * computing next states immediately after creation.
     */
    private void waitOnBarrier() {
        try {
            if(barrier != null)
                barrier.await();
        } catch (InterruptedException e2) {
            e2.printStackTrace();
        } catch (BrokenBarrierException e2) {
            e2.printStackTrace();
        }
    }

    /**
     * @return The current amount of states the corresponding worker is
     * computing on
     */
    public int getCurrentSize() {
        if(states != null) {
            return states.length;
        }
        return 0;
    }

    /**
     * @return the url
     */
    public URI getUri() {
        return this.uri;
    }

    /**
     * @return the receivedStates
     */
    public int getReceivedStates() {
        return receivedStates;
    }

    /**
     * @return the sentStates
     */
    public int getSentStates() {
        return sentStates;
    }
}
|
tlatools/src/tlc2/tool/distributed/TLCServerThread.java
|
// Copyright (c) 2003 Compaq Corporation. All rights reserved.
// Portions Copyright (c) 2003 Microsoft Corporation. All rights reserved.
// Last modified on Mon 30 Apr 2007 at 13:18:34 PST by lamport
// modified on Fri Mar 2 23:46:22 PST 2001 by yuanyu
package tlc2.tool.distributed;
import java.io.EOFException;
import java.net.URI;
import java.rmi.RemoteException;
import java.util.concurrent.BrokenBarrierException;
import java.util.concurrent.CyclicBarrier;
import tlc2.TLCGlobals;
import tlc2.output.EC;
import tlc2.output.MP;
import tlc2.tool.TLCState;
import tlc2.tool.TLCStateVec;
import tlc2.tool.WorkerException;
import tlc2.tool.distributed.selector.IBlockSelector;
import tlc2.tool.queue.StateQueue;
import tlc2.util.BitVector;
import tlc2.util.IdThread;
import tlc2.util.LongVec;
import util.ToolIO;
/**
 * Server-side thread that drives one remote {@code TLCWorkerRMI} worker:
 * it pulls blocks of unexplored states from the server's state queue, ships
 * them to the worker over RMI, and feeds the computed successor states and
 * fingerprints back into the server's data structures.
 */
public class TLCServerThread extends IdThread {
    // Statistics: number of states received from / sent to the remote worker.
    private int receivedStates, sentStates;
    // Start barrier shared by all server threads; may be null (start immediately).
    private final CyclicBarrier barrier;
    // Strategy deciding how many states to hand to the worker per exchange.
    private final IBlockSelector selector;

    public TLCServerThread(int id, TLCWorkerRMI worker, TLCServer tlc) {
        this(id, worker, tlc, null, null);
    }

    public TLCServerThread(int id, TLCWorkerRMI worker, TLCServer tlc, CyclicBarrier aBarrier, IBlockSelector aSelector) {
        super(id);
        this.setWorker(worker);
        this.tlcServer = tlc;
        this.barrier = aBarrier;
        this.selector = aSelector;
    }

    private TLCWorkerRMI worker;
    private TLCServer tlcServer;
    private URI uri;
    /**
     * Current unit of work or null
     */
    private TLCState[] states;

    public final TLCWorkerRMI getWorker() {
        return this.worker;
    }

    /**
     * Wraps the given worker in a smart proxy, caches its URI and renames this
     * thread after the worker so log output identifies the remote end.
     */
    public final void setWorker(TLCWorkerRMI worker) {
        this.worker = new TLCWorkerSmartProxy(worker);
        try {
            this.uri = worker.getURI();
        } catch (RemoteException e) {
            //TODO handle more gracefully
            e.printStackTrace();
        }
        // update thread name
        setName("TLCServerThread-[" + uri.toASCIIString() + "]");
    }

    /**
     * This method gets a state from the queue, generates all the possible next
     * states of the state, checks the invariants, and updates the state set and
     * state queue.
     */
    public void run() {
        waitOnBarrier();
        TLCGlobals.incNumWorkers(1);
        TLCStateVec[] newStates = null;
        LongVec[] newFps = null;
        final StateQueue stateQueue = this.tlcServer.stateQueue;
        try {
            START: while (true) {
                // blocks until more states available or all work is done
                states = selector.getBlocks(stateQueue, worker);
                if (states == null) {
                    // null signals that model checking is complete: mark the
                    // server done and wake anybody waiting on it.
                    synchronized (this.tlcServer) {
                        this.tlcServer.setDone();
                        this.tlcServer.notify();
                    }
                    stateQueue.finishAll();
                    return;
                }
                // without initial states no need to bother workers
                if (states.length == 0) {
                    continue;
                }
                // count statistics
                sentStates += states.length;
                // real work happens here:
                // worker computes next states for states
                boolean workDone = false;
                while (!workDone) {
                    try {
                        final Object[] res = this.worker.getNextStates(states);
                        newStates = (TLCStateVec[]) res[0];
                        receivedStates += newStates[0].size();
                        newFps = (LongVec[]) res[1];
                        workDone = true;
                    } catch (RemoteException e) {
                        ToolIO.err.println(e.getMessage());
                        // non recoverable errors
                        final Throwable cause = e.getCause();
                        // NOTE(review): an EOFException with a null message is taken as
                        // "block too large for the RMI transfer" — confirm this heuristic
                        // against the RMI implementation in use.
                        if (cause instanceof EOFException && cause.getMessage() == null) {
                            ToolIO.err.println("Limiting max block size to: " + states.length / 2);
                            // states[] exceeds maximum transferable size
                            // (add states back to queue and retry)
                            stateQueue.sEnqueue(states);
                            // half the maximum size and use it as a limit from now on
                            selector.setMaxTXSize(states.length / 2);
                            // go back to beginning
                            continue START;
                        } else {
                            if (!this.tlcServer.reassignWorker(this)) {
                                handleRemoteWorkerLost(stateQueue);
                                return;
                            }
                        }
                    } catch (NullPointerException e) {
                        ToolIO.err.println(e.getMessage());
                        if (!this.tlcServer.reassignWorker(this)) {
                            handleRemoteWorkerLost(stateQueue);
                            return;
                        }
                    }
                }
                // add fingerprints to fingerprint manager (delegates to
                // corresponding fingerprint server)
                // TODO why isn't this done by workers directly?
                BitVector[] visited = this.tlcServer.fpSetManager
                        .putBlock(newFps);
                // recreate newly computed states and add them to queue
                for (int i = 0; i < visited.length; i++) {
                    BitVector.Iter iter = new BitVector.Iter(visited[i]);
                    int index;
                    while ((index = iter.next()) != -1) {
                        TLCState state = newStates[i].elementAt(index);
                        // write state id and state fp to .st file for
                        // checkpointing
                        long fp = newFps[i].elementAt(index);
                        state.uid = this.tlcServer.trace.writeState(state,
                                fp);
                        // add state to state queue for further processing
                        stateQueue.sEnqueue(state);
                    }
                }
            }
        } catch (Throwable e) {
            // An error occurred (possibly a violated invariant reported by the
            // worker); record it and print the error trace when states are known.
            TLCState state1 = null, state2 = null;
            if (e instanceof WorkerException) {
                state1 = ((WorkerException) e).state1;
                state2 = ((WorkerException) e).state2;
            }
            if (this.tlcServer.setErrState(state1, true)) {
                MP.printError(EC.GENERAL, e);
                if (state1 != null) {
                    try {
                        ToolIO.out
                                .println("\nThe behavior up to this point is:");
                        this.tlcServer.trace.printTrace(state1,
                                state2);
                    } catch (Exception e1) {
                        ToolIO.err.println(e1.getMessage());
                    }
                }
                stateQueue.finishAll();
                synchronized (this.tlcServer) {
                    this.tlcServer.notify();
                }
            }
        }
    }

    /**
     * Handles the case of a disconnected remote worker.
     *
     * BUG FIX: this logic was previously duplicated inline in both catch blocks
     * of {@link #run()}, and neither copy decremented the global worker count.
     * Because {@code run()} increments the count on startup, a lost worker left
     * the server believing the thread was still active, which can make the
     * server wait forever for departed workers (cf. bug 175). The handling is
     * now centralized here and the count is decremented.
     *
     * @param stateQueue queue the unfinished states are returned to so other
     *        workers can pick them up
     */
    private void handleRemoteWorkerLost(final StateQueue stateQueue) {
        stateQueue.sEnqueue(states);
        TLCGlobals.incNumWorkers(-1);
        MP.printMessage(EC.TLC_DISTRIBUTED_WORKER_DEREGISTERED, getUri().toString());
    }

    /**
     * Causes this thread to wait for all other worker threads before it starts
     * computing states. The barrier may be null in which case threads start
     * computing next states immediately after creation.
     */
    private void waitOnBarrier() {
        try {
            if(barrier != null)
                barrier.await();
        } catch (InterruptedException e2) {
            e2.printStackTrace();
        } catch (BrokenBarrierException e2) {
            e2.printStackTrace();
        }
    }

    /**
     * @return The current amount of states the corresponding worker is
     * computing on
     */
    public int getCurrentSize() {
        if(states != null) {
            return states.length;
        }
        return 0;
    }

    /**
     * @return the url
     */
    public URI getUri() {
        return this.uri;
    }

    /**
     * @return the receivedStates
     */
    public int getReceivedStates() {
        return receivedStates;
    }

    /**
     * @return the sentStates
     */
    public int getSentStates() {
        return sentStates;
    }
}
|
NEW - bug 175: Deadlock in DiskStateQueue if subset of workers die
(disconnect) during model run
http://bugzilla.tlaplus.net/show_bug.cgi?id=175
- refactor work lost code
git-svn-id: 7acc490bd371dbc82047a939b87dc892fdc31f59@22921 76a6fc44-f60b-0410-a9a8-e67b0e8fc65c
|
tlatools/src/tlc2/tool/distributed/TLCServerThread.java
|
NEW - bug 175: Deadlock in DiskStateQueue if subset of workers die (disconnect) during model run http://bugzilla.tlaplus.net/show_bug.cgi?id=175 - refactor work lost code
|
|
Java
|
mit
|
d721b10c3fcac4d898747f29419acc0ca368af3e
| 0
|
ytimesru/kkm-pc-client
|
package org.bitbucket.ytimes.client.kkm.printer;
import org.bitbucket.ytimes.client.utils.Utils;
import org.bitbucket.ytimes.client.kkm.record.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.util.StringUtils;
import ru.atol.drivers10.fptr.Fptr;
import ru.atol.drivers10.fptr.IFptr;
import java.math.BigDecimal;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
/**
* Created by root on 27.05.17.
*/
public class AtolPrinter implements Printer {
protected Logger logger = LoggerFactory.getLogger(getClass());
protected IFptr fptr;
private String port;
private String wifiIP;
private Integer wifiPort;
private int model;
private Map<String, Integer> modelList = new HashMap<String, Integer>();
private VAT vat = VAT.NO;
private OFDChannel ofdChannel = null;
    /**
     * Creates a printer driver wrapper for a specific ATOL device model.
     *
     * @param model    symbolic model name (e.g. "ATOL30F"); must be one of the supported keys below
     * @param port     connection mode: "TCPIP", "USBAUTO", "COM..." or an explicit USB device path
     * @param wifiIP   device IP address; only used when port is "TCPIP"
     * @param wifiPort device TCP port; only used when port is "TCPIP"
     * @throws PrinterException if the model name is not supported by this module
     */
    public AtolPrinter(String model, String port, String wifiIP, Integer wifiPort) throws PrinterException {
        this.port = port;
        this.wifiIP = wifiIP;
        this.wifiPort = wifiPort;
        // Symbolic model name -> numeric model code passed to the ATOL driver
        // (values are presumably LIBFPTR model ids — TODO confirm against the fptr10 driver docs).
        modelList.put("ATOLAUTO", 500);
        modelList.put("ATOLENVD", 500);
        modelList.put("ATOL11F", 67);
        modelList.put("ATOL15F", 78);
        modelList.put("ATOL20F", 81);
        modelList.put("ATOL22F", 63);
        modelList.put("ATOL25F", 57);
        modelList.put("ATOL30F", 61);
        modelList.put("ATOL50F", 80);
        modelList.put("ATOL55F", 62);
        modelList.put("ATOL90F", 72);
        modelList.put("ATOL91F", 82);
        if (!modelList.containsKey(model)) {
            // User-facing message (Russian): "model not supported in this version of the module"
            throw new PrinterException(0, "Модель не поддерживается в данной версии коммуникационного модуля");
        }
        this.model = modelList.get(model);
    }
    /** Sets the VAT mode stored for this printer instance. */
    public void setVat(VAT vat) {
        this.vat = vat;
    }
    /**
     * Sets the OFD (fiscal data operator) exchange channel; may be null,
     * in which case no channel configuration is applied.
     */
    public void setOfdChannel(OFDChannel ofdChannel) {
        this.ofdChannel = ofdChannel;
    }
    /** @return true when the driver currently holds an open connection to the device */
    synchronized public boolean isConnected() throws PrinterException {
        return fptr.isOpened();
    }
    /**
     * Queries the device and its fiscal storage (FN) for identification and
     * status data: serial numbers, firmware and FFD versions, the OFD exchange
     * backlog, and the FN expiration date.
     *
     * @return a populated {@link ModelInfoRecord}
     * @throws PrinterException when any driver query fails
     */
    synchronized public ModelInfoRecord getInfo() throws PrinterException {
        // General device status
        fptr.setParam(IFptr.LIBFPTR_PARAM_DATA_TYPE, IFptr.LIBFPTR_DT_STATUS);
        if (fptr.queryData() < 0) {
            checkError(fptr);
        }
        ModelInfoRecord record = new ModelInfoRecord();
        record.serialNumber = fptr.getParamString(IFptr.LIBFPTR_PARAM_SERIAL_NUMBER);
        record.modelName = fptr.getParamString(IFptr.LIBFPTR_PARAM_MODEL_NAME);
        record.unitVersion = fptr.getParamString(IFptr.LIBFPTR_PARAM_UNIT_VERSION);
        // OFD (fiscal data operator) registration info
        fptr.setParam(IFptr.LIBFPTR_PARAM_FN_DATA_TYPE, IFptr.LIBFPTR_FNDT_REG_INFO);
        if (fptr.fnQueryData() < 0) {
            checkError(fptr);
        }
        // 1046: presumably the FFD tag number carrying the OFD name — TODO confirm against the driver docs.
        record.ofdName = fptr.getParamString(1046);
        // OFD exchange status: number of documents not yet sent and the timestamp of the oldest
        fptr.setParam(IFptr.LIBFPTR_PARAM_FN_DATA_TYPE, IFptr.LIBFPTR_FNDT_OFD_EXCHANGE_STATUS);
        if (fptr.fnQueryData() < 0) {
            checkError(fptr);
        }
        record.ofdUnsentCount = fptr.getParamInt(IFptr.LIBFPTR_PARAM_DOCUMENTS_COUNT);
        Date unsentDateTime = fptr.getParamDateTime(IFptr.LIBFPTR_PARAM_DATE_TIME);
        if (unsentDateTime != null) {
            record.ofdUnsentDatetime = Utils.toDateString(unsentDateTime);
        }
        // FFD (fiscal document format) versions of the device, the FN and the active one
        fptr.setParam(IFptr.LIBFPTR_PARAM_FN_DATA_TYPE, IFptr.LIBFPTR_FNDT_FFD_VERSIONS);
        if (fptr.fnQueryData() < 0) {
            checkError(fptr);
        }
        long deviceFfdVersion = fptr.getParamInt(IFptr.LIBFPTR_PARAM_DEVICE_FFD_VERSION);
        record.deviceFfdVersion = getFFDVersion(deviceFfdVersion);
        long fnFfdVersion = fptr.getParamInt(IFptr.LIBFPTR_PARAM_FN_FFD_VERSION);
        record.fnFfdVersion = getFFDVersion(fnFfdVersion);
        long ffdVersion = fptr.getParamInt(IFptr.LIBFPTR_PARAM_FFD_VERSION);
        record.ffdVersion = getFFDVersion(ffdVersion);
        // FN (fiscal storage) identification
        // NOTE(review): unlike the other queries, this fnQueryData() result is not checked — verify intent.
        fptr.setParam(IFptr.LIBFPTR_PARAM_FN_DATA_TYPE, IFptr.LIBFPTR_FNDT_FN_INFO);
        fptr.fnQueryData();
        record.fnSerial = fptr.getParamString(IFptr.LIBFPTR_PARAM_SERIAL_NUMBER);
        record.fnVersion = fptr.getParamString(IFptr.LIBFPTR_PARAM_FN_VERSION);
        // FN expiration date
        fptr.setParam(IFptr.LIBFPTR_PARAM_FN_DATA_TYPE, IFptr.LIBFPTR_FNDT_VALIDITY);
        if (fptr.fnQueryData() < 0) {
            checkError(fptr);
        }
        Date dateTime = fptr.getParamDateTime(IFptr.LIBFPTR_PARAM_DATE_TIME);
        if (dateTime != null) {
            record.fnDate = Utils.toDateString(dateTime);
        }
        return record;
    }
private String getFFDVersion(long version) {
if (version == IFptr.LIBFPTR_FFD_1_0) {
return "1.0";
}
if (version == IFptr.LIBFPTR_FFD_1_0_5) {
return "1.05";
}
if (version == IFptr.LIBFPTR_FFD_1_1) {
return "1.1";
}
return "неизвестная";
}
synchronized public void applySettingsAndConnect() throws PrinterException {
if (fptr != null) {
try {
disconnect();
}
catch (Throwable e) {
logger.error(e.getMessage(), e);
}
}
logger.info("START ATOL PRINTER");
logger.info("PORT: " + port);
fptr = new Fptr();
fptr.setSingleSetting(fptr.LIBFPTR_SETTING_LIBRARY_PATH, System.getProperty("java.library.path"));
fptr.setSingleSetting(IFptr.LIBFPTR_SETTING_MODEL, String.valueOf(model));
if (port.equals("TCPIP")) {
logger.info("Connect to: " + wifiIP + ":" + wifiPort);
fptr.setSingleSetting(IFptr.LIBFPTR_SETTING_PORT, String.valueOf(IFptr.LIBFPTR_PORT_TCPIP));
fptr.setSingleSetting(IFptr.LIBFPTR_SETTING_IPADDRESS, wifiIP);
fptr.setSingleSetting(IFptr.LIBFPTR_SETTING_IPPORT, String.valueOf(wifiPort));
}
else if (port.equals("USBAUTO")) {
logger.info("Connect to port: " + port);
fptr.setSingleSetting(IFptr.LIBFPTR_SETTING_PORT, String.valueOf(IFptr.LIBFPTR_PORT_USB));
fptr.setSingleSetting(IFptr.LIBFPTR_SETTING_USB_DEVICE_PATH, "auto");
}
else if (port.startsWith("COM")) {
logger.info("Connect to port: " + port);
fptr.setSingleSetting(IFptr.LIBFPTR_SETTING_PORT, String.valueOf(IFptr.LIBFPTR_PORT_COM));
fptr.setSingleSetting(IFptr.LIBFPTR_SETTING_COM_FILE, port);
fptr.setSingleSetting(IFptr.LIBFPTR_SETTING_BAUDRATE, String.valueOf(IFptr.LIBFPTR_PORT_BR_115200));
}
else {
logger.info("Connect to port: " + port);
fptr.setSingleSetting(IFptr.LIBFPTR_SETTING_PORT, String.valueOf(IFptr.LIBFPTR_PORT_USB));
fptr.setSingleSetting(IFptr.LIBFPTR_SETTING_USB_DEVICE_PATH, port);
}
if (ofdChannel != null) {
if (ofdChannel.equals(OFDChannel.PROTO)) {
logger.info("ОФД средвами транспортного протокола (OFD PROTO 1)");
fptr.setSingleSetting(IFptr.LIBFPTR_SETTING_OFD_CHANNEL, String.valueOf(IFptr.LIBFPTR_OFD_CHANNEL_PROTO));
}
else if (ofdChannel.equals(OFDChannel.ASIS)) {
logger.info("ОФД используя настройки ККМ (OFD NONE 2)");
fptr.setSingleSetting(IFptr.LIBFPTR_SETTING_OFD_CHANNEL, String.valueOf(IFptr.LIBFPTR_OFD_CHANNEL_NONE));
}
else {
throw new PrinterException(0, "Не поддерживаемое значение параметра связи с ОФД");
}
}
if (fptr.applySingleSettings() < 0) {
checkError(fptr);
}
logger.info("ATOL PRINTER STARTED");
connect();
if (!ofdChannel.equals(OFDChannel.ASIS)) {
if (ofdChannel.equals(OFDChannel.USB)) {
fptr.setParam(IFptr.LIBFPTR_PARAM_SETTING_ID, 276);
fptr.setParam(IFptr.LIBFPTR_PARAM_SETTING_VALUE, 1);
fptr.writeDeviceSetting();
logger.info("ОФД через USB (установить EoU модуль)");
}
if (ofdChannel.equals(OFDChannel.ETHERNET)) {
fptr.setParam(IFptr.LIBFPTR_PARAM_SETTING_ID, 276);
fptr.setParam(IFptr.LIBFPTR_PARAM_SETTING_VALUE, 2);
fptr.writeDeviceSetting();
logger.info("ОФД через Ethernet");
}
else if (ofdChannel.equals(OFDChannel.WIFI)) {
fptr.setParam(IFptr.LIBFPTR_PARAM_SETTING_ID, 276);
fptr.setParam(IFptr.LIBFPTR_PARAM_SETTING_VALUE, 3);
fptr.writeDeviceSetting();
logger.info("ОФД через WiFi");
}
else if (ofdChannel.equals(OFDChannel.GSM)) {
fptr.setParam(IFptr.LIBFPTR_PARAM_SETTING_ID, 276);
fptr.setParam(IFptr.LIBFPTR_PARAM_SETTING_VALUE, 4);
fptr.writeDeviceSetting();
logger.info("ОФД через GSM");
}
else if (ofdChannel.equals(OFDChannel.TRANSPORT)) {
fptr.setParam(IFptr.LIBFPTR_PARAM_SETTING_ID, 276);
fptr.setParam(IFptr.LIBFPTR_PARAM_SETTING_VALUE, 5);
fptr.writeDeviceSetting();
logger.info("ОФД через транспортный протокол");
}
else if (ofdChannel.equals(OFDChannel.PROTO)) {
fptr.setParam(IFptr.LIBFPTR_PARAM_SETTING_ID, 276);
fptr.setParam(IFptr.LIBFPTR_PARAM_SETTING_VALUE, 5);
fptr.writeDeviceSetting();
//publishProgress("ОФД через транспортный протокол");
}
else {
throw new PrinterException(0, "Не поддерживаемое значение параметра связи с ОФД: " + ofdChannel.name());
}
}
}
    /**
     * Opens the driver connection to the device and cancels any receipt left
     * open from a previous session, so the device starts in a clean state.
     *
     * @throws PrinterException when opening the connection fails
     */
    public void connect() throws PrinterException {
        if (fptr.open() < 0) {
            checkError(fptr);
        }
        cancelCheck();
        logger.info("ATOL PRINTER CONNECTED");
    }
    /** Shutdown hook: closes the device connection. */
    public void destroy() throws Throwable {
        disconnect();
    }
    /**
     * Closes the driver connection to the device.
     *
     * @throws PrinterException when closing fails
     */
    protected void disconnect() throws PrinterException {
        if (fptr.close() < 0) {
            checkError(fptr);
        }
    }
    /**
     * Prints an X-report (shift status report without closing the shift).
     *
     * @param record carries the operator credentials used by {@code loginOperator}
     * @throws PrinterException on driver error
     */
    synchronized public void reportX(ReportCommandRecord record) throws PrinterException {
        loginOperator(record);
        fptr.setParam(IFptr.LIBFPTR_PARAM_REPORT_TYPE, IFptr.LIBFPTR_RT_X);
        if (fptr.report() < 0) {
            checkError(fptr);
        }
        // Block until the device has finished printing the document.
        if (!waitDocumentClosed()) {
            checkError(fptr);
        }
    }
    /**
     * Prints a Z-report, i.e. closes the current shift on the device.
     *
     * @param record carries the operator credentials used by {@code loginOperator}
     * @throws PrinterException on driver error
     */
    synchronized public void reportZ(AbstractCommandRecord record) throws PrinterException {
        loginOperator(record);
        fptr.setParam(IFptr.LIBFPTR_PARAM_REPORT_TYPE, IFptr.LIBFPTR_RT_CLOSE_SHIFT);
        if (fptr.report() < 0) {
            checkError(fptr);
        }
        // Block until the device has finished printing the document.
        if (!waitDocumentClosed()) {
            checkError(fptr);
        }
    }
    /**
     * Opens a new shift on the device.
     *
     * @param record carries the operator credentials used by {@code loginOperator}
     * @throws PrinterException on driver error
     */
    synchronized public void startShift(ReportCommandRecord record) throws PrinterException {
        loginOperator(record);
        if (fptr.openShift() < 0) {
            checkError(fptr);
        }
        // Block until the device has finished printing the document.
        if (!waitDocumentClosed()) {
            checkError(fptr);
        }
    }
    /**
     * Registers a cash deposit (cash-in) of {@code record.sum} into the drawer.
     *
     * @param record carries the operator credentials and the amount
     * @throws PrinterException on driver error
     */
    synchronized public void cashIncome(CashChangeRecord record) throws PrinterException {
        loginOperator(record);
        fptr.setParam(IFptr.LIBFPTR_PARAM_SUM, record.sum);
        if (fptr.cashIncome() < 0) {
            checkError(fptr);
        }
        // Block until the device has finished printing the document.
        if (!waitDocumentClosed()) {
            checkError(fptr);
        }
    }
    /**
     * Registers a cash withdrawal (cash-out) of {@code record.sum} from the drawer.
     *
     * @param record carries the operator credentials and the amount
     * @throws PrinterException on driver error
     */
    synchronized public void cashOutcome(CashChangeRecord record) throws PrinterException {
        loginOperator(record);
        fptr.setParam(IFptr.LIBFPTR_PARAM_SUM, record.sum);
        if (fptr.cashOutcome() < 0) {
            checkError(fptr);
        }
        // Block until the device has finished printing the document.
        if (!waitDocumentClosed()) {
            checkError(fptr);
        }
    }
    /**
     * Prints a copy of the last fiscal document.
     *
     * @param record carries the operator credentials used by {@code loginOperator}
     * @throws PrinterException on driver error
     */
    synchronized public void copyLastDoc(AbstractCommandRecord record) throws PrinterException {
        loginOperator(record);
        fptr.setParam(IFptr.LIBFPTR_PARAM_REPORT_TYPE, IFptr.LIBFPTR_RT_LAST_DOCUMENT);
        if (fptr.report() < 0) {
            checkError(fptr);
        }
        // Block until the device has finished printing the document.
        if (!waitDocumentClosed()) {
            checkError(fptr);
        }
    }
    /**
     * Prints the device's built-in demo report.
     *
     * @param record carries the operator credentials used by {@code loginOperator}
     * @throws PrinterException on driver error
     */
    synchronized public void demoReport(AbstractCommandRecord record) throws PrinterException {
        loginOperator(record);
        fptr.setParam(IFptr.LIBFPTR_PARAM_REPORT_TYPE, IFptr.LIBFPTR_RT_KKT_DEMO);
        if (fptr.report() < 0) {
            checkError(fptr);
        }
        // Block until the device has finished printing the document.
        if (!waitDocumentClosed()) {
            checkError(fptr);
        }
    }
    /**
     * Prints the OFD (fiscal data operator) connection diagnostics report.
     *
     * @param record carries the operator credentials used by {@code loginOperator}
     * @throws PrinterException on driver error
     */
    synchronized public void ofdTestReport(AbstractCommandRecord record) throws PrinterException {
        loginOperator(record);
        fptr.setParam(IFptr.LIBFPTR_PARAM_REPORT_TYPE, IFptr.LIBFPTR_RT_OFD_TEST);
        if (fptr.report() < 0) {
            checkError(fptr);
        }
        // Block until the device has finished printing the document.
        if (!waitDocumentClosed()) {
            checkError(fptr);
        }
    }
    // Issues the bill: prints a pre-check (non-fiscal order summary) for the guest.
    synchronized public void printPredCheck(PrintCheckCommandRecord record) throws PrinterException {
        doPrintPredCheck(record);
    }
/**
 * Prints a non-fiscal "pre-check" (bill): payment positions with discounts,
 * a bold total line, optional guest details (layout depends on guest type)
 * and any free-form additional lines.
 *
 * @throws PrinterException if validation fails or the driver reports an error
 */
private void doPrintPredCheck(PrintCheckCommandRecord record) throws PrinterException {
    checkRecord(record);
    if (fptr.beginNonfiscalDocument() < 0) {
        checkError(fptr);
    }
    printText("СЧЕТ (ПРЕДЧЕК)");
    printText("");
    printText("ПОЗИЦИИ ОПЛАТЫ", IFptr.LIBFPTR_ALIGNMENT_CENTER, IFptr.LIBFPTR_TW_WORDS);
    for(int i = 0; i < record.itemList.size(); i++) {
        ItemRecord r = record.itemList.get(i);
        printText((i + 1) + ". " + r.name, IFptr.LIBFPTR_ALIGNMENT_LEFT, IFptr.LIBFPTR_TW_WORDS);
        double total = r.price * r.quantity;
        printText(r.price + " x " + r.quantity + " = " + total, IFptr.LIBFPTR_ALIGNMENT_RIGHT, IFptr.LIBFPTR_TW_CHARS);
        // At most one of discountSum / discountPercent is set (see checkRecord).
        if (r.discountSum != null && r.discountSum > 0) {
            printText("Скидка: " + r.discountSum, IFptr.LIBFPTR_ALIGNMENT_RIGHT, IFptr.LIBFPTR_TW_CHARS);
        }
        if (r.discountPercent != null && r.discountPercent > 0) {
            printText("Скидка: " + r.discountPercent + "%", IFptr.LIBFPTR_ALIGNMENT_RIGHT, IFptr.LIBFPTR_TW_CHARS);
        }
    }
    printBoldText("ИТОГО: " + record.moneySum, IFptr.LIBFPTR_ALIGNMENT_RIGHT, IFptr.LIBFPTR_TW_CHARS);
    // Time-based guests: numbered list with arrival time and minutes spent.
    if (GuestType.TIME.equals(record.type) && record.guestInfoList != null) {
        printText("");
        printText("РАССЧИТЫВАЕМЫЕ ГОСТИ", IFptr.LIBFPTR_ALIGNMENT_LEFT, IFptr.LIBFPTR_TW_CHARS);
        int i = 1;
        for(GuestRecord r: record.guestInfoList) {
            String name = r.name;
            if (!StringUtils.isEmpty(r.card)) {
                name += " (" + r.card + ")";
            };
            printText(i + ". " + name, IFptr.LIBFPTR_ALIGNMENT_LEFT, IFptr.LIBFPTR_TW_CHARS);
            printText("время прихода: " + r.startTime, IFptr.LIBFPTR_ALIGNMENT_RIGHT, IFptr.LIBFPTR_TW_CHARS);
            printText("проведенное время: " + r.minutes + " мин.", IFptr.LIBFPTR_ALIGNMENT_RIGHT, IFptr.LIBFPTR_TW_CHARS);
            i++;
        }
        printText("");
        printText("");
    }
    // Take-away orders: guest name, phone and free-form message.
    if (GuestType.TOGO.equals(record.type) && record.guestInfoList != null) {
        printText("");
        for(GuestRecord r: record.guestInfoList) {
            String name = r.name;
            if (!StringUtils.isEmpty(r.phone)) {
                name += ", " + r.phone;
            }
            printText(name, IFptr.LIBFPTR_ALIGNMENT_CENTER, IFptr.LIBFPTR_TW_CHARS);
            printText(r.message);
        }
        printText("");
        printText("");
    }
    if (record.additionalInfo != null) {
        printText("");
        for(String s: record.additionalInfo) {
            printText(s, IFptr.LIBFPTR_ALIGNMENT_CENTER, IFptr.LIBFPTR_TW_WORDS);
        }
        printText("");
    }
    if (fptr.endNonfiscalDocument() < 0) {
        checkError(fptr);
    }
}
/** Prints a fiscal sale receipt. */
synchronized public void printCheck(PrintCheckCommandRecord record) throws PrinterException {
    doPrintCheck(record, IFptr.LIBFPTR_RT_SELL);
}

/** Prints a fiscal sale-return (refund) receipt. */
synchronized public void printReturnCheck(PrintCheckCommandRecord record) throws PrinterException {
    doPrintCheck(record, IFptr.LIBFPTR_RT_SELL_RETURN);
}
/**
 * Prints a fiscal receipt (sale or sale-return): validates the record,
 * registers every item with its discount applied, registers payments
 * (electronic and/or cash), optionally rounds kopecks away, and closes
 * the receipt. On any error the receipt is cancelled and the exception
 * is rethrown.
 *
 * Fix: monetary amounts are now built with {@link BigDecimal#valueOf(double)}
 * instead of {@code new BigDecimal(double)}; the latter embeds the binary
 * floating-point noise of the double into the decimal value (Effective Java,
 * Item 60), which leaks into registered prices and the logged totals.
 *
 * @param record    validated payment record (items, sums, flags)
 * @param checkType driver receipt type, e.g. LIBFPTR_RT_SELL
 * @throws PrinterException if validation or any driver call fails
 */
private void doPrintCheck(PrintCheckCommandRecord record, int checkType) throws PrinterException {
    checkRecord(record);
    cancelCheck();
    loginOperator(record);
    // Open the receipt; a 24h-expired shift is closed with a Z-report and retried once.
    try {
        openCheck(record, checkType);
    } catch (PrinterException e) {
        if (e.getCode() == IFptr.LIBFPTR_ERROR_SHIFT_EXPIRED) {
            reportZ(record);
            openCheck(record, checkType);
        } else {
            throw e;
        }
    }
    try {
        BigDecimal totalPrice = BigDecimal.ZERO;
        for (ItemRecord r : record.itemList) {
            BigDecimal price = BigDecimal.valueOf(r.price);
            BigDecimal discountPosition = BigDecimal.ZERO;
            if (r.discountSum != null) {
                discountPosition = BigDecimal.valueOf(r.discountSum);
            } else if (r.discountPercent != null) {
                if (r.discountPercent > 100) {
                    r.discountPercent = 100.0; // cap at 100% — never discount below zero
                }
                BigDecimal value = BigDecimal.valueOf(r.price).multiply(BigDecimal.valueOf(r.quantity));
                // Division by 100 only shifts the scale, so it is always exact.
                discountPosition = value.multiply(BigDecimal.valueOf(r.discountPercent)).divide(BigDecimal.valueOf(100));
            }
            BigDecimal priceWithDiscount = price.subtract(discountPosition);
            logger.info("Name: " + r.name + ", price=" + price + ", discount = " + discountPosition + ", priceWithDiscount = " + priceWithDiscount);
            registrationFZ54(r.name, priceWithDiscount.doubleValue(), r.quantity, r.vatValue, r.type);
            totalPrice = totalPrice.add(priceWithDiscount.multiply(BigDecimal.valueOf(r.quantity)));
        }
        if (record.creditSum != null && record.creditSum > 0) {
            payment(record.creditSum, IFptr.LIBFPTR_PT_ELECTRONICALLY);
        }
        if (record.moneySum != null && record.moneySum > 0) {
            payment(record.moneySum, IFptr.LIBFPTR_PT_CASH);
        }
        logger.info("Total price = " + totalPrice);
        // Optionally drop kopecks by forcing a whole-ruble receipt total.
        if (Boolean.TRUE.equals(record.dropPenny)) {
            double totalWithoutPenny = totalPrice.setScale(0, BigDecimal.ROUND_HALF_DOWN).doubleValue();
            fptr.setParam(IFptr.LIBFPTR_PARAM_SUM, totalWithoutPenny);
            if (fptr.receiptTotal() < 0) {
                checkError(fptr);
            }
        }
        // Close the receipt; in test mode cancel it instead of committing.
        if (Boolean.TRUE.equals(record.testMode)) {
            cancelCheck();
        }
        else {
            if (fptr.closeReceipt() < 0) {
                checkError(fptr);
            }
            if (!waitDocumentClosed()) {
                cancelCheck();
            }
            continuePrint();
        }
    }
    catch (PrinterException e) {
        logger.error(e.getMessage(), e);
        cancelCheck();
        throw e;
    }
}
/**
 * Validates a payment record before any driver interaction: a non-empty
 * item list, at least one total sum, a name/price/quantity per item, at
 * most one discount kind per item, and — for electronic-only receipts —
 * a customer phone or email.
 *
 * Fix: corrected the typo «обязателно» → «обязательно» in the user-facing
 * validation message.
 *
 * @throws PrinterException with code 0 and a human-readable message on the
 *         first validation failure
 */
protected void checkRecord(PrintCheckCommandRecord record) throws PrinterException {
    if (record.itemList == null || record.itemList.isEmpty()) {
        throw new PrinterException(0, "Список оплаты пустой");
    }
    if (record.moneySum == null && record.creditSum == null) {
        throw new PrinterException(0, "Итоговое значение для оплаты не задано");
    }
    for(ItemRecord r: record.itemList) {
        if (StringUtils.isEmpty(r.name)) {
            throw new PrinterException(0, "Не задано наименование позиции");
        }
        if (r.price == null) {
            throw new PrinterException(0, "Не задана цена позиции: " + r.name);
        }
        if (r.quantity == null) {
            throw new PrinterException(0, "Не задано количество позиции: " + r.name);
        }
        if (r.discountPercent != null && r.discountSum != null) {
            throw new PrinterException(0, "Нужно задать только один тип скидки - либо в процентах, либо в сумме. Позиция: " + r.name);
        }
    }
    if (Boolean.TRUE.equals(record.onlyElectronically)) {
        if (StringUtils.isEmpty(record.phone) && StringUtils.isEmpty(record.email)) {
            throw new PrinterException(0, "Для электронных чеков обязательно задание телефона или email покупателя");
        }
    }
}
/**
 * Maps the domain {@code VAT} enum onto the driver's tax-type constant.
 * A {@code null} value means "no VAT".
 *
 * @throws PrinterException for an enum constant with no known mapping
 */
private int getVatNumber(VAT vatValue) throws PrinterException {
    if (vatValue == null) {
        return IFptr.LIBFPTR_TAX_NO;
    }
    switch (vatValue) {
        case NO:
            return IFptr.LIBFPTR_TAX_NO;
        case VAT0:
            return IFptr.LIBFPTR_TAX_VAT0;
        case VAT10:
            return IFptr.LIBFPTR_TAX_VAT10;
        case VAT18:
            return IFptr.LIBFPTR_TAX_VAT18;
        case VAT110:
            return IFptr.LIBFPTR_TAX_VAT110;
        case VAT118:
            return IFptr.LIBFPTR_TAX_VAT118;
        default:
            throw new PrinterException(0, "Неизвестный тип налога: " + vatValue);
    }
}
/**
 * Registers one receipt position per Russian federal law 54-FZ:
 * name, price, quantity and tax type; marks services with tag 1212.
 *
 * @param itemVat per-item VAT override; falls back to the printer-wide VAT
 * @throws PrinterException if the driver rejects the registration
 */
private void registrationFZ54(String name, double price, double quantity, VAT itemVat, ItemType type) throws PrinterException {
    fptr.setParam(IFptr.LIBFPTR_PARAM_COMMODITY_NAME, name);
    fptr.setParam(IFptr.LIBFPTR_PARAM_PRICE, price);
    fptr.setParam(IFptr.LIBFPTR_PARAM_QUANTITY, quantity);
    VAT vatValue = this.vat;
    if (itemVat != null) {
        vatValue = itemVat;
    }
    int vatNumber = getVatNumber(vatValue);
    fptr.setParam(IFptr.LIBFPTR_PARAM_TAX_TYPE, vatNumber);
    // Tag 1212 = payment subject attribute; value 4 presumably marks a
    // "service" subject — TODO confirm against the FFD tag reference.
    if (ItemType.SERVICE.equals(type)) {
        fptr.setParam(1212, 4);
    }
    if (fptr.registration() < 0) {
        checkError(fptr);
    }
}
/**
 * Registers a payment of the given sum with the given driver payment type
 * (e.g. LIBFPTR_PT_CASH or LIBFPTR_PT_ELECTRONICALLY) on the open receipt.
 *
 * @throws PrinterException if the driver rejects the payment
 */
private void payment(double sum, int type) throws PrinterException {
    fptr.setParam(IFptr.LIBFPTR_PARAM_PAYMENT_TYPE, type);
    fptr.setParam(IFptr.LIBFPTR_PARAM_PAYMENT_SUM, sum);
    if (fptr.payment() < 0) {
        checkError(fptr);
    }
}
/**
 * Opens a fiscal receipt of the given type and fills customer attributes:
 * customer contact (tag 1008, phone preferred over email, 10-digit mobile
 * numbers normalized to +7...), electronic-only flag, sender email
 * (tag 1117) and billing location (tag 1187).
 *
 * Fix: the result of {@code openReceipt()} was compared with {@code < 1}
 * while every other driver call in this class treats only a negative return
 * as failure; {@code < 1} also routed the successful return value 0 through
 * checkError (a no-op only because errorCode() is then 0). Normalized to
 * {@code < 0} for consistency.
 *
 * @throws PrinterException if the driver fails to open the receipt
 */
private void openCheck(PrintCheckCommandRecord record, int type) throws PrinterException {
    fptr.setParam(IFptr.LIBFPTR_PARAM_RECEIPT_TYPE, type);
    if (!StringUtils.isEmpty(record.phone)) {
        // A bare 10-digit mobile number ("9XXXXXXXXX") gets the +7 country code.
        if (record.phone.length() == 10 && record.phone.startsWith("9")) {
            record.phone = "+7" + record.phone;
        }
        fptr.setParam(1008, record.phone);
    }
    else if (!StringUtils.isEmpty(record.email)) {
        fptr.setParam(1008, record.email);
    }
    if (Boolean.TRUE.equals(record.onlyElectronically)) {
        fptr.setParam(IFptr.LIBFPTR_PARAM_RECEIPT_ELECTRONICALLY, true);
    }
    if (!StringUtils.isEmpty(record.emailFrom)) {
        fptr.setParam(1117, record.emailFrom);
    }
    if (!StringUtils.isEmpty(record.billingLocation)) {
        fptr.setParam(1187, record.billingLocation);
    }
    if (fptr.openReceipt() < 0) {
        checkError(fptr);
    }
}
/**
 * Converts the driver's last error into a logged {@link PrinterException}.
 */
protected void checkError(IFptr fptr) throws PrinterException {
    checkError(fptr, true);
}

/**
 * Converts the driver's last error into a {@link PrinterException}.
 * Does nothing when the last call succeeded (error code 0).
 *
 * @param log whether to log the error description before throwing
 */
private void checkError(IFptr fptr, boolean log) throws PrinterException {
    int rc = fptr.errorCode();
    if (rc > 0) {
        if (log) {
            logger.error(fptr.errorDescription());
        }
        throw new PrinterException(rc, fptr.errorDescription());
    }
}
/**
 * Polls the device until the current fiscal document is reported closed,
 * retrying once per second for up to ~20 seconds. The loop bails out early
 * on the localized "not supported in this version" description or when the
 * thread is interrupted.
 *
 * Fixes: the interrupt status is restored before breaking out (swallowing
 * InterruptedException hides the interrupt from callers), and the trailing
 * {@code if (!x) return false; return true;} is collapsed into a direct
 * boolean return.
 *
 * @return true if the driver reports the document closed
 */
private boolean waitDocumentClosed() {
    int count = 0;
    while (fptr.checkDocumentClosed() < 0) {
        // Could not query the document state: log and retry after a pause.
        String errorDescription = fptr.errorDescription();
        logger.error(errorDescription);
        // NOTE(review): matching on a localized message is fragile — confirm
        // there is no error-code equivalent in the driver.
        if ("Не поддерживается в данной версии".equalsIgnoreCase(errorDescription)) {
            break;
        }
        try {
            Thread.sleep(1000);
        }
        catch (InterruptedException e) {
            Thread.currentThread().interrupt(); // restore interrupt status
            break;
        }
        count++;
        if (count > 20) {
            break;
        }
    }
    return fptr.getParamBool(IFptr.LIBFPTR_PARAM_DOCUMENT_CLOSED);
}
/**
 * If the closed document did not finish printing (e.g. out of paper),
 * keeps asking the device to continue printing, retrying once per second
 * for up to ~20 seconds. Errors are logged but never thrown.
 *
 * Fix: the interrupt status is restored before breaking out of the retry
 * loop instead of being silently swallowed.
 */
private void continuePrint() {
    int count = 0;
    if (!fptr.getParamBool(IFptr.LIBFPTR_PARAM_DOCUMENT_PRINTED)) {
        while (fptr.continuePrint() < 0) {
            String errorDescription = fptr.errorDescription();
            logger.error(errorDescription);
            if ("Не поддерживается в данной версии".equalsIgnoreCase(errorDescription)) {
                break;
            }
            try {
                Thread.sleep(1000);
            }
            catch (InterruptedException e) {
                Thread.currentThread().interrupt(); // restore interrupt status
                break;
            }
            count++;
            if (count > 20) {
                break;
            }
        }
    }
}
/**
 * Registers the cashier on the device before a fiscal operation:
 * tag 1021 = operator name (optionally prefixed with the position),
 * tag 1203 = operator INN. No-op when no operator name is given.
 * The driver's return code is deliberately ignored here.
 */
private void loginOperator(AbstractCommandRecord record) {
    if (StringUtils.isEmpty(record.userFIO)) {
        return;
    }
    String fio = record.userFIO;
    if (!StringUtils.isEmpty(record.userPosition)) {
        fio = record.userPosition + " " + fio;
    }
    fptr.setParam(1021, fio);
    if (!StringUtils.isEmpty(record.userINN)) {
        fptr.setParam(1203, record.userINN);
    }
    fptr.operatorLogin();
}
/**
 * Cancels the current receipt if one is open. "Wrong mode" / "receipt
 * already closed" style errors are expected when nothing is open and are
 * therefore swallowed; any other driver error is rethrown (unlogged, since
 * cancellation is frequently attempted preemptively).
 */
private void cancelCheck() throws PrinterException {
    try {
        if (fptr.cancelReceipt() < 0)
            checkError(fptr, false);
    } catch (PrinterException e) {
        int rc = e.getCode();
        // Only "operation denied in closed receipt" is benign here.
        if (rc != IFptr.LIBFPTR_ERROR_DENIED_IN_CLOSED_RECEIPT) {
            throw e;
        }
    }
}
/**
 * Prints one centered, word-wrapped text line.
 */
protected void printText(String text) throws PrinterException {
    printText(text, IFptr.LIBFPTR_ALIGNMENT_CENTER, IFptr.LIBFPTR_TW_WORDS);
}

/**
 * Prints one text line with the given alignment and wrap mode.
 * The driver's return code is deliberately not checked.
 */
protected void printText(String text, int alignment, int wrap) throws PrinterException {
    fptr.setParam(IFptr.LIBFPTR_PARAM_TEXT, text);
    fptr.setParam(IFptr.LIBFPTR_PARAM_ALIGNMENT, alignment);
    fptr.setParam(IFptr.LIBFPTR_PARAM_TEXT_WRAP, wrap);
    fptr.printText();
}

/**
 * Prints one text line in double width and double height ("bold").
 */
protected void printBoldText(String text, int alignment, int wrap) throws PrinterException {
    fptr.setParam(IFptr.LIBFPTR_PARAM_TEXT, text);
    fptr.setParam(IFptr.LIBFPTR_PARAM_ALIGNMENT, alignment);
    fptr.setParam(IFptr.LIBFPTR_PARAM_TEXT_WRAP, wrap);
    fptr.setParam(IFptr.LIBFPTR_PARAM_FONT_DOUBLE_WIDTH, true);
    fptr.setParam(IFptr.LIBFPTR_PARAM_FONT_DOUBLE_HEIGHT, true);
    fptr.printText();
}
}
|
src/main/java/org/bitbucket/ytimes/client/kkm/printer/AtolPrinter.java
|
package org.bitbucket.ytimes.client.kkm.printer;
import org.bitbucket.ytimes.client.utils.Utils;
import org.bitbucket.ytimes.client.kkm.record.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.util.StringUtils;
import ru.atol.drivers10.fptr.Fptr;
import ru.atol.drivers10.fptr.IFptr;
import java.math.BigDecimal;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
/**
* Created by root on 27.05.17.
*/
public class AtolPrinter implements Printer {
protected Logger logger = LoggerFactory.getLogger(getClass());
protected IFptr fptr;
private String port;
private String wifiIP;
private Integer wifiPort;
private int model;
private Map<String, Integer> modelList = new HashMap<String, Integer>();
private VAT vat = VAT.NO;
private OFDChannel ofdChannel = null;
/**
 * Creates a printer wrapper for the given ATOL model and connection port.
 * Model symbolic names are mapped to the driver's numeric model codes.
 *
 * @param model    symbolic model name, e.g. "ATOL30F"
 * @param port     "TCPIP", "USBAUTO", "COM..." or a USB device path
 * @param wifiIP   device IP when port is "TCPIP"
 * @param wifiPort device TCP port when port is "TCPIP"
 * @throws PrinterException if the model name is unknown
 */
public AtolPrinter(String model, String port, String wifiIP, Integer wifiPort) throws PrinterException {
    this.port = port;
    this.wifiIP = wifiIP;
    this.wifiPort = wifiPort;
    modelList.put("ATOLAUTO", 500);
    modelList.put("ATOLENVD", 500);
    modelList.put("ATOL11F", 67);
    modelList.put("ATOL15F", 78);
    modelList.put("ATOL20F", 81);
    modelList.put("ATOL22F", 63);
    modelList.put("ATOL25F", 57);
    modelList.put("ATOL30F", 61);
    modelList.put("ATOL50F", 80);
    modelList.put("ATOL55F", 62);
    modelList.put("ATOL90F", 72);
    modelList.put("ATOL91F", 82);
    if (!modelList.containsKey(model)) {
        throw new PrinterException(0, "Модель не поддерживается в данной версии коммуникационного модуля");
    }
    this.model = modelList.get(model);
}

/** Sets the default VAT used when an item carries no per-item VAT. */
public void setVat(VAT vat) {
    this.vat = vat;
}

/** Sets the OFD communication channel applied on the next connect. */
public void setOfdChannel(OFDChannel ofdChannel) {
    this.ofdChannel = ofdChannel;
}

/** @return true while the driver connection is open */
synchronized public boolean isConnected() throws PrinterException {
    return fptr.isOpened();
}
/**
 * Queries the device for identification and status data: serial number and
 * model, OFD name and unsent-document backlog, supported FFD versions, and
 * the fiscal storage (FN) serial, version and expiry date.
 *
 * @return a populated {@link ModelInfoRecord}
 * @throws PrinterException if a driver query fails
 */
synchronized public ModelInfoRecord getInfo() throws PrinterException {
    fptr.setParam(IFptr.LIBFPTR_PARAM_DATA_TYPE, IFptr.LIBFPTR_DT_STATUS);
    if (fptr.queryData() < 0) {
        checkError(fptr);
    }
    ModelInfoRecord record = new ModelInfoRecord();
    record.serialNumber = fptr.getParamString(IFptr.LIBFPTR_PARAM_SERIAL_NUMBER);
    record.modelName = fptr.getParamString(IFptr.LIBFPTR_PARAM_MODEL_NAME);
    record.unitVersion = fptr.getParamString(IFptr.LIBFPTR_PARAM_UNIT_VERSION);
    // OFD (fiscal data operator): registered name (tag 1046) and exchange backlog.
    fptr.setParam(IFptr.LIBFPTR_PARAM_FN_DATA_TYPE, IFptr.LIBFPTR_FNDT_REG_INFO);
    if (fptr.fnQueryData() < 0) {
        checkError(fptr);
    }
    record.ofdName = fptr.getParamString(1046);
    fptr.setParam(IFptr.LIBFPTR_PARAM_FN_DATA_TYPE, IFptr.LIBFPTR_FNDT_OFD_EXCHANGE_STATUS);
    if (fptr.fnQueryData() < 0) {
        checkError(fptr);
    }
    record.ofdUnsentCount = fptr.getParamInt(IFptr.LIBFPTR_PARAM_DOCUMENTS_COUNT);
    Date unsentDateTime = fptr.getParamDateTime(IFptr.LIBFPTR_PARAM_DATE_TIME);
    if (unsentDateTime != null) {
        record.ofdUnsentDatetime = Utils.toDateString(unsentDateTime);
    }
    // FFD (fiscal document format) versions of the device, the FN and in use.
    fptr.setParam(IFptr.LIBFPTR_PARAM_FN_DATA_TYPE, IFptr.LIBFPTR_FNDT_FFD_VERSIONS);
    if (fptr.fnQueryData() < 0) {
        checkError(fptr);
    }
    long deviceFfdVersion = fptr.getParamInt(IFptr.LIBFPTR_PARAM_DEVICE_FFD_VERSION);
    record.deviceFfdVersion = getFFDVersion(deviceFfdVersion);
    long fnFfdVersion = fptr.getParamInt(IFptr.LIBFPTR_PARAM_FN_FFD_VERSION);
    record.fnFfdVersion = getFFDVersion(fnFfdVersion);
    long ffdVersion = fptr.getParamInt(IFptr.LIBFPTR_PARAM_FFD_VERSION);
    record.ffdVersion = getFFDVersion(ffdVersion);
    // FN (fiscal storage) info — return code intentionally(?) not checked here.
    fptr.setParam(IFptr.LIBFPTR_PARAM_FN_DATA_TYPE, IFptr.LIBFPTR_FNDT_FN_INFO);
    fptr.fnQueryData();
    record.fnSerial = fptr.getParamString(IFptr.LIBFPTR_PARAM_SERIAL_NUMBER);
    record.fnVersion = fptr.getParamString(IFptr.LIBFPTR_PARAM_FN_VERSION);
    // FN validity (expiry) date.
    fptr.setParam(IFptr.LIBFPTR_PARAM_FN_DATA_TYPE, IFptr.LIBFPTR_FNDT_VALIDITY);
    if (fptr.fnQueryData() < 0) {
        checkError(fptr);
    }
    Date dateTime = fptr.getParamDateTime(IFptr.LIBFPTR_PARAM_DATE_TIME);
    if (dateTime != null) {
        record.fnDate = Utils.toDateString(dateTime);
    }
    return record;
}

/**
 * Maps a driver FFD-version constant to its display string;
 * unknown constants yield "неизвестная" (unknown).
 */
private String getFFDVersion(long version) {
    if (version == IFptr.LIBFPTR_FFD_1_0) {
        return "1.0";
    }
    if (version == IFptr.LIBFPTR_FFD_1_0_5) {
        return "1.05";
    }
    if (version == IFptr.LIBFPTR_FFD_1_1) {
        return "1.1";
    }
    return "неизвестная";
}
/**
 * (Re)creates the driver instance, applies model/port/OFD settings, opens
 * the connection and configures the device's OFD exchange channel.
 *
 * Fixes:
 * <ul>
 *   <li>{@code ofdChannel} is optional (only set via {@link #setOfdChannel});
 *       the post-connect block dereferenced it unconditionally and threw a
 *       NullPointerException when it was never configured — now guarded.</li>
 *   <li>The ETHERNET branch was a standalone {@code if}, detaching the USB
 *       branch from the else-if chain: a USB channel was configured and then
 *       fell into the trailing {@code else throw} ("unsupported value").
 *       The chain is now a single if/else-if ladder.</li>
 *   <li>{@code LIBFPTR_SETTING_LIBRARY_PATH} is referenced via the interface
 *       instead of through the instance.</li>
 * </ul>
 *
 * @throws PrinterException on any driver error or unsupported OFD setting
 */
synchronized public void applySettingsAndConnect() throws PrinterException {
    if (fptr != null) {
        try {
            disconnect();
        }
        catch (Throwable e) {
            logger.error(e.getMessage(), e);
        }
    }
    logger.info("START ATOL PRINTER");
    logger.info("PORT: " + port);
    fptr = new Fptr();
    fptr.setSingleSetting(IFptr.LIBFPTR_SETTING_LIBRARY_PATH, System.getProperty("java.library.path"));
    fptr.setSingleSetting(IFptr.LIBFPTR_SETTING_MODEL, String.valueOf(model));
    if (port.equals("TCPIP")) {
        logger.info("Connect to: " + wifiIP + ":" + wifiPort);
        fptr.setSingleSetting(IFptr.LIBFPTR_SETTING_PORT, String.valueOf(IFptr.LIBFPTR_PORT_TCPIP));
        fptr.setSingleSetting(IFptr.LIBFPTR_SETTING_IPADDRESS, wifiIP);
        fptr.setSingleSetting(IFptr.LIBFPTR_SETTING_IPPORT, String.valueOf(wifiPort));
    }
    else if (port.equals("USBAUTO")) {
        logger.info("Connect to port: " + port);
        fptr.setSingleSetting(IFptr.LIBFPTR_SETTING_PORT, String.valueOf(IFptr.LIBFPTR_PORT_USB));
        fptr.setSingleSetting(IFptr.LIBFPTR_SETTING_USB_DEVICE_PATH, "auto");
    }
    else if (port.startsWith("COM")) {
        logger.info("Connect to port: " + port);
        fptr.setSingleSetting(IFptr.LIBFPTR_SETTING_PORT, String.valueOf(IFptr.LIBFPTR_PORT_COM));
        fptr.setSingleSetting(IFptr.LIBFPTR_SETTING_COM_FILE, port);
        fptr.setSingleSetting(IFptr.LIBFPTR_SETTING_BAUDRATE, String.valueOf(IFptr.LIBFPTR_PORT_BR_115200));
    }
    else {
        // Anything else is treated as an explicit USB device path.
        logger.info("Connect to port: " + port);
        fptr.setSingleSetting(IFptr.LIBFPTR_SETTING_PORT, String.valueOf(IFptr.LIBFPTR_PORT_USB));
        fptr.setSingleSetting(IFptr.LIBFPTR_SETTING_USB_DEVICE_PATH, port);
    }
    // Driver-side OFD channel setting (applies only to PROTO / ASIS).
    if (ofdChannel != null) {
        if (ofdChannel.equals(OFDChannel.PROTO)) {
            logger.info("ОФД средвами транспортного протокола (OFD PROTO 1)");
            fptr.setSingleSetting(IFptr.LIBFPTR_SETTING_OFD_CHANNEL, String.valueOf(IFptr.LIBFPTR_OFD_CHANNEL_PROTO));
        }
        else if (ofdChannel.equals(OFDChannel.ASIS)) {
            logger.info("ОФД используя настройки ККМ (OFD NONE 2)");
            fptr.setSingleSetting(IFptr.LIBFPTR_SETTING_OFD_CHANNEL, String.valueOf(IFptr.LIBFPTR_OFD_CHANNEL_NONE));
        }
        else {
            throw new PrinterException(0, "Не поддерживаемое значение параметра связи с ОФД");
        }
    }
    if (fptr.applySingleSettings() < 0) {
        checkError(fptr);
    }
    logger.info("ATOL PRINTER STARTED");
    connect();
    // Device-side OFD exchange channel (device setting id 276); ASIS keeps
    // the device's own configuration, null means "leave as configured".
    if (ofdChannel != null && !ofdChannel.equals(OFDChannel.ASIS)) {
        if (ofdChannel.equals(OFDChannel.USB)) {
            fptr.setParam(IFptr.LIBFPTR_PARAM_SETTING_ID, 276);
            fptr.setParam(IFptr.LIBFPTR_PARAM_SETTING_VALUE, 1);
            fptr.writeDeviceSetting();
            logger.info("ОФД через USB (установить EoU модуль)");
        }
        else if (ofdChannel.equals(OFDChannel.ETHERNET)) {
            fptr.setParam(IFptr.LIBFPTR_PARAM_SETTING_ID, 276);
            fptr.setParam(IFptr.LIBFPTR_PARAM_SETTING_VALUE, 2);
            fptr.writeDeviceSetting();
            logger.info("ОФД через Ethernet");
        }
        else if (ofdChannel.equals(OFDChannel.WIFI)) {
            fptr.setParam(IFptr.LIBFPTR_PARAM_SETTING_ID, 276);
            fptr.setParam(IFptr.LIBFPTR_PARAM_SETTING_VALUE, 3);
            fptr.writeDeviceSetting();
            logger.info("ОФД через WiFi");
        }
        else if (ofdChannel.equals(OFDChannel.GSM)) {
            fptr.setParam(IFptr.LIBFPTR_PARAM_SETTING_ID, 276);
            fptr.setParam(IFptr.LIBFPTR_PARAM_SETTING_VALUE, 4);
            fptr.writeDeviceSetting();
            logger.info("ОФД через GSM");
        }
        else if (ofdChannel.equals(OFDChannel.TRANSPORT)) {
            fptr.setParam(IFptr.LIBFPTR_PARAM_SETTING_ID, 276);
            fptr.setParam(IFptr.LIBFPTR_PARAM_SETTING_VALUE, 5);
            fptr.writeDeviceSetting();
            logger.info("ОФД через транспортный протокол");
        }
        else if (ofdChannel.equals(OFDChannel.PROTO)) {
            fptr.setParam(IFptr.LIBFPTR_PARAM_SETTING_ID, 276);
            fptr.setParam(IFptr.LIBFPTR_PARAM_SETTING_VALUE, 5);
            fptr.writeDeviceSetting();
        }
        else {
            throw new PrinterException(0, "Не поддерживаемое значение параметра связи с ОФД: " + ofdChannel.name());
        }
    }
}
/**
 * Opens the driver connection and cancels any receipt left open from a
 * previous session.
 */
public void connect() throws PrinterException {
    if (fptr.open() < 0) {
        checkError(fptr);
    }
    cancelCheck();
    logger.info("ATOL PRINTER CONNECTED");
}

/** Lifecycle hook: closes the driver connection. */
public void destroy() throws Throwable {
    disconnect();
}

/** Closes the driver connection. */
protected void disconnect() throws PrinterException {
    if (fptr.close() < 0) {
        checkError(fptr);
    }
}

// NOTE(review): finalize() is deprecated and not guaranteed to run; native
// resource release should not rely on it — prefer explicit destroy().
@Override
protected void finalize() throws Throwable {
    logger.info("ATOL PRINTER DESTROY");
    fptr.destroy();
}
/**
 * Prints the X-report (shift status without closing the shift).
 */
synchronized public void reportX(ReportCommandRecord record) throws PrinterException {
    loginOperator(record);
    fptr.setParam(IFptr.LIBFPTR_PARAM_REPORT_TYPE, IFptr.LIBFPTR_RT_X);
    if (fptr.report() < 0) {
        checkError(fptr);
    }
    if (!waitDocumentClosed()) {
        checkError(fptr);
    }
}

/**
 * Prints the Z-report (shift close).
 */
synchronized public void reportZ(AbstractCommandRecord record) throws PrinterException {
    loginOperator(record);
    fptr.setParam(IFptr.LIBFPTR_PARAM_REPORT_TYPE, IFptr.LIBFPTR_RT_CLOSE_SHIFT);
    if (fptr.report() < 0) {
        checkError(fptr);
    }
    if (!waitDocumentClosed()) {
        checkError(fptr);
    }
}

/**
 * Opens a new fiscal shift on the device.
 */
synchronized public void startShift(ReportCommandRecord record) throws PrinterException {
    loginOperator(record);
    if (fptr.openShift() < 0) {
        checkError(fptr);
    }
    if (!waitDocumentClosed()) {
        checkError(fptr);
    }
}

/**
 * Registers a cash deposit (cash-in) of {@code record.sum}.
 */
synchronized public void cashIncome(CashChangeRecord record) throws PrinterException {
    loginOperator(record);
    fptr.setParam(IFptr.LIBFPTR_PARAM_SUM, record.sum);
    if (fptr.cashIncome() < 0) {
        checkError(fptr);
    }
    if (!waitDocumentClosed()) {
        checkError(fptr);
    }
}

/**
 * Registers a cash withdrawal (cash-out) of {@code record.sum}.
 */
synchronized public void cashOutcome(CashChangeRecord record) throws PrinterException {
    loginOperator(record);
    fptr.setParam(IFptr.LIBFPTR_PARAM_SUM, record.sum);
    if (fptr.cashOutcome() < 0) {
        checkError(fptr);
    }
    if (!waitDocumentClosed()) {
        checkError(fptr);
    }
}

/**
 * Reprints a copy of the last fiscal document.
 */
synchronized public void copyLastDoc(AbstractCommandRecord record) throws PrinterException {
    loginOperator(record);
    fptr.setParam(IFptr.LIBFPTR_PARAM_REPORT_TYPE, IFptr.LIBFPTR_RT_LAST_DOCUMENT);
    if (fptr.report() < 0) {
        checkError(fptr);
    }
    if (!waitDocumentClosed()) {
        checkError(fptr);
    }
}

/**
 * Prints the device demo report.
 */
synchronized public void demoReport(AbstractCommandRecord record) throws PrinterException {
    loginOperator(record);
    fptr.setParam(IFptr.LIBFPTR_PARAM_REPORT_TYPE, IFptr.LIBFPTR_RT_KKT_DEMO);
    if (fptr.report() < 0) {
        checkError(fptr);
    }
    if (!waitDocumentClosed()) {
        checkError(fptr);
    }
}

/**
 * Prints the OFD connection test report.
 */
synchronized public void ofdTestReport(AbstractCommandRecord record) throws PrinterException {
    loginOperator(record);
    fptr.setParam(IFptr.LIBFPTR_PARAM_REPORT_TYPE, IFptr.LIBFPTR_RT_OFD_TEST);
    if (fptr.report() < 0) {
        checkError(fptr);
    }
    if (!waitDocumentClosed()) {
        checkError(fptr);
    }
}
// Bill presentation ("выставление счета"): prints a non-fiscal pre-check.
synchronized public void printPredCheck(PrintCheckCommandRecord record) throws PrinterException {
    doPrintPredCheck(record);
}

/**
 * Prints a non-fiscal "pre-check" (bill): payment positions with discounts,
 * a bold total line, optional guest details (layout depends on guest type)
 * and any free-form additional lines.
 *
 * @throws PrinterException if validation fails or the driver reports an error
 */
private void doPrintPredCheck(PrintCheckCommandRecord record) throws PrinterException {
    checkRecord(record);
    if (fptr.beginNonfiscalDocument() < 0) {
        checkError(fptr);
    }
    printText("СЧЕТ (ПРЕДЧЕК)");
    printText("");
    printText("ПОЗИЦИИ ОПЛАТЫ", IFptr.LIBFPTR_ALIGNMENT_CENTER, IFptr.LIBFPTR_TW_WORDS);
    for(int i = 0; i < record.itemList.size(); i++) {
        ItemRecord r = record.itemList.get(i);
        printText((i + 1) + ". " + r.name, IFptr.LIBFPTR_ALIGNMENT_LEFT, IFptr.LIBFPTR_TW_WORDS);
        double total = r.price * r.quantity;
        printText(r.price + " x " + r.quantity + " = " + total, IFptr.LIBFPTR_ALIGNMENT_RIGHT, IFptr.LIBFPTR_TW_CHARS);
        // At most one of discountSum / discountPercent is set (see checkRecord).
        if (r.discountSum != null && r.discountSum > 0) {
            printText("Скидка: " + r.discountSum, IFptr.LIBFPTR_ALIGNMENT_RIGHT, IFptr.LIBFPTR_TW_CHARS);
        }
        if (r.discountPercent != null && r.discountPercent > 0) {
            printText("Скидка: " + r.discountPercent + "%", IFptr.LIBFPTR_ALIGNMENT_RIGHT, IFptr.LIBFPTR_TW_CHARS);
        }
    }
    printBoldText("ИТОГО: " + record.moneySum, IFptr.LIBFPTR_ALIGNMENT_RIGHT, IFptr.LIBFPTR_TW_CHARS);
    // Time-based guests: numbered list with arrival time and minutes spent.
    if (GuestType.TIME.equals(record.type) && record.guestInfoList != null) {
        printText("");
        printText("РАССЧИТЫВАЕМЫЕ ГОСТИ", IFptr.LIBFPTR_ALIGNMENT_LEFT, IFptr.LIBFPTR_TW_CHARS);
        int i = 1;
        for(GuestRecord r: record.guestInfoList) {
            String name = r.name;
            if (!StringUtils.isEmpty(r.card)) {
                name += " (" + r.card + ")";
            };
            printText(i + ". " + name, IFptr.LIBFPTR_ALIGNMENT_LEFT, IFptr.LIBFPTR_TW_CHARS);
            printText("время прихода: " + r.startTime, IFptr.LIBFPTR_ALIGNMENT_RIGHT, IFptr.LIBFPTR_TW_CHARS);
            printText("проведенное время: " + r.minutes + " мин.", IFptr.LIBFPTR_ALIGNMENT_RIGHT, IFptr.LIBFPTR_TW_CHARS);
            i++;
        }
        printText("");
        printText("");
    }
    // Take-away orders: guest name, phone and free-form message.
    if (GuestType.TOGO.equals(record.type) && record.guestInfoList != null) {
        printText("");
        for(GuestRecord r: record.guestInfoList) {
            String name = r.name;
            if (!StringUtils.isEmpty(r.phone)) {
                name += ", " + r.phone;
            }
            printText(name, IFptr.LIBFPTR_ALIGNMENT_CENTER, IFptr.LIBFPTR_TW_CHARS);
            printText(r.message);
        }
        printText("");
        printText("");
    }
    if (record.additionalInfo != null) {
        printText("");
        for(String s: record.additionalInfo) {
            printText(s, IFptr.LIBFPTR_ALIGNMENT_CENTER, IFptr.LIBFPTR_TW_WORDS);
        }
        printText("");
    }
    if (fptr.endNonfiscalDocument() < 0) {
        checkError(fptr);
    }
}
/** Prints a fiscal sale receipt. */
synchronized public void printCheck(PrintCheckCommandRecord record) throws PrinterException {
    doPrintCheck(record, IFptr.LIBFPTR_RT_SELL);
}

/** Prints a fiscal sale-return (refund) receipt. */
synchronized public void printReturnCheck(PrintCheckCommandRecord record) throws PrinterException {
    doPrintCheck(record, IFptr.LIBFPTR_RT_SELL_RETURN);
}
/**
 * Prints a fiscal receipt (sale or sale-return): validates the record,
 * registers every item with its discount applied, registers payments
 * (electronic and/or cash), optionally rounds kopecks away, and closes
 * the receipt. On any error the receipt is cancelled and rethrown.
 *
 * Fix (additional error processing): the failure path cancelled the receipt
 * and rethrew without logging, losing the root cause whenever the caller
 * swallowed the exception or cancelCheck failed too — the error is now
 * logged before cancellation.
 *
 * @param record    validated payment record (items, sums, flags)
 * @param checkType driver receipt type, e.g. LIBFPTR_RT_SELL
 * @throws PrinterException if validation or any driver call fails
 */
private void doPrintCheck(PrintCheckCommandRecord record, int checkType) throws PrinterException {
    checkRecord(record);
    cancelCheck();
    loginOperator(record);
    // Open the receipt; a 24h-expired shift is closed with a Z-report and retried once.
    try {
        openCheck(record, checkType);
    } catch (PrinterException e) {
        if (e.getCode() == IFptr.LIBFPTR_ERROR_SHIFT_EXPIRED) {
            reportZ(record);
            openCheck(record, checkType);
        } else {
            throw e;
        }
    }
    try {
        BigDecimal totalPrice = new BigDecimal(0.0);
        for (ItemRecord r : record.itemList) {
            BigDecimal price = new BigDecimal(r.price);
            BigDecimal discountPosition = new BigDecimal(0.0);
            if (r.discountSum != null) {
                discountPosition = new BigDecimal(r.discountSum);
            } else if (r.discountPercent != null) {
                if (r.discountPercent > 100) {
                    r.discountPercent = 100.0; // cap at 100%
                }
                BigDecimal value = new BigDecimal(r.price).multiply(new BigDecimal(r.quantity));
                discountPosition = value.multiply(new BigDecimal(r.discountPercent)).divide(new BigDecimal(100.0));
            }
            BigDecimal priceWithDiscount = price.subtract(discountPosition);
            logger.info("Name: " + r.name + ", price=" + price + ", discount = " + discountPosition + ", priceWithDiscount = " + priceWithDiscount);
            registrationFZ54(r.name, priceWithDiscount.doubleValue(), r.quantity, r.vatValue, r.type);
            totalPrice = totalPrice.add(priceWithDiscount.multiply(new BigDecimal(r.quantity)));
        }
        if (record.creditSum != null && record.creditSum > 0) {
            payment(record.creditSum, IFptr.LIBFPTR_PT_ELECTRONICALLY);
        }
        if (record.moneySum != null && record.moneySum > 0) {
            payment(record.moneySum, IFptr.LIBFPTR_PT_CASH);
        }
        logger.info("Total price = " + totalPrice);
        // Optionally drop kopecks by forcing a whole-ruble receipt total.
        if (Boolean.TRUE.equals(record.dropPenny)) {
            double totalWithoutPenny = totalPrice.setScale(0, BigDecimal.ROUND_HALF_DOWN).doubleValue();
            fptr.setParam(IFptr.LIBFPTR_PARAM_SUM, totalWithoutPenny);
            if (fptr.receiptTotal() < 0) {
                checkError(fptr);
            }
        }
        // Close the receipt; in test mode cancel it instead of committing.
        if (Boolean.TRUE.equals(record.testMode)) {
            cancelCheck();
        }
        else {
            if (fptr.closeReceipt() < 0) {
                checkError(fptr);
            }
            if (!waitDocumentClosed()) {
                cancelCheck();
            }
            continuePrint();
        }
    }
    catch (PrinterException e) {
        // Log before cancelling so the root cause survives a failed cancel.
        logger.error(e.getMessage(), e);
        cancelCheck();
        throw e;
    }
}
/**
 * Validates a payment record before any driver interaction: a non-empty
 * item list, at least one total sum, a name/price/quantity per item, at
 * most one discount kind per item, and — for electronic-only receipts —
 * a customer phone or email.
 *
 * @throws PrinterException with code 0 and a human-readable message on the
 *         first validation failure
 */
protected void checkRecord(PrintCheckCommandRecord record) throws PrinterException {
    if (record.itemList == null || record.itemList.isEmpty()) {
        throw new PrinterException(0, "Список оплаты пустой");
    }
    if (record.moneySum == null && record.creditSum == null) {
        throw new PrinterException(0, "Итоговое значение для оплаты не задано");
    }
    for(ItemRecord r: record.itemList) {
        if (StringUtils.isEmpty(r.name)) {
            throw new PrinterException(0, "Не задано наименование позиции");
        }
        if (r.price == null) {
            throw new PrinterException(0, "Не задана цена позиции: " + r.name);
        }
        if (r.quantity == null) {
            throw new PrinterException(0, "Не задано количество позиции: " + r.name);
        }
        if (r.discountPercent != null && r.discountSum != null) {
            throw new PrinterException(0, "Нужно задать только один тип скидки - либо в процентах, либо в сумме. Позиция: " + r.name);
        }
    }
    if (Boolean.TRUE.equals(record.onlyElectronically)) {
        if (StringUtils.isEmpty(record.phone) && StringUtils.isEmpty(record.email)) {
            throw new PrinterException(0, "Для электронных чеков обязателно задание телефона или email покупателя");
        }
    }
}
/**
 * Maps the domain {@code VAT} enum onto the driver's tax-type constant.
 * A {@code null} value means "no VAT".
 *
 * @throws PrinterException for an enum constant with no known mapping
 */
private int getVatNumber(VAT vatValue) throws PrinterException {
    if (vatValue == null) {
        return IFptr.LIBFPTR_TAX_NO;
    }
    if (vatValue.equals(VAT.NO)) {
        return IFptr.LIBFPTR_TAX_NO;
    }
    if (vatValue.equals(VAT.VAT0)) {
        return IFptr.LIBFPTR_TAX_VAT0;
    }
    if (vatValue.equals(VAT.VAT10)) {
        return IFptr.LIBFPTR_TAX_VAT10;
    }
    if (vatValue.equals(VAT.VAT18)) {
        return IFptr.LIBFPTR_TAX_VAT18;
    }
    if (vatValue.equals(VAT.VAT110)) {
        return IFptr.LIBFPTR_TAX_VAT110;
    }
    if (vatValue.equals(VAT.VAT118)) {
        return IFptr.LIBFPTR_TAX_VAT118;
    }
    throw new PrinterException(0, "Неизвестный тип налога: " + vatValue);
}
/**
 * Registers one receipt position per Russian federal law 54-FZ:
 * name, price, quantity and tax type; marks services with tag 1212.
 *
 * @param itemVat per-item VAT override; falls back to the printer-wide VAT
 * @throws PrinterException if the driver rejects the registration
 */
private void registrationFZ54(String name, double price, double quantity, VAT itemVat, ItemType type) throws PrinterException {
    fptr.setParam(IFptr.LIBFPTR_PARAM_COMMODITY_NAME, name);
    fptr.setParam(IFptr.LIBFPTR_PARAM_PRICE, price);
    fptr.setParam(IFptr.LIBFPTR_PARAM_QUANTITY, quantity);
    VAT vatValue = this.vat;
    if (itemVat != null) {
        vatValue = itemVat;
    }
    int vatNumber = getVatNumber(vatValue);
    fptr.setParam(IFptr.LIBFPTR_PARAM_TAX_TYPE, vatNumber);
    // Tag 1212 = payment subject attribute; value 4 presumably marks a
    // "service" subject — TODO confirm against the FFD tag reference.
    if (ItemType.SERVICE.equals(type)) {
        fptr.setParam(1212, 4);
    }
    if (fptr.registration() < 0) {
        checkError(fptr);
    }
}
/**
 * Registers a payment of the given sum with the given driver payment type
 * (e.g. LIBFPTR_PT_CASH or LIBFPTR_PT_ELECTRONICALLY) on the open receipt.
 *
 * Fix (additional error processing): the return code of {@code payment()}
 * was ignored, silently dropping driver failures — every other driver call
 * in this class routes a negative return through checkError, and so does
 * this one now.
 *
 * @throws PrinterException if the driver rejects the payment
 */
private void payment(double sum, int type) throws PrinterException {
    fptr.setParam(IFptr.LIBFPTR_PARAM_PAYMENT_TYPE, type);
    fptr.setParam(IFptr.LIBFPTR_PARAM_PAYMENT_SUM, sum);
    if (fptr.payment() < 0) {
        checkError(fptr);
    }
}
/**
 * Opens a fiscal receipt of the given type and fills customer attributes:
 * customer contact (tag 1008, phone preferred over email, 10-digit mobile
 * numbers normalized to +7...), electronic-only flag, sender email
 * (tag 1117) and billing location (tag 1187).
 *
 * Fix: {@code openReceipt()} was compared with {@code < 1} while every
 * other driver call treats only a negative return as failure; normalized
 * to {@code < 0} for consistency.
 *
 * @throws PrinterException if the driver fails to open the receipt
 */
private void openCheck(PrintCheckCommandRecord record, int type) throws PrinterException {
    fptr.setParam(IFptr.LIBFPTR_PARAM_RECEIPT_TYPE, type);
    if (!StringUtils.isEmpty(record.phone)) {
        // A bare 10-digit mobile number ("9XXXXXXXXX") gets the +7 country code.
        if (record.phone.length() == 10 && record.phone.startsWith("9")) {
            record.phone = "+7" + record.phone;
        }
        fptr.setParam(1008, record.phone);
    }
    else if (!StringUtils.isEmpty(record.email)) {
        fptr.setParam(1008, record.email);
    }
    if (Boolean.TRUE.equals(record.onlyElectronically)) {
        fptr.setParam(IFptr.LIBFPTR_PARAM_RECEIPT_ELECTRONICALLY, true);
    }
    if (!StringUtils.isEmpty(record.emailFrom)) {
        fptr.setParam(1117, record.emailFrom);
    }
    if (!StringUtils.isEmpty(record.billingLocation)) {
        fptr.setParam(1187, record.billingLocation);
    }
    if (fptr.openReceipt() < 0) {
        checkError(fptr);
    }
}
/**
 * Converts the driver's last error into a logged {@link PrinterException}.
 */
protected void checkError(IFptr fptr) throws PrinterException {
    checkError(fptr, true);
}

/**
 * Converts the driver's last error into a {@link PrinterException}.
 * Does nothing when the last call succeeded (error code 0).
 *
 * @param log whether to log the error description before throwing
 */
private void checkError(IFptr fptr, boolean log) throws PrinterException {
    int rc = fptr.errorCode();
    if (rc > 0) {
        if (log) {
            logger.error(fptr.errorDescription());
        }
        throw new PrinterException(rc, fptr.errorDescription());
    }
}
/**
 * Polls the device until the current fiscal document is reported closed,
 * retrying once per second for up to ~20 seconds. The loop bails out early
 * on the localized "not supported in this version" description or when the
 * thread is interrupted.
 *
 * Fixes: the interrupt status is restored before breaking out (swallowing
 * InterruptedException hides the interrupt from callers), and the trailing
 * {@code if (!x) return false; return true;} is collapsed into a direct
 * boolean return.
 *
 * @return true if the driver reports the document closed
 */
private boolean waitDocumentClosed() {
    int count = 0;
    while (fptr.checkDocumentClosed() < 0) {
        // Could not query the document state: log and retry after a pause.
        String errorDescription = fptr.errorDescription();
        logger.error(errorDescription);
        if ("Не поддерживается в данной версии".equalsIgnoreCase(errorDescription)) {
            break;
        }
        try {
            Thread.sleep(1000);
        }
        catch (InterruptedException e) {
            Thread.currentThread().interrupt(); // restore interrupt status
            break;
        }
        count++;
        if (count > 20) {
            break;
        }
    }
    return fptr.getParamBool(IFptr.LIBFPTR_PARAM_DOCUMENT_CLOSED);
}
/**
 * If the closed document did not finish printing (e.g. out of paper),
 * keeps asking the device to continue printing, retrying once per second
 * for up to ~20 seconds. Errors are logged but never thrown.
 *
 * Fix: the interrupt status is restored before breaking out of the retry
 * loop instead of being silently swallowed.
 */
private void continuePrint() {
    int count = 0;
    if (!fptr.getParamBool(IFptr.LIBFPTR_PARAM_DOCUMENT_PRINTED)) {
        while (fptr.continuePrint() < 0) {
            String errorDescription = fptr.errorDescription();
            logger.error(errorDescription);
            if ("Не поддерживается в данной версии".equalsIgnoreCase(errorDescription)) {
                break;
            }
            try {
                Thread.sleep(1000);
            }
            catch (InterruptedException e) {
                Thread.currentThread().interrupt(); // restore interrupt status
                break;
            }
            count++;
            if (count > 20) {
                break;
            }
        }
    }
}
/**
 * Registers the cashier on the device before a fiscal operation:
 * tag 1021 = operator name (optionally prefixed with the position),
 * tag 1203 = operator INN. No-op when no operator name is given.
 * The driver's return code is deliberately ignored here.
 */
private void loginOperator(AbstractCommandRecord record) {
    if (StringUtils.isEmpty(record.userFIO)) {
        return;
    }
    String fio = record.userFIO;
    if (!StringUtils.isEmpty(record.userPosition)) {
        fio = record.userPosition + " " + fio;
    }
    fptr.setParam(1021, fio);
    if (!StringUtils.isEmpty(record.userINN)) {
        fptr.setParam(1203, record.userINN);
    }
    fptr.operatorLogin();
}
/**
 * Cancels the current receipt if one is open. "Wrong mode" / "receipt
 * already closed" style errors are expected when nothing is open and are
 * therefore swallowed; any other driver error is rethrown (unlogged, since
 * cancellation is frequently attempted preemptively).
 */
private void cancelCheck() throws PrinterException {
    try {
        if (fptr.cancelReceipt() < 0)
            checkError(fptr, false);
    } catch (PrinterException e) {
        int rc = e.getCode();
        // Only "operation denied in closed receipt" is benign here.
        if (rc != IFptr.LIBFPTR_ERROR_DENIED_IN_CLOSED_RECEIPT) {
            throw e;
        }
    }
}
protected void printText(String text) throws PrinterException {
printText(text, IFptr.LIBFPTR_ALIGNMENT_CENTER, IFptr.LIBFPTR_TW_WORDS);
}
protected void printText(String text, int alignment, int wrap) throws PrinterException {
fptr.setParam(IFptr.LIBFPTR_PARAM_TEXT, text);
fptr.setParam(IFptr.LIBFPTR_PARAM_ALIGNMENT, alignment);
fptr.setParam(IFptr.LIBFPTR_PARAM_TEXT_WRAP, wrap);
fptr.printText();
}
/**
 * Prints a single line of emphasized text (double width and double height).
 *
 * @param text      text to print
 * @param alignment one of the IFptr.LIBFPTR_ALIGNMENT_* constants
 * @param wrap      one of the IFptr.LIBFPTR_TW_* wrapping modes
 */
protected void printBoldText(String text, int alignment, int wrap) throws PrinterException {
    fptr.setParam(IFptr.LIBFPTR_PARAM_TEXT, text);
    fptr.setParam(IFptr.LIBFPTR_PARAM_ALIGNMENT, alignment);
    fptr.setParam(IFptr.LIBFPTR_PARAM_TEXT_WRAP, wrap);
    fptr.setParam(IFptr.LIBFPTR_PARAM_FONT_DOUBLE_WIDTH, true);
    fptr.setParam(IFptr.LIBFPTR_PARAM_FONT_DOUBLE_HEIGHT, true);
    fptr.printText();
}
}
|
add additional error processing
|
src/main/java/org/bitbucket/ytimes/client/kkm/printer/AtolPrinter.java
|
add additional error processing
|
|
Java
|
epl-1.0
|
7bdf0a81f0a48221b4a3eb2ebdfe5b286381882b
| 0
|
tmen13/PA_TP
|
package logica;
/**
 * Base class for a board card: a grid position, a visibility flag and the
 * ship ({@link Nave}) the card may hold. Pure state holder — no behavior
 * beyond plain accessors.
 */
public abstract class Carta {
    int posX;
    int posY;
    boolean visivel = false;
    Nave nave;

    public int getPosX() {
        return this.posX;
    }

    public void setPosX(int posX) {
        this.posX = posX;
    }

    public int getPosY() {
        return this.posY;
    }

    public void setPosY(int posY) {
        this.posY = posY;
    }

    public boolean isVisivel() {
        return this.visivel;
    }

    public void setVisivel(boolean visivel) {
        this.visivel = visivel;
    }

    public Nave getNave() {
        return this.nave;
    }

    public void setNave(Nave nave) {
        this.nave = nave;
    }
}
|
TP_PA/src/logica/Carta.java
|
package logica;
/**
 * Base class for a board card: a grid position and the ship ({@link Nave})
 * the card may hold. Pure state holder — plain accessors only.
 */
public abstract class Carta {
    int posX, posY; // card position on the board grid
    Nave nave;      // ship occupying this card; may be null
    public int getPosX() {
        return posX;
    }
    public void setPosX(int posX) {
        this.posX = posX;
    }
    public int getPosY() {
        return posY;
    }
    public void setPosY(int posY) {
        this.posY = posY;
    }
    public Nave getNave() {
        return nave;
    }
    public void setNave(Nave nave) {
        this.nave = nave;
    }
}
|
visivel
|
TP_PA/src/logica/Carta.java
|
visivel
|
|
Java
|
mpl-2.0
|
117d96c35eb2470a615bf37380965f4edb32b908
| 0
|
richardwilkes/gcs,smithkm/gcs,richardwilkes/gcs
|
/*
* Copyright (c) 1998-2014 by Richard A. Wilkes. All rights reserved.
*
* This Source Code Form is subject to the terms of the Mozilla Public License,
* version 2.0. If a copy of the MPL was not distributed with this file, You
* can obtain one at http://mozilla.org/MPL/2.0/.
*
* This Source Code Form is "Incompatible With Secondary Licenses", as defined
* by the Mozilla Public License, version 2.0.
*/
package com.trollworks.gcs.character;
import com.trollworks.gcs.app.CommonDockable;
import com.trollworks.gcs.widgets.outline.ListOutline;
import com.trollworks.gcs.widgets.outline.ListRow;
import com.trollworks.gcs.widgets.outline.RowItemRenderer;
import com.trollworks.gcs.widgets.search.Search;
import com.trollworks.gcs.widgets.search.SearchTarget;
import com.trollworks.toolkit.annotation.Localize;
import com.trollworks.toolkit.ui.menu.file.ExportToCommand;
import com.trollworks.toolkit.ui.menu.file.PrintProxy;
import com.trollworks.toolkit.ui.widget.Toolbar;
import com.trollworks.toolkit.ui.widget.WindowUtils;
import com.trollworks.toolkit.ui.widget.outline.Outline;
import com.trollworks.toolkit.ui.widget.outline.OutlineModel;
import com.trollworks.toolkit.ui.widget.outline.Row;
import com.trollworks.toolkit.ui.widget.outline.RowIterator;
import com.trollworks.toolkit.utility.Localization;
import com.trollworks.toolkit.utility.PathUtils;
import com.trollworks.toolkit.utility.undo.StdUndoManager;
import java.awt.BorderLayout;
import java.awt.Color;
import java.awt.EventQueue;
import java.awt.KeyboardFocusManager;
import java.io.File;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import javax.swing.JScrollPane;
import javax.swing.ListCellRenderer;
/** A dockable panel that displays a {@link GURPSCharacter} sheet and supports searching and exporting it. */
public class SheetDockable extends CommonDockable implements SearchTarget {
    @Localize("Untitled Sheet")
    private static String UNTITLED;
    @Localize("An error occurred while trying to save the sheet as a PNG.")
    private static String SAVE_AS_PNG_ERROR;
    @Localize("An error occurred while trying to save the sheet as a PDF.")
    private static String SAVE_AS_PDF_ERROR;
    @Localize("An error occurred while trying to save the sheet as HTML.")
    private static String SAVE_AS_HTML_ERROR;
    static {
        Localization.initialize();
    }
    private CharacterSheet mSheet;             // the rendered character sheet
    private Toolbar mToolbar;                  // top toolbar hosting the search field
    private Search mSearch;                    // search widget wired to this SearchTarget
    private PrerequisitesThread mPrereqThread; // background prerequisite processor
    /** Creates a new {@link SheetDockable}. */
    public SheetDockable(GURPSCharacter character) {
        super(character);
        GURPSCharacter dataFile = getDataFile();
        mToolbar = new Toolbar();
        mSearch = new Search(this);
        mToolbar.add(mSearch, Toolbar.LAYOUT_FILL);
        add(mToolbar, BorderLayout.NORTH);
        mSheet = new CharacterSheet(dataFile);
        JScrollPane scroller = new JScrollPane(mSheet);
        scroller.setBorder(null);
        scroller.getViewport().setBackground(Color.LIGHT_GRAY);
        scroller.getViewport().addChangeListener(mSheet);
        add(scroller, BorderLayout.CENTER);
        mSheet.rebuild();
        mPrereqThread = new PrerequisitesThread(mSheet);
        mPrereqThread.start();
        PrerequisitesThread.waitForProcessingToFinish(dataFile);
        // Rebuilding and prerequisite processing generate edits; reset the
        // modified flag, clear the undo history so the sheet starts "clean",
        // then attach this dockable's undo manager to the data file.
        dataFile.setModified(false);
        StdUndoManager undoManager = getUndoManager();
        undoManager.discardAllEdits();
        dataFile.setUndoManager(undoManager);
    }
    @Override
    public GURPSCharacter getDataFile() {
        return (GURPSCharacter) super.getDataFile();
    }
    /** @return The {@link CharacterSheet}. */
    public CharacterSheet getSheet() {
        return mSheet;
    }
    @Override
    protected String getUntitledBaseName() {
        return UNTITLED;
    }
    @Override
    public PrintProxy getPrintProxy() {
        return mSheet;
    }
    @Override
    public String getDescriptor() {
        // RAW: Implement
        return null;
    }
    @Override
    public String[] getAllowedExtensions() {
        return new String[] { GURPSCharacter.EXTENSION, ExportToCommand.PDF_EXTENSION, ExportToCommand.HTML_EXTENSION, ExportToCommand.PNG_EXTENSION };
    }
    @Override
    public String getPreferredSavePath() {
        // Prefer the character's name; fall back to the window title.
        String name = getDataFile().getDescription().getName();
        if (name.length() == 0) {
            name = getTitle();
        }
        return PathUtils.getFullPath(PathUtils.getParent(PathUtils.getFullPath(getBackingFile())), name);
    }
    @Override
    public File[] saveTo(File file) {
        // Dispatch on the requested extension; unrecognized extensions are
        // delegated to the default (native sheet format) save.
        ArrayList<File> result = new ArrayList<>();
        String extension = PathUtils.getExtension(file.getName());
        if (ExportToCommand.HTML_EXTENSION.equals(extension)) {
            if (mSheet.saveAsHTML(file, null, null)) {
                result.add(file);
            } else {
                WindowUtils.showError(this, SAVE_AS_HTML_ERROR);
            }
        } else if (ExportToCommand.PNG_EXTENSION.equals(extension)) {
            // PNG export may produce multiple files; saveAsPNG fills 'result'.
            if (!mSheet.saveAsPNG(file, result)) {
                WindowUtils.showError(this, SAVE_AS_PNG_ERROR);
            }
        } else if (ExportToCommand.PDF_EXTENSION.equals(extension)) {
            if (mSheet.saveAsPDF(file)) {
                result.add(file);
            } else {
                WindowUtils.showError(this, SAVE_AS_PDF_ERROR);
            }
        } else {
            return super.saveTo(file);
        }
        return result.toArray(new File[result.size()]);
    }
    @Override
    public boolean isJumpToSearchAvailable() {
        // Available only when the search field exists and does not already have focus.
        return mSearch.isEnabled() && mSearch != KeyboardFocusManager.getCurrentKeyboardFocusManager().getPermanentFocusOwner();
    }
    @Override
    public void jumpToSearchField() {
        mSearch.requestFocusInWindow();
    }
    @Override
    public ListCellRenderer<Object> getSearchRenderer() {
        return new RowItemRenderer();
    }
    @Override
    public List<Object> search(String filter) {
        // Case-insensitive match across all four outlines on the sheet.
        ArrayList<Object> list = new ArrayList<>();
        filter = filter.toLowerCase();
        searchOne(mSheet.getAdvantageOutline(), filter, list);
        searchOne(mSheet.getSkillOutline(), filter, list);
        searchOne(mSheet.getSpellOutline(), filter, list);
        searchOne(mSheet.getEquipmentOutline(), filter, list);
        return list;
    }
    /** Adds every row of {@code outline} containing {@code text} to {@code list}. */
    private static void searchOne(ListOutline outline, String text, ArrayList<Object> list) {
        for (ListRow row : new RowIterator<ListRow>(outline.getModel())) {
            if (row.contains(text, true)) {
                list.add(row);
            }
        }
    }
    @Override
    public void searchSelect(List<Object> selection) {
        // Group the selected rows by their owning outline model, opening
        // parent rows along the way so the selection is visible.
        HashMap<OutlineModel, ArrayList<Row>> map = new HashMap<>();
        Outline primary = null;
        ArrayList<Row> list;
        mSheet.getAdvantageOutline().getModel().deselect();
        mSheet.getSkillOutline().getModel().deselect();
        mSheet.getSpellOutline().getModel().deselect();
        mSheet.getEquipmentOutline().getModel().deselect();
        for (Object obj : selection) {
            Row row = (Row) obj;
            Row parent = row.getParent();
            OutlineModel model = row.getOwner();
            while (parent != null) {
                parent.setOpen(true);
                model = parent.getOwner();
                parent = parent.getParent();
            }
            list = map.get(model);
            if (list == null) {
                list = new ArrayList<>();
                list.add(row);
                map.put(model, list);
            } else {
                list.add(row);
            }
            // Remember the first outline that received a selection so it can
            // be scrolled into view and focused afterwards.
            if (primary == null) {
                primary = mSheet.getAdvantageOutline();
                if (model != primary.getModel()) {
                    primary = mSheet.getSkillOutline();
                    if (model != primary.getModel()) {
                        primary = mSheet.getSpellOutline();
                        if (model != primary.getModel()) {
                            primary = mSheet.getEquipmentOutline();
                        }
                    }
                }
            }
        }
        for (OutlineModel model : map.keySet()) {
            model.select(map.get(model), false);
        }
        if (primary != null) {
            final Outline outline = primary;
            EventQueue.invokeLater(() -> outline.scrollSelectionIntoView());
            primary.requestFocus();
        }
    }
}
|
src/com/trollworks/gcs/character/SheetDockable.java
|
/*
* Copyright (c) 1998-2014 by Richard A. Wilkes. All rights reserved.
*
* This Source Code Form is subject to the terms of the Mozilla Public License,
* version 2.0. If a copy of the MPL was not distributed with this file, You
* can obtain one at http://mozilla.org/MPL/2.0/.
*
* This Source Code Form is "Incompatible With Secondary Licenses", as defined
* by the Mozilla Public License, version 2.0.
*/
package com.trollworks.gcs.character;
import com.trollworks.gcs.app.CommonDockable;
import com.trollworks.gcs.widgets.outline.ListOutline;
import com.trollworks.gcs.widgets.outline.ListRow;
import com.trollworks.gcs.widgets.outline.RowItemRenderer;
import com.trollworks.gcs.widgets.search.Search;
import com.trollworks.gcs.widgets.search.SearchTarget;
import com.trollworks.toolkit.annotation.Localize;
import com.trollworks.toolkit.ui.menu.file.ExportToCommand;
import com.trollworks.toolkit.ui.menu.file.PrintProxy;
import com.trollworks.toolkit.ui.widget.Toolbar;
import com.trollworks.toolkit.ui.widget.WindowUtils;
import com.trollworks.toolkit.ui.widget.outline.Outline;
import com.trollworks.toolkit.ui.widget.outline.OutlineModel;
import com.trollworks.toolkit.ui.widget.outline.Row;
import com.trollworks.toolkit.ui.widget.outline.RowIterator;
import com.trollworks.toolkit.utility.Localization;
import com.trollworks.toolkit.utility.PathUtils;
import java.awt.BorderLayout;
import java.awt.Color;
import java.awt.EventQueue;
import java.awt.KeyboardFocusManager;
import java.io.File;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import javax.swing.JScrollPane;
import javax.swing.ListCellRenderer;
/** A dockable panel that displays a {@link GURPSCharacter} sheet and supports searching and exporting it. */
public class SheetDockable extends CommonDockable implements SearchTarget {
    @Localize("Untitled Sheet")
    private static String UNTITLED;
    @Localize("An error occurred while trying to save the sheet as a PNG.")
    private static String SAVE_AS_PNG_ERROR;
    @Localize("An error occurred while trying to save the sheet as a PDF.")
    private static String SAVE_AS_PDF_ERROR;
    @Localize("An error occurred while trying to save the sheet as HTML.")
    private static String SAVE_AS_HTML_ERROR;
    static {
        Localization.initialize();
    }
    private CharacterSheet mSheet;             // the rendered character sheet
    private Toolbar mToolbar;                  // top toolbar hosting the search field
    private Search mSearch;                    // search widget wired to this SearchTarget
    private PrerequisitesThread mPrereqThread; // background prerequisite processor
    /** Creates a new {@link SheetDockable}. */
    public SheetDockable(GURPSCharacter character) {
        super(character);
        GURPSCharacter dataFile = getDataFile();
        mToolbar = new Toolbar();
        mSearch = new Search(this);
        mToolbar.add(mSearch, Toolbar.LAYOUT_FILL);
        add(mToolbar, BorderLayout.NORTH);
        mSheet = new CharacterSheet(dataFile);
        JScrollPane scroller = new JScrollPane(mSheet);
        scroller.setBorder(null);
        scroller.getViewport().setBackground(Color.LIGHT_GRAY);
        scroller.getViewport().addChangeListener(mSheet);
        add(scroller, BorderLayout.CENTER);
        mSheet.rebuild();
        mPrereqThread = new PrerequisitesThread(mSheet);
        mPrereqThread.start();
        PrerequisitesThread.waitForProcessingToFinish(dataFile);
        // NOTE(review): the undo history is cleared, but the undo manager is
        // never attached to the data file (no dataFile.setUndoManager(...)).
        // Verify that edits made on this sheet are actually tracked for undo.
        getUndoManager().discardAllEdits();
        dataFile.setModified(false);
    }
    @Override
    public GURPSCharacter getDataFile() {
        return (GURPSCharacter) super.getDataFile();
    }
    /** @return The {@link CharacterSheet}. */
    public CharacterSheet getSheet() {
        return mSheet;
    }
    @Override
    protected String getUntitledBaseName() {
        return UNTITLED;
    }
    @Override
    public PrintProxy getPrintProxy() {
        return mSheet;
    }
    @Override
    public String getDescriptor() {
        // RAW: Implement
        return null;
    }
    @Override
    public String[] getAllowedExtensions() {
        return new String[] { GURPSCharacter.EXTENSION, ExportToCommand.PDF_EXTENSION, ExportToCommand.HTML_EXTENSION, ExportToCommand.PNG_EXTENSION };
    }
    @Override
    public String getPreferredSavePath() {
        // Prefer the character's name; fall back to the window title.
        String name = getDataFile().getDescription().getName();
        if (name.length() == 0) {
            name = getTitle();
        }
        return PathUtils.getFullPath(PathUtils.getParent(PathUtils.getFullPath(getBackingFile())), name);
    }
    @Override
    public File[] saveTo(File file) {
        // Dispatch on the requested extension; unrecognized extensions are
        // delegated to the default (native sheet format) save.
        ArrayList<File> result = new ArrayList<>();
        String extension = PathUtils.getExtension(file.getName());
        if (ExportToCommand.HTML_EXTENSION.equals(extension)) {
            if (mSheet.saveAsHTML(file, null, null)) {
                result.add(file);
            } else {
                WindowUtils.showError(this, SAVE_AS_HTML_ERROR);
            }
        } else if (ExportToCommand.PNG_EXTENSION.equals(extension)) {
            // PNG export may produce multiple files; saveAsPNG fills 'result'.
            if (!mSheet.saveAsPNG(file, result)) {
                WindowUtils.showError(this, SAVE_AS_PNG_ERROR);
            }
        } else if (ExportToCommand.PDF_EXTENSION.equals(extension)) {
            if (mSheet.saveAsPDF(file)) {
                result.add(file);
            } else {
                WindowUtils.showError(this, SAVE_AS_PDF_ERROR);
            }
        } else {
            return super.saveTo(file);
        }
        return result.toArray(new File[result.size()]);
    }
    @Override
    public boolean isJumpToSearchAvailable() {
        // Available only when the search field exists and does not already have focus.
        return mSearch.isEnabled() && mSearch != KeyboardFocusManager.getCurrentKeyboardFocusManager().getPermanentFocusOwner();
    }
    @Override
    public void jumpToSearchField() {
        mSearch.requestFocusInWindow();
    }
    @Override
    public ListCellRenderer<Object> getSearchRenderer() {
        return new RowItemRenderer();
    }
    @Override
    public List<Object> search(String filter) {
        // Case-insensitive match across all four outlines on the sheet.
        ArrayList<Object> list = new ArrayList<>();
        filter = filter.toLowerCase();
        searchOne(mSheet.getAdvantageOutline(), filter, list);
        searchOne(mSheet.getSkillOutline(), filter, list);
        searchOne(mSheet.getSpellOutline(), filter, list);
        searchOne(mSheet.getEquipmentOutline(), filter, list);
        return list;
    }
    /** Adds every row of {@code outline} containing {@code text} to {@code list}. */
    private static void searchOne(ListOutline outline, String text, ArrayList<Object> list) {
        for (ListRow row : new RowIterator<ListRow>(outline.getModel())) {
            if (row.contains(text, true)) {
                list.add(row);
            }
        }
    }
    @Override
    public void searchSelect(List<Object> selection) {
        // Group the selected rows by their owning outline model, opening
        // parent rows along the way so the selection is visible.
        HashMap<OutlineModel, ArrayList<Row>> map = new HashMap<>();
        Outline primary = null;
        ArrayList<Row> list;
        mSheet.getAdvantageOutline().getModel().deselect();
        mSheet.getSkillOutline().getModel().deselect();
        mSheet.getSpellOutline().getModel().deselect();
        mSheet.getEquipmentOutline().getModel().deselect();
        for (Object obj : selection) {
            Row row = (Row) obj;
            Row parent = row.getParent();
            OutlineModel model = row.getOwner();
            while (parent != null) {
                parent.setOpen(true);
                model = parent.getOwner();
                parent = parent.getParent();
            }
            list = map.get(model);
            if (list == null) {
                list = new ArrayList<>();
                list.add(row);
                map.put(model, list);
            } else {
                list.add(row);
            }
            // Remember the first outline that received a selection so it can
            // be scrolled into view and focused afterwards.
            if (primary == null) {
                primary = mSheet.getAdvantageOutline();
                if (model != primary.getModel()) {
                    primary = mSheet.getSkillOutline();
                    if (model != primary.getModel()) {
                        primary = mSheet.getSpellOutline();
                        if (model != primary.getModel()) {
                            primary = mSheet.getEquipmentOutline();
                        }
                    }
                }
            }
        }
        for (OutlineModel model : map.keySet()) {
            model.select(map.get(model), false);
        }
        if (primary != null) {
            final Outline outline = primary;
            EventQueue.invokeLater(() -> outline.scrollSelectionIntoView());
            primary.requestFocus();
        }
    }
}
|
Make sure the UndoManager is setup properly.
|
src/com/trollworks/gcs/character/SheetDockable.java
|
Make sure the UndoManager is setup properly.
|
|
Java
|
agpl-3.0
|
850710ffba3b71ba11b2e3c560b316cb8fba3aeb
| 0
|
aihua/opennms,tdefilip/opennms,roskens/opennms-pre-github,tdefilip/opennms,roskens/opennms-pre-github,tdefilip/opennms,rdkgit/opennms,aihua/opennms,roskens/opennms-pre-github,aihua/opennms,tdefilip/opennms,rdkgit/opennms,rdkgit/opennms,rdkgit/opennms,tdefilip/opennms,rdkgit/opennms,roskens/opennms-pre-github,tdefilip/opennms,aihua/opennms,aihua/opennms,rdkgit/opennms,aihua/opennms,roskens/opennms-pre-github,tdefilip/opennms,aihua/opennms,rdkgit/opennms,rdkgit/opennms,roskens/opennms-pre-github,rdkgit/opennms,roskens/opennms-pre-github,tdefilip/opennms,tdefilip/opennms,aihua/opennms,roskens/opennms-pre-github,rdkgit/opennms,aihua/opennms,roskens/opennms-pre-github,roskens/opennms-pre-github,roskens/opennms-pre-github
|
//
// This file is part of the OpenNMS(R) Application.
//
// OpenNMS(R) is Copyright (C) 2006 The OpenNMS Group, Inc. All rights reserved.
// OpenNMS(R) is a derivative work, containing both original code, included code and modified
// code that was published under the GNU General Public License. Copyrights for modified
// and included code are below.
//
// OpenNMS(R) is a registered trademark of The OpenNMS Group, Inc.
//
// Modifications:
//
// 2007 Jul 03: Check for something passing in a Class as an object (as I tend to do,
// which breaks tests under Maven 2, but not under Eclipse) and suggest
// that 'this' be used instead. - dj@opennms.org
// 2007 Apr 05: Add methods to get the opennms-daemon src/main/filtered/etc directory
// and to set an absolute home directory. - dj@opennms.org
// 2007 Apr 05: Add methods to get the current directory, top-level project directory,
// and daemon directory. Add methods to set some common system properties
// used in tests. - dj@opennms.org
//
// Original code base Copyright (C) 1999-2001 Oculan Corp. All rights reserved.
//
// This program is free software; you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation; either version 2 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
//
// For more information contact:
// OpenNMS Licensing <license@opennms.org>
// http://www.opennms.org/
// http://www.opennms.com/
//
package org.opennms.test;
import java.io.BufferedReader;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.Reader;
import java.io.StringReader;
import junit.framework.Assert;
/**
 * Helpers for locating project directories and loading configuration
 * resources in unit tests. Extends JUnit's Assert so assertion helpers can
 * be used unqualified.
 */
public class ConfigurationTestUtils extends Assert {
    private static final String POM_FILE = "pom.xml";
    private static final String DAEMON_DIRECTORY = "opennms-daemon";

    /** Returns a Reader over the given classpath resource (relative to obj's class). */
    public static Reader getReaderForResource(Object obj, String resource) {
        return new InputStreamReader(getInputStreamForResource(obj, resource));
    }

    /**
     * Returns an InputStream for the given classpath resource. Fails the
     * test if obj is a Class (callers usually want 'this') or if the
     * resource cannot be found.
     */
    public static InputStream getInputStreamForResource(Object obj,
            String resource) {
        assertFalse("obj should not be an instance of java.lang.Class; you usually want to use 'this'", obj instanceof Class);
        InputStream is = obj.getClass().getResourceAsStream(resource);
        assertNotNull("could not get resource '" + resource + "' as an input stream", is);
        return is;
    }

    /** Like {@link #getReaderForResource}, with regex replacements applied to the content. */
    public static Reader getReaderForResourceWithReplacements(Object obj,
            String resource, String[] ... replacements) throws IOException {
        String newConfig = getConfigForResourceWithReplacements(obj, resource,
                replacements);
        return new StringReader(newConfig);
    }

    /** Like {@link #getInputStreamForResource}, with regex replacements applied to the content. */
    public static InputStream getInputStreamForResourceWithReplacements(Object obj,
            String resource, String[] ... replacements) throws IOException {
        String newConfig = getConfigForResourceWithReplacements(obj, resource,
                replacements);
        return new ByteArrayInputStream(newConfig.getBytes());
    }

    /**
     * Reads the resource into a String and applies each replacement pair
     * ([0] = regex, [1] = replacement) to the whole content.
     */
    public static String getConfigForResourceWithReplacements(Object obj,
            String resource, String[] ... replacements) throws IOException {
        BufferedReader bufferedReader = new BufferedReader(getReaderForResource(obj, resource));
        StringBuffer buffer = new StringBuffer();
        try {
            String line;
            while ((line = bufferedReader.readLine()) != null) {
                buffer.append(line);
                buffer.append("\n");
            }
        } finally {
            // Close the underlying resource stream instead of leaking it.
            bufferedReader.close();
        }
        String newConfig = buffer.toString();
        for (String[] replacement : replacements) {
            newConfig = newConfig.replaceAll(replacement[0], replacement[1]);
        }
        return newConfig;
    }

    /** Returns a Reader for a daemon config file under src/main/filtered/etc. */
    public static Reader getReaderForConfigFile(String configFile) throws FileNotFoundException {
        return new InputStreamReader(getInputStreamForConfigFile(configFile));
    }

    /** Returns an InputStream for a daemon config file; fails the test if the file is missing. */
    public static InputStream getInputStreamForConfigFile(String configFile) throws FileNotFoundException {
        File file = new File(getDaemonEtcDirectory(), configFile);
        assertTrue("configuration file '" + configFile + "' does not exist at " + file.getAbsolutePath(), file.exists());
        InputStream is = new FileInputStream(file);
        return is;
    }

    /** @return the opennms-daemon src/main/filtered/etc directory. */
    public static File getDaemonEtcDirectory() {
        return new File(getDaemonProjectDirectory(), "src/main/filtered/etc");
    }

    /** Sets the opennms.home system property relative to the current working directory. */
    public static void setRelativeHomeDirectory(String relativeHomeDirectory) {
        setAbsoluteHomeDirectory(new File(getCurrentDirectory().getAbsolutePath(), relativeHomeDirectory).getAbsolutePath());
    }

    /** Sets the opennms.home system property to an absolute path. */
    public static void setAbsoluteHomeDirectory(final String absoluteHomeDirectory) {
        System.setProperty("opennms.home", absoluteHomeDirectory);
    }

    /** @return the top-level project directory (the one containing build.sh). */
    public static File getTopProjectDirectory() {
        File currentDirectory = getCurrentDirectory();
        File pomFile = new File(currentDirectory, POM_FILE);
        assertTrue("pom.xml in current directory should exist: " + pomFile.getAbsolutePath(), pomFile.exists());
        return findTopProjectDirectory(currentDirectory);
    }

    private static File getCurrentDirectory() {
        File currentDirectory = new File(System.getProperty("user.dir"));
        assertTrue("current directory should exist: " + currentDirectory.getAbsolutePath(), currentDirectory.exists());
        assertTrue("current directory should be a directory: " + currentDirectory.getAbsolutePath(), currentDirectory.isDirectory());
        return currentDirectory;
    }

    /** @return the opennms-daemon module directory under the top-level project directory. */
    public static File getDaemonProjectDirectory() {
        File topLevelDirectory = getTopProjectDirectory();
        File daemonDirectory = new File(topLevelDirectory, DAEMON_DIRECTORY);
        if (!daemonDirectory.exists()) {
            throw new IllegalStateException("Could not find a " + DAEMON_DIRECTORY + " in the location top-level directory: " + topLevelDirectory);
        }
        File pomFile = new File(daemonDirectory, POM_FILE);
        assertTrue("pom.xml in " + DAEMON_DIRECTORY + " directory should exist: " + pomFile.getAbsolutePath(), pomFile.exists());
        return daemonDirectory;
    }

    /**
     * Walks up from currentDirectory until a directory containing build.sh
     * is found; returns null if the filesystem root is reached first.
     */
    private static File findTopProjectDirectory(File currentDirectory) {
        File buildFile = new File(currentDirectory, "build.sh");
        if (buildFile.exists()) {
            File pomFile = new File(currentDirectory, POM_FILE);
            // Fixed copy-paste in the message: this assertion checks the
            // top-level directory, not the opennms-daemon directory.
            assertTrue("pom.xml in top-level directory should exist: " + pomFile.getAbsolutePath(), pomFile.exists());
            return currentDirectory;
        } else {
            File parentDirectory = currentDirectory.getParentFile();
            if (parentDirectory == null || parentDirectory == currentDirectory) {
                return null;
            } else {
                return findTopProjectDirectory(parentDirectory);
            }
        }
    }

    /** Sets the rrd.binary system property. */
    public static void setRrdBinary(String path) {
        System.setProperty("rrd.binary", path);
    }

    /** Sets rrd.base.dir relative to the current directory, creating the directory if needed. */
    public static void setRelativeRrdBaseDirectory(String relativePath) {
        File rrdDir = new File(getCurrentDirectory(), relativePath);
        if (!rrdDir.exists()) {
            rrdDir.mkdirs();
        }
        System.setProperty("rrd.base.dir", rrdDir.getAbsolutePath());
    }
}
|
opennms-test/src/main/java/org/opennms/test/ConfigurationTestUtils.java
|
//
// This file is part of the OpenNMS(R) Application.
//
// OpenNMS(R) is Copyright (C) 2006 The OpenNMS Group, Inc. All rights reserved.
// OpenNMS(R) is a derivative work, containing both original code, included code and modified
// code that was published under the GNU General Public License. Copyrights for modified
// and included code are below.
//
// OpenNMS(R) is a registered trademark of The OpenNMS Group, Inc.
//
// Modifications:
//
// 2007 Apr 05: Add methods to get the opennms-daemon src/main/filtered/etc directory
// and to set an absolute home directory. - dj@opennms.org
// 2007 Apr 05: Add methods to get the current directory, top-level project directory,
// and daemon directory. Add methods to set some common system properties
// used in tests. - dj@opennms.org
//
// Original code base Copyright (C) 1999-2001 Oculan Corp. All rights reserved.
//
// This program is free software; you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation; either version 2 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
//
// For more information contact:
// OpenNMS Licensing <license@opennms.org>
// http://www.opennms.org/
// http://www.opennms.com/
//
package org.opennms.test;
import java.io.BufferedReader;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.Reader;
import java.io.StringReader;
import junit.framework.Assert;
/**
 * Helpers for locating project directories and loading configuration
 * resources in unit tests. Extends JUnit's Assert so assertion helpers can
 * be used unqualified.
 */
public class ConfigurationTestUtils extends Assert {
    private static final String POM_FILE = "pom.xml";
    private static final String DAEMON_DIRECTORY = "opennms-daemon";
    /** Returns a Reader over the given classpath resource (relative to obj's class). */
    public static Reader getReaderForResource(Object obj, String resource) {
        return new InputStreamReader(getInputStreamForResource(obj, resource));
    }
    /** Returns an InputStream for the given classpath resource; fails the test if absent. */
    public static InputStream getInputStreamForResource(Object obj,
            String resource) {
        InputStream is = obj.getClass().getResourceAsStream(resource);
        assertNotNull("could not get resource '" + resource + "' as an input stream", is);
        return is;
    }
    /** Like getReaderForResource, with regex replacements applied to the content. */
    public static Reader getReaderForResourceWithReplacements(Object obj,
            String resource, String[] ... replacements) throws IOException {
        String newConfig = getConfigForResourceWithReplacements(obj, resource,
                replacements);
        return new StringReader(newConfig);
    }
    /** Like getInputStreamForResource, with regex replacements applied to the content. */
    public static InputStream getInputStreamForResourceWithReplacements(Object obj,
            String resource, String[] ... replacements) throws IOException {
        String newConfig = getConfigForResourceWithReplacements(obj, resource,
                replacements);
        return new ByteArrayInputStream(newConfig.getBytes());
    }
    /**
     * Reads the resource into a String and applies each replacement pair
     * ([0] = regex, [1] = replacement) to the whole content.
     */
    public static String getConfigForResourceWithReplacements(Object obj,
            String resource, String[] ... replacements) throws IOException {
        Reader inputReader = getReaderForResource(obj, resource);
        // NOTE(review): this reader is never closed, so the underlying
        // resource stream is leaked; acceptable in short-lived tests but
        // worth fixing.
        BufferedReader bufferedReader = new BufferedReader(inputReader);
        StringBuffer buffer = new StringBuffer();
        String line;
        while ((line = bufferedReader.readLine()) != null) {
            buffer.append(line);
            buffer.append("\n");
        }
        String newConfig = buffer.toString();
        for (String[] replacement : replacements) {
            newConfig = newConfig.replaceAll(replacement[0], replacement[1]);
        }
        return newConfig;
    }
    /** Returns a Reader for a daemon config file under src/main/filtered/etc. */
    public static Reader getReaderForConfigFile(String configFile) throws FileNotFoundException {
        return new InputStreamReader(getInputStreamForConfigFile(configFile));
    }
    /** Returns an InputStream for a daemon config file; fails the test if the file is missing. */
    public static InputStream getInputStreamForConfigFile(String configFile) throws FileNotFoundException {
        File file = new File(getDaemonEtcDirectory(), configFile);
        assertTrue("configuration file '" + configFile + "' does not exist at " + file.getAbsolutePath(), file.exists());
        InputStream is = new FileInputStream(file);
        return is;
    }
    /** @return the opennms-daemon src/main/filtered/etc directory. */
    public static File getDaemonEtcDirectory() {
        return new File(getDaemonProjectDirectory(), "src/main/filtered/etc");
    }
    /** Sets the opennms.home system property relative to the current working directory. */
    public static void setRelativeHomeDirectory(String relativeHomeDirectory) {
        setAbsoluteHomeDirectory(new File(getCurrentDirectory().getAbsolutePath(), relativeHomeDirectory).getAbsolutePath());
    }
    /** Sets the opennms.home system property to an absolute path. */
    public static void setAbsoluteHomeDirectory(final String absoluteHomeDirectory) {
        System.setProperty("opennms.home", absoluteHomeDirectory);
    }
    /** @return the top-level project directory (the one containing build.sh). */
    public static File getTopProjectDirectory() {
        File currentDirectory = getCurrentDirectory();
        File pomFile = new File(currentDirectory, POM_FILE);
        assertTrue("pom.xml in current directory should exist: " + pomFile.getAbsolutePath(), pomFile.exists());
        return findTopProjectDirectory(currentDirectory);
    }
    private static File getCurrentDirectory() {
        File currentDirectory = new File(System.getProperty("user.dir"));
        assertTrue("current directory should exist: " + currentDirectory.getAbsolutePath(), currentDirectory.exists());
        assertTrue("current directory should be a directory: " + currentDirectory.getAbsolutePath(), currentDirectory.isDirectory());
        return currentDirectory;
    }
    /** @return the opennms-daemon module directory under the top-level project directory. */
    public static File getDaemonProjectDirectory() {
        File topLevelDirectory = getTopProjectDirectory();
        File daemonDirectory = new File(topLevelDirectory, DAEMON_DIRECTORY);
        if (!daemonDirectory.exists()) {
            throw new IllegalStateException("Could not find a " + DAEMON_DIRECTORY + " in the location top-level directory: " + topLevelDirectory);
        }
        File pomFile = new File(daemonDirectory, POM_FILE);
        assertTrue("pom.xml in " + DAEMON_DIRECTORY + " directory should exist: " + pomFile.getAbsolutePath(), pomFile.exists());
        return daemonDirectory;
    }
    /**
     * Walks up from currentDirectory until a directory containing build.sh
     * is found; returns null if the filesystem root is reached first.
     */
    private static File findTopProjectDirectory(File currentDirectory) {
        File buildFile = new File(currentDirectory, "build.sh");
        if (buildFile.exists()) {
            File pomFile = new File(currentDirectory, POM_FILE);
            // NOTE(review): message mentions the opennms-daemon directory but
            // this assertion checks the top-level directory — looks like a
            // copy-paste; verify before relying on the failure text.
            assertTrue("pom.xml in " + DAEMON_DIRECTORY + " directory should exist: " + pomFile.getAbsolutePath(), pomFile.exists());
            return currentDirectory;
        } else {
            File parentDirectory = currentDirectory.getParentFile();
            if (parentDirectory == null || parentDirectory == currentDirectory) {
                return null;
            } else {
                return findTopProjectDirectory(parentDirectory);
            }
        }
    }
    /** Sets the rrd.binary system property. */
    public static void setRrdBinary(String path) {
        System.setProperty("rrd.binary", path);
    }
    /** Sets rrd.base.dir relative to the current directory, creating the directory if needed. */
    public static void setRelativeRrdBaseDirectory(String relativePath) {
        File rrdDir = new File(getCurrentDirectory(), relativePath);
        if (!rrdDir.exists()) {
            rrdDir.mkdirs();
        }
        System.setProperty("rrd.base.dir", rrdDir.getAbsolutePath());
    }
}
|
Check for something passing in a Class as an object (as I tend to do, which breaks tests under Maven 2, but not under Eclipse) and suggest that 'this' be used instead.
|
opennms-test/src/main/java/org/opennms/test/ConfigurationTestUtils.java
|
Check for something passing in a Class as an object (as I tend to do, which breaks tests under Maven 2, but not under Eclipse) and suggest that 'this' be used instead.
|
|
Java
|
agpl-3.0
|
3dde331f9d18af81791bca263b2d9253db1b1939
| 0
|
virustotalop/mcMMO,Maximvdw/mcMMO,EvilOlaf/mcMMO,jhonMalcom79/mcMMO_pers,isokissa3/mcMMO
|
package com.gmail.nossr50.runnables;
import java.util.ArrayList;
import java.util.HashMap;
import org.bukkit.Bukkit;
import org.bukkit.plugin.Plugin;
import com.gmail.nossr50.mcMMO;
import com.gmail.nossr50.config.Config;
import com.gmail.nossr50.util.Database;
public class UserPurgeTask implements Runnable {
private Plugin plugin;
private Database database = mcMMO.getPlayerDatabase();
private String tablePrefix = Config.getInstance().getMySQLTablePrefix();
private String databaseName = Config.getInstance().getMySQLDatabaseName();
/** @param plugin owning plugin; its logger is used to report purge results */
public UserPurgeTask(Plugin plugin) {
    this.plugin = plugin;
}
@Override
public void run() {
    // Purging is only implemented for MySQL-backed player data.
    if (Config.getInstance().getUseMySQL()) {
        purgePowerlessSQL();
        purgeOldSQL();
    }
    else {
        //TODO: Make this work for Flatfile data.
    }
}
/**
 * Deletes users matched by the power-level query from the MySQL database,
 * skipping any player who is currently online.
 */
private void purgePowerlessSQL() {
    System.out.println("Purging powerless users...");
    String query = "taming+mining+woodcutting+repair+unarmed+herbalism+excavation+archery+swords+axes+acrobatics+fishing";
    // NOTE(review): the WHERE clause selects users with total skill > 0,
    // which looks inverted for a "powerless" purge — verify the intended
    // filter against the Database.read semantics.
    HashMap<Integer, ArrayList<String>> userslist = database.read("SELECT " + query + ", user_id FROM " + tablePrefix + "skills WHERE " + query + " > 0 ORDER BY " + query + " DESC ");
    int purgedUsers = 0;
    for (int i = 1; i <= userslist.size(); i++) {
        System.out.println("Checking user " + i + "/" + userslist.size());
        int userId = Integer.valueOf(userslist.get(i).get(1));
        HashMap<Integer, ArrayList<String>> username = database.read("SELECT user FROM " + tablePrefix + "users WHERE id = '" + userId + "'");
        // Never purge a player who is currently online.
        if (username != null && Bukkit.getOfflinePlayer(username.get(1).get(0)).isOnline()) {
            continue;
        }
        deleteFromSQL(userId);
        purgedUsers++;
    }
    // Fixed log message: there was no space between the count and "users".
    plugin.getLogger().info("Purged " + purgedUsers + " users from the database.");
}
private void purgeOldSQL() {
System.out.println("Purging old users...");
long currentTime = System.currentTimeMillis();
String query = "taming+mining+woodcutting+repair+unarmed+herbalism+excavation+archery+swords+axes+acrobatics+fishing";
HashMap<Integer, ArrayList<String>> userslist = database.read("SELECT " + query + ", user_id FROM " + tablePrefix + "skills WHERE " + query + " > 0 ORDER BY " + query + " DESC ");
int purgedUsers = 0;
for (int i = 1; i <= userslist.size(); i++) {
System.out.println("Checking user " + i + "/" + userslist.size());
int userId = Integer.valueOf(userslist.get(i).get(1));
long lastLoginTime = database.getInt("SELECT lastlogin FROM " + tablePrefix + "users WHERE id = '" + userId + "'") * 1000L;
long loginDifference = currentTime - lastLoginTime;
if (loginDifference > 2630000000L) {
deleteFromSQL(userId);
purgedUsers++;
}
}
plugin.getLogger().info("Purged " + purgedUsers + "users from the database.");
}
private void deleteFromSQL(int userId) {
System.out.println("Deleting user " + userId);
database.write("DELETE FROM "
+ databaseName + "."
+ tablePrefix + "users WHERE "
+ tablePrefix + "users.id=" + userId);
database.write("DELETE FROM "
+ databaseName + "."
+ tablePrefix + "cooldowns WHERE "
+ tablePrefix + "cooldowns.user_id=" + userId);
database.write("DELETE FROM "
+ databaseName + "."
+ tablePrefix + "huds WHERE "
+ tablePrefix + "huds.user_id=" + userId);
database.write("DELETE FROM "
+ databaseName + "."
+ tablePrefix + "skills WHERE "
+ tablePrefix + "skills.user_id=" + userId);
database.write("DELETE FROM "
+ databaseName + "."
+ tablePrefix + "experience WHERE "
+ tablePrefix + "experience.user_id=" + userId);
System.out.println("User " + userId + " was successfully removed!");
}
}
|
src/main/java/com/gmail/nossr50/runnables/UserPurgeTask.java
|
package com.gmail.nossr50.runnables;
import java.util.ArrayList;
import java.util.HashMap;
import org.bukkit.Bukkit;
import org.bukkit.plugin.Plugin;
import com.gmail.nossr50.mcMMO;
import com.gmail.nossr50.config.Config;
import com.gmail.nossr50.util.Database;
/**
 * Periodic task that purges stale player records from the mcMMO MySQL tables.
 * Two passes are made: one for "powerless" users and one for users who have
 * not logged in for roughly a month. Flatfile storage is not yet supported.
 */
public class UserPurgeTask implements Runnable {
    private Plugin plugin;
    private Database database = mcMMO.getPlayerDatabase();
    private String tablePrefix = Config.getInstance().getMySQLTablePrefix();
    private String databaseName = Config.getInstance().getMySQLDatabaseName();

    public UserPurgeTask(Plugin plugin) {
        this.plugin = plugin;
    }

    @Override
    public void run() {
        if (Config.getInstance().getUseMySQL()) {
            purgePowerlessSQL();
            purgeOldSQL();
        }
        else {
            //TODO: Make this work for Flatfile data.
        }
    }

    /**
     * Deletes offline users matched by the power-level query below.
     * Online users are always skipped.
     */
    private void purgePowerlessSQL() {
        // Sum of all skill columns = the user's total "power level".
        String query = "taming+mining+woodcutting+repair+unarmed+herbalism+excavation+archery+swords+axes+acrobatics+fishing";
        // NOTE(review): this selects users whose power level is "> 0", yet the
        // method claims to purge *powerless* users — "= 0" seems intended; verify.
        HashMap<Integer, ArrayList<String>> userslist = database.read("SELECT " + query + ", user_id FROM " + tablePrefix + "skills WHERE " + query + " > 0 ORDER BY " + query + " DESC ");
        int purgedUsers = 0;
        // database.read() appears to return a 1-indexed row map; column index 1
        // is the second selected column (user_id).
        for (int i = 1; i <= userslist.size(); i++) {
            int userId = Integer.parseInt(userslist.get(i).get(1));
            HashMap<Integer, ArrayList<String>> username = database.read("SELECT user FROM " + tablePrefix + "users WHERE id = '" + userId + "'");
            if (username != null && Bukkit.getOfflinePlayer(username.get(1).get(0)).isOnline()) {
                continue; // never purge a player who is currently online
            }
            deleteFromSQL(userId);
            purgedUsers++;
        }
        // Fixed: missing space before "users" produced e.g. "Purged 5users".
        plugin.getLogger().info("Purged " + purgedUsers + " users from the database.");
    }

    /**
     * Deletes users whose last login is older than ~1 month (2,630,000,000 ms).
     */
    private void purgeOldSQL() {
        String query = "taming+mining+woodcutting+repair+unarmed+herbalism+excavation+archery+swords+axes+acrobatics+fishing";
        HashMap<Integer, ArrayList<String>> userslist = database.read("SELECT " + query + ", user_id FROM " + tablePrefix + "skills WHERE " + query + " > 0 ORDER BY " + query + " DESC ");
        int purgedUsers = 0;
        for (int i = 1; i <= userslist.size(); i++) {
            int userId = Integer.parseInt(userslist.get(i).get(1));
            // lastlogin is stored in seconds; convert to milliseconds.
            long lastLoginTime = database.getInt("SELECT lastlogin FROM " + tablePrefix + "users WHERE id = '" + userId + "'") * 1000L;
            long loginDifference = System.currentTimeMillis() - lastLoginTime;
            if (loginDifference > 2630000000L) {
                deleteFromSQL(userId);
                purgedUsers++;
            }
        }
        // Fixed: missing space before "users" produced e.g. "Purged 5users".
        plugin.getLogger().info("Purged " + purgedUsers + " users from the database.");
    }

    /**
     * Removes every row referencing the given user id from all mcMMO tables.
     *
     * @param userId primary key of the user in the users table
     */
    private void deleteFromSQL(int userId) {
        database.write("DELETE FROM "
                + databaseName + "."
                + tablePrefix + "users WHERE "
                + tablePrefix + "users.id=" + userId);
        database.write("DELETE FROM "
                + databaseName + "."
                + tablePrefix + "cooldowns WHERE "
                + tablePrefix + "cooldowns.user_id=" + userId);
        database.write("DELETE FROM "
                + databaseName + "."
                + tablePrefix + "huds WHERE "
                + tablePrefix + "huds.user_id=" + userId);
        database.write("DELETE FROM "
                + databaseName + "."
                + tablePrefix + "skills WHERE "
                + tablePrefix + "skills.user_id=" + userId);
        database.write("DELETE FROM "
                + databaseName + "."
                + tablePrefix + "experience WHERE "
                + tablePrefix + "experience.user_id=" + userId);
    }
}
|
Add some debug so we can find the problem...
|
src/main/java/com/gmail/nossr50/runnables/UserPurgeTask.java
|
Add some debug so we can find the problem...
|
|
Java
|
agpl-3.0
|
c97020183371941e4056298b5270490bbf77d8df
| 0
|
haiqu/bitsquare,bisq-network/exchange,bisq-network/exchange,ManfredKarrer/exchange,bitsquare/bitsquare,bitsquare/bitsquare,ManfredKarrer/exchange,sakazz/exchange,sakazz/exchange,haiqu/bitsquare
|
/*
* This file is part of Bitsquare.
*
* Bitsquare is free software: you can redistribute it and/or modify it
* under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or (at
* your option) any later version.
*
* Bitsquare is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public
* License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with Bitsquare. If not, see <http://www.gnu.org/licenses/>.
*/
package io.bitsquare.locale;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
/**
 * Static helpers describing which bank-account form fields are required, and
 * how they should be labelled, for a given ISO 3166 country code.
 *
 * <p>All methods fall back to a conservative default for unknown country codes.
 * Note: the unused SLF4J logger field was removed (dead code).
 */
public class BankUtil {

    // BankName

    /**
     * @param countryCode ISO country code
     * @return whether the bank name field must be filled in
     */
    public static boolean isBankNameRequired(String countryCode) {
        switch (countryCode) {
            case "GB":
            case "US":
            case "NZ":
            case "AU":
            case "CA":
            case "SE":
            case "HK":
                // We always show the bank name for now, as it is needed by
                // specific banks; this handling should be optimized in future.
                return true;
            case "MX":
            case "BR":
                return true;
            default:
                return true;
        }
    }

    /** @return the form label for the bank name field. */
    public static String getBankNameLabel(String countryCode) {
        switch (countryCode) {
            case "BR":
                return "Bank name:";
            default:
                return isBankNameRequired(countryCode) ? "Bank name:" : "Bank name (optional):";
        }
    }

    // BankId

    /** @return whether the bank ID (BIC/SWIFT or local code) is required. */
    public static boolean isBankIdRequired(String countryCode) {
        switch (countryCode) {
            case "GB":
            case "US":
            case "BR":
            case "NZ":
            case "AU":
            case "SE":
                return false;
            case "CA":
            case "MX":
            case "HK":
                return true;
            default:
                return true;
        }
    }

    /** @return the form label for the bank ID field. */
    public static String getBankIdLabel(String countryCode) {
        switch (countryCode) {
            case "CA":
                return "Institution Number:";
            case "MX":
            case "HK":
                return "Bank code:";
            default:
                return isBankIdRequired(countryCode) ? "Bank ID (BIC/SWIFT):" : "Bank ID (BIC/SWIFT) (optional):";
        }
    }

    // BranchId

    /** @return whether the branch identifier (sort code, routing no., …) is required. */
    public static boolean isBranchIdRequired(String countryCode) {
        switch (countryCode) {
            case "GB":
            case "US":
            case "BR":
            case "AU":
            case "CA":
                return true;
            case "NZ":
            case "MX":
            case "HK":
            case "SE":
                return false;
            default:
                return true;
        }
    }

    /** @return the country-specific label for the branch identifier field. */
    public static String getBranchIdLabel(String countryCode) {
        switch (countryCode) {
            case "GB":
                return "UK sort code:";
            case "US":
                return "Routing Number:";
            case "BR":
                return "Branch code:";
            case "AU":
                return "BSB code:";
            case "CA":
                return "Transit Number:";
            default:
                return isBranchIdRequired(countryCode) ? "Branch no.:" : "Branch no. (optional):";
        }
    }

    // AccountNr

    /** @return whether the account number is required (currently always true). */
    public static boolean isAccountNrRequired(String countryCode) {
        switch (countryCode) {
            default:
                return true;
        }
    }

    /** @return the country-specific label for the account number field. */
    public static String getAccountNrLabel(String countryCode) {
        switch (countryCode) {
            case "GB":
            case "US":
            case "BR":
            case "NZ":
            case "AU":
            case "CA":
            case "HK":
                return "Account number:";
            case "SE":
                return "Bankgiro number:";
            case "MX":
                return "CLABE:";
            default:
                return "Account no. (IBAN):";
        }
    }

    // AccountType

    /** @return whether an account type (checking/savings) must be chosen. */
    public static boolean isAccountTypeRequired(String countryCode) {
        switch (countryCode) {
            case "US":
            case "BR":
            case "CA":
                return true;
            default:
                return false;
        }
    }

    /** @return the form label for the account type field, or "" if unused. */
    public static String getAccountTypeLabel(String countryCode) {
        switch (countryCode) {
            case "US":
            case "BR":
            case "CA":
                return "Account type:";
            default:
                return "";
        }
    }

    /** @return the selectable account types; empty if the country has none. */
    public static List<String> getAccountTypeValues(String countryCode) {
        switch (countryCode) {
            case "US":
            case "BR":
            case "CA":
                return Arrays.asList("Checking", "Savings");
            default:
                return new ArrayList<>();
        }
    }

    // HolderId

    /** @return whether a national holder ID (CPF, RUT, …) is required. */
    public static boolean isHolderIdRequired(String countryCode) {
        switch (countryCode) {
            case "BR":
                return true;
            case "CL":
                return true;
            default:
                return false;
        }
    }

    /** @return the country-specific label for the holder ID field. */
    public static String getHolderIdLabel(String countryCode) {
        switch (countryCode) {
            case "BR":
                return "Tax Registration Number (CPF):";
            case "CL":
                return "RUT Number:";
            default:
                return "Personal ID:";
        }
    }

    // Validation

    /** @return whether client-side field validation is available for the country. */
    public static boolean useValidation(String countryCode) {
        switch (countryCode) {
            case "GB":
            case "US":
            case "BR":
            case "AU":
            case "CA":
            case "NZ":
            case "MX":
            case "HK":
            case "SE":
                return true;
            default:
                return false;
        }
    }
}
|
core/src/main/java/io/bitsquare/locale/BankUtil.java
|
/*
* This file is part of Bitsquare.
*
* Bitsquare is free software: you can redistribute it and/or modify it
* under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or (at
* your option) any later version.
*
* Bitsquare is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public
* License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with Bitsquare. If not, see <http://www.gnu.org/licenses/>.
*/
package io.bitsquare.locale;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
/**
 * Static helpers describing which bank-account form fields are required, and
 * how they should be labelled, for a given ISO 3166 country code.
 *
 * <p>All methods fall back to a conservative default for unknown country codes.
 * Note: the unused SLF4J logger field was removed (dead code).
 */
public class BankUtil {

    // BankName

    /**
     * @param countryCode ISO country code
     * @return whether the bank name field must be filled in
     */
    public static boolean isBankNameRequired(String countryCode) {
        switch (countryCode) {
            case "GB":
            case "US":
            case "NZ":
            case "AU":
            case "CA":
            case "SE":
            case "HK":
                return false;
            case "MX":
            case "BR":
                return true;
            default:
                return true;
        }
    }

    /** @return the form label for the bank name field. */
    public static String getBankNameLabel(String countryCode) {
        switch (countryCode) {
            case "BR":
                return "Bank name:";
            default:
                return isBankNameRequired(countryCode) ? "Bank name:" : "Bank name (optional):";
        }
    }

    // BankId

    /** @return whether the bank ID (BIC/SWIFT or local code) is required. */
    public static boolean isBankIdRequired(String countryCode) {
        switch (countryCode) {
            case "GB":
            case "US":
            case "BR":
            case "NZ":
            case "AU":
            case "SE":
                return false;
            case "CA":
            case "MX":
            case "HK":
                return true;
            default:
                return true;
        }
    }

    /** @return the form label for the bank ID field. */
    public static String getBankIdLabel(String countryCode) {
        switch (countryCode) {
            case "CA":
                return "Institution Number:";
            case "MX":
            case "HK":
                return "Bank code:";
            default:
                return isBankIdRequired(countryCode) ? "Bank ID (BIC/SWIFT):" : "Bank ID (BIC/SWIFT) (optional):";
        }
    }

    // BranchId

    /** @return whether the branch identifier (sort code, routing no., …) is required. */
    public static boolean isBranchIdRequired(String countryCode) {
        switch (countryCode) {
            case "GB":
            case "US":
            case "BR":
            case "AU":
            case "CA":
                return true;
            case "NZ":
            case "MX":
            case "HK":
            case "SE":
                return false;
            default:
                return true;
        }
    }

    /** @return the country-specific label for the branch identifier field. */
    public static String getBranchIdLabel(String countryCode) {
        switch (countryCode) {
            case "GB":
                return "UK sort code:";
            case "US":
                return "Routing Number:";
            case "BR":
                return "Branch code:";
            case "AU":
                return "BSB code:";
            case "CA":
                return "Transit Number:";
            default:
                return isBranchIdRequired(countryCode) ? "Branch no.:" : "Branch no. (optional):";
        }
    }

    // AccountNr

    /** @return whether the account number is required (currently always true). */
    public static boolean isAccountNrRequired(String countryCode) {
        switch (countryCode) {
            default:
                return true;
        }
    }

    /** @return the country-specific label for the account number field. */
    public static String getAccountNrLabel(String countryCode) {
        switch (countryCode) {
            case "GB":
            case "US":
            case "BR":
            case "NZ":
            case "AU":
            case "CA":
            case "HK":
                return "Account number:";
            case "SE":
                return "Bankgiro number:";
            case "MX":
                return "CLABE:";
            default:
                return "Account no. (IBAN):";
        }
    }

    // AccountType

    /** @return whether an account type (checking/savings) must be chosen. */
    public static boolean isAccountTypeRequired(String countryCode) {
        switch (countryCode) {
            case "US":
            case "BR":
            case "CA":
                return true;
            default:
                return false;
        }
    }

    /** @return the form label for the account type field, or "" if unused. */
    public static String getAccountTypeLabel(String countryCode) {
        switch (countryCode) {
            case "US":
            case "BR":
            case "CA":
                return "Account type:";
            default:
                return "";
        }
    }

    /** @return the selectable account types; empty if the country has none. */
    public static List<String> getAccountTypeValues(String countryCode) {
        switch (countryCode) {
            case "US":
            case "BR":
            case "CA":
                return Arrays.asList("Checking", "Savings");
            default:
                return new ArrayList<>();
        }
    }

    // HolderId

    /** @return whether a national holder ID (CPF, RUT, …) is required. */
    public static boolean isHolderIdRequired(String countryCode) {
        switch (countryCode) {
            case "BR":
            case "CL":
                return true;
            default:
                return false;
        }
    }

    /** @return the country-specific label for the holder ID field. */
    public static String getHolderIdLabel(String countryCode) {
        switch (countryCode) {
            case "BR":
                return "Tax Registration Number (CPF):";
            case "CL":
                return "RUT Number:";
            default:
                return "Personal ID:";
        }
    }

    // Validation

    /** @return whether client-side field validation is available for the country. */
    public static boolean useValidation(String countryCode) {
        switch (countryCode) {
            case "GB":
            case "US":
            case "BR":
            case "AU":
            case "CA":
            case "NZ":
            case "MX":
            case "HK":
            case "SE":
                return true;
            default:
                return false;
        }
    }
}
|
Show always bank name
|
core/src/main/java/io/bitsquare/locale/BankUtil.java
|
Show always bank name
|
|
Java
|
lgpl-2.1
|
a1b4a0c63ee0ece3c872704516596388549d2ed6
| 0
|
jolie/jolie,jolie/jolie,jolie/jolie
|
/***************************************************************************
* Copyright (C) 2009 by Fabrizio Montesi *
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU Library General Public License as *
* published by the Free Software Foundation; either version 2 of the *
* License, or (at your option) any later version. *
* *
* This program is distributed in the hope that it will be useful, *
* but WITHOUT ANY WARRANTY; without even the implied warranty of *
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
* GNU General Public License for more details. *
* *
* You should have received a copy of the GNU Library General Public *
* License along with this program; if not, write to the *
* Free Software Foundation, Inc., *
* 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *
* *
* For details about the authors of this software, see the AUTHORS file. *
***************************************************************************/
package joliex.plasma;
import joliex.plasma.impl.InterfaceVisitor;
import java.io.BufferedWriter;
import java.io.FileWriter;
import java.io.IOException;
import java.io.Writer;
import java.util.Arrays;
import java.util.logging.Logger;
import jolie.CommandLineException;
import jolie.CommandLineParser;
import jolie.lang.parse.OLParser;
import jolie.lang.parse.ParserException;
import jolie.lang.parse.Scanner;
import jolie.lang.parse.SemanticVerifier;
import jolie.lang.parse.ast.Program;
/**
*
* @author Fabrizio Montesi
*/
public class Jolie2Plasma
{
    /**
     * Command-line entry point: parses a Jolie program, validates it, and
     * converts the requested interfaces into a Plasma file.
     *
     * <p>Usage: {@code jolie2plasma [jolie options] <jolie filename> <output filename> [interface name list]}
     *
     * @param args CLI arguments; after option stripping, args[0] is the output
     *             file and args[1..] are the interface names to convert
     */
    public static void main( String[] args )
    {
        try {
            CommandLineParser cmdParser = new CommandLineParser( args, Jolie2Plasma.class.getClassLoader() );
            args = cmdParser.arguments();
            if ( args.length < 2 ) {
                throw new CommandLineException( "Insufficient number of arguments" );
            }
            Writer writer = new BufferedWriter( new FileWriter( args[0] ) );
            // Fix: the writer was never closed, so buffered output could be
            // silently lost; close it in a finally block (close() failures
            // surface as IOException and are handled below).
            try {
                OLParser parser = new OLParser(
                    new Scanner( cmdParser.programStream(), cmdParser.programFilepath() ),
                    cmdParser.includePaths(),
                    Jolie2Plasma.class.getClassLoader()
                );
                Program program = parser.parse();
                new SemanticVerifier( program ).validate();
                new InterfaceConverter(
                    program,
                    Arrays.copyOfRange( args, 1, args.length ),
                    Logger.getLogger( "jolie2plasma" )
                ).convert( writer );
            } finally {
                writer.close();
            }
        } catch( CommandLineException e ) {
            System.out.println( e.getMessage() );
            System.out.println( "Syntax is: jolie2plasma [jolie options] <jolie filename> <output filename> [interface name list]" );
        } catch( IOException e ) {
            e.printStackTrace();
        } catch( ParserException e ) {
            e.printStackTrace();
        } catch( InterfaceVisitor.InterfaceNotFound e ) {
            e.printStackTrace();
        }
    }
}
|
tools/jolie2plasma/src/joliex/plasma/Jolie2Plasma.java
|
/***************************************************************************
* Copyright (C) 2009 by Fabrizio Montesi *
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU Library General Public License as *
* published by the Free Software Foundation; either version 2 of the *
* License, or (at your option) any later version. *
* *
* This program is distributed in the hope that it will be useful, *
* but WITHOUT ANY WARRANTY; without even the implied warranty of *
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
* GNU General Public License for more details. *
* *
* You should have received a copy of the GNU Library General Public *
* License along with this program; if not, write to the *
* Free Software Foundation, Inc., *
* 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *
* *
* For details about the authors of this software, see the AUTHORS file. *
***************************************************************************/
package joliex.plasma;
import joliex.plasma.impl.InterfaceVisitor;
import java.io.BufferedWriter;
import java.io.FileWriter;
import java.io.IOException;
import java.io.Writer;
import java.util.Arrays;
import java.util.logging.Logger;
import jolie.CommandLineException;
import jolie.CommandLineParser;
import jolie.lang.parse.OLParser;
import jolie.lang.parse.ParserException;
import jolie.lang.parse.Scanner;
import jolie.lang.parse.ast.Program;
/**
*
* @author Fabrizio Montesi
*/
public class Jolie2Plasma
{
    /**
     * Command-line entry point: parses a Jolie program and converts the
     * requested interfaces into a Plasma file.
     *
     * <p>Usage: {@code jolie2plasma [jolie options] <jolie filename> <output filename> [interface name list]}
     *
     * @param args CLI arguments; after option stripping, args[0] is the output
     *             file and args[1..] are the interface names to convert
     */
    public static void main( String[] args )
    {
        try {
            CommandLineParser cmdParser = new CommandLineParser( args, Jolie2Plasma.class.getClassLoader() );
            args = cmdParser.arguments();
            if ( args.length < 2 ) {
                throw new CommandLineException( "Insufficient number of arguments" );
            }
            Writer writer = new BufferedWriter( new FileWriter( args[0] ) );
            // Fix: the writer was never closed, so buffered output could be
            // silently lost; close it in a finally block (close() failures
            // surface as IOException and are handled below).
            try {
                OLParser parser = new OLParser(
                    new Scanner( cmdParser.programStream(), cmdParser.programFilepath() ),
                    cmdParser.includePaths(),
                    Jolie2Plasma.class.getClassLoader()
                );
                Program program = parser.parse();
                new InterfaceConverter(
                    program,
                    Arrays.copyOfRange( args, 1, args.length ),
                    Logger.getLogger( "jolie2plasma" )
                ).convert( writer );
            } finally {
                writer.close();
            }
        } catch( CommandLineException e ) {
            System.out.println( e.getMessage() );
            System.out.println( "Syntax is: jolie2plasma [jolie options] <jolie filename> <output filename> [interface name list]" );
        } catch( IOException e ) {
            e.printStackTrace();
        } catch( ParserException e ) {
            e.printStackTrace();
        } catch( InterfaceVisitor.InterfaceNotFound e ) {
            e.printStackTrace();
        }
    }
}
|
Former-commit-id: 1f6ac5d88ebdf52fcfc3e0bd318ec4046c761df7
|
tools/jolie2plasma/src/joliex/plasma/Jolie2Plasma.java
| ||
Java
|
lgpl-2.1
|
78ca37a5297560a2da9d2a83875176cb84f1298c
| 0
|
adamallo/beast-mcmc,maxbiostat/beast-mcmc,adamallo/beast-mcmc,beast-dev/beast-mcmc,adamallo/beast-mcmc,beast-dev/beast-mcmc,beast-dev/beast-mcmc,4ment/beast-mcmc,maxbiostat/beast-mcmc,4ment/beast-mcmc,4ment/beast-mcmc,adamallo/beast-mcmc,adamallo/beast-mcmc,beast-dev/beast-mcmc,4ment/beast-mcmc,maxbiostat/beast-mcmc,beast-dev/beast-mcmc,4ment/beast-mcmc,maxbiostat/beast-mcmc,maxbiostat/beast-mcmc,4ment/beast-mcmc,maxbiostat/beast-mcmc,adamallo/beast-mcmc,beast-dev/beast-mcmc
|
/*
* SubtreeLeapOperatorParser.java
*
* Copyright (c) 2002-2015 Alexei Drummond, Andrew Rambaut and Marc Suchard
*
* This file is part of BEAST.
* See the NOTICE file distributed with this work for additional
* information regarding copyright ownership and licensing.
*
* BEAST is free software; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
*
* BEAST is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with BEAST; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301 USA
*/
package dr.evomodelxml.operators;
import dr.evomodel.operators.SubtreeLeapOperator;
import dr.evomodel.tree.TreeModel;
import dr.inference.operators.CoercableMCMCOperator;
import dr.inference.operators.CoercionMode;
import dr.inference.operators.MCMCOperator;
import dr.xml.*;
/**
*/
/**
 * XML parser for the {@code subtreeLeap} operator element.
 */
public class SubtreeLeapOperatorParser extends AbstractXMLObjectParser {

    public static final String SUBTREE_LEAP = "subtreeLeap";
    public static final String SIZE = "size";
    public static final String TARGET_ACCEPTANCE = "targetAcceptance";

    public String getParserName() {
        return SUBTREE_LEAP;
    }

    /**
     * Builds a {@link SubtreeLeapOperator} from the XML element.
     *
     * @throws XMLParseException if the size is missing/non-positive or the
     *         target acceptance probability is outside (0, 1)
     */
    public Object parseXMLObject(XMLObject xo) throws XMLParseException {
        CoercionMode mode = CoercionMode.parseMode(xo);

        TreeModel treeModel = (TreeModel) xo.getChild(TreeModel.class);
        final double weight = xo.getDoubleAttribute(MCMCOperator.WEIGHT);

        // size attribute is mandatory (default is NaN so absence is detectable)
        final double size = xo.getAttribute(SIZE, Double.NaN);
        final double targetAcceptance = xo.getAttribute(TARGET_ACCEPTANCE, 0.234);

        // Fix: "NaN <= 0.0" is false, so a NaN size previously slipped past
        // this validation; reject it explicitly.
        if (Double.isNaN(size) || size <= 0.0) {
            throw new XMLParseException("The SubTreeLeap size attribute must be positive and non-zero.");
        }

        if (targetAcceptance <= 0.0 || targetAcceptance >= 1.0) {
            throw new XMLParseException("Target acceptance probability has to lie in (0, 1)");
        }

        SubtreeLeapOperator operator = new SubtreeLeapOperator(treeModel, weight, size, targetAcceptance, mode);

        return operator;
    }

    public String getParserDescription() {
        return "An operator that moves a subtree a certain patristic distance.";
    }

    public Class getReturnType() {
        return SubtreeLeapOperator.class;
    }

    public XMLSyntaxRule[] getSyntaxRules() {
        return rules;
    }

    private final XMLSyntaxRule[] rules = {
            AttributeRule.newDoubleRule(MCMCOperator.WEIGHT),
            AttributeRule.newDoubleRule(SIZE, false),
            AttributeRule.newDoubleRule(TARGET_ACCEPTANCE, true),
            AttributeRule.newBooleanRule(CoercableMCMCOperator.AUTO_OPTIMIZE, true),
            new ElementRule(TreeModel.class)
    };
}
|
src/dr/evomodelxml/operators/SubtreeLeapOperatorParser.java
|
/*
* SubtreeLeapOperatorParser.java
*
* Copyright (c) 2002-2015 Alexei Drummond, Andrew Rambaut and Marc Suchard
*
* This file is part of BEAST.
* See the NOTICE file distributed with this work for additional
* information regarding copyright ownership and licensing.
*
* BEAST is free software; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
*
* BEAST is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with BEAST; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301 USA
*/
package dr.evomodelxml.operators;
import dr.evomodel.operators.SubtreeLeapOperator;
import dr.evomodel.tree.TreeModel;
import dr.inference.operators.CoercableMCMCOperator;
import dr.inference.operators.CoercionMode;
import dr.inference.operators.MCMCOperator;
import dr.xml.*;
/**
*/
public class SubtreeLeapOperatorParser extends AbstractXMLObjectParser {
public static final String SUBTREE_LEAP = "subtreeLeap";
public static final String SIZE = "size";
public static final String TARGET_ACCEPTANCE = "targetAcceptance";
public String getParserName() {
return SUBTREE_LEAP;
}
public Object parseXMLObject(XMLObject xo) throws XMLParseException {
CoercionMode mode = CoercionMode.parseMode(xo);
TreeModel treeModel = (TreeModel) xo.getChild(TreeModel.class);
final double weight = xo.getDoubleAttribute(MCMCOperator.WEIGHT);
// size attribute is manditory
final double size = xo.getAttribute(SIZE, Double.NaN);
final double targetAcceptance = xo.getAttribute(TARGET_ACCEPTANCE, 0.234);
if (size <= 0.0) {
throw new XMLParseException("The SubTreeLeap size attribute must be positive and non-zero.");
}
if (targetAcceptance <= 0.0 || targetAcceptance >= 1.0) {
throw new XMLParseException("Target acceptance probability has to lie in (0, 1)");
}
SubtreeLeapOperator operator = new SubtreeLeapOperator(treeModel, weight, size, targetAcceptance, mode);
return operator;
}
public String getParserDescription() {
return "An operator that moves a subtree a certain patristic distance.";
}
public Class getReturnType() {
return SubtreeLeapOperator.class;
}
public XMLSyntaxRule[] getSyntaxRules() {
return rules;
}
private final XMLSyntaxRule[] rules = {
AttributeRule.newDoubleRule(MCMCOperator.WEIGHT),
AttributeRule.newDoubleRule(SIZE, false),
AttributeRule.newDoubleRule(TARGET_ACCEPTANCE, true),
AttributeRule.newBooleanRule(CoercableMCMCOperator.AUTO_OPTIMIZE, true),
new ElementRule(TreeModel.class)
};
}
|
typo fix
|
src/dr/evomodelxml/operators/SubtreeLeapOperatorParser.java
|
typo fix
|
|
Java
|
apache-2.0
|
332c1d1857cfe857b2b18949bc3b2b9327b19638
| 0
|
tokee/lucene,tokee/lucene,tokee/lucene,tokee/lucene
|
package org.apache.lucene.search;
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/** Lower-level search API.
* <br>HitCollectors are primarily meant to be used to implement queries,
* sorting and filtering.
* @see Searcher#search(Query,HitCollector)
* @version $Id$
*/
public abstract class HitCollector {
  /** Called once for every document matching a query, with the document
   * number and its raw score.
   *
   * <P>If, for example, an application wished to collect all of the hits for a
   * query in a BitSet, then it might:<pre>
   *   Searcher searcher = new IndexSearcher(indexReader);
   *   final BitSet bits = new BitSet(indexReader.maxDoc());
   *   searcher.search(query, new HitCollector() {
   *       public void collect(int doc, float score) {
   *         bits.set(doc);
   *       }
   *     });
   * </pre>
   *
   * <p>Note: This is called in an inner search loop.  For good search
   * performance, implementations of this method should not call
   * {@link Searcher#doc(int)} or
   * {@link org.apache.lucene.index.IndexReader#document(int)} on every
   * document number encountered.  Doing so can slow searches by an order
   * of magnitude or more.
   * <p>Note: The <code>score</code> passed to this method is a raw score.
   * In other words, the score will not necessarily be a float whose value is
   * between 0 and 1.
   *
   * @param doc the matching document's number
   * @param score the document's raw, unnormalized score
   */
  public abstract void collect(int doc, float score);
}
|
src/java/org/apache/lucene/search/HitCollector.java
|
package org.apache.lucene.search;
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/** Lower-level search API.
* <br>HitCollectors are primarily meant to be used to implement queries,
* sorting and filtering.
* @see Searcher#search(Query,HitCollector)
* @version $Id$
*/
public abstract class HitCollector {
  /** Called once for every non-zero scoring document, with the document number
   * and its score.
   *
   * <P>If, for example, an application wished to collect all of the hits for a
   * query in a BitSet, then it might:<pre>
   *   Searcher searcher = new IndexSearcher(indexReader);
   *   final BitSet bits = new BitSet(indexReader.maxDoc());
   *   searcher.search(query, new HitCollector() {
   *       public void collect(int doc, float score) {
   *         bits.set(doc);
   *       }
   *     });
   * </pre>
   *
   * <p>Note: This is called in an inner search loop.  For good search
   * performance, implementations of this method should not call
   * {@link Searcher#doc(int)} or
   * {@link org.apache.lucene.index.IndexReader#document(int)} on every
   * document number encountered.  Doing so can slow searches by an order
   * of magnitude or more.
   * <p>Note: The <code>score</code> passed to this method is a raw score.
   * In other words, the score will not necessarily be a float whose value is
   * between 0 and 1.
   *
   * @param doc the matching document's number
   * @param score the document's raw, unnormalized score
   */
  public abstract void collect(int doc, float score);
}
|
documents with a score <= 0 can in fact be collected as matches
git-svn-id: 4c5078813df38efa56971a28e09a55254294f104@596462 13f79535-47bb-0310-9956-ffa450edef68
|
src/java/org/apache/lucene/search/HitCollector.java
|
documents with a score <= 0 can in fact be collected as matches
|
|
Java
|
apache-2.0
|
cb37dae9d64ebfe69e4fca8294cccf416c16b2e6
| 0
|
cbeams-archive/spring-framework-2.5.x,cbeams-archive/spring-framework-2.5.x,cbeams-archive/spring-framework-2.5.x,cbeams-archive/spring-framework-2.5.x
|
/*
* Copyright 2002-2007 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.aop.framework;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import org.aopalliance.aop.Advice;
import org.aopalliance.intercept.Interceptor;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.aop.Advisor;
import org.springframework.aop.TargetSource;
import org.springframework.aop.framework.adapter.AdvisorAdapterRegistry;
import org.springframework.aop.framework.adapter.GlobalAdvisorAdapterRegistry;
import org.springframework.aop.framework.adapter.UnknownAdviceTypeException;
import org.springframework.aop.target.SingletonTargetSource;
import org.springframework.beans.BeansException;
import org.springframework.beans.factory.BeanClassLoaderAware;
import org.springframework.beans.factory.BeanFactory;
import org.springframework.beans.factory.BeanFactoryAware;
import org.springframework.beans.factory.BeanFactoryUtils;
import org.springframework.beans.factory.FactoryBean;
import org.springframework.beans.factory.FactoryBeanNotInitializedException;
import org.springframework.beans.factory.ListableBeanFactory;
import org.springframework.core.OrderComparator;
import org.springframework.util.ClassUtils;
import org.springframework.util.ObjectUtils;
/**
* {@link org.springframework.beans.factory.FactoryBean} implementation that builds an
* AOP proxy based on beans in Spring {@link org.springframework.beans.factory.BeanFactory}.
*
* <p>{@link org.aopalliance.intercept.MethodInterceptor MethodInterceptors} and
* {@link org.springframework.aop.Advisor Advisors} are identified by a list of bean
* names in the current bean factory, specified through the "interceptorNames" property.
* The last entry in the list can be the name of a target bean or a
* {@link org.springframework.aop.TargetSource}; however, it is normally preferable
* to use the "targetName"/"target"/"targetSource" properties instead.
*
* <p>Global interceptors and advisors can be added at the factory level. The specified
* ones are expanded in an interceptor list where an "xxx*" entry is included in the
* list, matching the given prefix with the bean names (e.g. "global*" would match
* both "globalBean1" and "globalBean2", "*" all defined interceptors). The matching
* interceptors get applied according to their returned order value, if they implement
* the {@link org.springframework.core.Ordered} interface.
*
* <p>Creates a JDK proxy when proxy interfaces are given, and a CGLIB proxy for the
* actual target class if not. Note that the latter will only work if the target class
* does not have final methods, as a dynamic subclass will be created at runtime.
*
* <p>It's possible to cast a proxy obtained from this factory to {@link Advised},
* or to obtain the ProxyFactoryBean reference and programmatically manipulate it.
* This won't work for existing prototype references, which are independent. However,
* it will work for prototypes subsequently obtained from the factory. Changes to
* interception will work immediately on singletons (including existing references).
* However, to change interfaces or target it's necessary to obtain a new instance
* from the factory. This means that singleton instances obtained from the factory
* do not have the same object identity. However, they do have the same interceptors
* and target, and changing any reference will change all objects.
*
* @author Rod Johnson
* @author Juergen Hoeller
* @see #setInterceptorNames
* @see #setProxyInterfaces
* @see org.aopalliance.intercept.MethodInterceptor
* @see org.springframework.aop.Advisor
* @see Advised
*/
public class ProxyFactoryBean extends ProxyCreatorSupport
implements FactoryBean, BeanClassLoaderAware, BeanFactoryAware {
/**
* This suffix in a value in an interceptor list indicates to expand globals.
*/
public static final String GLOBAL_SUFFIX = "*";
protected final Log logger = LogFactory.getLog(getClass());
private String[] interceptorNames;
private String targetName;
private boolean autodetectInterfaces = true;
private boolean singleton = true;
private AdvisorAdapterRegistry advisorAdapterRegistry = GlobalAdvisorAdapterRegistry.getInstance();
/**
* Indicates whether the proxy should be frozen before creation.
*/
private boolean freezeProxy = false;
private ClassLoader beanClassLoader = ClassUtils.getDefaultClassLoader();
/**
* Owning bean factory, which cannot be changed after this
* object is initialized.
*/
private BeanFactory beanFactory;
/** Whether the advisor chain has already been initialized */
private boolean advisorChainInitialized = false;
/** If this is a singleton, the cached singleton proxy instance */
private Object singletonInstance;
/**
* Set the names of the interfaces we're proxying. If no interface
* is given, a CGLIB for the actual class will be created.
* <p>This is essentially equivalent to the "setInterfaces" method,
* but mirrors TransactionProxyFactoryBean's "setProxyInterfaces".
* @see #setInterfaces
* @see AbstractSingletonProxyFactoryBean#setProxyInterfaces
*/
public void setProxyInterfaces(Class[] proxyInterfaces) throws ClassNotFoundException {
setInterfaces(proxyInterfaces);
}
/**
* Set the list of Advice/Advisor bean names. This must always be set
* to use this factory bean in a bean factory.
* <p>The referenced beans should be of type Interceptor, Advisor or Advice
* The last entry in the list can be the name of any bean in the factory.
* If it's neither an Advice nor an Advisor, a new SingletonTargetSource
* is added to wrap it. Such a target bean cannot be used if the "target"
* or "targetSource" or "targetName" property is set, in which case the
* "interceptorNames" array must contain only Advice/Advisor bean names.
* @see org.aopalliance.intercept.MethodInterceptor
* @see org.springframework.aop.Advisor
* @see org.aopalliance.aop.Advice
* @see org.springframework.aop.target.SingletonTargetSource
*/
public void setInterceptorNames(String[] interceptorNames) {
this.interceptorNames = interceptorNames;
}
/**
* Set the name of the target bean. This is an alternative to specifying
* the target name at the end of the "interceptorNames" array.
* <p>You can also specify a target object or a TargetSource object
* directly, via the "target"/"targetSource" property, respectively.
* @see #setInterceptorNames(String[])
* @see #setTarget(Object)
* @see #setTargetSource(org.springframework.aop.TargetSource)
*/
public void setTargetName(String targetName) {
this.targetName = targetName;
}
/**
* Set whether to autodetect proxy interfaces if none specified.
* <p>Default is "true". Turn this flag off to create a CGLIB
* proxy for the full target class if no interfaces specified.
* @see #setProxyTargetClass
*/
public void setAutodetectInterfaces(boolean autodetectInterfaces) {
this.autodetectInterfaces = autodetectInterfaces;
}
/**
* Set the value of the singleton property. Governs whether this factory
* should always return the same proxy instance (which implies the same target)
* or whether it should return a new prototype instance, which implies that
* the target and interceptors may be new instances also, if they are obtained
* from prototype bean definitions. This allows for fine control of
* independence/uniqueness in the object graph.
*/
public void setSingleton(boolean singleton) {
this.singleton = singleton;
}
/**
* Specify the AdvisorAdapterRegistry to use.
* Default is the global AdvisorAdapterRegistry.
* @see org.springframework.aop.framework.adapter.GlobalAdvisorAdapterRegistry
*/
public void setAdvisorAdapterRegistry(AdvisorAdapterRegistry advisorAdapterRegistry) {
this.advisorAdapterRegistry = advisorAdapterRegistry;
}
public void setFrozen(boolean frozen) {
this.freezeProxy = frozen;
}
public void setBeanClassLoader(ClassLoader classLoader) {
this.beanClassLoader = classLoader;
}
public void setBeanFactory(BeanFactory beanFactory) {
this.beanFactory = beanFactory;
checkInterceptorNames();
}
/**
* Return a proxy. Invoked when clients obtain beans from this factory bean.
* Create an instance of the AOP proxy to be returned by this factory.
* The instance will be cached for a singleton, and create on each call to
* <code>getObject()</code> for a proxy.
* @return a fresh AOP proxy reflecting the current state of this factory
*/
public Object getObject() throws BeansException {
initializeAdvisorChain();
if (isSingleton()) {
return getSingletonInstance();
}
else {
if (this.targetName == null) {
logger.warn("Using non-singleton proxies with singleton targets is often undesirable." +
"Enable prototype proxies by setting the 'targetName' property.");
}
return newPrototypeInstance();
}
}
/**
* Return the type of the proxy. Will check the singleton instance if
* already created, else fall back to the proxy interface (in case of just
* a single one), the target bean type, or the TargetSource's target class.
* @see org.springframework.aop.TargetSource#getTargetClass
*/
public Class getObjectType() {
synchronized (this) {
if (this.singletonInstance != null) {
return this.singletonInstance.getClass();
}
}
Class[] ifcs = getProxiedInterfaces();
if (ifcs.length == 1) {
return ifcs[0];
}
else if (ifcs.length > 1) {
return createCompositeInterface(ifcs);
}
else if (this.targetName != null && this.beanFactory != null) {
return this.beanFactory.getType(this.targetName);
}
else {
return getTargetClass();
}
}
public boolean isSingleton() {
return this.singleton;
}
/**
* Create a composite interface Class for the given interfaces,
* implementing the given interfaces in one single Class.
* <p>The default implementation builds a JDK proxy class for the
* given interfaces.
* @param interfaces the interfaces to merge
* @return the merged interface as Class
* @see java.lang.reflect.Proxy#getProxyClass
*/
protected Class createCompositeInterface(Class[] interfaces) {
return ClassUtils.createCompositeInterface(interfaces, this.beanClassLoader);
}
/**
* Return the singleton instance of this class's proxy object,
* lazily creating it if it hasn't been created already.
* @return the shared singleton proxy
*/
private synchronized Object getSingletonInstance() {
if (this.singletonInstance == null) {
this.targetSource = freshTargetSource();
if (this.autodetectInterfaces && getProxiedInterfaces().length == 0 && !isProxyTargetClass()) {
// Rely on AOP infrastructure to tell us what interfaces to proxy.
Class targetClass = getTargetClass();
if (targetClass == null) {
throw new FactoryBeanNotInitializedException("Cannot determine target class for proxy");
}
setInterfaces(ClassUtils.getAllInterfacesForClass(targetClass));
}
// Initialize the shared singleton instance.
super.setFrozen(this.freezeProxy);
this.singletonInstance = getProxy(createAopProxy());
}
return this.singletonInstance;
}
/**
* Create a new prototype instance of this class's created proxy object,
* backed by an independent AdvisedSupport configuration.
* @return a totally independent proxy, whose advice we may manipulate in isolation
*/
private synchronized Object newPrototypeInstance() {
// In the case of a prototype, we need to give the proxy
// an independent instance of the configuration.
// In this case, no proxy will have an instance of this object's configuration,
// but will have an independent copy.
if (logger.isTraceEnabled()) {
logger.trace("Creating copy of prototype ProxyFactoryBean config: " + this);
}
ProxyCreatorSupport copy = new ProxyCreatorSupport(getAopProxyFactory());
// The copy needs a fresh advisor chain, and a fresh TargetSource.
TargetSource targetSource = freshTargetSource();
copy.copyConfigurationFrom(this, targetSource, freshAdvisorChain());
if (this.autodetectInterfaces && getProxiedInterfaces().length == 0 && !isProxyTargetClass()) {
// Rely on AOP infrastructure to tell us what interfaces to proxy.
copy.setInterfaces(ClassUtils.getAllInterfacesForClass(targetSource.getTargetClass()));
}
copy.setFrozen(this.freezeProxy);
if (logger.isTraceEnabled()) {
logger.trace("Using ProxyCreatorSupport copy: " + copy);
}
return getProxy(copy.createAopProxy());
}
/**
* Return the proxy object to expose.
* <p>The default implementation uses a <code>getProxy</code> call with
* the factory's bean class loader. Can be overridden to specify a
* custom class loader.
* @param aopProxy the prepared AopProxy instance to get the proxy from
* @return the proxy object to expose
* @see AopProxy#getProxy(ClassLoader)
*/
protected Object getProxy(AopProxy aopProxy) {
return aopProxy.getProxy(this.beanClassLoader);
}
/**
* Check the interceptorNames list whether it contains a target name as final element.
* If found, remove the final name from the list and set it as targetName.
*/
private void checkInterceptorNames() {
if (!ObjectUtils.isEmpty(this.interceptorNames)) {
String finalName = this.interceptorNames[this.interceptorNames.length - 1];
if (this.targetName == null && this.targetSource == EMPTY_TARGET_SOURCE) {
// The last name in the chain may be an Advisor/Advice or a target/TargetSource.
// Unfortunately we don't know; we must look at type of the bean.
if (!finalName.endsWith(GLOBAL_SUFFIX) && !isNamedBeanAnAdvisorOrAdvice(finalName)) {
// Must be an interceptor.
this.targetName = finalName;
if (logger.isDebugEnabled()) {
logger.debug("Bean with name '" + finalName + "' concluding interceptor chain " +
"is not an advisor class: treating it as a target or TargetSource");
}
String[] newNames = new String[this.interceptorNames.length - 1];
System.arraycopy(this.interceptorNames, 0, newNames, 0, newNames.length);
this.interceptorNames = newNames;
}
}
}
}
/**
* Look at bean factory metadata to work out whether this bean name,
* which concludes the interceptorNames list, is an Advisor or Advice,
* or may be a target.
* @param beanName bean name to check
* @return true if it's an Advisor or Advice
*/
private boolean isNamedBeanAnAdvisorOrAdvice(String beanName) {
Class namedBeanClass = this.beanFactory.getType(beanName);
if (namedBeanClass != null) {
return Advisor.class.isAssignableFrom(namedBeanClass) ||
Advice.class.isAssignableFrom(namedBeanClass);
}
// Treat it as an Advisor if we can't tell.
return true;
}
/**
* Create the advisor (interceptor) chain. Aadvisors that are sourced
* from a BeanFactory will be refreshed each time a new prototype instance
* is added. Interceptors added programmatically through the factory API
* are unaffected by such changes.
*/
private synchronized void initializeAdvisorChain() throws AopConfigException, BeansException {
if (this.advisorChainInitialized) {
return;
}
if (!ObjectUtils.isEmpty(this.interceptorNames)) {
// Globals can't be last unless we specified a targetSource using the property...
if (this.interceptorNames[this.interceptorNames.length - 1].endsWith(GLOBAL_SUFFIX) &&
this.targetName == null && this.targetSource == EMPTY_TARGET_SOURCE) {
throw new AopConfigException("Target required after globals");
}
// Materialize interceptor chain from bean names.
for (int i = 0; i < this.interceptorNames.length; i++) {
String name = this.interceptorNames[i];
if (logger.isTraceEnabled()) {
logger.trace("Configuring advisor or advice '" + name + "'");
}
if (name.endsWith(GLOBAL_SUFFIX)) {
if (!(this.beanFactory instanceof ListableBeanFactory)) {
throw new AopConfigException(
"Can only use global advisors or interceptors with a ListableBeanFactory");
}
addGlobalAdvisor((ListableBeanFactory) this.beanFactory,
name.substring(0, name.length() - GLOBAL_SUFFIX.length()));
}
else {
// If we get here, we need to add a named interceptor.
// We must check if it's a singleton or prototype.
Object advice = null;
if (this.singleton || this.beanFactory.isSingleton(this.interceptorNames[i])) {
// Add the real Advisor/Advice to the chain.
advice = this.beanFactory.getBean(this.interceptorNames[i]);
}
else {
// It's a prototype Advice or Advisor: replace with a prototype.
// Avoid unnecessary creation of prototype bean just for advisor chain initialization.
advice = new PrototypePlaceholderAdvisor(interceptorNames[i]);
}
addAdvisorOnChainCreation(advice, this.interceptorNames[i]);
}
}
}
this.advisorChainInitialized = true;
}
/**
* Return an independent advisor chain.
* We need to do this every time a new prototype instance is returned,
* to return distinct instances of prototype Advisors and Advices.
*/
private List freshAdvisorChain() {
Advisor[] advisors = getAdvisors();
List freshAdvisors = new ArrayList(advisors.length);
for (int i = 0; i < advisors.length; i++) {
if (advisors[i] instanceof PrototypePlaceholderAdvisor) {
PrototypePlaceholderAdvisor pa = (PrototypePlaceholderAdvisor) advisors[i];
if (logger.isDebugEnabled()) {
logger.debug("Refreshing bean named '" + pa.getBeanName() + "'");
}
// Replace the placeholder with a fresh prototype instance resulting
// from a getBean() lookup
Object bean = this.beanFactory.getBean(pa.getBeanName());
Advisor refreshedAdvisor = namedBeanToAdvisor(bean);
freshAdvisors.add(refreshedAdvisor);
}
else {
// Add the shared instance.
freshAdvisors.add(advisors[i]);
}
}
return freshAdvisors;
}
/**
* Add all global interceptors and pointcuts.
*/
private void addGlobalAdvisor(ListableBeanFactory beanFactory, String prefix) {
String[] globalAdvisorNames =
BeanFactoryUtils.beanNamesForTypeIncludingAncestors(beanFactory, Advisor.class);
String[] globalInterceptorNames =
BeanFactoryUtils.beanNamesForTypeIncludingAncestors(beanFactory, Interceptor.class);
List beans = new ArrayList(globalAdvisorNames.length + globalInterceptorNames.length);
Map names = new HashMap();
for (int i = 0; i < globalAdvisorNames.length; i++) {
String name = globalAdvisorNames[i];
Object bean = beanFactory.getBean(name);
beans.add(bean);
names.put(bean, name);
}
for (int i = 0; i < globalInterceptorNames.length; i++) {
String name = globalInterceptorNames[i];
Object bean = beanFactory.getBean(name);
beans.add(bean);
names.put(bean, name);
}
Collections.sort(beans, new OrderComparator());
for (Iterator it = beans.iterator(); it.hasNext();) {
Object bean = it.next();
String name = (String) names.get(bean);
if (name.startsWith(prefix)) {
addAdvisorOnChainCreation(bean, name);
}
}
}
/**
* Invoked when advice chain is created.
* <p>Add the given advice, advisor or object to the interceptor list.
* Because of these three possibilities, we can't type the signature
* more strongly.
* @param next advice, advisor or target object
* @param name bean name from which we obtained this object in our owning
* bean factory
*/
private void addAdvisorOnChainCreation(Object next, String name) {
// We need to convert to an Advisor if necessary so that our source reference
// matches what we find from superclass interceptors.
Advisor advisor = namedBeanToAdvisor(next);
if (logger.isTraceEnabled()) {
logger.trace("Adding advisor with name '" + name + "'");
}
addAdvisor((Advisor) advisor);
}
/**
* Return a TargetSource to use when creating a proxy. If the target was not
* specified at the end of the interceptorNames list, the TargetSource will be
* this class's TargetSource member. Otherwise, we get the target bean and wrap
* it in a TargetSource if necessary.
*/
private TargetSource freshTargetSource() {
if (this.targetName == null) {
if (logger.isTraceEnabled()) {
logger.trace("Not refreshing target: Bean name not specified in 'interceptorNames'.");
}
return this.targetSource;
}
else {
if (logger.isDebugEnabled()) {
logger.debug("Refreshing target with name '" + this.targetName + "'");
}
Object target = this.beanFactory.getBean(this.targetName);
return (target instanceof TargetSource ? (TargetSource) target : new SingletonTargetSource(target));
}
}
/**
* Convert the following object sourced from calling getBean() on a name in the
* interceptorNames array to an Advisor or TargetSource.
*/
private Advisor namedBeanToAdvisor(Object next) {
try {
return this.advisorAdapterRegistry.wrap(next);
}
catch (UnknownAdviceTypeException ex) {
// We expected this to be an Advisor or Advice,
// but it wasn't. This is a configuration error.
throw new AopConfigException("Unknown advisor type " + next.getClass() +
"; Can only include Advisor or Advice type beans in interceptorNames chain except for last entry," +
"which may also be target or TargetSource", ex);
}
}
/**
* Blow away and recache singleton on an advice change.
*/
protected void adviceChanged() {
super.adviceChanged();
if (this.singleton) {
logger.debug("Advice has changed; recaching singleton instance");
synchronized (this) {
this.singletonInstance = null;
}
}
}
/**
* Used in the interceptor chain where we need to replace a bean with a prototype
* on creating a proxy.
*/
private static class PrototypePlaceholderAdvisor implements Advisor {
private final String beanName;
private final String message;
public PrototypePlaceholderAdvisor(String beanName) {
this.beanName = beanName;
this.message = "Placeholder for prototype Advisor/Advice with bean name '" + beanName + "'";
}
public String getBeanName() {
return beanName;
}
public Advice getAdvice() {
throw new UnsupportedOperationException("Cannot invoke methods: " + this.message);
}
public boolean isPerInstance() {
throw new UnsupportedOperationException("Cannot invoke methods: " + this.message);
}
public String toString() {
return this.message;
}
}
}
|
src/org/springframework/aop/framework/ProxyFactoryBean.java
|
/*
* Copyright 2002-2007 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.aop.framework;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import org.aopalliance.aop.Advice;
import org.aopalliance.intercept.Interceptor;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.aop.Advisor;
import org.springframework.aop.TargetSource;
import org.springframework.aop.framework.adapter.AdvisorAdapterRegistry;
import org.springframework.aop.framework.adapter.GlobalAdvisorAdapterRegistry;
import org.springframework.aop.framework.adapter.UnknownAdviceTypeException;
import org.springframework.aop.target.SingletonTargetSource;
import org.springframework.beans.BeansException;
import org.springframework.beans.factory.BeanClassLoaderAware;
import org.springframework.beans.factory.BeanFactory;
import org.springframework.beans.factory.BeanFactoryAware;
import org.springframework.beans.factory.BeanFactoryUtils;
import org.springframework.beans.factory.FactoryBean;
import org.springframework.beans.factory.FactoryBeanNotInitializedException;
import org.springframework.beans.factory.ListableBeanFactory;
import org.springframework.core.OrderComparator;
import org.springframework.util.ClassUtils;
import org.springframework.util.ObjectUtils;
/**
* {@link org.springframework.beans.factory.FactoryBean} implementation that builds an
* AOP proxy based on beans in Spring {@link org.springframework.beans.factory.BeanFactory}.
*
* <p>{@link org.aopalliance.intercept.MethodInterceptor MethodInterceptors} and
* {@link org.springframework.aop.Advisor Advisors} are identified by a list of bean
* names in the current bean factory, specified through the "interceptorNames" property.
* The last entry in the list can be the name of a target bean or a
* {@link org.springframework.aop.TargetSource}; however, it is normally preferable
* to use the "targetName"/"target"/"targetSource" properties instead.
*
* <p>Global interceptors and advisors can be added at the factory level. The specified
* ones are expanded in an interceptor list where an "xxx*" entry is included in the
* list, matching the given prefix with the bean names (e.g. "global*" would match
* both "globalBean1" and "globalBean2", "*" all defined interceptors). The matching
* interceptors get applied according to their returned order value, if they implement
* the {@link org.springframework.core.Ordered} interface.
*
* <p>Creates a JDK proxy when proxy interfaces are given, and a CGLIB proxy for the
* actual target class if not. Note that the latter will only work if the target class
* does not have final methods, as a dynamic subclass will be created at runtime.
*
* <p>It's possible to cast a proxy obtained from this factory to {@link Advised},
* or to obtain the ProxyFactoryBean reference and programmatically manipulate it.
* This won't work for existing prototype references, which are independent. However,
* it will work for prototypes subsequently obtained from the factory. Changes to
* interception will work immediately on singletons (including existing references).
* However, to change interfaces or target it's necessary to obtain a new instance
* from the factory. This means that singleton instances obtained from the factory
* do not have the same object identity. However, they do have the same interceptors
* and target, and changing any reference will change all objects.
*
* @author Rod Johnson
* @author Juergen Hoeller
* @see #setInterceptorNames
* @see #setProxyInterfaces
* @see org.aopalliance.intercept.MethodInterceptor
* @see org.springframework.aop.Advisor
* @see Advised
*/
public class ProxyFactoryBean extends ProxyCreatorSupport
implements FactoryBean, BeanClassLoaderAware, BeanFactoryAware {
/**
* This suffix in a value in an interceptor list indicates to expand globals.
*/
public static final String GLOBAL_SUFFIX = "*";
protected final Log logger = LogFactory.getLog(getClass());
private String[] interceptorNames;
private String targetName;
private boolean autodetectInterfaces = true;
private boolean singleton = true;
private AdvisorAdapterRegistry advisorAdapterRegistry = GlobalAdvisorAdapterRegistry.getInstance();
/**
* Indicates whether the proxy should be frozen before creation.
*/
private boolean freezeProxy = false;
private ClassLoader beanClassLoader = ClassUtils.getDefaultClassLoader();
/**
* Owning bean factory, which cannot be changed after this
* object is initialized.
*/
private BeanFactory beanFactory;
/** Whether the advisor chain has already been initialized */
private boolean advisorChainInitialized = false;
/** If this is a singleton, the cached singleton proxy instance */
private Object singletonInstance;
/**
* Set the names of the interfaces we're proxying. If no interface
* is given, a CGLIB for the actual class will be created.
* <p>This is essentially equivalent to the "setInterfaces" method,
* but mirrors TransactionProxyFactoryBean's "setProxyInterfaces".
* @see #setInterfaces
* @see AbstractSingletonProxyFactoryBean#setProxyInterfaces
*/
public void setProxyInterfaces(Class[] proxyInterfaces) throws ClassNotFoundException {
setInterfaces(proxyInterfaces);
}
/**
* Set the list of Advice/Advisor bean names. This must always be set
* to use this factory bean in a bean factory.
* <p>The referenced beans should be of type Interceptor, Advisor or Advice
* The last entry in the list can be the name of any bean in the factory.
* If it's neither an Advice nor an Advisor, a new SingletonTargetSource
* is added to wrap it. Such a target bean cannot be used if the "target"
* or "targetSource" or "targetName" property is set, in which case the
* "interceptorNames" array must contain only Advice/Advisor bean names.
* @see org.aopalliance.intercept.MethodInterceptor
* @see org.springframework.aop.Advisor
* @see org.aopalliance.aop.Advice
* @see org.springframework.aop.target.SingletonTargetSource
*/
public void setInterceptorNames(String[] interceptorNames) {
this.interceptorNames = interceptorNames;
}
/**
* Set the name of the target bean. This is an alternative to specifying
* the target name at the end of the "interceptorNames" array.
* <p>You can also specify a target object or a TargetSource object
* directly, via the "target"/"targetSource" property, respectively.
* @see #setInterceptorNames(String[])
* @see #setTarget(Object)
* @see #setTargetSource(org.springframework.aop.TargetSource)
*/
public void setTargetName(String targetName) {
this.targetName = targetName;
}
/**
* Set whether to autodetect proxy interfaces if none specified.
* <p>Default is "true". Turn this flag off to create a CGLIB
* proxy for the full target class if no interfaces specified.
* @see #setProxyTargetClass
*/
public void setAutodetectInterfaces(boolean autodetectInterfaces) {
this.autodetectInterfaces = autodetectInterfaces;
}
/**
* Set the value of the singleton property. Governs whether this factory
* should always return the same proxy instance (which implies the same target)
* or whether it should return a new prototype instance, which implies that
* the target and interceptors may be new instances also, if they are obtained
* from prototype bean definitions. This allows for fine control of
* independence/uniqueness in the object graph.
*/
public void setSingleton(boolean singleton) {
this.singleton = singleton;
}
/**
* Specify the AdvisorAdapterRegistry to use.
* Default is the global AdvisorAdapterRegistry.
* @see org.springframework.aop.framework.adapter.GlobalAdvisorAdapterRegistry
*/
public void setAdvisorAdapterRegistry(AdvisorAdapterRegistry advisorAdapterRegistry) {
this.advisorAdapterRegistry = advisorAdapterRegistry;
}
public void setFrozen(boolean frozen) {
this.freezeProxy = frozen;
}
public void setBeanClassLoader(ClassLoader classLoader) {
this.beanClassLoader = classLoader;
}
public void setBeanFactory(BeanFactory beanFactory) {
this.beanFactory = beanFactory;
checkInterceptorNames();
}
/**
* Return a proxy. Invoked when clients obtain beans from this factory bean.
* Create an instance of the AOP proxy to be returned by this factory.
* The instance will be cached for a singleton, and create on each call to
* <code>getObject()</code> for a proxy.
* @return a fresh AOP proxy reflecting the current state of this factory
*/
public Object getObject() throws BeansException {
initializeAdvisorChain();
if (isSingleton()) {
return getSingletonInstance();
}
else {
if (this.targetName == null) {
logger.warn("Using non-singleton proxies with singleton targets is often undesirable." +
"Enable prototype proxies by setting the 'targetName' property.");
}
return newPrototypeInstance();
}
}
/**
* Return the type of the proxy. Will check the singleton instance if
* already created, else fall back to the proxy interface (in case of just
* a single one), the target bean type, or the TargetSource's target class.
* @see org.springframework.aop.TargetSource#getTargetClass
*/
public Class getObjectType() {
synchronized (this) {
if (this.singletonInstance != null) {
return this.singletonInstance.getClass();
}
}
Class[] ifcs = getProxiedInterfaces();
if (ifcs.length == 1) {
return ifcs[0];
}
else if (ifcs.length > 1) {
return createCompositeInterface(ifcs);
}
else if (this.targetName != null && this.beanFactory != null) {
return this.beanFactory.getType(this.targetName);
}
else {
return getTargetClass();
}
}
public boolean isSingleton() {
return this.singleton;
}
/**
* Create a composite interface Class for the given interfaces,
* implementing the given interfaces in one single Class.
* <p>The default implementation builds a JDK proxy class for the
* given interfaces.
* @param interfaces the interfaces to merge
* @return the merged interface as Class
* @see java.lang.reflect.Proxy#getProxyClass
*/
protected Class createCompositeInterface(Class[] interfaces) {
return ClassUtils.createCompositeInterface(interfaces, this.beanClassLoader);
}
/**
 * Return the singleton instance of this class's proxy object,
 * lazily creating it if it hasn't been created already.
 * <p>Guarded by {@code synchronized} so concurrent callers see a single
 * shared instance.
 * @return the shared singleton proxy
 */
private synchronized Object getSingletonInstance() {
    if (this.singletonInstance == null) {
        // Refresh the target before the first proxy creation (resolves the
        // 'targetName' bean if one was configured).
        this.targetSource = freshTargetSource();
        if (this.autodetectInterfaces && getProxiedInterfaces().length == 0 && !isProxyTargetClass()) {
            // Rely on AOP infrastructure to tell us what interfaces to proxy.
            Class targetClass = getTargetClass();
            if (targetClass == null) {
                throw new FactoryBeanNotInitializedException("Cannot determine target class for proxy at this point");
            }
            setInterfaces(ClassUtils.getAllInterfacesForClass(targetClass));
        }
        // Initialize the shared singleton instance.
        super.setFrozen(this.freezeProxy);
        this.singletonInstance = getProxy(createAopProxy());
    }
    return this.singletonInstance;
}
/**
 * Create a new prototype instance of this class's created proxy object,
 * backed by an independent AdvisedSupport configuration.
 * @return a totally independent proxy, whose advice we may manipulate in isolation
 */
private synchronized Object newPrototypeInstance() {
    // In the case of a prototype, we need to give the proxy
    // an independent instance of the configuration.
    // In this case, no proxy will have an instance of this object's configuration,
    // but will have an independent copy.
    if (logger.isTraceEnabled()) {
        logger.trace("Creating copy of prototype ProxyFactoryBean config: " + this);
    }
    ProxyCreatorSupport copy = new ProxyCreatorSupport(getAopProxyFactory());
    // The copy needs a fresh advisor chain, and a fresh TargetSource.
    TargetSource targetSource = freshTargetSource();
    copy.copyConfigurationFrom(this, targetSource, freshAdvisorChain());
    if (this.autodetectInterfaces && getProxiedInterfaces().length == 0 && !isProxyTargetClass()) {
        // Rely on AOP infrastructure to tell us what interfaces to proxy.
        copy.setInterfaces(ClassUtils.getAllInterfacesForClass(targetSource.getTargetClass()));
    }
    // Honor the freeze setting on the independent copy as well.
    copy.setFrozen(this.freezeProxy);
    if (logger.isTraceEnabled()) {
        logger.trace("Using ProxyCreatorSupport copy: " + copy);
    }
    return getProxy(copy.createAopProxy());
}
/**
 * Return the proxy object to expose.
 * <p>The default implementation uses a <code>getProxy</code> call with
 * the factory's bean class loader. Can be overridden to specify a
 * custom class loader.
 * @param aopProxy the prepared AopProxy instance to get the proxy from
 * @return the proxy object to expose
 * @see AopProxy#getProxy(ClassLoader)
 */
protected Object getProxy(AopProxy aopProxy) {
    // Single extension point for both singleton and prototype proxy creation.
    return aopProxy.getProxy(this.beanClassLoader);
}
/**
 * Check the interceptorNames list whether it contains a target name as final element.
 * If found, remove the final name from the list and set it as targetName.
 */
private void checkInterceptorNames() {
    if (!ObjectUtils.isEmpty(this.interceptorNames)) {
        String finalName = this.interceptorNames[this.interceptorNames.length - 1];
        // Only probe the last entry if no target was configured explicitly.
        if (this.targetName == null && this.targetSource == EMPTY_TARGET_SOURCE) {
            // The last name in the chain may be an Advisor/Advice or a target/TargetSource.
            // Unfortunately we don't know; we must look at type of the bean.
            if (!finalName.endsWith(GLOBAL_SUFFIX) && !isNamedBeanAnAdvisorOrAdvice(finalName)) {
                // Must be an interceptor.
                this.targetName = finalName;
                if (logger.isDebugEnabled()) {
                    logger.debug("Bean with name '" + finalName + "' concluding interceptor chain " +
                            "is not an advisor class: treating it as a target or TargetSource");
                }
                // Strip the target name off the end of the interceptor list.
                String[] newNames = new String[this.interceptorNames.length - 1];
                System.arraycopy(this.interceptorNames, 0, newNames, 0, newNames.length);
                this.interceptorNames = newNames;
            }
        }
    }
}
/**
 * Consult the bean factory's metadata to decide whether the bean with the
 * given name (which concludes the interceptorNames list) is an Advisor or
 * Advice, as opposed to a plain target bean.
 * @param beanName bean name to check
 * @return true if it's an Advisor or Advice
 */
private boolean isNamedBeanAnAdvisorOrAdvice(String beanName) {
    Class beanType = this.beanFactory.getType(beanName);
    if (beanType == null) {
        // No type metadata available: err on the side of treating it as an Advisor.
        return true;
    }
    return (Advisor.class.isAssignableFrom(beanType) || Advice.class.isAssignableFrom(beanType));
}
/**
 * Create the advisor (interceptor) chain. Advisors that are sourced
 * from a BeanFactory will be refreshed each time a new prototype instance
 * is added. Interceptors added programmatically through the factory API
 * are unaffected by such changes.
 * @throws AopConfigException in case of chain misconfiguration
 * @throws BeansException if a referenced bean cannot be obtained
 */
private synchronized void initializeAdvisorChain() throws AopConfigException, BeansException {
    if (this.advisorChainInitialized) {
        // Idempotent: the chain is only materialized once.
        return;
    }
    if (!ObjectUtils.isEmpty(this.interceptorNames)) {
        // Globals can't be last unless we specified a targetSource using the property...
        if (this.interceptorNames[this.interceptorNames.length - 1].endsWith(GLOBAL_SUFFIX) &&
                this.targetName == null && this.targetSource == EMPTY_TARGET_SOURCE) {
            throw new AopConfigException("Target required after globals");
        }
        // Materialize interceptor chain from bean names.
        for (int i = 0; i < this.interceptorNames.length; i++) {
            // Use the extracted local consistently instead of re-indexing the array.
            String name = this.interceptorNames[i];
            if (logger.isTraceEnabled()) {
                logger.trace("Configuring advisor or advice '" + name + "'");
            }
            if (name.endsWith(GLOBAL_SUFFIX)) {
                if (!(this.beanFactory instanceof ListableBeanFactory)) {
                    throw new AopConfigException(
                            "Can only use global advisors or interceptors with a ListableBeanFactory");
                }
                addGlobalAdvisor((ListableBeanFactory) this.beanFactory,
                        name.substring(0, name.length() - GLOBAL_SUFFIX.length()));
            }
            else {
                // If we get here, we need to add a named interceptor.
                // We must check if it's a singleton or prototype.
                Object advice;
                if (this.singleton || this.beanFactory.isSingleton(name)) {
                    // Add the real Advisor/Advice to the chain.
                    advice = this.beanFactory.getBean(name);
                }
                else {
                    // It's a prototype Advice or Advisor: replace with a prototype.
                    // Avoid unnecessary creation of prototype bean just for advisor chain initialization.
                    advice = new PrototypePlaceholderAdvisor(name);
                }
                addAdvisorOnChainCreation(advice, name);
            }
        }
    }
    this.advisorChainInitialized = true;
}
/**
 * Return an independent advisor chain.
 * We need to do this every time a new prototype instance is returned,
 * to return distinct instances of prototype Advisors and Advices.
 */
private List freshAdvisorChain() {
    Advisor[] currentAdvisors = getAdvisors();
    List refreshed = new ArrayList(currentAdvisors.length);
    for (int i = 0; i < currentAdvisors.length; i++) {
        Advisor advisor = currentAdvisors[i];
        if (!(advisor instanceof PrototypePlaceholderAdvisor)) {
            // Shared (singleton) advisor: reuse the same instance.
            refreshed.add(advisor);
            continue;
        }
        // Replace the placeholder with a fresh prototype instance resulting
        // from a getBean() lookup.
        PrototypePlaceholderAdvisor placeholder = (PrototypePlaceholderAdvisor) advisor;
        if (logger.isDebugEnabled()) {
            logger.debug("Refreshing bean named '" + placeholder.getBeanName() + "'");
        }
        Object freshBean = this.beanFactory.getBean(placeholder.getBeanName());
        refreshed.add(namedBeanToAdvisor(freshBean));
    }
    return refreshed;
}
/**
 * Add all global advisors and interceptors whose bean names start with the
 * given prefix, sorted according to their Ordered semantics.
 */
private void addGlobalAdvisor(ListableBeanFactory beanFactory, String prefix) {
    String[] advisorCandidates =
            BeanFactoryUtils.beanNamesForTypeIncludingAncestors(beanFactory, Advisor.class);
    String[] interceptorCandidates =
            BeanFactoryUtils.beanNamesForTypeIncludingAncestors(beanFactory, Interceptor.class);
    List beans = new ArrayList(advisorCandidates.length + interceptorCandidates.length);
    // Remember the bean name for each instance so we can filter by prefix after sorting.
    Map beanToName = new HashMap();
    for (int i = 0; i < advisorCandidates.length; i++) {
        Object bean = beanFactory.getBean(advisorCandidates[i]);
        beans.add(bean);
        beanToName.put(bean, advisorCandidates[i]);
    }
    for (int i = 0; i < interceptorCandidates.length; i++) {
        Object bean = beanFactory.getBean(interceptorCandidates[i]);
        beans.add(bean);
        beanToName.put(bean, interceptorCandidates[i]);
    }
    Collections.sort(beans, new OrderComparator());
    for (Iterator it = beans.iterator(); it.hasNext();) {
        Object bean = it.next();
        String name = (String) beanToName.get(bean);
        if (name.startsWith(prefix)) {
            addAdvisorOnChainCreation(bean, name);
        }
    }
}
/**
 * Invoked when advice chain is created.
 * <p>Add the given advice, advisor or object to the interceptor list.
 * Because of these three possibilities, we can't type the signature
 * more strongly.
 * @param next advice, advisor or target object
 * @param name bean name from which we obtained this object in our owning
 * bean factory
 */
private void addAdvisorOnChainCreation(Object next, String name) {
    // We need to convert to an Advisor if necessary so that our source reference
    // matches what we find from superclass interceptors.
    Advisor advisor = namedBeanToAdvisor(next);
    if (logger.isTraceEnabled()) {
        logger.trace("Adding advisor with name '" + name + "'");
    }
    // 'advisor' is already statically typed as Advisor: the old (Advisor) cast was redundant.
    addAdvisor(advisor);
}
/**
 * Return a TargetSource to use when creating a proxy. If the target was not
 * specified at the end of the interceptorNames list, the TargetSource will be
 * this class's TargetSource member. Otherwise, we get the target bean and wrap
 * it in a TargetSource if necessary.
 */
private TargetSource freshTargetSource() {
    if (this.targetName != null) {
        if (logger.isDebugEnabled()) {
            logger.debug("Refreshing target with name '" + this.targetName + "'");
        }
        Object target = this.beanFactory.getBean(this.targetName);
        if (target instanceof TargetSource) {
            return (TargetSource) target;
        }
        // Plain bean: wrap it so the rest of the infrastructure sees a TargetSource.
        return new SingletonTargetSource(target);
    }
    if (logger.isTraceEnabled()) {
        logger.trace("Not refreshing target: Bean name not specified in 'interceptorNames'.");
    }
    return this.targetSource;
}
/**
 * Convert the given object, sourced from calling getBean() on a name in the
 * interceptorNames array, to an Advisor (wrapping plain Advice if necessary).
 * @param next the bean to wrap
 * @return the bean as an Advisor
 * @throws AopConfigException if the bean is neither an Advisor nor an Advice
 */
private Advisor namedBeanToAdvisor(Object next) {
    try {
        return this.advisorAdapterRegistry.wrap(next);
    }
    catch (UnknownAdviceTypeException ex) {
        // We expected this to be an Advisor or Advice,
        // but it wasn't. This is a configuration error.
        // Fixed: the two message fragments previously concatenated as
        // "...last entry,which..." with no space after the comma.
        throw new AopConfigException("Unknown advisor type " + next.getClass() +
                "; Can only include Advisor or Advice type beans in interceptorNames chain except for last entry, " +
                "which may also be target or TargetSource", ex);
    }
}
/**
 * Blow away and recache singleton on an advice change.
 */
protected void adviceChanged() {
    super.adviceChanged();
    if (!this.singleton) {
        return;
    }
    logger.debug("Advice has changed; recaching singleton instance");
    // Drop the cached proxy so the next getObject() rebuilds it with the new advice.
    synchronized (this) {
        this.singletonInstance = null;
    }
}
/**
 * Used in the interceptor chain where we need to replace a bean with a prototype
 * on creating a proxy.
 */
private static class PrototypePlaceholderAdvisor implements Advisor {
    // Name of the prototype Advisor/Advice bean this placeholder stands in for.
    private final String beanName;
    // Pre-built description, reused by toString() and the exception messages below.
    private final String message;
    public PrototypePlaceholderAdvisor(String beanName) {
        this.beanName = beanName;
        this.message = "Placeholder for prototype Advisor/Advice with bean name '" + beanName + "'";
    }
    public String getBeanName() {
        return beanName;
    }
    // Placeholders must never be invoked as real advisors: fail fast if they are.
    public Advice getAdvice() {
        throw new UnsupportedOperationException("Cannot invoke methods: " + this.message);
    }
    public boolean isPerInstance() {
        throw new UnsupportedOperationException("Cannot invoke methods: " + this.message);
    }
    public String toString() {
        return this.message;
    }
}
}
|
refined exception message
git-svn-id: b619a0c99665f88f1afe72824344cefe9a1c8c90@14326 fd5a2b45-1f63-4059-99e9-3c7cb7fd75c8
|
src/org/springframework/aop/framework/ProxyFactoryBean.java
|
refined exception message
|
|
Java
|
apache-2.0
|
4757b0be1485801eb7b144b658d5b60344f4b534
| 0
|
mattwolf1234/NoFragmentTabs
|
/**
Copyright 2016 mattwolf1234
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
**/
package com.beck.matthew.customtabs;
public enum PagerEnum {
    /*
     * Place your new tabs that you create here; the title only matters if you use it.
     * Remember to place a comma between items.
     */
    TAB1("tab1", R.layout.tab1_layout),
    TAB2("tab2", R.layout.tab2_layout);

    // Both fields are assigned exactly once in the constructor;
    // 'final' makes each enum constant immutable.
    private final String mTitle;
    private final int mLayoutResId;

    PagerEnum(String title, int layoutResId) {
        mTitle = title;
        mLayoutResId = layoutResId;
    }

    /** @return the display title associated with this tab */
    public String getTitle() {
        return mTitle;
    }

    /** @return the layout resource id inflated for this tab's page */
    public int getLayoutResId() {
        return mLayoutResId;
    }

    /** @return the number of declared tabs */
    public static int length() {
        return values().length;
    }
}
|
app/src/main/java/com/beck/matthew/customtabs/PagerEnum.java
|
package com.beck.matthew.customtabs;
public enum PagerEnum {
/*
Place your new tabs that you create here, title only matters if you use it.
Remember to place a comma between items
*/
TAB1("tab1", R.layout.tab1_layout),
TAB2("tab2", R.layout.tab2_layout);
private String mTitle;
private int mLayoutResId;
PagerEnum(String title, int layoutResId) {
mTitle = title;
mLayoutResId = layoutResId;
}
public String getTitle() {
return mTitle;
}
public int getLayoutResId() {
return mLayoutResId;
}
public static int length() {
return values().length;
}
}
|
Update PagerEnum.java
|
app/src/main/java/com/beck/matthew/customtabs/PagerEnum.java
|
Update PagerEnum.java
|
|
Java
|
apache-2.0
|
7a2bfe5931005217ca93f6c134203a8e8addc998
| 0
|
vshkl/TransLate2
|
package by.vshkl.translate2.util;
import android.content.Context;
import android.support.v4.content.ContextCompat;
import android.text.InputType;
import com.afollestad.materialdialogs.MaterialDialog;
import by.vshkl.translate2.R;
import by.vshkl.translate2.ui.StopBookmarkListener;
import permissions.dispatcher.PermissionRequest;
public class DialogUtils {

    /**
     * Shows a rationale dialog explaining why the location permission is needed,
     * proceeding with or cancelling the permission request based on the user's choice.
     */
    public static void showLocationRationaleDialog(final Context context, final PermissionRequest request) {
        new MaterialDialog.Builder(context)
                // NOTE(review): content() reuses the title resource - confirm whether a
                // dedicated *_message string was intended.
                .title(R.string.map_permission_rationale_title)
                .content(R.string.map_permission_rationale_title)
                .positiveText(R.string.map_location_ok)
                // Fixed: positiveText() was called twice; the second call overwrote the
                // first, so the dialog had no cancel button and onNegative never fired.
                .negativeText(R.string.map_location_cancel)
                .onPositive((dialog, which) -> request.proceed())
                .onNegative(((dialog, which) -> request.cancel()))
                .show();
    }

    /**
     * Prompts the user to enable location services, navigating to the system
     * location settings on confirmation.
     */
    public static void showLocationTurnOnDialog(final Context context) {
        new MaterialDialog.Builder(context)
                .title(R.string.map_location_message)
                .positiveText(R.string.map_location_ok)
                // Fixed: duplicated positiveText() call replaced with negativeText(),
                // restoring the cancel button (dismisses the dialog by default).
                .negativeText(R.string.map_location_cancel)
                .onPositive((dialog, which) -> Navigation.navigateToLocationSettings(context))
                .show();
    }

    /**
     * Shows the bookmark actions list (edit / delete) and dispatches the chosen
     * action to the listener.
     */
    public static void showBookmarkActionsDialog(final Context context, final StopBookmarkListener listener) {
        new MaterialDialog.Builder(context)
                .items(R.array.bookmarks_dialog_actions)
                .itemsColorRes(R.color.colorPrimaryText)
                .itemsCallback((dialog, itemView, position, text) -> {
                    switch (position) {
                        case 0:
                            listener.onEditBookmark();
                            break;
                        case 1:
                            listener.onDeleteBookmark();
                            break;
                    }
                })
                .show();
    }

    /**
     * Asks the user to confirm deletion of a bookmark before notifying the listener.
     */
    public static void showBookmarkDeleteConfirmationDialog(final Context context, final StopBookmarkListener listener) {
        new MaterialDialog.Builder(context)
                .title(R.string.bookmark_delete_message)
                .positiveText(R.string.bookmark_delete_ok)
                .negativeText(R.string.bookmark_delete_cancel)
                .onPositive((dialog, which) -> listener.onDeleteConfirmed())
                .show();
    }

    /**
     * Shows an input dialog for renaming a bookmark, pre-filled with the current
     * stop name.
     * <p>Method name keeps the historical "shoe" typo (should be "show") for
     * compatibility with existing callers.
     */
    public static void shoeBookmarkRenameDialog(final Context context, final String stopName,
                                                final StopBookmarkListener listener) {
        new MaterialDialog.Builder(context)
                .title(R.string.bookmark_dialog_rename_title)
                .positiveText(R.string.bookmark_dialog_rename_ok)
                .negativeText(R.string.bookmark_dialog_rename_cancel)
                .inputType(InputType.TYPE_CLASS_TEXT)
                .input(context.getString(R.string.bookmark_dialog_rename_hint), stopName, false,
                        (dialog, input) -> {
                        })
                .onPositive(
                        (dialog, which) -> listener.OnRenameConfirmed(dialog.getInputEditText().getText().toString()))
                .show();
    }
}
|
app/src/main/java/by/vshkl/translate2/util/DialogUtils.java
|
package by.vshkl.translate2.util;
import android.content.Context;
import android.text.InputType;
import com.afollestad.materialdialogs.MaterialDialog;
import by.vshkl.translate2.R;
import by.vshkl.translate2.ui.StopBookmarkListener;
import permissions.dispatcher.PermissionRequest;
public class DialogUtils {
public static void showLocationRationaleDialog(final Context context, final PermissionRequest request) {
new MaterialDialog.Builder(context)
.title(R.string.map_permission_rationale_title)
.content(R.string.map_permission_rationale_title)
.positiveText(R.string.map_location_ok)
.positiveText(R.string.map_location_cancel)
.onPositive((dialog, which) -> request.proceed())
.onNegative(((dialog, which) -> request.cancel()))
.show();
}
public static void showLocationTurnOnDialog(final Context context) {
new MaterialDialog.Builder(context)
.title(R.string.map_location_message)
.positiveText(R.string.map_location_ok)
.positiveText(R.string.map_location_cancel)
.onPositive((dialog, which) -> Navigation.navigateToLocationSettings(context))
.show();
}
public static void showBookmarkActionsDialog(final Context context, final StopBookmarkListener listener) {
new MaterialDialog.Builder(context)
.items(R.array.bookmarks_dialog_actions)
.itemsCallback((dialog, itemView, position, text) -> {
switch (position) {
case 0:
listener.onEditBookmark();
break;
case 1:
listener.onDeleteBookmark();
break;
}
})
.show();
}
public static void showBookmarkDeleteConfirmationDialog(final Context context, final StopBookmarkListener listener) {
new MaterialDialog.Builder(context)
.title(R.string.bookmark_delete_message)
.positiveText(R.string.bookmark_delete_ok)
.negativeText(R.string.bookmark_delete_cancel)
.onPositive((dialog, which) -> listener.onDeleteConfirmed())
.show();
}
public static void shoeBookmarkRenameDialog(final Context context, final String stopName,
final StopBookmarkListener listener) {
new MaterialDialog.Builder(context)
.title(R.string.bookmark_dialog_rename_title)
.positiveText(R.string.bookmark_dialog_rename_ok)
.negativeText(R.string.bookmark_dialog_rename_cancel)
.inputType(InputType.TYPE_CLASS_TEXT)
.input(context.getString(R.string.bookmark_dialog_rename_hint), stopName, false,
(dialog, input) -> {
})
.onPositive(
(dialog, which) -> listener.OnRenameConfirmed(dialog.getInputEditText().getText().toString()))
.show();
}
}
|
Change stop bookmark actions dialog text color
|
app/src/main/java/by/vshkl/translate2/util/DialogUtils.java
|
Change stop bookmark actions dialog text color
|
|
Java
|
apache-2.0
|
086c843113c5e06fde33a8485ec9c105d8320c86
| 0
|
MaximSysoev/sysoevm,MaximSysoev/sysoevm
|
package ru.job4j.inputouput;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;
public class Args {

    /**
     * Zips every regular file in {@code C:\projects\sysoevm\} into
     * {@code project.zip} in the same directory.
     *
     * <p>Fixes over the previous version:
     * <ul>
     * <li>Removed the second {@code FileOutputStream} that was opened on the
     * same {@code project.zip} path: it truncated the archive while the
     * {@code ZipOutputStream} was still writing to it.</li>
     * <li>Each {@code FileInputStream} is now closed via try-with-resources
     * even if reading fails.</li>
     * <li>{@code listFiles()} is called once instead of three times per
     * iteration, and its possibly-null result is checked.</li>
     * <li>Directories are skipped ({@code FileInputStream} on a directory
     * would throw).</li>
     * </ul>
     */
    public static void main(String[] args) {
        File dir = new File("C:\\projects\\sysoevm\\");
        byte[] buffer = new byte[1024];
        try (ZipOutputStream zout = new ZipOutputStream(
                new FileOutputStream("C:\\projects\\sysoevm\\project.zip"))) {
            File[] files = dir.listFiles();
            if (files == null) {
                // Not a directory, or an I/O error occurred while listing.
                return;
            }
            for (File f : files) {
                if (f.isDirectory()) {
                    continue;
                }
                // NOTE(review): project.zip itself lives inside this directory and may
                // appear in the listing - confirm whether it should be excluded.
                try (FileInputStream fis = new FileInputStream(f)) {
                    zout.putNextEntry(new ZipEntry(f.getName()));
                    int length;
                    while ((length = fis.read(buffer)) > 0) {
                        zout.write(buffer, 0, length);
                    }
                    zout.closeEntry();
                }
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
|
chapter_004/src/main/java/ru/job4j/InputOuput/Args.java
|
package ru.job4j.inputouput;
public class Args {
}
|
task #861
|
chapter_004/src/main/java/ru/job4j/InputOuput/Args.java
|
task #861
|
|
Java
|
apache-2.0
|
6995a533d4b2ab8dc8dc2c05ffb132caa321950b
| 0
|
lpy19930103/AndroidPedometer
|
package com.lipy.step.pedometer;
import com.lipy.step.common.BaseApplication;
import com.lipy.step.common.PedometerEvent;
import com.lipy.step.dao.PedometerEntity;
import com.lipy.step.dao.PedometerEntityDao;
import com.lipy.step.result.IGetPedometerResult;
import com.lipy.step.result.PedometerUpDateListener;
import com.lipy.step.utils.TimeUtil;
import android.content.Context;
import android.hardware.Sensor;
import android.hardware.SensorEvent;
import android.hardware.SensorEventListener;
import android.text.TextUtils;
import android.util.Log;
import java.util.List;
/**
 * Pedometer implementation backed by the hardware TYPE_STEP_COUNTER sensor.
 * Persists per-day step records via {@code PedometerEntityDao} and notifies a
 * listener on every sensor update.
 * Created by lipy on 2017/4/10 0010.
 */
public class PedometerRepositoryIml implements SensorEventListener, PedometerRepository {
    private Context mContext;
    // Initial step count recorded each day when the app is opened; used by the
    // count-sensor based algorithm.
    private static int FIRST_STEP_COUNT = 0;
    // Total steps counted for the current day.
    private static int CURRENT_STEP = 0;
    private PedometerUpDateListener mPedometerUpDateListener;
    // Today's persisted record; reused across sensor events within the same day.
    private PedometerEntity mTodayStepEntity;
    private static int TODAY_ENTITY_STEPS;
    private List<PedometerEntity> mPedometerEntities;
    private PedometerEntityDao mPedometerEntityDao;
    private PedometerEntity mYesterdayPedometerEntity;
    // Raw cumulative step count reported by the sensor (steps since device boot).
    private int allStep = 0;
    // Sensor value from the previous onSensorChanged call; used to compute deltas.
    private int next = 0;
    // NOTE(review): nextB is never read or written anywhere in this class - dead field?
    private boolean nextB = true;
    public PedometerRepositoryIml() {
        mContext = BaseApplication.getAppContext();
        Log.e("lipy", "mContext:" + mContext);
    }
    /**
     * Reset the static sensor counters.
     */
    public void initData() {
        FIRST_STEP_COUNT = 0;
        CURRENT_STEP = 0;
        TODAY_ENTITY_STEPS = 0;
    }
    public void setPedometerUpDateListener(PedometerUpDateListener upDateListener) {
        mPedometerUpDateListener = upDateListener;
    }
    /**
     * Handles a step-counter sensor event: lazily opens the DAO, resolves or
     * creates today's record, corrects the cumulative count across device
     * reboots, persists the record and notifies the listener.
     */
    @Override
    public void onSensorChanged(SensorEvent event) {
        Sensor sensor = event.sensor;
        if (sensor == null) {
            return;
        }
        synchronized (this) {
            if (mPedometerEntityDao == null) {
                // Lazily initialize the database/DAO on the first event.
                BaseApplication.getInstances().setDatabase(mContext);
                mPedometerEntityDao = BaseApplication.getInstances().getDaoSession().getPedometerEntityDao();
            }
            if (sensor.getType() == Sensor.TYPE_STEP_COUNTER) {
                // Cumulative steps since boot; delta vs. the previous event gives
                // the steps taken since the last callback.
                allStep = (int) event.values[0];
                CURRENT_STEP = allStep - next;
                Log.e("lipy", "**************************Date =" + TimeUtil.getStringDateShort());
                Log.e("lipy", "FIRST_STEP_COUNT = " + allStep);
                Log.e("lipy", "next = " + next);
                Log.e("lipy", "CURRENT_STEP = " + CURRENT_STEP);
                mPedometerEntities = mPedometerEntityDao.loadAll();
                if (mPedometerEntities != null && mPedometerEntities.size() > 0) {
                    PedometerEntity pedometerEntity = mPedometerEntities.get(mPedometerEntities.size() - 1);
                    String date = pedometerEntity.getDate();
                    if (!TextUtils.isEmpty(date)) {
                        if (TimeUtil.IsToday(date)) {
                            // Still the same day: reuse today's entity.
                            mTodayStepEntity = pedometerEntity;
                            Log.e("lipy", "还是今天 复用今天的Entity");
                        } else if (TimeUtil.IsYesterday(date) && mPedometerEntities.size() > 1) {
                            // Latest record is from yesterday: create a new entity for today.
                            Log.e("lipy", "是昨天 创建新的Entity ");
                            if (allStep > mPedometerEntities.get(mPedometerEntities.size() - 2).getTotalSteps()) {
                                mTodayStepEntity = new PedometerEntity(null, TimeUtil.getStringDateShort(), 0, allStep, 0, false);
                            } else {
                                // Sensor count lower than the stored total: device likely
                                // rebooted, so carry the previous total forward.
                                mTodayStepEntity = new PedometerEntity(null, TimeUtil.getStringDateShort(), 0, allStep +
                                        mPedometerEntities.get(mPedometerEntities.size() - 2).getTotalSteps(), 0, false);
                            }
                            mPedometerEntityDao.insert(mTodayStepEntity);
                        }
                    }
                } else {
                    // First install: seed the table with two entities.
                    mPedometerEntityDao.insert(new PedometerEntity(null, TimeUtil.getStringDateShort(), 0, allStep, 0, false));
                    mTodayStepEntity = new PedometerEntity(null, TimeUtil.getStringDateShort(), 0, allStep, 0, false);
                    mPedometerEntityDao.insert(mTodayStepEntity);
                    Log.e("lipy", "第一次安装创建2个Entity= " + mPedometerEntityDao.loadAll().size());
                }
                mPedometerEntities = mPedometerEntityDao.loadAll();
                if (mPedometerEntities.size() > 1 && !TimeUtil.IsYesterday(mTodayStepEntity.getDate())) {
                    if (mYesterdayPedometerEntity == null) {
                        mYesterdayPedometerEntity = mPedometerEntities.get(mPedometerEntities.size() - 2);
                    }
                    int dailyStep;
                    // Current sensor count < yesterday's recorded total: the device was rebooted.
                    if (allStep < mYesterdayPedometerEntity.getTotalSteps()) {
                        mTodayStepEntity.setRestart(true);
                        // Correct the running total by carrying yesterday's total forward.
                        allStep += mYesterdayPedometerEntity.getTotalSteps();
                        Log.e("lipy", "纠正总步数1 = " + allStep);
                        if (mTodayStepEntity.getTotalSteps() > allStep) {
                            if (next != 0 || CURRENT_STEP != 0) {
                                allStep = mTodayStepEntity.getTotalSteps() + CURRENT_STEP;
                            } else {
                                allStep += mTodayStepEntity.getTotalSteps();
                            }
                            Log.e("lipy", "纠正总步数2 = " + allStep);
                        }
                    } else if (mTodayStepEntity.getRestart()) {
                        // Entity is already flagged as restarted: keep accumulating on top
                        // of the stored total rather than the raw sensor value.
                        if (next != 0 || CURRENT_STEP != 0) {
                            allStep = mTodayStepEntity.getTotalSteps() + CURRENT_STEP;
                        } else {
                            allStep += mTodayStepEntity.getTotalSteps();
                        }
                        Log.e("lipy", "关机状态 = " + mTodayStepEntity.getRestart());
                    }
                    // Normal counting: today's steps = corrected total - yesterday's total.
                    dailyStep = allStep - mYesterdayPedometerEntity.getTotalSteps();
                    mTodayStepEntity.setTotalSteps(allStep);
                    mTodayStepEntity.setDailyStep(dailyStep);
                    Log.e("lipy", "当日步数 = " + mTodayStepEntity.getDailyStep());
                }
            }
            mPedometerEntityDao.update(mTodayStepEntity);
            if (mPedometerUpDateListener != null) {
                mPedometerUpDateListener.PedometerUpDate(mTodayStepEntity);
            }
            Log.e("lipy", "昨日总步数 " + mYesterdayPedometerEntity.getTotalSteps());
            Log.e("lipy", "总步数 " + mTodayStepEntity.getTotalSteps());
            Log.e("lipy", "统计天数 " + mPedometerEntityDao.loadAll().size());
            // Remember the raw sensor value for the next delta computation.
            next = (int) event.values[0];
        }
    }
    @Override
    public void onAccuracyChanged(Sensor sensor, int accuracy) {
    }
    /**
     * Update today's step count and broadcast a refresh event.
     * NOTE(review): appears unused within this class - confirm external callers.
     */
    private void showSteps(int steps) {
        if (steps < 0) {
            return;
        }
        steps = steps + TODAY_ENTITY_STEPS;
        mTodayStepEntity.setDailyStep(steps);
        PedometerEvent event = new PedometerEvent();
        event.mIsUpdate = true;
        BaseApplication.postEvent(event);
    }
    @Override
    public void getPedometerStep(IGetPedometerResult result) {
        result.onSuccessGet(mTodayStepEntity);
    }
    /**
     * Persist a reset tag step and clear transient state on teardown.
     */
    public void onDestroy() {
        mTodayStepEntity.setTagStep(0);
        mPedometerEntityDao.update(mTodayStepEntity);
        FIRST_STEP_COUNT = 0;
        CURRENT_STEP = 0;
        mTodayStepEntity = null;
    }
}
|
app/src/main/java/com/lipy/step/pedometer/PedometerRepositoryIml.java
|
package com.lipy.step.pedometer;
import com.lipy.step.common.BaseApplication;
import com.lipy.step.common.PedometerEvent;
import com.lipy.step.dao.PedometerEntity;
import com.lipy.step.dao.PedometerEntityDao;
import com.lipy.step.result.IGetPedometerResult;
import com.lipy.step.result.PedometerUpDateListener;
import com.lipy.step.utils.TimeUtil;
import android.content.Context;
import android.hardware.Sensor;
import android.hardware.SensorEvent;
import android.hardware.SensorEventListener;
import android.text.TextUtils;
import android.util.Log;
import java.util.List;
/**
* 计步器实现类
* Created by lipy on 2017/4/10 0010.
*/
public class PedometerRepositoryIml implements SensorEventListener, PedometerRepository {
private Context mContext;
private static int FIRST_STEP_COUNT = 0; //每天打开软件记录的初始步数,用于带count传感器算法的方式
private static int CURRENT_STEP = 0; //当天的记步数总数
private PedometerUpDateListener mPedometerUpDateListener;
private PedometerEntity mTodayStepEntity;
private static int TODAY_ENTITY_STEPS;
private List<PedometerEntity> mPedometerEntities;
private PedometerEntityDao mPedometerEntityDao;
private PedometerEntity mYesterdayPedometerEntity;
private int allStep = 0;
public PedometerRepositoryIml() {
mContext = BaseApplication.getAppContext();
Log.e("lipy", "mContext:" + mContext);
}
/**
* 初始化传感器相关数据
*/
public void initData() {
FIRST_STEP_COUNT = 0;
CURRENT_STEP = 0;
TODAY_ENTITY_STEPS = 0;
}
public void setPedometerUpDateListener(PedometerUpDateListener upDateListener) {
mPedometerUpDateListener = upDateListener;
}
@Override
public void onSensorChanged(SensorEvent event) {
Sensor sensor = event.sensor;
if (sensor == null) {
return;
}
synchronized (this) {
if (mPedometerEntityDao == null) {
BaseApplication.getInstances().setDatabase(mContext);
mPedometerEntityDao = BaseApplication.getInstances().getDaoSession().getPedometerEntityDao();
}
if (sensor.getType() == Sensor.TYPE_STEP_COUNTER) {
allStep = (int) event.values[0];
Log.e("lipy", "**************************Date =" + TimeUtil.getStringDateShort());
Log.e("lipy", "FIRST_STEP_COUNT = " + allStep);
mPedometerEntities = mPedometerEntityDao.loadAll();
if (mPedometerEntities != null && mPedometerEntities.size() > 0) {
PedometerEntity pedometerEntity = mPedometerEntities.get(mPedometerEntities.size() - 1);
String date = pedometerEntity.getDate();
if (!TextUtils.isEmpty(date)) {
if (TimeUtil.IsToday(date)) {
mTodayStepEntity = pedometerEntity;
Log.e("lipy", "还是今天 复用今天的Entity");
} else if (TimeUtil.IsYesterday(date) && mPedometerEntities.size() > 1) {
Log.e("lipy", "是昨天 创建新的Entity ");
if (allStep > mPedometerEntities.get(mPedometerEntities.size() - 2).getTotalSteps()) {
mTodayStepEntity = new PedometerEntity(null, TimeUtil.getStringDateShort(), 0, allStep, 0, false);
} else {
mTodayStepEntity = new PedometerEntity(null, TimeUtil.getStringDateShort(), 0, allStep +
mPedometerEntities.get(mPedometerEntities.size() - 2).getTotalSteps(), 0, false);
}
mPedometerEntityDao.insert(mTodayStepEntity);
}
}
} else {
mPedometerEntityDao.insert(new PedometerEntity(null, TimeUtil.getStringDateShort(), 0, allStep, 0, false));
mTodayStepEntity = new PedometerEntity(null, TimeUtil.getStringDateShort(), 0, allStep, 0, false);
mPedometerEntityDao.insert(mTodayStepEntity);
Log.e("lipy", "第一次安装创建2个Entity= " + mPedometerEntityDao.loadAll().size());
}
mPedometerEntities = mPedometerEntityDao.loadAll();
if (mPedometerEntities.size() > 1 && !TimeUtil.IsYesterday(mTodayStepEntity.getDate())) {
if (mYesterdayPedometerEntity == null) {
mYesterdayPedometerEntity = mPedometerEntities.get(mPedometerEntities.size() - 2);
}
int dailyStep;
if (allStep < mYesterdayPedometerEntity.getTotalSteps()) {//当前系统步数 < 昨日记录总步数 判断为重启手机
mTodayStepEntity.setRestart(true);
allStep += mYesterdayPedometerEntity.getTotalSteps();//纠正总步数
Log.e("lipy", "纠正总步数1 = " + allStep);
if (mTodayStepEntity.getTotalSteps() > allStep) {
allStep += mTodayStepEntity.getTotalSteps();
if (mTodayStepEntity.getTagStep() == 0) {
mTodayStepEntity.setTagStep((int) event.values[0]);
} else {
allStep = allStep - mTodayStepEntity.getTagStep();
}
Log.e("lipy", "纠正总步数2 = " + allStep);
}
} else if (mTodayStepEntity.getRestart()) {//当前是重启状态
if (mTodayStepEntity.getTagStep() == 0) {
mTodayStepEntity.setTagStep((int) event.values[0]);
} else {
allStep = allStep - mTodayStepEntity.getTagStep();
}
allStep = mTodayStepEntity.getDailyStep() + mYesterdayPedometerEntity.getTotalSteps();
Log.e("lipy", "关机状态 = " + "0".equals(mTodayStepEntity.getRestart()));
}
dailyStep = allStep - mYesterdayPedometerEntity.getTotalSteps();//正常记步
mTodayStepEntity.setTotalSteps(allStep);
mTodayStepEntity.setDailyStep(dailyStep);
Log.e("lipy", "当日步数 = " + mTodayStepEntity.getDailyStep());
}
}
mPedometerEntityDao.update(mTodayStepEntity);
if (mPedometerUpDateListener != null) {
mPedometerUpDateListener.PedometerUpDate(mTodayStepEntity);
}
Log.e("lipy", "昨日总步数 " + mYesterdayPedometerEntity.getTotalSteps());
Log.e("lipy", "总步数 " + mTodayStepEntity.getTotalSteps());
Log.e("lipy", "统计天数 " + mPedometerEntityDao.loadAll().size());
}
}
@Override
public void onAccuracyChanged(Sensor sensor, int accuracy) {
}
/**
* 获取行走步数
*/
private void showSteps(int steps) {
if (steps < 0) {
return;
}
steps = steps + TODAY_ENTITY_STEPS;
// Log.i("lipy", "<PedometerRepositoryIml> showSteps TODAY_ENTITY_STEPS= " + TODAY_ENTITY_STEPS + " steps = " + steps);
// mTodayStepEntity.setDailyStep(steps);
PedometerEvent event = new PedometerEvent();
event.mIsUpdate = true;
BaseApplication.postEvent(event);
}
@Override
public void getPedometerStep(IGetPedometerResult result) {
result.onSuccessGet(mTodayStepEntity);
}
public void onDestroy() {
mTodayStepEntity.setTagStep(0);
mPedometerEntityDao.update(mTodayStepEntity);
FIRST_STEP_COUNT = 0;
mTodayStepEntity = null;
}
}
|
修改储存逻辑
|
app/src/main/java/com/lipy/step/pedometer/PedometerRepositoryIml.java
|
修改储存逻辑
|
|
Java
|
apache-2.0
|
1a7712e003114d15aafeac7c60595764c8c8a3ae
| 0
|
OpenHFT/Chronicle-Queue,OpenHFT/Chronicle-Queue
|
/*
* Copyright 2016-2020 chronicle.software
*
* https://chronicle.software
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.openhft.chronicle.queue;
import net.openhft.chronicle.core.io.IORuntimeException;
import net.openhft.chronicle.wire.DocumentContext;
import net.openhft.chronicle.wire.MarshallableIn;
import net.openhft.chronicle.wire.ReadMarshallable;
import net.openhft.chronicle.wire.SourceContext;
import org.jetbrains.annotations.NotNull;
/**
* The component that facilitates sequentially reading data from a {@link ChronicleQueue}.
*
* <p><b>NOTE:</b> Tailers are NOT thread-safe, sharing a Tailer between threads will lead to errors and unpredictable behaviour.</p>
*/
public interface ExcerptTailer extends ExcerptCommon<ExcerptTailer>, MarshallableIn, SourceContext {
    /**
     * Returns the {@link DocumentContext} for this ExcerptTailer.
     * <p>
     * This is equivalent to {@link ExcerptTailer#readDocument(ReadMarshallable)} but without the use of a
     * lambda expression.
     * <p>
     * This method is the ExcerptTailer equivalent of {@link net.openhft.chronicle.wire.WireIn#readingDocument()}
     *
     * @return the document context
     */
    @Override
    @NotNull
    default DocumentContext readingDocument() {
        return readingDocument(false);
    }
    /**
     * Returns the {@link DocumentContext} for this ExcerptTailer.
     * <p>
     * This is equivalent to {@link ExcerptTailer#readDocument(ReadMarshallable)} but without the use of a
     * lambda expression.
     * <p>
     * This method is the ExcerptTailer equivalent of {@link net.openhft.chronicle.wire.WireIn#readingDocument()}
     *
     * @param includeMetaData if the DocumentContext shall be meta data aware.
     *
     * @return the document context
     */
    @NotNull
    DocumentContext readingDocument(boolean includeMetaData);
    /**
     * Returns {@code true} if {@link #readingDocument()} would return a document which is present, in other words would return a DocumentContext where {@link net.openhft.chronicle.wire.DocumentContext#isPresent()} == {@code true}
     *
     * For the best performance you should only use this method once a message has been found by toStart() or readingDocument(). Otherwise {@link #readingDocument()} is called and then rolled back, which is not optimal.
     * For the same reason, this method should also be avoided when reading documents backwards.
     *
     * WARNING : THIS METHOD CANNOT BE RELIED UPON, IT WILL SOMETIMES RETURN FALSE WHEN THERE ARE MESSAGES,
     * YOU SHOULD ONLY CALL IT PERIODICALLY AND NOT RELY TOO MUCH UPON ITS RESULTS; INSTEAD CALL readingDocument()! ( FOR THIS REASON - WE HAVE DECIDED TO DEPRECATE IT ! )
     *
     * @return if {@link #readingDocument()} would return a DocumentContext
     * that provides excerpts to read.
     */
    @Deprecated(/* remove in x.23* and make internal to StoreTailer as it is used by chronicle services - removed because sometimes it will report false when there are messages*/)
    @Override
    default boolean peekDocument() {
        return true;
    }
    /**
     * Returns the current index of this Tailer.
     * <p>
     * If this method is invoked within a {@code try (tailer.readingDocument(){ }} block, returns the index of
     * the current reading document. Otherwise, returns the next index to read.
     * <p>
     * The index includes the cycle and the sequence number within that cycle.
     *
     * @return the current index of this Tailer
     *
     */
    @Override
    long index();
    /**
     * Returns the current cycle for this Tailer.
     * <p>
     * Usually, each cycle will have its own unique data file to store excerpts.
     *
     * @return Returns the current cycle for this Tailer
     */
    int cycle();
    /**
     * Tries to move the index for this Tailer to the provided {@code index}.
     * <p>
     * The index contains both the cycle number and sequence number within the cycle.
     * <p>
     * In order for the operation to succeed, the roll file, corresponding to
     * the cycle number in the index, must be present and the roll file must
     * contain the sequence number contained in the index.
     * <p>
     * If the index is not a valid index, the operation is undefined.
     *
     * @param index index to move to.
     * @return if this is a valid index.
     */
    boolean moveToIndex(long index);
    /**
     * Moves the index for this Tailer to the first existing excerpt in the queue.
     *
     * @return this ExcerptTailer
     */
    @NotNull
    ExcerptTailer toStart();
    /**
     * Moves the index for this Tailer to the end of the queue.
     * <p>
     * If the direction() == FORWARD, this will be the index position corresponding to one more
     * than the last entry. Otherwise, the index will be the last excerpt.
     * <p>
     * This is not atomic with the appenders, in other words if a cycle has been added in the
     * current millisecond, toEnd() may not see it. This is because for performance reasons, the
     * queue.lastCycle() is cached, as finding the last cycle is expensive, it requires asking the
     * directory for the Files.list() so, this cache is only refreshed if the call toEnd() is in a
     * new millisecond. Hence a whole millisecond's worth of data could be added to the
     * chronicle-queue that toEnd() won’t see. Appenders that are using the same queue
     * instance (and within the same JVM) can be informed that the last cycle has
     * changed; this will yield better results, but atomicity can still not be guaranteed.
     *
     * @return this ExcerptTailer
     */
    @NotNull
    ExcerptTailer toEnd();
    /**
     * Sets the {@code striding} property of this Tailer.
     * <p>
     * When striding is enabled AND direction is BACKWARD, skip to the entries easiest to find, doesn't need to be every entry.
     *
     * @param striding skip to the indexStride if that is easy, doesn't always happen.
     *
     * @return this ExcerptTailer
     */
    ExcerptTailer striding(boolean striding);
    /**
     * Returns the striding property of this Tailer.
     *
     * @return the striding property of this Tailer
     * @see #striding(boolean)
     */
    boolean striding();
    /**
     * Sets the direction of this ExcerptTailer.
     * <p>
     * The direction determines the direction of movement upon reading an excerpt.
     *
     * @param direction which is either of NONE, FORWARD, BACKWARD
     * @return this ExcerptTailer
     * @throws NullPointerException if the provided {@code direction} is {@code null}
     */
    @NotNull
    ExcerptTailer direction(@NotNull TailerDirection direction);
    /**
     * Returns the direction of this ExcerptTailer.
     * <p>
     * The direction determines the direction of movement upon reading an excerpt.
     *
     * @return the direction of this ExcerptTailer
     */
    TailerDirection direction();
    /**
     * Winds this ExcerptTailer to after the last entry which wrote an entry to the queue.
     *
     * @param queue which was written to.
     * @return this ExcerptTailer
     *
     * @throws IORuntimeException if the provided {@code queue} couldn't be wound to the last index.
     * @throws NullPointerException if the provided {@code queue} is {@code null}
     */
    @NotNull
    ExcerptTailer afterLastWritten(ChronicleQueue queue);
    /**
     * Sets the Read After Replica Acknowledged property of this Tailer to the
     * provided {@code readAfterReplicaAcknowledged}.
     * <p>
     * Enterprise Queue only: if replication enabled, setting this to true on a source queue ensures that
     * this tailer will not read until at least one of the sinks has acknowledged receipt of the excerpt.
     * This will block forever if no sinks acknowledge receipt.
     *
     * @param readAfterReplicaAcknowledged enable
     */
    default void readAfterReplicaAcknowledged(boolean readAfterReplicaAcknowledged) {
    }
    /**
     * Returns the Read After Replica Acknowledged property of this Tailer.
     * <p>
     * Enterprise Queue only: if replication enabled, setting this to true on a source queue ensures that
     * this tailer will not read until at least one of the sinks has acknowledged receipt of the excerpt.
     * This will block forever if no sinks acknowledge receipt.
     *
     * @return the Read After Replica Acknowledged property of this Tailer
     */
    default boolean readAfterReplicaAcknowledged() {
        return false;
    }
    /**
     * Returns the {@link TailerState} of this Tailer.
     *
     * @return the {@link TailerState} of this Tailer
     */
    @NotNull
    TailerState state();
    // Need to add this here until we can release Ring. https://github.com/ChronicleEnterprise/Chronicle-Ring/issues/12
    @Override
    default @NotNull ExcerptTailer disableThreadSafetyCheck(boolean disableThreadSafetyCheck) {
        return this;
    }
}
|
src/main/java/net/openhft/chronicle/queue/ExcerptTailer.java
|
/*
* Copyright 2016-2020 chronicle.software
*
* https://chronicle.software
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.openhft.chronicle.queue;
import net.openhft.chronicle.core.io.IORuntimeException;
import net.openhft.chronicle.wire.DocumentContext;
import net.openhft.chronicle.wire.MarshallableIn;
import net.openhft.chronicle.wire.ReadMarshallable;
import net.openhft.chronicle.wire.SourceContext;
import org.jetbrains.annotations.NotNull;
/**
* The component that facilitates sequentially reading data from a {@link ChronicleQueue}.
*
* <p><b>NOTE:</b> Tailers are NOT thread-safe, sharing a Tailer between threads will lead to errors and unpredictable behaviour.</p>
*/
public interface ExcerptTailer extends ExcerptCommon<ExcerptTailer>, MarshallableIn, SourceContext {
    /**
     * Returns the {@link DocumentContext} for this ExcerptTailer.
     * <p>
     * This is equivalent to {@link ExcerptTailer#readDocument(ReadMarshallable)} but without the use of a
     * lambda expression.
     * <p>
     * This method is the ExcerptTailer equivalent of {@link net.openhft.chronicle.wire.WireIn#readingDocument()}
     *
     * @return the document context
     */
    @Override
    @NotNull
    default DocumentContext readingDocument() {
        return readingDocument(false);
    }
    /**
     * Returns the {@link DocumentContext} for this ExcerptTailer.
     * <p>
     * This is equivalent to {@link ExcerptTailer#readDocument(ReadMarshallable)} but without the use of a
     * lambda expression.
     * <p>
     * This method is the ExcerptTailer equivalent of {@link net.openhft.chronicle.wire.WireIn#readingDocument()}
     *
     * @param includeMetaData if the DocumentContext shall be meta data aware.
     *
     * @return the document context
     */
    @NotNull
    DocumentContext readingDocument(boolean includeMetaData);
    /**
     * Returns {@code true} if {@link #readingDocument()} would return a document which is present, in other words would return a DocumentContext where {@link net.openhft.chronicle.wire.DocumentContext#isPresent()} == {@code true}
     *
     * For the best performance you should only use this method once a message has been found by toStart() or readingDocument(). Otherwise {@link #readingDocument()} is called and then rolled back, which is not optimal.
     * For the same reason, this method should also be avoided when reading documents backwards.
     *
     * WARNING : THIS METHOD CANNOT BE RELIED UPON, IT WILL SOMETIMES RETURN FALSE WHEN THERE ARE MESSAGES,
     * YOU SHOULD ONLY CALL IT PERIODICALLY AND NOT RELY TOO MUCH UPON ITS RESULTS; INSTEAD CALL readingDocument()! ( FOR THIS REASON - WE HAVE DECIDED TO DEPRECATE IT ! )
     *
     * @return if {@link #readingDocument()} would return a DocumentContext
     * that provides excerpts to read.
     */
    @Deprecated(/* remove in x.23* and make internal to StoreTailer as it is used by chronicle services - removed because sometimes it will report false when there are messages*/)
    @Override
    default boolean peekDocument() {
        return true;
    }
    /**
     * Returns the current index of this Tailer.
     * <p>
     * If this method is invoked within a {@code try (tailer.readingDocument()){ }} block, returns the index of
     * the current reading document. Otherwise, returns the next index to read.
     * <p>
     * The index includes the cycle and the sequence number within that cycle.
     *
     * @return the current index of this Tailer
     *
     */
    @Override
    long index();
    /**
     * Returns the current cycle for this Tailer.
     * <p>
     * Usually, each cycle will have its own unique data file to store excerpts.
     *
     * @return Returns the current cycle for this Tailer
     */
    int cycle();
    /**
     * Tries to move the index for this Tailer to the provided {@code index}.
     * <p>
     * The index contains both the cycle number and sequence number within the cycle.
     * <p>
     * In order for the operation to succeed, the roll file, corresponding to
     * the cycle number in the index, must be present and the roll file must
     * contain the sequence number contained in the index.
     * <p>
     * If the index is not a valid index, the operation is undefined.
     *
     * @param index index to move to.
     * @return if this is a valid index.
     */
    boolean moveToIndex(long index);
    /**
     * Moves the index for this Tailer to the first existing excerpt in the queue.
     *
     * @return this ExcerptTailer
     */
    @NotNull
    ExcerptTailer toStart();
    /**
     * Moves the index for this Tailer to the end of the queue.
     * <p>
     * If the direction() == FORWARD, this will be the index position corresponding to one more
     * than the last entry. Otherwise, the index will be the last excerpt.
     * <p>
     * This is not atomic with the appenders, in other words if a cycle has been added in the
     * current millisecond, toEnd() may not see it. This is because for performance reasons, the
     * queue.lastCycle() is cached, as finding the last cycle is expensive, it requires asking the
     * directory for the Files.list() so, this cache is only refreshed if the call toEnd() is in a
     * new millisecond. Hence a whole millisecond's worth of data could be added to the
     * chronicle-queue that toEnd() won’t see. Appenders that are using the same queue
     * instance (and within the same JVM) can be informed that the last cycle has
     * changed; this will yield better results, but atomicity can still not be guaranteed.
     *
     * @return this ExcerptTailer
     */
    @NotNull
    ExcerptTailer toEnd();
    /**
     * Sets the {@code striding} property of this Tailer.
     * <p>
     * When striding is enabled AND direction is BACKWARD, skip to the entries easiest to find, doesn't need to be every entry.
     *
     * @param striding skip to the indexStride if that is easy, doesn't always happen.
     *
     * @return this ExcerptTailer
     */
    ExcerptTailer striding(boolean striding);
    /**
     * Returns the striding property of this Tailer.
     *
     * @return the striding property of this Tailer
     * @see #striding(boolean)
     */
    boolean striding();
    /**
     * Sets the direction of this ExcerptTailer.
     * <p>
     * The direction determines the direction of movement upon reading an excerpt.
     *
     * @param direction which is either of NONE, FORWARD, BACKWARD
     * @return this ExcerptTailer
     * @throws NullPointerException if the provided {@code direction} is {@code null}
     */
    @NotNull
    ExcerptTailer direction(@NotNull TailerDirection direction);
    /**
     * Returns the direction of this ExcerptTailer.
     * <p>
     * The direction determines the direction of movement upon reading an excerpt.
     *
     * @return the direction of this ExcerptTailer
     */
    TailerDirection direction();
    /**
     * Winds this ExcerptTailer to after the last entry which wrote an entry to the queue.
     *
     * @param queue which was written to.
     * @return this ExcerptTailer
     *
     * @throws IORuntimeException if the provided {@code queue} couldn't be wound to the last index.
     * @throws NullPointerException if the provided {@code queue} is {@code null}
     */
    @NotNull
    ExcerptTailer afterLastWritten(ChronicleQueue queue);
    /**
     * Sets the Read After Replica Acknowledged property of this Tailer to the
     * provided {@code readAfterReplicaAcknowledged}.
     * <p>
     * Enterprise Queue only: if replication enabled, setting this to true on a source queue ensures that
     * this tailer will not read until at least one of the sinks has acknowledged receipt of the excerpt.
     * This will block forever if no sinks acknowledge receipt.
     *
     * @param readAfterReplicaAcknowledged enable
     */
    default void readAfterReplicaAcknowledged(boolean readAfterReplicaAcknowledged) {
    }
    /**
     * Returns the Read After Replica Acknowledged property of this Tailer.
     * <p>
     * Enterprise Queue only: if replication enabled, setting this to true on a source queue ensures that
     * this tailer will not read until at least one of the sinks has acknowledged receipt of the excerpt.
     * This will block forever if no sinks acknowledge receipt.
     *
     * @return the Read After Replica Acknowledged property of this Tailer
     */
    default boolean readAfterReplicaAcknowledged() {
        return false;
    }
    /**
     * Returns the {@link TailerState} of this Tailer.
     *
     * @return the {@link TailerState} of this Tailer
     */
    @NotNull
    TailerState state();
    // Need to add this here until we can release Ring. https://github.com/ChronicleEnterprise/Chronicle-Ring/issues/12
    @Override
    default @NotNull ExcerptTailer disableThreadSafetyCheck(boolean disableThreadSafetyCheck) {
        return this;
    }
}
|
doco typo
|
src/main/java/net/openhft/chronicle/queue/ExcerptTailer.java
|
doco typo
|
|
Java
|
apache-2.0
|
84786947a61bab2ae564cd31f76d951c37cb1ed4
| 0
|
web-education/web-utils,web-education/web-utils
|
/*
* Copyright © WebServices pour l'Éducation, 2014
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package fr.wseduc.webutils.security;
import org.vertx.java.core.MultiMap;
import java.util.ArrayList;
import java.util.List;
import java.util.regex.Pattern;
/**
 * Utility that strips common XSS attack vectors from request parameter values.
 * <p>
 * NOTE(review): blacklist-based stripping is inherently incomplete — proper
 * contextual output encoding remains the primary defence against XSS.
 */
public final class XSSUtils {

    /** Static utility class — not instantiable. */
    private XSSUtils() {}

    // Compiled once; applied sequentially by stripXSS(). Each match is replaced
    // with the empty string.
    private static final Pattern[] patterns = new Pattern[]{
            // Whole <script>...</script> elements. FIX: DOTALL|MULTILINE added so
            // '.' crosses newlines and multi-line script bodies are also removed
            // (the standalone-tag patterns below only removed the tags themselves).
            Pattern.compile("<script>(.*?)</script>", Pattern.CASE_INSENSITIVE | Pattern.MULTILINE | Pattern.DOTALL),
            Pattern.compile("<script>", Pattern.CASE_INSENSITIVE),
            Pattern.compile("</script>", Pattern.CASE_INSENSITIVE),
            Pattern.compile("<script(.*?)>", Pattern.CASE_INSENSITIVE | Pattern.MULTILINE | Pattern.DOTALL),
            Pattern.compile("eval\\((.*?)\\)", Pattern.CASE_INSENSITIVE | Pattern.MULTILINE | Pattern.DOTALL),
            Pattern.compile("expression\\((.*?)\\)", Pattern.CASE_INSENSITIVE | Pattern.MULTILINE | Pattern.DOTALL),
            Pattern.compile("javascript:", Pattern.CASE_INSENSITIVE),
            Pattern.compile("vbscript:", Pattern.CASE_INSENSITIVE),
            // Inline event-handler attributes (onclick=, onmouseover=, onload=, ...).
            // FIX: the original alternation contained "| before" with a leading
            // space — a dead branch that could never match an attribute name
            // ("before" is already covered earlier in the alternation).
            Pattern.compile("on(click|context|mouse|dblclick|key|abort|error|before|hash|load|page|" +
                    "resize|scroll|unload|blur|change|focus|in|reset|se|submit|drag|drop|copy|cut|paste|" +
                    "after|can|end|duration|emp|p|seek|stall|sus|time|volume|waiting|message|open|touch|" +
                    "on|off|pop|show|storage|toggle|wheel)(.*?)=",
                    Pattern.CASE_INSENSITIVE | Pattern.MULTILINE | Pattern.DOTALL),
    };

    /**
     * Sanitises every value of the given multimap in place.
     *
     * @param m request parameters/headers; each value is passed through {@link #stripXSS(String)}.
     * @return the same multimap instance, with sanitised values.
     */
    public static MultiMap safeMultiMap(MultiMap m) {
        for (String name : m.names()) {
            List<String> values = m.getAll(name);
            if (values == null) continue;
            List<String> safeValues = new ArrayList<>(values.size());
            for (String value : values) {
                safeValues.add(stripXSS(value));
            }
            m.set(name, safeValues);
        }
        return m;
    }

    /**
     * Removes NUL characters and every pattern match from the given value.
     *
     * @param value raw input, may be {@code null}.
     * @return sanitised value, or {@code null} if the input was {@code null}.
     */
    public static String stripXSS(String value) {
        if (value != null) {
            //value = ESAPI.encoder().canonicalize(value);
            // NUL bytes can be used to confuse downstream parsers.
            value = value.replaceAll("\0", "");
            for (Pattern scriptPattern : patterns) {
                value = scriptPattern.matcher(value).replaceAll("");
            }
        }
        return value;
    }
}
|
src/main/java/fr/wseduc/webutils/security/XSSUtils.java
|
/*
* Copyright © WebServices pour l'Éducation, 2014
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package fr.wseduc.webutils.security;
import org.vertx.java.core.MultiMap;
import java.util.ArrayList;
import java.util.List;
import java.util.regex.Pattern;
/**
 * Utility that strips common XSS attack vectors from request parameter values.
 * NOTE(review): blacklist-based stripping is inherently incomplete — proper
 * contextual output encoding remains the primary defence against XSS.
 */
public final class XSSUtils {
// Static utility class — not instantiable.
private XSSUtils() {}
// Compiled once; applied sequentially by stripXSS(). Each match is replaced
// with the empty string. Only onload/onmouse*/onclick handlers are covered here.
private static final Pattern[] patterns = new Pattern[]{
Pattern.compile("<script>(.*?)</script>", Pattern.CASE_INSENSITIVE),
// Pattern.compile("src[\r\n]*=[\r\n]*\\\'(.*?)\\\'", Pattern.CASE_INSENSITIVE | Pattern.MULTILINE | Pattern.DOTALL),
// Pattern.compile("src[\r\n]*=[\r\n]*\\\"(.*?)\\\"", Pattern.CASE_INSENSITIVE | Pattern.MULTILINE | Pattern.DOTALL),
Pattern.compile("<script>", Pattern.CASE_INSENSITIVE),
Pattern.compile("</script>", Pattern.CASE_INSENSITIVE),
Pattern.compile("<script(.*?)>", Pattern.CASE_INSENSITIVE | Pattern.MULTILINE | Pattern.DOTALL),
Pattern.compile("eval\\((.*?)\\)", Pattern.CASE_INSENSITIVE | Pattern.MULTILINE | Pattern.DOTALL),
Pattern.compile("expression\\((.*?)\\)", Pattern.CASE_INSENSITIVE | Pattern.MULTILINE | Pattern.DOTALL),
Pattern.compile("javascript:", Pattern.CASE_INSENSITIVE),
Pattern.compile("vbscript:", Pattern.CASE_INSENSITIVE),
Pattern.compile("onload(.*?)=", Pattern.CASE_INSENSITIVE | Pattern.MULTILINE | Pattern.DOTALL),
Pattern.compile("onmouse(.*?)=", Pattern.CASE_INSENSITIVE | Pattern.MULTILINE | Pattern.DOTALL),
Pattern.compile("onclick(.*?)=", Pattern.CASE_INSENSITIVE | Pattern.MULTILINE | Pattern.DOTALL)
};
/**
 * Sanitises every value of the given multimap in place.
 *
 * @param m request parameters/headers; each value is passed through {@link #stripXSS(String)}.
 * @return the same multimap instance, with sanitised values.
 */
public static MultiMap safeMultiMap(MultiMap m) {
for (String name : m.names()) {
List<String> values = m.getAll(name);
List<String> safeValues = new ArrayList<>();
if (values == null) continue;
for (String value: values) {
safeValues.add(stripXSS(value));
}
m.set(name, safeValues);
}
return m;
}
/**
 * Removes NUL characters and every pattern match from the given value.
 *
 * @param value raw input, may be {@code null}.
 * @return sanitised value, or {@code null} if the input was {@code null}.
 */
public static String stripXSS(String value) {
if (value != null) {
//value = ESAPI.encoder().canonicalize(value);
// NUL bytes can be used to confuse downstream parsers.
value = value.replaceAll("\0", "");
for (Pattern scriptPattern : patterns){
value = scriptPattern.matcher(value).replaceAll("");
}
}
return value;
}
}
|
improve patterns in xss utils
|
src/main/java/fr/wseduc/webutils/security/XSSUtils.java
|
improve patterns in xss utils
|
|
Java
|
apache-2.0
|
7e97c7157d4c5616c20853469bef7fbc621e24da
| 0
|
eolivelli/herddb,diennea/herddb,eolivelli/herddb,diennea/herddb,eolivelli/herddb,diennea/herddb,eolivelli/herddb,diennea/herddb
|
/*
Licensed to Diennea S.r.l. under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. Diennea S.r.l. licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package herddb.cluster;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardOpenOption;
import java.util.Iterator;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.apache.bookkeeper.bookie.Bookie;
import org.apache.bookkeeper.client.BookKeeperAdmin;
import org.apache.bookkeeper.meta.HierarchicalLedgerManagerFactory;
import org.apache.bookkeeper.proto.BookieServer;
import org.apache.bookkeeper.stats.CodahaleMetricsProvider;
import org.apache.bookkeeper.stats.StatsProvider;
import org.apache.bookkeeper.util.ReflectionUtils;
import herddb.network.netty.NetworkUtils;
import herddb.server.ServerConfiguration;
/**
* Utility for starting embedded Apache BookKeeper Server (Bookie)
*
* @author enrico.olivelli
*/
public class EmbeddedBookie implements AutoCloseable {
// Class-wide logger.
private final static Logger LOG = Logger.getLogger(EmbeddedBookie.class.getName());
// HerdDB server configuration; keys prefixed "bookie." are forwarded to BookKeeper.
private final ServerConfiguration configuration;
// Root directory under which the "bookie" data/journal directories are created.
private final Path baseDirectory;
// Running server instance; null until start() and after close().
private BookieServer bookieServer;
// Stats provider created from the configured provider class; stopped in close().
private StatsProvider statsProvider;
/**
 * Creates an embedded Bookie rooted at the given directory.
 *
 * @param baseDirectory root directory for bookie data and journal.
 * @param configuration HerdDB server configuration (ZK address, port, "bookie.*" overrides).
 */
public EmbeddedBookie(Path baseDirectory, ServerConfiguration configuration) {
this.configuration = configuration;
this.baseDirectory = baseDirectory;
}
/**
 * Boots the embedded Bookie: builds the BookKeeper server configuration,
 * resolves (and persists) the listening port, formats ZK metadata and the
 * local bookie if requested, then starts the stats provider and BookieServer
 * and waits (up to ~50s) for it to report running.
 *
 * @throws Exception if formatting or server startup fails.
 */
public void start() throws Exception {
org.apache.bookkeeper.conf.ServerConfiguration conf = new org.apache.bookkeeper.conf.ServerConfiguration();
conf.setZkTimeout(configuration.getInt(ServerConfiguration.PROPERTY_ZOOKEEPER_SESSIONTIMEOUT, ServerConfiguration.PROPERTY_ZOOKEEPER_SESSIONTIMEOUT_DEFAULT));
conf.setZkServers(configuration.getString(ServerConfiguration.PROPERTY_ZOOKEEPER_ADDRESS, ServerConfiguration.PROPERTY_ZOOKEEPER_ADDRESS_DEFAULT));
conf.setStatisticsEnabled(true);
conf.setProperty("codahaleStatsJmxEndpoint", "Bookie");
conf.setStatsProviderClass(CodahaleMetricsProvider.class);
int port = configuration.getInt(ServerConfiguration.PROPERTY_BOOKKEEPER_BOOKIE_PORT, ServerConfiguration.PROPERTY_BOOKKEEPER_BOOKIE_PORT_DEFAULT);
conf.setUseHostNameAsBookieID(true);
Path bookie_dir = baseDirectory.resolve("bookie");
// Non-positive configured port means "auto-assign once and remember":
// reuse the previously persisted port, or pick a free one and persist it,
// so the bookie identity stays stable across restarts.
if (port <= 0) {
Integer _port = readLocalBookiePort(bookie_dir);
if (_port == null) {
_port = NetworkUtils.assignFirstFreePort();
LOG.log(Level.SEVERE, "As configuration parameter "
+ ServerConfiguration.PROPERTY_BOOKKEEPER_BOOKIE_PORT + " is {0},I have choosen to listen on port {1}."
+ " Set to a positive number in order to use a fixed port", new Object[]{Integer.toString(port), Integer.toString(_port)});
persistLocalBookiePort(bookie_dir, _port);
}
port = _port;
}
conf.setBookiePort(port);
Files.createDirectories(bookie_dir);
Path bookie_data_dir = bookie_dir.resolve("bookie_data").toAbsolutePath();
Path bookie_journal_dir = bookie_dir.resolve("bookie_journal").toAbsolutePath();
Files.createDirectories(bookie_data_dir);
Files.createDirectories(bookie_journal_dir);
conf.setLedgerDirNames(new String[]{bookie_data_dir.toString()});
conf.setJournalDirName(bookie_journal_dir.toString());
conf.setFlushInterval(1000);
conf.setMaxBackupJournals(5);
conf.setMaxJournalSizeMB(1048);
conf.setMaxPendingReadRequestPerThread(10000); // new in 4.6
conf.setMaxPendingAddRequestPerThread(20000); // new in 4.6
conf.setEnableLocalTransport(true);
conf.setProperty("journalMaxGroupWaitMSec", 10L); // default 200ms
conf.setJournalFlushWhenQueueEmpty(true);
conf.setAutoRecoveryDaemonEnabled(false);
conf.setLedgerManagerFactoryClass(HierarchicalLedgerManagerFactory.class);
// Forward any "bookie.*" key from the HerdDB configuration to BookKeeper,
// stripping the prefix; this allows arbitrary bookie overrides.
for (String key : configuration.keys()) {
if (key.startsWith("bookie.")) {
String bookieConf = key.substring("bookie.".length());
String value = configuration.getString(key, null);
conf.addProperty(bookieConf, value);
LOG.log(Level.CONFIG, "config {0} remapped to {1}={2}", new Object[]{key, bookieConf, value});
}
}
long _start = System.currentTimeMillis();
LOG.severe("Booting Apache Bookkeeper on port " + port);
Files.createDirectories(bookie_dir);
dumpBookieConfiguration(bookie_dir, conf);
// Create the ZK metadata layout if it does not exist yet (non-interactive).
boolean forcemetaformat = configuration.getBoolean("bookie.forcemetaformat", false);
LOG.log(Level.CONFIG, "bookie.forcemetaformat={0}", forcemetaformat);
org.apache.bookkeeper.conf.ClientConfiguration adminConf = new org.apache.bookkeeper.conf.ClientConfiguration(conf);
boolean result = BookKeeperAdmin.format(adminConf, false, forcemetaformat);
if (result) {
LOG.info("BookKeeperAdmin.format: created a new workspace on ZK");
} else {
LOG.info("BookKeeperAdmin.format: ZK space does not need an format operation");
}
// Optionally wipe/format the local bookie storage (destructive; opt-in only).
boolean forceformat = configuration.getBoolean("bookie.forceformat", false);
LOG.log(Level.CONFIG, "bookie.forceformat={0}", forceformat);
if (forceformat) {
result = Bookie.format(conf, false, forceformat);
if (result) {
LOG.info("Bookie.format: formatter applied to local bookie");
} else {
LOG.info("Bookie.format: local boookie did not need formatting");
}
}
// Instantiate and start the stats provider before the server so the
// server can be wired to its stats logger.
Class<? extends StatsProvider> statsProviderClass
= conf.getStatsProviderClass();
statsProvider = ReflectionUtils.newInstance(statsProviderClass);
statsProvider.start(conf);
bookieServer = new BookieServer(conf, statsProvider.getStatsLogger(""));
bookieServer.start();
// Poll for up to 100 * 500ms for the bookie to come up; startup continues
// even if the timeout elapses (NOTE(review): no error is raised on timeout).
for (int i = 0; i < 100; i++) {
if (bookieServer.getBookie().isRunning()) {
LOG.info("Apache Bookkeeper started");
break;
}
Thread.sleep(500);
}
long _stop = System.currentTimeMillis();
LOG.severe("Booting Apache Bookkeeper finished. Time " + (_stop - _start) + " ms");
}
/**
 * Dumps the effective BookKeeper configuration to
 * {@code embedded.bookie.properties} inside the bookie directory, so the
 * standard bookkeeper shell can be pointed at this embedded instance.
 *
 * @throws IOException if the file cannot be written.
 */
private void dumpBookieConfiguration(Path bookie_dir, org.apache.bookkeeper.conf.ServerConfiguration conf) throws IOException {
// dump actual BookKeeper configuration in order to use bookkeeper shell
Path actual_bookkeeper_configuration = bookie_dir.resolve("embedded.bookie.properties");
StringBuilder builder = new StringBuilder();
for (Iterator<String> key_it = conf.getKeys(); key_it.hasNext();) {
String key = key_it.next() + "";
Object value = conf.getProperty(key + "");
builder.append(key + "=" + value + "\n");
}
Files.write(actual_bookkeeper_configuration, builder.toString().getBytes(StandardCharsets.UTF_8),
StandardOpenOption.TRUNCATE_EXISTING, StandardOpenOption.CREATE);
LOG.severe(
"Dumped actual Bookie configuration to " + actual_bookkeeper_configuration.toAbsolutePath());
}
/**
 * Shuts down the BookieServer (restoring the interrupt flag if interrupted
 * while joining) and stops the stats provider. Safe to call when not started.
 */
@Override
public void close() {
if (bookieServer != null) {
LOG.info("Apache Bookkeeper stopping");
try {
bookieServer.shutdown();
bookieServer.join();
} catch (InterruptedException err) {
Thread.currentThread().interrupt();
} finally {
bookieServer = null;
}
}
if (statsProvider != null) {
statsProvider.stop();
}
}
/**
 * Reads the previously persisted bookie port from {@code bookie_port} under
 * the given directory. Blank lines and lines starting with '#' are skipped.
 *
 * @param dataPath directory that may contain the {@code bookie_port} file.
 * @return the persisted port, or {@code null} if the file does not exist.
 * @throws IOException if the file exists but contains no valid port line,
 *         or cannot be read.
 */
public Integer readLocalBookiePort(Path dataPath) throws IOException {
Path file = dataPath.resolve("bookie_port");
try {
LOG.log(Level.SEVERE, "Looking for local port into file {0}", file);
if (!Files.isRegularFile(file)) {
LOG.log(Level.SEVERE, "Cannot find file {0}", file);
return null;
}
List<String> lines = Files.readAllLines(file, StandardCharsets.UTF_8);
for (String line : lines) {
line = line.trim().toLowerCase();
// skip comments and empty lines
if (line.startsWith("#") || line.isEmpty()) {
continue;
}
int res = Integer.parseInt(line);
LOG.log(Level.SEVERE, "Found local port {0} into file {1}", new Object[]{Integer.toString(res), file});
return res;
}
throw new IOException("Cannot find any valid line inside file " + file.toAbsolutePath());
} catch (IOException error) {
LOG.log(Level.SEVERE, "Error while reading file " + file.toAbsolutePath(), error);
throw error;
}
}
/**
 * Persists the auto-assigned bookie port to {@code bookie_port} under the
 * given directory, with an explanatory header. Uses CREATE_NEW: fails if the
 * file already exists, so an existing port is never silently overwritten.
 *
 * @throws IOException if the directory or file cannot be created.
 */
public void persistLocalBookiePort(Path dataPath, int port) throws IOException {
Files.createDirectories(dataPath);
Path file = dataPath.resolve("bookie_port");
StringBuilder message = new StringBuilder();
message.append("# This file contains the port of the bookie used by this node\n");
message.append("# Do not change the contents of this file, otherwise the beheaviour of the system will\n");
message.append("# lead eventually to data loss\n");
message.append("# \n");
message.append("# Any line which starts with '#' and and blank line will be ignored\n");
message.append("# The system will consider the first non-blank line as port\n");
message.append("\n\n");
message.append(port);
Files.write(file, message.toString().getBytes(StandardCharsets.UTF_8), StandardOpenOption.CREATE_NEW);
}
}
|
herddb-core/src/main/java/herddb/cluster/EmbeddedBookie.java
|
/*
Licensed to Diennea S.r.l. under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. Diennea S.r.l. licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package herddb.cluster;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardOpenOption;
import java.util.Iterator;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.apache.bookkeeper.bookie.Bookie;
import org.apache.bookkeeper.client.BookKeeperAdmin;
import org.apache.bookkeeper.meta.HierarchicalLedgerManagerFactory;
import org.apache.bookkeeper.proto.BookieServer;
import org.apache.bookkeeper.stats.CodahaleMetricsProvider;
import org.apache.bookkeeper.stats.StatsProvider;
import org.apache.bookkeeper.util.ReflectionUtils;
import herddb.network.netty.NetworkUtils;
import herddb.server.ServerConfiguration;
/**
 * Utility for starting embedded Apache BookKeeper Server (Bookie)
 *
 * @author enrico.olivelli
 */
public class EmbeddedBookie implements AutoCloseable {

    private final static Logger LOG = Logger.getLogger(EmbeddedBookie.class.getName());

    // HerdDB configuration: source of the ZooKeeper settings and of every "bookie.*" property.
    private final ServerConfiguration configuration;
    // Root directory; the bookie keeps all its state under baseDirectory/bookie.
    private final Path baseDirectory;
    // Running bookie; non-null between a successful start() and close().
    private BookieServer bookieServer;
    // Metrics provider started just before the bookie; stopped in close().
    private StatsProvider statsProvider;

    public EmbeddedBookie(Path baseDirectory, ServerConfiguration configuration) {
        this.configuration = configuration;
        this.baseDirectory = baseDirectory;
    }

    /**
     * Boots the embedded bookie: builds the BookKeeper server configuration from the
     * HerdDB configuration, resolves (and, if needed, persists) the TCP port,
     * optionally formats the ZooKeeper metadata space and the local storage, then
     * starts the stats provider and the {@link BookieServer} and polls until the
     * bookie reports it is running.
     *
     * NOTE(review): several purely informational messages in this class are logged at
     * SEVERE level, presumably to be visible with a default logging configuration.
     *
     * @throws Exception if any bootstrap step fails
     */
    public void start() throws Exception {
        org.apache.bookkeeper.conf.ServerConfiguration conf = new org.apache.bookkeeper.conf.ServerConfiguration();
        conf.setZkTimeout(configuration.getInt(ServerConfiguration.PROPERTY_ZOOKEEPER_SESSIONTIMEOUT, ServerConfiguration.PROPERTY_ZOOKEEPER_SESSIONTIMEOUT_DEFAULT));
        conf.setZkServers(configuration.getString(ServerConfiguration.PROPERTY_ZOOKEEPER_ADDRESS, ServerConfiguration.PROPERTY_ZOOKEEPER_ADDRESS_DEFAULT));
        conf.setStatisticsEnabled(true);
        conf.setProperty("codahaleStatsJmxEndpoint", "Bookie");
        conf.setStatsProviderClass(CodahaleMetricsProvider.class);
        int port = configuration.getInt(ServerConfiguration.PROPERTY_BOOKKEEPER_BOOKIE_PORT, ServerConfiguration.PROPERTY_BOOKKEEPER_BOOKIE_PORT_DEFAULT);
        conf.setUseHostNameAsBookieID(true);
        Path bookie_dir = baseDirectory.resolve("bookie");
        // A non-positive configured port means "pick one for me": reuse the port
        // persisted on a previous run, or assign a free one and persist it so this
        // bookie keeps a stable identity across restarts.
        if (port <= 0) {
            Integer _port = readLocalBookiePort(bookie_dir);
            if (_port == null) {
                _port = NetworkUtils.assignFirstFreePort();
                LOG.log(Level.SEVERE, "As configuration parameter "
                        + ServerConfiguration.PROPERTY_BOOKKEEPER_BOOKIE_PORT + " is {0},I have choosen to listen on port {1}."
                        + " Set to a positive number in order to use a fixed port", new Object[]{Integer.toString(port), Integer.toString(_port)});
                persistLocalBookiePort(bookie_dir, _port);
            }
            port = _port;
        }
        conf.setBookiePort(port);
        Files.createDirectories(bookie_dir);
        Path bookie_data_dir = bookie_dir.resolve("bookie_data").toAbsolutePath();
        Path bookie_journal_dir = bookie_dir.resolve("bookie_journal").toAbsolutePath();
        Files.createDirectories(bookie_data_dir);
        Files.createDirectories(bookie_journal_dir);
        conf.setLedgerDirNames(new String[]{bookie_data_dir.toString()});
        conf.setJournalDirName(bookie_journal_dir.toString());
        conf.setFlushInterval(1000);
        conf.setMaxBackupJournals(5);
        conf.setMaxJournalSizeMB(1048);
        conf.setEnableLocalTransport(true);
        conf.setProperty("journalMaxGroupWaitMSec", 10L); // default 200ms
        conf.setJournalFlushWhenQueueEmpty(true);
        conf.setAutoRecoveryDaemonEnabled(false);
        conf.setLedgerManagerFactoryClass(HierarchicalLedgerManagerFactory.class);
        // Every HerdDB property prefixed with "bookie." is forwarded verbatim to the
        // BookKeeper configuration with the prefix stripped, allowing arbitrary
        // bookie tuning from the HerdDB configuration file.
        for (String key : configuration.keys()) {
            if (key.startsWith("bookie.")) {
                String bookieConf = key.substring("bookie.".length());
                String value = configuration.getString(key, null);
                conf.addProperty(bookieConf, value);
                LOG.log(Level.CONFIG, "config {0} remapped to {1}={2}", new Object[]{key, bookieConf, value});
            }
        }
        long _start = System.currentTimeMillis();
        LOG.severe("Booting Apache Bookkeeper on port " + port);
        // NOTE(review): bookie_dir was already created above; this second call is harmless.
        Files.createDirectories(bookie_dir);
        dumpBookieConfiguration(bookie_dir, conf);
        boolean forcemetaformat = configuration.getBoolean("bookie.forcemetaformat", false);
        LOG.log(Level.CONFIG, "bookie.forcemetaformat={0}", forcemetaformat);
        org.apache.bookkeeper.conf.ClientConfiguration adminConf = new org.apache.bookkeeper.conf.ClientConfiguration(conf);
        // Initialize (or force-reformat, if requested) the BookKeeper metadata space on ZooKeeper.
        boolean result = BookKeeperAdmin.format(adminConf, false, forcemetaformat);
        if (result) {
            LOG.info("BookKeeperAdmin.format: created a new workspace on ZK");
        } else {
            LOG.info("BookKeeperAdmin.format: ZK space does not need an format operation");
        }
        boolean forceformat = configuration.getBoolean("bookie.forceformat", false);
        LOG.log(Level.CONFIG, "bookie.forceformat={0}", forceformat);
        // Local storage is formatted only when explicitly requested.
        if (forceformat) {
            result = Bookie.format(conf, false, forceformat);
            if (result) {
                LOG.info("Bookie.format: formatter applied to local bookie");
            } else {
                LOG.info("Bookie.format: local boookie did not need formatting");
            }
        }
        // The stats provider must be started before the bookie, which receives its StatsLogger.
        Class<? extends StatsProvider> statsProviderClass
                = conf.getStatsProviderClass();
        statsProvider = ReflectionUtils.newInstance(statsProviderClass);
        statsProvider.start(conf);
        bookieServer = new BookieServer(conf, statsProvider.getStatsLogger(""));
        bookieServer.start();
        // Poll for up to 100 * 500ms = 50s until the bookie reports running.
        // NOTE(review): if the bookie never reports running the loop just times out;
        // start() does not fail in that case.
        for (int i = 0; i < 100; i++) {
            if (bookieServer.getBookie().isRunning()) {
                LOG.info("Apache Bookkeeper started");
                break;
            }
            Thread.sleep(500);
        }
        long _stop = System.currentTimeMillis();
        LOG.severe("Booting Apache Bookkeeper finished. Time " + (_stop - _start) + " ms");
    }

    /**
     * Dumps the effective BookKeeper configuration to
     * {@code <bookie_dir>/embedded.bookie.properties} so that the standard
     * "bookkeeper shell" tooling can be pointed at this embedded bookie.
     *
     * @param bookie_dir directory where the properties file is written
     * @param conf       the effective BookKeeper server configuration
     * @throws IOException if the file cannot be written
     */
    private void dumpBookieConfiguration(Path bookie_dir, org.apache.bookkeeper.conf.ServerConfiguration conf) throws IOException {
        // dump actual BookKeeper configuration in order to use bookkeeper shell
        Path actual_bookkeeper_configuration = bookie_dir.resolve("embedded.bookie.properties");
        StringBuilder builder = new StringBuilder();
        for (Iterator<String> key_it = conf.getKeys(); key_it.hasNext();) {
            String key = key_it.next() + "";
            if (System.getProperty(key) == null) { // Bookkeeper 4.4 adds system properties to configuration
                Object value = conf.getProperty(key + "");
                builder.append(key + "=" + value + "\n");
            }
        }
        Files.write(actual_bookkeeper_configuration, builder.toString().getBytes(StandardCharsets.UTF_8),
                StandardOpenOption.TRUNCATE_EXISTING, StandardOpenOption.CREATE);
        LOG.severe(
                "Dumped actual Bookie configuration to " + actual_bookkeeper_configuration.toAbsolutePath());
    }

    /**
     * Stops the bookie (if running) and then the stats provider. Safe to call more
     * than once: bookieServer is nulled out in the finally block. If interrupted
     * while waiting for the bookie to terminate, the interrupt flag is restored.
     */
    @Override
    public void close() {
        if (bookieServer != null) {
            LOG.info("Apache Bookkeeper stopping");
            try {
                bookieServer.shutdown();
                bookieServer.join();
            } catch (InterruptedException err) {
                Thread.currentThread().interrupt();
            } finally {
                bookieServer = null;
            }
        }
        if (statsProvider != null) {
            statsProvider.stop();
        }
    }

    /**
     * Reads the bookie port persisted by {@link #persistLocalBookiePort(java.nio.file.Path, int)}.
     * Lines starting with '#' and blank lines are skipped; the first remaining line
     * is parsed as the port.
     *
     * @param dataPath directory expected to contain the "bookie_port" file
     * @return the persisted port, or null if the file does not exist
     * @throws IOException if the file exists but cannot be read, or contains no
     *         non-blank non-comment line. Note: a non-numeric line makes
     *         Integer.parseInt throw an unchecked NumberFormatException, which is
     *         not translated to IOException here.
     */
    public Integer readLocalBookiePort(Path dataPath) throws IOException {
        Path file = dataPath.resolve("bookie_port");
        try {
            LOG.log(Level.SEVERE, "Looking for local port into file {0}", file);
            if (!Files.isRegularFile(file)) {
                LOG.log(Level.SEVERE, "Cannot find file {0}", file);
                return null;
            }
            List<String> lines = Files.readAllLines(file, StandardCharsets.UTF_8);
            for (String line : lines) {
                line = line.trim().toLowerCase();
                // skip comments and empty lines
                if (line.startsWith("#") || line.isEmpty()) {
                    continue;
                }
                int res = Integer.parseInt(line);
                LOG.log(Level.SEVERE, "Found local port {0} into file {1}", new Object[]{Integer.toString(res), file});
                return res;
            }
            throw new IOException("Cannot find any valid line inside file " + file.toAbsolutePath());
        } catch (IOException error) {
            LOG.log(Level.SEVERE, "Error while reading file " + file.toAbsolutePath(), error);
            throw error;
        }
    }

    /**
     * Persists the port chosen for the local bookie into "<dataPath>/bookie_port"
     * so the same port is reused on the next restart.
     *
     * @param dataPath directory in which to create the file (created if missing)
     * @param port     the port to persist
     * @throws IOException if the file cannot be written; CREATE_NEW makes the write
     *         fail if the file already exists, so an existing port is never overwritten
     */
    public void persistLocalBookiePort(Path dataPath, int port) throws IOException {
        Files.createDirectories(dataPath);
        Path file = dataPath.resolve("bookie_port");
        StringBuilder message = new StringBuilder();
        message.append("# This file contains the port of the bookie used by this node\n");
        message.append("# Do not change the contents of this file, otherwise the beheaviour of the system will\n");
        message.append("# lead eventually to data loss\n");
        message.append("# \n");
        message.append("# Any line which starts with '#' and and blank line will be ignored\n");
        message.append("# The system will consider the first non-blank line as port\n");
        message.append("\n\n");
        message.append(port);
        Files.write(file, message.toString().getBytes(StandardCharsets.UTF_8), StandardOpenOption.CREATE_NEW);
    }
}
|
upgrade to BookKeeper 4.6.0
|
herddb-core/src/main/java/herddb/cluster/EmbeddedBookie.java
|
upgrade to BookKeeper 4.6.0
|
|
Java
|
apache-2.0
|
c3cb98a116357593ad0f73d9ba99808d065f9942
| 0
|
ayne/Smack,chuangWu/Smack,opg7371/Smack,deeringc/Smack,TTalkIM/Smack,vito-c/Smack,igorexax3mal/Smack,lovely3x/Smack,hy9902/Smack,dpr-odoo/Smack,Flowdalic/Smack,igorexax3mal/Smack,magnetsystems/message-smack,ishan1604/Smack,Tibo-lg/Smack,vanitasvitae/Smack,cjpx00008/Smack,Flowdalic/Smack,Tibo-lg/Smack,TTalkIM/Smack,igniterealtime/Smack,esl/Smack,mar-v-in/Smack,kkroid/OnechatSmack,vanitasvitae/smack-omemo,opg7371/Smack,unisontech/Smack,magnetsystems/message-smack,vito-c/Smack,annovanvliet/Smack,lovely3x/Smack,ayne/Smack,ishan1604/Smack,kkroid/OnechatSmack,annovanvliet/Smack,mar-v-in/Smack,ishan1604/Smack,andrey42/Smack,unisontech/Smack,hy9902/Smack,qingsong-xu/Smack,magnetsystems/message-smack,igniterealtime/Smack,igniterealtime/Smack,esl/Smack,u20024804/Smack,Flowdalic/Smack,esl/Smack,chuangWu/Smack,igorexax3mal/Smack,vanitasvitae/Smack,qingsong-xu/Smack,lovely3x/Smack,qingsong-xu/Smack,cjpx00008/Smack,chuangWu/Smack,andrey42/Smack,opg7371/Smack,vanitasvitae/Smack,cjpx00008/Smack,dpr-odoo/Smack,ayne/Smack,vanitasvitae/smack-omemo,u20024804/Smack,annovanvliet/Smack,xuIcream/Smack,Tibo-lg/Smack,xuIcream/Smack,xuIcream/Smack,kkroid/OnechatSmack,u20024804/Smack,vanitasvitae/smack-omemo,mar-v-in/Smack,dpr-odoo/Smack,deeringc/Smack,hy9902/Smack,unisontech/Smack,deeringc/Smack,TTalkIM/Smack,andrey42/Smack
|
/**
*
* Copyright 2003-2007 Jive Software.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jivesoftware.smack;
import org.jivesoftware.smack.packet.Session;
import org.jivesoftware.smack.proxy.ProxyInfo;
import org.jivesoftware.smack.util.DNSUtil;
import org.jivesoftware.smack.util.dns.HostAddress;
import javax.net.SocketFactory;
import javax.net.ssl.SSLContext;
import javax.security.auth.callback.CallbackHandler;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
/**
 * Configuration to use while establishing the connection to the server. It is possible to
 * configure the path to the truststore file that keeps the trusted CA root certificates and
 * enable or disable all or some of the checks done while verifying server certificates.<p>
 *
 * It is also possible to configure if TLS, SASL, and compression are used or not.
 *
 * @author Gaston Dombiak
 */
public class ConnectionConfiguration implements Cloneable {

    /**
     * Hostname of the XMPP server. Usually servers use the same service name as the name
     * of the server. However, there are some servers like google where host would be
     * talk.google.com and the serviceName would be gmail.com.
     */
    private String serviceName;

    // Candidate host/port pairs, either resolved via DNS SRV or set explicitly by a constructor.
    protected List<HostAddress> hostAddresses;

    // Client keystore settings, used when the server requests client certificate authentication.
    private String keystorePath;
    private String keystoreType;
    private String pkcs11Library;
    // Custom SSLContext; when set it overrides all other SSL/TLS-related settings.
    private SSLContext customSSLContext;

    private boolean compressionEnabled = false;

    /**
     * Used to get information from the user
     */
    private CallbackHandler callbackHandler;

    private boolean debuggerEnabled = SmackConfiguration.DEBUG_ENABLED;

    // Flag that indicates if a reconnection should be attempted when abruptly disconnected
    private boolean reconnectionAllowed = true;

    // Holds the socket factory that is used to generate the socket in the connection
    private SocketFactory socketFactory;

    // Holds the authentication information for future reconnections
    private String username;
    private String password;
    private String resource;
    private boolean sendPresence = true;
    private boolean rosterLoadedAtLogin = true;
    private boolean legacySessionDisabled = false;
    // When false (host/port given explicitly), maybeResolveDns() skips the DNS SRV lookup.
    private boolean useDnsSrvRr = true;
    private SecurityMode securityMode = SecurityMode.enabled;

    /**
     * Permanent store for the Roster, needed for roster versioning
     */
    private RosterStore rosterStore;

    // Holds the proxy information (such as proxyhost, proxyport, username, password etc)
    protected ProxyInfo proxy;

    /**
     * Creates a new ConnectionConfiguration for the specified service name.
     * A DNS SRV lookup will be performed to find out the actual host address
     * and port to use for the connection.
     *
     * @param serviceName the name of the service provided by an XMPP server.
     */
    public ConnectionConfiguration(String serviceName) {
        init(serviceName, ProxyInfo.forDefaultProxy());
    }

    /**
     * Creates a new ConnectionConfiguration for the specified service name
     * with specified proxy.
     * A DNS SRV lookup will be performed to find out the actual host address
     * and port to use for the connection.
     *
     * @param serviceName the name of the service provided by an XMPP server.
     * @param proxy the proxy through which XMPP is to be connected
     */
    public ConnectionConfiguration(String serviceName, ProxyInfo proxy) {
        init(serviceName, proxy);
    }

    /**
     * Creates a new ConnectionConfiguration using the specified host, port and
     * service name. This is useful for manually overriding the DNS SRV lookup
     * process that's used with the {@link #ConnectionConfiguration(String)}
     * constructor. For example, say that an XMPP server is running at localhost
     * in an internal network on port 5222 but is configured to think that it's
     * "example.com" for testing purposes. This constructor is necessary to connect
     * to the server in that case since a DNS SRV lookup for example.com would not
     * point to the local testing server.
     *
     * @param host the host where the XMPP server is running.
     * @param port the port where the XMPP is listening.
     * @param serviceName the name of the service provided by an XMPP server.
     */
    public ConnectionConfiguration(String host, int port, String serviceName) {
        initHostAddresses(host, port);
        init(serviceName, ProxyInfo.forDefaultProxy());
    }

    /**
     * Creates a new ConnectionConfiguration using the specified host, port and
     * service name. This is useful for manually overriding the DNS SRV lookup
     * process that's used with the {@link #ConnectionConfiguration(String)}
     * constructor. For example, say that an XMPP server is running at localhost
     * in an internal network on port 5222 but is configured to think that it's
     * "example.com" for testing purposes. This constructor is necessary to connect
     * to the server in that case since a DNS SRV lookup for example.com would not
     * point to the local testing server.
     *
     * @param host the host where the XMPP server is running.
     * @param port the port where the XMPP is listening.
     * @param serviceName the name of the service provided by an XMPP server.
     * @param proxy the proxy through which XMPP is to be connected
     */
    public ConnectionConfiguration(String host, int port, String serviceName, ProxyInfo proxy) {
        initHostAddresses(host, port);
        init(serviceName, proxy);
    }

    /**
     * Creates a new ConnectionConfiguration for a connection that will connect
     * to the desired host and port.
     *
     * @param host the host where the XMPP server is running.
     * @param port the port where the XMPP is listening.
     */
    public ConnectionConfiguration(String host, int port) {
        initHostAddresses(host, port);
        init(host, ProxyInfo.forDefaultProxy());
    }

    /**
     * Creates a new ConnectionConfiguration for a connection that will connect
     * to the desired host and port with desired proxy.
     *
     * @param host the host where the XMPP server is running.
     * @param port the port where the XMPP is listening.
     * @param proxy the proxy through which XMPP is to be connected
     */
    public ConnectionConfiguration(String host, int port, ProxyInfo proxy) {
        initHostAddresses(host, port);
        init(host, proxy);
    }

    /**
     * Common initialization shared by all constructors: stores the service name and
     * proxy, applies keystore defaults, and derives the socket factory from the proxy.
     *
     * @param serviceName the XMPP domain of the target server
     * @param proxy the proxy through which XMPP is to be connected
     */
    protected void init(String serviceName, ProxyInfo proxy) {
        this.serviceName = serviceName;
        this.proxy = proxy;

        keystorePath = System.getProperty("javax.net.ssl.keyStore");
        keystoreType = "jks";
        pkcs11Library = "pkcs11.config";

        //Setting the SocketFactory according to proxy supplied
        socketFactory = proxy.getSocketFactory();
    }

    /**
     * Sets the server name, also known as XMPP domain of the target server.
     *
     * @param serviceName the XMPP domain of the target server.
     */
    void setServiceName(String serviceName) {
        this.serviceName = serviceName;
    }

    /**
     * Returns the server name of the target server.
     *
     * @return the server name of the target server.
     */
    public String getServiceName() {
        return serviceName;
    }

    /**
     * Returns the TLS security mode used when making the connection. By default,
     * the mode is {@link SecurityMode#enabled}.
     *
     * @return the security mode.
     */
    public SecurityMode getSecurityMode() {
        return securityMode;
    }

    /**
     * Sets the TLS security mode used when making the connection. By default,
     * the mode is {@link SecurityMode#enabled}.
     *
     * @param securityMode the security mode.
     */
    public void setSecurityMode(SecurityMode securityMode) {
        this.securityMode = securityMode;
    }

    /**
     * Returns the path to the keystore file. The key store file contains the
     * certificates that may be used to authenticate the client to the server,
     * in the event the server requests or requires it.
     *
     * @return the path to the keystore file.
     */
    public String getKeystorePath() {
        return keystorePath;
    }

    /**
     * Sets the path to the keystore file. The key store file contains the
     * certificates that may be used to authenticate the client to the server,
     * in the event the server requests or requires it.
     *
     * @param keystorePath the path to the keystore file.
     */
    public void setKeystorePath(String keystorePath) {
        this.keystorePath = keystorePath;
    }

    /**
     * Returns the keystore type, or <tt>null</tt> if it's not set.
     *
     * @return the keystore type.
     */
    public String getKeystoreType() {
        return keystoreType;
    }

    /**
     * Sets the keystore type.
     *
     * @param keystoreType the keystore type.
     */
    public void setKeystoreType(String keystoreType) {
        this.keystoreType = keystoreType;
    }

    /**
     * Returns the PKCS11 library file location, needed when the
     * Keystore type is PKCS11.
     *
     * @return the path to the PKCS11 library file
     */
    public String getPKCS11Library() {
        return pkcs11Library;
    }

    /**
     * Sets the PKCS11 library file location, needed when the
     * Keystore type is PKCS11
     *
     * @param pkcs11Library the path to the PKCS11 library file
     */
    public void setPKCS11Library(String pkcs11Library) {
        this.pkcs11Library = pkcs11Library;
    }

    /**
     * Gets the custom SSLContext for SSL sockets. This is null by default.
     *
     * @return the SSLContext previously set with setCustomSSLContext() or null.
     */
    public SSLContext getCustomSSLContext() {
        return this.customSSLContext;
    }

    /**
     * Sets a custom SSLContext for creating SSL sockets. A custom Context causes all other
     * SSL/TLS related settings to be ignored.
     *
     * @param context the custom SSLContext for new sockets; null to reset default behavior.
     */
    public void setCustomSSLContext(SSLContext context) {
        this.customSSLContext = context;
    }

    /**
     * Returns true if the connection is going to use stream compression. Stream compression
     * will be requested after TLS was established (if TLS was enabled) and only if the server
     * offered stream compression. With stream compression network traffic can be reduced
     * up to 90%. By default compression is disabled.
     *
     * @return true if the connection is going to use stream compression.
     */
    public boolean isCompressionEnabled() {
        return compressionEnabled;
    }

    /**
     * Sets if the connection is going to use stream compression. Stream compression
     * will be requested after TLS was established (if TLS was enabled) and only if the server
     * offered stream compression. With stream compression network traffic can be reduced
     * up to 90%. By default compression is disabled.
     *
     * @param compressionEnabled if the connection is going to use stream compression.
     */
    public void setCompressionEnabled(boolean compressionEnabled) {
        this.compressionEnabled = compressionEnabled;
    }

    /**
     * Returns true if the new connection about to be established is going to be debugged. By
     * default the value of {@link SmackConfiguration#DEBUG_ENABLED} is used.
     *
     * @return true if the new connection about to be established is going to be debugged.
     */
    public boolean isDebuggerEnabled() {
        return debuggerEnabled;
    }

    /**
     * Sets if the new connection about to be established is going to be debugged. By
     * default the value of {@link SmackConfiguration#DEBUG_ENABLED} is used.
     *
     * @param debuggerEnabled if the new connection about to be established is going to be debugged.
     */
    public void setDebuggerEnabled(boolean debuggerEnabled) {
        this.debuggerEnabled = debuggerEnabled;
    }

    /**
     * Sets if the reconnection mechanism is allowed to be used. By default
     * reconnection is allowed.
     *
     * @param isAllowed if the reconnection mechanism is allowed to use.
     */
    public void setReconnectionAllowed(boolean isAllowed) {
        this.reconnectionAllowed = isAllowed;
    }

    /**
     * Returns if the reconnection mechanism is allowed to be used. By default
     * reconnection is allowed.
     *
     * @return if the reconnection mechanism is allowed to be used.
     */
    public boolean isReconnectionAllowed() {
        return this.reconnectionAllowed;
    }

    /**
     * Sets the socket factory used to create new xmppConnection sockets.
     * This is useful when connecting through SOCKS5 proxies.
     *
     * @param socketFactory used to create new sockets.
     */
    public void setSocketFactory(SocketFactory socketFactory) {
        this.socketFactory = socketFactory;
    }

    /**
     * Sets if an initial available presence will be sent to the server. By default
     * an available presence will be sent to the server indicating that this presence
     * is not online and available to receive messages. If you want to log in without
     * being 'noticed' then pass a <tt>false</tt> value.
     *
     * @param sendPresence true if an initial available presence will be sent while logging in.
     */
    public void setSendPresence(boolean sendPresence) {
        this.sendPresence = sendPresence;
    }

    /**
     * Returns true if the roster will be loaded from the server when logging in. This
     * is the common behaviour for clients but sometimes clients may want to differ this
     * or just never do it if not interested in rosters.
     *
     * @return true if the roster will be loaded from the server when logging in.
     */
    public boolean isRosterLoadedAtLogin() {
        return rosterLoadedAtLogin;
    }

    /**
     * Sets if the roster will be loaded from the server when logging in. This
     * is the common behaviour for clients but sometimes clients may want to differ this
     * or just never do it if not interested in rosters.
     *
     * @param rosterLoadedAtLogin if the roster will be loaded from the server when logging in.
     */
    public void setRosterLoadedAtLogin(boolean rosterLoadedAtLogin) {
        this.rosterLoadedAtLogin = rosterLoadedAtLogin;
    }

    /**
     * Returns true if a {@link Session} will be requested on login if the server
     * supports it. Although this was mandatory on RFC 3921, RFC 6120/6121 don't
     * even mention this part of the protocol.
     *
     * @return true if a session has to be requested when logging in.
     */
    public boolean isLegacySessionDisabled() {
        return legacySessionDisabled;
    }

    /**
     * Sets if a {@link Session} will be requested on login if the server supports
     * it. Although this was mandatory on RFC 3921, RFC 6120/6121 don't even
     * mention this part of the protocol.
     *
     * @param legacySessionDisabled if a session has to be requested when logging in.
     */
    public void setLegacySessionDisabled(boolean legacySessionDisabled) {
        this.legacySessionDisabled = legacySessionDisabled;
    }

    /**
     * Returns a CallbackHandler to obtain information, such as the password or
     * principal information during the SASL authentication. A CallbackHandler
     * will be used <b>ONLY</b> if no password was specified during the login while
     * using SASL authentication.
     *
     * @return a CallbackHandler to obtain information, such as the password or
     * principal information during the SASL authentication.
     */
    public CallbackHandler getCallbackHandler() {
        return callbackHandler;
    }

    /**
     * Sets a CallbackHandler to obtain information, such as the password or
     * principal information during the SASL authentication. A CallbackHandler
     * will be used <b>ONLY</b> if no password was specified during the login while
     * using SASL authentication.
     *
     * @param callbackHandler to obtain information, such as the password or
     * principal information during the SASL authentication.
     */
    public void setCallbackHandler(CallbackHandler callbackHandler) {
        this.callbackHandler = callbackHandler;
    }

    /**
     * Returns the socket factory used to create new xmppConnection sockets.
     * This is useful when connecting through SOCKS5 proxies.
     *
     * @return socketFactory used to create new sockets.
     */
    public SocketFactory getSocketFactory() {
        return this.socketFactory;
    }

    /**
     * Returns an unmodifiable view of the candidate host addresses for this connection.
     *
     * @return the host addresses, resolved via DNS SRV or set explicitly.
     */
    public List<HostAddress> getHostAddresses() {
        return Collections.unmodifiableList(hostAddresses);
    }

    /**
     * Set the permanent roster store
     *
     * @param store the store used for roster versioning.
     */
    public void setRosterStore(RosterStore store) {
        rosterStore = store;
    }

    /**
     * Get the permanent roster store
     *
     * @return the store used for roster versioning, or null if none was set.
     */
    public RosterStore getRosterStore() {
        return rosterStore;
    }

    /**
     * An enumeration for TLS security modes that are available when making a connection
     * to the XMPP server.
     */
    public static enum SecurityMode {

        /**
         * Security via TLS encryption is required in order to connect. If the server
         * does not offer TLS or if the TLS negotiation fails, the connection to the server
         * will fail.
         */
        required,

        /**
         * Security via TLS encryption is used whenever it's available. This is the
         * default setting.
         */
        enabled,

        /**
         * Security via TLS encryption is disabled and only un-encrypted connections will
         * be used. If only TLS encryption is available from the server, the connection
         * will fail.
         */
        disabled
    }

    /**
     * Returns the username to use when trying to reconnect to the server.
     *
     * @return the username to use when trying to reconnect to the server.
     */
    public String getUsername() {
        return this.username;
    }

    /**
     * Returns the password to use when trying to reconnect to the server.
     *
     * @return the password to use when trying to reconnect to the server.
     */
    public String getPassword() {
        return this.password;
    }

    /**
     * Returns the resource to use when trying to reconnect to the server.
     *
     * @return the resource to use when trying to reconnect to the server.
     */
    public String getResource() {
        return resource;
    }

    /**
     * Returns true if an available presence should be sent when logging in while reconnecting.
     *
     * @return true if an available presence should be sent when logging in while reconnecting
     */
    public boolean isSendPresence() {
        return sendPresence;
    }

    /**
     * Stores the credentials used at login so that reconnection can reuse them.
     *
     * @param username the account username
     * @param password the account password
     * @param resource the XMPP resource
     */
    void setLoginInfo(String username, String password, String resource) {
        this.username = username;
        this.password = password;
        this.resource = resource;
    }

    /**
     * Performs the DNS SRV lookup for the XMPP domain, unless an explicit host/port
     * was given at construction time (in which case useDnsSrvRr is false and this is a no-op).
     *
     * @throws Exception if the DNS resolution fails
     */
    void maybeResolveDns() throws Exception {
        if (!useDnsSrvRr) return;
        hostAddresses = DNSUtil.resolveXMPPDomain(serviceName);
    }

    /**
     * Installs a single explicit host/port pair and disables the DNS SRV lookup.
     *
     * @param host the host where the XMPP server is running
     * @param port the port where the XMPP server is listening
     */
    private void initHostAddresses(String host, int port) {
        hostAddresses = new ArrayList<HostAddress>(1);
        HostAddress hostAddress;
        hostAddress = new HostAddress(host, port);
        hostAddresses.add(hostAddress);
        useDnsSrvRr = false;
    }
}
|
core/src/main/java/org/jivesoftware/smack/ConnectionConfiguration.java
|
/**
*
* Copyright 2003-2007 Jive Software.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jivesoftware.smack;
import org.jivesoftware.smack.packet.Session;
import org.jivesoftware.smack.proxy.ProxyInfo;
import org.jivesoftware.smack.util.DNSUtil;
import org.jivesoftware.smack.util.dns.HostAddress;
import javax.net.SocketFactory;
import javax.net.ssl.SSLContext;
import javax.security.auth.callback.CallbackHandler;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
/**
* Configuration to use while establishing the connection to the server. It is possible to
* configure the path to the trustore file that keeps the trusted CA root certificates and
* enable or disable all or some of the checkings done while verifying server certificates.<p>
*
* It is also possible to configure if TLS, SASL, and compression are used or not.
*
* @author Gaston Dombiak
*/
public class ConnectionConfiguration implements Cloneable {
/**
* Hostname of the XMPP server. Usually servers use the same service name as the name
* of the server. However, there are some servers like google where host would be
* talk.google.com and the serviceName would be gmail.com.
*/
private String serviceName;
protected List<HostAddress> hostAddresses;
private String keystorePath;
private String keystoreType;
private String pkcs11Library;
private SSLContext customSSLContext;
private boolean compressionEnabled = false;
/**
* Used to get information from the user
*/
private CallbackHandler callbackHandler;
private boolean debuggerEnabled = SmackConfiguration.DEBUG_ENABLED;
// Flag that indicates if a reconnection should be attempted when abruptly disconnected
private boolean reconnectionAllowed = true;
// Holds the socket factory that is used to generate the socket in the connection
private SocketFactory socketFactory;
// Holds the authentication information for future reconnections
private String username;
private String password;
private String resource;
private boolean sendPresence = true;
private boolean rosterLoadedAtLogin = true;
private boolean legacySessionDisabled = false;
private boolean useDnsSrvRr = true;
private SecurityMode securityMode = SecurityMode.enabled;
/**
* Permanent store for the Roster, needed for roster versioning
*/
private RosterStore rosterStore;
// Holds the proxy information (such as proxyhost, proxyport, username, password etc)
protected ProxyInfo proxy;
/**
* Creates a new ConnectionConfiguration for the specified service name.
* A DNS SRV lookup will be performed to find out the actual host address
* and port to use for the connection.
*
* @param serviceName the name of the service provided by an XMPP server.
*/
public ConnectionConfiguration(String serviceName) {
init(serviceName, ProxyInfo.forDefaultProxy());
}
/**
* Creates a new ConnectionConfiguration for the specified service name
* with specified proxy.
* A DNS SRV lookup will be performed to find out the actual host address
* and port to use for the connection.
*
* @param serviceName the name of the service provided by an XMPP server.
* @param proxy the proxy through which XMPP is to be connected
*/
public ConnectionConfiguration(String serviceName,ProxyInfo proxy) {
init(serviceName, proxy);
}
/**
* Creates a new ConnectionConfiguration using the specified host, port and
* service name. This is useful for manually overriding the DNS SRV lookup
* process that's used with the {@link #ConnectionConfiguration(String)}
* constructor. For example, say that an XMPP server is running at localhost
* in an internal network on port 5222 but is configured to think that it's
* "example.com" for testing purposes. This constructor is necessary to connect
* to the server in that case since a DNS SRV lookup for example.com would not
* point to the local testing server.
*
* @param host the host where the XMPP server is running.
* @param port the port where the XMPP is listening.
* @param serviceName the name of the service provided by an XMPP server.
*/
public ConnectionConfiguration(String host, int port, String serviceName) {
initHostAddresses(host, port);
init(serviceName, ProxyInfo.forDefaultProxy());
}
/**
* Creates a new ConnectionConfiguration using the specified host, port and
* service name. This is useful for manually overriding the DNS SRV lookup
* process that's used with the {@link #ConnectionConfiguration(String)}
* constructor. For example, say that an XMPP server is running at localhost
* in an internal network on port 5222 but is configured to think that it's
* "example.com" for testing purposes. This constructor is necessary to connect
* to the server in that case since a DNS SRV lookup for example.com would not
* point to the local testing server.
*
* @param host the host where the XMPP server is running.
* @param port the port where the XMPP is listening.
* @param serviceName the name of the service provided by an XMPP server.
* @param proxy the proxy through which XMPP is to be connected
*/
public ConnectionConfiguration(String host, int port, String serviceName, ProxyInfo proxy) {
initHostAddresses(host, port);
init(serviceName, proxy);
}
/**
* Creates a new ConnectionConfiguration for a connection that will connect
* to the desired host and port.
*
* @param host the host where the XMPP server is running.
* @param port the port where the XMPP is listening.
*/
public ConnectionConfiguration(String host, int port) {
initHostAddresses(host, port);
init(host, ProxyInfo.forDefaultProxy());
}
/**
* Creates a new ConnectionConfiguration for a connection that will connect
* to the desired host and port with desired proxy.
*
* @param host the host where the XMPP server is running.
* @param port the port where the XMPP is listening.
* @param proxy the proxy through which XMPP is to be connected
*/
public ConnectionConfiguration(String host, int port, ProxyInfo proxy) {
initHostAddresses(host, port);
init(host, proxy);
}
/**
 * Initialises the configuration state shared by all constructors.
 *
 * @param serviceName the XMPP domain of the target server.
 * @param proxy the proxy through which XMPP is to be connected.
 */
protected void init(String serviceName, ProxyInfo proxy) {
    this.serviceName = serviceName;
    this.proxy = proxy;
    // Sockets are created according to the supplied proxy (direct or proxied).
    socketFactory = proxy.getSocketFactory();
    // Keystore defaults used for TLS client authentication, if the server requests it.
    keystorePath = System.getProperty("javax.net.ssl.keyStore");
    keystoreType = "jks";
    pkcs11Library = "pkcs11.config";
}
/**
* Sets the server name, also known as XMPP domain of the target server.
*
* @param serviceName the XMPP domain of the target server.
*/
void setServiceName(String serviceName) {
this.serviceName = serviceName;
}
/**
* Returns the server name of the target server.
*
* @return the server name of the target server.
*/
public String getServiceName() {
return serviceName;
}
/**
* Returns the TLS security mode used when making the connection. By default,
* the mode is {@link SecurityMode#enabled}.
*
* @return the security mode.
*/
public SecurityMode getSecurityMode() {
return securityMode;
}
/**
* Sets the TLS security mode used when making the connection. By default,
* the mode is {@link SecurityMode#enabled}.
*
* @param securityMode the security mode.
*/
public void setSecurityMode(SecurityMode securityMode) {
this.securityMode = securityMode;
}
/**
 * Returns the path to the keystore file. The key store file contains the
 * certificates that may be used to authenticate the client to the server,
 * in the event the server requests or requires it.
 *
 * @return the path to the keystore file.
 */
public String getKeystorePath() {
    return keystorePath;
}
/**
* Sets the path to the keystore file. The key store file contains the
* certificates that may be used to authenticate the client to the server,
* in the event the server requests or requires it.
*
* @param keystorePath the path to the keystore file.
*/
public void setKeystorePath(String keystorePath) {
this.keystorePath = keystorePath;
}
/**
* Returns the keystore type, or <tt>null</tt> if it's not set.
*
* @return the keystore type.
*/
public String getKeystoreType() {
return keystoreType;
}
/**
* Sets the keystore type.
*
* @param keystoreType the keystore type.
*/
public void setKeystoreType(String keystoreType) {
this.keystoreType = keystoreType;
}
/**
* Returns the PKCS11 library file location, needed when the
* Keystore type is PKCS11.
*
* @return the path to the PKCS11 library file
*/
public String getPKCS11Library() {
return pkcs11Library;
}
/**
* Sets the PKCS11 library file location, needed when the
* Keystore type is PKCS11
*
* @param pkcs11Library the path to the PKCS11 library file
*/
public void setPKCS11Library(String pkcs11Library) {
this.pkcs11Library = pkcs11Library;
}
/**
* Gets the custom SSLContext for SSL sockets. This is null by default.
*
* @return the SSLContext previously set with setCustomSSLContext() or null.
*/
public SSLContext getCustomSSLContext() {
return this.customSSLContext;
}
/**
* Sets a custom SSLContext for creating SSL sockets. A custom Context causes all other
* SSL/TLS realted settings to be ignored.
*
* @param context the custom SSLContext for new sockets; null to reset default behavior.
*/
public void setCustomSSLContext(SSLContext context) {
this.customSSLContext = context;
}
/**
* Returns true if the connection is going to use stream compression. Stream compression
* will be requested after TLS was established (if TLS was enabled) and only if the server
* offered stream compression. With stream compression network traffic can be reduced
* up to 90%. By default compression is disabled.
*
* @return true if the connection is going to use stream compression.
*/
public boolean isCompressionEnabled() {
return compressionEnabled;
}
/**
* Sets if the connection is going to use stream compression. Stream compression
* will be requested after TLS was established (if TLS was enabled) and only if the server
* offered stream compression. With stream compression network traffic can be reduced
* up to 90%. By default compression is disabled.
*
* @param compressionEnabled if the connection is going to use stream compression.
*/
public void setCompressionEnabled(boolean compressionEnabled) {
this.compressionEnabled = compressionEnabled;
}
/**
* Returns true if the new connection about to be establish is going to be debugged. By
* default the value of {@link SmackConfiguration#DEBUG_ENABLED} is used.
*
* @return true if the new connection about to be establish is going to be debugged.
*/
public boolean isDebuggerEnabled() {
return debuggerEnabled;
}
/**
* Sets if the new connection about to be establish is going to be debugged. By
* default the value of {@link SmackConfiguration#DEBUG_ENABLED} is used.
*
* @param debuggerEnabled if the new connection about to be establish is going to be debugged.
*/
public void setDebuggerEnabled(boolean debuggerEnabled) {
this.debuggerEnabled = debuggerEnabled;
}
/**
* Sets if the reconnection mechanism is allowed to be used. By default
* reconnection is allowed.
*
* @param isAllowed if the reconnection mechanism is allowed to use.
*/
public void setReconnectionAllowed(boolean isAllowed) {
this.reconnectionAllowed = isAllowed;
}
/**
* Returns if the reconnection mechanism is allowed to be used. By default
* reconnection is allowed.
*
* @return if the reconnection mechanism is allowed to be used.
*/
public boolean isReconnectionAllowed() {
return this.reconnectionAllowed;
}
/**
* Sets the socket factory used to create new xmppConnection sockets.
* This is useful when connecting through SOCKS5 proxies.
*
* @param socketFactory used to create new sockets.
*/
public void setSocketFactory(SocketFactory socketFactory) {
this.socketFactory = socketFactory;
}
/**
* Sets if an initial available presence will be sent to the server. By default
* an available presence will be sent to the server indicating that this presence
* is not online and available to receive messages. If you want to log in without
* being 'noticed' then pass a <tt>false</tt> value.
*
* @param sendPresence true if an initial available presence will be sent while logging in.
*/
public void setSendPresence(boolean sendPresence) {
this.sendPresence = sendPresence;
}
/**
* Returns true if the roster will be loaded from the server when logging in. This
* is the common behaviour for clients but sometimes clients may want to differ this
* or just never do it if not interested in rosters.
*
* @return true if the roster will be loaded from the server when logging in.
*/
public boolean isRosterLoadedAtLogin() {
return rosterLoadedAtLogin;
}
/**
* Sets if the roster will be loaded from the server when logging in. This
* is the common behaviour for clients but sometimes clients may want to differ this
* or just never do it if not interested in rosters.
*
* @param rosterLoadedAtLogin if the roster will be loaded from the server when logging in.
*/
public void setRosterLoadedAtLogin(boolean rosterLoadedAtLogin) {
this.rosterLoadedAtLogin = rosterLoadedAtLogin;
}
/**
* Returns true if a {@link Session} will be requested on login if the server
* supports it. Although this was mandatory on RFC 3921, RFC 6120/6121 don't
* even mention this part of the protocol.
*
* @return true if a session has to be requested when logging in.
*/
public boolean isLegacySessionDisabled() {
return legacySessionDisabled;
}
/**
* Sets if a {@link Session} will be requested on login if the server supports
* it. Although this was mandatory on RFC 3921, RFC 6120/6121 don't even
* mention this part of the protocol.
*
* @param legacySessionDisabled if a session has to be requested when logging in.
*/
public void setLegacySessionDisabled(boolean legacySessionDisabled) {
this.legacySessionDisabled = legacySessionDisabled;
}
/**
* Returns a CallbackHandler to obtain information, such as the password or
* principal information during the SASL authentication. A CallbackHandler
* will be used <b>ONLY</b> if no password was specified during the login while
* using SASL authentication.
*
* @return a CallbackHandler to obtain information, such as the password or
* principal information during the SASL authentication.
*/
public CallbackHandler getCallbackHandler() {
return callbackHandler;
}
/**
* Sets a CallbackHandler to obtain information, such as the password or
* principal information during the SASL authentication. A CallbackHandler
* will be used <b>ONLY</b> if no password was specified during the login while
* using SASL authentication.
*
* @param callbackHandler to obtain information, such as the password or
* principal information during the SASL authentication.
*/
public void setCallbackHandler(CallbackHandler callbackHandler) {
this.callbackHandler = callbackHandler;
}
/**
* Returns the socket factory used to create new xmppConnection sockets.
* This is useful when connecting through SOCKS5 proxies.
*
* @return socketFactory used to create new sockets.
*/
public SocketFactory getSocketFactory() {
return this.socketFactory;
}
/**
 * Returns an unmodifiable view of the host addresses this configuration will attempt
 * to connect to, either explicitly configured or resolved via DNS SRV lookup.
 *
 * @return an unmodifiable list of host addresses.
 */
public List<HostAddress> getHostAddresses() {
    return Collections.unmodifiableList(hostAddresses);
}
/**
 * Set the permanent roster store, needed for roster versioning.
 *
 * @param store the store in which the roster is persisted.
 */
public void setRosterStore(RosterStore store) {
    rosterStore = store;
}
/**
 * Get the permanent roster store, or <tt>null</tt> if none was set.
 *
 * @return the roster store, may be <tt>null</tt>.
 */
public RosterStore getRosterStore() {
    return rosterStore;
}
/**
 * An enumeration for TLS security modes that are available when making a connection
 * to the XMPP server.
 */
public static enum SecurityMode {
    /**
     * Security via TLS encryption is required in order to connect. If the server
     * does not offer TLS or if the TLS negotiation fails, the connection to the server
     * will fail.
     */
    required,
    /**
     * Security via TLS encryption is used whenever it's available. This is the
     * default setting.
     */
    enabled,
    /**
     * Security via TLS encryption is disabled and only un-encrypted connections will
     * be used. If only TLS encryption is available from the server, the connection
     * will fail.
     */
    disabled
}
/**
 * Returns the username to use when trying to reconnect to the server.
 *
 * @return the username to use when trying to reconnect to the server.
 */
// Widened from package-private to public: there is no reason to hide this
// read-only accessor from API users (SMACK-556); widening access is
// backward-compatible for all existing callers.
public String getUsername() {
    return this.username;
}
/**
 * Returns the password to use when trying to reconnect to the server.
 *
 * @return the password to use when trying to reconnect to the server.
 */
// Widened from package-private to public (SMACK-556); backward-compatible.
public String getPassword() {
    return this.password;
}
/**
 * Returns the resource to use when trying to reconnect to the server.
 *
 * @return the resource to use when trying to reconnect to the server.
 */
// Widened from package-private to public (SMACK-556); backward-compatible.
public String getResource() {
    return resource;
}
/**
 * Returns true if an available presence should be sent when logging in while reconnecting.
 *
 * @return true if an available presence should be sent when logging in while reconnecting
 */
// Widened from package-private to public (SMACK-556); backward-compatible.
public boolean isSendPresence() {
    return sendPresence;
}
/**
 * Stores the credentials of the last login so they can be reused when trying to
 * reconnect to the server.
 *
 * @param username the username to use when trying to reconnect.
 * @param password the password to use when trying to reconnect.
 * @param resource the resource to use when trying to reconnect.
 */
void setLoginInfo(String username, String password, String resource) {
    this.username = username;
    this.password = password;
    this.resource = resource;
}
/**
 * Resolves the XMPP host addresses for the configured service name via DNS SRV,
 * unless SRV lookup was disabled by supplying an explicit host and port.
 *
 * @throws Exception if the DNS resolution fails.
 */
void maybeResolveDns() throws Exception {
    if (!useDnsSrvRr) {
        // Host addresses were configured explicitly; nothing to resolve.
        return;
    }
    hostAddresses = DNSUtil.resolveXMPPDomain(serviceName);
}
/**
 * Installs a single, explicitly configured host address and disables DNS SRV lookup.
 *
 * @param host the host where the XMPP server is running.
 * @param port the port where the XMPP server is listening.
 */
private void initHostAddresses(String host, int port) {
    hostAddresses = new ArrayList<HostAddress>(1);
    hostAddresses.add(new HostAddress(host, port));
    // An explicit host/port overrides DNS SRV resolution.
    useDnsSrvRr = false;
}
}
|
Make ConnectionConfiguration getters public
No need to keep them package-private. SMACK-556
|
core/src/main/java/org/jivesoftware/smack/ConnectionConfiguration.java
|
Make ConnectionConfiguration getters public
|
|
Java
|
apache-2.0
|
8fdbb682e13b65659b39c7ee5593e90dcc808b84
| 0
|
tensorics/tensorics-core
|
/**
* Copyright (c) 2013 European Organisation for Nuclear Research (CERN), All Rights Reserved.
*/
package org.tensorics.core.tensor;
import java.util.Collections;
import java.util.HashSet;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.Set;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
/**
* Default Implementation of {@link Tensor}.
* <p>
* By constraint of creation it holds a map of {@link Position} of certain type to values of type T, such that ALL
* Positions contains the same number and type of coordinates. Number and type of coordinates can be accessed and
* explored via {@link Shape}.
* <p>
* There is a special type of Tensor that has ZERO dimensionality. It can be obtained via a factory method.
* <p>
* {@link ImmutableTensor} is immutable.
* <p>
* The toString() method does not print all the tensor entries.
*
* @author agorzaws, kfuchsbe
* @param <T> type of values in Tensor.
*/
@SuppressWarnings({ "PMD.CyclomaticComplexity", "PMD.TooManyMethods" })
public class ImmutableTensor<T> implements Tensor<T> {

    /** Initial capacity hint for the buffer used by toString(); performance only. */
    private static final int TOSTRING_BUFFER_SIZE = 64;
    /** How many positions toString() prints at the beginning and at the end before eliding. */
    private static final int POSITION_TO_DISPLAY = 10;

    // Maps every position of the tensor to its entry; by construction of the builder
    // all keys carry the dimensions exposed via shape().
    private final Map<Position, Tensor.Entry<T>> entries; // NOSONAR
    private final Shape shape; // NOSONAR
    private final Context context; // NOSONAR

    /**
     * Package-private constructor to be called from builder
     *
     * @param builder to be used when {@link ImmutableTensor} is created.
     */
    ImmutableTensor(Builder<T> builder) {
        this.entries = builder.createEntries();
        this.shape = Shape.viewOf(builder.getDimensions(), this.entries.keySet());
        this.context = builder.getContext();
    }

    /**
     * Returns a builder for an {@link ImmutableTensor}. As argument it takes set of class of coordinates which
     * represent the dimensions of the tensor.
     *
     * @param dimensions a set of classes that can later be used as coordinates for the tensor entries.
     * @return a builder for {@link ImmutableTensor}
     * @param <T> type of values in Tensor.
     */
    public static final <T> Builder<T> builder(Set<? extends Class<?>> dimensions) {
        return new Builder<T>(dimensions);
    }

    /**
     * Returns a builder for an {@link ImmutableTensor}. The dimensions (classes of coordinates) of the future tensor
     * have to be given as arguments here.
     *
     * @param dimensions the dimensions of the tensor to create
     * @return a builder for an immutable tensor
     * @param <T> the type of values of the tensor
     */
    public static final <T> Builder<T> builder(Class<?>... dimensions) {
        return builder(ImmutableSet.copyOf(dimensions));
    }

    /**
     * Creates a tensor from the given map, where the map has to contain the positions as keys and the values as values.
     *
     * @param dimensions the desired dimensions of the tensor. This has to be consistent with the position - keys in the
     *            map.
     * @param map the map from which to construct a tensor
     * @return a new immutable tensor
     */
    public static final <T> Tensor<T> fromMap(Set<? extends Class<?>> dimensions, Map<Position, T> map) {
        Builder<T> builder = builder(dimensions);
        for (Map.Entry<Position, T> entry : map.entrySet()) {
            builder.at(entry.getKey()).put(entry.getValue());
        }
        return builder.build();
    }

    /**
     * Returns the builder that can create special tensor of dimension size equal ZERO.
     *
     * @param value to be used.
     * @return a builder for {@link ImmutableTensor}
     * @param <T> type of values in Tensor.
     */
    public static final <T> Tensor<T> zeroDimensionalOf(T value) {
        Builder<T> builder = builder(Collections.<Class<?>> emptySet());
        builder.at(Position.empty()).put(value);
        return builder.build();
    }

    /**
     * Creates an immutable copy of the given tensor.
     *
     * @param tensor the tensor whose element to copy
     * @return new immutable Tensor
     */
    public static final <T> Tensor<T> copyOf(Tensor<T> tensor) {
        Builder<T> builder = builder(tensor.shape().dimensionSet());
        builder.putAll(tensor.entrySet());
        // NOTE: unlike builderFrom(), the context of the source tensor is preserved here.
        builder.setTensorContext(tensor.context());
        return builder.build();
    }

    /**
     * Returns a builder for an {@link ImmutableTensor} which is initialised with the given {@link ImmutableTensor}.
     * The context of the given tensor is NOT carried over into the builder.
     *
     * @param tensor a Tensor with which the {@link Builder} is initialized
     * @return a {@link Builder} for an {@link ImmutableTensor}
     * @param <T> type of values in Tensor.
     */
    public static <T> Builder<T> builderFrom(Tensor<T> tensor) {
        Builder<T> builder = builder(tensor.shape().dimensionSet());
        builder.putAll(tensor.entrySet());
        return builder;
    }

    @Override
    public T get(Position position) {
        return findEntryOrThrow(position).getValue();
    }

    @Override
    public Context context() {
        return context;
    }

    @Override
    public Set<Tensor.Entry<T>> entrySet() {
        // Defensive copy: callers may freely mutate the returned set without
        // affecting this tensor.
        return new HashSet<>(this.entries.values());
    }

    @Override
    @SafeVarargs
    public final T get(Object... coordinates) {
        return get(Position.of(coordinates));
    }

    @Override
    public Shape shape() {
        return this.shape;
    }

    // Looks up the entry for the given position, failing loudly if it is absent.
    private Tensor.Entry<T> findEntryOrThrow(Position position) {
        Tensor.Entry<T> entry = findEntryOrNull(position);
        if (entry == null) {
            throw new NoSuchElementException("Entry for position '" + position + "' is not contained in this tensor.");
        }
        return entry;
    }

    // Plain map lookup; returns null for positions not contained in this tensor.
    private Tensor.Entry<T> findEntryOrNull(Position position) {
        return this.entries.get(position);
    }

    /**
     * A builder for an immutable tensor.
     *
     * @author kfuchsbe
     * @param <S> the type of the values to be added
     */
    public static final class Builder<S> extends AbstractTensorBuilder<S> {

        // Accumulates the entries put so far; materialised once in createEntries().
        private final ImmutableMap.Builder<Position, Entry<S>> entries = ImmutableMap.builder();

        Builder(Set<? extends Class<?>> dimensions) {
            super(dimensions);
        }

        /**
         * Builds an {@link ImmutableTensor} from all elements put before.
         *
         * @return an {@link ImmutableTensor}.
         */
        @Override
        public ImmutableTensor<S> build() {
            return new ImmutableTensor<S>(this);
        }

        // Materialises the accumulated entries into an immutable map.
        protected Map<Position, Tensor.Entry<S>> createEntries() {
            return this.entries.build();
        }

        @Override
        protected void putItAt(S value, Position position) {
            this.entries.put(position, new ImmutableEntry<>(position, value));
        }
    }

    /**
     * When printing the tensor content, output is automatically not larger than N at the beginning and N at the end of
     * the Tensor entries; the elided middle is summarised by a single marker.
     */
    @Override
    public String toString() {
        StringBuffer buffer = new StringBuffer(TOSTRING_BUFFER_SIZE);
        int totalSize = this.shape.positionSet().size();
        int index = 1;
        for (Position position : this.shape.positionSet()) {
            if (index < POSITION_TO_DISPLAY || index > totalSize - POSITION_TO_DISPLAY) {
                buffer.append(position + "=" + get(position) + "; ");
            } else if (index == POSITION_TO_DISPLAY) {
                // Emit the "skipped" marker exactly once, at the elision boundary.
                buffer.append("(.." + (totalSize - 2 * POSITION_TO_DISPLAY) + "skipped positions..)");
            }
            index++;
        }
        if (buffer.length() > 1) {
            // Strip the trailing "; " separator.
            buffer.setLength(buffer.length() - 2);
        }
        return Coordinates.dimensionsWithoutClassPath(this) + "{" + buffer + "}";
    }

    // Equality and hash are based on entries, shape and context, keeping the
    // equals/hashCode contract consistent.
    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;
        result = prime * result + ((context == null) ? 0 : context.hashCode());
        result = prime * result + ((entries == null) ? 0 : entries.hashCode());
        result = prime * result + ((shape == null) ? 0 : shape.hashCode());
        return result;
    }

    @Override
    @SuppressWarnings("PMD.NPathComplexity")
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (obj == null) {
            return false;
        }
        if (getClass() != obj.getClass()) {
            return false;
        }
        ImmutableTensor<?> other = (ImmutableTensor<?>) obj;
        if (context == null) {
            if (other.context != null) {
                return false;
            }
        } else if (!context.equals(other.context)) {
            return false;
        }
        if (entries == null) {
            if (other.entries != null) {
                return false;
            }
        } else if (!entries.equals(other.entries)) {
            return false;
        }
        if (shape == null) {
            if (other.shape != null) {
                return false;
            }
        } else if (!shape.equals(other.shape)) {
            return false;
        }
        return true;
    }
}
|
src/java/org/tensorics/core/tensor/ImmutableTensor.java
|
/**
* Copyright (c) 2013 European Organisation for Nuclear Research (CERN), All Rights Reserved.
*/
package org.tensorics.core.tensor;
import java.util.Collections;
import java.util.HashSet;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.Set;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
/**
* Default Implementation of {@link Tensor}.
* <p>
* By constraint of creation it holds a map of {@link Position} of certain type to values of type T, such that ALL
* Positions contains the same number and type of coordinates. Number and type of coordinates can be accessed and
* explored via {@link Shape}.
* <p>
* There is a special type of Tensor that has ZERO dimensionality. It can be obtained via a factory method.
* <p>
* {@link ImmutableTensor} is immutable.
* <p>
* The toString() method does not print all the tensor entries.
*
* @author agorzaws, kfuchsbe
* @param <T> type of values in Tensor.
*/
@SuppressWarnings({ "PMD.CyclomaticComplexity", "PMD.TooManyMethods" })
public class ImmutableTensor<T> implements Tensor<T> {
private static final int POSITION_TO_DISPLAY = 10;
private final Map<Position, Tensor.Entry<T>> entries; // NOSONAR
private final Shape shape; // NOSONAR
private final Context context; // NOSONAR
/**
* Package-private constructor to be called from builder
*
* @param builder to be used when {@link ImmutableTensor} is created.
*/
ImmutableTensor(Builder<T> builder) {
this.entries = builder.createEntries();
this.shape = Shape.viewOf(builder.getDimensions(), this.entries.keySet());
this.context = builder.getContext();
}
/**
* Returns a builder for an {@link ImmutableTensor}. As argument it takes set of class of coordinates which
* represent the dimensions of the tensor.
*
* @param dimensions a set of classes that can later be used as coordinates for the tensor entries.
* @return a builder for {@link ImmutableTensor}
* @param <T> type of values in Tensor.
*/
public static final <T> Builder<T> builder(Set<? extends Class<?>> dimensions) {
return new Builder<T>(dimensions);
}
/**
* Returns a builder for an {@link ImmutableTensor}. The dimensions (classes of coordinates) of the future tensor
* have to be given as arguments here.
*
* @param dimensions the dimensions of the tensor to create
* @return a builder for an immutable tensor
* @param <T> the type of values of the tensor
*/
public static final <T> Builder<T> builder(Class<?>... dimensions) {
return builder(ImmutableSet.copyOf(dimensions));
}
/**
* Creates a tensor from the given map, where the map has to contain the positions as keys and the values as values.
*
* @param dimensions the desired dimensions of the tensor. This has to be consistent with the position - keys in the
* map.
* @param map the map from which to construct a tensor
* @return a new immutable tensor
*/
public static final <T> Tensor<T> fromMap(Set<? extends Class<?>> dimensions, Map<Position, T> map) {
Builder<T> builder = builder(dimensions);
for (Map.Entry<Position, T> entry : map.entrySet()) {
builder.at(entry.getKey()).put(entry.getValue());
}
return builder.build();
}
/**
* Returns the builder that can create special tensor of dimension size equal ZERO.
*
* @param value to be used.
* @return a builder for {@link ImmutableTensor}
* @param <T> type of values in Tensor.
*/
public static final <T> Tensor<T> zeroDimensionalOf(T value) {
Builder<T> builder = builder(Collections.<Class<?>> emptySet());
builder.at(Position.empty()).put(value);
return builder.build();
}
/**
* Creates an immutable copy of the given tensor.
*
* @param tensor the tensor whose element to copy
* @return new immutable Tensor
*/
public static final <T> Tensor<T> copyOf(Tensor<T> tensor) {
Builder<T> builder = builder(tensor.shape().dimensionSet());
builder.putAll(tensor.entrySet());
builder.setTensorContext(tensor.context());
return builder.build();
}
/**
* Returns a builder for an {@link ImmutableTensor} which is initiliased with the given {@link ImmutableTensor}.
*
* @param tensor a Tensor with which the {@link Builder} is initialized
* @return a {@link Builder} for an {@link ImmutableTensor}
* @param <T> type of values in Tensor.
*/
public static <T> Builder<T> builderFrom(Tensor<T> tensor) {
Builder<T> builder = builder(tensor.shape().dimensionSet());
builder.putAll(tensor.entrySet());
return builder;
}
@Override
public T get(Position position) {
return findEntryOrThrow(position).getValue();
}
@Override
public Context context() {
return context;
}
@Override
public Set<Tensor.Entry<T>> entrySet() {
return new HashSet<>(this.entries.values());
}
@Override
@SafeVarargs
public final T get(Object... coordinates) {
return get(Position.of(coordinates));
}
@Override
public Shape shape() {
return this.shape;
}
private Tensor.Entry<T> findEntryOrThrow(Position position) {
Tensor.Entry<T> entry = findEntryOrNull(position);
if (entry == null) {
throw new NoSuchElementException("Entry for position '" + position + "' is not contained in this tensor.");
}
return entry;
}
private Tensor.Entry<T> findEntryOrNull(Position position) {
return this.entries.get(position);
}
/**
* A builder for an immutable tensor.
*
* @author kfuchsbe
* @param <S> the type of the values to be added
*/
public static final class Builder<S> extends AbstractTensorBuilder<S> {
private final ImmutableMap.Builder<Position, Entry<S>> entries = ImmutableMap.builder();
Builder(Set<? extends Class<?>> dimensions) {
super(dimensions);
}
/**
* Builds an {@link ImmutableTensor} from all elements put before.
*
* @return an {@link ImmutableTensor}.
*/
@Override
public ImmutableTensor<S> build() {
return new ImmutableTensor<S>(this);
}
protected Map<Position, Tensor.Entry<S>> createEntries() {
return this.entries.build();
}
@Override
protected void putItAt(S value, Position position) {
this.entries.put(position, new ImmutableEntry<>(position, value));
}
}
/**
* When printing the tensor content output is automatically not larger then N ant the beginning and N at the end of
* the Tensor entries.
*/
@Override
public String toString() {
StringBuffer buffer = new StringBuffer(20);
int totalSize = this.shape.positionSet().size();
int index = 1;
for (Position position : this.shape.positionSet()) {
if (index < POSITION_TO_DISPLAY || index > totalSize - POSITION_TO_DISPLAY) {
buffer.append(position + "=" + get(position) + "; ");
} else if (index == POSITION_TO_DISPLAY) {
buffer.append("(.." + (totalSize - 2 * POSITION_TO_DISPLAY) + "skipped positions..)");
}
index++;
}
if (buffer.length() > 1) {
buffer.setLength(buffer.length() - 2);
}
return Coordinates.dimensionsWithoutClassPath(this) + "{" + buffer + "}";
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((context == null) ? 0 : context.hashCode());
result = prime * result + ((entries == null) ? 0 : entries.hashCode());
result = prime * result + ((shape == null) ? 0 : shape.hashCode());
return result;
}
@Override
@SuppressWarnings("PMD.NPathComplexity")
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
ImmutableTensor<?> other = (ImmutableTensor<?>) obj;
if (context == null) {
if (other.context != null) {
return false;
}
} else if (!context.equals(other.context)) {
return false;
}
if (entries == null) {
if (other.entries != null) {
return false;
}
} else if (!entries.equals(other.entries)) {
return false;
}
if (shape == null) {
if (other.shape != null) {
return false;
}
} else if (!shape.equals(other.shape)) {
return false;
}
return true;
}
}
|
some more fixes
git-svn-id: 44110302500ff4d6168e3867631ad1bb4eb9722b@8670 6cd15df7-5b2d-4548-a7df-5dcce267a22b
|
src/java/org/tensorics/core/tensor/ImmutableTensor.java
|
some more fixes
|
|
Java
|
apache-2.0
|
7ccaaf5500dfd29b83fc12e270d0e7586f9a92a2
| 0
|
Maurice-Betzel/lmdbjava-jca-resource-adapter,Maurice-Betzel/lmdbjava-resource-adapter
|
/*
Copyright 2017 Maurice Betzel
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package net.betzel.lmdb.jca;
import java.io.PrintWriter;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.logging.Logger;
import javax.resource.ResourceException;
import javax.resource.spi.ConnectionEvent;
import javax.resource.spi.ConnectionEventListener;
import javax.resource.spi.ConnectionRequestInfo;
import javax.resource.spi.LocalTransaction;
import javax.resource.spi.ManagedConnection;
import javax.resource.spi.ManagedConnectionMetaData;
import javax.security.auth.Subject;
import javax.transaction.xa.XAResource;
import org.lmdbjava.Dbi;
import org.lmdbjava.DbiFlags;
import org.lmdbjava.Env;
import org.lmdbjava.Txn;
import static java.nio.charset.StandardCharsets.UTF_8;
/**
* LMDbManagedConnection
*
* @version $Revision: $
*/
public class LMDbManagedConnection implements ManagedConnection {
/**
* The logger
*/
private static Logger log = Logger.getLogger(LMDbManagedConnection.class.getName());
/**
* The logwriter
*/
private PrintWriter logwriter;
/**
* ManagedConnectionFactory
*/
private LMDbManagedConnectionFactory managedConnectionFactory;
/**
* Listeners
*/
private List<ConnectionEventListener> listeners;
/**
* Connections
*/
private Set<LMDbConnectionImpl> connections;
/**
* The lmdb environment
*/
private Env environment;
/**
* The database
*/
private Dbi<ByteBuffer> dbi;
// XA transaction support handle; initialisation is not shown in this excerpt.
private XAResource xaResource;
// Local (non-XA) transaction handle; initialisation is not shown in this excerpt.
private LocalTransaction txResource;
/**
 * Default constructor
 *
 * @param managedConnectionFactory managedConnectionFactory
 * @param environment the LMDB environment this managed connection operates on
 */
public LMDbManagedConnection(LMDbManagedConnectionFactory managedConnectionFactory, Env environment) {
    this.managedConnectionFactory = managedConnectionFactory;
    this.environment = environment;
    this.logwriter = null;
    // Synchronized: listeners are presumably registered/removed concurrently by the
    // application server — TODO confirm.
    this.listeners = Collections.synchronizedList(new ArrayList<ConnectionEventListener>(1));
    this.connections = new HashSet<LMDbConnectionImpl>();
}
/**
 * Creates a new connection handle for every underlying physical Dbi connection
 * represented by the ManagedConnection instance.
 *
 * @param subject Security context as JAAS subject
 * @param cxRequestInfo ConnectionRequestInfo instance
 * @return generic Object instance representing the connection handle.
 * @throws ResourceException generic exception if operation fails
 */
public Object getConnection(Subject subject, ConnectionRequestInfo cxRequestInfo) throws ResourceException {
    log.finest("getConnection()");
    LMDbConnectionRequestInfo connectionRequestInfo = (LMDbConnectionRequestInfo) cxRequestInfo;
    //TODO new connection object for every named database
    // NOTE(review): the Dbi is opened lazily from the FIRST request only; a later request
    // with a different database name silently reuses the first Dbi. The check-then-act
    // below is also unsynchronized — confirm the container serialises these calls.
    if(this.dbi == null) {
        this.dbi = environment.openDbi(connectionRequestInfo.getDatabaseName(), DbiFlags.MDB_CREATE);
    }
    LMDbConnectionImpl connection = new LMDbConnectionImpl(this, managedConnectionFactory);
    connections.add(connection);
    return connection;
}
/**
* Used by the container to change the association of an
* application-level connection handle with a ManagedConneciton instance.
*
* @param connection Application-level connection handle
* @throws ResourceException generic exception if operation fails
*/
public void associateConnection(Object connection) throws ResourceException {
log.finest("associateConnection()");
if (connection == null) {
throw new ResourceException("Null connection handle");
}
if (!(connection instanceof LMDbConnectionImpl)) {
throw new ResourceException("Wrong connection handle");
}
LMDbConnectionImpl handle = (LMDbConnectionImpl) connection;
handle.setManagedConnection(this);
connections.add(handle);
}
/**
* Application server calls this method to force any cleanup on the ManagedConnection instance.
*
* @throws ResourceException generic exception if operation fails
*/
public void cleanup() throws ResourceException {
log.finest("cleanup()");
for (LMDbConnectionImpl connection : connections) {
connection.setManagedConnection(null);
}
connections.clear();
}
/**
* Destroys the physical connection to the underlying resource manager.
*
* @throws ResourceException generic exception if operation fails
*/
public void destroy() throws ResourceException {
log.finest("destroy()");
dbi.close();
dbi = null;
}
/**
* Adds a connection event listener to the ManagedConnection instance.
*
* @param listener A new ConnectionEventListener to be registered
*/
public void addConnectionEventListener(ConnectionEventListener listener) {
log.finest("addConnectionEventListener()");
if (listener == null) {
throw new IllegalArgumentException("Listener is null");
}
listeners.add(listener);
}
/**
* Removes an already registered connection event listener from the ManagedConnection instance.
*
* @param listener already registered connection event listener to be removed
*/
public void removeConnectionEventListener(ConnectionEventListener listener) {
log.finest("removeConnectionEventListener()");
if (listener == null) {
throw new IllegalArgumentException("Listener is null");
}
listeners.remove(listener);
}
/**
* Close handle
*
* @param handle The handle
*/
void closeHandle(LMDbConnection handle) {
log.finest("closeHandle()");
connections.remove((LMDbConnectionImpl) handle);
ConnectionEvent event = new ConnectionEvent(this, ConnectionEvent.CONNECTION_CLOSED);
event.setConnectionHandle(handle);
for (ConnectionEventListener cel : listeners) {
cel.connectionClosed(event);
}
}
/**
* Gets the log writer for this ManagedConnection instance.
*
* @return Character output stream associated with this Managed-Connection instance
* @throws ResourceException generic exception if operation fails
*/
public PrintWriter getLogWriter() throws ResourceException {
log.finest("getLogWriter()");
return logwriter;
}
/**
* Sets the log writer for this ManagedConnection instance.
*
* @param out Character Output stream to be associated
* @throws ResourceException generic exception if operation fails
*/
public void setLogWriter(PrintWriter out) throws ResourceException {
log.finest("setLogWriter()");
logwriter = out;
}
/**
* Returns an <code>javax.resource.spi.LocalTransaction</code> instance.
*
* @return LocalTransaction instance
* @throws ResourceException generic exception if operation fails
*/
public LocalTransaction getLocalTransaction() throws ResourceException {
log.finest("getLocalTransaction()");
return txResource == null ? new LMDbTXResource(this) : txResource;
}
/**
* Returns an <code>javax.transaction.xa.XAresource</code> instance.
*
* @return XAResource instance
* @throws ResourceException generic exception if operation fails
*/
public XAResource getXAResource() throws ResourceException {
log.finest("getXAResource()");
if(xaResource == null) {
this.xaResource = new LMDbXAResource(this);
}
return xaResource;
}
/**
* Gets the metadata information for this connection's underlying EIS resource manager instance.
*
* @return ManagedConnectionMetaData instance
* @throws ResourceException generic exception if operation fails
*/
public ManagedConnectionMetaData getMetaData() throws ResourceException {
log.finest("getMetaData()");
return new LMDbManagedConnectionMetaData(managedConnectionFactory.getMaxReaders());
}
public List<ConnectionEventListener> getListeners() {
return listeners;
}
Dbi getDbi() {
return dbi;
}
String getDatabaseName() {
return String.valueOf(UTF_8.decode(ByteBuffer.wrap(dbi.getName())));
}
Txn getWriteTransaction() {
return environment.txnWrite();
}
Txn getReadTransaction() {
return environment.txnRead();
}
}
|
src/main/java/net/betzel/lmdb/jca/LMDbManagedConnection.java
|
/*
Copyright 2017 Maurice Betzel
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package net.betzel.lmdb.jca;
import java.io.PrintWriter;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.logging.Logger;
import javax.resource.ResourceException;
import javax.resource.spi.ConnectionEvent;
import javax.resource.spi.ConnectionEventListener;
import javax.resource.spi.ConnectionRequestInfo;
import javax.resource.spi.LocalTransaction;
import javax.resource.spi.ManagedConnection;
import javax.resource.spi.ManagedConnectionMetaData;
import javax.security.auth.Subject;
import javax.transaction.xa.XAResource;
import org.lmdbjava.Dbi;
import org.lmdbjava.DbiFlags;
import org.lmdbjava.Env;
import org.lmdbjava.Txn;
import static java.nio.charset.StandardCharsets.UTF_8;
/**
* LMDbManagedConnection
*
* @version $Revision: $
*/
public class LMDbManagedConnection implements ManagedConnection {
/**
* The logger
*/
private static Logger log = Logger.getLogger(LMDbManagedConnection.class.getName());
/**
* The logwriter
*/
private PrintWriter logwriter;
/**
* ManagedConnectionFactory
*/
private LMDbManagedConnectionFactory managedConnectionFactory;
/**
* Listeners
*/
private List<ConnectionEventListener> listeners;
/**
* Connections
*/
private Set<LMDbConnectionImpl> connections;
/**
* The lmdb environment
*/
private Env environment;
/**
* The database
*/
private Dbi<ByteBuffer> dbi;
private XAResource xaResource;
private LocalTransaction txResource;
/**
* Default constructor
*
* @param managedConnectionFactory managedConnectionFactory
*/
public LMDbManagedConnection(LMDbManagedConnectionFactory managedConnectionFactory, Env environment) {
this.managedConnectionFactory = managedConnectionFactory;
this.environment = environment;
this.logwriter = null;
this.listeners = Collections.synchronizedList(new ArrayList<ConnectionEventListener>(1));
this.connections = new HashSet<LMDbConnectionImpl>();
}
/**
* Creates a new connection handle for every underlying physical Dbi connection
* represented by the ManagedConnection instance.
*
* @param subject Security context as JAAS subject
* @param cxRequestInfo ConnectionRequestInfo instance
* @return generic Object instance representing the connection handle.
* @throws ResourceException generic exception if operation fails
*/
public Object getConnection(Subject subject, ConnectionRequestInfo cxRequestInfo) throws ResourceException {
log.finest("getConnection()");
LMDbConnectionRequestInfo connectionRequestInfo = (LMDbConnectionRequestInfo) cxRequestInfo;
if(this.dbi == null) {
this.dbi = environment.openDbi(connectionRequestInfo.getDatabaseName(), DbiFlags.MDB_CREATE);
}
LMDbConnectionImpl connection = new LMDbConnectionImpl(this, managedConnectionFactory);
connections.add(connection);
return connection;
}
/**
* Used by the container to change the association of an
* application-level connection handle with a ManagedConneciton instance.
*
* @param connection Application-level connection handle
* @throws ResourceException generic exception if operation fails
*/
public void associateConnection(Object connection) throws ResourceException {
log.finest("associateConnection()");
if (connection == null) {
throw new ResourceException("Null connection handle");
}
if (!(connection instanceof LMDbConnectionImpl)) {
throw new ResourceException("Wrong connection handle");
}
LMDbConnectionImpl handle = (LMDbConnectionImpl) connection;
handle.setManagedConnection(this);
connections.add(handle);
}
/**
* Application server calls this method to force any cleanup on the ManagedConnection instance.
*
* @throws ResourceException generic exception if operation fails
*/
public void cleanup() throws ResourceException {
log.finest("cleanup()");
for (LMDbConnectionImpl connection : connections) {
connection.setManagedConnection(null);
}
connections.clear();
}
/**
* Destroys the physical connection to the underlying resource manager.
*
* @throws ResourceException generic exception if operation fails
*/
public void destroy() throws ResourceException {
log.finest("destroy()");
dbi.close();
dbi = null;
}
/**
* Adds a connection event listener to the ManagedConnection instance.
*
* @param listener A new ConnectionEventListener to be registered
*/
public void addConnectionEventListener(ConnectionEventListener listener) {
log.finest("addConnectionEventListener()");
if (listener == null) {
throw new IllegalArgumentException("Listener is null");
}
listeners.add(listener);
}
/**
* Removes an already registered connection event listener from the ManagedConnection instance.
*
* @param listener already registered connection event listener to be removed
*/
public void removeConnectionEventListener(ConnectionEventListener listener) {
log.finest("removeConnectionEventListener()");
if (listener == null) {
throw new IllegalArgumentException("Listener is null");
}
listeners.remove(listener);
}
/**
* Close handle
*
* @param handle The handle
*/
void closeHandle(LMDbConnection handle) {
log.finest("closeHandle()");
connections.remove((LMDbConnectionImpl) handle);
ConnectionEvent event = new ConnectionEvent(this, ConnectionEvent.CONNECTION_CLOSED);
event.setConnectionHandle(handle);
for (ConnectionEventListener cel : listeners) {
cel.connectionClosed(event);
}
}
/**
* Gets the log writer for this ManagedConnection instance.
*
* @return Character output stream associated with this Managed-Connection instance
* @throws ResourceException generic exception if operation fails
*/
public PrintWriter getLogWriter() throws ResourceException {
log.finest("getLogWriter()");
return logwriter;
}
/**
* Sets the log writer for this ManagedConnection instance.
*
* @param out Character Output stream to be associated
* @throws ResourceException generic exception if operation fails
*/
public void setLogWriter(PrintWriter out) throws ResourceException {
log.finest("setLogWriter()");
logwriter = out;
}
/**
* Returns an <code>javax.resource.spi.LocalTransaction</code> instance.
*
* @return LocalTransaction instance
* @throws ResourceException generic exception if operation fails
*/
public LocalTransaction getLocalTransaction() throws ResourceException {
log.finest("getLocalTransaction()");
return txResource == null ? new LMDbTXResource(this) : txResource;
}
/**
* Returns an <code>javax.transaction.xa.XAresource</code> instance.
*
* @return XAResource instance
* @throws ResourceException generic exception if operation fails
*/
public XAResource getXAResource() throws ResourceException {
log.finest("getXAResource()");
if(xaResource == null) {
this.xaResource = new LMDbXAResource(this);
}
return xaResource;
}
/**
* Gets the metadata information for this connection's underlying EIS resource manager instance.
*
* @return ManagedConnectionMetaData instance
* @throws ResourceException generic exception if operation fails
*/
public ManagedConnectionMetaData getMetaData() throws ResourceException {
log.finest("getMetaData()");
return new LMDbManagedConnectionMetaData(managedConnectionFactory.getMaxReaders());
}
public List<ConnectionEventListener> getListeners() {
return listeners;
}
Dbi getDbi() {
return dbi;
}
String getDatabaseName() {
return String.valueOf(UTF_8.decode(ByteBuffer.wrap(dbi.getName())));
}
Txn getWriteTransaction() {
return environment.txnWrite();
}
Txn getReadTransaction() {
return environment.txnRead();
}
}
|
Update LMDbManagedConnection.java
Added todo
|
src/main/java/net/betzel/lmdb/jca/LMDbManagedConnection.java
|
Update LMDbManagedConnection.java
|
|
Java
|
apache-2.0
|
21de533359a91e9310d4f9b4221fa83e4d393c78
| 0
|
synthetichealth/synthea,synthetichealth/synthea,synthetichealth/synthea
|
package org.mitre.synthea.export;
import static org.mitre.synthea.export.ExportHelper.nextFriday;

import com.google.gson.JsonObject;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.nio.file.Path;
import java.text.SimpleDateFormat;
import java.time.Instant;
import java.time.ZoneId;
import java.util.Arrays;
import java.util.Date;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import java.util.concurrent.atomic.AtomicInteger;
import org.mitre.synthea.helpers.SimpleCSV;
import org.mitre.synthea.helpers.Utilities;
import org.mitre.synthea.world.agents.Payer;
import org.mitre.synthea.world.agents.Person;
import org.mitre.synthea.world.agents.Provider.ProviderType;
import org.mitre.synthea.world.concepts.HealthRecord;
import org.mitre.synthea.world.concepts.HealthRecord.EncounterType;
import org.mitre.synthea.world.concepts.HealthRecord.Medication;
/**
* BlueButton 2 Exporter.
*/
public class BB2Exporter implements Flushable {
// One CSV writer per BB2 claim-type output file; created in prepareOutputFiles().
private SynchronizedBBLineWriter beneficiary;
private SynchronizedBBLineWriter beneficiaryHistory;
private SynchronizedBBLineWriter outpatient;
private SynchronizedBBLineWriter inpatient;
private SynchronizedBBLineWriter carrier;
private SynchronizedBBLineWriter prescription;
private AtomicInteger claimId; // per claim per encounter
private AtomicInteger claimGroupId; // per encounter
private AtomicInteger pdeId; // per medication claim
// Parsed rows of payers/carriers.csv; loaded once in the constructor.
private List<LinkedHashMap<String, String>> carrierLookup;
// Maps state names to BB2 state codes.
// NOTE(review): class name should be UpperCamelCase (StateCodeMapper).
private stateCodeMapper stateLookup;
// Person.attributes keys under which exportBeneficiary stores the generated IDs.
private static final String BB2_BENE_ID = "BB2_BENE_ID";
private static final String BB2_HIC_ID = "BB2_HIC_ID";
/**
 * Day-Month-Year date format (dd-MMM-yyyy). Not thread-safe: always access
 * inside a synchronized block (see bb2DateFromTimestamp).
 */
private static final SimpleDateFormat BB2_DATE_FORMAT = new SimpleDateFormat("dd-MMM-yyyy");
/**
 * Get a date string in the format dd-MMM-yyyy (e.g. 01-Jan-1971) from the given
 * time stamp, using the shared BB2_DATE_FORMAT.
 *
 * @param time point in time specified as milliseconds since the epoch
 * @return the formatted date string
 */
private static String bb2DateFromTimestamp(long time) {
    // SimpleDateFormat is not thread-safe, hence the synchronization:
    // http://bugs.java.com/bugdatabase/view_bug.do?bug_id=6231579
    synchronized (BB2_DATE_FORMAT) {
        return BB2_DATE_FORMAT.format(new Date(time));
    }
}
/**
 * Create the exporter: initialize the claim ID sequences, load the carrier and
 * state-code lookup tables, then create the output folder/files and write headers.
 * Checked IOExceptions are wrapped in RuntimeException because the singleton
 * holder pattern cannot tolerate a throwing constructor.
 */
private BB2Exporter() {
    claimId = new AtomicInteger();
    claimGroupId = new AtomicInteger();
    pdeId = new AtomicInteger();
    try {
        String csv = Utilities.readResource("payers/carriers.csv");
        // The resource may start with a UTF-8 byte-order mark; strip it so the
        // first header column parses cleanly.
        if (csv.startsWith("\uFEFF")) {
            csv = csv.substring(1); // Removes BOM.
        }
        carrierLookup = SimpleCSV.parse(csv);
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
    stateLookup = new stateCodeMapper();
    try {
        prepareOutputFiles();
    } catch (IOException e) {
        // wrap the exception in a runtime exception.
        // the singleton pattern below doesn't work if the constructor can throw
        // and if these do throw ioexceptions there's nothing we can do anyway
        throw new RuntimeException(e);
    }
}
/**
 * (Re)create the output folder and one CSV file per claim type, writing the
 * header row to each. Any writers left open from a previous invocation are
 * closed first, so the method can be called again to reset the output.
 *
 * @throws IOException if a file cannot be created or written
 */
final void prepareOutputFiles() throws IOException {
    // Close any writers from a previous invocation before reopening.
    if (beneficiary != null) {
        beneficiary.close();
    }
    if (beneficiaryHistory != null) {
        beneficiaryHistory.close();
    }
    if (inpatient != null) {
        inpatient.close();
    }
    if (outpatient != null) {
        outpatient.close();
    }
    if (carrier != null) {
        carrier.close();
    }
    if (prescription != null) {
        prescription.close();
    }

    // Initialize output files, one per claim type, each with its header row.
    File output = Exporter.getOutputFolder("bb2", null);
    output.mkdirs();
    Path dir = output.toPath();

    beneficiary = new SynchronizedBBLineWriter(dir.resolve("beneficiary.csv").toFile());
    beneficiary.writeHeader(BeneficiaryFields.class);

    beneficiaryHistory = new SynchronizedBBLineWriter(dir.resolve("beneficiary_history.csv").toFile());
    beneficiaryHistory.writeHeader(BeneficiaryHistoryFields.class);

    outpatient = new SynchronizedBBLineWriter(dir.resolve("outpatient.csv").toFile());
    outpatient.writeHeader(OutpatientFields.class);

    inpatient = new SynchronizedBBLineWriter(dir.resolve("inpatient.csv").toFile());
    inpatient.writeHeader(InpatientFields.class);

    carrier = new SynchronizedBBLineWriter(dir.resolve("carrier.csv").toFile());
    carrier.writeHeader(CarrierFields.class);

    prescription = new SynchronizedBBLineWriter(dir.resolve("prescription.csv").toFile());
    prescription.writeHeader(PrescriptionFields.class);
}
/**
 * Export all BB2 records for a single person: beneficiary, beneficiary history,
 * outpatient, inpatient, carrier and prescription claims. exportBeneficiary must
 * run first — it stores the BB2_BENE_ID / BB2_HIC_ID attributes on the person
 * that the other export methods read.
 *
 * @param person the person to export
 * @param stopTime end time of simulation
 * @throws IOException if something goes wrong
 */
void export(Person person, long stopTime) throws IOException {
    exportBeneficiary(person, stopTime);
    exportBeneficiaryHistory(person, stopTime);
    exportOutpatient(person, stopTime);
    exportInpatient(person, stopTime);
    exportCarrier(person, stopTime);
    exportPrescription(person, stopTime);
}
/**
 * Export a beneficiary details for single person. Also stores the generated BB2
 * beneficiary ID and HIC number in the person's attributes (BB2_BENE_ID /
 * BB2_HIC_ID) for use by the other claim export methods.
 *
 * @param person the person to export
 * @param stopTime end time of simulation
 * @throws IOException if something goes wrong
 */
private void exportBeneficiary(Person person, long stopTime) throws IOException {
    HashMap<BeneficiaryFields, String> fieldValues = new HashMap<>();
    fieldValues.put(BeneficiaryFields.DML_IND, "INSERT");
    String personId = (String) person.attributes.get(Person.ID);
    String beneId = personId.split("-")[4]; // last segment of UUID
    person.attributes.put(BB2_BENE_ID, beneId);
    fieldValues.put(BeneficiaryFields.BENE_ID, beneId);
    // hicId = SSN (digits only) + letter suffix
    // ("A" means retired beneficiary, but other options exist too).
    String hicId = person.attributes.get(Person.IDENTIFIER_SSN).toString();
    hicId = hicId.replace("-", "") + "A";
    // (removed a leftover debug System.out.println of the HIC number — it wrote
    // one line to stdout for every exported person)
    person.attributes.put(BB2_HIC_ID, hicId);
    fieldValues.put(BeneficiaryFields.BENE_CRNT_HIC_NUM, hicId);
    fieldValues.put(BeneficiaryFields.BENE_SEX_IDENT_CD,
        (String) person.attributes.get(Person.GENDER));
    fieldValues.put(BeneficiaryFields.BENE_COUNTY_CD,
        (String) person.attributes.get("county"));
    fieldValues.put(BeneficiaryFields.STATE_CODE,
        (String) person.attributes.get(Person.STATE));
    fieldValues.put(BeneficiaryFields.BENE_ZIP_CD,
        (String) person.attributes.get(Person.ZIP));
    fieldValues.put(BeneficiaryFields.BENE_RACE_CD,
        bb2RaceCode(
            (String) person.attributes.get(Person.ETHNICITY),
            (String) person.attributes.get(Person.RACE)));
    fieldValues.put(BeneficiaryFields.BENE_SRNM_NAME,
        (String) person.attributes.get(Person.LAST_NAME));
    fieldValues.put(BeneficiaryFields.BENE_GVN_NAME,
        (String) person.attributes.get(Person.FIRST_NAME));
    long birthdate = (long) person.attributes.get(Person.BIRTHDATE);
    fieldValues.put(BeneficiaryFields.BENE_BIRTH_DT, bb2DateFromTimestamp(birthdate));
    fieldValues.put(BeneficiaryFields.RFRNC_YR, String.valueOf(getYear(stopTime)));
    fieldValues.put(BeneficiaryFields.AGE, String.valueOf(ageAtEndOfYear(birthdate, stopTime)));
    // DEATH_DT is only present for deceased persons.
    if (person.attributes.get(Person.DEATHDATE) != null) {
        long deathDate = (long) person.attributes.get(Person.DEATHDATE);
        fieldValues.put(BeneficiaryFields.DEATH_DT, bb2DateFromTimestamp(deathDate));
    }
    beneficiary.writeValues(BeneficiaryFields.class, fieldValues);
}
/**
 * Export a beneficiary history record for a single person. Assumes
 * exportBeneficiary was called first so the BB2_BENE_ID and BB2_HIC_ID
 * attributes are already set on the person.
 *
 * @param person the person to export
 * @param stopTime end time of simulation
 * @throws IOException if something goes wrong
 */
private void exportBeneficiaryHistory(Person person, long stopTime) throws IOException {
    HashMap<BeneficiaryHistoryFields, String> values = new HashMap<>();
    values.put(BeneficiaryHistoryFields.DML_IND, "INSERT");
    values.put(BeneficiaryHistoryFields.BENE_ID,
        (String) person.attributes.get(BB2_BENE_ID));
    values.put(BeneficiaryHistoryFields.BENE_CRNT_HIC_NUM,
        (String) person.attributes.get(BB2_HIC_ID));
    values.put(BeneficiaryHistoryFields.BENE_SEX_IDENT_CD,
        (String) person.attributes.get(Person.GENDER));
    values.put(BeneficiaryHistoryFields.BENE_BIRTH_DT,
        bb2DateFromTimestamp((long) person.attributes.get(Person.BIRTHDATE)));
    values.put(BeneficiaryHistoryFields.BENE_COUNTY_CD,
        (String) person.attributes.get("county"));
    values.put(BeneficiaryHistoryFields.STATE_CODE,
        (String) person.attributes.get(Person.STATE));
    values.put(BeneficiaryHistoryFields.BENE_ZIP_CD,
        (String) person.attributes.get(Person.ZIP));
    values.put(BeneficiaryHistoryFields.BENE_RACE_CD,
        bb2RaceCode(
            (String) person.attributes.get(Person.ETHNICITY),
            (String) person.attributes.get(Person.RACE)));
    values.put(BeneficiaryHistoryFields.BENE_SRNM_NAME,
        (String) person.attributes.get(Person.LAST_NAME));
    values.put(BeneficiaryHistoryFields.BENE_GVN_NAME,
        (String) person.attributes.get(Person.FIRST_NAME));
    beneficiaryHistory.writeValues(BeneficiaryHistoryFields.class, values);
}
/**
 * Get the year of a point in time, evaluated in the JVM default time zone.
 * @param time point in time specified as number of milliseconds since the epoch
 * @return the year as a four figure value, e.g. 1971
 */
private static int getYear(long time) {
    // java.time replacement for the deprecated Date.getYear(); both interpret
    // the instant in the default time zone, so behavior is unchanged.
    return Instant.ofEpochMilli(time).atZone(ZoneId.systemDefault()).getYear();
}
/**
 * Calculate the age of a person at the end of the year of a reference point in time.
 * Computed as the difference of the calendar years only, so partial years are ignored.
 * @param birthdate a person's birthdate specified as number of milliseconds since the epoch
 * @param stopTime a reference point in time specified as number of milliseconds since the epoch
 * @return the person's age in whole years at the end of the stopTime year
 */
private static int ageAtEndOfYear(long birthdate, long stopTime) {
    return getYear(stopTime) - getYear(birthdate);
}
/**
 * Export outpatient claims details for a single person. One claim line is written
 * per ambulatory, outpatient, urgent-care or wellness encounter with a non-primary
 * provider; all other encounters are skipped. Assumes exportBeneficiary ran first
 * (reads the BB2_BENE_ID attribute).
 *
 * @param person the person to export
 * @param stopTime end time of simulation
 * @throws IOException if something goes wrong
 */
private void exportOutpatient(Person person, long stopTime) throws IOException {
    HashMap<OutpatientFields, String> fieldValues = new HashMap<>();
    for (HealthRecord.Encounter encounter : person.record.encounters) {
        boolean isAmbulatory = encounter.type.equals(EncounterType.AMBULATORY.toString());
        boolean isOutpatient = encounter.type.equals(EncounterType.OUTPATIENT.toString());
        boolean isUrgent = encounter.type.equals(EncounterType.URGENTCARE.toString());
        boolean isWellness = encounter.type.equals(EncounterType.WELLNESS.toString());
        boolean isPrimary = (ProviderType.PRIMARY == encounter.provider.type);
        // NOTE(review): IDs are consumed even for encounters skipped below, leaving
        // gaps in the claim ID sequence — harmless for uniqueness, but confirm intent.
        int claimId = this.claimId.incrementAndGet();
        int claimGroupId = this.claimGroupId.incrementAndGet();
        if (isPrimary || !(isAmbulatory || isOutpatient || isUrgent || isWellness)) {
            continue;
        }
        fieldValues.clear();
        // The REQUIRED fields
        fieldValues.put(OutpatientFields.DML_IND, "INSERT");
        fieldValues.put(OutpatientFields.BENE_ID, (String) person.attributes.get(BB2_BENE_ID));
        fieldValues.put(OutpatientFields.CLM_ID, "" + claimId);
        fieldValues.put(OutpatientFields.CLM_GRP_ID, "" + claimGroupId);
        fieldValues.put(OutpatientFields.FINAL_ACTION, "F");
        fieldValues.put(OutpatientFields.NCH_NEAR_LINE_REC_IDENT_CD, "W"); // W=outpatient
        fieldValues.put(OutpatientFields.NCH_CLM_TYPE_CD, "40"); // 40=outpatient
        fieldValues.put(OutpatientFields.CLM_FROM_DT, bb2DateFromTimestamp(encounter.start));
        fieldValues.put(OutpatientFields.CLM_THRU_DT, bb2DateFromTimestamp(encounter.stop));
        fieldValues.put(OutpatientFields.NCH_WKLY_PROC_DT,
            bb2DateFromTimestamp(nextFriday(encounter.stop)));
        fieldValues.put(OutpatientFields.CLAIM_QUERY_CODE, "3"); // 1=Interim, 3=Final, 5=Debit
        fieldValues.put(OutpatientFields.PRVDR_NUM, encounter.provider.id);
        fieldValues.put(OutpatientFields.CLM_FAC_TYPE_CD, "1"); // 1=Hospital, 2=SNF, 7=Dialysis
        fieldValues.put(OutpatientFields.CLM_SRVC_CLSFCTN_TYPE_CD, "3"); // depends on value of above
        fieldValues.put(OutpatientFields.CLM_FREQ_CD, "1"); // 1=Admit-Discharge, 9=Final
        fieldValues.put(OutpatientFields.CLM_PMT_AMT, "" + encounter.claim.getTotalClaimCost());
        if (encounter.claim.payer == Payer.getGovernmentPayer("Medicare")) {
            fieldValues.put(OutpatientFields.NCH_PRMRY_PYR_CLM_PD_AMT, "0");
        } else {
            fieldValues.put(OutpatientFields.NCH_PRMRY_PYR_CLM_PD_AMT,
                "" + encounter.claim.getCoveredCost());
        }
        // State name is mapped to its BB2 state code via the lookup table.
        //fieldValues.put(OutpatientFields.PRVDR_STATE_CD, encounter.provider.state);
        fieldValues.put(OutpatientFields.PRVDR_STATE_CD, stateLookup.getStateCode(encounter.provider.state));
        // PTNT_DSCHRG_STUS_CD: 1=home, 2=transfer, 3=SNF, 20=died, 30=still here
        String field = null;
        if (encounter.ended) {
            field = "1";
        } else {
            field = "30"; // the patient is still here
        }
        if (!person.alive(encounter.stop)) {
            field = "20"; // the patient died before the encounter ended
        }
        fieldValues.put(OutpatientFields.PTNT_DSCHRG_STUS_CD, field);
        fieldValues.put(OutpatientFields.CLM_TOT_CHRG_AMT, "" + encounter.claim.getTotalClaimCost());
        // TODO required in the mapping, but not in the Enum
        // fieldValues.put(OutpatientFields.CLM_IP_ADMSN_TYPE_CD, null);
        // fieldValues.put(OutpatientFields.CLM_PASS_THRU_PER_DIEM_AMT, null);
        // fieldValues.put(OutpatientFields.NCH_BENE_IP_DDCTBL_AMT, null);
        // fieldValues.put(OutpatientFields.NCH_BENE_PTA_COINSRNC_LBLTY_AM, null);
        fieldValues.put(OutpatientFields.NCH_BENE_BLOOD_DDCTBL_LBLTY_AM, "0");
        fieldValues.put(OutpatientFields.NCH_PROFNL_CMPNT_CHRG_AMT, "4"); // fixed $ amount?
        // TODO required in the mapping, but not in the Enum
        // fieldValues.put(OutpatientFields.NCH_IP_NCVRD_CHRG_AMT, null);
        // fieldValues.put(OutpatientFields.NCH_IP_TOT_DDCTN_AMT, null);
        // fieldValues.put(OutpatientFields.CLM_UTLZTN_DAY_CNT, null);
        // fieldValues.put(OutpatientFields.BENE_TOT_COINSRNC_DAYS_CNT, null);
        // fieldValues.put(OutpatientFields.CLM_NON_UTLZTN_DAYS_CNT, null);
        // fieldValues.put(OutpatientFields.NCH_BLOOD_PNTS_FRNSHD_QTY, null);
        // fieldValues.put(OutpatientFields.CLM_DRG_OUTLIER_STAY_CD, null);
        fieldValues.put(OutpatientFields.CLM_LINE_NUM, "1");
        fieldValues.put(OutpatientFields.REV_CNTR, "0001"); // total charge, lots of alternatives
        fieldValues.put(OutpatientFields.REV_CNTR_UNIT_CNT, "0");
        fieldValues.put(OutpatientFields.REV_CNTR_RATE_AMT, "0");
        fieldValues.put(OutpatientFields.REV_CNTR_TOT_CHRG_AMT,
            "" + encounter.claim.getCoveredCost());
        fieldValues.put(OutpatientFields.REV_CNTR_NCVRD_CHRG_AMT,
            "" + encounter.claim.getPatientCost());
        outpatient.writeValues(OutpatientFields.class, fieldValues);
    }
}
/**
 * Export inpatient claims details for a single person. One claim line is written
 * per inpatient or emergency encounter; all other encounter types are skipped.
 * Assumes exportBeneficiary ran first (reads the BB2_BENE_ID attribute).
 *
 * @param person the person to export
 * @param stopTime end time of simulation
 * @throws IOException if something goes wrong
 */
private void exportInpatient(Person person, long stopTime) throws IOException {
    HashMap<InpatientFields, String> fieldValues = new HashMap<>();
    // Tracks whether the immediately preceding encounter was an emergency visit,
    // used to classify the admission type (urgent vs. elective) below.
    // (Removed unused 'previous' / 'previousInpatient' locals that were assigned
    // but never read.)
    boolean previousEmergency = false;
    for (HealthRecord.Encounter encounter : person.record.encounters) {
        boolean isInpatient = encounter.type.equals(EncounterType.INPATIENT.toString());
        boolean isEmergency = encounter.type.equals(EncounterType.EMERGENCY.toString());
        int claimId = this.claimId.incrementAndGet();
        int claimGroupId = this.claimGroupId.incrementAndGet();
        if (!(isInpatient || isEmergency)) {
            previousEmergency = false;
            continue;
        }
        fieldValues.clear();
        // The REQUIRED fields
        fieldValues.put(InpatientFields.DML_IND, "INSERT");
        fieldValues.put(InpatientFields.BENE_ID, (String) person.attributes.get(BB2_BENE_ID));
        fieldValues.put(InpatientFields.CLM_ID, "" + claimId);
        fieldValues.put(InpatientFields.CLM_GRP_ID, "" + claimGroupId);
        fieldValues.put(InpatientFields.FINAL_ACTION, "F"); // F or V
        fieldValues.put(InpatientFields.NCH_NEAR_LINE_REC_IDENT_CD, "V"); // V = inpatient
        fieldValues.put(InpatientFields.NCH_CLM_TYPE_CD, "60"); // Always 60 for inpatient claims
        fieldValues.put(InpatientFields.CLM_FROM_DT, bb2DateFromTimestamp(encounter.start));
        fieldValues.put(InpatientFields.CLM_THRU_DT, bb2DateFromTimestamp(encounter.stop));
        fieldValues.put(InpatientFields.NCH_WKLY_PROC_DT,
            bb2DateFromTimestamp(nextFriday(encounter.stop)));
        fieldValues.put(InpatientFields.CLAIM_QUERY_CODE, "3"); // 1=Interim, 3=Final, 5=Debit
        fieldValues.put(InpatientFields.PRVDR_NUM, encounter.provider.id);
        fieldValues.put(InpatientFields.CLM_FAC_TYPE_CD, "1"); // 1=Hospital, 2=SNF, 7=Dialysis
        fieldValues.put(InpatientFields.CLM_SRVC_CLSFCTN_TYPE_CD, "1"); // depends on value of above
        fieldValues.put(InpatientFields.CLM_FREQ_CD, "1"); // 1=Admit-Discharge, 9=Final
        fieldValues.put(InpatientFields.CLM_PMT_AMT, "" + encounter.claim.getTotalClaimCost());
        if (encounter.claim.payer == Payer.getGovernmentPayer("Medicare")) {
            fieldValues.put(InpatientFields.NCH_PRMRY_PYR_CLM_PD_AMT, "0");
        } else {
            fieldValues.put(InpatientFields.NCH_PRMRY_PYR_CLM_PD_AMT,
                "" + encounter.claim.getCoveredCost());
        }
        // Removed a redundant put of PRVDR_STATE_CD with the raw state name: the
        // mapped state code below immediately overwrote it, so it was dead code.
        fieldValues.put(InpatientFields.PRVDR_STATE_CD,
            stateLookup.getStateCode(encounter.provider.state));
        // PTNT_DSCHRG_STUS_CD: 1=home, 2=transfer, 3=SNF, 20=died, 30=still here
        String field = null;
        if (encounter.ended) {
            field = "1"; // TODO 2=transfer if the next encounter is also inpatient
        } else {
            field = "30"; // the patient is still here
        }
        if (!person.alive(encounter.stop)) {
            field = "20"; // the patient died before the encounter ended
        }
        fieldValues.put(InpatientFields.PTNT_DSCHRG_STUS_CD, field);
        fieldValues.put(InpatientFields.CLM_TOT_CHRG_AMT, "" + encounter.claim.getTotalClaimCost());
        if (isEmergency) {
            field = "1"; // emergency
        } else if (previousEmergency) {
            field = "2"; // urgent
        } else {
            field = "3"; // elective
        }
        fieldValues.put(InpatientFields.CLM_IP_ADMSN_TYPE_CD, field);
        fieldValues.put(InpatientFields.CLM_PASS_THRU_PER_DIEM_AMT, "10"); // fixed $ amount?
        fieldValues.put(InpatientFields.NCH_BENE_IP_DDCTBL_AMT,
            "" + encounter.claim.getDeductiblePaid());
        fieldValues.put(InpatientFields.NCH_BENE_PTA_COINSRNC_LBLTY_AM,
            "" + encounter.claim.getCoinsurancePaid());
        fieldValues.put(InpatientFields.NCH_BENE_BLOOD_DDCTBL_LBLTY_AM, "0");
        fieldValues.put(InpatientFields.NCH_PROFNL_CMPNT_CHRG_AMT, "4"); // fixed $ amount?
        fieldValues.put(InpatientFields.NCH_IP_NCVRD_CHRG_AMT,
            "" + encounter.claim.getPatientCost());
        fieldValues.put(InpatientFields.NCH_IP_TOT_DDCTN_AMT,
            "" + encounter.claim.getPatientCost());
        // Whole days of the stay; sub-day encounters count as 0.
        int days = (int) ((encounter.stop - encounter.start) / (1000 * 60 * 60 * 24));
        fieldValues.put(InpatientFields.CLM_UTLZTN_DAY_CNT, "" + days);
        if (days > 60) {
            field = "" + (days - 60);
        } else {
            field = "0";
        }
        fieldValues.put(InpatientFields.BENE_TOT_COINSRNC_DAYS_CNT, field);
        fieldValues.put(InpatientFields.CLM_NON_UTLZTN_DAYS_CNT, "0");
        fieldValues.put(InpatientFields.NCH_BLOOD_PNTS_FRNSHD_QTY, "0");
        if (days > 60) {
            field = "1"; // days outlier
        } else if (encounter.claim.getTotalClaimCost() > 100_000) {
            field = "2"; // cost outlier
        } else {
            field = "0"; // no outlier
        }
        fieldValues.put(InpatientFields.CLM_DRG_OUTLIER_STAY_CD, field);
        fieldValues.put(InpatientFields.CLM_LINE_NUM, "1");
        fieldValues.put(InpatientFields.REV_CNTR, "0001"); // total charge, lots of alternatives
        fieldValues.put(InpatientFields.REV_CNTR_UNIT_CNT, "0");
        fieldValues.put(InpatientFields.REV_CNTR_RATE_AMT, "0");
        fieldValues.put(InpatientFields.REV_CNTR_TOT_CHRG_AMT,
            "" + encounter.claim.getCoveredCost());
        fieldValues.put(InpatientFields.REV_CNTR_NCVRD_CHRG_AMT,
            "" + encounter.claim.getPatientCost());
        previousEmergency = isEmergency;
        inpatient.writeValues(InpatientFields.class, fieldValues);
    }
}
/**
 * Export carrier claims details for a single person. One carrier claim line is
 * written per primary care encounter; non-primary encounters are skipped (but
 * still consume claim IDs and may update the latest hemoglobin result).
 * @param person the person to export
 * @param stopTime end time of simulation
 * @throws IOException if something goes wrong
 */
private void exportCarrier(Person person, long stopTime) throws IOException {
  HashMap<CarrierFields, String> fieldValues = new HashMap<>();
  double latestHemoglobin = 0;
  for (HealthRecord.Encounter encounter : person.record.encounters) {
    boolean isPrimary = (ProviderType.PRIMARY == encounter.provider.type);
    // NOTE(review): IDs are incremented even for encounters that are skipped
    // below, so exported claim IDs are not contiguous — presumed intentional.
    int claimId = this.claimId.incrementAndGet();
    int claimGroupId = this.claimGroupId.incrementAndGet();
    // Track the most recent hemoglobin result (LOINC 718-7) seen in any
    // encounter; it is reported on each subsequent carrier claim line.
    for (HealthRecord.Observation observation : encounter.observations) {
      if (observation.containsCode("718-7", "http://loinc.org")) {
        latestHemoglobin = (double) observation.value;
      }
    }
    // Carrier claims are only generated for primary care encounters.
    if (!isPrimary) {
      continue;
    }
    fieldValues.clear();
    // The REQUIRED fields
    fieldValues.put(CarrierFields.DML_IND, "INSERT");
    fieldValues.put(CarrierFields.BENE_ID, (String) person.attributes.get(BB2_BENE_ID));
    fieldValues.put(CarrierFields.CLM_ID, "" + claimId);
    fieldValues.put(CarrierFields.CLM_GRP_ID, "" + claimGroupId);
    fieldValues.put(CarrierFields.FINAL_ACTION, "F"); // F or V
    fieldValues.put(CarrierFields.NCH_NEAR_LINE_REC_IDENT_CD, "O"); // O=physician
    fieldValues.put(CarrierFields.NCH_CLM_TYPE_CD, "71"); // local carrier, non-DME
    fieldValues.put(CarrierFields.CLM_FROM_DT, bb2DateFromTimestamp(encounter.start));
    fieldValues.put(CarrierFields.CLM_THRU_DT, bb2DateFromTimestamp(encounter.stop));
    fieldValues.put(CarrierFields.NCH_WKLY_PROC_DT,
        bb2DateFromTimestamp(nextFriday(encounter.stop)));
    fieldValues.put(CarrierFields.CARR_CLM_ENTRY_CD, "1");
    fieldValues.put(CarrierFields.CLM_DISP_CD, "01");
    fieldValues.put(CarrierFields.CARR_NUM,
        getCarrier(encounter.provider.state, CarrierFields.CARR_NUM));
    fieldValues.put(CarrierFields.CARR_CLM_PMT_DNL_CD, "1"); // 1=paid to physician
    fieldValues.put(CarrierFields.CLM_PMT_AMT, "" + encounter.claim.getTotalClaimCost());
    // Primary payer paid amount is only non-zero when Medicare is secondary.
    if (encounter.claim.payer == Payer.getGovernmentPayer("Medicare")) {
      fieldValues.put(CarrierFields.CARR_CLM_PRMRY_PYR_PD_AMT, "0");
    } else {
      fieldValues.put(CarrierFields.CARR_CLM_PRMRY_PYR_PD_AMT,
          "" + encounter.claim.getCoveredCost());
    }
    fieldValues.put(CarrierFields.NCH_CLM_PRVDR_PMT_AMT,
        "" + encounter.claim.getTotalClaimCost());
    fieldValues.put(CarrierFields.NCH_CLM_BENE_PMT_AMT, "0");
    fieldValues.put(CarrierFields.NCH_CARR_CLM_SBMTD_CHRG_AMT,
        "" + encounter.claim.getTotalClaimCost());
    fieldValues.put(CarrierFields.NCH_CARR_CLM_ALOWD_AMT,
        "" + encounter.claim.getCoveredCost());
    fieldValues.put(CarrierFields.CARR_CLM_CASH_DDCTBL_APLD_AMT,
        "" + encounter.claim.getDeductiblePaid());
    fieldValues.put(CarrierFields.CARR_CLM_RFRNG_PIN_NUM, encounter.provider.id);
    fieldValues.put(CarrierFields.LINE_NUM, "1");
    fieldValues.put(CarrierFields.CARR_PRFRNG_PIN_NUM, encounter.provider.id);
    fieldValues.put(CarrierFields.CARR_LINE_PRVDR_TYPE_CD, "0");
    fieldValues.put(CarrierFields.TAX_NUM,
        "" + encounter.clinician.attributes.get(Person.IDENTIFIER_SSN));
    fieldValues.put(CarrierFields.CARR_LINE_RDCD_PMT_PHYS_ASTN_C, "0");
    fieldValues.put(CarrierFields.LINE_SRVC_CNT, "" + encounter.claim.items.size());
    fieldValues.put(CarrierFields.LINE_CMS_TYPE_SRVC_CD, "1");
    fieldValues.put(CarrierFields.LINE_PLACE_OF_SRVC_CD, "11"); // 11=office
    fieldValues.put(CarrierFields.CARR_LINE_PRCNG_LCLTY_CD,
        getCarrier(encounter.provider.state, CarrierFields.CARR_LINE_PRCNG_LCLTY_CD));
    fieldValues.put(CarrierFields.LINE_NCH_PMT_AMT,
        "" + encounter.claim.getCoveredCost());
    fieldValues.put(CarrierFields.LINE_BENE_PMT_AMT, "0");
    fieldValues.put(CarrierFields.LINE_PRVDR_PMT_AMT,
        "" + encounter.claim.getCoveredCost());
    fieldValues.put(CarrierFields.LINE_BENE_PTB_DDCTBL_AMT,
        "" + encounter.claim.getDeductiblePaid());
    fieldValues.put(CarrierFields.LINE_BENE_PRMRY_PYR_PD_AMT, "0");
    fieldValues.put(CarrierFields.LINE_COINSRNC_AMT,
        "" + encounter.claim.getCoinsurancePaid());
    fieldValues.put(CarrierFields.LINE_SBMTD_CHRG_AMT,
        "" + encounter.claim.getTotalClaimCost());
    fieldValues.put(CarrierFields.LINE_ALOWD_CHRG_AMT,
        "" + encounter.claim.getCoveredCost());
    // length of encounter in minutes
    fieldValues.put(CarrierFields.CARR_LINE_MTUS_CNT,
        "" + ((encounter.stop - encounter.start) / (1000 * 60)));
    fieldValues.put(CarrierFields.LINE_HCT_HGB_RSLT_NUM,
        "" + latestHemoglobin);
    fieldValues.put(CarrierFields.CARR_LINE_ANSTHSA_UNIT_CNT, "0");
    carrier.writeValues(CarrierFields.class, fieldValues);
  }
}
/**
 * Export prescription (Part D PDE) claims details for a single person. One row
 * is written per medication per encounter, with a per-drug running fill count
 * and a per-year running patient out-of-pocket cost total.
 * @param person the person to export
 * @param stopTime end time of simulation
 * @throws IOException if something goes wrong
 */
private void exportPrescription(Person person, long stopTime) throws IOException {
  HashMap<PrescriptionFields, String> fieldValues = new HashMap<>();
  HashMap<String, Integer> fillNum = new HashMap<>();
  double costs = 0; // running patient out-of-pocket total, reset each calendar year
  int costYear = 0;
  for (HealthRecord.Encounter encounter : person.record.encounters) {
    for (Medication medication : encounter.medications) {
      int pdeId = this.pdeId.incrementAndGet();
      int claimGroupId = this.claimGroupId.incrementAndGet();
      // TODO this should be an NDC code, not RxNorm
      String medicationCode = medication.codes.get(0).code;
      fieldValues.clear();
      // The REQUIRED fields
      fieldValues.put(PrescriptionFields.DML_IND, "INSERT");
      fieldValues.put(PrescriptionFields.PDE_ID, "" + pdeId);
      fieldValues.put(PrescriptionFields.CLM_GRP_ID, "" + claimGroupId);
      fieldValues.put(PrescriptionFields.FINAL_ACTION, "F");
      fieldValues.put(PrescriptionFields.BENE_ID, (String) person.attributes.get(BB2_BENE_ID));
      fieldValues.put(PrescriptionFields.SRVC_DT, bb2DateFromTimestamp(encounter.start));
      fieldValues.put(PrescriptionFields.SRVC_PRVDR_ID_QLFYR_CD, "01"); // undefined
      fieldValues.put(PrescriptionFields.SRVC_PRVDR_ID, encounter.provider.id);
      fieldValues.put(PrescriptionFields.PRSCRBR_ID_QLFYR_CD, "01"); // undefined
      fieldValues.put(PrescriptionFields.PRSCRBR_ID,
          "" + (9_999_999_999L - encounter.clinician.identifier));
      fieldValues.put(PrescriptionFields.RX_SRVC_RFRNC_NUM, "" + pdeId);
      fieldValues.put(PrescriptionFields.PROD_SRVC_ID, medicationCode);
      // H=hmo, R=ppo, S=stand-alone, E=employer direct, X=limited income
      fieldValues.put(PrescriptionFields.PLAN_CNTRCT_REC_ID,
          ("R" + Math.abs(
              UUID.fromString(medication.claim.payer.uuid)
              .getMostSignificantBits())).substring(0, 5));
      fieldValues.put(PrescriptionFields.PLAN_PBP_REC_NUM, "999");
      // 0=not specified, 1=not compound, 2=compound
      fieldValues.put(PrescriptionFields.CMPND_CD, "0");
      fieldValues.put(PrescriptionFields.DAW_PROD_SLCTN_CD, "" + (int) person.rand(0, 9));
      fieldValues.put(PrescriptionFields.QTY_DSPNSD_NUM, "" + getQuantity(medication, stopTime));
      fieldValues.put(PrescriptionFields.DAYS_SUPLY_NUM, "" + getDays(medication, stopTime));
      // Per-drug refill counter: first fill is 1, each repeat increments.
      int fill = fillNum.merge(medicationCode, 1, Integer::sum);
      fieldValues.put(PrescriptionFields.FILL_NUM, "" + fill);
      fieldValues.put(PrescriptionFields.DRUG_CVRG_STUS_CD, "C");
      // Reset the running out-of-pocket total at each calendar-year boundary.
      int year = Utilities.getYear(medication.start);
      if (year != costYear) {
        costYear = year;
        costs = 0;
      }
      costs += medication.claim.getPatientCost();
      // 4550 approximates the Part D out-of-pocket threshold — TODO confirm
      // against the plan year being simulated.
      if (costs <= 4550.00) {
        fieldValues.put(PrescriptionFields.GDC_BLW_OOPT_AMT, "" + costs);
        fieldValues.put(PrescriptionFields.GDC_ABV_OOPT_AMT, "0");
      } else {
        fieldValues.put(PrescriptionFields.GDC_BLW_OOPT_AMT, "4550.00");
        fieldValues.put(PrescriptionFields.GDC_ABV_OOPT_AMT, "" + (costs - 4550));
      }
      fieldValues.put(PrescriptionFields.PTNT_PAY_AMT, "" + medication.claim.getPatientCost());
      fieldValues.put(PrescriptionFields.OTHR_TROOP_AMT, "0");
      fieldValues.put(PrescriptionFields.LICS_AMT, "0");
      fieldValues.put(PrescriptionFields.PLRO_AMT, "0");
      fieldValues.put(PrescriptionFields.CVRD_D_PLAN_PD_AMT,
          "" + medication.claim.getCoveredCost());
      fieldValues.put(PrescriptionFields.NCVRD_PLAN_PD_AMT,
          "" + medication.claim.getPatientCost());
      fieldValues.put(PrescriptionFields.TOT_RX_CST_AMT,
          "" + medication.claim.getTotalClaimCost());
      fieldValues.put(PrescriptionFields.RPTD_GAP_DSCNT_NUM, "0");
      fieldValues.put(PrescriptionFields.PHRMCY_SRVC_TYPE_CD, "0" + (int) person.rand(1, 8));
      // 00=not specified, 01=home, 02=SNF, 03=long-term, 11=hospice, 14=homeless
      // Boolean.TRUE.equals tolerates a missing or null "homeless" attribute.
      if (Boolean.TRUE.equals(person.attributes.get("homeless"))) {
        fieldValues.put(PrescriptionFields.PTNT_RSDNC_CD, "14");
      } else {
        fieldValues.put(PrescriptionFields.PTNT_RSDNC_CD, "01");
      }
      prescription.writeValues(PrescriptionFields.class, fieldValues);
    }
  }
}
/**
 * Flush contents of any buffered streams to disk.
 * @throws IOException if something goes wrong
 */
@Override
public void flush() throws IOException {
// Flush each per-file writer so everything exported so far is persisted,
// even if the simulation is still running.
beneficiary.flush();
beneficiaryHistory.flush();
inpatient.flush();
outpatient.flush();
carrier.flush();
prescription.flush();
}
/**
 * Get the BB2 race code. BB2 uses a single code to represent race and ethnicity;
 * ethnicity is assumed to take priority here.
 * @param ethnicity the Synthea ethnicity
 * @param race the Synthea race
 * @return the BB2 race code
 */
private String bb2RaceCode(String ethnicity, String race) {
  // Hispanic ethnicity takes precedence over any race value.
  if ("hispanic".equals(ethnicity)) {
    return "5";
  }
  // Map each recognized race value to its BB2 code.
  if ("white".equals(race)) {
    return "1";
  }
  if ("black".equals(race)) {
    return "2";
  }
  if ("asian".equals(race)) {
    return "4";
  }
  if ("native".equals(race)) {
    return "6";
  }
  // "other" and any unrecognized value both map to 3.
  return "3";
}
/**
 * Look up a carrier table value for the given state.
 * @param state state name or numeric state code to match
 * @param column which carrier table column to return
 * @return the matching table value, or "0" if the state is not in the table
 */
private String getCarrier(String state, CarrierFields column) {
  for (LinkedHashMap<String, String> entry : carrierLookup) {
    boolean nameMatches = entry.get("STATE").equals(state);
    boolean codeMatches = entry.get("STATE_CODE").equals(state);
    if (nameMatches || codeMatches) {
      return entry.get(column.toString());
    }
  }
  return "0";
}
/**
 * Calculate the total quantity of medication dispensed: the per-day amount
 * (from the prescription dosage details when present, otherwise 1) multiplied
 * by the days supply.
 * @param medication the medication to compute a quantity for
 * @param stopTime end time of simulation (used when the medication has no stop)
 * @return total units dispensed
 */
private int getQuantity(Medication medication, long stopTime) {
  final double msPerDay = 1000.0 * 60 * 60 * 24;
  double amountPerDay = 1;
  double days = getDays(medication, stopTime);
  if (medication.prescriptionDetails != null
      && medication.prescriptionDetails.has("dosage")) {
    JsonObject dosage = medication.prescriptionDetails.getAsJsonObject("dosage");
    long amount = dosage.get("amount").getAsLong();
    long frequency = dosage.get("frequency").getAsLong();
    long period = dosage.get("period").getAsLong();
    String units = dosage.get("unit").getAsString();
    long periodTime = Utilities.convertTime(units, period);
    long perPeriod = amount * frequency;
    // Units per day = (units per period) / (period length in days), i.e.
    // perPeriod * msPerDay / periodTime. The previous code multiplied by
    // periodTime and divided by msPerDay, inverting the units (e.g. one dose
    // every 12 hours computed 0.5/day instead of 2/day).
    if (periodTime > 0) {
      amountPerDay = (perPeriod * msPerDay) / periodTime;
    }
    if (amountPerDay == 0) {
      amountPerDay = 1;
    }
  }
  return (int) (amountPerDay * days);
}
/**
 * Calculate the days supply of a medication: whole days from start until stop
 * (or the end of simulation if still active), overridden by an explicit
 * prescription duration when that duration is longer.
 * @param medication the medication to compute a days supply for
 * @param stopTime end time of simulation (used when the medication has no stop)
 * @return number of whole days (fractional days are truncated)
 */
private int getDays(Medication medication, long stopTime) {
  final long msPerDay = 1000L * 60 * 60 * 24;
  long stop = medication.stop;
  if (stop == 0L) {
    stop = stopTime; // medication still active at end of simulation
  }
  // Integer division intentionally truncates partial days (the previous
  // dead "days = 1" initializer was immediately overwritten here).
  double days = (stop - medication.start) / msPerDay;
  if (medication.prescriptionDetails != null
      && medication.prescriptionDetails.has("duration")) {
    JsonObject duration = medication.prescriptionDetails.getAsJsonObject("duration");
    long quantity = duration.get("quantity").getAsLong();
    String unit = duration.get("unit").getAsString();
    long durationTime = Utilities.convertTime(unit, quantity);
    // Truncating division, to match the elapsed-days calculation above.
    double durationTimeInDays = durationTime / msPerDay;
    if (durationTimeInDays > days) {
      days = durationTimeInDays;
    }
  }
  return (int) days;
}
/**
 * Utility class for converting state names and abbreviations to CMS provider
 * state codes (SSA state codes). Lookups accept either a two-letter
 * abbreviation or a full state name.
 */
class stateCodeMapper {
  private final HashMap<String, String> providerStateCodes;
  private final Map<String, String> stateToAbbrev;
  private final Map<String, String> abbrevToState;

  public stateCodeMapper() {
    this.providerStateCodes = buildProviderStateTable();
    // Previously this table was built twice (field initializer plus
    // constructor); it is now built once here.
    this.stateToAbbrev = buildStateAbbrevTable();
    // Support two-way conversion between state names and abbreviations.
    Map<String, String> reversed = new HashMap<String, String>();
    for (Map.Entry<String, String> entry : stateToAbbrev.entrySet()) {
      reversed.put(entry.getValue(), entry.getKey());
    }
    this.abbrevToState = reversed;
  }

  /**
   * Return the provider state code for a given state.
   * @param state either a state name or a two-letter abbreviation
   * @return 2-digit state code, or "NONE" if the state is not recognized
   */
  private String getStateCode(String state) {
    if (state.length() == 2) {
      state = this.changeStateFormat(state); // abbreviation -> full name
    } else {
      // NOTE(review): capitalizeWords only upper-cases the first letter of
      // each word, so all-caps input (e.g. "NEW YORK") will not match.
      state = this.capitalizeWords(state);
    }
    return this.providerStateCodes.getOrDefault(state, "NONE");
  }

  /**
   * Switch between state name and abbreviation. If the input is an
   * abbreviation the full name is returned, and vice versa.
   * @param state a state name or two-letter abbreviation
   * @return the converted form, or null if unrecognized
   */
  private String changeStateFormat(String state) {
    if (state.length() == 2) {
      return this.abbrevToState.getOrDefault(state.toUpperCase(), null);
    } else {
      String stateClean = this.capitalizeWords(state.toLowerCase());
      return this.stateToAbbrev.getOrDefault(stateClean, null);
    }
  }

  /** Build the state/territory name to postal abbreviation table. */
  private Map<String, String> buildStateAbbrevTable() {
    Map<String, String> states = new HashMap<String, String>();
    states.put("Alabama","AL");
    states.put("Alaska","AK");
    states.put("Alberta","AB");
    states.put("American Samoa","AS");
    states.put("Arizona","AZ");
    states.put("Arkansas","AR");
    states.put("Armed Forces (AE)","AE");
    states.put("Armed Forces Americas","AA");
    states.put("Armed Forces Pacific","AP");
    states.put("British Columbia","BC");
    states.put("California","CA");
    states.put("Colorado","CO");
    states.put("Connecticut","CT");
    states.put("Delaware","DE");
    states.put("District Of Columbia","DC");
    states.put("Florida","FL");
    states.put("Georgia","GA");
    states.put("Guam","GU");
    states.put("Hawaii","HI");
    states.put("Idaho","ID");
    states.put("Illinois","IL");
    states.put("Indiana","IN");
    states.put("Iowa","IA");
    states.put("Kansas","KS");
    states.put("Kentucky","KY");
    states.put("Louisiana","LA");
    states.put("Maine","ME");
    states.put("Manitoba","MB");
    states.put("Maryland","MD");
    states.put("Massachusetts","MA");
    states.put("Michigan","MI");
    states.put("Minnesota","MN");
    states.put("Mississippi","MS");
    states.put("Missouri","MO");
    states.put("Montana","MT");
    states.put("Nebraska","NE");
    states.put("Nevada","NV");
    states.put("New Brunswick","NB");
    states.put("New Hampshire","NH");
    states.put("New Jersey","NJ");
    states.put("New Mexico","NM");
    states.put("New York","NY");
    states.put("Newfoundland","NF");
    states.put("North Carolina","NC");
    states.put("North Dakota","ND");
    states.put("Northwest Territories","NT");
    states.put("Nova Scotia","NS");
    states.put("Nunavut","NU");
    states.put("Ohio","OH");
    states.put("Oklahoma","OK");
    states.put("Ontario","ON");
    states.put("Oregon","OR");
    states.put("Pennsylvania","PA");
    states.put("Prince Edward Island","PE");
    states.put("Puerto Rico","PR");
    states.put("Quebec","QC");
    states.put("Rhode Island","RI");
    states.put("Saskatchewan","SK");
    states.put("South Carolina","SC");
    states.put("South Dakota","SD");
    states.put("Tennessee","TN");
    states.put("Texas","TX");
    states.put("Utah","UT");
    states.put("Vermont","VT");
    states.put("Virgin Islands","VI");
    states.put("Virginia","VA");
    states.put("Washington","WA");
    states.put("West Virginia","WV");
    states.put("Wisconsin","WI");
    states.put("Wyoming","WY");
    states.put("Yukon Territory","YT");
    return states;
  }

  /** Build the state/region name to SSA provider state code table. */
  private HashMap<String, String> buildProviderStateTable() {
    HashMap<String, String> providerStateCode = new HashMap<String, String>();
    providerStateCode.put("Alabama", "01");
    providerStateCode.put("Alaska", "02");
    providerStateCode.put("Arizona", "03");
    providerStateCode.put("Arkansas", "04");
    providerStateCode.put("California", "05");
    providerStateCode.put("Colorado", "06");
    providerStateCode.put("Connecticut", "07");
    providerStateCode.put("Delaware", "08");
    // Key capitalized to match capitalizeWords output ("Of" not "of") so that
    // both "DC" and "District of Columbia" lookups resolve instead of "NONE".
    providerStateCode.put("District Of Columbia", "09");
    providerStateCode.put("Florida", "10");
    providerStateCode.put("Georgia", "11");
    providerStateCode.put("Hawaii", "12");
    providerStateCode.put("Idaho", "13");
    providerStateCode.put("Illinois", "14");
    providerStateCode.put("Indiana", "15");
    providerStateCode.put("Iowa", "16");
    providerStateCode.put("Kansas", "17");
    providerStateCode.put("Kentucky", "18");
    providerStateCode.put("Louisiana", "19");
    providerStateCode.put("Maine", "20");
    providerStateCode.put("Maryland", "21");
    providerStateCode.put("Massachusetts", "22");
    providerStateCode.put("Michigan", "23");
    providerStateCode.put("Minnesota", "24");
    providerStateCode.put("Mississippi", "25");
    providerStateCode.put("Missouri", "26");
    providerStateCode.put("Montana", "27");
    providerStateCode.put("Nebraska", "28");
    providerStateCode.put("Nevada", "29");
    providerStateCode.put("New Hampshire", "30");
    providerStateCode.put("New Jersey", "31");
    providerStateCode.put("New Mexico", "32");
    providerStateCode.put("New York", "33");
    providerStateCode.put("North Carolina", "34");
    providerStateCode.put("North Dakota", "35");
    providerStateCode.put("Ohio", "36");
    providerStateCode.put("Oklahoma", "37");
    providerStateCode.put("Oregon", "38");
    providerStateCode.put("Pennsylvania", "39");
    providerStateCode.put("Puerto Rico", "40");
    providerStateCode.put("Rhode Island", "41");
    providerStateCode.put("South Carolina", "42");
    providerStateCode.put("South Dakota", "43");
    providerStateCode.put("Tennessee", "44");
    providerStateCode.put("Texas", "45");
    providerStateCode.put("Utah", "46");
    providerStateCode.put("Vermont", "47");
    providerStateCode.put("Virgin Islands", "48");
    providerStateCode.put("Virginia", "49");
    providerStateCode.put("Washington", "50");
    providerStateCode.put("West Virginia", "51");
    providerStateCode.put("Wisconsin", "52");
    providerStateCode.put("Wyoming", "53");
    providerStateCode.put("Africa", "54");
    // Was a duplicate "California" key (clobbering code 05); per the SSA
    // state/region code table, 55 is Asia.
    providerStateCode.put("Asia", "55");
    providerStateCode.put("Canada & Islands", "56");
    providerStateCode.put("Central America and West Indies", "57");
    providerStateCode.put("Europe", "58");
    providerStateCode.put("Mexico", "59");
    providerStateCode.put("Oceania", "60");
    providerStateCode.put("Philippines", "61");
    providerStateCode.put("South America", "62");
    providerStateCode.put("U.S. Possessions", "63");
    providerStateCode.put("American Samoa", "64");
    providerStateCode.put("Guam", "65");
    providerStateCode.put("Commonwealth of the Northern Marianas Islands", "66");
    return providerStateCode;
  }

  /** Upper-case the first letter of each whitespace-separated word. */
  private String capitalizeWords(String str) {
    StringBuilder result = new StringBuilder();
    for (String word : str.split("\\s")) {
      if (word.isEmpty()) {
        continue; // guard: consecutive whitespace produced an empty token
      }
      if (result.length() > 0) {
        result.append(' ');
      }
      result.append(Character.toUpperCase(word.charAt(0))).append(word.substring(1));
    }
    return result.toString();
  }
}
/**
 * Defines the fields used in the beneficiary file. Note that order is significant, columns will
 * be written in the order specified.
 */
private enum BeneficiaryFields {
  DML_IND,
  BENE_ID,
  STATE_CODE,
  BENE_COUNTY_CD,
  BENE_ZIP_CD,
  BENE_BIRTH_DT,
  BENE_SEX_IDENT_CD,
  BENE_RACE_CD,
  BENE_ENTLMT_RSN_ORIG,
  BENE_ENTLMT_RSN_CURR,
  BENE_ESRD_IND,
  BENE_MDCR_STATUS_CD,
  BENE_PTA_TRMNTN_CD,
  BENE_PTB_TRMNTN_CD,
  // BENE_PTD_TRMNTN_CD, // The spreadsheet has a gap for this column, examples do not include it
  BENE_CRNT_HIC_NUM,
  BENE_SRNM_NAME,
  BENE_GVN_NAME,
  BENE_MDL_NAME,
  MBI_NUM,
  DEATH_DT,
  RFRNC_YR,
  A_MO_CNT,
  B_MO_CNT,
  BUYIN_MO_CNT,
  HMO_MO_CNT,
  RDS_MO_CNT,
  ENRL_SRC,
  SAMPLE_GROUP,
  EFIVEPCT,
  CRNT_BIC,
  AGE,
  COVSTART,
  DUAL_MO_CNT,
  // Per-month FIPS state/county codes (January through December).
  FIPS_STATE_CNTY_JAN_CD,
  FIPS_STATE_CNTY_FEB_CD,
  FIPS_STATE_CNTY_MAR_CD,
  FIPS_STATE_CNTY_APR_CD,
  FIPS_STATE_CNTY_MAY_CD,
  FIPS_STATE_CNTY_JUN_CD,
  FIPS_STATE_CNTY_JUL_CD,
  FIPS_STATE_CNTY_AUG_CD,
  FIPS_STATE_CNTY_SEPT_CD,
  FIPS_STATE_CNTY_OCT_CD,
  FIPS_STATE_CNTY_NOV_CD,
  FIPS_STATE_CNTY_DEC_CD,
  V_DOD_SW,
  RTI_RACE_CD,
  // Per-month Medicare status codes.
  MDCR_STUS_JAN_CD,
  MDCR_STUS_FEB_CD,
  MDCR_STUS_MAR_CD,
  MDCR_STUS_APR_CD,
  MDCR_STUS_MAY_CD,
  MDCR_STUS_JUN_CD,
  MDCR_STUS_JUL_CD,
  MDCR_STUS_AUG_CD,
  MDCR_STUS_SEPT_CD,
  MDCR_STUS_OCT_CD,
  MDCR_STUS_NOV_CD,
  MDCR_STUS_DEC_CD,
  PLAN_CVRG_MO_CNT,
  // Per-month entitlement buy-in indicators (months 1-12).
  MDCR_ENTLMT_BUYIN_1_IND,
  MDCR_ENTLMT_BUYIN_2_IND,
  MDCR_ENTLMT_BUYIN_3_IND,
  MDCR_ENTLMT_BUYIN_4_IND,
  MDCR_ENTLMT_BUYIN_5_IND,
  MDCR_ENTLMT_BUYIN_6_IND,
  MDCR_ENTLMT_BUYIN_7_IND,
  MDCR_ENTLMT_BUYIN_8_IND,
  MDCR_ENTLMT_BUYIN_9_IND,
  MDCR_ENTLMT_BUYIN_10_IND,
  MDCR_ENTLMT_BUYIN_11_IND,
  MDCR_ENTLMT_BUYIN_12_IND,
  // Per-month HMO indicators (months 1-12).
  HMO_1_IND,
  HMO_2_IND,
  HMO_3_IND,
  HMO_4_IND,
  HMO_5_IND,
  HMO_6_IND,
  HMO_7_IND,
  HMO_8_IND,
  HMO_9_IND,
  HMO_10_IND,
  HMO_11_IND,
  HMO_12_IND,
  // Per-month Part C contract IDs.
  PTC_CNTRCT_JAN_ID,
  PTC_CNTRCT_FEB_ID,
  PTC_CNTRCT_MAR_ID,
  PTC_CNTRCT_APR_ID,
  PTC_CNTRCT_MAY_ID,
  PTC_CNTRCT_JUN_ID,
  PTC_CNTRCT_JUL_ID,
  PTC_CNTRCT_AUG_ID,
  PTC_CNTRCT_SEPT_ID,
  PTC_CNTRCT_OCT_ID,
  PTC_CNTRCT_NOV_ID,
  PTC_CNTRCT_DEC_ID,
  // Per-month Part C plan benefit package IDs.
  PTC_PBP_JAN_ID,
  PTC_PBP_FEB_ID,
  PTC_PBP_MAR_ID,
  PTC_PBP_APR_ID,
  PTC_PBP_MAY_ID,
  PTC_PBP_JUN_ID,
  PTC_PBP_JUL_ID,
  PTC_PBP_AUG_ID,
  PTC_PBP_SEPT_ID,
  PTC_PBP_OCT_ID,
  PTC_PBP_NOV_ID,
  PTC_PBP_DEC_ID,
  // Per-month Part C plan type codes.
  PTC_PLAN_TYPE_JAN_CD,
  PTC_PLAN_TYPE_FEB_CD,
  PTC_PLAN_TYPE_MAR_CD,
  PTC_PLAN_TYPE_APR_CD,
  PTC_PLAN_TYPE_MAY_CD,
  PTC_PLAN_TYPE_JUN_CD,
  PTC_PLAN_TYPE_JUL_CD,
  PTC_PLAN_TYPE_AUG_CD,
  PTC_PLAN_TYPE_SEPT_CD,
  PTC_PLAN_TYPE_OCT_CD,
  PTC_PLAN_TYPE_NOV_CD,
  PTC_PLAN_TYPE_DEC_CD,
  // Per-month Part D contract IDs.
  PTD_CNTRCT_JAN_ID,
  PTD_CNTRCT_FEB_ID,
  PTD_CNTRCT_MAR_ID,
  PTD_CNTRCT_APR_ID,
  PTD_CNTRCT_MAY_ID,
  PTD_CNTRCT_JUN_ID,
  PTD_CNTRCT_JUL_ID,
  PTD_CNTRCT_AUG_ID,
  PTD_CNTRCT_SEPT_ID,
  PTD_CNTRCT_OCT_ID,
  PTD_CNTRCT_NOV_ID,
  PTD_CNTRCT_DEC_ID,
  // Per-month Part D plan benefit package IDs.
  PTD_PBP_JAN_ID,
  PTD_PBP_FEB_ID,
  PTD_PBP_MAR_ID,
  PTD_PBP_APR_ID,
  PTD_PBP_MAY_ID,
  PTD_PBP_JUN_ID,
  PTD_PBP_JUL_ID,
  PTD_PBP_AUG_ID,
  PTD_PBP_SEPT_ID,
  PTD_PBP_OCT_ID,
  PTD_PBP_NOV_ID,
  PTD_PBP_DEC_ID,
  // Per-month Part D segment IDs.
  PTD_SGMT_JAN_ID,
  PTD_SGMT_FEB_ID,
  PTD_SGMT_MAR_ID,
  PTD_SGMT_APR_ID,
  PTD_SGMT_MAY_ID,
  PTD_SGMT_JUN_ID,
  PTD_SGMT_JUL_ID,
  PTD_SGMT_AUG_ID,
  PTD_SGMT_SEPT_ID,
  PTD_SGMT_OCT_ID,
  PTD_SGMT_NOV_ID,
  PTD_SGMT_DEC_ID,
  // Per-month retiree drug subsidy indicators.
  RDS_JAN_IND,
  RDS_FEB_IND,
  RDS_MAR_IND,
  RDS_APR_IND,
  RDS_MAY_IND,
  RDS_JUN_IND,
  RDS_JUL_IND,
  RDS_AUG_IND,
  RDS_SEPT_IND,
  RDS_OCT_IND,
  RDS_NOV_IND,
  RDS_DEC_IND,
  // Per-month dual eligibility status codes.
  META_DUAL_ELGBL_STUS_JAN_CD,
  META_DUAL_ELGBL_STUS_FEB_CD,
  META_DUAL_ELGBL_STUS_MAR_CD,
  META_DUAL_ELGBL_STUS_APR_CD,
  META_DUAL_ELGBL_STUS_MAY_CD,
  META_DUAL_ELGBL_STUS_JUN_CD,
  META_DUAL_ELGBL_STUS_JUL_CD,
  META_DUAL_ELGBL_STUS_AUG_CD,
  META_DUAL_ELGBL_STUS_SEPT_CD,
  META_DUAL_ELGBL_STUS_OCT_CD,
  META_DUAL_ELGBL_STUS_NOV_CD,
  META_DUAL_ELGBL_STUS_DEC_CD,
  // Per-month cost sharing group codes.
  CST_SHR_GRP_JAN_CD,
  CST_SHR_GRP_FEB_CD,
  CST_SHR_GRP_MAR_CD,
  CST_SHR_GRP_APR_CD,
  CST_SHR_GRP_MAY_CD,
  CST_SHR_GRP_JUN_CD,
  CST_SHR_GRP_JUL_CD,
  CST_SHR_GRP_AUG_CD,
  CST_SHR_GRP_SEPT_CD,
  CST_SHR_GRP_OCT_CD,
  CST_SHR_GRP_NOV_CD,
  CST_SHR_GRP_DEC_CD
}
/**
 * Defines the fields used in the beneficiary history file. As with
 * {@link BeneficiaryFields}, declaration order is the column order.
 */
private enum BeneficiaryHistoryFields {
  DML_IND,
  BENE_ID,
  STATE_CODE,
  BENE_COUNTY_CD,
  BENE_ZIP_CD,
  BENE_BIRTH_DT,
  BENE_SEX_IDENT_CD,
  BENE_RACE_CD,
  BENE_ENTLMT_RSN_ORIG,
  BENE_ENTLMT_RSN_CURR,
  BENE_ESRD_IND,
  BENE_MDCR_STATUS_CD,
  BENE_PTA_TRMNTN_CD,
  BENE_PTB_TRMNTN_CD,
  BENE_CRNT_HIC_NUM,
  BENE_SRNM_NAME,
  BENE_GVN_NAME,
  BENE_MDL_NAME,
  MBI_NUM
}
/**
 * Defines the fields used in the outpatient claims file. As with
 * {@link BeneficiaryFields}, declaration order is the column order.
 */
private enum OutpatientFields {
  DML_IND,
  BENE_ID,
  CLM_ID,
  CLM_GRP_ID,
  FINAL_ACTION,
  NCH_NEAR_LINE_REC_IDENT_CD,
  NCH_CLM_TYPE_CD,
  CLM_FROM_DT,
  CLM_THRU_DT,
  NCH_WKLY_PROC_DT,
  FI_CLM_PROC_DT,
  CLAIM_QUERY_CODE,
  PRVDR_NUM,
  CLM_FAC_TYPE_CD,
  CLM_SRVC_CLSFCTN_TYPE_CD,
  CLM_FREQ_CD,
  FI_NUM,
  CLM_MDCR_NON_PMT_RSN_CD,
  CLM_PMT_AMT,
  NCH_PRMRY_PYR_CLM_PD_AMT,
  NCH_PRMRY_PYR_CD,
  PRVDR_STATE_CD,
  ORG_NPI_NUM,
  AT_PHYSN_UPIN,
  AT_PHYSN_NPI,
  OP_PHYSN_UPIN,
  OP_PHYSN_NPI,
  OT_PHYSN_UPIN,
  OT_PHYSN_NPI,
  CLM_MCO_PD_SW,
  PTNT_DSCHRG_STUS_CD,
  CLM_TOT_CHRG_AMT,
  NCH_BENE_BLOOD_DDCTBL_LBLTY_AM,
  NCH_PROFNL_CMPNT_CHRG_AMT,
  PRNCPAL_DGNS_CD,
  PRNCPAL_DGNS_VRSN_CD,
  // Diagnosis codes 1-25, each paired with its ICD version code.
  ICD_DGNS_CD1,
  ICD_DGNS_VRSN_CD1,
  ICD_DGNS_CD2,
  ICD_DGNS_VRSN_CD2,
  ICD_DGNS_CD3,
  ICD_DGNS_VRSN_CD3,
  ICD_DGNS_CD4,
  ICD_DGNS_VRSN_CD4,
  ICD_DGNS_CD5,
  ICD_DGNS_VRSN_CD5,
  ICD_DGNS_CD6,
  ICD_DGNS_VRSN_CD6,
  ICD_DGNS_CD7,
  ICD_DGNS_VRSN_CD7,
  ICD_DGNS_CD8,
  ICD_DGNS_VRSN_CD8,
  ICD_DGNS_CD9,
  ICD_DGNS_VRSN_CD9,
  ICD_DGNS_CD10,
  ICD_DGNS_VRSN_CD10,
  ICD_DGNS_CD11,
  ICD_DGNS_VRSN_CD11,
  ICD_DGNS_CD12,
  ICD_DGNS_VRSN_CD12,
  ICD_DGNS_CD13,
  ICD_DGNS_VRSN_CD13,
  ICD_DGNS_CD14,
  ICD_DGNS_VRSN_CD14,
  ICD_DGNS_CD15,
  ICD_DGNS_VRSN_CD15,
  ICD_DGNS_CD16,
  ICD_DGNS_VRSN_CD16,
  ICD_DGNS_CD17,
  ICD_DGNS_VRSN_CD17,
  ICD_DGNS_CD18,
  ICD_DGNS_VRSN_CD18,
  ICD_DGNS_CD19,
  ICD_DGNS_VRSN_CD19,
  ICD_DGNS_CD20,
  ICD_DGNS_VRSN_CD20,
  ICD_DGNS_CD21,
  ICD_DGNS_VRSN_CD21,
  ICD_DGNS_CD22,
  ICD_DGNS_VRSN_CD22,
  ICD_DGNS_CD23,
  ICD_DGNS_VRSN_CD23,
  ICD_DGNS_CD24,
  ICD_DGNS_VRSN_CD24,
  ICD_DGNS_CD25,
  ICD_DGNS_VRSN_CD25,
  FST_DGNS_E_CD,
  FST_DGNS_E_VRSN_CD,
  // External-cause diagnosis codes 1-12, each paired with its version code.
  ICD_DGNS_E_CD1,
  ICD_DGNS_E_VRSN_CD1,
  ICD_DGNS_E_CD2,
  ICD_DGNS_E_VRSN_CD2,
  ICD_DGNS_E_CD3,
  ICD_DGNS_E_VRSN_CD3,
  ICD_DGNS_E_CD4,
  ICD_DGNS_E_VRSN_CD4,
  ICD_DGNS_E_CD5,
  ICD_DGNS_E_VRSN_CD5,
  ICD_DGNS_E_CD6,
  ICD_DGNS_E_VRSN_CD6,
  ICD_DGNS_E_CD7,
  ICD_DGNS_E_VRSN_CD7,
  ICD_DGNS_E_CD8,
  ICD_DGNS_E_VRSN_CD8,
  ICD_DGNS_E_CD9,
  ICD_DGNS_E_VRSN_CD9,
  ICD_DGNS_E_CD10,
  ICD_DGNS_E_VRSN_CD10,
  ICD_DGNS_E_CD11,
  ICD_DGNS_E_VRSN_CD11,
  ICD_DGNS_E_CD12,
  ICD_DGNS_E_VRSN_CD12,
  // Procedure codes 1-25, each with its version code and date.
  ICD_PRCDR_CD1,
  ICD_PRCDR_VRSN_CD1,
  PRCDR_DT1,
  ICD_PRCDR_CD2,
  ICD_PRCDR_VRSN_CD2,
  PRCDR_DT2,
  ICD_PRCDR_CD3,
  ICD_PRCDR_VRSN_CD3,
  PRCDR_DT3,
  ICD_PRCDR_CD4,
  ICD_PRCDR_VRSN_CD4,
  PRCDR_DT4,
  ICD_PRCDR_CD5,
  ICD_PRCDR_VRSN_CD5,
  PRCDR_DT5,
  ICD_PRCDR_CD6,
  ICD_PRCDR_VRSN_CD6,
  PRCDR_DT6,
  ICD_PRCDR_CD7,
  ICD_PRCDR_VRSN_CD7,
  PRCDR_DT7,
  ICD_PRCDR_CD8,
  ICD_PRCDR_VRSN_CD8,
  PRCDR_DT8,
  ICD_PRCDR_CD9,
  ICD_PRCDR_VRSN_CD9,
  PRCDR_DT9,
  ICD_PRCDR_CD10,
  ICD_PRCDR_VRSN_CD10,
  PRCDR_DT10,
  ICD_PRCDR_CD11,
  ICD_PRCDR_VRSN_CD11,
  PRCDR_DT11,
  ICD_PRCDR_CD12,
  ICD_PRCDR_VRSN_CD12,
  PRCDR_DT12,
  ICD_PRCDR_CD13,
  ICD_PRCDR_VRSN_CD13,
  PRCDR_DT13,
  ICD_PRCDR_CD14,
  ICD_PRCDR_VRSN_CD14,
  PRCDR_DT14,
  ICD_PRCDR_CD15,
  ICD_PRCDR_VRSN_CD15,
  PRCDR_DT15,
  ICD_PRCDR_CD16,
  ICD_PRCDR_VRSN_CD16,
  PRCDR_DT16,
  ICD_PRCDR_CD17,
  ICD_PRCDR_VRSN_CD17,
  PRCDR_DT17,
  ICD_PRCDR_CD18,
  ICD_PRCDR_VRSN_CD18,
  PRCDR_DT18,
  ICD_PRCDR_CD19,
  ICD_PRCDR_VRSN_CD19,
  PRCDR_DT19,
  ICD_PRCDR_CD20,
  ICD_PRCDR_VRSN_CD20,
  PRCDR_DT20,
  ICD_PRCDR_CD21,
  ICD_PRCDR_VRSN_CD21,
  PRCDR_DT21,
  ICD_PRCDR_CD22,
  ICD_PRCDR_VRSN_CD22,
  PRCDR_DT22,
  ICD_PRCDR_CD23,
  ICD_PRCDR_VRSN_CD23,
  PRCDR_DT23,
  ICD_PRCDR_CD24,
  ICD_PRCDR_VRSN_CD24,
  PRCDR_DT24,
  ICD_PRCDR_CD25,
  ICD_PRCDR_VRSN_CD25,
  PRCDR_DT25,
  // Reason-for-visit codes 1-3, each paired with its version code.
  RSN_VISIT_CD1,
  RSN_VISIT_VRSN_CD1,
  RSN_VISIT_CD2,
  RSN_VISIT_VRSN_CD2,
  RSN_VISIT_CD3,
  RSN_VISIT_VRSN_CD3,
  NCH_BENE_PTB_DDCTBL_AMT,
  NCH_BENE_PTB_COINSRNC_AMT,
  CLM_OP_PRVDR_PMT_AMT,
  CLM_OP_BENE_PMT_AMT,
  // Revenue center (claim line) fields.
  CLM_LINE_NUM,
  REV_CNTR,
  REV_CNTR_DT,
  REV_CNTR_1ST_ANSI_CD,
  REV_CNTR_2ND_ANSI_CD,
  REV_CNTR_3RD_ANSI_CD,
  REV_CNTR_4TH_ANSI_CD,
  REV_CNTR_APC_HIPPS_CD,
  HCPCS_CD,
  HCPCS_1ST_MDFR_CD,
  HCPCS_2ND_MDFR_CD,
  REV_CNTR_PMT_MTHD_IND_CD,
  REV_CNTR_DSCNT_IND_CD,
  REV_CNTR_PACKG_IND_CD,
  REV_CNTR_OTAF_PMT_CD,
  REV_CNTR_IDE_NDC_UPC_NUM,
  REV_CNTR_UNIT_CNT,
  REV_CNTR_RATE_AMT,
  REV_CNTR_BLOOD_DDCTBL_AMT,
  REV_CNTR_CASH_DDCTBL_AMT,
  REV_CNTR_COINSRNC_WGE_ADJSTD_C,
  REV_CNTR_RDCD_COINSRNC_AMT,
  REV_CNTR_1ST_MSP_PD_AMT,
  REV_CNTR_2ND_MSP_PD_AMT,
  REV_CNTR_PRVDR_PMT_AMT,
  REV_CNTR_BENE_PMT_AMT,
  REV_CNTR_PTNT_RSPNSBLTY_PMT,
  REV_CNTR_PMT_AMT_AMT,
  REV_CNTR_TOT_CHRG_AMT,
  REV_CNTR_NCVRD_CHRG_AMT,
  REV_CNTR_STUS_IND_CD,
  REV_CNTR_NDC_QTY,
  REV_CNTR_NDC_QTY_QLFR_CD,
  RNDRNG_PHYSN_UPIN,
  RNDRNG_PHYSN_NPI
}
/**
 * Defines the fields used in the inpatient claims file. As with
 * {@link BeneficiaryFields}, declaration order is the column order.
 */
private enum InpatientFields {
  DML_IND,
  BENE_ID,
  CLM_ID,
  CLM_GRP_ID,
  FINAL_ACTION,
  NCH_NEAR_LINE_REC_IDENT_CD,
  NCH_CLM_TYPE_CD,
  CLM_FROM_DT,
  CLM_THRU_DT,
  NCH_WKLY_PROC_DT,
  FI_CLM_PROC_DT,
  CLAIM_QUERY_CODE,
  PRVDR_NUM,
  CLM_FAC_TYPE_CD,
  CLM_SRVC_CLSFCTN_TYPE_CD,
  CLM_FREQ_CD,
  FI_NUM,
  CLM_MDCR_NON_PMT_RSN_CD,
  CLM_PMT_AMT,
  NCH_PRMRY_PYR_CLM_PD_AMT,
  NCH_PRMRY_PYR_CD,
  FI_CLM_ACTN_CD,
  PRVDR_STATE_CD,
  ORG_NPI_NUM,
  AT_PHYSN_UPIN,
  AT_PHYSN_NPI,
  OP_PHYSN_UPIN,
  OP_PHYSN_NPI,
  OT_PHYSN_UPIN,
  OT_PHYSN_NPI,
  CLM_MCO_PD_SW,
  PTNT_DSCHRG_STUS_CD,
  CLM_PPS_IND_CD,
  CLM_TOT_CHRG_AMT,
  CLM_ADMSN_DT,
  CLM_IP_ADMSN_TYPE_CD,
  CLM_SRC_IP_ADMSN_CD,
  NCH_PTNT_STATUS_IND_CD,
  CLM_PASS_THRU_PER_DIEM_AMT,
  NCH_BENE_IP_DDCTBL_AMT,
  NCH_BENE_PTA_COINSRNC_LBLTY_AM,
  NCH_BENE_BLOOD_DDCTBL_LBLTY_AM,
  NCH_PROFNL_CMPNT_CHRG_AMT,
  NCH_IP_NCVRD_CHRG_AMT,
  NCH_IP_TOT_DDCTN_AMT,
  CLM_TOT_PPS_CPTL_AMT,
  CLM_PPS_CPTL_FSP_AMT,
  CLM_PPS_CPTL_OUTLIER_AMT,
  CLM_PPS_CPTL_DSPRPRTNT_SHR_AMT,
  CLM_PPS_CPTL_IME_AMT,
  CLM_PPS_CPTL_EXCPTN_AMT,
  CLM_PPS_OLD_CPTL_HLD_HRMLS_AMT,
  CLM_PPS_CPTL_DRG_WT_NUM,
  CLM_UTLZTN_DAY_CNT,
  BENE_TOT_COINSRNC_DAYS_CNT,
  BENE_LRD_USED_CNT,
  CLM_NON_UTLZTN_DAYS_CNT,
  NCH_BLOOD_PNTS_FRNSHD_QTY,
  NCH_VRFD_NCVRD_STAY_FROM_DT,
  NCH_VRFD_NCVRD_STAY_THRU_DT,
  NCH_ACTV_OR_CVRD_LVL_CARE_THRU,
  NCH_BENE_MDCR_BNFTS_EXHTD_DT_I,
  NCH_BENE_DSCHRG_DT,
  CLM_DRG_CD,
  CLM_DRG_OUTLIER_STAY_CD,
  NCH_DRG_OUTLIER_APRVD_PMT_AMT,
  ADMTG_DGNS_CD,
  ADMTG_DGNS_VRSN_CD,
  PRNCPAL_DGNS_CD,
  PRNCPAL_DGNS_VRSN_CD,
  // Diagnosis codes 1-25, each with ICD version and present-on-admission switch.
  ICD_DGNS_CD1,
  ICD_DGNS_VRSN_CD1,
  CLM_POA_IND_SW1,
  ICD_DGNS_CD2,
  ICD_DGNS_VRSN_CD2,
  CLM_POA_IND_SW2,
  ICD_DGNS_CD3,
  ICD_DGNS_VRSN_CD3,
  CLM_POA_IND_SW3,
  ICD_DGNS_CD4,
  ICD_DGNS_VRSN_CD4,
  CLM_POA_IND_SW4,
  ICD_DGNS_CD5,
  ICD_DGNS_VRSN_CD5,
  CLM_POA_IND_SW5,
  ICD_DGNS_CD6,
  ICD_DGNS_VRSN_CD6,
  CLM_POA_IND_SW6,
  ICD_DGNS_CD7,
  ICD_DGNS_VRSN_CD7,
  CLM_POA_IND_SW7,
  ICD_DGNS_CD8,
  ICD_DGNS_VRSN_CD8,
  CLM_POA_IND_SW8,
  ICD_DGNS_CD9,
  ICD_DGNS_VRSN_CD9,
  CLM_POA_IND_SW9,
  ICD_DGNS_CD10,
  ICD_DGNS_VRSN_CD10,
  CLM_POA_IND_SW10,
  ICD_DGNS_CD11,
  ICD_DGNS_VRSN_CD11,
  CLM_POA_IND_SW11,
  ICD_DGNS_CD12,
  ICD_DGNS_VRSN_CD12,
  CLM_POA_IND_SW12,
  ICD_DGNS_CD13,
  ICD_DGNS_VRSN_CD13,
  CLM_POA_IND_SW13,
  ICD_DGNS_CD14,
  ICD_DGNS_VRSN_CD14,
  CLM_POA_IND_SW14,
  ICD_DGNS_CD15,
  ICD_DGNS_VRSN_CD15,
  CLM_POA_IND_SW15,
  ICD_DGNS_CD16,
  ICD_DGNS_VRSN_CD16,
  CLM_POA_IND_SW16,
  ICD_DGNS_CD17,
  ICD_DGNS_VRSN_CD17,
  CLM_POA_IND_SW17,
  ICD_DGNS_CD18,
  ICD_DGNS_VRSN_CD18,
  CLM_POA_IND_SW18,
  ICD_DGNS_CD19,
  ICD_DGNS_VRSN_CD19,
  CLM_POA_IND_SW19,
  ICD_DGNS_CD20,
  ICD_DGNS_VRSN_CD20,
  CLM_POA_IND_SW20,
  ICD_DGNS_CD21,
  ICD_DGNS_VRSN_CD21,
  CLM_POA_IND_SW21,
  ICD_DGNS_CD22,
  ICD_DGNS_VRSN_CD22,
  CLM_POA_IND_SW22,
  ICD_DGNS_CD23,
  ICD_DGNS_VRSN_CD23,
  CLM_POA_IND_SW23,
  ICD_DGNS_CD24,
  ICD_DGNS_VRSN_CD24,
  CLM_POA_IND_SW24,
  ICD_DGNS_CD25,
  ICD_DGNS_VRSN_CD25,
  CLM_POA_IND_SW25,
  FST_DGNS_E_CD,
  FST_DGNS_E_VRSN_CD,
  // External-cause diagnosis codes 1-12, each with version and POA switch.
  ICD_DGNS_E_CD1,
  ICD_DGNS_E_VRSN_CD1,
  CLM_E_POA_IND_SW1,
  ICD_DGNS_E_CD2,
  ICD_DGNS_E_VRSN_CD2,
  CLM_E_POA_IND_SW2,
  ICD_DGNS_E_CD3,
  ICD_DGNS_E_VRSN_CD3,
  CLM_E_POA_IND_SW3,
  ICD_DGNS_E_CD4,
  ICD_DGNS_E_VRSN_CD4,
  CLM_E_POA_IND_SW4,
  ICD_DGNS_E_CD5,
  ICD_DGNS_E_VRSN_CD5,
  CLM_E_POA_IND_SW5,
  ICD_DGNS_E_CD6,
  ICD_DGNS_E_VRSN_CD6,
  CLM_E_POA_IND_SW6,
  ICD_DGNS_E_CD7,
  ICD_DGNS_E_VRSN_CD7,
  CLM_E_POA_IND_SW7,
  ICD_DGNS_E_CD8,
  ICD_DGNS_E_VRSN_CD8,
  CLM_E_POA_IND_SW8,
  ICD_DGNS_E_CD9,
  ICD_DGNS_E_VRSN_CD9,
  CLM_E_POA_IND_SW9,
  ICD_DGNS_E_CD10,
  ICD_DGNS_E_VRSN_CD10,
  CLM_E_POA_IND_SW10,
  ICD_DGNS_E_CD11,
  ICD_DGNS_E_VRSN_CD11,
  CLM_E_POA_IND_SW11,
  ICD_DGNS_E_CD12,
  ICD_DGNS_E_VRSN_CD12,
  CLM_E_POA_IND_SW12,
  // Procedure codes 1-25, each with its version code and date.
  ICD_PRCDR_CD1,
  ICD_PRCDR_VRSN_CD1,
  PRCDR_DT1,
  ICD_PRCDR_CD2,
  ICD_PRCDR_VRSN_CD2,
  PRCDR_DT2,
  ICD_PRCDR_CD3,
  ICD_PRCDR_VRSN_CD3,
  PRCDR_DT3,
  ICD_PRCDR_CD4,
  ICD_PRCDR_VRSN_CD4,
  PRCDR_DT4,
  ICD_PRCDR_CD5,
  ICD_PRCDR_VRSN_CD5,
  PRCDR_DT5,
  ICD_PRCDR_CD6,
  ICD_PRCDR_VRSN_CD6,
  PRCDR_DT6,
  ICD_PRCDR_CD7,
  ICD_PRCDR_VRSN_CD7,
  PRCDR_DT7,
  ICD_PRCDR_CD8,
  ICD_PRCDR_VRSN_CD8,
  PRCDR_DT8,
  ICD_PRCDR_CD9,
  ICD_PRCDR_VRSN_CD9,
  PRCDR_DT9,
  ICD_PRCDR_CD10,
  ICD_PRCDR_VRSN_CD10,
  PRCDR_DT10,
  ICD_PRCDR_CD11,
  ICD_PRCDR_VRSN_CD11,
  PRCDR_DT11,
  ICD_PRCDR_CD12,
  ICD_PRCDR_VRSN_CD12,
  PRCDR_DT12,
  ICD_PRCDR_CD13,
  ICD_PRCDR_VRSN_CD13,
  PRCDR_DT13,
  ICD_PRCDR_CD14,
  ICD_PRCDR_VRSN_CD14,
  PRCDR_DT14,
  ICD_PRCDR_CD15,
  ICD_PRCDR_VRSN_CD15,
  PRCDR_DT15,
  ICD_PRCDR_CD16,
  ICD_PRCDR_VRSN_CD16,
  PRCDR_DT16,
  ICD_PRCDR_CD17,
  ICD_PRCDR_VRSN_CD17,
  PRCDR_DT17,
  ICD_PRCDR_CD18,
  ICD_PRCDR_VRSN_CD18,
  PRCDR_DT18,
  ICD_PRCDR_CD19,
  ICD_PRCDR_VRSN_CD19,
  PRCDR_DT19,
  ICD_PRCDR_CD20,
  ICD_PRCDR_VRSN_CD20,
  PRCDR_DT20,
  ICD_PRCDR_CD21,
  ICD_PRCDR_VRSN_CD21,
  PRCDR_DT21,
  ICD_PRCDR_CD22,
  ICD_PRCDR_VRSN_CD22,
  PRCDR_DT22,
  ICD_PRCDR_CD23,
  ICD_PRCDR_VRSN_CD23,
  PRCDR_DT23,
  ICD_PRCDR_CD24,
  ICD_PRCDR_VRSN_CD24,
  PRCDR_DT24,
  ICD_PRCDR_CD25,
  ICD_PRCDR_VRSN_CD25,
  PRCDR_DT25,
  IME_OP_CLM_VAL_AMT,
  DSH_OP_CLM_VAL_AMT,
  // Revenue center (claim line) fields.
  CLM_LINE_NUM,
  REV_CNTR,
  HCPCS_CD,
  REV_CNTR_UNIT_CNT,
  REV_CNTR_RATE_AMT,
  REV_CNTR_TOT_CHRG_AMT,
  REV_CNTR_NCVRD_CHRG_AMT,
  REV_CNTR_DDCTBL_COINSRNC_CD,
  REV_CNTR_NDC_QTY,
  REV_CNTR_NDC_QTY_QLFR_CD,
  RNDRNG_PHYSN_UPIN,
  RNDRNG_PHYSN_NPI
}
  /**
   * Columns of the carrier claims output file. Constant names are the output
   * column names and declaration order is the column order (the header and each
   * row are generated by iterating the enum constants in order).
   */
  private enum CarrierFields {
    // Claim-level fields
    DML_IND,
    BENE_ID,
    CLM_ID,
    CLM_GRP_ID,
    FINAL_ACTION,
    NCH_NEAR_LINE_REC_IDENT_CD,
    NCH_CLM_TYPE_CD,
    CLM_FROM_DT,
    CLM_THRU_DT,
    NCH_WKLY_PROC_DT,
    CARR_CLM_ENTRY_CD,
    CLM_DISP_CD,
    CARR_NUM,
    CARR_CLM_PMT_DNL_CD,
    CLM_PMT_AMT,
    CARR_CLM_PRMRY_PYR_PD_AMT,
    RFR_PHYSN_UPIN,
    RFR_PHYSN_NPI,
    CARR_CLM_PRVDR_ASGNMT_IND_SW,
    NCH_CLM_PRVDR_PMT_AMT,
    NCH_CLM_BENE_PMT_AMT,
    NCH_CARR_CLM_SBMTD_CHRG_AMT,
    NCH_CARR_CLM_ALOWD_AMT,
    CARR_CLM_CASH_DDCTBL_APLD_AMT,
    CARR_CLM_HCPCS_YR_CD,
    CARR_CLM_RFRNG_PIN_NUM,
    // Diagnosis codes: principal plus up to 12 additional code/version pairs
    PRNCPAL_DGNS_CD,
    PRNCPAL_DGNS_VRSN_CD,
    ICD_DGNS_CD1,
    ICD_DGNS_VRSN_CD1,
    ICD_DGNS_CD2,
    ICD_DGNS_VRSN_CD2,
    ICD_DGNS_CD3,
    ICD_DGNS_VRSN_CD3,
    ICD_DGNS_CD4,
    ICD_DGNS_VRSN_CD4,
    ICD_DGNS_CD5,
    ICD_DGNS_VRSN_CD5,
    ICD_DGNS_CD6,
    ICD_DGNS_VRSN_CD6,
    ICD_DGNS_CD7,
    ICD_DGNS_VRSN_CD7,
    ICD_DGNS_CD8,
    ICD_DGNS_VRSN_CD8,
    ICD_DGNS_CD9,
    ICD_DGNS_VRSN_CD9,
    ICD_DGNS_CD10,
    ICD_DGNS_VRSN_CD10,
    ICD_DGNS_CD11,
    ICD_DGNS_VRSN_CD11,
    ICD_DGNS_CD12,
    ICD_DGNS_VRSN_CD12,
    CLM_CLNCL_TRIL_NUM,
    // Line-level fields
    LINE_NUM,
    CARR_PRFRNG_PIN_NUM,
    PRF_PHYSN_UPIN,
    PRF_PHYSN_NPI,
    ORG_NPI_NUM,
    CARR_LINE_PRVDR_TYPE_CD,
    TAX_NUM,
    PRVDR_STATE_CD,
    PRVDR_ZIP,
    PRVDR_SPCLTY,
    PRTCPTNG_IND_CD,
    CARR_LINE_RDCD_PMT_PHYS_ASTN_C,
    LINE_SRVC_CNT,
    LINE_CMS_TYPE_SRVC_CD,
    LINE_PLACE_OF_SRVC_CD,
    CARR_LINE_PRCNG_LCLTY_CD,
    LINE_1ST_EXPNS_DT,
    LINE_LAST_EXPNS_DT,
    HCPCS_CD,
    HCPCS_1ST_MDFR_CD,
    HCPCS_2ND_MDFR_CD,
    BETOS_CD,
    LINE_NCH_PMT_AMT,
    LINE_BENE_PMT_AMT,
    LINE_PRVDR_PMT_AMT,
    LINE_BENE_PTB_DDCTBL_AMT,
    LINE_BENE_PRMRY_PYR_CD,
    LINE_BENE_PRMRY_PYR_PD_AMT,
    LINE_COINSRNC_AMT,
    LINE_SBMTD_CHRG_AMT,
    LINE_ALOWD_CHRG_AMT,
    LINE_PRCSG_IND_CD,
    LINE_PMT_80_100_CD,
    LINE_SERVICE_DEDUCTIBLE,
    CARR_LINE_MTUS_CNT,
    CARR_LINE_MTUS_CD,
    LINE_ICD_DGNS_CD,
    LINE_ICD_DGNS_VRSN_CD,
    HPSA_SCRCTY_IND_CD,
    CARR_LINE_RX_NUM,
    LINE_HCT_HGB_RSLT_NUM,
    LINE_HCT_HGB_TYPE_CD,
    LINE_NDC_CD,
    CARR_LINE_CLIA_LAB_NUM,
    CARR_LINE_ANSTHSA_UNIT_CNT
  }
public enum PrescriptionFields {
DML_IND,
PDE_ID,
CLM_GRP_ID,
FINAL_ACTION,
BENE_ID,
SRVC_DT,
PD_DT,
SRVC_PRVDR_ID_QLFYR_CD,
SRVC_PRVDR_ID,
PRSCRBR_ID_QLFYR_CD,
PRSCRBR_ID,
RX_SRVC_RFRNC_NUM,
PROD_SRVC_ID,
PLAN_CNTRCT_REC_ID,
PLAN_PBP_REC_NUM,
CMPND_CD,
DAW_PROD_SLCTN_CD,
QTY_DSPNSD_NUM,
DAYS_SUPLY_NUM,
FILL_NUM,
DSPNSNG_STUS_CD,
DRUG_CVRG_STUS_CD,
ADJSTMT_DLTN_CD,
NSTD_FRMT_CD,
PRCNG_EXCPTN_CD,
CTSTRPHC_CVRG_CD,
GDC_BLW_OOPT_AMT,
GDC_ABV_OOPT_AMT,
PTNT_PAY_AMT,
OTHR_TROOP_AMT,
LICS_AMT,
PLRO_AMT,
CVRD_D_PLAN_PD_AMT,
NCVRD_PLAN_PD_AMT,
TOT_RX_CST_AMT,
RX_ORGN_CD,
RPTD_GAP_DSCNT_NUM,
BRND_GNRC_CD,
PHRMCY_SRVC_TYPE_CD,
PTNT_RSDNC_CD,
SUBMSN_CLR_CD
}
  /**
   * Thread safe singleton pattern (initialization-on-demand holder idiom)
   * adopted from
   * https://stackoverflow.com/questions/7048198/thread-safe-singletons-in-java
   * The JVM guarantees the instance is created lazily, exactly once, when this
   * holder class is first loaded.
   */
  private static class SingletonHolder {
    /**
     * Singleton instance of the BB2Exporter.
     */
    private static final BB2Exporter instance = new BB2Exporter();
  }
  /**
   * Get the current instance of the BB2Exporter.
   *
   * @return the current instance of the BB2Exporter.
   */
  public static BB2Exporter getInstance() {
    return SingletonHolder.instance;
  }
/**
* Utility class for writing to BB2 files.
*/
private static class SynchronizedBBLineWriter extends BufferedWriter {
private static final String BB_FIELD_SEPARATOR = "|";
/**
* Construct a new instance.
* @param file the file to write to
* @throws IOException if something goes wrong
*/
public SynchronizedBBLineWriter(File file) throws IOException {
super(new FileWriter(file));
}
/**
* Write a line of output consisting of one or more fields separated by '|' and terminated with
* a system new line.
* @param fields the fields that will be concatenated into the line
* @throws IOException if something goes wrong
*/
private void writeLine(String... fields) throws IOException {
String line = String.join(BB_FIELD_SEPARATOR, fields);
synchronized (lock) {
write(line);
newLine();
}
}
/**
* Write a BB2 file header.
* @param enumClass the enumeration class whose members define the column names
* @throws IOException if something goes wrong
*/
public <E extends Enum<E>> void writeHeader(Class<E> enumClass) throws IOException {
String[] fields = Arrays.stream(enumClass.getEnumConstants()).map(Enum::name)
.toArray(String[]::new);
writeLine(fields);
}
/**
* Write a BB2 file line.
* @param enumClass the enumeration class whose members define the column names
* @param fieldValues a sparse map of column names to values, missing values will result in
* empty values in the corresponding column
* @throws IOException if something goes wrong
*/
public <E extends Enum<E>> void writeValues(Class<E> enumClass, Map<E, String> fieldValues)
throws IOException {
String[] fields = Arrays.stream(enumClass.getEnumConstants())
.map((e) -> fieldValues.getOrDefault(e, "")).toArray(String[]::new);
writeLine(fields);
}
}
}
|
src/main/java/org/mitre/synthea/export/BB2Exporter.java
|
package org.mitre.synthea.export;
import static org.mitre.synthea.export.ExportHelper.nextFriday;

import com.google.gson.JsonObject;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.io.Flushable;
import java.io.IOException;
import java.nio.file.Path;
import java.text.SimpleDateFormat;
import java.time.Instant;
import java.time.ZoneId;
import java.util.Arrays;
import java.util.Date;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import java.util.concurrent.atomic.AtomicInteger;
import org.mitre.synthea.helpers.SimpleCSV;
import org.mitre.synthea.helpers.Utilities;
import org.mitre.synthea.world.agents.Payer;
import org.mitre.synthea.world.agents.Person;
import org.mitre.synthea.world.agents.Provider.ProviderType;
import org.mitre.synthea.world.concepts.HealthRecord;
import org.mitre.synthea.world.concepts.HealthRecord.EncounterType;
import org.mitre.synthea.world.concepts.HealthRecord.Medication;
/**
* BlueButton 2 Exporter.
*/
public class BB2Exporter implements Flushable {
  // Writers for each output file; created/recreated by prepareOutputFiles().
  private SynchronizedBBLineWriter beneficiary;
  private SynchronizedBBLineWriter beneficiaryHistory;
  private SynchronizedBBLineWriter outpatient;
  private SynchronizedBBLineWriter inpatient;
  private SynchronizedBBLineWriter carrier;
  private SynchronizedBBLineWriter prescription;
  // Monotonic ID generators shared across all exported persons.
  private AtomicInteger claimId; // per claim per encounter
  private AtomicInteger claimGroupId; // per encounter
  private AtomicInteger pdeId; // per medication claim
  // Rows of payers/carriers.csv, loaded once in the constructor.
  private List<LinkedHashMap<String, String>> carrierLookup;
  // Helper used to map a provider state to a state code — see getStateCode calls.
  private stateCodeMapper stateLookup;
  // Person.attributes keys under which exportBeneficiary stashes generated IDs.
  private static final String BB2_BENE_ID = "BB2_BENE_ID";
  private static final String BB2_HIC_ID = "BB2_HIC_ID";
  /**
   * Day-Month-Year date format (dd-MMM-yyyy, e.g. 01-Jan-1999).
   */
  private static final SimpleDateFormat BB2_DATE_FORMAT = new SimpleDateFormat("dd-MMM-yyyy");

  /**
   * Get a date string in the format dd-MMM-yyyy from the given time stamp.
   * @param time milliseconds since the epoch
   * @return the formatted date string
   */
  private static String bb2DateFromTimestamp(long time) {
    // SimpleDateFormat is not thread safe, so serialize access to it:
    // http://bugs.java.com/bugdatabase/view_bug.do?bug_id=6231579
    synchronized (BB2_DATE_FORMAT) {
      return BB2_DATE_FORMAT.format(new Date(time));
    }
  }
  /**
   * Construct the exporter: initialize the ID counters, load the carrier lookup
   * table, then create the output folder and files and write headers to each
   * file.
   */
  private BB2Exporter() {
    claimId = new AtomicInteger();
    claimGroupId = new AtomicInteger();
    pdeId = new AtomicInteger();
    try {
      String csv = Utilities.readResource("payers/carriers.csv");
      if (csv.startsWith("\uFEFF")) {
        csv = csv.substring(1); // Removes BOM.
      }
      carrierLookup = SimpleCSV.parse(csv);
    } catch (IOException e) {
      throw new RuntimeException(e);
    }
    stateLookup = new stateCodeMapper();
    try {
      prepareOutputFiles();
    } catch (IOException e) {
      // wrap the exception in a runtime exception.
      // the singleton pattern below doesn't work if the constructor can throw
      // and if these do throw ioexceptions there's nothing we can do anyway
      throw new RuntimeException(e);
    }
  }
/**
* Create the output folder and files. Write headers to each file.
*/
final void prepareOutputFiles() throws IOException {
// Clean up any existing output files
if (beneficiary != null) {
beneficiary.close();
}
if (beneficiaryHistory != null) {
beneficiaryHistory.close();
}
if (inpatient != null) {
inpatient.close();
}
if (outpatient != null) {
outpatient.close();
}
if (carrier != null) {
carrier.close();
}
if (prescription != null) {
prescription.close();
}
// Initialize output files
File output = Exporter.getOutputFolder("bb2", null);
output.mkdirs();
Path outputDirectory = output.toPath();
File beneficiaryFile = outputDirectory.resolve("beneficiary.csv").toFile();
beneficiary = new SynchronizedBBLineWriter(beneficiaryFile);
beneficiary.writeHeader(BeneficiaryFields.class);
File beneficiaryHistoryFile = outputDirectory.resolve("beneficiary_history.csv").toFile();
beneficiaryHistory = new SynchronizedBBLineWriter(beneficiaryHistoryFile);
beneficiaryHistory.writeHeader(BeneficiaryHistoryFields.class);
File outpatientFile = outputDirectory.resolve("outpatient.csv").toFile();
outpatient = new SynchronizedBBLineWriter(outpatientFile);
outpatient.writeHeader(OutpatientFields.class);
File inpatientFile = outputDirectory.resolve("inpatient.csv").toFile();
inpatient = new SynchronizedBBLineWriter(inpatientFile);
inpatient.writeHeader(InpatientFields.class);
File carrierFile = outputDirectory.resolve("carrier.csv").toFile();
carrier = new SynchronizedBBLineWriter(carrierFile);
carrier.writeHeader(CarrierFields.class);
File prescriptionFile = outputDirectory.resolve("prescription.csv").toFile();
prescription = new SynchronizedBBLineWriter(prescriptionFile);
prescription.writeHeader(PrescriptionFields.class);
}
  /**
   * Export a single person.
   * <p>Beneficiary details are exported first since that step generates the
   * beneficiary and HIC IDs that the other export methods read back from the
   * person's attributes.</p>
   * @param person the person to export
   * @param stopTime end time of simulation
   * @throws IOException if something goes wrong
   */
  void export(Person person, long stopTime) throws IOException {
    exportBeneficiary(person, stopTime);
    exportBeneficiaryHistory(person, stopTime);
    exportOutpatient(person, stopTime);
    exportInpatient(person, stopTime);
    exportCarrier(person, stopTime);
    exportPrescription(person, stopTime);
  }
/**
* Export a beneficiary details for single person.
* @param person the person to export
* @param stopTime end time of simulation
* @throws IOException if something goes wrong
*/
private void exportBeneficiary(Person person, long stopTime) throws IOException {
HashMap<BeneficiaryFields, String> fieldValues = new HashMap<>();
fieldValues.put(BeneficiaryFields.DML_IND, "INSERT");
String personId = (String)person.attributes.get(Person.ID);
String beneId = personId.split("-")[4]; // last segment of UUID
person.attributes.put(BB2_BENE_ID, beneId);
fieldValues.put(BeneficiaryFields.BENE_ID, beneId);
//String hicId = personId.split("-")[0]; // first segment of UUID
String hicId = person.attributes.get(Person.IDENTIFIER_SSN).toString();
hicId = hicId.replace("-","") + "A"; // hicId = SSN + letter (A means retired beneficiary but other options too).
System.out.println("HIC: " + hicId);
person.attributes.put(BB2_HIC_ID, hicId);
fieldValues.put(BeneficiaryFields.BENE_CRNT_HIC_NUM, hicId);
fieldValues.put(BeneficiaryFields.BENE_SEX_IDENT_CD,
(String)person.attributes.get(Person.GENDER));
fieldValues.put(BeneficiaryFields.BENE_COUNTY_CD,
(String)person.attributes.get("county"));
fieldValues.put(BeneficiaryFields.STATE_CODE,
(String)person.attributes.get(Person.STATE));
fieldValues.put(BeneficiaryFields.BENE_ZIP_CD,
(String)person.attributes.get(Person.ZIP));
fieldValues.put(BeneficiaryFields.BENE_RACE_CD,
bb2RaceCode(
(String)person.attributes.get(Person.ETHNICITY),
(String)person.attributes.get(Person.RACE)));
fieldValues.put(BeneficiaryFields.BENE_SRNM_NAME,
(String)person.attributes.get(Person.LAST_NAME));
fieldValues.put(BeneficiaryFields.BENE_GVN_NAME,
(String)person.attributes.get(Person.FIRST_NAME));
long birthdate = (long) person.attributes.get(Person.BIRTHDATE);
fieldValues.put(BeneficiaryFields.BENE_BIRTH_DT, bb2DateFromTimestamp(birthdate));
fieldValues.put(BeneficiaryFields.RFRNC_YR, String.valueOf(getYear(stopTime)));
fieldValues.put(BeneficiaryFields.AGE, String.valueOf(ageAtEndOfYear(birthdate, stopTime)));
if (person.attributes.get(Person.DEATHDATE) != null) {
long deathDate = (long) person.attributes.get(Person.DEATHDATE);
fieldValues.put(BeneficiaryFields.DEATH_DT, bb2DateFromTimestamp(deathDate));
}
beneficiary.writeValues(BeneficiaryFields.class, fieldValues);
}
/**
* Export a beneficiary history for single person. Assumes exportBeneficiary
* was called first to set up various ID on person
* @param person the person to export
* @param stopTime end time of simulation
* @throws IOException if something goes wrong
*/
private void exportBeneficiaryHistory(Person person, long stopTime) throws IOException {
HashMap<BeneficiaryHistoryFields, String> fieldValues = new HashMap<>();
fieldValues.put(BeneficiaryHistoryFields.DML_IND, "INSERT");
String beneId = (String)person.attributes.get(BB2_BENE_ID);
fieldValues.put(BeneficiaryHistoryFields.BENE_ID, beneId);
String hicId = (String)person.attributes.get(BB2_HIC_ID);
fieldValues.put(BeneficiaryHistoryFields.BENE_CRNT_HIC_NUM, hicId);
fieldValues.put(BeneficiaryHistoryFields.BENE_SEX_IDENT_CD,
(String)person.attributes.get(Person.GENDER));
long birthdate = (long) person.attributes.get(Person.BIRTHDATE);
fieldValues.put(BeneficiaryHistoryFields.BENE_BIRTH_DT, bb2DateFromTimestamp(birthdate));
fieldValues.put(BeneficiaryHistoryFields.BENE_COUNTY_CD,
(String)person.attributes.get("county"));
fieldValues.put(BeneficiaryHistoryFields.STATE_CODE,
(String)person.attributes.get(Person.STATE));
fieldValues.put(BeneficiaryHistoryFields.BENE_ZIP_CD,
(String)person.attributes.get(Person.ZIP));
fieldValues.put(BeneficiaryHistoryFields.BENE_RACE_CD,
bb2RaceCode(
(String)person.attributes.get(Person.ETHNICITY),
(String)person.attributes.get(Person.RACE)));
fieldValues.put(BeneficiaryHistoryFields.BENE_SRNM_NAME,
(String)person.attributes.get(Person.LAST_NAME));
fieldValues.put(BeneficiaryHistoryFields.BENE_GVN_NAME,
(String)person.attributes.get(Person.FIRST_NAME));
beneficiaryHistory.writeValues(BeneficiaryHistoryFields.class, fieldValues);
}
/**
* Get the year of a point in time.
* @param time point in time specified as number of milliseconds since the epoch
* @return the year as a four figure value, e.g. 1971
*/
private static int getYear(long time) {
return 1900 + new Date(time).getYear();
}
  /**
   * Calculate the age of a person at the end of the year of a reference point
   * in time, i.e. the difference between the two calendar years (ignores
   * month/day, so this is the age the person attains during that year).
   * @param birthdate a person's birthdate specified as number of milliseconds since the epoch
   * @param stopTime a reference point in time specified as number of milliseconds since the epoch
   * @return the person's age
   */
  private static int ageAtEndOfYear(long birthdate, long stopTime) {
    return getYear(stopTime) - getYear(birthdate);
  }
/**
* Export outpatient claims details for a single person.
* @param person the person to export
* @param stopTime end time of simulation
* @throws IOException if something goes wrong
*/
private void exportOutpatient(Person person, long stopTime) throws IOException {
HashMap<OutpatientFields, String> fieldValues = new HashMap<>();
for (HealthRecord.Encounter encounter : person.record.encounters) {
boolean isAmbulatory = encounter.type.equals(EncounterType.AMBULATORY.toString());
boolean isOutpatient = encounter.type.equals(EncounterType.OUTPATIENT.toString());
boolean isUrgent = encounter.type.equals(EncounterType.URGENTCARE.toString());
boolean isWellness = encounter.type.equals(EncounterType.WELLNESS.toString());
boolean isPrimary = (ProviderType.PRIMARY == encounter.provider.type);
int claimId = this.claimId.incrementAndGet();
int claimGroupId = this.claimGroupId.incrementAndGet();
if (isPrimary || !(isAmbulatory || isOutpatient || isUrgent || isWellness)) {
continue;
}
fieldValues.clear();
// The REQUIRED fields
fieldValues.put(OutpatientFields.DML_IND, "INSERT");
fieldValues.put(OutpatientFields.BENE_ID, (String) person.attributes.get(BB2_BENE_ID));
fieldValues.put(OutpatientFields.CLM_ID, "" + claimId);
fieldValues.put(OutpatientFields.CLM_GRP_ID, "" + claimGroupId);
fieldValues.put(OutpatientFields.FINAL_ACTION, "F");
fieldValues.put(OutpatientFields.NCH_NEAR_LINE_REC_IDENT_CD, "W"); // W=outpatient
fieldValues.put(OutpatientFields.NCH_CLM_TYPE_CD, "40"); // 40=outpatient
fieldValues.put(OutpatientFields.CLM_FROM_DT, bb2DateFromTimestamp(encounter.start));
fieldValues.put(OutpatientFields.CLM_THRU_DT, bb2DateFromTimestamp(encounter.stop));
fieldValues.put(OutpatientFields.NCH_WKLY_PROC_DT,
bb2DateFromTimestamp(nextFriday(encounter.stop)));
fieldValues.put(OutpatientFields.CLAIM_QUERY_CODE, "3"); // 1=Interim, 3=Final, 5=Debit
fieldValues.put(OutpatientFields.PRVDR_NUM, encounter.provider.id);
fieldValues.put(OutpatientFields.CLM_FAC_TYPE_CD, "1"); // 1=Hospital, 2=SNF, 7=Dialysis
fieldValues.put(OutpatientFields.CLM_SRVC_CLSFCTN_TYPE_CD, "3"); // depends on value of above
fieldValues.put(OutpatientFields.CLM_FREQ_CD, "1"); // 1=Admit-Discharge, 9=Final
fieldValues.put(OutpatientFields.CLM_PMT_AMT, "" + encounter.claim.getTotalClaimCost());
if (encounter.claim.payer == Payer.getGovernmentPayer("Medicare")) {
fieldValues.put(OutpatientFields.NCH_PRMRY_PYR_CLM_PD_AMT, "0");
} else {
fieldValues.put(OutpatientFields.NCH_PRMRY_PYR_CLM_PD_AMT,
"" + encounter.claim.getCoveredCost());
}
//fieldValues.put(OutpatientFields.PRVDR_STATE_CD, encounter.provider.state);
String state_code = stateLookup.getStateCode(encounter.provider.state);
System.out.println("StateCD: " + state_code);
fieldValues.put(OutpatientFields.PRVDR_STATE_CD, state_code);
// PTNT_DSCHRG_STUS_CD: 1=home, 2=transfer, 3=SNF, 20=died, 30=still here
String field = null;
if (encounter.ended) {
field = "1";
} else {
field = "30"; // the patient is still here
}
if (!person.alive(encounter.stop)) {
field = "20"; // the patient died before the encounter ended
}
fieldValues.put(OutpatientFields.PTNT_DSCHRG_STUS_CD, field);
fieldValues.put(OutpatientFields.CLM_TOT_CHRG_AMT, "" + encounter.claim.getTotalClaimCost());
// TODO required in the mapping, but not in the Enum
// fieldValues.put(OutpatientFields.CLM_IP_ADMSN_TYPE_CD, null);
// fieldValues.put(OutpatientFields.CLM_PASS_THRU_PER_DIEM_AMT, null);
// fieldValues.put(OutpatientFields.NCH_BENE_IP_DDCTBL_AMT, null);
// fieldValues.put(OutpatientFields.NCH_BENE_PTA_COINSRNC_LBLTY_AM, null);
fieldValues.put(OutpatientFields.NCH_BENE_BLOOD_DDCTBL_LBLTY_AM, "0");
fieldValues.put(OutpatientFields.NCH_PROFNL_CMPNT_CHRG_AMT, "4"); // fixed $ amount?
// TODO required in the mapping, but not in the Enum
// fieldValues.put(OutpatientFields.NCH_IP_NCVRD_CHRG_AMT, null);
// fieldValues.put(OutpatientFields.NCH_IP_TOT_DDCTN_AMT, null);
// fieldValues.put(OutpatientFields.CLM_UTLZTN_DAY_CNT, null);
// fieldValues.put(OutpatientFields.BENE_TOT_COINSRNC_DAYS_CNT, null);
// fieldValues.put(OutpatientFields.CLM_NON_UTLZTN_DAYS_CNT, null);
// fieldValues.put(OutpatientFields.NCH_BLOOD_PNTS_FRNSHD_QTY, null);
// fieldValues.put(OutpatientFields.CLM_DRG_OUTLIER_STAY_CD, null);
fieldValues.put(OutpatientFields.CLM_LINE_NUM, "1");
fieldValues.put(OutpatientFields.REV_CNTR, "0001"); // total charge, lots of alternatives
fieldValues.put(OutpatientFields.REV_CNTR_UNIT_CNT, "0");
fieldValues.put(OutpatientFields.REV_CNTR_RATE_AMT, "0");
fieldValues.put(OutpatientFields.REV_CNTR_TOT_CHRG_AMT,
"" + encounter.claim.getCoveredCost());
fieldValues.put(OutpatientFields.REV_CNTR_NCVRD_CHRG_AMT,
"" + encounter.claim.getPatientCost());
outpatient.writeValues(OutpatientFields.class, fieldValues);
}
}
/**
* Export inpatient claims details for a single person.
* @param person the person to export
* @param stopTime end time of simulation
* @throws IOException if something goes wrong
*/
private void exportInpatient(Person person, long stopTime) throws IOException {
HashMap<InpatientFields, String> fieldValues = new HashMap<>();
HealthRecord.Encounter previous = null;
boolean previousInpatient = false;
boolean previousEmergency = false;
for (HealthRecord.Encounter encounter : person.record.encounters) {
boolean isInpatient = encounter.type.equals(EncounterType.INPATIENT.toString());
boolean isEmergency = encounter.type.equals(EncounterType.EMERGENCY.toString());
int claimId = this.claimId.incrementAndGet();
int claimGroupId = this.claimGroupId.incrementAndGet();
if (!(isInpatient || isEmergency)) {
previous = encounter;
previousInpatient = false;
previousEmergency = false;
continue;
}
fieldValues.clear();
// The REQUIRED fields
fieldValues.put(InpatientFields.DML_IND, "INSERT");
fieldValues.put(InpatientFields.BENE_ID, (String) person.attributes.get(BB2_BENE_ID));
fieldValues.put(InpatientFields.CLM_ID, "" + claimId);
fieldValues.put(InpatientFields.CLM_GRP_ID, "" + claimGroupId);
fieldValues.put(InpatientFields.FINAL_ACTION, "F"); // F or V
fieldValues.put(InpatientFields.NCH_NEAR_LINE_REC_IDENT_CD, "V"); // V = inpatient
fieldValues.put(InpatientFields.NCH_CLM_TYPE_CD, "60"); // Always 60 for inpatient claims
fieldValues.put(InpatientFields.CLM_FROM_DT, bb2DateFromTimestamp(encounter.start));
fieldValues.put(InpatientFields.CLM_THRU_DT, bb2DateFromTimestamp(encounter.stop));
fieldValues.put(InpatientFields.NCH_WKLY_PROC_DT,
bb2DateFromTimestamp(nextFriday(encounter.stop)));
fieldValues.put(InpatientFields.CLAIM_QUERY_CODE, "3"); // 1=Interim, 3=Final, 5=Debit
fieldValues.put(InpatientFields.PRVDR_NUM, encounter.provider.id);
fieldValues.put(InpatientFields.CLM_FAC_TYPE_CD, "1"); // 1=Hospital, 2=SNF, 7=Dialysis
fieldValues.put(InpatientFields.CLM_SRVC_CLSFCTN_TYPE_CD, "1"); // depends on value of above
fieldValues.put(InpatientFields.CLM_FREQ_CD, "1"); // 1=Admit-Discharge, 9=Final
fieldValues.put(InpatientFields.CLM_PMT_AMT, "" + encounter.claim.getTotalClaimCost());
if (encounter.claim.payer == Payer.getGovernmentPayer("Medicare")) {
fieldValues.put(InpatientFields.NCH_PRMRY_PYR_CLM_PD_AMT, "0");
} else {
fieldValues.put(InpatientFields.NCH_PRMRY_PYR_CLM_PD_AMT,
"" + encounter.claim.getCoveredCost());
}
fieldValues.put(InpatientFields.PRVDR_STATE_CD, encounter.provider.state);
fieldValues.put(InpatientFields.PRVDR_STATE_CD, stateLookup.getStateCode(encounter.provider.state));
// PTNT_DSCHRG_STUS_CD: 1=home, 2=transfer, 3=SNF, 20=died, 30=still here
String field = null;
if (encounter.ended) {
field = "1"; // TODO 2=transfer if the next encounter is also inpatient
} else {
field = "30"; // the patient is still here
}
if (!person.alive(encounter.stop)) {
field = "20"; // the patient died before the encounter ended
}
fieldValues.put(InpatientFields.PTNT_DSCHRG_STUS_CD, field);
fieldValues.put(InpatientFields.CLM_TOT_CHRG_AMT, "" + encounter.claim.getTotalClaimCost());
if (isEmergency) {
field = "1"; // emergency
} else if (previousEmergency) {
field = "2"; // urgent
} else {
field = "3"; // elective
}
fieldValues.put(InpatientFields.CLM_IP_ADMSN_TYPE_CD, field);
fieldValues.put(InpatientFields.CLM_PASS_THRU_PER_DIEM_AMT, "10"); // fixed $ amount?
fieldValues.put(InpatientFields.NCH_BENE_IP_DDCTBL_AMT,
"" + encounter.claim.getDeductiblePaid());
fieldValues.put(InpatientFields.NCH_BENE_PTA_COINSRNC_LBLTY_AM,
"" + encounter.claim.getCoinsurancePaid());
fieldValues.put(InpatientFields.NCH_BENE_BLOOD_DDCTBL_LBLTY_AM, "0");
fieldValues.put(InpatientFields.NCH_PROFNL_CMPNT_CHRG_AMT, "4"); // fixed $ amount?
fieldValues.put(InpatientFields.NCH_IP_NCVRD_CHRG_AMT,
"" + encounter.claim.getPatientCost());
fieldValues.put(InpatientFields.NCH_IP_TOT_DDCTN_AMT,
"" + encounter.claim.getPatientCost());
int days = (int) ((encounter.stop - encounter.start) / (1000 * 60 * 60 * 24));
fieldValues.put(InpatientFields.CLM_UTLZTN_DAY_CNT, "" + days);
if (days > 60) {
field = "" + (days - 60);
} else {
field = "0";
}
fieldValues.put(InpatientFields.BENE_TOT_COINSRNC_DAYS_CNT, field);
fieldValues.put(InpatientFields.CLM_NON_UTLZTN_DAYS_CNT, "0");
fieldValues.put(InpatientFields.NCH_BLOOD_PNTS_FRNSHD_QTY, "0");
if (days > 60) {
field = "1"; // days outlier
} else if (encounter.claim.getTotalClaimCost() > 100_000) {
field = "2"; // cost outlier
} else {
field = "0"; // no outlier
}
fieldValues.put(InpatientFields.CLM_DRG_OUTLIER_STAY_CD, field);
fieldValues.put(InpatientFields.CLM_LINE_NUM, "1");
fieldValues.put(InpatientFields.REV_CNTR, "0001"); // total charge, lots of alternatives
fieldValues.put(InpatientFields.REV_CNTR_UNIT_CNT, "0");
fieldValues.put(InpatientFields.REV_CNTR_RATE_AMT, "0");
fieldValues.put(InpatientFields.REV_CNTR_TOT_CHRG_AMT,
"" + encounter.claim.getCoveredCost());
fieldValues.put(InpatientFields.REV_CNTR_NCVRD_CHRG_AMT,
"" + encounter.claim.getPatientCost());
previous = encounter;
previousInpatient = isInpatient;
previousEmergency = isEmergency;
inpatient.writeValues(InpatientFields.class, fieldValues);
}
}
/**
* Export carrier claims details for a single person.
* @param person the person to export
* @param stopTime end time of simulation
* @throws IOException if something goes wrong
*/
private void exportCarrier(Person person, long stopTime) throws IOException {
HashMap<CarrierFields, String> fieldValues = new HashMap<>();
HealthRecord.Encounter previous = null;
double latestHemoglobin = 0;
for (HealthRecord.Encounter encounter : person.record.encounters) {
boolean isPrimary = (ProviderType.PRIMARY == encounter.provider.type);
int claimId = this.claimId.incrementAndGet();
int claimGroupId = this.claimGroupId.incrementAndGet();
for (HealthRecord.Observation observation : encounter.observations) {
if (observation.containsCode("718-7", "http://loinc.org")) {
latestHemoglobin = (double) observation.value;
}
}
if (!isPrimary) {
previous = encounter;
continue;
}
fieldValues.clear();
// The REQUIRED fields
fieldValues.put(CarrierFields.DML_IND, "INSERT");
fieldValues.put(CarrierFields.BENE_ID, (String) person.attributes.get(BB2_BENE_ID));
fieldValues.put(CarrierFields.CLM_ID, "" + claimId);
fieldValues.put(CarrierFields.CLM_GRP_ID, "" + claimGroupId);
fieldValues.put(CarrierFields.FINAL_ACTION, "F"); // F or V
fieldValues.put(CarrierFields.NCH_NEAR_LINE_REC_IDENT_CD, "O"); // O=physician
fieldValues.put(CarrierFields.NCH_CLM_TYPE_CD, "71"); // local carrier, non-DME
fieldValues.put(CarrierFields.CLM_FROM_DT, bb2DateFromTimestamp(encounter.start));
fieldValues.put(CarrierFields.CLM_THRU_DT, bb2DateFromTimestamp(encounter.stop));
fieldValues.put(CarrierFields.NCH_WKLY_PROC_DT,
bb2DateFromTimestamp(nextFriday(encounter.stop)));
fieldValues.put(CarrierFields.CARR_CLM_ENTRY_CD, "1");
fieldValues.put(CarrierFields.CLM_DISP_CD, "01");
fieldValues.put(CarrierFields.CARR_NUM,
getCarrier(encounter.provider.state, CarrierFields.CARR_NUM));
fieldValues.put(CarrierFields.CARR_CLM_PMT_DNL_CD, "1"); // 1=paid to physician
fieldValues.put(CarrierFields.CLM_PMT_AMT, "" + encounter.claim.getTotalClaimCost());
if (encounter.claim.payer == Payer.getGovernmentPayer("Medicare")) {
fieldValues.put(CarrierFields.CARR_CLM_PRMRY_PYR_PD_AMT, "0");
} else {
fieldValues.put(CarrierFields.CARR_CLM_PRMRY_PYR_PD_AMT,
"" + encounter.claim.getCoveredCost());
}
fieldValues.put(CarrierFields.NCH_CLM_PRVDR_PMT_AMT,
"" + encounter.claim.getTotalClaimCost());
fieldValues.put(CarrierFields.NCH_CLM_BENE_PMT_AMT, "0");
fieldValues.put(CarrierFields.NCH_CARR_CLM_SBMTD_CHRG_AMT,
"" + encounter.claim.getTotalClaimCost());
fieldValues.put(CarrierFields.NCH_CARR_CLM_ALOWD_AMT,
"" + encounter.claim.getCoveredCost());
fieldValues.put(CarrierFields.CARR_CLM_CASH_DDCTBL_APLD_AMT,
"" + encounter.claim.getDeductiblePaid());
fieldValues.put(CarrierFields.CARR_CLM_RFRNG_PIN_NUM, encounter.provider.id);
fieldValues.put(CarrierFields.LINE_NUM, "1");
fieldValues.put(CarrierFields.CARR_PRFRNG_PIN_NUM, encounter.provider.id);
fieldValues.put(CarrierFields.CARR_LINE_PRVDR_TYPE_CD, "0");
fieldValues.put(CarrierFields.TAX_NUM,
"" + encounter.clinician.attributes.get(Person.IDENTIFIER_SSN));
fieldValues.put(CarrierFields.CARR_LINE_RDCD_PMT_PHYS_ASTN_C, "0");
fieldValues.put(CarrierFields.LINE_SRVC_CNT, "" + encounter.claim.items.size());
fieldValues.put(CarrierFields.LINE_CMS_TYPE_SRVC_CD, "1");
fieldValues.put(CarrierFields.LINE_PLACE_OF_SRVC_CD, "11"); // 11=office
fieldValues.put(CarrierFields.CARR_LINE_PRCNG_LCLTY_CD,
getCarrier(encounter.provider.state, CarrierFields.CARR_LINE_PRCNG_LCLTY_CD));
fieldValues.put(CarrierFields.LINE_NCH_PMT_AMT,
"" + encounter.claim.getCoveredCost());
fieldValues.put(CarrierFields.LINE_BENE_PMT_AMT, "0");
fieldValues.put(CarrierFields.LINE_PRVDR_PMT_AMT,
"" + encounter.claim.getCoveredCost());
fieldValues.put(CarrierFields.LINE_BENE_PTB_DDCTBL_AMT,
"" + encounter.claim.getDeductiblePaid());
fieldValues.put(CarrierFields.LINE_BENE_PRMRY_PYR_PD_AMT, "0");
fieldValues.put(CarrierFields.LINE_COINSRNC_AMT,
"" + encounter.claim.getCoinsurancePaid());
fieldValues.put(CarrierFields.LINE_SBMTD_CHRG_AMT,
"" + encounter.claim.getTotalClaimCost());
fieldValues.put(CarrierFields.LINE_ALOWD_CHRG_AMT,
"" + encounter.claim.getCoveredCost());
// length of encounter in minutes
fieldValues.put(CarrierFields.CARR_LINE_MTUS_CNT,
"" + ((encounter.stop - encounter.start) / (1000 * 60)));
fieldValues.put(CarrierFields.LINE_HCT_HGB_RSLT_NUM,
"" + latestHemoglobin);
fieldValues.put(CarrierFields.CARR_LINE_ANSTHSA_UNIT_CNT, "0");
carrier.writeValues(CarrierFields.class, fieldValues);
}
}
/**
* Export prescription claims details for a single person.
* @param person the person to export
* @param stopTime end time of simulation
* @throws IOException if something goes wrong
*/
private void exportPrescription(Person person, long stopTime) throws IOException {
HashMap<PrescriptionFields, String> fieldValues = new HashMap<>();
HashMap<String, Integer> fillNum = new HashMap<>();
double costs = 0;
int costYear = 0;
for (HealthRecord.Encounter encounter : person.record.encounters) {
for (Medication medication : encounter.medications) {
int pdeId = this.pdeId.incrementAndGet();
int claimGroupId = this.claimGroupId.incrementAndGet();
fieldValues.clear();
// The REQUIRED fields
fieldValues.put(PrescriptionFields.DML_IND, "INSERT");
fieldValues.put(PrescriptionFields.PDE_ID, "" + pdeId);
fieldValues.put(PrescriptionFields.CLM_GRP_ID, "" + claimGroupId);
fieldValues.put(PrescriptionFields.FINAL_ACTION, "F");
fieldValues.put(PrescriptionFields.BENE_ID, (String) person.attributes.get(BB2_BENE_ID));
fieldValues.put(PrescriptionFields.SRVC_DT, bb2DateFromTimestamp(encounter.start));
fieldValues.put(PrescriptionFields.SRVC_PRVDR_ID_QLFYR_CD, "01"); // undefined
fieldValues.put(PrescriptionFields.SRVC_PRVDR_ID, encounter.provider.id);
fieldValues.put(PrescriptionFields.PRSCRBR_ID_QLFYR_CD, "01"); // undefined
fieldValues.put(PrescriptionFields.PRSCRBR_ID,
"" + (9_999_999_999L - encounter.clinician.identifier));
fieldValues.put(PrescriptionFields.RX_SRVC_RFRNC_NUM, "" + pdeId);
// TODO this should be an NDC code, not RxNorm
fieldValues.put(PrescriptionFields.PROD_SRVC_ID, medication.codes.get(0).code);
// H=hmo, R=ppo, S=stand-alone, E=employer direct, X=limited income
fieldValues.put(PrescriptionFields.PLAN_CNTRCT_REC_ID,
("R" + Math.abs(
UUID.fromString(medication.claim.payer.uuid)
.getMostSignificantBits())).substring(0, 5));
fieldValues.put(PrescriptionFields.PLAN_PBP_REC_NUM, "999");
// 0=not specified, 1=not compound, 2=compound
fieldValues.put(PrescriptionFields.CMPND_CD, "0");
fieldValues.put(PrescriptionFields.DAW_PROD_SLCTN_CD, "" + (int) person.rand(0, 9));
fieldValues.put(PrescriptionFields.QTY_DSPNSD_NUM, "" + getQuantity(medication, stopTime));
fieldValues.put(PrescriptionFields.DAYS_SUPLY_NUM, "" + getDays(medication, stopTime));
Integer fill = 1;
if (fillNum.containsKey(medication.codes.get(0).code)) {
fill = 1 + fillNum.get(medication.codes.get(0).code);
}
fillNum.put(medication.codes.get(0).code, fill);
fieldValues.put(PrescriptionFields.FILL_NUM, "" + fill);
fieldValues.put(PrescriptionFields.DRUG_CVRG_STUS_CD, "C");
int year = Utilities.getYear(medication.start);
if (year != costYear) {
costYear = year;
costs = 0;
}
costs += medication.claim.getPatientCost();
if (costs <= 4550.00) {
fieldValues.put(PrescriptionFields.GDC_BLW_OOPT_AMT, "" + costs);
fieldValues.put(PrescriptionFields.GDC_ABV_OOPT_AMT, "0");
} else {
fieldValues.put(PrescriptionFields.GDC_BLW_OOPT_AMT, "4550.00");
fieldValues.put(PrescriptionFields.GDC_ABV_OOPT_AMT, "" + (costs - 4550));
}
fieldValues.put(PrescriptionFields.PTNT_PAY_AMT, "" + medication.claim.getPatientCost());
fieldValues.put(PrescriptionFields.OTHR_TROOP_AMT, "0");
fieldValues.put(PrescriptionFields.LICS_AMT, "0");
fieldValues.put(PrescriptionFields.PLRO_AMT, "0");
fieldValues.put(PrescriptionFields.CVRD_D_PLAN_PD_AMT,
"" + medication.claim.getCoveredCost());
fieldValues.put(PrescriptionFields.NCVRD_PLAN_PD_AMT,
"" + medication.claim.getPatientCost());
fieldValues.put(PrescriptionFields.TOT_RX_CST_AMT,
"" + medication.claim.getTotalClaimCost());
fieldValues.put(PrescriptionFields.RPTD_GAP_DSCNT_NUM, "0");
fieldValues.put(PrescriptionFields.PHRMCY_SRVC_TYPE_CD, "0" + (int) person.rand(1, 8));
// 00=not specified, 01=home, 02=SNF, 03=long-term, 11=hospice, 14=homeless
if (person.attributes.containsKey("homeless")
&& ((Boolean) person.attributes.get("homeless") == true)) {
fieldValues.put(PrescriptionFields.PTNT_RSDNC_CD, "14");
} else {
fieldValues.put(PrescriptionFields.PTNT_RSDNC_CD, "01");
}
prescription.writeValues(PrescriptionFields.class, fieldValues);
}
}
}
/**
* Flush contents of any buffered streams to disk.
* @throws IOException if something goes wrong
*/
@Override
public void flush() throws IOException {
beneficiary.flush();
beneficiaryHistory.flush();
inpatient.flush();
outpatient.flush();
carrier.flush();
prescription.flush();
}
/**
* Get the BB2 race code. BB2 uses a single code to represent race and ethnicity, we assume
* ethnicity gets priority here.
* @param ethnicity the Synthea ethnicity
* @param race the Synthea race
* @return the BB2 race code
*/
private String bb2RaceCode(String ethnicity, String race) {
if ("hispanic".equals(ethnicity)) {
return "5";
} else {
String bbRaceCode = "0"; // unknown
switch (race) {
case "white":
bbRaceCode = "1";
break;
case "black":
bbRaceCode = "2";
break;
case "asian":
bbRaceCode = "4";
break;
case "native":
bbRaceCode = "6";
break;
case "other":
default:
bbRaceCode = "3";
break;
}
return bbRaceCode;
}
}
private String getCarrier(String state, CarrierFields column) {
for (LinkedHashMap<String, String> row : carrierLookup) {
if (row.get("STATE").equals(state) || row.get("STATE_CODE").equals(state)) {
return row.get(column.toString());
}
}
return "0";
}
private int getQuantity(Medication medication, long stopTime) {
double amountPerDay = 1;
double days = getDays(medication, stopTime);
if (medication.prescriptionDetails != null
&& medication.prescriptionDetails.has("dosage")) {
JsonObject dosage = medication.prescriptionDetails.getAsJsonObject("dosage");
long amount = dosage.get("amount").getAsLong();
long frequency = dosage.get("frequency").getAsLong();
long period = dosage.get("period").getAsLong();
String units = dosage.get("unit").getAsString();
long periodTime = Utilities.convertTime(units, period);
long perPeriod = amount * frequency;
amountPerDay = (double) ((double) (perPeriod * periodTime) / (1000.0 * 60 * 60 * 24));
if (amountPerDay == 0) {
amountPerDay = 1;
}
}
return (int) (amountPerDay * days);
}
private int getDays(Medication medication, long stopTime) {
double days = 1;
long stop = medication.stop;
if (stop == 0L) {
stop = stopTime;
}
long medDuration = stop - medication.start;
days = (double) (medDuration / (1000 * 60 * 60 * 24));
if (medication.prescriptionDetails != null
&& medication.prescriptionDetails.has("duration")) {
JsonObject duration = medication.prescriptionDetails.getAsJsonObject("duration");
long quantity = duration.get("quantity").getAsLong();
String unit = duration.get("unit").getAsString();
long durationTime = Utilities.convertTime(unit, quantity);
double durationTimeInDays = (double) (durationTime / (1000 * 60 * 60 * 24));
if (durationTimeInDays > days) {
days = durationTimeInDays;
}
}
return (int) days;
}
/**
* Utility class for converting state names and abbreviations to provider state codes
*/
class stateCodeMapper {
private HashMap<String, String> ProviderStateCodes;
private Map<String, String> StateToAbbrev = this.buildStateAbbrevTable();
private Map<String, String> AbbrevToState;
public stateCodeMapper(){
this.ProviderStateCodes = this.buildProviderStateTable();
this.StateToAbbrev = this.buildStateAbbrevTable();
// support two-way conversion between state name and abbreviations
Map<String, String> AbbrevToState = new HashMap<String, String>();
for(Map.Entry<String, String> entry : StateToAbbrev.entrySet()){
AbbrevToState.put(entry.getValue(), entry.getKey());
}
this.AbbrevToState = AbbrevToState;
}
/**
* Return state code for a given state
* @param state (either state name or abbreviation)
* @return 2-digit state code
*/
private String getStateCode(String state){
if (state.length() == 2) {
state = this.changeStateFormat(state);
}else{
state = this.capitalizeWords(state);
}
String res = this.ProviderStateCodes.getOrDefault(state, "NONE");
return res;
}
/**
* Switch between state name and abbreviation. If state is abbreviation, will return name, and vice versa
* @param state
* @return
*/
private String changeStateFormat(String state){
if (state.length() == 2) {
return this.AbbrevToState.getOrDefault(state.toUpperCase(), null);
}else{
String stateClean = this.capitalizeWords(state.toLowerCase());
return this.StateToAbbrev.getOrDefault(stateClean, null);
}
}
private Map<String, String> buildStateAbbrevTable(){
Map<String, String> states = new HashMap<String, String>();
states.put("Alabama","AL");
states.put("Alaska","AK");
states.put("Alberta","AB");
states.put("American Samoa","AS");
states.put("Arizona","AZ");
states.put("Arkansas","AR");
states.put("Armed Forces (AE)","AE");
states.put("Armed Forces Americas","AA");
states.put("Armed Forces Pacific","AP");
states.put("British Columbia","BC");
states.put("California","CA");
states.put("Colorado","CO");
states.put("Connecticut","CT");
states.put("Delaware","DE");
states.put("District Of Columbia","DC");
states.put("Florida","FL");
states.put("Georgia","GA");
states.put("Guam","GU");
states.put("Hawaii","HI");
states.put("Idaho","ID");
states.put("Illinois","IL");
states.put("Indiana","IN");
states.put("Iowa","IA");
states.put("Kansas","KS");
states.put("Kentucky","KY");
states.put("Louisiana","LA");
states.put("Maine","ME");
states.put("Manitoba","MB");
states.put("Maryland","MD");
states.put("Massachusetts","MA");
states.put("Michigan","MI");
states.put("Minnesota","MN");
states.put("Mississippi","MS");
states.put("Missouri","MO");
states.put("Montana","MT");
states.put("Nebraska","NE");
states.put("Nevada","NV");
states.put("New Brunswick","NB");
states.put("New Hampshire","NH");
states.put("New Jersey","NJ");
states.put("New Mexico","NM");
states.put("New York","NY");
states.put("Newfoundland","NF");
states.put("North Carolina","NC");
states.put("North Dakota","ND");
states.put("Northwest Territories","NT");
states.put("Nova Scotia","NS");
states.put("Nunavut","NU");
states.put("Ohio","OH");
states.put("Oklahoma","OK");
states.put("Ontario","ON");
states.put("Oregon","OR");
states.put("Pennsylvania","PA");
states.put("Prince Edward Island","PE");
states.put("Puerto Rico","PR");
states.put("Quebec","QC");
states.put("Rhode Island","RI");
states.put("Saskatchewan","SK");
states.put("South Carolina","SC");
states.put("South Dakota","SD");
states.put("Tennessee","TN");
states.put("Texas","TX");
states.put("Utah","UT");
states.put("Vermont","VT");
states.put("Virgin Islands","VI");
states.put("Virginia","VA");
states.put("Washington","WA");
states.put("West Virginia","WV");
states.put("Wisconsin","WI");
states.put("Wyoming","WY");
states.put("Yukon Territory","YT");
return states;
}
private HashMap<String, String> buildProviderStateTable(){
HashMap<String, String> ProviderStateCode = new HashMap<String, String>();
ProviderStateCode.put("Alabama", "01");
ProviderStateCode.put("Alaska", "02");
ProviderStateCode.put("Arizona", "03");
ProviderStateCode.put("Arkansas", "04");
ProviderStateCode.put("California", "05");
ProviderStateCode.put("Colorado", "06");
ProviderStateCode.put("Connecticut", "07");
ProviderStateCode.put("Delaware", "08");
ProviderStateCode.put("District of Columbia", "09");
ProviderStateCode.put("Florida", "10");
ProviderStateCode.put("Georgia", "11");
ProviderStateCode.put("Hawaii", "12");
ProviderStateCode.put("Idaho", "13");
ProviderStateCode.put("Illinois", "14");
ProviderStateCode.put("Indiana", "15");
ProviderStateCode.put("Iowa", "16");
ProviderStateCode.put("Kansas", "17");
ProviderStateCode.put("Kentucky", "18");
ProviderStateCode.put("Louisiana", "19");
ProviderStateCode.put("Maine", "20");
ProviderStateCode.put("Maryland", "21");
ProviderStateCode.put("Massachusetts", "22");
ProviderStateCode.put("Michigan", "23");
ProviderStateCode.put("Minnesota", "24");
ProviderStateCode.put("Mississippi", "25");
ProviderStateCode.put("Missouri", "26");
ProviderStateCode.put("Montana", "27");
ProviderStateCode.put("Nebraska", "28");
ProviderStateCode.put("Nevada", "29");
ProviderStateCode.put("New Hampshire", "30");
ProviderStateCode.put("New Jersey", "31");
ProviderStateCode.put("New Mexico", "32");
ProviderStateCode.put("New York", "33");
ProviderStateCode.put("North Carolina", "34");
ProviderStateCode.put("North Dakota", "35");
ProviderStateCode.put("Ohio", "36");
ProviderStateCode.put("Oklahoma", "37");
ProviderStateCode.put("Oregon", "38");
ProviderStateCode.put("Pennsylvania", "39");
ProviderStateCode.put("Puerto Rico", "40");
ProviderStateCode.put("Rhode Island", "41");
ProviderStateCode.put("South Carolina", "42");
ProviderStateCode.put("South Dakota", "43");
ProviderStateCode.put("Tennessee", "44");
ProviderStateCode.put("Texas", "45");
ProviderStateCode.put("Utah", "46");
ProviderStateCode.put("Vermont", "47");
ProviderStateCode.put("Virgin Islands", "48");
ProviderStateCode.put("Virginia", "49");
ProviderStateCode.put("Washington", "50");
ProviderStateCode.put("West Virginia", "51");
ProviderStateCode.put("Wisconsin", "52");
ProviderStateCode.put("Wyoming", "53");
ProviderStateCode.put("Africa", "54");
ProviderStateCode.put("California", "55");
ProviderStateCode.put("Canada & Islands", "56");
ProviderStateCode.put("Central America and West Indies", "57");
ProviderStateCode.put("Europe", "58");
ProviderStateCode.put("Mexico", "59");
ProviderStateCode.put("Oceania", "60");
ProviderStateCode.put("Philippines", "61");
ProviderStateCode.put("South America", "62");
ProviderStateCode.put("U.S. Possessions", "63");
ProviderStateCode.put("American Samoa", "64");
ProviderStateCode.put("Guam", "65");
ProviderStateCode.put("Commonwealth of the Northern Marianas Islands", "66");
return ProviderStateCode;
}
private String capitalizeWords(String str){
String words[]=str.split("\\s");
String capitalizeWords="";
for(String w:words){
String first=w.substring(0,1);
String afterFirst=w.substring(1);
capitalizeWords+=first.toUpperCase()+afterFirst+" ";
}
return capitalizeWords.trim();
}
}
  /**
   * Defines the fields used in the beneficiary file. Note that order is significant, columns will
   * be written in the order specified.
   */
  private enum BeneficiaryFields {
    DML_IND,
    BENE_ID,
    STATE_CODE,
    BENE_COUNTY_CD,
    BENE_ZIP_CD,
    BENE_BIRTH_DT,
    BENE_SEX_IDENT_CD,
    BENE_RACE_CD,
    BENE_ENTLMT_RSN_ORIG,
    BENE_ENTLMT_RSN_CURR,
    BENE_ESRD_IND,
    BENE_MDCR_STATUS_CD,
    BENE_PTA_TRMNTN_CD,
    BENE_PTB_TRMNTN_CD,
    // BENE_PTD_TRMNTN_CD, // The spreadsheet has a gap for this column, examples do not include it
    BENE_CRNT_HIC_NUM,
    BENE_SRNM_NAME,
    BENE_GVN_NAME,
    BENE_MDL_NAME,
    MBI_NUM,
    DEATH_DT,
    RFRNC_YR,
    A_MO_CNT,
    B_MO_CNT,
    BUYIN_MO_CNT,
    HMO_MO_CNT,
    RDS_MO_CNT,
    ENRL_SRC,
    SAMPLE_GROUP,
    EFIVEPCT,
    CRNT_BIC,
    AGE,
    COVSTART,
    DUAL_MO_CNT,
    // Monthly (Jan-Dec) FIPS state/county codes.
    FIPS_STATE_CNTY_JAN_CD,
    FIPS_STATE_CNTY_FEB_CD,
    FIPS_STATE_CNTY_MAR_CD,
    FIPS_STATE_CNTY_APR_CD,
    FIPS_STATE_CNTY_MAY_CD,
    FIPS_STATE_CNTY_JUN_CD,
    FIPS_STATE_CNTY_JUL_CD,
    FIPS_STATE_CNTY_AUG_CD,
    FIPS_STATE_CNTY_SEPT_CD,
    FIPS_STATE_CNTY_OCT_CD,
    FIPS_STATE_CNTY_NOV_CD,
    FIPS_STATE_CNTY_DEC_CD,
    V_DOD_SW,
    RTI_RACE_CD,
    // Monthly Medicare status codes.
    MDCR_STUS_JAN_CD,
    MDCR_STUS_FEB_CD,
    MDCR_STUS_MAR_CD,
    MDCR_STUS_APR_CD,
    MDCR_STUS_MAY_CD,
    MDCR_STUS_JUN_CD,
    MDCR_STUS_JUL_CD,
    MDCR_STUS_AUG_CD,
    MDCR_STUS_SEPT_CD,
    MDCR_STUS_OCT_CD,
    MDCR_STUS_NOV_CD,
    MDCR_STUS_DEC_CD,
    PLAN_CVRG_MO_CNT,
    // Monthly Medicare entitlement buy-in indicators (1=Jan ... 12=Dec).
    MDCR_ENTLMT_BUYIN_1_IND,
    MDCR_ENTLMT_BUYIN_2_IND,
    MDCR_ENTLMT_BUYIN_3_IND,
    MDCR_ENTLMT_BUYIN_4_IND,
    MDCR_ENTLMT_BUYIN_5_IND,
    MDCR_ENTLMT_BUYIN_6_IND,
    MDCR_ENTLMT_BUYIN_7_IND,
    MDCR_ENTLMT_BUYIN_8_IND,
    MDCR_ENTLMT_BUYIN_9_IND,
    MDCR_ENTLMT_BUYIN_10_IND,
    MDCR_ENTLMT_BUYIN_11_IND,
    MDCR_ENTLMT_BUYIN_12_IND,
    // Monthly HMO indicators (1=Jan ... 12=Dec).
    HMO_1_IND,
    HMO_2_IND,
    HMO_3_IND,
    HMO_4_IND,
    HMO_5_IND,
    HMO_6_IND,
    HMO_7_IND,
    HMO_8_IND,
    HMO_9_IND,
    HMO_10_IND,
    HMO_11_IND,
    HMO_12_IND,
    // Monthly Part C contract, plan benefit package, and plan type columns.
    PTC_CNTRCT_JAN_ID,
    PTC_CNTRCT_FEB_ID,
    PTC_CNTRCT_MAR_ID,
    PTC_CNTRCT_APR_ID,
    PTC_CNTRCT_MAY_ID,
    PTC_CNTRCT_JUN_ID,
    PTC_CNTRCT_JUL_ID,
    PTC_CNTRCT_AUG_ID,
    PTC_CNTRCT_SEPT_ID,
    PTC_CNTRCT_OCT_ID,
    PTC_CNTRCT_NOV_ID,
    PTC_CNTRCT_DEC_ID,
    PTC_PBP_JAN_ID,
    PTC_PBP_FEB_ID,
    PTC_PBP_MAR_ID,
    PTC_PBP_APR_ID,
    PTC_PBP_MAY_ID,
    PTC_PBP_JUN_ID,
    PTC_PBP_JUL_ID,
    PTC_PBP_AUG_ID,
    PTC_PBP_SEPT_ID,
    PTC_PBP_OCT_ID,
    PTC_PBP_NOV_ID,
    PTC_PBP_DEC_ID,
    PTC_PLAN_TYPE_JAN_CD,
    PTC_PLAN_TYPE_FEB_CD,
    PTC_PLAN_TYPE_MAR_CD,
    PTC_PLAN_TYPE_APR_CD,
    PTC_PLAN_TYPE_MAY_CD,
    PTC_PLAN_TYPE_JUN_CD,
    PTC_PLAN_TYPE_JUL_CD,
    PTC_PLAN_TYPE_AUG_CD,
    PTC_PLAN_TYPE_SEPT_CD,
    PTC_PLAN_TYPE_OCT_CD,
    PTC_PLAN_TYPE_NOV_CD,
    PTC_PLAN_TYPE_DEC_CD,
    // Monthly Part D contract, plan benefit package, and segment columns.
    PTD_CNTRCT_JAN_ID,
    PTD_CNTRCT_FEB_ID,
    PTD_CNTRCT_MAR_ID,
    PTD_CNTRCT_APR_ID,
    PTD_CNTRCT_MAY_ID,
    PTD_CNTRCT_JUN_ID,
    PTD_CNTRCT_JUL_ID,
    PTD_CNTRCT_AUG_ID,
    PTD_CNTRCT_SEPT_ID,
    PTD_CNTRCT_OCT_ID,
    PTD_CNTRCT_NOV_ID,
    PTD_CNTRCT_DEC_ID,
    PTD_PBP_JAN_ID,
    PTD_PBP_FEB_ID,
    PTD_PBP_MAR_ID,
    PTD_PBP_APR_ID,
    PTD_PBP_MAY_ID,
    PTD_PBP_JUN_ID,
    PTD_PBP_JUL_ID,
    PTD_PBP_AUG_ID,
    PTD_PBP_SEPT_ID,
    PTD_PBP_OCT_ID,
    PTD_PBP_NOV_ID,
    PTD_PBP_DEC_ID,
    PTD_SGMT_JAN_ID,
    PTD_SGMT_FEB_ID,
    PTD_SGMT_MAR_ID,
    PTD_SGMT_APR_ID,
    PTD_SGMT_MAY_ID,
    PTD_SGMT_JUN_ID,
    PTD_SGMT_JUL_ID,
    PTD_SGMT_AUG_ID,
    PTD_SGMT_SEPT_ID,
    PTD_SGMT_OCT_ID,
    PTD_SGMT_NOV_ID,
    PTD_SGMT_DEC_ID,
    // Monthly retiree drug subsidy indicators.
    RDS_JAN_IND,
    RDS_FEB_IND,
    RDS_MAR_IND,
    RDS_APR_IND,
    RDS_MAY_IND,
    RDS_JUN_IND,
    RDS_JUL_IND,
    RDS_AUG_IND,
    RDS_SEPT_IND,
    RDS_OCT_IND,
    RDS_NOV_IND,
    RDS_DEC_IND,
    // Monthly dual eligibility status codes.
    META_DUAL_ELGBL_STUS_JAN_CD,
    META_DUAL_ELGBL_STUS_FEB_CD,
    META_DUAL_ELGBL_STUS_MAR_CD,
    META_DUAL_ELGBL_STUS_APR_CD,
    META_DUAL_ELGBL_STUS_MAY_CD,
    META_DUAL_ELGBL_STUS_JUN_CD,
    META_DUAL_ELGBL_STUS_JUL_CD,
    META_DUAL_ELGBL_STUS_AUG_CD,
    META_DUAL_ELGBL_STUS_SEPT_CD,
    META_DUAL_ELGBL_STUS_OCT_CD,
    META_DUAL_ELGBL_STUS_NOV_CD,
    META_DUAL_ELGBL_STUS_DEC_CD,
    // Monthly cost share group codes.
    CST_SHR_GRP_JAN_CD,
    CST_SHR_GRP_FEB_CD,
    CST_SHR_GRP_MAR_CD,
    CST_SHR_GRP_APR_CD,
    CST_SHR_GRP_MAY_CD,
    CST_SHR_GRP_JUN_CD,
    CST_SHR_GRP_JUL_CD,
    CST_SHR_GRP_AUG_CD,
    CST_SHR_GRP_SEPT_CD,
    CST_SHR_GRP_OCT_CD,
    CST_SHR_GRP_NOV_CD,
    CST_SHR_GRP_DEC_CD
  }
  /**
   * Defines the fields used in the beneficiary history file. Note that order is significant,
   * columns will be written in the order specified.
   */
  private enum BeneficiaryHistoryFields {
    DML_IND,
    BENE_ID,
    STATE_CODE,
    BENE_COUNTY_CD,
    BENE_ZIP_CD,
    BENE_BIRTH_DT,
    BENE_SEX_IDENT_CD,
    BENE_RACE_CD,
    BENE_ENTLMT_RSN_ORIG,
    BENE_ENTLMT_RSN_CURR,
    BENE_ESRD_IND,
    BENE_MDCR_STATUS_CD,
    BENE_PTA_TRMNTN_CD,
    BENE_PTB_TRMNTN_CD,
    BENE_CRNT_HIC_NUM,
    BENE_SRNM_NAME,
    BENE_GVN_NAME,
    BENE_MDL_NAME,
    MBI_NUM
  }
  /**
   * Defines the fields used in the outpatient claims file. Note that order is significant,
   * columns will be written in the order specified.
   */
  private enum OutpatientFields {
    DML_IND,
    BENE_ID,
    CLM_ID,
    CLM_GRP_ID,
    FINAL_ACTION,
    NCH_NEAR_LINE_REC_IDENT_CD,
    NCH_CLM_TYPE_CD,
    CLM_FROM_DT,
    CLM_THRU_DT,
    NCH_WKLY_PROC_DT,
    FI_CLM_PROC_DT,
    CLAIM_QUERY_CODE,
    PRVDR_NUM,
    CLM_FAC_TYPE_CD,
    CLM_SRVC_CLSFCTN_TYPE_CD,
    CLM_FREQ_CD,
    FI_NUM,
    CLM_MDCR_NON_PMT_RSN_CD,
    CLM_PMT_AMT,
    NCH_PRMRY_PYR_CLM_PD_AMT,
    NCH_PRMRY_PYR_CD,
    PRVDR_STATE_CD,
    ORG_NPI_NUM,
    AT_PHYSN_UPIN,
    AT_PHYSN_NPI,
    OP_PHYSN_UPIN,
    OP_PHYSN_NPI,
    OT_PHYSN_UPIN,
    OT_PHYSN_NPI,
    CLM_MCO_PD_SW,
    PTNT_DSCHRG_STUS_CD,
    CLM_TOT_CHRG_AMT,
    NCH_BENE_BLOOD_DDCTBL_LBLTY_AM,
    NCH_PROFNL_CMPNT_CHRG_AMT,
    // Principal diagnosis plus up to 25 diagnosis code/version pairs.
    PRNCPAL_DGNS_CD,
    PRNCPAL_DGNS_VRSN_CD,
    ICD_DGNS_CD1,
    ICD_DGNS_VRSN_CD1,
    ICD_DGNS_CD2,
    ICD_DGNS_VRSN_CD2,
    ICD_DGNS_CD3,
    ICD_DGNS_VRSN_CD3,
    ICD_DGNS_CD4,
    ICD_DGNS_VRSN_CD4,
    ICD_DGNS_CD5,
    ICD_DGNS_VRSN_CD5,
    ICD_DGNS_CD6,
    ICD_DGNS_VRSN_CD6,
    ICD_DGNS_CD7,
    ICD_DGNS_VRSN_CD7,
    ICD_DGNS_CD8,
    ICD_DGNS_VRSN_CD8,
    ICD_DGNS_CD9,
    ICD_DGNS_VRSN_CD9,
    ICD_DGNS_CD10,
    ICD_DGNS_VRSN_CD10,
    ICD_DGNS_CD11,
    ICD_DGNS_VRSN_CD11,
    ICD_DGNS_CD12,
    ICD_DGNS_VRSN_CD12,
    ICD_DGNS_CD13,
    ICD_DGNS_VRSN_CD13,
    ICD_DGNS_CD14,
    ICD_DGNS_VRSN_CD14,
    ICD_DGNS_CD15,
    ICD_DGNS_VRSN_CD15,
    ICD_DGNS_CD16,
    ICD_DGNS_VRSN_CD16,
    ICD_DGNS_CD17,
    ICD_DGNS_VRSN_CD17,
    ICD_DGNS_CD18,
    ICD_DGNS_VRSN_CD18,
    ICD_DGNS_CD19,
    ICD_DGNS_VRSN_CD19,
    ICD_DGNS_CD20,
    ICD_DGNS_VRSN_CD20,
    ICD_DGNS_CD21,
    ICD_DGNS_VRSN_CD21,
    ICD_DGNS_CD22,
    ICD_DGNS_VRSN_CD22,
    ICD_DGNS_CD23,
    ICD_DGNS_VRSN_CD23,
    ICD_DGNS_CD24,
    ICD_DGNS_VRSN_CD24,
    ICD_DGNS_CD25,
    ICD_DGNS_VRSN_CD25,
    // External cause of injury diagnosis codes (up to 12).
    FST_DGNS_E_CD,
    FST_DGNS_E_VRSN_CD,
    ICD_DGNS_E_CD1,
    ICD_DGNS_E_VRSN_CD1,
    ICD_DGNS_E_CD2,
    ICD_DGNS_E_VRSN_CD2,
    ICD_DGNS_E_CD3,
    ICD_DGNS_E_VRSN_CD3,
    ICD_DGNS_E_CD4,
    ICD_DGNS_E_VRSN_CD4,
    ICD_DGNS_E_CD5,
    ICD_DGNS_E_VRSN_CD5,
    ICD_DGNS_E_CD6,
    ICD_DGNS_E_VRSN_CD6,
    ICD_DGNS_E_CD7,
    ICD_DGNS_E_VRSN_CD7,
    ICD_DGNS_E_CD8,
    ICD_DGNS_E_VRSN_CD8,
    ICD_DGNS_E_CD9,
    ICD_DGNS_E_VRSN_CD9,
    ICD_DGNS_E_CD10,
    ICD_DGNS_E_VRSN_CD10,
    ICD_DGNS_E_CD11,
    ICD_DGNS_E_VRSN_CD11,
    ICD_DGNS_E_CD12,
    ICD_DGNS_E_VRSN_CD12,
    // Up to 25 procedure code/version/date triples.
    ICD_PRCDR_CD1,
    ICD_PRCDR_VRSN_CD1,
    PRCDR_DT1,
    ICD_PRCDR_CD2,
    ICD_PRCDR_VRSN_CD2,
    PRCDR_DT2,
    ICD_PRCDR_CD3,
    ICD_PRCDR_VRSN_CD3,
    PRCDR_DT3,
    ICD_PRCDR_CD4,
    ICD_PRCDR_VRSN_CD4,
    PRCDR_DT4,
    ICD_PRCDR_CD5,
    ICD_PRCDR_VRSN_CD5,
    PRCDR_DT5,
    ICD_PRCDR_CD6,
    ICD_PRCDR_VRSN_CD6,
    PRCDR_DT6,
    ICD_PRCDR_CD7,
    ICD_PRCDR_VRSN_CD7,
    PRCDR_DT7,
    ICD_PRCDR_CD8,
    ICD_PRCDR_VRSN_CD8,
    PRCDR_DT8,
    ICD_PRCDR_CD9,
    ICD_PRCDR_VRSN_CD9,
    PRCDR_DT9,
    ICD_PRCDR_CD10,
    ICD_PRCDR_VRSN_CD10,
    PRCDR_DT10,
    ICD_PRCDR_CD11,
    ICD_PRCDR_VRSN_CD11,
    PRCDR_DT11,
    ICD_PRCDR_CD12,
    ICD_PRCDR_VRSN_CD12,
    PRCDR_DT12,
    ICD_PRCDR_CD13,
    ICD_PRCDR_VRSN_CD13,
    PRCDR_DT13,
    ICD_PRCDR_CD14,
    ICD_PRCDR_VRSN_CD14,
    PRCDR_DT14,
    ICD_PRCDR_CD15,
    ICD_PRCDR_VRSN_CD15,
    PRCDR_DT15,
    ICD_PRCDR_CD16,
    ICD_PRCDR_VRSN_CD16,
    PRCDR_DT16,
    ICD_PRCDR_CD17,
    ICD_PRCDR_VRSN_CD17,
    PRCDR_DT17,
    ICD_PRCDR_CD18,
    ICD_PRCDR_VRSN_CD18,
    PRCDR_DT18,
    ICD_PRCDR_CD19,
    ICD_PRCDR_VRSN_CD19,
    PRCDR_DT19,
    ICD_PRCDR_CD20,
    ICD_PRCDR_VRSN_CD20,
    PRCDR_DT20,
    ICD_PRCDR_CD21,
    ICD_PRCDR_VRSN_CD21,
    PRCDR_DT21,
    ICD_PRCDR_CD22,
    ICD_PRCDR_VRSN_CD22,
    PRCDR_DT22,
    ICD_PRCDR_CD23,
    ICD_PRCDR_VRSN_CD23,
    PRCDR_DT23,
    ICD_PRCDR_CD24,
    ICD_PRCDR_VRSN_CD24,
    PRCDR_DT24,
    ICD_PRCDR_CD25,
    ICD_PRCDR_VRSN_CD25,
    PRCDR_DT25,
    RSN_VISIT_CD1,
    RSN_VISIT_VRSN_CD1,
    RSN_VISIT_CD2,
    RSN_VISIT_VRSN_CD2,
    RSN_VISIT_CD3,
    RSN_VISIT_VRSN_CD3,
    NCH_BENE_PTB_DDCTBL_AMT,
    NCH_BENE_PTB_COINSRNC_AMT,
    CLM_OP_PRVDR_PMT_AMT,
    CLM_OP_BENE_PMT_AMT,
    // Revenue center (claim line) columns.
    CLM_LINE_NUM,
    REV_CNTR,
    REV_CNTR_DT,
    REV_CNTR_1ST_ANSI_CD,
    REV_CNTR_2ND_ANSI_CD,
    REV_CNTR_3RD_ANSI_CD,
    REV_CNTR_4TH_ANSI_CD,
    REV_CNTR_APC_HIPPS_CD,
    HCPCS_CD,
    HCPCS_1ST_MDFR_CD,
    HCPCS_2ND_MDFR_CD,
    REV_CNTR_PMT_MTHD_IND_CD,
    REV_CNTR_DSCNT_IND_CD,
    REV_CNTR_PACKG_IND_CD,
    REV_CNTR_OTAF_PMT_CD,
    REV_CNTR_IDE_NDC_UPC_NUM,
    REV_CNTR_UNIT_CNT,
    REV_CNTR_RATE_AMT,
    REV_CNTR_BLOOD_DDCTBL_AMT,
    REV_CNTR_CASH_DDCTBL_AMT,
    REV_CNTR_COINSRNC_WGE_ADJSTD_C,
    REV_CNTR_RDCD_COINSRNC_AMT,
    REV_CNTR_1ST_MSP_PD_AMT,
    REV_CNTR_2ND_MSP_PD_AMT,
    REV_CNTR_PRVDR_PMT_AMT,
    REV_CNTR_BENE_PMT_AMT,
    REV_CNTR_PTNT_RSPNSBLTY_PMT,
    REV_CNTR_PMT_AMT_AMT,
    REV_CNTR_TOT_CHRG_AMT,
    REV_CNTR_NCVRD_CHRG_AMT,
    REV_CNTR_STUS_IND_CD,
    REV_CNTR_NDC_QTY,
    REV_CNTR_NDC_QTY_QLFR_CD,
    RNDRNG_PHYSN_UPIN,
    RNDRNG_PHYSN_NPI
  }
  /**
   * Defines the fields used in the inpatient claims file. Note that order is significant,
   * columns will be written in the order specified.
   */
  private enum InpatientFields {
    DML_IND,
    BENE_ID,
    CLM_ID,
    CLM_GRP_ID,
    FINAL_ACTION,
    NCH_NEAR_LINE_REC_IDENT_CD,
    NCH_CLM_TYPE_CD,
    CLM_FROM_DT,
    CLM_THRU_DT,
    NCH_WKLY_PROC_DT,
    FI_CLM_PROC_DT,
    CLAIM_QUERY_CODE,
    PRVDR_NUM,
    CLM_FAC_TYPE_CD,
    CLM_SRVC_CLSFCTN_TYPE_CD,
    CLM_FREQ_CD,
    FI_NUM,
    CLM_MDCR_NON_PMT_RSN_CD,
    CLM_PMT_AMT,
    NCH_PRMRY_PYR_CLM_PD_AMT,
    NCH_PRMRY_PYR_CD,
    FI_CLM_ACTN_CD,
    PRVDR_STATE_CD,
    ORG_NPI_NUM,
    AT_PHYSN_UPIN,
    AT_PHYSN_NPI,
    OP_PHYSN_UPIN,
    OP_PHYSN_NPI,
    OT_PHYSN_UPIN,
    OT_PHYSN_NPI,
    CLM_MCO_PD_SW,
    PTNT_DSCHRG_STUS_CD,
    CLM_PPS_IND_CD,
    CLM_TOT_CHRG_AMT,
    CLM_ADMSN_DT,
    CLM_IP_ADMSN_TYPE_CD,
    CLM_SRC_IP_ADMSN_CD,
    NCH_PTNT_STATUS_IND_CD,
    CLM_PASS_THRU_PER_DIEM_AMT,
    NCH_BENE_IP_DDCTBL_AMT,
    NCH_BENE_PTA_COINSRNC_LBLTY_AM,
    NCH_BENE_BLOOD_DDCTBL_LBLTY_AM,
    NCH_PROFNL_CMPNT_CHRG_AMT,
    NCH_IP_NCVRD_CHRG_AMT,
    NCH_IP_TOT_DDCTN_AMT,
    CLM_TOT_PPS_CPTL_AMT,
    CLM_PPS_CPTL_FSP_AMT,
    CLM_PPS_CPTL_OUTLIER_AMT,
    CLM_PPS_CPTL_DSPRPRTNT_SHR_AMT,
    CLM_PPS_CPTL_IME_AMT,
    CLM_PPS_CPTL_EXCPTN_AMT,
    CLM_PPS_OLD_CPTL_HLD_HRMLS_AMT,
    CLM_PPS_CPTL_DRG_WT_NUM,
    CLM_UTLZTN_DAY_CNT,
    BENE_TOT_COINSRNC_DAYS_CNT,
    BENE_LRD_USED_CNT,
    CLM_NON_UTLZTN_DAYS_CNT,
    NCH_BLOOD_PNTS_FRNSHD_QTY,
    NCH_VRFD_NCVRD_STAY_FROM_DT,
    NCH_VRFD_NCVRD_STAY_THRU_DT,
    NCH_ACTV_OR_CVRD_LVL_CARE_THRU,
    NCH_BENE_MDCR_BNFTS_EXHTD_DT_I,
    NCH_BENE_DSCHRG_DT,
    CLM_DRG_CD,
    CLM_DRG_OUTLIER_STAY_CD,
    NCH_DRG_OUTLIER_APRVD_PMT_AMT,
    // Admitting/principal diagnosis plus up to 25 diagnosis code/version/POA triples.
    ADMTG_DGNS_CD,
    ADMTG_DGNS_VRSN_CD,
    PRNCPAL_DGNS_CD,
    PRNCPAL_DGNS_VRSN_CD,
    ICD_DGNS_CD1,
    ICD_DGNS_VRSN_CD1,
    CLM_POA_IND_SW1,
    ICD_DGNS_CD2,
    ICD_DGNS_VRSN_CD2,
    CLM_POA_IND_SW2,
    ICD_DGNS_CD3,
    ICD_DGNS_VRSN_CD3,
    CLM_POA_IND_SW3,
    ICD_DGNS_CD4,
    ICD_DGNS_VRSN_CD4,
    CLM_POA_IND_SW4,
    ICD_DGNS_CD5,
    ICD_DGNS_VRSN_CD5,
    CLM_POA_IND_SW5,
    ICD_DGNS_CD6,
    ICD_DGNS_VRSN_CD6,
    CLM_POA_IND_SW6,
    ICD_DGNS_CD7,
    ICD_DGNS_VRSN_CD7,
    CLM_POA_IND_SW7,
    ICD_DGNS_CD8,
    ICD_DGNS_VRSN_CD8,
    CLM_POA_IND_SW8,
    ICD_DGNS_CD9,
    ICD_DGNS_VRSN_CD9,
    CLM_POA_IND_SW9,
    ICD_DGNS_CD10,
    ICD_DGNS_VRSN_CD10,
    CLM_POA_IND_SW10,
    ICD_DGNS_CD11,
    ICD_DGNS_VRSN_CD11,
    CLM_POA_IND_SW11,
    ICD_DGNS_CD12,
    ICD_DGNS_VRSN_CD12,
    CLM_POA_IND_SW12,
    ICD_DGNS_CD13,
    ICD_DGNS_VRSN_CD13,
    CLM_POA_IND_SW13,
    ICD_DGNS_CD14,
    ICD_DGNS_VRSN_CD14,
    CLM_POA_IND_SW14,
    ICD_DGNS_CD15,
    ICD_DGNS_VRSN_CD15,
    CLM_POA_IND_SW15,
    ICD_DGNS_CD16,
    ICD_DGNS_VRSN_CD16,
    CLM_POA_IND_SW16,
    ICD_DGNS_CD17,
    ICD_DGNS_VRSN_CD17,
    CLM_POA_IND_SW17,
    ICD_DGNS_CD18,
    ICD_DGNS_VRSN_CD18,
    CLM_POA_IND_SW18,
    ICD_DGNS_CD19,
    ICD_DGNS_VRSN_CD19,
    CLM_POA_IND_SW19,
    ICD_DGNS_CD20,
    ICD_DGNS_VRSN_CD20,
    CLM_POA_IND_SW20,
    ICD_DGNS_CD21,
    ICD_DGNS_VRSN_CD21,
    CLM_POA_IND_SW21,
    ICD_DGNS_CD22,
    ICD_DGNS_VRSN_CD22,
    CLM_POA_IND_SW22,
    ICD_DGNS_CD23,
    ICD_DGNS_VRSN_CD23,
    CLM_POA_IND_SW23,
    ICD_DGNS_CD24,
    ICD_DGNS_VRSN_CD24,
    CLM_POA_IND_SW24,
    ICD_DGNS_CD25,
    ICD_DGNS_VRSN_CD25,
    CLM_POA_IND_SW25,
    // External cause of injury diagnosis codes (up to 12).
    FST_DGNS_E_CD,
    FST_DGNS_E_VRSN_CD,
    ICD_DGNS_E_CD1,
    ICD_DGNS_E_VRSN_CD1,
    CLM_E_POA_IND_SW1,
    ICD_DGNS_E_CD2,
    ICD_DGNS_E_VRSN_CD2,
    CLM_E_POA_IND_SW2,
    ICD_DGNS_E_CD3,
    ICD_DGNS_E_VRSN_CD3,
    CLM_E_POA_IND_SW3,
    ICD_DGNS_E_CD4,
    ICD_DGNS_E_VRSN_CD4,
    CLM_E_POA_IND_SW4,
    ICD_DGNS_E_CD5,
    ICD_DGNS_E_VRSN_CD5,
    CLM_E_POA_IND_SW5,
    ICD_DGNS_E_CD6,
    ICD_DGNS_E_VRSN_CD6,
    CLM_E_POA_IND_SW6,
    ICD_DGNS_E_CD7,
    ICD_DGNS_E_VRSN_CD7,
    CLM_E_POA_IND_SW7,
    ICD_DGNS_E_CD8,
    ICD_DGNS_E_VRSN_CD8,
    CLM_E_POA_IND_SW8,
    ICD_DGNS_E_CD9,
    ICD_DGNS_E_VRSN_CD9,
    CLM_E_POA_IND_SW9,
    ICD_DGNS_E_CD10,
    ICD_DGNS_E_VRSN_CD10,
    CLM_E_POA_IND_SW10,
    ICD_DGNS_E_CD11,
    ICD_DGNS_E_VRSN_CD11,
    CLM_E_POA_IND_SW11,
    ICD_DGNS_E_CD12,
    ICD_DGNS_E_VRSN_CD12,
    CLM_E_POA_IND_SW12,
    // Up to 25 procedure code/version/date triples.
    ICD_PRCDR_CD1,
    ICD_PRCDR_VRSN_CD1,
    PRCDR_DT1,
    ICD_PRCDR_CD2,
    ICD_PRCDR_VRSN_CD2,
    PRCDR_DT2,
    ICD_PRCDR_CD3,
    ICD_PRCDR_VRSN_CD3,
    PRCDR_DT3,
    ICD_PRCDR_CD4,
    ICD_PRCDR_VRSN_CD4,
    PRCDR_DT4,
    ICD_PRCDR_CD5,
    ICD_PRCDR_VRSN_CD5,
    PRCDR_DT5,
    ICD_PRCDR_CD6,
    ICD_PRCDR_VRSN_CD6,
    PRCDR_DT6,
    ICD_PRCDR_CD7,
    ICD_PRCDR_VRSN_CD7,
    PRCDR_DT7,
    ICD_PRCDR_CD8,
    ICD_PRCDR_VRSN_CD8,
    PRCDR_DT8,
    ICD_PRCDR_CD9,
    ICD_PRCDR_VRSN_CD9,
    PRCDR_DT9,
    ICD_PRCDR_CD10,
    ICD_PRCDR_VRSN_CD10,
    PRCDR_DT10,
    ICD_PRCDR_CD11,
    ICD_PRCDR_VRSN_CD11,
    PRCDR_DT11,
    ICD_PRCDR_CD12,
    ICD_PRCDR_VRSN_CD12,
    PRCDR_DT12,
    ICD_PRCDR_CD13,
    ICD_PRCDR_VRSN_CD13,
    PRCDR_DT13,
    ICD_PRCDR_CD14,
    ICD_PRCDR_VRSN_CD14,
    PRCDR_DT14,
    ICD_PRCDR_CD15,
    ICD_PRCDR_VRSN_CD15,
    PRCDR_DT15,
    ICD_PRCDR_CD16,
    ICD_PRCDR_VRSN_CD16,
    PRCDR_DT16,
    ICD_PRCDR_CD17,
    ICD_PRCDR_VRSN_CD17,
    PRCDR_DT17,
    ICD_PRCDR_CD18,
    ICD_PRCDR_VRSN_CD18,
    PRCDR_DT18,
    ICD_PRCDR_CD19,
    ICD_PRCDR_VRSN_CD19,
    PRCDR_DT19,
    ICD_PRCDR_CD20,
    ICD_PRCDR_VRSN_CD20,
    PRCDR_DT20,
    ICD_PRCDR_CD21,
    ICD_PRCDR_VRSN_CD21,
    PRCDR_DT21,
    ICD_PRCDR_CD22,
    ICD_PRCDR_VRSN_CD22,
    PRCDR_DT22,
    ICD_PRCDR_CD23,
    ICD_PRCDR_VRSN_CD23,
    PRCDR_DT23,
    ICD_PRCDR_CD24,
    ICD_PRCDR_VRSN_CD24,
    PRCDR_DT24,
    ICD_PRCDR_CD25,
    ICD_PRCDR_VRSN_CD25,
    PRCDR_DT25,
    IME_OP_CLM_VAL_AMT,
    DSH_OP_CLM_VAL_AMT,
    // Revenue center (claim line) columns.
    CLM_LINE_NUM,
    REV_CNTR,
    HCPCS_CD,
    REV_CNTR_UNIT_CNT,
    REV_CNTR_RATE_AMT,
    REV_CNTR_TOT_CHRG_AMT,
    REV_CNTR_NCVRD_CHRG_AMT,
    REV_CNTR_DDCTBL_COINSRNC_CD,
    REV_CNTR_NDC_QTY,
    REV_CNTR_NDC_QTY_QLFR_CD,
    RNDRNG_PHYSN_UPIN,
    RNDRNG_PHYSN_NPI
  }
  /**
   * Defines the fields used in the carrier claims file. Note that order is significant,
   * columns will be written in the order specified.
   */
  private enum CarrierFields {
    DML_IND,
    BENE_ID,
    CLM_ID,
    CLM_GRP_ID,
    FINAL_ACTION,
    NCH_NEAR_LINE_REC_IDENT_CD,
    NCH_CLM_TYPE_CD,
    CLM_FROM_DT,
    CLM_THRU_DT,
    NCH_WKLY_PROC_DT,
    CARR_CLM_ENTRY_CD,
    CLM_DISP_CD,
    CARR_NUM,
    CARR_CLM_PMT_DNL_CD,
    CLM_PMT_AMT,
    CARR_CLM_PRMRY_PYR_PD_AMT,
    RFR_PHYSN_UPIN,
    RFR_PHYSN_NPI,
    CARR_CLM_PRVDR_ASGNMT_IND_SW,
    NCH_CLM_PRVDR_PMT_AMT,
    NCH_CLM_BENE_PMT_AMT,
    NCH_CARR_CLM_SBMTD_CHRG_AMT,
    NCH_CARR_CLM_ALOWD_AMT,
    CARR_CLM_CASH_DDCTBL_APLD_AMT,
    CARR_CLM_HCPCS_YR_CD,
    CARR_CLM_RFRNG_PIN_NUM,
    // Principal diagnosis plus up to 12 diagnosis code/version pairs.
    PRNCPAL_DGNS_CD,
    PRNCPAL_DGNS_VRSN_CD,
    ICD_DGNS_CD1,
    ICD_DGNS_VRSN_CD1,
    ICD_DGNS_CD2,
    ICD_DGNS_VRSN_CD2,
    ICD_DGNS_CD3,
    ICD_DGNS_VRSN_CD3,
    ICD_DGNS_CD4,
    ICD_DGNS_VRSN_CD4,
    ICD_DGNS_CD5,
    ICD_DGNS_VRSN_CD5,
    ICD_DGNS_CD6,
    ICD_DGNS_VRSN_CD6,
    ICD_DGNS_CD7,
    ICD_DGNS_VRSN_CD7,
    ICD_DGNS_CD8,
    ICD_DGNS_VRSN_CD8,
    ICD_DGNS_CD9,
    ICD_DGNS_VRSN_CD9,
    ICD_DGNS_CD10,
    ICD_DGNS_VRSN_CD10,
    ICD_DGNS_CD11,
    ICD_DGNS_VRSN_CD11,
    ICD_DGNS_CD12,
    ICD_DGNS_VRSN_CD12,
    CLM_CLNCL_TRIL_NUM,
    // Claim line columns.
    LINE_NUM,
    CARR_PRFRNG_PIN_NUM,
    PRF_PHYSN_UPIN,
    PRF_PHYSN_NPI,
    ORG_NPI_NUM,
    CARR_LINE_PRVDR_TYPE_CD,
    TAX_NUM,
    PRVDR_STATE_CD,
    PRVDR_ZIP,
    PRVDR_SPCLTY,
    PRTCPTNG_IND_CD,
    CARR_LINE_RDCD_PMT_PHYS_ASTN_C,
    LINE_SRVC_CNT,
    LINE_CMS_TYPE_SRVC_CD,
    LINE_PLACE_OF_SRVC_CD,
    CARR_LINE_PRCNG_LCLTY_CD,
    LINE_1ST_EXPNS_DT,
    LINE_LAST_EXPNS_DT,
    HCPCS_CD,
    HCPCS_1ST_MDFR_CD,
    HCPCS_2ND_MDFR_CD,
    BETOS_CD,
    LINE_NCH_PMT_AMT,
    LINE_BENE_PMT_AMT,
    LINE_PRVDR_PMT_AMT,
    LINE_BENE_PTB_DDCTBL_AMT,
    LINE_BENE_PRMRY_PYR_CD,
    LINE_BENE_PRMRY_PYR_PD_AMT,
    LINE_COINSRNC_AMT,
    LINE_SBMTD_CHRG_AMT,
    LINE_ALOWD_CHRG_AMT,
    LINE_PRCSG_IND_CD,
    LINE_PMT_80_100_CD,
    LINE_SERVICE_DEDUCTIBLE,
    CARR_LINE_MTUS_CNT,
    CARR_LINE_MTUS_CD,
    LINE_ICD_DGNS_CD,
    LINE_ICD_DGNS_VRSN_CD,
    HPSA_SCRCTY_IND_CD,
    CARR_LINE_RX_NUM,
    LINE_HCT_HGB_RSLT_NUM,
    LINE_HCT_HGB_TYPE_CD,
    LINE_NDC_CD,
    CARR_LINE_CLIA_LAB_NUM,
    CARR_LINE_ANSTHSA_UNIT_CNT
  }
public enum PrescriptionFields {
DML_IND,
PDE_ID,
CLM_GRP_ID,
FINAL_ACTION,
BENE_ID,
SRVC_DT,
PD_DT,
SRVC_PRVDR_ID_QLFYR_CD,
SRVC_PRVDR_ID,
PRSCRBR_ID_QLFYR_CD,
PRSCRBR_ID,
RX_SRVC_RFRNC_NUM,
PROD_SRVC_ID,
PLAN_CNTRCT_REC_ID,
PLAN_PBP_REC_NUM,
CMPND_CD,
DAW_PROD_SLCTN_CD,
QTY_DSPNSD_NUM,
DAYS_SUPLY_NUM,
FILL_NUM,
DSPNSNG_STUS_CD,
DRUG_CVRG_STUS_CD,
ADJSTMT_DLTN_CD,
NSTD_FRMT_CD,
PRCNG_EXCPTN_CD,
CTSTRPHC_CVRG_CD,
GDC_BLW_OOPT_AMT,
GDC_ABV_OOPT_AMT,
PTNT_PAY_AMT,
OTHR_TROOP_AMT,
LICS_AMT,
PLRO_AMT,
CVRD_D_PLAN_PD_AMT,
NCVRD_PLAN_PD_AMT,
TOT_RX_CST_AMT,
RX_ORGN_CD,
RPTD_GAP_DSCNT_NUM,
BRND_GNRC_CD,
PHRMCY_SRVC_TYPE_CD,
PTNT_RSDNC_CD,
SUBMSN_CLR_CD
}
  /**
   * Thread safe singleton pattern adopted from
   * https://stackoverflow.com/questions/7048198/thread-safe-singletons-in-java
   */
  private static class SingletonHolder {
    /**
     * Singleton instance of the BB2Exporter, created lazily when the holder class is
     * first loaded.
     */
    private static final BB2Exporter instance = new BB2Exporter();
  }
  /**
   * Get the current instance of the BB2Exporter.
   *
   * @return the singleton instance of the BB2Exporter
   */
  public static BB2Exporter getInstance() {
    return SingletonHolder.instance;
  }
/**
* Utility class for writing to BB2 files.
*/
private static class SynchronizedBBLineWriter extends BufferedWriter {
private static final String BB_FIELD_SEPARATOR = "|";
/**
* Construct a new instance.
* @param file the file to write to
* @throws IOException if something goes wrong
*/
public SynchronizedBBLineWriter(File file) throws IOException {
super(new FileWriter(file));
}
/**
* Write a line of output consisting of one or more fields separated by '|' and terminated with
* a system new line.
* @param fields the fields that will be concatenated into the line
* @throws IOException if something goes wrong
*/
private void writeLine(String... fields) throws IOException {
String line = String.join(BB_FIELD_SEPARATOR, fields);
synchronized (lock) {
write(line);
newLine();
}
}
/**
* Write a BB2 file header.
* @param enumClass the enumeration class whose members define the column names
* @throws IOException if something goes wrong
*/
public <E extends Enum<E>> void writeHeader(Class<E> enumClass) throws IOException {
String[] fields = Arrays.stream(enumClass.getEnumConstants()).map(Enum::name)
.toArray(String[]::new);
writeLine(fields);
}
/**
* Write a BB2 file line.
* @param enumClass the enumeration class whose members define the column names
* @param fieldValues a sparse map of column names to values, missing values will result in
* empty values in the corresponding column
* @throws IOException if something goes wrong
*/
public <E extends Enum<E>> void writeValues(Class<E> enumClass, Map<E, String> fieldValues)
throws IOException {
String[] fields = Arrays.stream(enumClass.getEnumConstants())
.map((e) -> fieldValues.getOrDefault(e, "")).toArray(String[]::new);
writeLine(fields);
}
}
}
|
removed print statement
|
src/main/java/org/mitre/synthea/export/BB2Exporter.java
|
removed print statement
|
|
Java
|
apache-2.0
|
ff45585768bb50b36b293872c8f83eef13c36bae
| 0
|
alxdarksage/BridgePF,Sage-Bionetworks/BridgePF,DwayneJengSage/BridgePF,alxdarksage/BridgePF,Sage-Bionetworks/BridgePF,DwayneJengSage/BridgePF,Sage-Bionetworks/BridgePF,alxdarksage/BridgePF,DwayneJengSage/BridgePF
|
package org.sagebionetworks.bridge.services;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
import static org.sagebionetworks.bridge.models.accounts.SharingScope.NO_SHARING;
import java.util.Map;
import org.apache.commons.lang3.StringUtils;
import org.sagebionetworks.bridge.BridgeUtils;
import org.sagebionetworks.bridge.cache.CacheProvider;
import org.sagebionetworks.bridge.dao.AccountDao;
import org.sagebionetworks.bridge.dao.AccountSecretDao;
import org.sagebionetworks.bridge.exceptions.ConsentRequiredException;
import org.sagebionetworks.bridge.time.DateUtils;
import org.sagebionetworks.bridge.models.CriteriaContext;
import org.sagebionetworks.bridge.models.accounts.Account;
import org.sagebionetworks.bridge.models.accounts.AccountId;
import org.sagebionetworks.bridge.models.accounts.AccountSecretType;
import org.sagebionetworks.bridge.models.accounts.ConsentStatus;
import org.sagebionetworks.bridge.models.accounts.IdentifierHolder;
import org.sagebionetworks.bridge.models.accounts.SignIn;
import org.sagebionetworks.bridge.models.accounts.StudyParticipant;
import org.sagebionetworks.bridge.models.accounts.UserSession;
import org.sagebionetworks.bridge.models.studies.Study;
import org.sagebionetworks.bridge.models.subpopulations.ConsentSignature;
import org.sagebionetworks.bridge.models.subpopulations.SubpopulationGuid;
import org.sagebionetworks.bridge.validators.SignInValidator;
import org.sagebionetworks.bridge.validators.Validate;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
@Component("userAdminService")
public class UserAdminService {
    // Collaborating services and DAOs; each is injected by Spring through the
    // corresponding @Autowired setter below.
    private AuthenticationService authenticationService;
    private NotificationsService notificationsService;
    private ParticipantService participantService;
    private AccountDao accountDao;
    private ConsentService consentService;
    private HealthDataService healthDataService;
    private ScheduledActivityService scheduledActivityService;
    private ActivityEventService activityEventService;
    private CacheProvider cacheProvider;
    private ExternalIdService externalIdService;
    private UploadService uploadService;
    private AccountSecretDao accountSecretDao;
    /** Authentication service, injected by Spring. */
    @Autowired
    final void setAuthenticationService(AuthenticationService authenticationService) {
        this.authenticationService = authenticationService;
    }
    /** Notifications service, used to clean up notification registrations when we delete users. */
    @Autowired
    final void setNotificationsService(NotificationsService notificationsService) {
        this.notificationsService = notificationsService;
    }
    /** Participant service, injected by Spring. */
    @Autowired
    final void setParticipantService(ParticipantService participantService) {
        this.participantService = participantService;
    }
    /** Account DAO, injected by Spring. */
    @Autowired
    final void setAccountDao(AccountDao accountDao) {
        this.accountDao = accountDao;
    }
    /** Consent service, injected by Spring. */
    @Autowired
    final void setConsentService(ConsentService consentService) {
        this.consentService = consentService;
    }
    /** Health data service, injected by Spring. */
    @Autowired
    final void setHealthDataService(HealthDataService healthDataService) {
        this.healthDataService = healthDataService;
    }
    /** Scheduled activity service, injected by Spring. */
    @Autowired
    final void setScheduledActivityService(ScheduledActivityService scheduledActivityService) {
        this.scheduledActivityService = scheduledActivityService;
    }
    /** Activity event service, injected by Spring. */
    @Autowired
    final void setActivityEventService(ActivityEventService activityEventService) {
        this.activityEventService = activityEventService;
    }
    /** Cache provider, injected by Spring. */
    @Autowired
    final void setCacheProvider(CacheProvider cache) {
        this.cacheProvider = cache;
    }
    /** External ID service, injected by Spring. */
    @Autowired
    final void setExternalIdService(ExternalIdService externalIdService) {
        this.externalIdService = externalIdService;
    }
@Autowired
final void setUploadService(UploadService uploadService) {
this.uploadService = uploadService;
}
@Autowired
final void setAccountSecretDao(AccountSecretDao accountSecretDao) {
this.accountSecretDao = accountSecretDao;
}
/**
* Create a user and optionally consent the user and/or sign the user in. If a specific subpopulation
* is not specified (and currently the API for this method does not allow it), than the method iterates
* through all subpopulations in the study and consents the user to all required consents. This should
* allow the user to make calls without receiving a 412 response.
*
* @param study
* the study of the target user
* @param participant
* sign up information for the target user
* @param subpopGuid
* the subpopulation to consent to (if null, it will use the default/study subpopulation).
* @param signUserIn
* should the user be signed into Bridge after creation?
* @param consentUser
* should the user be consented to the research?
*
* @return UserSession for the newly created user
*/
public UserSession createUser(Study study, StudyParticipant participant, SubpopulationGuid subpopGuid,
boolean signUserIn, boolean consentUser) {
checkNotNull(study, "Study cannot be null");
checkNotNull(participant, "Participant cannot be null");
// Validate study + email or phone. This is the minimum we need to create a functional account.
SignIn signIn = new SignIn.Builder().withStudy(study.getIdentifier()).withEmail(participant.getEmail())
.withPhone(participant.getPhone()).withPassword(participant.getPassword()).build();
Validate.entityThrowingException(SignInValidator.MINIMAL, signIn);
IdentifierHolder identifier = null;
try {
// This used to hard-code the admin role to allow assignment of roles; now it must actually be called by an
// admin user (previously this was only checked in the related controller method).
identifier = participantService.createParticipant(study, participant, false);
StudyParticipant updatedParticipant = participantService.getParticipant(study, identifier.getIdentifier(), false);
// We don't filter users by any of these filtering criteria in the admin API.
CriteriaContext context = new CriteriaContext.Builder()
.withUserId(identifier.getIdentifier())
.withStudyIdentifier(study.getStudyIdentifier()).build();
if (consentUser) {
String name = String.format("[Signature for %s]", updatedParticipant.getEmail());
ConsentSignature signature = new ConsentSignature.Builder().withName(name)
.withBirthdate("1989-08-19").withSignedOn(DateUtils.getCurrentMillisFromEpoch()).build();
if (subpopGuid != null) {
consentService.consentToResearch(study, subpopGuid, updatedParticipant, signature, NO_SHARING, false);
} else {
Map<SubpopulationGuid,ConsentStatus> statuses = consentService.getConsentStatuses(context);
for (ConsentStatus consentStatus : statuses.values()) {
if (consentStatus.isRequired()) {
SubpopulationGuid guid = SubpopulationGuid.create(consentStatus.getSubpopulationGuid());
consentService.consentToResearch(study, guid, updatedParticipant, signature, NO_SHARING, false);
}
}
}
}
if (signUserIn) {
// We do ignore consent state here as our intention may be to create a user who is signed in but not
// consented.
try {
return authenticationService.signIn(study, context, signIn);
} catch(ConsentRequiredException e) {
return e.getUserSession();
}
}
// Return a session *without* signing in because we have 3 sign in pathways that we want to test. In this case
// we're creating a session but not authenticating you which is only a thing that's useful for tests.
UserSession session = authenticationService.getSession(study, context);
session.setAuthenticated(false);
return session;
} catch(RuntimeException e) {
// Created the account, but failed to process the account properly. To avoid leaving behind a bunch of test
// accounts, delete this account.
if (identifier != null) {
deleteUser(study, identifier.getIdentifier());
}
throw e;
}
}
/**
* Delete the target user.
*
* @param study
* target user's study
* @param id
* target user's ID
*/
public void deleteUser(Study study, String id) {
checkNotNull(study);
checkArgument(StringUtils.isNotBlank(id));
AccountId accountId = AccountId.forId(study.getIdentifier(), id);
Account account = accountDao.getAccount(accountId);
if (account != null) {
// remove this first so if account is partially deleted, re-authenticating will pick
// up accurate information about the state of the account (as we can recover it)
cacheProvider.removeSessionByUserId(account.getId());
cacheProvider.removeRequestInfo(account.getId());
String healthCode = account.getHealthCode();
healthDataService.deleteRecordsForHealthCode(healthCode);
notificationsService.deleteAllRegistrations(study.getStudyIdentifier(), healthCode);
uploadService.deleteUploadsForHealthCode(healthCode);
scheduledActivityService.deleteActivitiesForUser(healthCode);
activityEventService.deleteActivityEvents(healthCode);
for (String externalId : BridgeUtils.collectExternalIds(account)) {
externalIdService.unassignExternalId(account, externalId);
}
// AccountSecret records and AccountsSubstudies records are are deleted on a
// cascading delete from Account
accountDao.deleteAccount(accountId);
}
}
}
|
app/org/sagebionetworks/bridge/services/UserAdminService.java
|
package org.sagebionetworks.bridge.services;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
import static org.sagebionetworks.bridge.models.accounts.SharingScope.NO_SHARING;
import java.util.Map;
import org.apache.commons.lang3.StringUtils;
import org.sagebionetworks.bridge.BridgeUtils;
import org.sagebionetworks.bridge.cache.CacheProvider;
import org.sagebionetworks.bridge.dao.AccountDao;
import org.sagebionetworks.bridge.dao.AccountSecretDao;
import org.sagebionetworks.bridge.exceptions.ConsentRequiredException;
import org.sagebionetworks.bridge.time.DateUtils;
import org.sagebionetworks.bridge.models.CriteriaContext;
import org.sagebionetworks.bridge.models.accounts.Account;
import org.sagebionetworks.bridge.models.accounts.AccountId;
import org.sagebionetworks.bridge.models.accounts.AccountSecretType;
import org.sagebionetworks.bridge.models.accounts.ConsentStatus;
import org.sagebionetworks.bridge.models.accounts.IdentifierHolder;
import org.sagebionetworks.bridge.models.accounts.SignIn;
import org.sagebionetworks.bridge.models.accounts.StudyParticipant;
import org.sagebionetworks.bridge.models.accounts.UserSession;
import org.sagebionetworks.bridge.models.studies.Study;
import org.sagebionetworks.bridge.models.subpopulations.ConsentSignature;
import org.sagebionetworks.bridge.models.subpopulations.SubpopulationGuid;
import org.sagebionetworks.bridge.validators.SignInValidator;
import org.sagebionetworks.bridge.validators.Validate;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
/**
 * Admin-only service for creating and deleting user accounts (primarily used to set up
 * and tear down test users). Creation can optionally consent the new user to all required
 * subpopulations and/or sign the user in; deletion removes the account and its associated
 * data from every dependent service before deleting the account record itself.
 */
@Component("userAdminService")
public class UserAdminService {
private AuthenticationService authenticationService;
private NotificationsService notificationsService;
private ParticipantService participantService;
private AccountDao accountDao;
private ConsentService consentService;
private HealthDataService healthDataService;
private ScheduledActivityService scheduledActivityService;
private ActivityEventService activityEventService;
private CacheProvider cacheProvider;
private ExternalIdService externalIdService;
private UploadService uploadService;
private AccountSecretDao accountSecretDao;
/** Authentication service, used to sign users in and to construct sessions. */
@Autowired
final void setAuthenticationService(AuthenticationService authenticationService) {
this.authenticationService = authenticationService;
}
/** Notifications service, used to clean up notification registrations when we delete users. */
@Autowired
final void setNotificationsService(NotificationsService notificationsService) {
this.notificationsService = notificationsService;
}
/** Participant service, used to create participant records and read them back. */
@Autowired
final void setParticipantService(ParticipantService participantService) {
this.participantService = participantService;
}
/** Account DAO, used to load and delete the underlying account record. */
@Autowired
final void setAccountDao(AccountDao accountDao) {
this.accountDao = accountDao;
}
/** Consent service, used to consent newly created users to research. */
@Autowired
final void setConsentService(ConsentService consentService) {
this.consentService = consentService;
}
/** Health data service, used to delete health data records when we delete users. */
@Autowired
final void setHealthDataService(HealthDataService healthDataService) {
this.healthDataService = healthDataService;
}
/** Scheduled activity service, used to delete a user's scheduled activities. */
@Autowired
final void setScheduledActivityService(ScheduledActivityService scheduledActivityService) {
this.scheduledActivityService = scheduledActivityService;
}
/** Activity event service, used to delete a user's activity events. */
@Autowired
final void setActivityEventService(ActivityEventService activityEventService) {
this.activityEventService = activityEventService;
}
/** Cache provider, used to evict sessions and request info for deleted users. */
@Autowired
final void setCacheProvider(CacheProvider cache) {
this.cacheProvider = cache;
}
/** External ID service, used to release external IDs assigned to deleted users. */
@Autowired
final void setExternalIdService(ExternalIdService externalIdService) {
this.externalIdService = externalIdService;
}
/** Upload service, used to delete a user's uploads when we delete users. */
@Autowired
final void setUploadService(UploadService uploadService) {
this.uploadService = uploadService;
}
/** Account secret DAO, used to remove reauthentication secrets when we delete users. */
@Autowired
final void setAccountSecretDao(AccountSecretDao accountSecretDao) {
this.accountSecretDao = accountSecretDao;
}
/**
 * Create a user and optionally consent the user and/or sign the user in. If a specific subpopulation
 * is not specified (and currently the API for this method does not allow it), then the method iterates
 * through all subpopulations in the study and consents the user to all required consents. This should
 * allow the user to make calls without receiving a 412 response.
 *
 * @param study
 * the study of the target user
 * @param participant
 * sign up information for the target user
 * @param subpopGuid
 * the subpopulation to consent to (if null, it will use the default/study subpopulation).
 * @param signUserIn
 * should the user be signed into Bridge after creation?
 * @param consentUser
 * should the user be consented to the research?
 *
 * @return UserSession for the newly created user
 */
public UserSession createUser(Study study, StudyParticipant participant, SubpopulationGuid subpopGuid,
boolean signUserIn, boolean consentUser) {
checkNotNull(study, "Study cannot be null");
checkNotNull(participant, "Participant cannot be null");
// Validate study + email or phone. This is the minimum we need to create a functional account.
SignIn signIn = new SignIn.Builder().withStudy(study.getIdentifier()).withEmail(participant.getEmail())
.withPhone(participant.getPhone()).withPassword(participant.getPassword()).build();
Validate.entityThrowingException(SignInValidator.MINIMAL, signIn);
IdentifierHolder identifier = null;
try {
// This used to hard-code the admin role to allow assignment of roles; now it must actually be called by an
// admin user (previously this was only checked in the related controller method).
identifier = participantService.createParticipant(study, participant, false);
StudyParticipant updatedParticipant = participantService.getParticipant(study, identifier.getIdentifier(), false);
// We don't filter users by any of these filtering criteria in the admin API.
CriteriaContext context = new CriteriaContext.Builder()
.withUserId(identifier.getIdentifier())
.withStudyIdentifier(study.getStudyIdentifier()).build();
if (consentUser) {
// Test consents use a placeholder name/birthdate, and sharing is disabled (NO_SHARING).
String name = String.format("[Signature for %s]", updatedParticipant.getEmail());
ConsentSignature signature = new ConsentSignature.Builder().withName(name)
.withBirthdate("1989-08-19").withSignedOn(DateUtils.getCurrentMillisFromEpoch()).build();
if (subpopGuid != null) {
consentService.consentToResearch(study, subpopGuid, updatedParticipant, signature, NO_SHARING, false);
} else {
// No subpopulation given: consent to every *required* subpopulation in the study.
Map<SubpopulationGuid,ConsentStatus> statuses = consentService.getConsentStatuses(context);
for (ConsentStatus consentStatus : statuses.values()) {
if (consentStatus.isRequired()) {
SubpopulationGuid guid = SubpopulationGuid.create(consentStatus.getSubpopulationGuid());
consentService.consentToResearch(study, guid, updatedParticipant, signature, NO_SHARING, false);
}
}
}
}
if (signUserIn) {
// We do ignore consent state here as our intention may be to create a user who is signed in but not
// consented.
try {
return authenticationService.signIn(study, context, signIn);
} catch(ConsentRequiredException e) {
return e.getUserSession();
}
}
// Return a session *without* signing in because we have 3 sign in pathways that we want to test. In this case
// we're creating a session but not authenticating you which is only a thing that's useful for tests.
UserSession session = authenticationService.getSession(study, context);
session.setAuthenticated(false);
return session;
} catch(RuntimeException e) {
// Created the account, but failed to process the account properly. To avoid leaving behind a bunch of test
// accounts, delete this account.
if (identifier != null) {
deleteUser(study, identifier.getIdentifier());
}
throw e;
}
}
/**
 * Delete the target user. A no-op if the account does not exist. Removes cached
 * session/request info first, then the user's data in each dependent service, and
 * finally the account record itself.
 *
 * @param study
 * target user's study
 * @param id
 * target user's ID
 */
public void deleteUser(Study study, String id) {
checkNotNull(study);
checkArgument(StringUtils.isNotBlank(id));
AccountId accountId = AccountId.forId(study.getIdentifier(), id);
Account account = accountDao.getAccount(accountId);
if (account != null) {
// remove this first so if account is partially deleted, re-authenticating will pick
// up accurate information about the state of the account (as we can recover it)
cacheProvider.removeSessionByUserId(account.getId());
cacheProvider.removeRequestInfo(account.getId());
String healthCode = account.getHealthCode();
healthDataService.deleteRecordsForHealthCode(healthCode);
notificationsService.deleteAllRegistrations(study.getStudyIdentifier(), healthCode);
uploadService.deleteUploadsForHealthCode(healthCode);
scheduledActivityService.deleteActivitiesForUser(healthCode);
activityEventService.deleteActivityEvents(healthCode);
for (String externalId : BridgeUtils.collectExternalIds(account)) {
externalIdService.unassignExternalId(account, externalId);
}
// remove reauthentication secrets before the account record itself is deleted
accountSecretDao.removeSecrets(AccountSecretType.REAUTH, account.getId());
accountDao.deleteAccount(accountId);
}
}
}
|
Using a DB cascading delete from account to clean up the secrets table.
|
app/org/sagebionetworks/bridge/services/UserAdminService.java
|
Using a DB cascading delete from account to clean up the secrets table.
|
|
Java
|
apache-2.0
|
4fcef04b896faf530b47e096465b483c7b2e44ab
| 0
|
mediascience/jaxrs-load-balancer
|
/**
* Licensed under the Apache License, Version 2.0 (the "License") under
* one or more contributor license agreements. See the NOTICE file
* distributed with this work for information regarding copyright
* ownership. You may not use this file except in compliance with the
* License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.msiops.jaxrs.loadbalancer;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import javax.ws.rs.container.ContainerRequestContext;
import javax.ws.rs.container.ContainerRequestFilter;
import javax.ws.rs.container.PreMatching;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * A pre-matching filter that detects a load balancer scheme/port change (via the
 * {@code x-forwarded-proto} and {@code x-forwarded-port} headers) and rewrites the
 * request URI accordingly, so downstream code sees the scheme and port the client
 * actually used. Any malformed or unrecognized input leaves the request untouched.
 */
@PreMatching()
public class ForwardedProtocolFilter implements ContainerRequestFilter {

    /*
     * Loggers are effectively constants; declared final so the reference
     * cannot be reassigned (the field was previously mutable).
     */
    private static final Logger LOG = LoggerFactory
            .getLogger(ForwardedProtocolFilter.class);

    @Override
    public void filter(final ContainerRequestContext requestContext)
            throws IOException {

        final String fproto = requestContext
                .getHeaderString("x-forwarded-proto");
        final String fport = requestContext.getHeaderString("x-forwarded-port");
        LOG.debug("fproto {}, fport {}", fproto, fport);

        final URI in = requestContext.getUriInfo().getRequestUri();
        LOG.debug("in {}", in);
        if (!in.isAbsolute()) {
            /*
             * no point dealing with a non-absolute uri
             */
            return;
        }
        if (in.isOpaque()) {
            /*
             * no point dealing with something definitely not a url
             */
            return;
        }

        final boolean haveHttps;
        if (in.getScheme().equalsIgnoreCase("http")) {
            haveHttps = false;
        } else if (in.getScheme().equalsIgnoreCase("https")) {
            haveHttps = true;
        } else {
            /*
             * not an HTTP(S) request URI, do nothing
             */
            return;
        }

        final boolean wantHttps;
        if (fproto == null) {
            // no forwarded-proto header: keep the incoming scheme
            wantHttps = haveHttps;
        } else if (fproto.equalsIgnoreCase("http")) {
            wantHttps = false;
        } else if (fproto.equalsIgnoreCase("https")) {
            wantHttps = true;
        } else {
            /*
             * unrecognized forwarded protocol, do nothing
             */
            return;
        }

        /*
         * Normalize a missing port to the scheme default so the comparisons
         * below are meaningful.
         */
        final int havePort;
        if (in.getPort() == -1) {
            havePort = haveHttps ? 443 : 80;
        } else {
            havePort = in.getPort();
        }

        final int wantPort;
        if (fport == null) {
            wantPort = havePort;
        } else {
            try {
                wantPort = Integer.parseInt(fport.trim());
            } catch (final NumberFormatException x) {
                // error parsing port, change nothing (narrowed from the
                // previous blanket catch(Exception): parseInt on a non-null
                // string can only fail this way)
                return;
            }
        }

        LOG.debug("haveHttps {}, havePort {}", haveHttps, havePort);
        LOG.debug("wantHttps {}, wantPort {}", wantHttps, wantPort);

        final String newScheme = haveHttps == wantHttps ? in.getScheme()
                : (wantHttps ? "https" : "http");
        /*
         * Omit the port from the rewritten URI when it is the scheme default.
         */
        final int stdPort = wantHttps ? 443 : 80;
        final int newPort = wantPort == stdPort ? -1 : wantPort;

        final URI out;
        try {
            out = new URI(newScheme, in.getUserInfo(), in.getHost(), newPort,
                    in.getPath(), in.getQuery(), in.getFragment());
        } catch (final URISyntaxException e) {
            // any problem and we just don't modify the url
            return;
        }
        LOG.debug("out {}", out);

        if (!in.equals(out)) {
            LOG.debug("set request uri to {}", out);
            requestContext.setRequestUri(out);
        }
    }
}
|
src/main/java/com/msiops/jaxrs/loadbalancer/ForwardedProtocolFilter.java
|
/**
* Licensed under the Apache License, Version 2.0 (the "License") under
* one or more contributor license agreements. See the NOTICE file
* distributed with this work for information regarding copyright
* ownership. You may not use this file except in compliance with the
* License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.msiops.jaxrs.loadbalancer;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import javax.ws.rs.container.ContainerRequestContext;
import javax.ws.rs.container.ContainerRequestFilter;
import javax.ws.rs.container.PreMatching;
/**
 * A pre-matching filter that detects load balancer scheme change and sets the
 * incoming scheme accordingly. Scheme and port come from the
 * {@code x-forwarded-proto} and {@code x-forwarded-port} headers; anything
 * malformed or unrecognized leaves the request untouched.
 */
@PreMatching()
public class ForwardedProtocolFilter implements ContainerRequestFilter {

    @Override
    public void filter(final ContainerRequestContext requestContext)
            throws IOException {

        final URI original = requestContext.getUriInfo().getRequestUri();
        // only absolute, non-opaque URIs can be meaningfully rewritten
        if (!original.isAbsolute() || original.isOpaque()) {
            return;
        }

        final Boolean originalHttps = asHttpsFlag(original.getScheme());
        if (originalHttps == null) {
            // not an HTTP(S) request URI, do nothing
            return;
        }

        final String forwardedProto = requestContext
                .getHeaderString("x-forwarded-proto");
        final boolean targetHttps;
        if (forwardedProto == null) {
            // no forwarded scheme: keep whatever the request arrived with
            targetHttps = originalHttps;
        } else {
            final Boolean forwardedHttps = asHttpsFlag(forwardedProto);
            if (forwardedHttps == null) {
                // unrecognized forwarded protocol, do nothing
                return;
            }
            targetHttps = forwardedHttps;
        }

        // normalize an absent port to the scheme default
        final int originalPort = original.getPort() == -1
                ? (originalHttps ? 443 : 80)
                : original.getPort();

        final String forwardedPort = requestContext
                .getHeaderString("x-forwarded-port");
        final int targetPort;
        if (forwardedPort == null) {
            targetPort = originalPort;
        } else {
            final Integer parsed = parsePort(forwardedPort);
            if (parsed == null) {
                // error parsing port, change nothing
                return;
            }
            targetPort = parsed;
        }

        final String targetScheme = originalHttps == targetHttps
                ? original.getScheme()
                : (targetHttps ? "https" : "http");
        // omit the port from the rewritten URI when it is the scheme default
        final int defaultPort = targetHttps ? 443 : 80;
        final int rewrittenPort = targetPort == defaultPort ? -1 : targetPort;

        final URI rewritten;
        try {
            rewritten = new URI(targetScheme, original.getUserInfo(),
                    original.getHost(), rewrittenPort, original.getPath(),
                    original.getQuery(), original.getFragment());
        } catch (final URISyntaxException e) {
            // any problem and we just don't modify the url
            return;
        }

        if (!original.equals(rewritten)) {
            requestContext.setRequestUri(rewritten);
        }
    }

    /**
     * Maps an HTTP scheme name to an https flag; {@code null} when the scheme
     * is neither http nor https (case-insensitive).
     */
    private static Boolean asHttpsFlag(final String scheme) {
        if (scheme.equalsIgnoreCase("http")) {
            return Boolean.FALSE;
        }
        if (scheme.equalsIgnoreCase("https")) {
            return Boolean.TRUE;
        }
        return null;
    }

    /**
     * Parses a forwarded port header value; {@code null} when it is not a
     * valid integer.
     */
    private static Integer parsePort(final String value) {
        try {
            return Integer.parseInt(value.trim());
        } catch (final Exception x) {
            return null;
        }
    }
}
|
debug logging in filter
|
src/main/java/com/msiops/jaxrs/loadbalancer/ForwardedProtocolFilter.java
|
debug logging in filter
|
|
Java
|
apache-2.0
|
5051e30af2609112fa22c6511501cc8592f881b9
| 0
|
robin13/elasticsearch,gingerwizard/elasticsearch,scorpionvicky/elasticsearch,HonzaKral/elasticsearch,nknize/elasticsearch,GlenRSmith/elasticsearch,uschindler/elasticsearch,HonzaKral/elasticsearch,HonzaKral/elasticsearch,strapdata/elassandra,nknize/elasticsearch,uschindler/elasticsearch,GlenRSmith/elasticsearch,scorpionvicky/elasticsearch,vroyer/elassandra,nknize/elasticsearch,GlenRSmith/elasticsearch,GlenRSmith/elasticsearch,uschindler/elasticsearch,scorpionvicky/elasticsearch,gingerwizard/elasticsearch,robin13/elasticsearch,strapdata/elassandra,vroyer/elassandra,coding0011/elasticsearch,nknize/elasticsearch,gingerwizard/elasticsearch,robin13/elasticsearch,gingerwizard/elasticsearch,coding0011/elasticsearch,gfyoung/elasticsearch,vroyer/elassandra,uschindler/elasticsearch,strapdata/elassandra,robin13/elasticsearch,coding0011/elasticsearch,scorpionvicky/elasticsearch,coding0011/elasticsearch,robin13/elasticsearch,uschindler/elasticsearch,strapdata/elassandra,coding0011/elasticsearch,gingerwizard/elasticsearch,scorpionvicky/elasticsearch,HonzaKral/elasticsearch,gfyoung/elasticsearch,GlenRSmith/elasticsearch,gfyoung/elasticsearch,gingerwizard/elasticsearch,strapdata/elassandra,gingerwizard/elasticsearch,gfyoung/elasticsearch,nknize/elasticsearch,gfyoung/elasticsearch
|
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.license.plugin.core;
import net.nicholaswilliams.java.licensing.exception.ExpiredLicenseException;
import net.nicholaswilliams.java.licensing.exception.InvalidLicenseException;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.cluster.*;
import org.elasticsearch.cluster.ack.ClusterStateUpdateResponse;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.collect.ImmutableMap;
import org.elasticsearch.common.collect.ImmutableSet;
import org.elasticsearch.common.collect.Sets;
import org.elasticsearch.common.component.AbstractLifecycleComponent;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.inject.Singleton;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException;
import org.elasticsearch.gateway.GatewayService;
import org.elasticsearch.license.core.ESLicense;
import org.elasticsearch.license.manager.ESLicenseManager;
import org.elasticsearch.license.plugin.action.delete.DeleteLicenseRequest;
import org.elasticsearch.license.plugin.action.put.PutLicenseRequest;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.*;
import java.io.IOException;
import java.util.*;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicReference;
import static org.elasticsearch.license.core.ESLicenses.reduceAndMap;
/**
* Service responsible for managing {@link org.elasticsearch.license.plugin.core.LicensesMetaData}
* Interfaces through which this is exposed are:
* - LicensesManagerService - responsible for managing signed and one-time-trial licenses
* - LicensesClientService - responsible for feature registration and notification to consumer plugin(s)
* <p/>
*
* Notification Scheme:
*
* All registered feature(s) are notified using {@link #notifyFeatures(LicensesMetaData)} (depends on the current
* {@link #registeredListeners}). It is idempotent with respect to all the feature listeners.
*
* The notification scheduling is done by {@link #notifyFeaturesAndScheduleNotification(LicensesMetaData)} which does the following:
* - calls {@link #notifyFeatures(LicensesMetaData)} to notify all registered feature(s)
* - if there is any license(s) with a future expiry date in the current cluster state:
* - schedules a delayed {@link LicensingClientNotificationJob} on the MIN of all the expiry dates of all the registered feature(s)
*
* The {@link LicensingClientNotificationJob} calls {@link #notifyFeaturesAndScheduleNotification(LicensesMetaData)} to schedule
* another delayed {@link LicensingClientNotificationJob} as stated above. It is a no-op in case of a global block on
* {@link org.elasticsearch.gateway.GatewayService#STATE_NOT_RECOVERED_BLOCK}
*
* Upon successful registration of a new feature:
* - {@link #notifyFeaturesAndScheduleNotification(LicensesMetaData)} is called
*
* Upon clusterChanged():
* - {@link #notifyFeaturesAndScheduleNotification(LicensesMetaData)} is called if:
* - new trial/signed license(s) are found in the cluster state meta data
* - if new feature(s) are added to the registeredListener
* - if the previous cluster state had a global block on {@link org.elasticsearch.gateway.GatewayService#STATE_NOT_RECOVERED_BLOCK}
* - no-op in case of global block on {@link org.elasticsearch.gateway.GatewayService#STATE_NOT_RECOVERED_BLOCK}
*/
@Singleton
public class LicensesService extends AbstractLifecycleComponent<LicensesService> implements ClusterStateListener, LicensesManagerService, LicensesClientService {
/** Transport action name used to ask the elected master to generate a trial license. */
public static final String REGISTER_TRIAL_LICENSE_ACTION_NAME = "internal:cluster/licenses/register_trial_license";
/** Verifies license signatures and converts between licenses and their signatures. */
private final ESLicenseManager licenseManager;
/** Used to read cluster state and submit licenses-metadata update tasks. */
private final ClusterService clusterService;
private final ThreadPool threadPool;
/** Used to register/send the trial-license registration transport action. */
private final TransportService transportService;
/**
 * Currently active consumers to notify to
 */
private final List<ListenerHolder> registeredListeners = new CopyOnWriteArrayList<>();
/**
 * Currently pending consumers that has to be registered
 */
private final Queue<ListenerHolder> pendingListeners = new ConcurrentLinkedQueue<>();
/**
 * Currently active scheduledNotifications
 * All finished notifications will be cleared in {@link #clusterChanged(org.elasticsearch.cluster.ClusterChangedEvent)}
 * and {@link #scheduleNextNotification(long)}
 */
private final Queue<ScheduledFuture> scheduledNotifications = new ConcurrentLinkedQueue<>();
/**
 * The last licensesMetaData that has been notified by {@link #notifyFeatures(LicensesMetaData)}
 */
private final AtomicReference<LicensesMetaData> lastObservedLicensesState;
/**
 * Constructs the service. On master-eligible nodes this also registers the
 * transport handler for trial-license registration requests.
 */
@Inject
public LicensesService(Settings settings, ClusterService clusterService, ThreadPool threadPool, TransportService transportService, ESLicenseManager licenseManager) {
super(settings);
this.clusterService = clusterService;
this.licenseManager = licenseManager;
this.threadPool = threadPool;
this.transportService = transportService;
this.lastObservedLicensesState = new AtomicReference<>(null);
// Only master-eligible nodes handle trial-license registration requests.
if (DiscoveryNode.masterNode(settings)) {
transportService.registerHandler(REGISTER_TRIAL_LICENSE_ACTION_NAME, new RegisterTrialLicenseRequestHandler());
}
}
/**
 * {@inheritDoc}
 * <p/>
 * Validates the submitted licenses first; only a fully valid set is added to the
 * cluster-state licenses metadata. Invalid/expired sets are reported back through
 * the listener without any cluster-state change.
 */
@Override
public void registerLicenses(final PutLicenseRequestHolder requestHolder, final ActionListener<LicensesUpdateResponse> listener) {
final PutLicenseRequest request = requestHolder.request;
final Set<ESLicense> newLicenses = Sets.newHashSet(request.licenses());
// Verify signatures and expiry up front, before touching cluster state.
LicensesStatus status = checkLicenses(newLicenses);
if (status == LicensesStatus.VALID) {
clusterService.submitStateUpdateTask(requestHolder.source, new AckedClusterStateUpdateTask<LicensesUpdateResponse>(request, listener) {
@Override
protected LicensesUpdateResponse newResponse(boolean acknowledged) {
return new LicensesUpdateResponse(acknowledged, LicensesStatus.VALID);
}
@Override
public ClusterState execute(ClusterState currentState) throws Exception {
MetaData metaData = currentState.metaData();
MetaData.Builder mdBuilder = MetaData.builder(currentState.metaData());
LicensesMetaData currentLicenses = metaData.custom(LicensesMetaData.TYPE);
final LicensesWrapper licensesWrapper = LicensesWrapper.wrap(currentLicenses);
Set<String> newSignatures = licenseManager.toSignatures(newLicenses);
Set<String> newLicenseSignatures = Sets.union(licensesWrapper.signatures, newSignatures);
// Only publish a new cluster state when the signature set actually grew.
if (newLicenseSignatures.size() != licensesWrapper.signatures.size()) {
LicensesMetaData newLicensesMetaData = new LicensesMetaData(newLicenseSignatures, licensesWrapper.encodedTrialLicenses);
mdBuilder.putCustom(LicensesMetaData.TYPE, newLicensesMetaData);
return ClusterState.builder(currentState).metaData(mdBuilder).build();
}
return currentState;
}
});
} else {
// Invalid or expired: acknowledge with the failure status, no state change.
listener.onResponse(new LicensesUpdateResponse(true, status));
}
}
/**
 * Cluster-state update response that additionally carries the validation status
 * of the licenses in the originating request.
 */
public static class LicensesUpdateResponse extends ClusterStateUpdateResponse {
// Set once at construction and never mutated; declared final accordingly.
private final LicensesStatus status;
public LicensesUpdateResponse(boolean acknowledged, LicensesStatus status) {
super(acknowledged);
this.status = status;
}
/** @return the validation status of the submitted licenses */
public LicensesStatus status() {
return status;
}
}
/**
 * {@inheritDoc}
 * <p/>
 * Removes every signed license whose feature appears in the request. Trial
 * licenses are left untouched. A no-op cluster-state update is submitted when
 * no matching signed license exists.
 */
@Override
public void removeLicenses(final DeleteLicenseRequestHolder requestHolder, final ActionListener<ClusterStateUpdateResponse> listener) {
final DeleteLicenseRequest request = requestHolder.request;
clusterService.submitStateUpdateTask(requestHolder.source, new AckedClusterStateUpdateTask<ClusterStateUpdateResponse>(request, listener) {
@Override
protected ClusterStateUpdateResponse newResponse(boolean acknowledged) {
return new ClusterStateUpdateResponse(acknowledged);
}
@Override
public ClusterState execute(ClusterState currentState) throws Exception {
MetaData metaData = currentState.metaData();
LicensesMetaData currentLicenses = metaData.custom(LicensesMetaData.TYPE);
final LicensesWrapper licensesWrapper = LicensesWrapper.wrap(currentLicenses);
// Collect the signed licenses whose feature is slated for deletion.
Set<ESLicense> currentSignedLicenses = licensesWrapper.signedLicenses(licenseManager);
Set<ESLicense> licensesToDelete = new HashSet<>();
for (ESLicense license : currentSignedLicenses) {
if (request.features().contains(license.feature())) {
licensesToDelete.add(license);
}
}
if (!licensesToDelete.isEmpty()) {
// Re-encode the surviving licenses; trial licenses carry over unchanged.
Set<ESLicense> reducedLicenses = Sets.difference(currentSignedLicenses, licensesToDelete);
Set<String> newSignatures = licenseManager.toSignatures(reducedLicenses);
LicensesMetaData newLicensesMetaData = new LicensesMetaData(newSignatures, licensesWrapper.encodedTrialLicenses);
MetaData.Builder mdBuilder = MetaData.builder(currentState.metaData());
mdBuilder.putCustom(LicensesMetaData.TYPE, newLicensesMetaData);
return ClusterState.builder(currentState).metaData(mdBuilder).build();
} else {
return currentState;
}
}
});
}
/**
 * {@inheritDoc}
 * <p/>
 * Collects the feature names of all registered listeners currently flagged
 * as enabled.
 */
@Override
public Set<String> enabledFeatures() {
final Set<String> features = Sets.newHashSet();
for (final ListenerHolder listenerHolder : registeredListeners) {
final boolean featureEnabled = listenerHolder.enabled.get();
if (featureEnabled) {
features.add(listenerHolder.feature);
}
}
return features;
}
/**
 * {@inheritDoc}
 * <p/>
 * Returns at most one license per feature — the one with the latest expiry
 * date — sorted by descending issue date. Expired licenses are intentionally
 * retained (see the comment on reduceAndMap below). Returns an empty list
 * when no licenses metadata exists in cluster state.
 */
@Override
public List<ESLicense> getLicenses() {
LicensesMetaData currentMetaData = clusterService.state().metaData().custom(LicensesMetaData.TYPE);
if (currentMetaData != null) {
// don't use ESLicenses.reduceAndMap, as it will merge out expired licenses
Set<ESLicense> licenses = Sets.union(licenseManager.fromSignatures(currentMetaData.getSignatures()),
TrialLicenseUtils.fromEncodedTrialLicenses(currentMetaData.getEncodedTrialLicenses()));
// bucket license for feature with the latest expiry date
// (single map lookup instead of the previous containsKey + get pair)
Map<String, ESLicense> licenseMap = new HashMap<>();
for (ESLicense license : licenses) {
ESLicense prevLicense = licenseMap.get(license.feature());
if (prevLicense == null || license.expiryDate() > prevLicense.expiryDate()) {
licenseMap.put(license.feature(), license);
}
}
// sort the licenses by descending issue date; Long.compare avoids the
// truncation/overflow of the previous (int)(long - long) subtraction
List<ESLicense> reducedLicenses = new ArrayList<>(licenseMap.values());
Collections.sort(reducedLicenses, new Comparator<ESLicense>() {
@Override
public int compare(ESLicense license1, ESLicense license2) {
return Long.compare(license2.issueDate(), license1.issueDate());
}
});
return reducedLicenses;
}
return Collections.emptyList();
}
/**
 * Verifies the given licenses after reducing them to one effective
 * license per feature.
 */
private LicensesStatus checkLicenses(Set<ESLicense> licenses) {
    // reduceAndMap keeps a single effective license per feature before verification
    return checkLicenses(reduceAndMap(licenses));
}
/**
 * Verifies the given feature-to-license map against the license manager.
 *
 * @return VALID when verification succeeds, EXPIRED or INVALID when the
 *         corresponding verification exception is thrown
 */
private LicensesStatus checkLicenses(Map<String, ESLicense> licenseMap) {
    try {
        // verification throws on the first expired/invalid license
        licenseManager.verifyLicenses(licenseMap);
        return LicensesStatus.VALID;
    } catch (ExpiredLicenseException e) {
        return LicensesStatus.EXPIRED;
    } catch (InvalidLicenseException e) {
        return LicensesStatus.INVALID;
    }
}
/**
 * Master-only operation to generate a one-time trial license for a feature.
 * The trial license is only generated and stored if the current cluster state metaData
 * has no signed/one-time-trial license for the feature in question
 */
private void registerTrialLicense(final RegisterTrialLicenseRequest request) {
    clusterService.submitStateUpdateTask("register trial license []", new ProcessedClusterStateUpdateTask() {
        @Override
        public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) {
            // Change to debug
            logger.info("Processed Trial License registration");
            LicensesMetaData licensesMetaData = newState.metaData().custom(LicensesMetaData.TYPE);
            logLicenseMetaDataStats("new", licensesMetaData);
        }

        @Override
        public ClusterState execute(ClusterState currentState) throws Exception {
            MetaData metaData = currentState.metaData();
            MetaData.Builder mdBuilder = MetaData.builder(currentState.metaData());
            LicensesMetaData currentLicensesMetaData = metaData.custom(LicensesMetaData.TYPE);
            final LicensesWrapper licensesWrapper = LicensesWrapper.wrap(currentLicensesMetaData);
            // do not generate a trial license for a feature that already has a signed/trial license
            if (checkTrialLicenseGenerationCondition(request.feature, licensesWrapper)) {
                // union of the existing trial licenses plus the newly generated one
                Set<String> newTrialLicenses = Sets.union(licensesWrapper.encodedTrialLicenses,
                        Sets.newHashSet(generateEncodedTrialLicense(request.feature, request.duration, request.maxNodes)));
                final LicensesMetaData newLicensesMetaData = new LicensesMetaData(
                        licensesWrapper.signatures, newTrialLicenses);
                mdBuilder.putCustom(LicensesMetaData.TYPE, newLicensesMetaData);
                return ClusterState.builder(currentState).metaData(mdBuilder).build();
            }
            // feature already licensed: leave the cluster state untouched
            return currentState;
        }

        @Override
        public void onFailure(String source, @Nullable Throwable t) {
            // NOTE(review): logged at info level — consider warn/error for update failures
            logger.info("LicensesService: " + source, t);
        }

        /**
         * @return true when no signed or trial license exists for {@code feature},
         *         false otherwise
         */
        private boolean checkTrialLicenseGenerationCondition(String feature, LicensesWrapper licensesWrapper) {
            for (ESLicense license : Sets.union(licensesWrapper.signedLicenses(licenseManager),
                    licensesWrapper.trialLicenses())) {
                if (license.feature().equals(feature)) {
                    return false;
                }
            }
            return true;
        }

        /**
         * Builds and encodes a trial license issued to this cluster for the
         * given feature, duration and node limit.
         */
        private String generateEncodedTrialLicense(String feature, TimeValue duration, int maxNodes) {
            return TrialLicenseUtils.toEncodedTrialLicense(
                    TrialLicenseUtils.builder()
                            .issuedTo(clusterService.state().getClusterName().value())
                            .issueDate(System.currentTimeMillis())
                            .duration(duration)
                            .feature(feature)
                            .maxNodes(maxNodes)
                            .build()
            );
        }
    });
}
@Override
protected void doStart() throws ElasticsearchException {
    //Change to debug
    logger.info("Started LicensesService");
    // start receiving cluster state updates (clusterChanged callbacks)
    clusterService.add(this);
}
@Override
protected void doStop() throws ElasticsearchException {
    // stop receiving cluster state updates
    clusterService.remove(this);
    // cancel all notifications
    for (ScheduledFuture scheduledNotification : scheduledNotifications) {
        scheduledNotification.cancel(true);
    }
    // notify features to be disabled
    for (ListenerHolder holder : registeredListeners) {
        holder.disableFeatureIfNeeded();
    }
    // clear all handlers
    registeredListeners.clear();
    // empty out notification queue
    scheduledNotifications.clear();
    // reset so a restarted service re-notifies from scratch
    lastObservedLicensesState.set(null);
}
@Override
protected void doClose() throws ElasticsearchException {
    logger.info("Closing LicensesService");
    // unregister the master-side trial license request handler
    transportService.removeHandler(REGISTER_TRIAL_LICENSE_ACTION_NAME);
}
/**
 * When there is no global block on {@link org.elasticsearch.gateway.GatewayService#STATE_NOT_RECOVERED_BLOCK}:
 * - tries to register any {@link #pendingListeners} by calling {@link #registeredListeners}
 * - if any {@link #pendingListeners} are registered successfully or if previous cluster state had a block on
 * {@link org.elasticsearch.gateway.GatewayService#STATE_NOT_RECOVERED_BLOCK}, calls
 * {@link #notifyFeaturesAndScheduleNotification(LicensesMetaData)}
 * - else calls {@link #notifyFeaturesAndScheduleNotificationIfNeeded(LicensesMetaData)}
 */
@Override
public void clusterChanged(ClusterChangedEvent event) {
    final ClusterState currentClusterState = event.state();
    final ClusterState previousClusterState = event.previousState();
    if (!currentClusterState.blocks().hasGlobalBlock(GatewayService.STATE_NOT_RECOVERED_BLOCK)) {
        LicensesMetaData oldLicensesMetaData = previousClusterState.getMetaData().custom(LicensesMetaData.TYPE);
        LicensesMetaData currentLicensesMetaData = currentClusterState.getMetaData().custom(LicensesMetaData.TYPE);
        logLicenseMetaDataStats("old", oldLicensesMetaData);
        logLicenseMetaDataStats("new", currentLicensesMetaData);
        // Check pending feature registrations and try to complete registrations
        boolean addedNewRegisteredListener = false;
        if (!pendingListeners.isEmpty()) {
            ListenerHolder pendingRegistrationLister;
            while ((pendingRegistrationLister = pendingListeners.poll()) != null) {
                boolean masterAvailable = registerListener(pendingRegistrationLister);
                logger.info("trying to register pending listener for " + pendingRegistrationLister.feature + " masterAvailable: " + masterAvailable);
                if (!masterAvailable) {
                    // if the master is not available do not, break out of trying pendingListeners
                    // (the polled listener is re-queued so it is retried on a later event)
                    pendingListeners.add(pendingRegistrationLister);
                    break;
                } else {
                    logger.info("successfully registered listener for: " + pendingRegistrationLister.feature);
                    registeredListeners.add(pendingRegistrationLister);
                    // make sure to notify new registered feature
                    // notifications could have been scheduled for it before it was registered
                    addedNewRegisteredListener = true;
                }
            }
        }
        // notify all interested plugins
        // Change to debug
        if (previousClusterState.blocks().hasGlobalBlock(GatewayService.STATE_NOT_RECOVERED_BLOCK) || addedNewRegisteredListener) {
            // unconditional notification: the cluster just recovered or a new listener joined
            logger.info("calling notifyFeaturesAndScheduleNotification from clusterChanged");
            notifyFeaturesAndScheduleNotification(currentLicensesMetaData);
        } else {
            // skip notification when the metadata was already notified on
            logger.info("calling notifyFeaturesAndScheduleNotificationIfNeeded from clusterChanged");
            notifyFeaturesAndScheduleNotificationIfNeeded(currentLicensesMetaData);
        }
    } else {
        logger.info("clusterChanged: no action [has STATE_NOT_RECOVERED_BLOCK]");
    }
}
/**
 * Calls {@link #notifyFeaturesAndScheduleNotification(LicensesMetaData)} with
 * <code>currentLicensesMetaData</code> unless the exact same metadata instance
 * was already notified on.
 */
private void notifyFeaturesAndScheduleNotificationIfNeeded(LicensesMetaData currentLicensesMetaData) {
    final LicensesMetaData lastNotified = lastObservedLicensesState.get();
    final boolean alreadyNotified = lastNotified != null && lastNotified.equals(currentLicensesMetaData);
    if (alreadyNotified) {
        logger.info("currentLicensesMetaData has been already notified on");
    } else {
        notifyFeaturesAndScheduleNotification(currentLicensesMetaData);
    }
}
/**
 * Calls {@link #notifyFeatures(LicensesMetaData)} with
 * <code>currentLicensesMetaData</code> and, when some registered feature has an
 * expiring license, schedules the earliest expiry notification.
 */
private void notifyFeaturesAndScheduleNotification(LicensesMetaData currentLicensesMetaData) {
    final long nextScheduleFrequency = notifyFeatures(currentLicensesMetaData);
    if (nextScheduleFrequency == -1L) {
        // no registered feature has an expiring license; nothing to schedule
        return;
    }
    scheduleNextNotification(nextScheduleFrequency);
}
/**
 * Checks license expiry for all the registered feature(s), upon completion
 * sets <code>currentLicensesMetaData</code> to {@link #lastObservedLicensesState}
 * to ensure the same license(s) are not notified on from
 * {@link #clusterChanged(org.elasticsearch.cluster.ClusterChangedEvent)}
 *
 * @return -1 if there are no expiring license(s) for any registered feature(s), else
 * returns the minimum of the expiry times of all the registered feature(s) to
 * schedule an expiry notification
 */
private long notifyFeatures(LicensesMetaData currentLicensesMetaData) {
    long nextScheduleFrequency = -1l;
    // small offset so the scheduled notification fires strictly after the expiry instant
    long offset = TimeValue.timeValueMillis(100).getMillis();
    StringBuilder sb = new StringBuilder("Registered listeners: [ ");
    for (ListenerHolder listenerHolder : registeredListeners) {
        sb.append("( ");
        sb.append("feature:");
        sb.append(listenerHolder.feature);
        sb.append(", ");
        long expiryDate;
        // -1 means no signed/trial license exists for this feature
        if ((expiryDate = expiryDateForFeature(listenerHolder.feature, currentLicensesMetaData)) != -1l) {
            sb.append(" license expiry: ");
            sb.append(expiryDate);
            sb.append(", ");
        }
        // negative when no license exists (expiryDate == -1) or the license is expired
        long expiryDuration = expiryDate - System.currentTimeMillis();
        if (expiryDate == -1l) {
            sb.append("no trial/signed license found");
            sb.append(", ");
        } else {
            sb.append("license expires in: ");
            sb.append(TimeValue.timeValueMillis(expiryDuration).toString());
            sb.append(", ");
        }
        if (expiryDuration > 0l) {
            // license still valid: enable the feature and track the earliest expiry
            sb.append("calling enableFeatureIfNeeded");
            listenerHolder.enableFeatureIfNeeded();
            if (nextScheduleFrequency == -1l) {
                nextScheduleFrequency = expiryDuration + offset;
            } else {
                nextScheduleFrequency = Math.min(expiryDuration + offset, nextScheduleFrequency);
            }
        } else {
            // expired or missing license: disable the feature
            sb.append("calling disableFeatureIfNeeded");
            listenerHolder.disableFeatureIfNeeded();
        }
        sb.append(" )");
    }
    sb.append("]");
    // Change to debug
    logger.info(sb.toString());
    logLicenseMetaDataStats("Setting last observed metaData", currentLicensesMetaData);
    // remember what was notified on so clusterChanged can skip duplicate notifications
    lastObservedLicensesState.set(currentLicensesMetaData);
    if (nextScheduleFrequency == -1l) {
        logger.info("no need to schedule next notification");
    } else {
        logger.info("next notification time: " + TimeValue.timeValueMillis(nextScheduleFrequency).toString());
    }
    return nextScheduleFrequency;
}
/**
 * Logs the number of signed and trial licenses held in the given metadata,
 * reporting zero counts when no licenses metadata is present.
 */
private void logLicenseMetaDataStats(String prefix, LicensesMetaData licensesMetaData) {
    final int signedCount = (licensesMetaData != null) ? licensesMetaData.getSignatures().size() : 0;
    final int trialCount = (licensesMetaData != null) ? licensesMetaData.getEncodedTrialLicenses().size() : 0;
    logger.info(prefix + " LicensesMetaData: signedLicenses: " + signedCount + " trialLicenses: " + trialCount);
}
/**
 * {@inheritDoc}
 * <p/>
 * Registers the feature listener immediately when possible; otherwise queues
 * it for registration on a later cluster state change.
 */
@Override
public void register(String feature, TrialLicenseOptions trialLicenseOptions, Listener listener) {
    final ListenerHolder listenerHolder = new ListenerHolder(feature, trialLicenseOptions, listener);
    if (!registerListener(listenerHolder)) {
        // master unavailable or cluster not yet recovered: retried from clusterChanged
        logger.info("add listener for: " + listenerHolder.feature + " to pending registration queue");
        pendingListeners.add(listenerHolder);
        return;
    }
    logger.info("successfully registered listener for: " + listenerHolder.feature);
    registeredListeners.add(listenerHolder);
}
/**
 * Notifies new feature listener if it already has a signed license
 * if new feature has a non-null trial license option, a master node request is made to generate the trial license
 * if no trial license option is specified for the feature and no signed license is found,
 * then notifies features to be disabled
 *
 * @param listenerHolder of the feature to register
 * @return true if registration has been completed, false otherwise (if masterNode is not available & trail license spec is provided
 * or if there is a global block on {@link org.elasticsearch.gateway.GatewayService#STATE_NOT_RECOVERED_BLOCK})
 */
private boolean registerListener(final ListenerHolder listenerHolder) {
    logger.info("Registering listener for " + listenerHolder.feature);
    ClusterState currentState = clusterService.state();
    // cluster state is not usable until recovery completes; caller re-queues the listener
    if (currentState.blocks().hasGlobalBlock(GatewayService.STATE_NOT_RECOVERED_BLOCK)) {
        logger.info("Store as pendingRegistration [cluster has NOT_RECOVERED_BLOCK]");
        return false;
    }
    LicensesMetaData currentMetaData = currentState.metaData().custom(LicensesMetaData.TYPE);
    if (expiryDateForFeature(listenerHolder.feature, currentMetaData) == -1l) {
        // does not have any license so generate a trial license
        TrialLicenseOptions options = listenerHolder.trialLicenseOptions;
        if (options != null) {
            // Trial license option is provided
            RegisterTrialLicenseRequest request = new RegisterTrialLicenseRequest(listenerHolder.feature,
                    options.duration, options.maxNodes);
            if (currentState.nodes().localNodeMaster()) {
                // local node is the elected master: submit the cluster state update directly
                logger.info("Executing trial license request");
                registerTrialLicense(request);
            } else {
                DiscoveryNode masterNode = currentState.nodes().masterNode();
                if (masterNode != null) {
                    // delegate trial license generation to the elected master
                    logger.info("Sending trial license request to master");
                    transportService.sendRequest(masterNode,
                            REGISTER_TRIAL_LICENSE_ACTION_NAME, request, EmptyTransportResponseHandler.INSTANCE_SAME);
                } else {
                    // could not sent register trial license request to master
                    logger.info("Store as pendingRegistration [master not available yet]");
                    return false;
                }
            }
        } else {
            // notify feature as clusterChangedEvent may not happen
            // as no trial or signed license has been found for feature
            // Change to debug
            logger.info("Calling notifyFeaturesAndScheduleNotification [no trial license spec provided]");
            notifyFeaturesAndScheduleNotification(currentMetaData);
        }
    } else {
        // signed license already found for the new registered
        // feature, notify feature on registration
        logger.info("Calling notifyFeaturesAndScheduleNotification [signed/trial license available]");
        notifyFeaturesAndScheduleNotification(currentMetaData);
    }
    return true;
}
/**
 * Looks up the effective license for the given feature.
 *
 * @return the license's expiry date in millis, or -1 when no effective
 *         signed/trial license exists for the feature
 */
private long expiryDateForFeature(String feature, LicensesMetaData currentLicensesMetaData) {
    final ESLicense featureLicense = getEffectiveLicenses(currentLicensesMetaData).get(feature);
    if (featureLicense == null) {
        logger.info("no effective license for " + feature);
        return -1L;
    }
    logger.info("effective license for " + feature + " relative expiry: " +
            TimeValue.timeValueMillis(featureLicense.expiryDate() - System.currentTimeMillis()));
    return featureLicense.expiryDate();
}
/**
 * Combines signed and trial licenses from the metadata and reduces them to one
 * effective license per feature; returns an empty map for null metadata.
 */
private Map<String, ESLicense> getEffectiveLicenses(LicensesMetaData metaData) {
    if (metaData == null) {
        // no licenses metadata in the cluster state yet
        return ImmutableMap.copyOf(new HashMap<String, ESLicense>());
    }
    final Set<ESLicense> allLicenses = new HashSet<>();
    allLicenses.addAll(licenseManager.fromSignatures(metaData.getSignatures()));
    allLicenses.addAll(TrialLicenseUtils.fromEncodedTrialLicenses(metaData.getEncodedTrialLicenses()));
    return reduceAndMap(allLicenses);
}
/**
 * Clears out any completed notification future from
 * {@link #scheduledNotifications}
 */
private void clearFinishedNotifications() {
    // drain finished futures from the head of the queue; stop at the first
    // future that is still pending
    for (ScheduledFuture head = scheduledNotifications.peek();
         head != null && head.isDone();
         head = scheduledNotifications.peek()) {
        scheduledNotifications.poll();
    }
}
/**
 * Name of the thread pool used to run scheduled expiry notification jobs.
 */
private String executorName() {
    return ThreadPool.Names.GENERIC;
}
/**
 * Schedules an expiry notification with a delay of <code>nextScheduleDelay</code>
 * milliseconds, first pruning any already-completed notification futures.
 */
private void scheduleNextNotification(long nextScheduleDelay) {
    clearFinishedNotifications();
    final TimeValue delay = TimeValue.timeValueMillis(nextScheduleDelay);
    try {
        final ScheduledFuture notification =
                threadPool.schedule(delay, executorName(), new LicensingClientNotificationJob());
        scheduledNotifications.add(notification);
        logger.info("Scheduling next notification after: " + delay);
    } catch (EsRejectedExecutionException ex) {
        // thread pool is shutting down or saturated; drop this notification
        logger.info("Couldn't re-schedule licensing client notification job", ex);
    }
}
/**
 * Job for notifying on expired license(s) to registered feature(s)
 * In case of a global block on {@link org.elasticsearch.gateway.GatewayService#STATE_NOT_RECOVERED_BLOCK},
 * the notification is not run, instead the feature(s) would be notified on the next
 * {@link #clusterChanged(org.elasticsearch.cluster.ClusterChangedEvent)} with no global block
 */
private class LicensingClientNotificationJob implements Runnable {

    @Override
    public void run() {
        logger.info("Performing LicensingClientNotificationJob");
        final ClusterState state = clusterService.state();
        if (state.blocks().hasGlobalBlock(GatewayService.STATE_NOT_RECOVERED_BLOCK)) {
            // next clusterChanged event will deal with the missed notifications
            logger.info("skip notification [STATE_NOT_RECOVERED_BLOCK]");
            return;
        }
        final LicensesMetaData currentLicensesMetaData = state.metaData().custom(LicensesMetaData.TYPE);
        notifyFeaturesAndScheduleNotification(currentLicensesMetaData);
    }
}
/**
 * Pairs a {@link PutLicenseRequest} with the source description used when
 * submitting the corresponding cluster state update task.
 */
public static class PutLicenseRequestHolder {
    private final PutLicenseRequest request;
    private final String source;

    public PutLicenseRequestHolder(PutLicenseRequest request, String source) {
        this.request = request;
        this.source = source;
    }
}
/**
 * Pairs a {@link DeleteLicenseRequest} with the source description used when
 * submitting the corresponding cluster state update task.
 */
public static class DeleteLicenseRequestHolder {
    private final DeleteLicenseRequest request;
    private final String source;

    public DeleteLicenseRequestHolder(DeleteLicenseRequest request, String source) {
        this.request = request;
        this.source = source;
    }
}
/**
 * Options used to generate a trial license when a feature registers without
 * a signed license: the trial duration and the maximum number of nodes.
 */
public static class TrialLicenseOptions {
    final TimeValue duration;
    final int maxNodes;

    public TrialLicenseOptions(TimeValue duration, int maxNodes) {
        this.duration = duration;
        this.maxNodes = maxNodes;
    }
}
/**
 * Stores configuration and listener for a feature
 */
private class ListenerHolder {
    final String feature;
    final TrialLicenseOptions trialLicenseOptions;
    final Listener listener;
    final AtomicBoolean enabled = new AtomicBoolean(false); // by default, a consumer plugin should be disabled

    private ListenerHolder(String feature, TrialLicenseOptions trialLicenseOptions, Listener listener) {
        this.feature = feature;
        this.trialLicenseOptions = trialLicenseOptions;
        this.listener = listener;
    }

    // transitions disabled -> enabled; the CAS makes the callback fire at most
    // once per transition even when invoked from multiple notification paths
    private void enableFeatureIfNeeded() {
        if (enabled.compareAndSet(false, true)) {
            listener.onEnabled();
        }
    }

    // transitions enabled -> disabled; no-op when the feature is already disabled
    private void disableFeatureIfNeeded() {
        if (enabled.compareAndSet(true, false)) {
            listener.onDisabled();
        }
    }
}
/**
 * Thin wrapper to work with {@link org.elasticsearch.license.plugin.core.LicensesMetaData}
 * Never mutates the wrapped metaData
 */
private static class LicensesWrapper {

    public static LicensesWrapper wrap(LicensesMetaData licensesMetaData) {
        return new LicensesWrapper(licensesMetaData);
    }

    // both default to empty sets so a null metaData behaves like "no licenses"
    private ImmutableSet<String> signatures = ImmutableSet.of();
    private ImmutableSet<String> encodedTrialLicenses = ImmutableSet.of();

    private LicensesWrapper(LicensesMetaData licensesMetaData) {
        if (licensesMetaData != null) {
            this.signatures = ImmutableSet.copyOf(licensesMetaData.getSignatures());
            this.encodedTrialLicenses = ImmutableSet.copyOf(licensesMetaData.getEncodedTrialLicenses());
        }
    }

    /** Decodes the stored signatures into signed licenses. */
    public Set<ESLicense> signedLicenses(ESLicenseManager licenseManager) {
        return licenseManager.fromSignatures(signatures);
    }

    /** Decodes the stored encoded trial licenses. */
    public Set<ESLicense> trialLicenses() {
        return TrialLicenseUtils.fromEncodedTrialLicenses(encodedTrialLicenses);
    }
}
/**
 * Request for trial license generation to master
 */
private static class RegisterTrialLicenseRequest extends TransportRequest {
    private int maxNodes;
    private String feature;
    private TimeValue duration;

    // no-arg constructor used for deserialization (see newInstance() in the handler)
    private RegisterTrialLicenseRequest() {}

    private RegisterTrialLicenseRequest(String feature, TimeValue duration, int maxNodes) {
        this.maxNodes = maxNodes;
        this.feature = feature;
        this.duration = duration;
    }

    // wire format: maxNodes (vint), feature (string), duration millis (vlong);
    // readFrom and writeTo must keep the same field order
    @Override
    public void readFrom(StreamInput in) throws IOException {
        super.readFrom(in);
        maxNodes = in.readVInt();
        feature = in.readString();
        duration = new TimeValue(in.readVLong(), TimeUnit.MILLISECONDS);
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        super.writeTo(out);
        out.writeVInt(maxNodes);
        out.writeString(feature);
        out.writeVLong(duration.getMillis());
    }
}
/**
 * Request handler for trial license generation to master
 */
private class RegisterTrialLicenseRequestHandler extends BaseTransportRequestHandler<RegisterTrialLicenseRequest> {

    @Override
    public RegisterTrialLicenseRequest newInstance() {
        return new RegisterTrialLicenseRequest();
    }

    // runs on the master node: submit the trial license cluster state update
    // and acknowledge the sender with an empty response
    @Override
    public void messageReceived(RegisterTrialLicenseRequest request, TransportChannel channel) throws Exception {
        registerTrialLicense(request);
        channel.sendResponse(TransportResponse.Empty.INSTANCE);
    }

    @Override
    public String executor() {
        return ThreadPool.Names.SAME;
    }
}
}
|
src/main/java/org/elasticsearch/license/plugin/core/LicensesService.java
|
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.license.plugin.core;
import net.nicholaswilliams.java.licensing.exception.ExpiredLicenseException;
import net.nicholaswilliams.java.licensing.exception.InvalidLicenseException;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.cluster.*;
import org.elasticsearch.cluster.ack.ClusterStateUpdateResponse;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.collect.ImmutableMap;
import org.elasticsearch.common.collect.ImmutableSet;
import org.elasticsearch.common.collect.Sets;
import org.elasticsearch.common.component.AbstractLifecycleComponent;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.inject.Singleton;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException;
import org.elasticsearch.gateway.GatewayService;
import org.elasticsearch.license.core.ESLicense;
import org.elasticsearch.license.manager.ESLicenseManager;
import org.elasticsearch.license.plugin.action.delete.DeleteLicenseRequest;
import org.elasticsearch.license.plugin.action.put.PutLicenseRequest;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.*;
import java.io.IOException;
import java.util.*;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicReference;
import static org.elasticsearch.license.core.ESLicenses.reduceAndMap;
/**
* Service responsible for managing {@link org.elasticsearch.license.plugin.core.LicensesMetaData}
* Interfaces through which this is exposed are:
* - LicensesManagerService - responsible for managing signed and one-time-trial licenses
* - LicensesClientService - responsible for feature registration and notification to consumer plugin(s)
* <p/>
*/
@Singleton
public class LicensesService extends AbstractLifecycleComponent<LicensesService> implements ClusterStateListener, LicensesManagerService, LicensesClientService {
public static final String REGISTER_TRIAL_LICENSE_ACTION_NAME = "internal:cluster/licenses/register_trial_license";
private final ESLicenseManager licenseManager;
private final ClusterService clusterService;
private final ThreadPool threadPool;
private final TransportService transportService;
/**
* Currently active consumers to notify to
*/
private final List<ListenerHolder> registeredListeners = new CopyOnWriteArrayList<>();
/**
* Currently pending consumers that has to be registered
*/
private final Queue<ListenerHolder> pendingListeners = new ConcurrentLinkedQueue<>();
/**
* Currently active scheduledNotifications
* All finished notifications will be cleared in {@link #clusterChanged(org.elasticsearch.cluster.ClusterChangedEvent)}
* and {@link #scheduleNextNotification(long)}
*/
private final Queue<ScheduledFuture> scheduledNotifications = new ConcurrentLinkedQueue<>();
/**
* The last licensesMetaData that has been notified by {@link #notifyFeatures(LicensesMetaData)}
*/
private final AtomicReference<LicensesMetaData> lastObservedLicensesState;
@Inject
public LicensesService(Settings settings, ClusterService clusterService, ThreadPool threadPool, TransportService transportService, ESLicenseManager licenseManager) {
    super(settings);
    this.clusterService = clusterService;
    this.licenseManager = licenseManager;
    this.threadPool = threadPool;
    this.transportService = transportService;
    this.lastObservedLicensesState = new AtomicReference<>(null);
    // only master-eligible nodes handle trial license generation requests
    if (DiscoveryNode.masterNode(settings)) {
        transportService.registerHandler(REGISTER_TRIAL_LICENSE_ACTION_NAME, new RegisterTrialLicenseRequestHandler());
    }
}
/**
 * {@inheritDoc}
 * <p/>
 * Verifies the submitted licenses first; only when all are VALID is a cluster
 * state update submitted that adds the new signatures to the licenses metadata.
 */
@Override
public void registerLicenses(final PutLicenseRequestHolder requestHolder, final ActionListener<LicensesUpdateResponse> listener) {
    final PutLicenseRequest request = requestHolder.request;
    final Set<ESLicense> newLicenses = Sets.newHashSet(request.licenses());
    LicensesStatus status = checkLicenses(newLicenses);
    if (status == LicensesStatus.VALID) {
        clusterService.submitStateUpdateTask(requestHolder.source, new AckedClusterStateUpdateTask<LicensesUpdateResponse>(request, listener) {
            @Override
            protected LicensesUpdateResponse newResponse(boolean acknowledged) {
                return new LicensesUpdateResponse(acknowledged, LicensesStatus.VALID);
            }

            @Override
            public ClusterState execute(ClusterState currentState) throws Exception {
                MetaData metaData = currentState.metaData();
                MetaData.Builder mdBuilder = MetaData.builder(currentState.metaData());
                LicensesMetaData currentLicenses = metaData.custom(LicensesMetaData.TYPE);
                final LicensesWrapper licensesWrapper = LicensesWrapper.wrap(currentLicenses);
                Set<String> newSignatures = licenseManager.toSignatures(newLicenses);
                Set<String> newLicenseSignatures = Sets.union(licensesWrapper.signatures, newSignatures);
                // only publish a new cluster state when at least one signature is actually new
                if (newLicenseSignatures.size() != licensesWrapper.signatures.size()) {
                    LicensesMetaData newLicensesMetaData = new LicensesMetaData(newLicenseSignatures, licensesWrapper.encodedTrialLicenses);
                    mdBuilder.putCustom(LicensesMetaData.TYPE, newLicensesMetaData);
                    return ClusterState.builder(currentState).metaData(mdBuilder).build();
                }
                return currentState;
            }
        });
    } else {
        // NOTE(review): responds with acknowledged=true even though verification
        // failed; callers must inspect status() — confirm this is intended
        listener.onResponse(new LicensesUpdateResponse(true, status));
    }
}
/**
 * Cluster state update response for license registration that additionally
 * carries the outcome of license verification.
 */
public static class LicensesUpdateResponse extends ClusterStateUpdateResponse {
    // final: assigned exactly once in the constructor and never mutated
    private final LicensesStatus status;

    /**
     * @param acknowledged whether the cluster state update was acknowledged
     * @param status       outcome of license verification for the request
     */
    public LicensesUpdateResponse(boolean acknowledged, LicensesStatus status) {
        super(acknowledged);
        this.status = status;
    }

    /** @return the license verification status for the update request */
    public LicensesStatus status() {
        return status;
    }
}
/**
 * {@inheritDoc}
 * <p/>
 * Removes the signed license(s) for the requested feature(s) from the cluster
 * state via an acked cluster state update task. Trial licenses are not touched.
 */
@Override
public void removeLicenses(final DeleteLicenseRequestHolder requestHolder, final ActionListener<ClusterStateUpdateResponse> listener) {
    final DeleteLicenseRequest request = requestHolder.request;
    clusterService.submitStateUpdateTask(requestHolder.source, new AckedClusterStateUpdateTask<ClusterStateUpdateResponse>(request, listener) {
        @Override
        protected ClusterStateUpdateResponse newResponse(boolean acknowledged) {
            return new ClusterStateUpdateResponse(acknowledged);
        }

        @Override
        public ClusterState execute(ClusterState currentState) throws Exception {
            MetaData metaData = currentState.metaData();
            LicensesMetaData currentLicenses = metaData.custom(LicensesMetaData.TYPE);
            final LicensesWrapper licensesWrapper = LicensesWrapper.wrap(currentLicenses);
            Set<ESLicense> currentSignedLicenses = licensesWrapper.signedLicenses(licenseManager);
            // collect the signed licenses whose feature appears in the delete request
            Set<ESLicense> licensesToDelete = new HashSet<>();
            for (ESLicense license : currentSignedLicenses) {
                if (request.features().contains(license.feature())) {
                    licensesToDelete.add(license);
                }
            }
            if (!licensesToDelete.isEmpty()) {
                Set<ESLicense> reducedLicenses = Sets.difference(currentSignedLicenses, licensesToDelete);
                Set<String> newSignatures = licenseManager.toSignatures(reducedLicenses);
                // rebuild the metadata with the remaining signatures; trial licenses are preserved as-is
                LicensesMetaData newLicensesMetaData = new LicensesMetaData(newSignatures, licensesWrapper.encodedTrialLicenses);
                MetaData.Builder mdBuilder = MetaData.builder(currentState.metaData());
                mdBuilder.putCustom(LicensesMetaData.TYPE, newLicensesMetaData);
                return ClusterState.builder(currentState).metaData(mdBuilder).build();
            } else {
                // nothing matched the requested features: leave the cluster state unchanged
                return currentState;
            }
        }
    });
}
/**
 * {@inheritDoc}
 * <p/>
 * Collects the feature names of all registered listeners that are
 * currently in the enabled state.
 */
@Override
public Set<String> enabledFeatures() {
    final Set<String> features = Sets.newHashSet();
    for (ListenerHolder registered : registeredListeners) {
        if (!registered.enabled.get()) {
            continue;
        }
        features.add(registered.feature);
    }
    return features;
}
/**
 * {@inheritDoc}
 * <p/>
 * Returns one license per feature (the one with the latest expiry date),
 * sorted by issue date with the most recently issued first. Expired
 * licenses are intentionally kept (see comment below).
 */
@Override
public List<ESLicense> getLicenses() {
    LicensesMetaData currentMetaData = clusterService.state().metaData().custom(LicensesMetaData.TYPE);
    if (currentMetaData != null) {
        // don't use ESLicenses.reduceAndMap, as it will merge out expired licenses
        Set<ESLicense> licenses = Sets.union(licenseManager.fromSignatures(currentMetaData.getSignatures()),
                TrialLicenseUtils.fromEncodedTrialLicenses(currentMetaData.getEncodedTrialLicenses()));
        // bucket license for feature with the latest expiry date
        Map<String, ESLicense> licenseMap = new HashMap<>();
        for (ESLicense license : licenses) {
            if (!licenseMap.containsKey(license.feature())) {
                licenseMap.put(license.feature(), license);
            } else {
                ESLicense prevLicense = licenseMap.get(license.feature());
                if (license.expiryDate() > prevLicense.expiryDate()) {
                    licenseMap.put(license.feature(), license);
                }
            }
        }
        // sort the licenses by issue date (newest first)
        List<ESLicense> reducedLicenses = new ArrayList<>(licenseMap.values());
        Collections.sort(reducedLicenses, new Comparator<ESLicense>() {
            @Override
            public int compare(ESLicense license1, ESLicense license2) {
                // BUGFIX: "(int) (license2.issueDate() - license1.issueDate())" can
                // overflow/truncate for large long differences, yielding an
                // inconsistent ordering; Long.compare is safe for all values
                return Long.compare(license2.issueDate(), license1.issueDate());
            }
        });
        return reducedLicenses;
    }
    return Collections.emptyList();
}
/**
 * Verifies the given licenses after reducing them to one effective
 * license per feature.
 */
private LicensesStatus checkLicenses(Set<ESLicense> licenses) {
    // reduceAndMap keeps a single effective license per feature before verification
    return checkLicenses(reduceAndMap(licenses));
}
/**
 * Verifies the given feature-to-license map against the license manager.
 *
 * @return VALID when verification succeeds, EXPIRED or INVALID when the
 *         corresponding verification exception is thrown
 */
private LicensesStatus checkLicenses(Map<String, ESLicense> licenseMap) {
    try {
        // verification throws on the first expired/invalid license
        licenseManager.verifyLicenses(licenseMap);
        return LicensesStatus.VALID;
    } catch (ExpiredLicenseException e) {
        return LicensesStatus.EXPIRED;
    } catch (InvalidLicenseException e) {
        return LicensesStatus.INVALID;
    }
}
/**
 * Master-only operation to generate a one-time trial license for a feature.
 * The trial license is only generated and stored if the current cluster state metaData
 * has no signed/one-time-trial license for the feature in question
 */
private void registerTrialLicense(final RegisterTrialLicenseRequest request) {
    clusterService.submitStateUpdateTask("register trial license []", new ProcessedClusterStateUpdateTask() {
        @Override
        public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) {
            // Change to debug
            logger.info("Processed Trial License registration");
            LicensesMetaData licensesMetaData = newState.metaData().custom(LicensesMetaData.TYPE);
            logLicenseMetaDataStats("new", licensesMetaData);
        }

        @Override
        public ClusterState execute(ClusterState currentState) throws Exception {
            MetaData metaData = currentState.metaData();
            MetaData.Builder mdBuilder = MetaData.builder(currentState.metaData());
            LicensesMetaData currentLicensesMetaData = metaData.custom(LicensesMetaData.TYPE);
            final LicensesWrapper licensesWrapper = LicensesWrapper.wrap(currentLicensesMetaData);
            // do not generate a trial license for a feature that already has a signed/trial license
            if (checkTrialLicenseGenerationCondition(request.feature, licensesWrapper)) {
                // union of the existing trial licenses plus the newly generated one
                Set<String> newTrialLicenses = Sets.union(licensesWrapper.encodedTrialLicenses,
                        Sets.newHashSet(generateEncodedTrialLicense(request.feature, request.duration, request.maxNodes)));
                final LicensesMetaData newLicensesMetaData = new LicensesMetaData(
                        licensesWrapper.signatures, newTrialLicenses);
                mdBuilder.putCustom(LicensesMetaData.TYPE, newLicensesMetaData);
                return ClusterState.builder(currentState).metaData(mdBuilder).build();
            }
            // feature already licensed: leave the cluster state untouched
            return currentState;
        }

        @Override
        public void onFailure(String source, @Nullable Throwable t) {
            // NOTE(review): logged at info level — consider warn/error for update failures
            logger.info("LicensesService: " + source, t);
        }

        /**
         * @return true when no signed or trial license exists for {@code feature},
         *         false otherwise
         */
        private boolean checkTrialLicenseGenerationCondition(String feature, LicensesWrapper licensesWrapper) {
            for (ESLicense license : Sets.union(licensesWrapper.signedLicenses(licenseManager),
                    licensesWrapper.trialLicenses())) {
                if (license.feature().equals(feature)) {
                    return false;
                }
            }
            return true;
        }

        /**
         * Builds and encodes a trial license issued to this cluster for the
         * given feature, duration and node limit.
         */
        private String generateEncodedTrialLicense(String feature, TimeValue duration, int maxNodes) {
            return TrialLicenseUtils.toEncodedTrialLicense(
                    TrialLicenseUtils.builder()
                            .issuedTo(clusterService.state().getClusterName().value())
                            .issueDate(System.currentTimeMillis())
                            .duration(duration)
                            .feature(feature)
                            .maxNodes(maxNodes)
                            .build()
            );
        }
    });
}
/**
 * Registers this service as a cluster-state listener on startup so license
 * notifications can react to metadata changes.
 */
@Override
protected void doStart() throws ElasticsearchException {
    // resolved the long-standing "Change to debug" TODO: startup tracing
    // belongs at debug level
    logger.debug("Started LicensesService");
    clusterService.add(this);
}
/**
 * Tears down licensing state on shutdown: stops listening to cluster-state
 * changes, cancels outstanding expiry notifications, disables all registered
 * features, and clears the internal queues.
 */
@Override
protected void doStop() throws ElasticsearchException {
    clusterService.remove(this);
    // cancel all notifications
    for (ScheduledFuture pendingNotification : scheduledNotifications) {
        pendingNotification.cancel(true);
    }
    // notify features to be disabled
    for (ListenerHolder registeredHolder : registeredListeners) {
        registeredHolder.disableFeatureIfNeeded();
    }
    // clear all handlers
    registeredListeners.clear();
    // empty out notification queue
    scheduledNotifications.clear();
    lastObservedLicensesState.set(null);
}
/**
 * Final close: unregisters the trial-license transport handler.
 * Cluster-service cleanup happens earlier, in {@link #doStop()}.
 */
@Override
protected void doClose() throws ElasticsearchException {
    logger.info("Closing LicensesService");
    transportService.removeHandler(REGISTER_TRIAL_LICENSE_ACTION_NAME);
}
/**
 * When there is no global block on {@link org.elasticsearch.gateway.GatewayService#STATE_NOT_RECOVERED_BLOCK}:
 * - tries to register any {@link #pendingListeners} by calling {@link #registeredListeners}
 * - if any {@link #pendingListeners} are registered successfully or if previous cluster state had a block on
 * {@link org.elasticsearch.gateway.GatewayService#STATE_NOT_RECOVERED_BLOCK}, calls
 * {@link #notifyFeaturesAndScheduleNotification(LicensesMetaData)}
 * - else calls {@link #notifyFeaturesAndScheduleNotificationIfNeeded(LicensesMetaData)}
 */
@Override
public void clusterChanged(ClusterChangedEvent event) {
    final ClusterState currentClusterState = event.state();
    final ClusterState previousClusterState = event.previousState();
    if (!currentClusterState.blocks().hasGlobalBlock(GatewayService.STATE_NOT_RECOVERED_BLOCK)) {
        LicensesMetaData oldLicensesMetaData = previousClusterState.getMetaData().custom(LicensesMetaData.TYPE);
        LicensesMetaData currentLicensesMetaData = currentClusterState.getMetaData().custom(LicensesMetaData.TYPE);
        logLicenseMetaDataStats("old", oldLicensesMetaData);
        logLicenseMetaDataStats("new", currentLicensesMetaData);
        // Check pending feature registrations and try to complete registrations
        boolean addedNewRegisteredListener = false;
        if (!pendingListeners.isEmpty()) {
            ListenerHolder pendingRegistrationLister;
            while ((pendingRegistrationLister = pendingListeners.poll()) != null) {
                boolean masterAvailable = registerListener(pendingRegistrationLister);
                logger.info("trying to register pending listener for " + pendingRegistrationLister.feature + " masterAvailable: " + masterAvailable);
                if (!masterAvailable) {
                    // master is not available: re-queue this listener and stop draining pendingListeners
                    pendingListeners.add(pendingRegistrationLister);
                    break;
                } else {
                    logger.info("successfully registered listener for: " + pendingRegistrationLister.feature);
                    registeredListeners.add(pendingRegistrationLister);
                    // make sure to notify new registered feature;
                    // notifications could have been scheduled for it before it was registered
                    addedNewRegisteredListener = true;
                }
            }
        }
        // notify all interested plugins
        // TODO: change the logger.info calls below to logger.debug once the feature stabilizes
        if (previousClusterState.blocks().hasGlobalBlock(GatewayService.STATE_NOT_RECOVERED_BLOCK) || addedNewRegisteredListener) {
            logger.info("calling notifyFeaturesAndScheduleNotification from clusterChanged");
            notifyFeaturesAndScheduleNotification(currentLicensesMetaData);
        } else {
            logger.info("calling notifyFeaturesAndScheduleNotificationIfNeeded from clusterChanged");
            notifyFeaturesAndScheduleNotificationIfNeeded(currentLicensesMetaData);
        }
    } else {
        logger.info("clusterChanged: no action [has STATE_NOT_RECOVERED_BLOCK]");
    }
}
/**
 * Calls {@link #notifyFeaturesAndScheduleNotification(LicensesMetaData)} with <code>currentLicensesMetaData</code>
 * if it was not already notified on
 */
private void notifyFeaturesAndScheduleNotificationIfNeeded(LicensesMetaData currentLicensesMetaData) {
    final LicensesMetaData lastNotified = lastObservedLicensesState.get();
    if (lastNotified == null || !lastNotified.equals(currentLicensesMetaData)) {
        notifyFeaturesAndScheduleNotification(currentLicensesMetaData);
    } else {
        logger.info("currentLicensesMetaData has been already notified on");
    }
}
/**
 * Calls {@link #notifyFeatures(LicensesMetaData)} with <code>currentLicensesMetaData</code>
 * if needed, also schedules the earliest expiry notification for registered feature(s)
 */
private void notifyFeaturesAndScheduleNotification(LicensesMetaData currentLicensesMetaData) {
    final long scheduleFrequency = notifyFeatures(currentLicensesMetaData);
    if (scheduleFrequency != -1l) {
        scheduleNextNotification(scheduleFrequency);
    }
}
/**
 * Checks license expiry for all the registered feature(s), upon completion
 * sets <code>currentLicensesMetaData</code> to {@link #lastObservedLicensesState}
 * to ensure the same license(s) are not notified on from
 * {@link #clusterChanged(org.elasticsearch.cluster.ClusterChangedEvent)}
 *
 * @return -1 if there are no expiring license(s) for any registered feature(s), else
 * returns the minimum of the expiry times of all the registered feature(s) to
 * schedule an expiry notification
 */
private long notifyFeatures(LicensesMetaData currentLicensesMetaData) {
    long nextScheduleFrequency = -1l;
    // small buffer added to each expiry delay so the follow-up notification
    // fires shortly AFTER the license has actually expired
    long offset = TimeValue.timeValueMillis(100).getMillis();
    StringBuilder sb = new StringBuilder("Registered listeners: [ ");
    for (ListenerHolder listenerHolder : registeredListeners) {
        sb.append("( ");
        sb.append("feature:");
        sb.append(listenerHolder.feature);
        sb.append(", ");
        long expiryDate;
        if ((expiryDate = expiryDateForFeature(listenerHolder.feature, currentLicensesMetaData)) != -1l) {
            sb.append(" license expiry: ");
            sb.append(expiryDate);
            sb.append(", ");
        }
        // negative when the license has already expired, or when no license was found
        // (expiryDate == -1l makes this strongly negative as well)
        long expiryDuration = expiryDate - System.currentTimeMillis();
        if (expiryDate == -1l) {
            sb.append("no trial/signed license found");
            sb.append(", ");
        } else {
            sb.append("license expires in: ");
            sb.append(TimeValue.timeValueMillis(expiryDuration).toString());
            sb.append(", ");
        }
        if (expiryDuration > 0l) {
            // license still valid: enable the feature and track the earliest upcoming expiry
            sb.append("calling enableFeatureIfNeeded");
            listenerHolder.enableFeatureIfNeeded();
            if (nextScheduleFrequency == -1l) {
                nextScheduleFrequency = expiryDuration + offset;
            } else {
                nextScheduleFrequency = Math.min(expiryDuration + offset, nextScheduleFrequency);
            }
        } else {
            // expired or missing license: disable the feature
            sb.append("calling disableFeatureIfNeeded");
            listenerHolder.disableFeatureIfNeeded();
        }
        sb.append(" )");
    }
    sb.append("]");
    // TODO: change to logger.debug once the feature stabilizes
    logger.info(sb.toString());
    logLicenseMetaDataStats("Setting last observed metaData", currentLicensesMetaData);
    lastObservedLicensesState.set(currentLicensesMetaData);
    if (nextScheduleFrequency == -1l) {
        logger.info("no need to schedule next notification");
    } else {
        logger.info("next notification time: " + TimeValue.timeValueMillis(nextScheduleFrequency).toString());
    }
    return nextScheduleFrequency;
}
/**
 * Logs the number of signed and trial licenses held in the given metadata
 * (zeros when the metadata is null), prefixed with {@code prefix}.
 */
private void logLicenseMetaDataStats(String prefix, LicensesMetaData licensesMetaData) {
    if (licensesMetaData == null) {
        logger.info(prefix + " LicensesMetaData: signedLicenses: 0 trialLicenses: 0");
        return;
    }
    logger.info(prefix + " LicensesMetaData: signedLicenses: " + licensesMetaData.getSignatures().size()
            + " trialLicenses: " + licensesMetaData.getEncodedTrialLicenses().size());
}
/**
 * {@inheritDoc}
 */
@Override
public void register(String feature, TrialLicenseOptions trialLicenseOptions, Listener listener) {
    final ListenerHolder listenerHolder = new ListenerHolder(feature, trialLicenseOptions, listener);
    if (!registerListener(listenerHolder)) {
        // registration could not complete now; retried on a later cluster-state change
        logger.info("add listener for: " + listenerHolder.feature + " to pending registration queue");
        pendingListeners.add(listenerHolder);
        return;
    }
    logger.info("successfully registered listener for: " + listenerHolder.feature);
    registeredListeners.add(listenerHolder);
}
/**
 * Notifies new feature listener if it already has a signed license
 * if new feature has a non-null trial license option, a master node request is made to generate the trial license
 * if no trial license option is specified for the feature and no signed license is found,
 * then notifies features to be disabled
 *
 * @param listenerHolder of the feature to register
 * @return true if registration has been completed, false otherwise (if masterNode is not available & trial license spec is provided
 * or if there is a global block on {@link org.elasticsearch.gateway.GatewayService#STATE_NOT_RECOVERED_BLOCK})
 */
private boolean registerListener(final ListenerHolder listenerHolder) {
    logger.info("Registering listener for " + listenerHolder.feature);
    ClusterState currentState = clusterService.state();
    if (currentState.blocks().hasGlobalBlock(GatewayService.STATE_NOT_RECOVERED_BLOCK)) {
        logger.info("Store as pendingRegistration [cluster has NOT_RECOVERED_BLOCK]");
        return false;
    }
    LicensesMetaData currentMetaData = currentState.metaData().custom(LicensesMetaData.TYPE);
    if (expiryDateForFeature(listenerHolder.feature, currentMetaData) == -1l) {
        // does not have any license so generate a trial license
        TrialLicenseOptions options = listenerHolder.trialLicenseOptions;
        if (options != null) {
            // Trial license option is provided
            RegisterTrialLicenseRequest request = new RegisterTrialLicenseRequest(listenerHolder.feature,
                    options.duration, options.maxNodes);
            if (currentState.nodes().localNodeMaster()) {
                // this node is the master: generate the trial license locally
                logger.info("Executing trial license request");
                registerTrialLicense(request);
            } else {
                DiscoveryNode masterNode = currentState.nodes().masterNode();
                if (masterNode != null) {
                    logger.info("Sending trial license request to master");
                    transportService.sendRequest(masterNode,
                            REGISTER_TRIAL_LICENSE_ACTION_NAME, request, EmptyTransportResponseHandler.INSTANCE_SAME);
                } else {
                    // could not send the register-trial-license request: no master elected yet
                    logger.info("Store as pendingRegistration [master not available yet]");
                    return false;
                }
            }
        } else {
            // notify feature as clusterChangedEvent may not happen
            // as no trial or signed license has been found for feature
            // TODO: change to logger.debug once the feature stabilizes
            logger.info("Calling notifyFeaturesAndScheduleNotification [no trial license spec provided]");
            notifyFeaturesAndScheduleNotification(currentMetaData);
        }
    } else {
        // signed license already found for the new registered
        // feature, notify feature on registration
        logger.info("Calling notifyFeaturesAndScheduleNotification [signed/trial license available]");
        notifyFeaturesAndScheduleNotification(currentMetaData);
    }
    return true;
}
/**
 * Looks up the effective license for {@code feature} in the given metadata and
 * returns its expiry timestamp in milliseconds, or -1 when the feature has no
 * effective license.
 */
private long expiryDateForFeature(String feature, LicensesMetaData currentLicensesMetaData) {
    final Map<String, ESLicense> effectiveLicenses = getEffectiveLicenses(currentLicensesMetaData);
    // single map lookup; the original re-queried the map inside the log statement
    final ESLicense featureLicense = effectiveLicenses.get(feature);
    if (featureLicense != null) {
        logger.info("effective license for " + feature + " relative expiry: " +
                TimeValue.timeValueMillis(featureLicense.expiryDate() - System.currentTimeMillis()));
        return featureLicense.expiryDate();
    }
    logger.info("no effective license for " + feature);
    return -1l;
}
/**
 * Collects the signed and trial licenses stored in the given metadata and
 * reduces them to one effective license per feature via {@code reduceAndMap}.
 *
 * @return an immutable feature-to-license map; empty when {@code metaData} is null
 */
private Map<String, ESLicense> getEffectiveLicenses(LicensesMetaData metaData) {
    if (metaData == null) {
        // the previous implementation allocated an empty HashMap only to copy
        // it into an ImmutableMap; return the shared empty instance instead
        return ImmutableMap.of();
    }
    Set<ESLicense> esLicenses = new HashSet<>();
    esLicenses.addAll(licenseManager.fromSignatures(metaData.getSignatures()));
    esLicenses.addAll(TrialLicenseUtils.fromEncodedTrialLicenses(metaData.getEncodedTrialLicenses()));
    return reduceAndMap(esLicenses);
}
/**
 * Clears out any completed notification future from
 * {@link #scheduledNotifications}
 */
private void clearFinishedNotifications() {
    ScheduledFuture head;
    // stop draining as soon as the head of the queue is an unfinished future
    // (peek() returns null on an empty queue, ending the loop)
    while ((head = scheduledNotifications.peek()) != null && head.isDone()) {
        scheduledNotifications.poll();
    }
}
/**
 * Name of the thread pool used to run scheduled expiry-notification jobs.
 */
private String executorName() {
    return ThreadPool.Names.GENERIC;
}
/**
 * Schedules an expiry notification with a delay of <code>nextScheduleDelay</code>
 */
private void scheduleNextNotification(long nextScheduleDelay) {
    clearFinishedNotifications();
    final TimeValue delay = TimeValue.timeValueMillis(nextScheduleDelay);
    try {
        scheduledNotifications.add(threadPool.schedule(delay, executorName(), new LicensingClientNotificationJob()));
        logger.info("Scheduling next notification after: " + delay);
    } catch (EsRejectedExecutionException ex) {
        // scheduling was rejected (e.g. during shutdown); skip this notification round
        logger.info("Couldn't re-schedule licensing client notification job", ex);
    }
}
/**
 * Job for notifying on expired license(s) to registered feature(s)
 * In case of a global block on {@link org.elasticsearch.gateway.GatewayService#STATE_NOT_RECOVERED_BLOCK},
 * the notification is not run, instead the feature(s) would be notified on the next
 * {@link #clusterChanged(org.elasticsearch.cluster.ClusterChangedEvent)} with no global block
 */
public class LicensingClientNotificationJob implements Runnable {

    public LicensingClientNotificationJob() {}

    @Override
    public void run() {
        logger.info("Performing LicensingClientNotificationJob");
        // next clusterChanged event will deal with the missed notifications
        ClusterState currentClusterState = clusterService.state();
        if (!currentClusterState.blocks().hasGlobalBlock(GatewayService.STATE_NOT_RECOVERED_BLOCK)) {
            LicensesMetaData currentLicensesMetaData = currentClusterState.metaData().custom(LicensesMetaData.TYPE);
            long nextScheduleDelay = notifyFeatures(currentLicensesMetaData);
            if (nextScheduleDelay != -1l) {
                // NOTE: the previous try/catch around this call was dead code —
                // scheduleNextNotification already catches EsRejectedExecutionException
                // internally and never rethrows it
                scheduleNextNotification(nextScheduleDelay);
            }
        } else {
            logger.info("skip notification [STATE_NOT_RECOVERED_BLOCK]");
        }
    }
}
/**
 * Immutable pairing of a {@link PutLicenseRequest} with a short description
 * of where the request originated (used for cluster-state task sources).
 */
public static class PutLicenseRequestHolder {
    private final PutLicenseRequest request;
    private final String source;

    public PutLicenseRequestHolder(PutLicenseRequest request, String source) {
        this.request = request;
        this.source = source;
    }
}
/**
 * Immutable pairing of a {@link DeleteLicenseRequest} with a short description
 * of where the request originated (used for cluster-state task sources).
 */
public static class DeleteLicenseRequestHolder {
    private final DeleteLicenseRequest request;
    private final String source;

    public DeleteLicenseRequestHolder(DeleteLicenseRequest request, String source) {
        this.request = request;
        this.source = source;
    }
}
/**
 * Options used when generating a trial license for a feature:
 * how long the trial lasts and the maximum number of nodes it covers.
 */
public static class TrialLicenseOptions {
    final TimeValue duration;
    final int maxNodes;

    public TrialLicenseOptions(TimeValue duration, int maxNodes) {
        this.duration = duration;
        this.maxNodes = maxNodes;
    }
}
/**
 * Stores configuration and listener for a feature
 */
private class ListenerHolder {
    final String feature;
    final TrialLicenseOptions trialLicenseOptions;
    final Listener listener;
    // by default, a consumer plugin should be disabled; toggled atomically so
    // the enable/disable callbacks fire at most once per state transition
    final AtomicBoolean enabled = new AtomicBoolean(false);

    private ListenerHolder(String feature, TrialLicenseOptions trialLicenseOptions, Listener listener) {
        this.feature = feature;
        this.trialLicenseOptions = trialLicenseOptions;
        this.listener = listener;
    }

    // notifies the listener only on the disabled -> enabled transition
    private void enableFeatureIfNeeded() {
        if (enabled.compareAndSet(false, true)) {
            listener.onEnabled();
        }
    }

    // notifies the listener only on the enabled -> disabled transition
    private void disableFeatureIfNeeded() {
        if (enabled.compareAndSet(true, false)) {
            listener.onDisabled();
        }
    }
}
/**
 * Thin wrapper to work with {@link org.elasticsearch.license.plugin.core.LicensesMetaData}
 * Never mutates the wrapped metaData
 */
private static class LicensesWrapper {

    public static LicensesWrapper wrap(LicensesMetaData licensesMetaData) {
        return new LicensesWrapper(licensesMetaData);
    }

    // both default to empty sets so a null metaData wraps cleanly
    private ImmutableSet<String> signatures = ImmutableSet.of();
    private ImmutableSet<String> encodedTrialLicenses = ImmutableSet.of();

    private LicensesWrapper(LicensesMetaData licensesMetaData) {
        if (licensesMetaData != null) {
            this.signatures = ImmutableSet.copyOf(licensesMetaData.getSignatures());
            this.encodedTrialLicenses = ImmutableSet.copyOf(licensesMetaData.getEncodedTrialLicenses());
        }
    }

    // decodes the signed licenses from their stored signatures
    public Set<ESLicense> signedLicenses(ESLicenseManager licenseManager) {
        return licenseManager.fromSignatures(signatures);
    }

    // decodes the trial licenses from their encoded string form
    public Set<ESLicense> trialLicenses() {
        return TrialLicenseUtils.fromEncodedTrialLicenses(encodedTrialLicenses);
    }
}
/**
 * Request for trial license generation to master
 */
private static class RegisterTrialLicenseRequest extends TransportRequest {
    private int maxNodes;
    private String feature;
    private TimeValue duration;

    // no-arg constructor used when deserializing via readFrom(StreamInput)
    private RegisterTrialLicenseRequest() {}

    private RegisterTrialLicenseRequest(String feature, TimeValue duration, int maxNodes) {
        this.maxNodes = maxNodes;
        this.feature = feature;
        this.duration = duration;
    }

    @Override
    public void readFrom(StreamInput in) throws IOException {
        super.readFrom(in);
        // field order must mirror writeTo exactly
        maxNodes = in.readVInt();
        feature = in.readString();
        duration = new TimeValue(in.readVLong(), TimeUnit.MILLISECONDS);
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        super.writeTo(out);
        out.writeVInt(maxNodes);
        out.writeString(feature);
        // duration serialized as milliseconds and reconstructed with the same unit in readFrom
        out.writeVLong(duration.getMillis());
    }
}
/**
 * Request handler for trial license generation to master
 */
private class RegisterTrialLicenseRequestHandler extends BaseTransportRequestHandler<RegisterTrialLicenseRequest> {

    @Override
    public RegisterTrialLicenseRequest newInstance() {
        return new RegisterTrialLicenseRequest();
    }

    @Override
    public void messageReceived(RegisterTrialLicenseRequest request, TransportChannel channel) throws Exception {
        // generate the trial license on this (master) node, then ack the sender
        registerTrialLicense(request);
        channel.sendResponse(TransportResponse.Empty.INSTANCE);
    }

    @Override
    public String executor() {
        return ThreadPool.Names.SAME;
    }
}
}
|
LicensesService: Added Notification Documentation
Original commit: elastic/x-pack-elasticsearch@1337fbc9c73d484b856f75eaa9b89fad35a8457c
|
src/main/java/org/elasticsearch/license/plugin/core/LicensesService.java
|
LicensesService: Added Notification Documentation
|
|
Java
|
apache-2.0
|
69ad23cbac7b99a405ba04608e0054c0156d288f
| 0
|
Squarespace/template-compiler,Squarespace/template-compiler,Squarespace/template-compiler
|
/**
* Copyright (c) 2014 SQUARESPACE, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.squarespace.template;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.UnsupportedEncodingException;
import java.math.BigDecimal;
import java.net.URISyntaxException;
import java.net.URL;
import java.net.URLEncoder;
import java.nio.charset.StandardCharsets;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Enumeration;
import java.util.List;
import java.util.Map;
import java.util.function.Predicate;
import org.apache.commons.lang3.StringUtils;
import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.core.PrettyPrinter;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.JsonNodeType;
import com.fasterxml.jackson.databind.node.NumericNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.fasterxml.jackson.databind.node.TextNode;
import com.squarespace.cldrengine.api.Decimal;
/**
 * Utility methods used by various parts of the framework.
 */
public class GeneralUtils {

    private static final JsonFactory JSON_FACTORY = new JsonFactory();

    private static final PrettyPrinter PRETTY_PRINTER = new JsonPrettyPrinter();

    private GeneralUtils() {
    }

    /**
     * Return true if the string argument is null or empty, false otherwise.
     */
    public static boolean isEmpty(String s) {
        return s == null || s.isEmpty();
    }

    /**
     * Quick string to integer conversion, clamping negative values to zero.
     * Parses consecutive ASCII digits starting at {@code pos}, stopping at the
     * first non-digit character or at {@code length}.
     */
    public static long toPositiveLong(CharSequence seq, int pos, int length) {
        long n = 0;
        int i = pos;
        while (i < length) {
            char c = seq.charAt(i);
            if (c >= '0' && c <= '9') {
                n *= 10;
                n += c - '0';
            } else {
                break;
            }
            i++;
        }
        return n;
    }

    /**
     * Convert an opaque JSON node to Decimal using the most correct
     * conversion method. Returns null when the node cannot be parsed
     * as a number.
     */
    public static Decimal nodeToDecimal(JsonNode node) {
        JsonNodeType type = node.getNodeType();
        if (type == JsonNodeType.NUMBER) {
            return numericToDecimal((NumericNode)node);
        } else {
            try {
                return new Decimal(node.asText());
            } catch (ArithmeticException | NumberFormatException e) {
                // Fall through..
            }
        }
        return null;
    }

    /**
     * Convert an opaque JSON node to BigDecimal using the most correct
     * conversion method. Returns null when the node cannot be parsed
     * as a number.
     */
    public static BigDecimal nodeToBigDecimal(JsonNode node) {
        JsonNodeType type = node.getNodeType();
        if (type == JsonNodeType.NUMBER) {
            return numericToBigDecimal((NumericNode)node);
        } else {
            try {
                return new BigDecimal(node.asText());
            } catch (ArithmeticException | NumberFormatException e) {
                // Fall through..
            }
        }
        return null;
    }

    /**
     * Convert a numeric JSON node to Decimal using the most correct
     * conversion method.
     */
    private static Decimal numericToDecimal(NumericNode node) {
        switch (node.numberType()) {
            case INT:
            case LONG:
                return new Decimal(node.asLong());
            case FLOAT:
            case DOUBLE:
                return new Decimal(node.asDouble());
            case BIG_DECIMAL:
            case BIG_INTEGER:
            default:
                return new Decimal(node.decimalValue().toString());
        }
    }

    /**
     * Convert a numeric JSON node to BigDecimal using the most correct
     * conversion method.
     */
    private static BigDecimal numericToBigDecimal(NumericNode node) {
        switch (node.numberType()) {
            case INT:
            case LONG:
                return BigDecimal.valueOf(node.asLong());
            case FLOAT:
            case DOUBLE:
                return BigDecimal.valueOf(node.asDouble());
            case BIG_DECIMAL:
            case BIG_INTEGER:
            default:
                return node.decimalValue();
        }
    }

    /**
     * Executes a compiled instruction using the given context and JSON node.
     * Optionally hides all context above the JSON node, treating it as a root.
     * This is a helper method for formatters which need to execute templates to
     * produce their output.
     */
    public static JsonNode executeTemplate(Context ctx, Instruction inst, JsonNode node, boolean privateContext)
        throws CodeExecuteException {
        return executeTemplate(ctx, inst, node, privateContext, null);
    }

    /**
     * Executes a compiled instruction using the given context and JSON node.
     * Optionally hides all context above the JSON node, treating it as a root.
     * This is a helper method for formatters which need to execute templates to
     * produce their output.
     * Optionally allows passing in an 'argvar' node which will be defined inside the
     * template as '@args'.
     */
    public static JsonNode executeTemplate(Context ctx, Instruction inst, JsonNode node, boolean privateContext,
        ObjectNode argvar)
        throws CodeExecuteException {
        // Temporarily swap the buffers to capture all output of the partial.
        StringBuilder buf = new StringBuilder();
        StringBuilder origBuf = ctx.swapBuffer(buf);
        try {
            // If we want to hide the parent context during execution, create a new
            // temporary sub-context.
            ctx.push(node);
            ctx.frame().stopResolution(privateContext);
            if (argvar != null) {
                ctx.setVar("@args", argvar);
            }
            ctx.execute(inst);
        } finally {
            // Always restore the original buffer and pop the temporary frame.
            ctx.swapBuffer(origBuf);
            ctx.pop();
        }
        return new TextNode(buf.toString());
    }

    /**
     * Loads a resource from the Java package relative to {@code cls}, raising a
     * CodeException if it fails.
     */
    public static String loadResource(Class<?> cls, String path) throws CodeException {
        try (InputStream stream = cls.getResourceAsStream(path)) {
            if (stream == null) {
                throw new CodeExecuteException(resourceLoadError(path, "not found"));
            }
            return streamToString(stream);
        } catch (IOException e) {
            throw new CodeExecuteException(resourceLoadError(path, e.toString()));
        }
    }

    /**
     * Reads the stream fully and decodes it as UTF-8.
     */
    public static String streamToString(InputStream stream) throws IOException {
        // Use the Charset constant rather than the charset name string; this
        // removes the (impossible for UTF-8) UnsupportedEncodingException path.
        try (InputStreamReader reader = new InputStreamReader(stream, StandardCharsets.UTF_8)) {
            StringBuilder buf = new StringBuilder();
            char[] buffer = new char[4096];
            int n = 0;
            while (-1 != (n = reader.read(buffer))) {
                buf.append(buffer, 0, n);
            }
            return buf.toString();
        }
    }

    /**
     * Returns true if a classpath resource named {@code fileName}, resolved
     * relative to {@code cls}'s package, exists.
     */
    public static boolean resourceExists(Class<?> cls, String fileName) {
        try {
            String name = resolveName(cls, fileName);
            Enumeration<URL> urls = cls.getClassLoader().getResources(name);
            return urls.hasMoreElements();
        } catch (IOException e) {
            throw new RuntimeException("Failed to list resources", e);
        }
    }

    /**
     * Lists the file names in {@code cls}'s package directory (file-protocol
     * classpath entries only) that satisfy {@code predicate}, sorted.
     * Throws RuntimeException when no file matches.
     */
    public static List<Path> list(Class<?> cls, Predicate<Path> predicate) {
        try {
            Enumeration<URL> urls = cls.getClassLoader().getResources(resolveName(cls, "."));
            List<Path> result = new ArrayList<>();
            while (urls.hasMoreElements()) {
                URL url = urls.nextElement();
                if (!url.getProtocol().equals("file")) {
                    continue;
                }
                File dir = Paths.get(url.toURI()).toFile();
                // (toFile() never returns null, so no null check is needed here)
                if (!dir.isDirectory()) {
                    continue;
                }
                // listFiles() returns null if an I/O error occurs while reading
                // the directory; guard against an NPE in that case.
                File[] files = dir.listFiles();
                if (files == null) {
                    continue;
                }
                for (File file : files) {
                    Path path = file.toPath();
                    if (predicate.test(path)) {
                        result.add(path.getFileName());
                    }
                }
            }
            if (result.isEmpty()) {
                throw new RuntimeException("No files matched predicate");
            }
            Collections.sort(result);
            return result;
        } catch (IOException | URISyntaxException e) {
            throw new RuntimeException("Failed to list resources", e);
        }
    }

    /**
     * Builds a RESOURCE_LOAD error for the given resource path and message.
     */
    private static ErrorInfo resourceLoadError(String path, String message) {
        ErrorInfo info = new ErrorInfo(ExecuteErrorType.RESOURCE_LOAD);
        info.name(path);
        info.data(message);
        return info;
    }

    /**
     * Resolves a resource name relative to {@code cls}'s package: a leading '/'
     * marks an absolute classpath name (the slash is stripped); otherwise the
     * package path of {@code cls} (unwrapping array types) is prepended.
     */
    private static String resolveName(Class<?> cls, String name) {
        if (name == null) {
            return name;
        }
        if (!name.startsWith("/")) {
            while (cls.isArray()) {
                cls = cls.getComponentType();
            }
            String baseName = cls.getName();
            int index = baseName.lastIndexOf('.');
            if (index != -1) {
                name = baseName.substring(0, index).replace('.', '/') + "/" + name;
            }
        } else {
            name = name.substring(1);
        }
        return name;
    }

    /**
     * Map.getOrDefault only available in JDK 8., <s>For now we support JDK 7.</s>
     * We now support JDK 8 but leaving this in place anyway.
     */
    public static <K, V> V getOrDefault(Map<K, V> map, K key, V defaultValue) {
        V value = map.get(key);
        return value == null ? defaultValue : value;
    }

    /**
     * Returns the translated string given the key in that localizedStrings node.
     * If key is not found, it returns the defaultValue.
     */
    public static String localizeOrDefault(JsonNode localizedStrings, String key, String defaultValue) {
        JsonNode node = localizedStrings.get(key);
        return node == null ? defaultValue : node.asText();
    }

    /**
     * Returns true if the first non-space character is one of the
     * valid starting characters for a JSON value; else false.
     * (Only the ' ' character is skipped, matching the original behavior.)
     */
    public static boolean isJsonStart(String raw) {
        int size = raw.length();
        int index = 0;
        while (index < size) {
            char ch = raw.charAt(index);
            if (ch != ' ') {
                switch (ch) {
                    case '"':
                    case '-':
                    case '0':
                    case '1':
                    case '2':
                    case '3':
                    case '4':
                    case '5':
                    case '6':
                    case '7':
                    case '8':
                    case '9':
                    case '[':
                    case '{':
                        return true;

                    // Even when failSilently=true is passed, Jackson's decode method
                    // throws an exception and immediately swallows it.
                    // These string comparisons add more precision when trying to detect
                    // a JSON value without attempting to parse it. They are place-holders
                    // to cut down on Jackson exceptions until a long-term fix is made.
                    case 'f':
                        return raw.startsWith("false");
                    case 'n':
                        return raw.startsWith("null");
                    case 't':
                        return raw.startsWith("true");

                    default:
                        return false;
                }
            }
            index++;
        }
        return false;
    }

    /**
     * Checks the {@code parent} node to see if it contains one of the keys, and
     * returns the first that matches. If none match it returns {@link Constants#MISSING_NODE}
     */
    public static JsonNode getFirstMatchingNode(JsonNode parent, String... keys) {
        for (String key : keys) {
            JsonNode node = parent.path(key);
            if (!node.isMissingNode()) {
                return node;
            }
        }
        return Constants.MISSING_NODE;
    }

    /**
     * Formats the {@code node} as a string using the pretty printer.
     */
    public static String jsonPretty(JsonNode node) throws IOException {
        StringBuilder buf = new StringBuilder();
        JsonGenerator gen = JSON_FACTORY.createGenerator(new StringBuilderWriter(buf));
        gen.setPrettyPrinter(PRETTY_PRINTER);
        gen.setCodec(JsonUtils.getMapper());
        gen.writeTree(node);
        return buf.toString();
    }

    /**
     * Splits a variable name into its parts. Returns null for the special
     * name "@".
     */
    public static Object[] splitVariable(String name) {
        String[] parts = name.equals("@") ? null : StringUtils.split(name, '.');
        if (parts == null) {
            return null;
        }
        // Each segment of the key path can be either a String or an Integer.
        Object[] keys = new Object[parts.length];
        for (int i = 0, len = parts.length; i < len; i++) {
            keys[i] = allDigits(parts[i]) ? Integer.parseInt(parts[i], 10) : parts[i];
        }
        return keys;
    }

    /**
     * URL-encodes the string, using %20 for spaces instead of '+'.
     */
    public static String urlEncode(String val) {
        try {
            return URLEncoder.encode(val, "UTF-8").replace("+", "%20");
        } catch (UnsupportedEncodingException e) {
            // UTF-8 is always supported; return the input unchanged if it somehow isn't.
            return val;
        }
    }

    /**
     * Determines the boolean value of a node based on its type.
     */
    public static boolean isTruthy(JsonNode node) {
        if (node.isTextual()) {
            return !node.asText().equals("");
        }
        if (node.isNumber() || node.isBoolean()) {
            return node.asDouble() != 0;
        }
        if (node.isMissingNode() || node.isNull()) {
            return false;
        }
        return node.size() != 0;
    }

    /**
     * Returns the node's text when the node is truthy, else {@code defaultString}.
     */
    public static String ifString(JsonNode node, String defaultString) {
        return isTruthy(node) ? node.asText() : defaultString;
    }

    /**
     * Returns the node's double value when the node is truthy, else {@code defaultValue}.
     */
    public static double ifDouble(JsonNode node, double defaultValue) {
        return isTruthy(node) ? node.asDouble() : defaultValue;
    }

    /**
     * Obtains the text representation of a node, converting {@code null} to
     * empty string.
     */
    public static String eatNull(JsonNode node) {
        return node.isNull() ? "" : node.asText();
    }

    /**
     * Indicates if the string consists of all digits.
     */
    private static boolean allDigits(String str) {
        for (int i = 0, len = str.length(); i < len; i++) {
            if (!Character.isDigit(str.charAt(i))) {
                return false;
            }
        }
        return true;
    }

}
|
core/src/main/java/com/squarespace/template/GeneralUtils.java
|
/**
* Copyright (c) 2014 SQUARESPACE, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.squarespace.template;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.UnsupportedEncodingException;
import java.math.BigDecimal;
import java.net.URISyntaxException;
import java.net.URL;
import java.net.URLEncoder;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Enumeration;
import java.util.List;
import java.util.Map;
import java.util.function.Predicate;
import org.apache.commons.lang3.StringUtils;
import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.core.PrettyPrinter;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.JsonNodeType;
import com.fasterxml.jackson.databind.node.NumericNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.fasterxml.jackson.databind.node.TextNode;
import com.squarespace.cldrengine.api.Decimal;
/**
 * Utility methods used by various parts of the framework.
 */
public class GeneralUtils {

  private static final JsonFactory JSON_FACTORY = new JsonFactory();

  private static final PrettyPrinter PRETTY_PRINTER = new JsonPrettyPrinter();

  /** Utility class; not instantiable. */
  private GeneralUtils() {
  }

  /**
   * Return true if the string argument is null or empty, false otherwise.
   */
  public static boolean isEmpty(String s) {
    return s == null || s.isEmpty();
  }

  /**
   * Quick string to integer conversion, clamping negative values to zero.
   * Parses consecutive ASCII digits starting at {@code pos}, stopping at
   * the first non-digit or at {@code length}.
   */
  public static long toPositiveLong(CharSequence seq, int pos, int length) {
    long n = 0;
    int i = pos;
    while (i < length) {
      char c = seq.charAt(i);
      if (c >= '0' && c <= '9') {
        n *= 10;
        n += c - '0';
      } else {
        break;
      }
      i++;
    }
    return n;
  }

  /**
   * Convert an opaque JSON node to Decimal using the most correct
   * conversion method. Returns null if the node's text cannot be parsed.
   */
  public static Decimal nodeToDecimal(JsonNode node) {
    JsonNodeType type = node.getNodeType();
    if (type == JsonNodeType.NUMBER) {
      return numericToDecimal((NumericNode)node);
    } else {
      try {
        return new Decimal(node.asText());
      } catch (ArithmeticException | NumberFormatException e) {
        // Fall through..
      }
    }
    return null;
  }

  /**
   * Convert an opaque JSON node to BigDecimal using the most correct
   * conversion method. Returns null if the node's text cannot be parsed.
   */
  public static BigDecimal nodeToBigDecimal(JsonNode node) {
    JsonNodeType type = node.getNodeType();
    if (type == JsonNodeType.NUMBER) {
      return numericToBigDecimal((NumericNode)node);
    } else {
      try {
        return new BigDecimal(node.asText());
      } catch (ArithmeticException | NumberFormatException e) {
        // Fall through..
      }
    }
    return null;
  }

  /**
   * Convert a numeric JSON node to Decimal using the most correct
   * conversion method.
   */
  private static Decimal numericToDecimal(NumericNode node) {
    switch (node.numberType()) {
      case INT:
      case LONG:
        return new Decimal(node.asLong());
      case FLOAT:
      case DOUBLE:
        return new Decimal(node.asDouble());
      case BIG_DECIMAL:
      case BIG_INTEGER:
      default:
        return new Decimal(node.decimalValue().toString());
    }
  }

  /**
   * Convert a numeric JSON node to BigDecimal using the most correct
   * conversion method.
   */
  private static BigDecimal numericToBigDecimal(NumericNode node) {
    switch (node.numberType()) {
      case INT:
      case LONG:
        return BigDecimal.valueOf(node.asLong());
      case FLOAT:
      case DOUBLE:
        return BigDecimal.valueOf(node.asDouble());
      case BIG_DECIMAL:
      case BIG_INTEGER:
      default:
        return node.decimalValue();
    }
  }

  /**
   * Executes a compiled instruction using the given context and JSON node.
   * Optionally hides all context above the JSON node, treating it as a root.
   * This is a helper method for formatters which need to execute templates to
   * produce their output.
   */
  public static JsonNode executeTemplate(Context ctx, Instruction inst, JsonNode node, boolean privateContext)
      throws CodeExecuteException {
    return executeTemplate(ctx, inst, node, privateContext, null);
  }

  /**
   * Executes a compiled instruction using the given context and JSON node.
   * Optionally hides all context above the JSON node, treating it as a root.
   * This is a helper method for formatters which need to execute templates to
   * produce their output.
   * Optionally allows passing in an 'argvar' node which will be defined inside the
   * template as '@args'.
   */
  public static JsonNode executeTemplate(Context ctx, Instruction inst, JsonNode node, boolean privateContext,
      ObjectNode argvar)
      throws CodeExecuteException {
    // Temporarily swap the buffers to capture all output of the partial.
    StringBuilder buf = new StringBuilder();
    StringBuilder origBuf = ctx.swapBuffer(buf);
    try {
      // If we want to hide the parent context during execution, create a new
      // temporary sub-context.
      ctx.push(node);
      ctx.frame().stopResolution(privateContext);
      if (argvar != null) {
        ctx.setVar("@args", argvar);
      }
      ctx.execute(inst);
    } finally {
      // Always restore the caller's buffer and frame, even on failure.
      ctx.swapBuffer(origBuf);
      ctx.pop();
    }
    return new TextNode(buf.toString());
  }

  /**
   * Loads a resource from the Java package relative to {@code cls}, raising a
   * CodeException if it fails.
   */
  public static String loadResource(Class<?> cls, String path) throws CodeException {
    try (InputStream stream = cls.getResourceAsStream(path)) {
      if (stream == null) {
        throw new CodeExecuteException(resourceLoadError(path, "not found"));
      }
      return streamToString(stream);
    } catch (IOException e) {
      throw new CodeExecuteException(resourceLoadError(path, e.toString()));
    }
  }

  /**
   * Reads the entire stream as UTF-8 text, closing it when done.
   */
  public static String streamToString(InputStream stream) throws IOException {
    try (InputStreamReader reader = new InputStreamReader(stream, "UTF-8")) {
      StringBuilder buf = new StringBuilder();
      char[] buffer = new char[4096];
      int n = 0;
      while (-1 != (n = reader.read(buffer))) {
        buf.append(buffer, 0, n);
      }
      return buf.toString();
    }
  }

  /**
   * Returns true if at least one classpath resource with the given name
   * exists relative to {@code cls}'s package.
   */
  public static boolean resourceExists(Class<?> cls, String fileName) {
    try {
      String name = resolveName(cls, fileName);
      Enumeration<URL> urls = cls.getClassLoader().getResources(name);
      return urls.hasMoreElements();
    } catch (IOException e) {
      throw new RuntimeException("Failed to list resources", e);
    }
  }

  /**
   * Lists the file names in {@code cls}'s package directory (file-protocol
   * classpath entries only) that match the predicate, sorted. Throws if no
   * file matches.
   */
  public static List<Path> list(Class<?> cls, Predicate<Path> predicate) {
    try {
      Enumeration<URL> urls = cls.getClassLoader().getResources(resolveName(cls, "."));
      List<Path> result = new ArrayList<>();
      while (urls.hasMoreElements()) {
        URL url = urls.nextElement();
        // Only filesystem-backed classpath entries can be listed; skip jars etc.
        if (!url.getProtocol().equals("file")) {
          continue;
        }
        File dir = Paths.get(url.toURI()).toFile();
        if (dir == null || !dir.isDirectory()) {
          continue;
        }
        for (File file : dir.listFiles()) {
          Path path = file.toPath();
          if (predicate.test(path)) {
            result.add(path.getFileName());
          }
        }
      }
      if (result.isEmpty()) {
        throw new RuntimeException("No files matched predicate");
      }
      Collections.sort(result);
      return result;
    } catch (IOException | URISyntaxException e) {
      throw new RuntimeException("Failed to list resources", e);
    }
  }

  /** Builds a RESOURCE_LOAD error for the given path and message. */
  private static ErrorInfo resourceLoadError(String path, String message) {
    ErrorInfo info = new ErrorInfo(ExecuteErrorType.RESOURCE_LOAD);
    info.name(path);
    info.data(message);
    return info;
  }

  /**
   * Resolves a resource name relative to {@code cls}'s package, mirroring
   * the resolution done by {@link Class#getResourceAsStream}.
   */
  private static String resolveName(Class<?> cls, String name) {
    if (name == null) {
      return name;
    }
    if (!name.startsWith("/")) {
      while (cls.isArray()) {
        cls = cls.getComponentType();
      }
      String baseName = cls.getName();
      int index = baseName.lastIndexOf('.');
      if (index != -1) {
        name = baseName.substring(0, index).replace('.', '/') + "/" + name;
      }
    } else {
      name = name.substring(1);
    }
    return name;
  }

  /**
   * Map.getOrDefault only available in JDK 8., <s>For now we support JDK 7.</s>
   * We now support JDK 8 but leaving this in place anyway.
   */
  public static <K, V> V getOrDefault(Map<K, V> map, K key, V defaultValue) {
    V value = map.get(key);
    return value == null ? defaultValue : value;
  }

  /**
   * Returns the translated string given the key in that localizedStrings node.
   * If key is not found, it returns the defaultValue.
   */
  public static String localizeOrDefault(JsonNode localizedStrings, String key, String defaultValue) {
    JsonNode node = localizedStrings.get(key);
    return node == null ? defaultValue : node.asText();
  }

  /**
   * Returns true if the first non-whitespace character is one of the
   * valid starting characters for a JSON value; else false.
   */
  public static boolean isJsonStart(String raw) {
    int size = raw.length();
    int index = 0;
    while (index < size) {
      char ch = raw.charAt(index);
      if (ch != ' ') {
        switch (ch) {
          case '"':
          case '-':
          case '0':
          case '1':
          case '2':
          case '3':
          case '4':
          case '5':
          case '6':
          case '7':
          case '8':
          case '9':
          case '[':
          case '{':
            return true;

          // Even when failSilently=true is passed, Jackson's decode method
          // throws an exception and immediately swallows it.
          // These string comparisons add more precision when trying to detect
          // a JSON value without attempting to parse it. They are place-holders
          // to cut down on Jackson exceptions until a long-term fix is made.
          case 'f':
            return raw.startsWith("false");
          case 'n':
            return raw.startsWith("null");
          case 't':
            return raw.startsWith("true");
          default:
            return false;
        }
      }
      index++;
    }
    return false;
  }

  /**
   * Checks the {@code parent} node to see if it contains one of the keys, and
   * returns the first that matches. If none match it returns {@link Constants#MISSING_NODE}
   */
  public static JsonNode getFirstMatchingNode(JsonNode parent, String... keys) {
    for (String key : keys) {
      JsonNode node = parent.path(key);
      if (!node.isMissingNode()) {
        return node;
      }
    }
    return Constants.MISSING_NODE;
  }

  /**
   * Formats the {@code node} as a string using the pretty printer.
   */
  public static String jsonPretty(JsonNode node) throws IOException {
    StringBuilder buf = new StringBuilder();
    JsonGenerator gen = JSON_FACTORY.createGenerator(new StringBuilderWriter(buf));
    gen.setPrettyPrinter(PRETTY_PRINTER);
    gen.setCodec(JsonUtils.getMapper());
    // NOTE(review): the generator is never flushed or closed; this appears to
    // rely on writeTree() pushing all output through to the writer — confirm.
    gen.writeTree(node);
    return buf.toString();
  }

  /**
   * Splits a variable name into its parts. Returns null for the special
   * name "@".
   */
  public static Object[] splitVariable(String name) {
    String[] parts = name.equals("@") ? null : StringUtils.split(name, '.');
    if (parts == null) {
      return null;
    }
    // Each segment of the key path can be either a String or an Integer.
    Object[] keys = new Object[parts.length];
    for (int i = 0, len = parts.length; i < len; i++) {
      keys[i] = allDigits(parts[i]) ? Integer.parseInt(parts[i], 10) : parts[i];
    }
    return keys;
  }

  /**
   * URL-encodes the string.
   */
  public static String urlEncode(String val) {
    try {
      return URLEncoder.encode(val, "UTF-8").replace("+", "%20");
    } catch (UnsupportedEncodingException e) {
      return val;
    }
  }

  /**
   * Determines the boolean value of a node based on its type.
   */
  public static boolean isTruthy(JsonNode node) {
    if (node.isTextual()) {
      return !node.asText().equals("");
    }
    if (node.isNumber() || node.isBoolean()) {
      // Compare as double, not long: asLong() truncates fractional values,
      // making numbers like 0.5 incorrectly falsy.
      return node.asDouble() != 0;
    }
    if (node.isMissingNode() || node.isNull()) {
      return false;
    }
    return node.size() != 0;
  }

  /**
   * Returns the node's text if it is truthy, otherwise the default.
   */
  public static String ifString(JsonNode node, String defaultString) {
    return isTruthy(node) ? node.asText() : defaultString;
  }

  /**
   * Returns the node's double value if it is truthy, otherwise the default.
   */
  public static double ifDouble(JsonNode node, double defaultValue) {
    return isTruthy(node) ? node.asDouble() : defaultValue;
  }

  /**
   * Obtains the text representation of a node, converting {@code null} to
   * empty string.
   */
  public static String eatNull(JsonNode node) {
    return node.isNull() ? "" : node.asText();
  }

  /**
   * Indicates if the string consists of all digits.
   */
  private static boolean allDigits(String str) {
    for (int i = 0, len = str.length(); i < len; i++) {
      if (!Character.isDigit(str.charAt(i))) {
        return false;
      }
    }
    return true;
  }
}
|
Fix truthiness of floating point numbers (#21)
|
core/src/main/java/com/squarespace/template/GeneralUtils.java
|
Fix truthiness of floating point numbers (#21)
|
|
Java
|
apache-2.0
|
60ab09e83ea1d1a066f2a39935078abbbaf47a90
| 0
|
atomix/atomix,arnonmoscona/copycat,atomix/atomix,tempbottle/copycat,quantiply-fork/copycat,aruanruan/copycat,kuujo/copycat,arnonmoscona/copycat,kuujo/copycat,aruanruan/copycat,madjam/copycat,madjam/copycat,tempbottle/copycat
|
/*
* Copyright 2014 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.kuujo.copycat.test.unit;
import net.kuujo.copycat.Command;
import net.kuujo.copycat.impl.DefaultCommand;
import net.kuujo.copycat.log.CommandEntry;
import net.kuujo.copycat.log.ConfigurationEntry;
import net.kuujo.copycat.log.Entry;
import net.kuujo.copycat.log.Log;
import net.kuujo.copycat.log.MemoryLog;
import net.kuujo.copycat.log.NoOpEntry;
import org.junit.Test;
import org.vertx.java.core.AsyncResult;
import org.vertx.java.core.Handler;
import org.vertx.java.core.json.JsonObject;
import static org.junit.Assert.assertTrue;
/**
 * In-memory log tests.
 *
 * Exercises {@link MemoryLog} via its asynchronous {@link Handler} API.
 * Log indexes in this version are 1-based: the first appended entry
 * receives index 1.
 *
 * @author Jordan Halterman
 */
public class MemoryLogTest {

  // Appending to an empty log yields index 1.
  @Test
  public void testAppendEntry() {
    Log log = new MemoryLog();
    log.appendEntry(new NoOpEntry(), new Handler<AsyncResult<Long>>() {
      @Override
      public void handle(AsyncResult<Long> result) {
        assertTrue(result.succeeded());
        assertTrue(result.result() == 1);
      }
    });
  }

  // An appended entry is reported as contained at its returned index.
  @Test
  public void testContainsEntry() {
    final Log log = new MemoryLog();
    log.appendEntry(new NoOpEntry(), new Handler<AsyncResult<Long>>() {
      @Override
      public void handle(AsyncResult<Long> result) {
        assertTrue(result.succeeded());
        assertTrue(result.result() == 1);
        log.containsEntry(1, new Handler<AsyncResult<Boolean>>() {
          @Override
          public void handle(AsyncResult<Boolean> result) {
            assertTrue(result.succeeded());
            assertTrue(result.result());
          }
        });
      }
    });
  }

  // Loading an entry by index returns an entry of the appended type.
  @Test
  public void testLoadEntry() {
    final Log log = new MemoryLog();
    log.appendEntry(new NoOpEntry(), new Handler<AsyncResult<Long>>() {
      @Override
      public void handle(AsyncResult<Long> result) {
        assertTrue(result.succeeded());
        assertTrue(result.result() == 1);
        log.entry(1, new Handler<AsyncResult<Entry>>() {
          @Override
          public void handle(AsyncResult<Entry> result) {
            assertTrue(result.succeeded());
            assertTrue(result.result() instanceof NoOpEntry);
          }
        });
      }
    });
  }

  // After three appends, firstIndex/lastIndex bracket the log (1..3).
  @Test
  public void testFirstIndex() {
    final Log log = new MemoryLog();
    log.appendEntry(new NoOpEntry(), new Handler<AsyncResult<Long>>() {
      @Override
      public void handle(AsyncResult<Long> result) {
        assertTrue(result.succeeded());
        assertTrue(result.result() == 1);
        log.appendEntry(new ConfigurationEntry(), new Handler<AsyncResult<Long>>() {
          @Override
          public void handle(AsyncResult<Long> result) {
            assertTrue(result.succeeded());
            assertTrue(result.result() == 2);
            log.appendEntry(new CommandEntry(1, new DefaultCommand()), new Handler<AsyncResult<Long>>() {
              @Override
              public void handle(AsyncResult<Long> result) {
                assertTrue(result.succeeded());
                assertTrue(result.result() == 3);
                assertTrue(log.firstIndex() == 1);
                assertTrue(log.lastIndex() == 3);
              }
            });
          }
        });
      }
    });
  }

  // floor(3) followed by free(entry2) advances firstIndex past the freed entry.
  @Test
  public void testFreeEntry() {
    final Log log = new MemoryLog();
    final CommandEntry entry1 = new CommandEntry(1, new DefaultCommand("foo", Command.Type.WRITE, new JsonObject()));
    log.appendEntry(entry1, new Handler<AsyncResult<Long>>() {
      @Override
      public void handle(AsyncResult<Long> result) {
        assertTrue(result.succeeded());
        assertTrue(result.result() == 1);
        final CommandEntry entry2 = new CommandEntry(1, new DefaultCommand("bar", Command.Type.WRITE, new JsonObject()));
        log.appendEntry(entry2, new Handler<AsyncResult<Long>>() {
          @Override
          public void handle(AsyncResult<Long> result) {
            assertTrue(result.succeeded());
            assertTrue(result.result() == 2);
            final CommandEntry entry3 = new CommandEntry(1, new DefaultCommand("baz", Command.Type.WRITE, new JsonObject()));
            log.appendEntry(entry3, new Handler<AsyncResult<Long>>() {
              @Override
              public void handle(AsyncResult<Long> result) {
                assertTrue(result.succeeded());
                assertTrue(result.result() == 3);
                log.floor(3);
                log.free(entry2);
                assertTrue(log.firstIndex() == 2);
                assertTrue(log.lastIndex() == 3);
              }
            });
          }
        });
      }
    });
  }

  // firstEntry returns the oldest appended entry (the NoOpEntry).
  @Test
  public void testFirstEntry() {
    final Log log = new MemoryLog();
    log.appendEntry(new NoOpEntry(), new Handler<AsyncResult<Long>>() {
      @Override
      public void handle(AsyncResult<Long> result) {
        assertTrue(result.succeeded());
        assertTrue(result.result() == 1);
        log.appendEntry(new ConfigurationEntry(), new Handler<AsyncResult<Long>>() {
          @Override
          public void handle(AsyncResult<Long> result) {
            assertTrue(result.succeeded());
            assertTrue(result.result() == 2);
            log.appendEntry(new CommandEntry(1, new DefaultCommand()), new Handler<AsyncResult<Long>>() {
              @Override
              public void handle(AsyncResult<Long> result) {
                assertTrue(result.succeeded());
                assertTrue(result.result() == 3);
                log.firstEntry(new Handler<AsyncResult<Entry>>() {
                  @Override
                  public void handle(AsyncResult<Entry> result) {
                    assertTrue(result.succeeded());
                    assertTrue(result.result() instanceof NoOpEntry);
                  }
                });
              }
            });
          }
        });
      }
    });
  }

  // lastIndex reflects the index of the most recent append.
  @Test
  public void testLastIndex() {
    final Log log = new MemoryLog();
    log.appendEntry(new NoOpEntry(), new Handler<AsyncResult<Long>>() {
      @Override
      public void handle(AsyncResult<Long> result) {
        assertTrue(result.succeeded());
        assertTrue(result.result() == 1);
        log.appendEntry(new ConfigurationEntry(), new Handler<AsyncResult<Long>>() {
          @Override
          public void handle(AsyncResult<Long> result) {
            assertTrue(result.succeeded());
            assertTrue(result.result() == 2);
            log.appendEntry(new CommandEntry(1, new DefaultCommand()), new Handler<AsyncResult<Long>>() {
              @Override
              public void handle(AsyncResult<Long> result) {
                assertTrue(result.succeeded());
                assertTrue(result.result() == 3);
                assertTrue(log.lastIndex() == 3);
              }
            });
          }
        });
      }
    });
  }

  // lastEntry returns the most recently appended entry (the CommandEntry).
  @Test
  public void testLastEntry() {
    final Log log = new MemoryLog();
    log.appendEntry(new NoOpEntry(), new Handler<AsyncResult<Long>>() {
      @Override
      public void handle(AsyncResult<Long> result) {
        assertTrue(result.succeeded());
        assertTrue(result.result() == 1);
        log.appendEntry(new ConfigurationEntry(), new Handler<AsyncResult<Long>>() {
          @Override
          public void handle(AsyncResult<Long> result) {
            assertTrue(result.succeeded());
            assertTrue(result.result() == 2);
            log.appendEntry(new CommandEntry(1, new DefaultCommand()), new Handler<AsyncResult<Long>>() {
              @Override
              public void handle(AsyncResult<Long> result) {
                assertTrue(result.succeeded());
                assertTrue(result.result() == 3);
                log.lastEntry(new Handler<AsyncResult<Entry>>() {
                  @Override
                  public void handle(AsyncResult<Entry> result) {
                    assertTrue(result.succeeded());
                    assertTrue(result.result() instanceof CommandEntry);
                  }
                });
              }
            });
          }
        });
      }
    });
  }
}
|
src/test/java/net/kuujo/copycat/test/unit/MemoryLogTest.java
|
/*
* Copyright 2014 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.kuujo.copycat.test.unit;
import net.kuujo.copycat.Command;
import net.kuujo.copycat.impl.DefaultCommand;
import net.kuujo.copycat.log.CommandEntry;
import net.kuujo.copycat.log.ConfigurationEntry;
import net.kuujo.copycat.log.Entry;
import net.kuujo.copycat.log.Log;
import net.kuujo.copycat.log.MemoryLog;
import net.kuujo.copycat.log.NoOpEntry;
import org.junit.Test;
import org.vertx.java.core.AsyncResult;
import org.vertx.java.core.Handler;
import org.vertx.java.core.json.JsonObject;
import static org.junit.Assert.assertTrue;
/**
 * In-memory log tests.
 *
 * Exercises {@link MemoryLog} via its asynchronous {@link Handler} API.
 * Log indexes in this version are 0-based (the first appended entry
 * receives index 0), and firstIndex/lastIndex/floor are asynchronous,
 * taking result handlers.
 *
 * @author Jordan Halterman
 */
public class MemoryLogTest {

  // Appending to an empty log yields index 0.
  @Test
  public void testAppendEntry() {
    Log log = new MemoryLog();
    log.appendEntry(new NoOpEntry(), new Handler<AsyncResult<Long>>() {
      @Override
      public void handle(AsyncResult<Long> result) {
        assertTrue(result.succeeded());
        assertTrue(result.result() == 0);
      }
    });
  }

  // An appended entry is reported as contained at its returned index.
  @Test
  public void testContainsEntry() {
    final Log log = new MemoryLog();
    log.appendEntry(new NoOpEntry(), new Handler<AsyncResult<Long>>() {
      @Override
      public void handle(AsyncResult<Long> result) {
        assertTrue(result.succeeded());
        assertTrue(result.result() == 0);
        log.containsEntry(0, new Handler<AsyncResult<Boolean>>() {
          @Override
          public void handle(AsyncResult<Boolean> result) {
            assertTrue(result.succeeded());
            assertTrue(result.result());
          }
        });
      }
    });
  }

  // Loading an entry by index returns an entry of the appended type.
  @Test
  public void testLoadEntry() {
    final Log log = new MemoryLog();
    log.appendEntry(new NoOpEntry(), new Handler<AsyncResult<Long>>() {
      @Override
      public void handle(AsyncResult<Long> result) {
        assertTrue(result.succeeded());
        assertTrue(result.result() == 0);
        log.entry(0, new Handler<AsyncResult<Entry>>() {
          @Override
          public void handle(AsyncResult<Entry> result) {
            assertTrue(result.succeeded());
            assertTrue(result.result() instanceof NoOpEntry);
          }
        });
      }
    });
  }

  // After three appends (indexes 0..2), the async firstIndex reports 0.
  @Test
  public void testFirstIndex() {
    final Log log = new MemoryLog();
    log.appendEntry(new NoOpEntry(), new Handler<AsyncResult<Long>>() {
      @Override
      public void handle(AsyncResult<Long> result) {
        assertTrue(result.succeeded());
        assertTrue(result.result() == 0);
        log.appendEntry(new ConfigurationEntry(), new Handler<AsyncResult<Long>>() {
          @Override
          public void handle(AsyncResult<Long> result) {
            assertTrue(result.succeeded());
            assertTrue(result.result() == 1);
            log.appendEntry(new CommandEntry(1, new DefaultCommand()), new Handler<AsyncResult<Long>>() {
              @Override
              public void handle(AsyncResult<Long> result) {
                assertTrue(result.succeeded());
                assertTrue(result.result() == 2);
                log.firstIndex(new Handler<AsyncResult<Long>>() {
                  @Override
                  public void handle(AsyncResult<Long> result) {
                    assertTrue(result.succeeded());
                    assertTrue(result.result() == 0);
                  }
                });
              }
            });
          }
        });
      }
    });
  }

  // floor(2) then free(entry2) advances firstIndex to 1; lastIndex stays 2.
  @Test
  public void testFreeEntry() {
    final Log log = new MemoryLog();
    final CommandEntry entry1 = new CommandEntry(1, new DefaultCommand("foo", Command.Type.WRITE, new JsonObject()));
    log.appendEntry(entry1, new Handler<AsyncResult<Long>>() {
      @Override
      public void handle(AsyncResult<Long> result) {
        assertTrue(result.succeeded());
        assertTrue(result.result() == 0);
        final CommandEntry entry2 = new CommandEntry(1, new DefaultCommand("bar", Command.Type.WRITE, new JsonObject()));
        log.appendEntry(entry2, new Handler<AsyncResult<Long>>() {
          @Override
          public void handle(AsyncResult<Long> result) {
            assertTrue(result.succeeded());
            assertTrue(result.result() == 1);
            final CommandEntry entry3 = new CommandEntry(1, new DefaultCommand("baz", Command.Type.WRITE, new JsonObject()));
            log.appendEntry(entry3, new Handler<AsyncResult<Long>>() {
              @Override
              public void handle(AsyncResult<Long> result) {
                assertTrue(result.succeeded());
                assertTrue(result.result() == 2);
                log.floor(2, new Handler<AsyncResult<Void>>() {
                  @Override
                  public void handle(AsyncResult<Void> result) {
                    assertTrue(result.succeeded());
                    log.free(entry2);
                    log.firstIndex(new Handler<AsyncResult<Long>>() {
                      @Override
                      public void handle(AsyncResult<Long> result) {
                        assertTrue(result.succeeded());
                        assertTrue(result.result() == 1);
                      }
                    });
                    log.lastIndex(new Handler<AsyncResult<Long>>() {
                      @Override
                      public void handle(AsyncResult<Long> result) {
                        assertTrue(result.succeeded());
                        assertTrue(result.result() == 2);
                      }
                    });
                  }
                });
              }
            });
          }
        });
      }
    });
  }

  // firstEntry returns the oldest appended entry (the NoOpEntry).
  @Test
  public void testFirstEntry() {
    final Log log = new MemoryLog();
    log.appendEntry(new NoOpEntry(), new Handler<AsyncResult<Long>>() {
      @Override
      public void handle(AsyncResult<Long> result) {
        assertTrue(result.succeeded());
        assertTrue(result.result() == 0);
        log.appendEntry(new ConfigurationEntry(), new Handler<AsyncResult<Long>>() {
          @Override
          public void handle(AsyncResult<Long> result) {
            assertTrue(result.succeeded());
            assertTrue(result.result() == 1);
            log.appendEntry(new CommandEntry(1, new DefaultCommand()), new Handler<AsyncResult<Long>>() {
              @Override
              public void handle(AsyncResult<Long> result) {
                assertTrue(result.succeeded());
                assertTrue(result.result() == 2);
                log.firstEntry(new Handler<AsyncResult<Entry>>() {
                  @Override
                  public void handle(AsyncResult<Entry> result) {
                    assertTrue(result.succeeded());
                    assertTrue(result.result() instanceof NoOpEntry);
                  }
                });
              }
            });
          }
        });
      }
    });
  }

  // The async lastIndex reports the index of the most recent append.
  @Test
  public void testLastIndex() {
    final Log log = new MemoryLog();
    log.appendEntry(new NoOpEntry(), new Handler<AsyncResult<Long>>() {
      @Override
      public void handle(AsyncResult<Long> result) {
        assertTrue(result.succeeded());
        assertTrue(result.result() == 0);
        log.appendEntry(new ConfigurationEntry(), new Handler<AsyncResult<Long>>() {
          @Override
          public void handle(AsyncResult<Long> result) {
            assertTrue(result.succeeded());
            assertTrue(result.result() == 1);
            log.appendEntry(new CommandEntry(1, new DefaultCommand()), new Handler<AsyncResult<Long>>() {
              @Override
              public void handle(AsyncResult<Long> result) {
                assertTrue(result.succeeded());
                assertTrue(result.result() == 2);
                log.lastIndex(new Handler<AsyncResult<Long>>() {
                  @Override
                  public void handle(AsyncResult<Long> result) {
                    assertTrue(result.succeeded());
                    assertTrue(result.result() == 2);
                  }
                });
              }
            });
          }
        });
      }
    });
  }

  // lastEntry returns the most recently appended entry (the CommandEntry).
  @Test
  public void testLastEntry() {
    final Log log = new MemoryLog();
    log.appendEntry(new NoOpEntry(), new Handler<AsyncResult<Long>>() {
      @Override
      public void handle(AsyncResult<Long> result) {
        assertTrue(result.succeeded());
        assertTrue(result.result() == 0);
        log.appendEntry(new ConfigurationEntry(), new Handler<AsyncResult<Long>>() {
          @Override
          public void handle(AsyncResult<Long> result) {
            assertTrue(result.succeeded());
            assertTrue(result.result() == 1);
            log.appendEntry(new CommandEntry(1, new DefaultCommand()), new Handler<AsyncResult<Long>>() {
              @Override
              public void handle(AsyncResult<Long> result) {
                assertTrue(result.succeeded());
                assertTrue(result.result() == 2);
                log.lastEntry(new Handler<AsyncResult<Entry>>() {
                  @Override
                  public void handle(AsyncResult<Entry> result) {
                    assertTrue(result.succeeded());
                    assertTrue(result.result() instanceof CommandEntry);
                  }
                });
              }
            });
          }
        });
      }
    });
  }
}
|
Update log tests.
|
src/test/java/net/kuujo/copycat/test/unit/MemoryLogTest.java
|
Update log tests.
|
|
Java
|
apache-2.0
|
efc49c25c9dd680c0da8b6ec7d3cf89935d7c81d
| 0
|
HHzzhz/drools,ThiagoGarciaAlves/drools,ThomasLau/drools,sutaakar/drools,prabasn/drools,manstis/drools,ThomasLau/drools,ngs-mtech/drools,vinodkiran/drools,kevinpeterson/drools,292388900/drools,ngs-mtech/drools,kevinpeterson/drools,mrietveld/drools,iambic69/drools,yurloc/drools,vinodkiran/drools,Buble1981/MyDroolsFork,mrrodriguez/drools,prabasn/drools,ngs-mtech/drools,jomarko/drools,winklerm/drools,mswiderski/drools,kevinpeterson/drools,Buble1981/MyDroolsFork,mrietveld/drools,amckee23/drools,292388900/drools,romartin/drools,HHzzhz/drools,liupugong/drools,mrietveld/drools,mrrodriguez/drools,winklerm/drools,iambic69/drools,ThomasLau/drools,droolsjbpm/drools,ThiagoGarciaAlves/drools,liupugong/drools,OnePaaS/drools,jomarko/drools,manstis/drools,lanceleverich/drools,amckee23/drools,mrrodriguez/drools,ChallenHB/drools,reynoldsm88/drools,mrrodriguez/drools,sutaakar/drools,romartin/drools,jomarko/drools,jiripetrlik/drools,droolsjbpm/drools,jiripetrlik/drools,kedzie/drools-android,iambic69/drools,TonnyFeng/drools,yurloc/drools,292388900/drools,pperboires/PocDrools,liupugong/drools,kedzie/drools-android,iambic69/drools,prabasn/drools,droolsjbpm/drools,mswiderski/drools,ChallenHB/drools,292388900/drools,manstis/drools,HHzzhz/drools,lanceleverich/drools,amckee23/drools,yurloc/drools,jomarko/drools,TonnyFeng/drools,Buble1981/MyDroolsFork,lanceleverich/drools,mswiderski/drools,reynoldsm88/drools,ThiagoGarciaAlves/drools,rajashekharmunthakewill/drools,mrrodriguez/drools,pperboires/PocDrools,kevinpeterson/drools,mswiderski/drools,sotty/drools,rajashekharmunthakewill/drools,ngs-mtech/drools,reynoldsm88/drools,mrietveld/drools,winklerm/drools,liupugong/drools,lanceleverich/drools,yurloc/drools,prabasn/drools,TonnyFeng/drools,sutaakar/drools,jiripetrlik/drools,droolsjbpm/drools,amckee23/drools,OnePaaS/drools,vinodkiran/drools,jiripetrlik/drools,ThiagoGarciaAlves/drools,OnePaaS/drools,ChallenHB/drools,OnePaaS/drools,winklerm/drools,jiripetrlik/drools,ChallenHB/drools,kevinpeterson/drools
,rajashekharmunthakewill/drools,sotty/drools,reynoldsm88/drools,mrietveld/drools,manstis/drools,psiroky/drools,sutaakar/drools,OnePaaS/drools,sutaakar/drools,ThomasLau/drools,ThiagoGarciaAlves/drools,pperboires/PocDrools,sotty/drools,psiroky/drools,HHzzhz/drools,kedzie/drools-android,droolsjbpm/drools,romartin/drools,romartin/drools,HHzzhz/drools,TonnyFeng/drools,pperboires/PocDrools,prabasn/drools,iambic69/drools,rajashekharmunthakewill/drools,kedzie/drools-android,amckee23/drools,Buble1981/MyDroolsFork,vinodkiran/drools,manstis/drools,romartin/drools,kedzie/drools-android,vinodkiran/drools,TonnyFeng/drools,lanceleverich/drools,sotty/drools,winklerm/drools,ngs-mtech/drools,psiroky/drools,292388900/drools,sotty/drools,jomarko/drools,rajashekharmunthakewill/drools,ChallenHB/drools,reynoldsm88/drools,psiroky/drools,pwachira/droolsexamples,ThomasLau/drools,liupugong/drools
|
/*
* Copyright 2008 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.drools.reteoo;
import junit.framework.TestCase;
import org.drools.RuleBase;
import org.drools.RuleBaseFactory;
import org.drools.common.InternalWorkingMemory;
import org.drools.concurrent.ExternalExecutorService;
import org.jmock.Expectations;
import org.jmock.Mockery;
import org.jmock.lib.concurrent.DeterministicScheduler;
/**
 * Test case for PartitionTaskManager
 *
 * Uses jMock to mock {@link PartitionTaskManager.Action} and a
 * {@link DeterministicScheduler} so queued actions are executed
 * synchronously on the test thread.
 *
 * @author <a href="mailto:tirelli@post.com">Edson Tirelli</a>
 */
public class PartitionTaskManagerTest extends TestCase {

  // jMock context used to create mocks and verify expectations.
  Mockery context = new Mockery();
  private PartitionManager manager;
  private PartitionTaskManager taskManager;
  private InternalWorkingMemory workingMemory;

  @Override
  public void setUp() {
    // Build a real working memory from a fresh rule base; the task manager
    // under test is wired to it through a PartitionManager.
    RuleBase rulebase = RuleBaseFactory.newRuleBase();
    workingMemory = (InternalWorkingMemory) rulebase.newStatefulSession();
    manager = new PartitionManager(workingMemory);
    taskManager = new PartitionTaskManager( manager, workingMemory );
  }

  @Override
  protected void tearDown() throws Exception {
  }

  // Actions enqueued before an executor is set must still run once the
  // pool is attached.
  public void testEnqueueBeforeSettingExecutor() throws InterruptedException {
    final PartitionTaskManager.Action action = context.mock( PartitionTaskManager.Action.class );

    // set expectations for the scenario
    context.checking( new Expectations() {{
      oneOf( action ).execute( workingMemory );
    }});

    // enqueue while no pool is attached yet
    taskManager.enqueue( action );

    // this is a jmock helper class that implements the ExecutorService interface
    DeterministicScheduler pool = new DeterministicScheduler();
    ExternalExecutorService service = new ExternalExecutorService( pool );

    // set the pool
    manager.setPool( service );

    // executes all pending actions using current thread
    pool.runUntilIdle();

    // check expectations
    context.assertIsSatisfied();
  }

  // The normal path: pool attached first, then an enqueued action executes.
  public void testFireCorrectly() throws InterruptedException {
    // creates a mock action
    final PartitionTaskManager.Action action = context.mock( PartitionTaskManager.Action.class );

    // this is a jmock helper class that implements the ExecutorService interface
    DeterministicScheduler pool = new DeterministicScheduler();
    ExternalExecutorService service = new ExternalExecutorService( pool );

    // set the pool
    manager.setPool( service );

    // set expectations for the scenario
    context.checking( new Expectations() {{
      oneOf( action ).execute( workingMemory );
    }});

    // fire scenario
    taskManager.enqueue( action );

    // executes all pending actions using current thread
    pool.runUntilIdle();

    // check expectations
    context.assertIsSatisfied();
  }

  // Mixed scenario: actions enqueued both before and after the pool is set
  // must all execute (5 enqueues => 5 executions).
  public void testActionCallbacks() throws InterruptedException {
    // creates a mock action
    final PartitionTaskManager.Action action = context.mock( PartitionTaskManager.Action.class );

    // this is a jmock helper class that implements the ExecutorService interface
    DeterministicScheduler pool = new DeterministicScheduler();

    // set expectations for the scenario
    context.checking( new Expectations() {{
      allowing(action).compareTo( with( any(PartitionTaskManager.Action.class) ) );
      exactly(5).of( action ).execute( workingMemory );
    }});

    // enqueue before pool
    taskManager.enqueue( action );
    taskManager.enqueue( action );

    ExternalExecutorService service = new ExternalExecutorService( pool );

    // set the pool
    manager.setPool( service );

    // enqueue after setting the pool
    taskManager.enqueue( action );
    taskManager.enqueue( action );
    taskManager.enqueue( action );

    // executes all pending actions using current thread
    pool.runUntilIdle();

    // check expectations
    context.assertIsSatisfied();
  }
}
|
drools-core/src/test/java/org/drools/reteoo/PartitionTaskManagerTest.java
|
/*
* Copyright 2008 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.drools.reteoo;
import junit.framework.TestCase;
import org.drools.RuleBase;
import org.drools.RuleBaseFactory;
import org.drools.common.InternalWorkingMemory;
import org.drools.concurrent.ExternalExecutorService;
import org.jmock.Expectations;
import org.jmock.Mockery;
import org.jmock.lib.concurrent.DeterministicScheduler;
import com.sun.corba.se.spi.orbutil.fsm.Action;
/**
* Test case for PartitionTaskManager
*
* @author <a href="mailto:tirelli@post.com">Edson Tirelli</a>
*/
public class PartitionTaskManagerTest extends TestCase {

    /** jMock context for creating and verifying mock actions. */
    Mockery context = new Mockery();

    private PartitionManager manager;
    private PartitionTaskManager taskManager;
    private InternalWorkingMemory workingMemory;

    @Override
    public void setUp() {
        // each test gets its own rulebase and session
        RuleBase ruleBase = RuleBaseFactory.newRuleBase();
        workingMemory = (InternalWorkingMemory) ruleBase.newStatefulSession();
        manager = new PartitionManager(workingMemory);
        taskManager = new PartitionTaskManager(manager, workingMemory);
    }

    @Override
    protected void tearDown() throws Exception {
    }

    /** Actions queued before a pool exists run once the pool is set. */
    public void testEnqueueBeforeSettingExecutor() throws InterruptedException {
        final PartitionTaskManager.Action mockAction = context.mock(PartitionTaskManager.Action.class);
        // the mocked action is expected to run exactly once
        context.checking(new Expectations() {{
            oneOf(mockAction).execute(workingMemory);
        }});
        // queue the action while no executor pool has been configured
        taskManager.enqueue(mockAction);
        // jMock scheduler that implements ExecutorService and runs synchronously
        DeterministicScheduler scheduler = new DeterministicScheduler();
        manager.setPool(new ExternalExecutorService(scheduler));
        // drain all pending work on the current thread
        scheduler.runUntilIdle();
        context.assertIsSatisfied();
    }

    /** An action queued after the pool is set runs exactly once. */
    public void testFireCorrectly() throws InterruptedException {
        final PartitionTaskManager.Action mockAction = context.mock(PartitionTaskManager.Action.class);
        // jMock scheduler that implements ExecutorService and runs synchronously
        DeterministicScheduler scheduler = new DeterministicScheduler();
        manager.setPool(new ExternalExecutorService(scheduler));
        // the mocked action is expected to run exactly once
        context.checking(new Expectations() {{
            oneOf(mockAction).execute(workingMemory);
        }});
        taskManager.enqueue(mockAction);
        // drain all pending work on the current thread
        scheduler.runUntilIdle();
        context.assertIsSatisfied();
    }

    /** Actions queued both before and after the pool is set all run. */
    public void testActionCallbacks() throws InterruptedException {
        final PartitionTaskManager.Action mockAction = context.mock(PartitionTaskManager.Action.class);
        // jMock scheduler that implements ExecutorService and runs synchronously
        DeterministicScheduler scheduler = new DeterministicScheduler();
        context.checking(new Expectations() {{
            // queue-ordering comparisons are allowed but not counted
            allowing(mockAction).compareTo(with(any(PartitionTaskManager.Action.class)));
            exactly(5).of(mockAction).execute(workingMemory);
        }});
        // two actions queued before the pool exists
        taskManager.enqueue(mockAction);
        taskManager.enqueue(mockAction);
        manager.setPool(new ExternalExecutorService(scheduler));
        // three more queued after the pool is set
        taskManager.enqueue(mockAction);
        taskManager.enqueue(mockAction);
        taskManager.enqueue(mockAction);
        // drain all pending work on the current thread
        scheduler.runUntilIdle();
        context.assertIsSatisfied();
    }
}
|
JBRULES-2270: changing synchronization mechanism between agenda and rete network
git-svn-id: a243bed356d289ca0d1b6d299a0597bdc4ecaa09@29235 c60d74c8-e8f6-0310-9e8f-d4a2fc68ab70
|
drools-core/src/test/java/org/drools/reteoo/PartitionTaskManagerTest.java
|
JBRULES-2270: changing synchronization mechanism between agenda and rete network
|
|
Java
|
bsd-2-clause
|
75732721199c50ec6d377b2a2be2185634595180
| 0
|
gosu-lang/ragnardb
|
package ragnardb.plugin;
import gw.lang.reflect.*;
import gw.lang.reflect.java.JavaTypes;

import ragnardb.runtime.SQLConstraint;
import ragnardb.runtime.SQLMetadata;
import ragnardb.runtime.SQLQuery;
import ragnardb.runtime.SQLRecord;

import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.stream.Collectors;
public class SQLTableTypeInfo extends SQLBaseTypeInfo {

    /** Query metadata shared by every generated finder/query method. */
    private SQLMetadata _md = new SQLMetadata();

    public SQLTableTypeInfo(ISQLTableType type) {
        super(type);
        resolveProperties(type);
    }

    /**
     * Builds the property, method, and constructor lists for this table type
     * from the table's column definitions.
     */
    private void resolveProperties( ISQLTableType type ) {
        _propertiesList = new ArrayList<>();
        _propertiesMap = new HashMap<>();
        List<ColumnDefinition> columns = type.getColumnDefinitions();
        for(ColumnDefinition column : columns) {
            // one bound property per table column
            SQLColumnPropertyInfo prop = new SQLColumnPropertyInfo(column.getColumnName(), column.getPropertyName(),
                getGosuType(column.getSQLType()), this, column.getOffset(), column.getLength());
            _propertiesMap.put(prop.getName(), prop);
            _propertiesList.add( prop );
        }
        createMethodInfos();
        _constructorList = createConstructorInfos();
    }

    @Override
    public int getOffset() {
        return ((ISQLTableType) getOwnersType()).getTable().getOffset();
    }

    @Override
    public int getTextLength() {
        return ((ISQLTableType) getOwnersType()).getTable().getTypeName().length();
    }

    /** Creates the single no-arg constructor that yields a fresh SQLRecord. */
    private List<IConstructorInfo> createConstructorInfos() {
        List<IConstructorInfo> constructorInfos = new ArrayList<>();
        IConstructorInfo constructorMethod = new ConstructorInfoBuilder()
            .withDescription( "Creates a new Table object" )
            .withParameters()
            .withConstructorHandler((args) -> new SQLRecord(((ISQLTableType) getOwnersType()).getTable().getTableName(),
                "id")).build(this);
        constructorInfos.add( constructorMethod );
        return constructorInfos;
    }

    /**
     * Populates {@code _methodList} with generated finders plus create/init/where,
     * then merges in user-supplied domain-logic methods and properties, if any.
     */
    private void createMethodInfos() {
        MethodList methodList = new MethodList();
        for (String propertyName : _propertiesMap.keySet()) {
            SQLColumnPropertyInfo prop = (SQLColumnPropertyInfo) _propertiesMap.get(propertyName);
            methodList.add(generateFindByMethod(prop));
            methodList.add(generateFindByAllMethod(prop));
        }
        methodList.add(generateCreateMethod());
        methodList.add(generateInitMethod());
        methodList.add(generateWhereMethod());
        List<? extends IMethodInfo> domainMethods = maybeGetDomainMethods();
        List<? extends IPropertyInfo> domainProperties = maybeGetDomainProperties();
        for(IMethodInfo domainMethod : domainMethods) {
            methodList.add(domainMethod);
        }
        _methodList = methodList;
        for(IPropertyInfo domainProperty : domainProperties) {
            _propertiesMap.put(domainProperty.getName(), domainProperty);
            _propertiesList.add(domainProperty);
        }
    }

    /**
     * Generates a static {@code findBy<property>} method returning the first
     * row matching the given column value, or null when there is no match.
     */
    private IMethodInfo generateFindByMethod(IPropertyInfo prop) {
        final String propertyName = prop.getName();
        // NOTE(review): unlike findAllBy..., the property name is NOT capitalized
        // here; the generated name is part of the external contract, so it is
        // intentionally left unchanged.
        return new MethodInfoBuilder()
            .withName( "findBy" + propertyName )
            .withDescription("Find single match based on the value of the " + propertyName + " column.")
            .withParameters(new ParameterInfoBuilder()
                .withName(propertyName)
                .withType(prop.getFeatureType())
                .withDescription("Performs strict matching on this argument"))
            .withReturnType(this.getOwnersType())
            .withStatic(true)
            .withCallHandler(( ctx, args ) -> {
                SQLQuery query = new SQLQuery(_md, getOwnersType());
                SQLConstraint constraint = SQLConstraint.isEqualTo(prop, args[0]);
                query = query.where(constraint);
                // Fix: use one iterator. The previous code called query.iterator()
                // twice, which creates two iterators and issues the query twice.
                Iterator<?> results = query.iterator();
                return results.hasNext() ? results.next() : null;
            })
            .build(this);
    }

    /**
     * Generates a static {@code findAllBy<Property>} method returning an
     * iterable query over all rows matching the given column value.
     */
    private IMethodInfo generateFindByAllMethod(IPropertyInfo prop) {
        final String propertyName = prop.getName();
        return new MethodInfoBuilder()
            .withName("findAllBy" + propertyName.substring(0, 1).toUpperCase() + propertyName.substring(1))
            .withDescription("Find all matches based on the value of the " + propertyName + " column.")
            .withParameters(new ParameterInfoBuilder()
                .withName(propertyName)
                .withType(prop.getFeatureType())
                .withDescription("Performs strict matching on this argument"))
            .withReturnType(JavaTypes.ITERABLE().getParameterizedType(this.getOwnersType()))
            .withStatic(true)
            .withCallHandler(( ctx, args ) -> {
                SQLQuery query = new SQLQuery(_md, getOwnersType());
                SQLConstraint constraint = SQLConstraint.isEqualTo(prop, args[0]);
                query = query.where(constraint);
                // lazily evaluated: callers iterate the returned query
                return query;
            })
            .build(this);
    }

    /** Generates the instance {@code create()} method that inserts this record. */
    private IMethodInfo generateCreateMethod() {
        return new MethodInfoBuilder()
            .withName("create")
            .withDescription("Creates a new table entry")
            .withParameters()
            .withReturnType(this.getOwnersType())
            .withCallHandler(( ctx, args ) -> ((SQLRecord) ctx).create())
            .build(this);
    }

    /** Generates the static {@code init()} factory returning a fresh, unsaved record. */
    private IMethodInfo generateInitMethod() {
        return new MethodInfoBuilder()
            .withName("init")
            .withDescription("Creates a new table entry")
            .withParameters()
            .withReturnType(this.getOwnersType())
            .withStatic(true)
            .withCallHandler(( ctx, args ) -> new SQLRecord(((ISQLTableType) getOwnersType()).getTable().getTableName(), "id"))
            .build(this);
    }

    /** Generates the static {@code where(condition)} query-builder method. */
    private IMethodInfo generateWhereMethod() {
        return new MethodInfoBuilder()
            .withName("where")
            .withDescription("Creates a new table query")
            .withParameters(new ParameterInfoBuilder().withName("condition").withType(TypeSystem.get(SQLConstraint.class)))
            .withReturnType(JavaTypes.ITERABLE().getParameterizedType(this.getOwnersType()))
            .withStatic(true)
            .withCallHandler(( ctx, args ) -> new SQLQuery<SQLRecord>(_md, this.getOwnersType()).where((SQLConstraint) args[0]))
            .build(this);
    }

    /**
     * Resolves the optional user-written domain-logic type for this table,
     * looked up at {@code <namespace>Extensions.<RelativeName>Ext}.
     * TODO singularize fqn properly
     * @return the domain-logic type, or null when none exists
     */
    private IType maybeGetDomainLogic() {
        ISQLTableType tableType = (ISQLTableType) getOwnersType();
        final String domainLogicPackageSuffix = "Extensions."; //TODO make constant
        final String domainLogicTableSuffix = "Ext"; //TODO make constant
        // NOTE(review): assumes getNamespace() ends without a trailing separator
        // being needed before "Extensions." — confirm against caller packages.
        final String domainLogicFqn = tableType.getNamespace() + domainLogicPackageSuffix + tableType.getRelativeName() + domainLogicTableSuffix;
        return TypeSystem.getByFullNameIfValid(domainLogicFqn);
    }

    /**
     * Returns synthetic copies of all public, non-property methods declared on
     * the domain-logic type, or an empty list when there is none.
     */
    private List<? extends IMethodInfo> maybeGetDomainMethods() {
        List<IMethodInfo> methodList = Collections.emptyList();
        final IType domainLogic = maybeGetDomainLogic();
        if (domainLogic != null) {
            methodList = new ArrayList<>();
            final IRelativeTypeInfo domainLogicTypeInfo = (IRelativeTypeInfo) domainLogic.getTypeInfo();
            List<? extends IMethodInfo> domainMethods = domainLogicTypeInfo.getDeclaredMethods()
                .stream()
                .filter(IAttributedFeatureInfo::isPublic)
                // property accessors are surfaced via maybeGetDomainProperties()
                .filter(method -> !method.getName().startsWith("@"))
                .collect(Collectors.toList());
            for (IMethodInfo method : domainMethods) {
                final IParameterInfo[] params = method.getParameters();
                ParameterInfoBuilder[] paramInfos = new ParameterInfoBuilder[params.length];
                for(int i = 0; i < params.length; i++) {
                    IParameterInfo param = params[i];
                    paramInfos[i] = new ParameterInfoBuilder().like(param);
                }
                IMethodInfo syntheticMethod = new MethodInfoBuilder()
                    .withName(method.getDisplayName())
                    .withDescription(method.getDescription())
                    .withParameters(paramInfos)
                    .withReturnType(method.getReturnType())
                    .withStatic(method.isStatic())
                    .withCallHandler(method.getCallHandler())
                    .build(this);
                methodList.add(syntheticMethod);
            }
        }
        return methodList;
    }

    /**
     * Returns synthetic copies of all public properties declared on the
     * domain-logic type, or an empty list when there is none.
     */
    private List<? extends IPropertyInfo> maybeGetDomainProperties() {
        List<IPropertyInfo> propertyList = Collections.emptyList();
        final IType domainLogic = maybeGetDomainLogic();
        if (domainLogic != null) {
            propertyList = new ArrayList<>();
            final IRelativeTypeInfo domainLogicTypeInfo = (IRelativeTypeInfo) domainLogic.getTypeInfo();
            List<? extends IPropertyInfo> domainProperties = domainLogicTypeInfo.getDeclaredProperties()
                .stream()
                .filter(IAttributedFeatureInfo::isPublic)
                .collect(Collectors.toList());
            for (IPropertyInfo prop : domainProperties) {
                IPropertyInfo syntheticProperty = new PropertyInfoBuilder()
                    .withName(prop.getName())
                    .withDescription(prop.getDescription())
                    .withStatic(prop.isStatic())
                    .withWritable(prop.isWritable())
                    .withType(prop.getFeatureType())
                    .withAccessor(prop.getAccessor())
                    .build(this);
                propertyList.add(syntheticProperty);
            }
        }
        return propertyList;
    }
}
|
ragnardb/src/main/java/ragnardb/plugin/SQLTableTypeInfo.java
|
package ragnardb.plugin;
import gw.lang.reflect.ConstructorInfoBuilder;
import gw.lang.reflect.IConstructorInfo;
import gw.lang.reflect.IMethodInfo;
import gw.lang.reflect.MethodInfoBuilder;
import gw.lang.reflect.MethodList;
import gw.lang.reflect.ParameterInfoBuilder;
import gw.lang.reflect.TypeSystem;
import gw.lang.reflect.java.JavaTypes;
import ragnardb.runtime.SQLConstraint;
import ragnardb.runtime.SQLMetadata;
import ragnardb.runtime.SQLQuery;
import ragnardb.runtime.SQLRecord;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
public class SQLTableTypeInfo extends SQLBaseTypeInfo {

    public SQLTableTypeInfo(ISQLTableType type) {
        super(type);
        resolveProperties(type);
    }

    /**
     * Builds the property, method, and constructor lists for this table type
     * from the table's column definitions.
     */
    private void resolveProperties( ISQLTableType type ) {
        _propertiesList = new ArrayList<>();
        _propertiesMap = new HashMap<>();
        List<ColumnDefinition> columns = type.getColumnDefinitions();
        for(ColumnDefinition column : columns) {
            // one bound property per table column
            SQLColumnPropertyInfo prop = new SQLColumnPropertyInfo(column.getColumnName(), column.getPropertyName(),
                getGosuType(column.getSQLType()), this, column.getOffset(), column.getLength());
            _propertiesMap.put(prop.getName(), prop);
            _propertiesList.add( prop );
        }
        _methodList = createMethodInfos();
        _constructorList = createConstructorInfos();
    }

    @Override
    public int getOffset() {
        return ((ISQLTableType) getOwnersType()).getTable().getOffset();
    }

    @Override
    public int getTextLength() {
        return ((ISQLTableType) getOwnersType()).getTable().getTypeName().length();
    }

    /** Creates the single no-arg constructor that yields a fresh SQLRecord. */
    private List<IConstructorInfo> createConstructorInfos() {
        List<IConstructorInfo> constructorInfos = new ArrayList<>();
        IConstructorInfo constructorMethod = new ConstructorInfoBuilder()
            .withDescription( "Creates a new Table object" )
            .withParameters()
            .withConstructorHandler((args) -> new SQLRecord(((ISQLTableType) getOwnersType()).getTable().getTableName(),
                "id")).build(this);
        constructorInfos.add( constructorMethod );
        return constructorInfos;
    }

    /**
     * create a "findBy***" method for each property/column
     * @return list of generated finder/create/init/select/where methods
     */
    private MethodList createMethodInfos() { //MethodList#add(IMethodInfo)
        MethodList result = new MethodList();
        SQLMetadata md = new SQLMetadata();
        for(String propertyName : _propertiesMap.keySet()) {
            SQLColumnPropertyInfo prop = (SQLColumnPropertyInfo) _propertiesMap.get(propertyName);
            // NOTE(review): "findBy" name is not capitalized, while "findAllBy"
            // below capitalizes the property name — confirm intended.
            String name = "findBy" + prop.getName();
            IMethodInfo findByMethod = new MethodInfoBuilder()
                .withName( name )
                .withDescription("Find single match based on the value of the " + propertyName + " column.")
                .withParameters(new ParameterInfoBuilder()
                    .withName(propertyName)
                    .withType(prop.getFeatureType())
                    .withDescription("Performs strict matching on this argument"))
                .withReturnType(this.getOwnersType())
                .withStatic(true)
                .withCallHandler((ctx, args) -> {
                    SQLQuery query = new SQLQuery(md, getOwnersType());
                    SQLConstraint constraint = SQLConstraint.isEqualTo(prop, args[0]);
                    query = query.where(constraint);
                    // NOTE(review): query.iterator() is invoked twice here, which
                    // creates two iterators and may execute the query twice.
                    return query.iterator().hasNext() ? query.iterator().next() : null;
                }) // as opposed to { return null; }
                .build(this);
            result.add(findByMethod);
            //Now we add the findAllBy
            IMethodInfo findAllByMethod = new MethodInfoBuilder()
                .withName("findAllBy" + propertyName.substring(0,1).toUpperCase()+propertyName.substring(1))
                .withDescription("Find all matches based on the value of the " + propertyName + " column.")
                .withParameters(new ParameterInfoBuilder()
                    .withName(propertyName)
                    .withType(prop.getFeatureType())
                    .withDescription("Performs strict matching on this argument"))
                .withReturnType(JavaTypes.ITERABLE().getParameterizedType(this.getOwnersType()))
                .withStatic(true)
                .withCallHandler((ctx, args) -> {
                    SQLQuery query = new SQLQuery(md, getOwnersType());
                    SQLConstraint constraint = SQLConstraint.isEqualTo(prop, args[0]);
                    query = query.where(constraint);
                    // lazily evaluated: callers iterate the returned query
                    return query;
                })
                .build(this);
            result.add(findAllByMethod);
        }
        //Now we add a create method to allow insertions
        IMethodInfo createMethod = new MethodInfoBuilder()
            .withName("create")
            .withDescription("Creates a new table entry")
            .withParameters()
            .withReturnType(this.getOwnersType())
            .withCallHandler((ctx, args) -> ((SQLRecord) ctx).create())
            .build(this);
        result.add(createMethod);
        // static factory returning a fresh, unsaved record
        IMethodInfo initMethod = new MethodInfoBuilder()
            .withName("init")
            .withDescription("Creates a new table entry")
            .withParameters()
            .withReturnType(this.getOwnersType())
            .withStatic(true)
            .withCallHandler((ctx, args) -> new SQLRecord(((ISQLTableType) getOwnersType()).getTable().getTableName(), "id"))
            .build(this);
        result.add(initMethod);
        // unconstrained query over the whole table
        IMethodInfo selectMethod = new MethodInfoBuilder()
            .withName("select")
            .withDescription("Creates a new table query")
            .withParameters()
            .withReturnType(JavaTypes.getGosuType(SQLQuery.class).getParameterizedType(this.getOwnersType()))
            .withStatic(true)
            .withCallHandler((ctx, args) -> new SQLQuery<SQLRecord>(md, this.getOwnersType()))
            .build(this);
        result.add(selectMethod);
        // query constrained by the supplied SQLConstraint
        IMethodInfo whereMethod = new MethodInfoBuilder()
            .withName("where")
            .withDescription("Creates a new table query")
            .withParameters(new ParameterInfoBuilder().withName("condition").withType(TypeSystem.get(SQLConstraint.class)))
            .withReturnType(JavaTypes.getGosuType(SQLQuery.class).getParameterizedType(this.getOwnersType()))
            .withStatic(true)
            .withCallHandler((ctx, args) -> new SQLQuery<SQLRecord>(md, this.getOwnersType()).where((SQLConstraint) args[0]))
            .build(this);
        result.add(whereMethod);
        return result;
    }
}
|
First pass at merging domain logic
|
ragnardb/src/main/java/ragnardb/plugin/SQLTableTypeInfo.java
|
First pass at merging domain logic
|
|
Java
|
bsd-2-clause
|
a0721a5e67d3930f0278dfaad9f46b08cf6d98aa
| 0
|
bnanes/slideset
|
package edu.emory.cellbio.ijbat.ui;
import edu.emory.cellbio.ijbat.SlideSet;
import edu.emory.cellbio.ijbat.dm.ColumnBoundReader;
import edu.emory.cellbio.ijbat.dm.ColumnBoundWriter;
import edu.emory.cellbio.ijbat.dm.DataElement;
import edu.emory.cellbio.ijbat.dm.DataTypeIDService;
import edu.emory.cellbio.ijbat.dm.FileLinkElement;
import edu.emory.cellbio.ijbat.dm.MIME;
import edu.emory.cellbio.ijbat.dm.read.SVGFileToAbstractOverlayReader;
import edu.emory.cellbio.ijbat.dm.write.AbstractOverlaysToSVGFileWriter;
import edu.emory.cellbio.ijbat.ex.ImgLinkException;
import edu.emory.cellbio.ijbat.ex.LinkNotFoundException;
import edu.emory.cellbio.ijbat.ex.OperationCanceledException;
import edu.emory.cellbio.ijbat.ex.RoiLinkException;
import edu.emory.cellbio.ijbat.ex.SlideSetException;
import java.awt.Point;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.WindowAdapter;
import java.awt.event.WindowEvent;
import java.io.File;
import java.lang.reflect.InvocationTargetException;
import java.util.ArrayList;
import java.util.List;
import javax.swing.Box;
import javax.swing.BoxLayout;
import javax.swing.DefaultComboBoxModel;
import javax.swing.JButton;
import javax.swing.JComboBox;
import javax.swing.JFileChooser;
import javax.swing.JFrame;
import javax.swing.JOptionPane;
import javax.swing.JPanel;
import javax.swing.SwingUtilities;
import javax.swing.WindowConstants;
import javax.swing.filechooser.FileNameExtensionFilter;
import net.imagej.Data;
import net.imagej.Dataset;
import net.imagej.ImageJ;
import net.imagej.display.ColorMode;
import net.imagej.display.DataView;
import net.imagej.display.DatasetView;
import net.imagej.display.ImageDisplay;
import net.imagej.display.ImageDisplayService;
import net.imagej.display.OverlayService;
import net.imagej.event.OverlayCreatedEvent;
import net.imagej.event.OverlayDeletedEvent;
import net.imagej.event.OverlayRestructuredEvent;
import net.imagej.event.OverlayUpdatedEvent;
import net.imagej.overlay.AbstractOverlay;
import net.imagej.overlay.Overlay;
import net.imagej.ui.swing.commands.OverlayManager;
import net.imagej.ui.swing.sdi.viewer.SwingSdiImageDisplayViewer;
import net.imagej.ui.swing.viewer.image.SwingImageDisplayViewer;
import org.scijava.command.CommandInfo;
import org.scijava.command.CommandService;
import org.scijava.event.EventHandler;
import org.scijava.event.EventSubscriber;
import org.scijava.object.event.ObjectEvent;
import org.scijava.ui.UserInterface;
import org.scijava.ui.swing.SwingUI;
import org.scijava.ui.swing.viewer.SwingDisplayWindow;
import org.scijava.ui.viewer.DisplayWindow;
/**
* Editor for ROI set files.
*
* @author Benjamin Nanes
*/
public class RoiEditor extends JFrame
implements ActionListener, SlideSetWindow {
// -- Fields --

/** The table whose image/ROI columns are being edited */
private SlideSet slideSet;
/** Service used to look up column readers/writers */
private DataTypeIDService dtid;
/** ImageJ application context */
private ImageJ ij;
/** ImageJ overlay service */
private OverlayService os;
/** Swing user interface handle */
private UserInterface ui;
/** Reader for the selected image column; null until chosen in loadData() */
private ColumnBoundReader<? extends DataElement, Dataset> images = null;
/** One reader per ROI-set column */
private ArrayList<ColumnBoundReader> roiReaders;
/** One writer per ROI-set column, parallel to roiReaders */
private ArrayList<ColumnBoundWriter> roiWriters;
/** Names of the ROI sets */
private ArrayList<String> roiSetNames;
/** ROI sets {@code AbstractOverlay[image#][Roi#]} */
private ArrayList<AbstractOverlay[][]> roiSets;
/** Current ROI set index */
private int curRoiSet = -1;
/** Current image index */
private int curImage = 0;
// -- UI controls --
private JComboBox roiSetList;
private JButton addRoiSet;
// private JButton deleteRoiSet;
private JButton openROIManager;
private JComboBox displayMode;
private JButton changeLevels;
private JButton exportSVG;
private JComboBox imageList;
private JButton goImageNext;
private JButton goImageBack;
private JButton saveChanges;
private JButton undoChanges;
/** The image display */
private FastUpdateImageDisplay imageDisplay;
/** The image window */
private SwingDisplayWindow imageWindow;
/** The brightness/contrast dialog */
private BrightnessContrastRoi bcDialog;
/** Active flag */
private boolean active = false;
/** Busy loading an image flag */
private boolean loadingImage = false;
/** The log */
private SlideSetLog log;
/** Read-only mode */
private boolean locked = false;
/** Changed flag */
private boolean changed = false;
// -- Constructor --

/**
 * Creates the editor window (not yet visible; call showAndWait()).
 *
 * @param slideSet table containing the image and ROI columns
 * @param dtid service used to bind column readers/writers
 * @param ij ImageJ application context
 * @param log destination for user-visible messages
 * @throws IllegalArgumentException if any argument is null
 */
public RoiEditor(SlideSet slideSet, DataTypeIDService dtid,
        ImageJ ij, SlideSetLog log) {
    if(slideSet == null || dtid == null || ij == null || log == null)
        throw new IllegalArgumentException("Can't initiate with null elements");
    this.slideSet = slideSet;
    this.dtid = dtid;
    this.ij = ij;
    this.log = log;
    os = ij.get(OverlayService.class);
    ui = ij.ui().getUI(SwingUI.NAME);
    roiSetNames = new ArrayList<String>();
    roiSets = new ArrayList<AbstractOverlay[][]>();
    // closing is handled by the WindowAdapter in setActionListeners()
    setDefaultCloseOperation(WindowConstants.DO_NOTHING_ON_CLOSE);
    buildLayout();
    setActionListeners();
}
// -- Methods --
/** Run the ROI editor. Returns when finished. Use separate thread. */
public void showAndWait() {
    synchronized(this) {
        active = true;
        try {
            loadData();
        } catch(SlideSetException ex) {
            handleError(ex);
            active = false;
            return;
        }
        // loadData() may have cleared the flag (e.g. user canceled)
        if(!active) return;
        updateControls();
        setVisible(true);
    }
    // subscribe to overlay events only while the editor is running
    List<EventSubscriber<?>> subscribers = ij.event().subscribe(this);
    loadImage(curImage);
    if(imageWindow != null && imageWindow.isVisible()) {
        // position this window immediately left of the image window
        Point p = imageWindow.getLocationOnScreen();
        setLocation(Math.max(p.x - getWidth(), 0), Math.max(p.y, 0));
    }
    synchronized(this) {
        // block until the active flag is cleared and this monitor is notified
        while(active) {
            // NOTE(review): interrupts are swallowed; the loop only exits
            // when 'active' becomes false
            try{ wait(); }
            catch(InterruptedException e){}
        }
        setVisible(false);
    }
    ij.event().unsubscribe(subscribers);
}
/** Action handler; delegates to handleActionEvent, which runs off the EDT. */
@Override
public void actionPerformed(ActionEvent e) {
    handleActionEvent(e);
}
/**
 * Register a Brightness/Contrast dialog and point it at the first
 * DatasetView of the current image display, when both exist.
 */
public void registerBrightnessContrast(BrightnessContrastRoi bc) {
    bcDialog = bc;
    if(bcDialog == null || imageDisplay == null) {
        return;
    }
    // hand the dialog the first dataset view found in the display
    for(DataView view : imageDisplay) {
        if(DatasetView.class.isInstance(view)) {
            bcDialog.setView((DatasetView) view);
            return;
        }
    }
}
/** Activate read-only mode and prevent changes to ROIs */
public void lock() {
    // one-way switch: there is no corresponding unlock
    locked = true;
}
// -- Helper methods --
/** Build the window: a single vertical stack of control groups. */
private void buildLayout() {
    setLayout(new BoxLayout(getContentPane(), BoxLayout.Y_AXIS));
    add(Box.createVerticalStrut(5));
    // --- ROI set selector and related buttons ---
    roiSetList = new JComboBox();
    add(roiSetList);
    add(Box.createVerticalStrut(5));
    JPanel rsetButtons = new JPanel();
    rsetButtons.setLayout(new BoxLayout(rsetButtons, BoxLayout.Y_AXIS));
    addRoiSet = new JButton("Add ROI Set");
    // deleteRoiSet = new JButton("Delete");
    openROIManager = new JButton("ROI Manager");
    exportSVG = new JButton("Export SVG");
    // each button is centered horizontally with glue on both sides
    Box addRoiSetBox = Box.createHorizontalBox();
    addRoiSetBox.add(Box.createHorizontalGlue());
    addRoiSetBox.add(addRoiSet);
    addRoiSetBox.add(Box.createHorizontalGlue());
    rsetButtons.add(addRoiSetBox);
    rsetButtons.add(Box.createVerticalStrut(5));
    Box openROIManagerBox = Box.createHorizontalBox();
    openROIManagerBox.add(Box.createHorizontalGlue());
    openROIManagerBox.add(openROIManager);
    openROIManagerBox.add(Box.createHorizontalGlue());
    rsetButtons.add(openROIManagerBox);
    rsetButtons.add(Box.createVerticalStrut(5));
    Box exportSVGBox = Box.createHorizontalBox();
    exportSVGBox.add(Box.createHorizontalGlue());
    exportSVGBox.add(exportSVG);
    exportSVGBox.add(Box.createHorizontalGlue());
    rsetButtons.add(exportSVGBox);
    // rsetButtons.add(deleteRoiSet);
    add(rsetButtons);
    add(Box.createVerticalStrut(10));
    // --- display mode and levels controls ---
    JPanel dispButtons = new JPanel();
    dispButtons.setLayout(new BoxLayout(dispButtons, BoxLayout.Y_AXIS));
    displayMode = new JComboBox();
    displayMode.addItem("Composite");
    displayMode.addItem("Grayscale");
    displayMode.addItem("Color");
    Box modeBox = Box.createHorizontalBox();
    modeBox.add(Box.createHorizontalGlue());
    modeBox.add(displayMode);
    modeBox.add(Box.createHorizontalGlue());
    dispButtons.add(modeBox);
    changeLevels = new JButton("Levels");
    Box levBox = Box.createHorizontalBox();
    levBox.add(Box.createHorizontalGlue());
    levBox.add(changeLevels);
    levBox.add(Box.createHorizontalGlue());
    dispButtons.add(Box.createVerticalStrut(5));
    dispButtons.add(levBox);
    add(dispButtons);
    add(Box.createVerticalStrut(10));
    // --- image selector and navigation ---
    imageList = new JComboBox();
    add(imageList);
    add(Box.createVerticalStrut(5));
    goImageBack = new JButton("<<");
    goImageNext = new JButton(">>");
    JPanel imageButtons = new JPanel();
    imageButtons.setLayout(new BoxLayout(imageButtons, BoxLayout.X_AXIS));
    imageButtons.add(goImageBack);
    imageButtons.add(goImageNext);
    add(imageButtons);
    add(Box.createVerticalStrut(10));
    // --- save/undo ---
    saveChanges = new JButton("Save");
    undoChanges = new JButton("Undo");
    JPanel roiButtons = new JPanel();
    roiButtons.setLayout(new BoxLayout(roiButtons, BoxLayout.X_AXIS));
    roiButtons.add(saveChanges);
    roiButtons.add(undoChanges);
    add(roiButtons);
    add(Box.createVerticalStrut(5));
    pack();
}
/**
 * Set action listeners for the controls. Every control routes through
 * actionPerformed() with a string action command; window close maps to kill().
 */
private void setActionListeners() {
    addWindowListener(new WindowAdapter() {
        @Override
        public void windowClosing(WindowEvent e) { kill(); }
    });
    goImageBack.setActionCommand("imageBack");
    goImageBack.addActionListener(this);
    goImageNext.setActionCommand("imageNext");
    goImageNext.addActionListener(this);
    imageList.setActionCommand("imageListSelection");
    imageList.addActionListener(this);
    addRoiSet.setActionCommand("roiSetNew");
    addRoiSet.addActionListener(this);
    openROIManager.setActionCommand("openROIManager");
    openROIManager.addActionListener(this);
    exportSVG.setActionCommand("exportSVG");
    exportSVG.addActionListener(this);
    roiSetList.setActionCommand("roiSetListSelection");
    roiSetList.addActionListener(this);
    saveChanges.setActionCommand("writeRoiSets");
    saveChanges.addActionListener(this);
    undoChanges.setActionCommand("revertRoiSets");
    undoChanges.addActionListener(this);
    displayMode.setActionCommand("changeColorMode");
    displayMode.addActionListener(this);
    changeLevels.setActionCommand("changeLevels");
    changeLevels.addActionListener(this);
}
/**
 * Handle an {@code ActionEvent}: dispatch on the action-command string.
 * Runs on a fresh worker thread so the EDT is never blocked.
 */
private void handleActionEvent(final ActionEvent e) {
    (new Thread() {
        @Override
        public void run() {
            // ignore events fired while an image is still loading
            if(loadingImage)
                return;
            String ac = e.getActionCommand();
            ij.log().debug("Action command: " + ac);
            if(ac.equals("imageBack"))
                setImage(curImage - 1);
            else if(ac.equals("imageNext"))
                setImage(curImage + 1);
            else if(ac.equals("imageListSelection"))
                setImage(imageList.getSelectedIndex());
            else if(ac.equals("roiSetNew"))
                createRoiSet();
            else if(ac.equals("roiSetListSelection"))
                setROISet(roiSetList.getSelectedIndex());
            else if(ac.equals("writeRoiSets"))
                writeOverlays();
            else if(ac.equals("revertRoiSets"))
                revertOverlays();
            else if(ac.equals("openROIManager"))
                openROIManager();
            else if(ac.equals("exportSVG"))
                exportSVG();
            else if(ac.equals("changeColorMode"))
                changeDisplayMode();
            else if(ac.equals("changeLevels"))
                changeLevels();
            // unknown commands are silently ignored
        }
    }).start();
}
/**
 * Load the internal data: pick the image column (asking the user when the
 * table has several), then load the ROI sets from disk.
 *
 * @throws SlideSetException when the table has no image column or the user
 *         cancels the column-selection dialog
 */
private void loadData() throws SlideSetException {
    ArrayList<ColumnBoundReader> iCbrs;
    iCbrs = dtid.getCompatableColumnReaders(Dataset.class, slideSet);
    if(iCbrs == null || iCbrs.isEmpty()) {
        JOptionPane.showMessageDialog(this,
            "This table does not contain any images. "
            + "Cannot create ROIs.",
            "SlideSet - ROI Editor",
            JOptionPane.ERROR_MESSAGE);
        active = false;
        throw new OperationCanceledException("No images in table.");
    }
    if(iCbrs.size() > 1) {
        // several image columns: let the user choose one
        int choices = iCbrs.size();
        String[] names = new String[choices];
        for(int i=0; i<choices; i++)
            names[i] = String.valueOf(i+1)
                + ": " + iCbrs.get(i).getColumnName();
        Object choice = JOptionPane.showInputDialog(this,
            "Select images on which ROIs will be drawn:",
            "SlideSet - ROI Editor",
            JOptionPane.PLAIN_MESSAGE,
            null, names, names[0]);
        if(choice == null)
            { throw new OperationCanceledException("No images selected"); }
        // assign-then-test: 'images' ends up on the chosen reader because
        // 'choice' always matches one of the generated names
        for(int i=0; i<choices; i++) {
            images = iCbrs.get(i);
            if(names[i].equals(choice))
                break;
        }
    }
    else images = iCbrs.get(0);
    loadOverlays();
}
/**
 * Load overlay data from disk: bind a reader/writer pair for every
 * ROI-set column, fill in missing column defaults (path, link prefix,
 * extension), then read each row's overlays. Rows that fail to read are
 * stored as null and logged rather than aborting the load.
 */
private void loadOverlays() throws SlideSetException {
    if(roiReaders == null)
        roiReaders = new ArrayList<ColumnBoundReader>();
    if(roiWriters == null)
        roiWriters = new ArrayList<ColumnBoundWriter>();
    dtid.getColumnReadWritePairs(
        AbstractOverlay[].class, slideSet, roiReaders, roiWriters);
    for(int u = 0; u < roiReaders.size(); u++) {
        final int i = roiReaders.get(u).getColumnNum();
        roiSetNames.add(roiReaders.get(u).getColumnName());
        // fill in column defaults only when they are missing
        final String defp = slideSet.getColumnDefaultPath(i);
        if(defp == null || defp.isEmpty())
            slideSet.setColumnDefaultPath(i, "roi");
        final String dlp = slideSet.getDefaultLinkPrefix(i);
        if(dlp == null || dlp.isEmpty())
            slideSet.setDefaultLinkPrefix(
                i, slideSet.getColumnName(i).replaceAll("\\W", "-"));
        final String dlex = slideSet.getDefaultLinkExtension(i);
        if(dlex == null || dlex.isEmpty()) {
            // SVG columns get .svg, everything else .roiset
            if(slideSet.getColumnMimeType(i).equals(MIME.SVG))
                slideSet.setDefaultLinkExtension(i, "svg");
            else
                slideSet.setDefaultLinkExtension(i, "roiset");
        }
        AbstractOverlay[][] set = new AbstractOverlay[slideSet.getNumRows()][];
        for(int j=0; j<slideSet.getNumRows(); j++) {
            try{
                set[j] = (AbstractOverlay[]) roiReaders.get(u).read(j);
            } catch(LinkNotFoundException e) {
                log.println("\nWarning: Could not find ROI set file \""
                    + slideSet.getItemText(i, j) + "\"");
                set[j] = null;
            } catch (RoiLinkException e) {
                log.println("\nError: Could not read ROI set file!");
                log.println("# This could be because the file specified");
                log.println("# is not really an ROI set file.");
                handleError(e);
                set[j] = null;
            } catch (Exception ex) {
                log.println("\nWarning: Unable to read ROI set.");
                handleError(ex);
                set[j] = null;
            }
        }
        roiSets.add(set);
    }
    if(!roiSets.isEmpty())
        curRoiSet = 0;
}
/**
* Update the state of the controls
* Do NOT call from the event dispatch thread.
*/
private void updateControls() {
try {
SwingUtilities.invokeAndWait( new Thread() {
@Override
public void run() {
roiSetList.setModel(
new DefaultComboBoxModel(getRoiSetNames()));
roiSetList.setSelectedIndex(curRoiSet);
imageList.setModel(
new DefaultComboBoxModel(getImageNames()));
imageList.setSelectedIndex(curImage);
}
});
} catch(Exception e) {
throw new IllegalArgumentException(e);
}
}
/**
* Get the names of available ROI sets to put in the list,
* prefixed by the row number to avoid name duplications
* which cause problems with {@code DefaultComboBoxModel}.
*/
private String[] getRoiSetNames() {
String[] names = new String[roiSetNames.size()];
names = roiSetNames.toArray(names);
for(int i=0; i<names.length; i++)
names[i] = String.valueOf(i+1) + ": " + names[i];
return names;
}
/**
* Get the short names of image files to put in the list,
* prefixed by the row number to avoid name duplications
* which cause problems with {@code DefaultComboBoxModel}.
*/
private String[] getImageNames() {
String[] names = new String[slideSet.getNumRows()];
for(int i=0; i<slideSet.getNumRows(); i++)
names[i] = String.valueOf(i+1) + ": " +
new File(slideSet.getItemText(images.getColumnNum(), i)
.toString()).getName();
return names;
}
     /**
      * Load and display the image at {@code imageIndex}.
      * Silently returns if a load is already in progress or the index is
      * out of range; reuses the displayed {@code Dataset} when the index
      * is unchanged.  Unexpected errors close the whole editor.
      */
     private void loadImage(int imageIndex) {
          Dataset ds = null;
          synchronized(this) {
               if( loadingImage
                    || imageIndex < 0
                    || imageIndex >= slideSet.getNumRows())
                    return;
               loadingImage = true;
               // Same image as before: reuse its Dataset rather than
               // re-reading the file from disk.
               if(imageIndex == curImage && imageDisplay != null) {
                    for(DataView dv : imageDisplay) {
                         Data d = dv.getData();
                         if(d instanceof Dataset)
                              ds = (Dataset) d;
                    }
               }
               curImage = imageIndex;
          }
          updateControls();
          if(ds == null) try {
               ds = images.read(imageIndex);
          } catch(LinkNotFoundException e) {
               log.println("\nError: Unable to find image \""
                    + slideSet.getItemText(
                    images.getColumnNum(), imageIndex) + "\"");
               if(imageWindow != null)
                    imageWindow.close();
               loadingImage = false;
               return;
          } catch(ImgLinkException e) {
               log.println("\nError: Unable to load image");
               log.println("# \"" +
                    slideSet.getItemText(
                    images.getColumnNum(), imageIndex) + "\"");
               log.println("# It may not be a valid image file!");
               ij.log().debug(e);
               if(imageWindow != null)
                    imageWindow.close();
               loadingImage = false;
               return;
          } catch(Throwable t) {
               // Anything else is unexpected: shut the editor down.
               log.println("\nFatal error: Unexpected problem loading image!");
               ij.log().debug(t);
               kill();
               return;
          }
          // Replace the old display with a fresh one showing this image.
          if(imageDisplay != null)
               imageDisplay.close();
          imageDisplay = new FastUpdateImageDisplay();
          ij.getContext().inject(imageDisplay);
          imageDisplay.display(ds);
          createImageWindow();
          imageDisplay.update();
          drawOverlays();
          changeDisplayMode();
          registerBrightnessContrast(bcDialog);
          imageWindow.setTitle("ROI Editor");
          loadingImage = false;
     }
/** Update the color mode */
private void changeDisplayMode() {
ColorMode m;
switch(displayMode.getSelectedIndex()) {
default:
case 0:
m = ColorMode.COMPOSITE;
break;
case 1:
m = ColorMode.GRAYSCALE;
break;
case 2:
m = ColorMode.COLOR;
}
for(DataView v : imageDisplay) {
if(!DatasetView.class.isInstance(v)) continue;
((DatasetView)v).setColorMode(m);
}
imageDisplay.update();
}
/** Open the Brightness/Contrast dialog */
private void changeLevels() {
CommandInfo bci = ij.command().getCommand(BrightnessContrastRoi.class);
//CommandInfo bci = ij.command().getCommand(net.imagej.plugins.commands.display.interactive.BrightnessContrast.class);
ij.command().run(bci, true, "roiEditor", this);
}
/** Create the image window */
private void createImageWindow() {
try {
ij.thread().invoke( new Thread() {
@Override
public void run() {
SwingImageDisplayViewer idv = new SwingSdiImageDisplayViewer();
idv.setContext(ij.getContext());
if(!idv.canView(imageDisplay) || !idv.isCompatible(ui))
throw new IllegalArgumentException("Viewer problem");
final DisplayWindow dw = ui.createDisplayWindow(imageDisplay);
if(!(dw instanceof SwingDisplayWindow))
throw new IllegalArgumentException("Must run in a windowed environment!");
imageWindow = (SwingDisplayWindow) dw;
idv.view(imageWindow, imageDisplay);
ij.ui().addDisplayViewer(idv);
imageWindow.addWindowListener(
new WindowAdapter() {
@Override
public void windowClosing(WindowEvent e) {
if(active) kill();
}});
imageWindow.showDisplay(true);
}
});
} catch (InterruptedException e) {
throw new IllegalArgumentException(e);
} catch (InvocationTargetException e) {
throw new IllegalArgumentException(e);
}
imageWindow.setLocationRelativeTo(null);
}
     /**
      * Draw the current ROI set's overlays for the current image onto the
      * display, replacing whatever overlays were shown before.
      */
     private void drawOverlays() {
          if(imageDisplay == null)
               return;
          if(curRoiSet < 0 || curRoiSet >= roiSets.size())
               return;
          if(!ImageDisplay.class.isInstance(imageDisplay))
               throw new IllegalArgumentException("Bad display type.");
          // Clear old overlays, then add the stored ones in bulk.
          imageDisplay.clearOverlaysFast();
          imageDisplay.update();
          Overlay[] overlays = roiSets.get(curRoiSet)[curImage];
          ImageDisplayService ids = ij.get(ImageDisplayService.class);
          if(overlays != null)
               for(int i = 0; i < overlays.length; i++)
                    imageDisplay.addFast(overlays[i], ids);
          imageDisplay.rebuildNow();
          imageDisplay.update();
     }
/** Save overlays drawn on the current image to memory, not to disk. */
private void saveOverlays() {
if(locked) return; // Don't save changes if read-only
if(curRoiSet < 0 || curRoiSet >= roiSets.size())
return;
if(imageDisplay == null || imageDisplay.isEmpty())
return;
if(!ImageDisplay.class.isInstance(imageDisplay)) {
log.println("\nError: Unable to record overlays.");
log.println("# There is not a valid display open.");
}
List<Overlay> overlays = os.getOverlays((ImageDisplay) imageDisplay);
if(overlays.isEmpty()) {
roiSets.get(curRoiSet)[curImage] = null;
return;
}
ArrayList<AbstractOverlay> overCast =
new ArrayList<AbstractOverlay>(overlays.size()+2);
for(Overlay o : overlays)
if(AbstractOverlay.class.isInstance(o) &&
!overCast.contains((AbstractOverlay) o)) //<<< A bit hacky...
overCast.add((AbstractOverlay) o);
roiSets.get(curRoiSet)[curImage] =
overCast.toArray(new AbstractOverlay[overCast.size()]);
}
     /**
      * Write every ROI set to disk, creating default file links for rows
      * that do not have one yet.  Shows a warning dialog and does nothing
      * when the editor is locked read-only.
      */
     private void writeOverlays() {
          if(locked) { // Don't save changes if read-only. Here we'll let the user know.
               JOptionPane.showMessageDialog(this, "This ROI set is locked. Unable to save changes.", "Slide Set", JOptionPane.ERROR_MESSAGE);
               return;
          }
          saveOverlays();
          if(roiSets.isEmpty())
               return;
          for(int i=0; i<roiSets.size(); i++) {
               for(int row=0; row < slideSet.getNumRows(); row++) {
                    final ColumnBoundWriter w = roiWriters.get(i);
                    try {
                         String dest =
                              slideSet.getItemText(w.getColumnNum(), row);
                         // No link for this row yet: create the default one.
                         if(dest == null || dest.isEmpty()) {
                              slideSet.makeDefaultLink(w.getColumnNum(), row);
                              dest =
                                   slideSet.getItemText(w.getColumnNum(), row);
                         }
                         // SVG writers also get the image path so the SVG
                         // can reference the underlying image.
                         if(w.getWriter()
                              instanceof AbstractOverlaysToSVGFileWriter) {
                              final String imgpath
                                   = slideSet.resolvePath(
                                   slideSet.getItemText(images.getColumnNum(), row));
                              dest = slideSet.resolvePath(dest);
                              final AbstractOverlaysToSVGFileWriter aosvg
                                   = (AbstractOverlaysToSVGFileWriter) w.getWriter();
                              aosvg.write(roiSets.get(i)[row], dest, -1, -1, imgpath);
                         }
                         else
                              w.write(roiSets.get(i)[row], row);
                         changed = false; // disk now matches memory
                    } catch(LinkNotFoundException e) {
                         log.println("\nError: \""
                              + slideSet.getItemText(w.getColumnNum(), row)
                              + "\"");
                         log.println("# is not a valid path, so the");
                         log.println("# ROI set cannot be saved!");
                    } catch(SlideSetException e) {
                         log.println("\nError: Unable to save ROI set!");
                         handleError(e);
                    }
               }
          }
     }
/** With user confirmation, revert overlays to last saved version. */
private void revertOverlays() {
if( JOptionPane.showConfirmDialog(this,
"Revert all regions of interest to last saved version?",
"ROI Editor", JOptionPane.YES_NO_OPTION)
!= JOptionPane.YES_OPTION )
return;
try {
loadOverlays();
} catch(Exception e) {
handleError(e);
}
loadImage(curImage);
}
     /**
      * Create a new, empty ROI set column (SVG file links) and switch the
      * editor to it.  Prompts for a name; a blank name becomes "ROI".
      */
     private void createRoiSet() {
          String name = JOptionPane.showInputDialog(this, "New ROI set name:");
          if(name == null)
               return;
          name = name.trim();
          name = name.equals("") ? "ROI" : name;
          int colI;
          try {
               colI = slideSet.addColumn(name, FileLinkElement.class, MIME.SVG);
          } catch(Exception e) {
               handleError(e);
               return;
          }
          roiSetNames.add(name);
          roiSets.add(new AbstractOverlay[slideSet.getNumRows()][]);
          roiReaders.add(
               new ColumnBoundReader(slideSet, colI,
               new SVGFileToAbstractOverlayReader()));
          roiWriters.add(
               new ColumnBoundWriter(slideSet, colI,
               new AbstractOverlaysToSVGFileWriter()));
          // Default file-naming scheme for the new column
          slideSet.setColumnDefaultPath(colI, "roi");
          slideSet.setDefaultLinkPrefix(colI, name.replaceAll("\\W", "-"));
          slideSet.setDefaultLinkExtension(colI, "svg");
          try {
               for(int i=0; i<slideSet.getNumRows(); i++)
                    slideSet.makeDefaultLink(colI, i);
          } catch(SlideSetException e) {
               handleError(e);
          }
          curRoiSet = roiSets.size() - 1;
          updateControls();
          loadImage(curImage);
     }
     /**
      * Clean up and close the editor, offering to save unsaved changes
      * first (only when live, editable, and dirty).
      */
     @Override
     public void kill() {
          ij.log().debug("Closing ROI editor");
          if(active && (!locked) && changed &&
               JOptionPane.showConfirmDialog(this, "Save changes?",
               "ROI Editor", JOptionPane.YES_NO_OPTION)
               == JOptionPane.YES_OPTION) {
               saveOverlays();
               writeOverlays();
          }
          synchronized(this) {
               active = false;
               setVisible(false);
               if(imageWindow != null && imageWindow.isVisible())
                    imageWindow.dispose();
               dispose();
               notifyAll(); // wake the wait() loop in showAndWait()
          }
     }
/**
* Change display to the image with the given {@code index} in the list.
*/
private void setImage(int index) {
if(index >= imageList.getItemCount())
index = 0;
if(index < 0)
index = imageList.getItemCount() - 1;
if(index == curImage)
return;
saveOverlays();
loadImage(index);
}
/**
* Load the selected ROI set into the display.
*/
private void setROISet(int index) {
if(index >= roiSetList.getItemCount())
index = 0;
else if(index < 0)
index = roiSetList.getItemCount() - 1;
if(index == curRoiSet)
return;
saveOverlays();
curRoiSet = index;
updateControls();
loadImage(curImage);
}
/** Open the ImageJ overlay manager window */
private void openROIManager() {
CommandService cs = os.getContext().getService(CommandService.class);
try {
cs.run(OverlayManager.class, true, new Object[0]);
} catch(Exception e) {
log.println("\nUnable to open ROI Manager window.");
handleError(e);
}
}
     /** Flag possible unsaved changes when an ROI is modified. */
     @EventHandler
     private void onEvent(OverlayUpdatedEvent e) {
          flagOverlayChanges(e);
     }
     /** Flag possible unsaved changes when an ROI is restructured. */
     @EventHandler
     private void onEvent(OverlayRestructuredEvent e) {
          flagOverlayChanges(e);
     }
     /** Flag possible unsaved changes when an ROI is created. */
     @EventHandler
     private void onEvent(OverlayCreatedEvent e) {
          flagOverlayChanges(e);
     }
     /** Flag possible unsaved changes when an ROI is deleted. */
     @EventHandler
     private void onEvent(OverlayDeletedEvent e) {
          flagOverlayChanges(e);
     }
     /**
      * Decide whether an overlay event should mark the editor dirty
      * ({@code changed}).  Creation events count whenever no image is
      * mid-load, since they fire before the overlay joins the display;
      * other events count only for objects currently in the display.
      */
     private void flagOverlayChanges(ObjectEvent e) {
          if(imageDisplay==null)
               return;
          if(e instanceof OverlayCreatedEvent && (!loadingImage)) {
               changed = true; // A bit hacky and non-specific, but the OverlayCreatedEvent is fired before the overlay is added to the display!
               return;
          }
          for(DataView dv : imageDisplay) {
               if(e.getObject() == dv.getData())
                    changed = true;
          }
     }
private void exportSVG() {
saveOverlays();
JFileChooser fc = new JFileChooser(slideSet.getWorkingDirectory());
fc.setDialogType(JFileChooser.SAVE_DIALOG);
fc.setDialogTitle("Save ROIs as...");
fc.setFileFilter(new FileNameExtensionFilter("SVG file", "svg"));
fc.setSelectedFile(new File("ROI" + ".svg"));
final int r = fc.showDialog(this, "Save");
if(r != JFileChooser.APPROVE_OPTION)
return;
final File path = fc.getSelectedFile();
if(path == null)
return;
if(path.exists()
&& JOptionPane.showConfirmDialog(this,
"File exists. OK to overwrite?",
"Slide Set", JOptionPane.OK_CANCEL_OPTION)
!= JOptionPane.OK_OPTION )
return;
try {
int w = new Double(((ImageDisplay) imageDisplay)
.getPlaneExtents().width).intValue() + 1; //Not sure why, but needs to be corrected...
int h = new Double(((ImageDisplay) imageDisplay)
.getPlaneExtents().height).intValue() + 1;
String imgPath = slideSet.getItemText(
images.getColumnNum(), curImage);
if(!(new File(imgPath)).isAbsolute())
imgPath = slideSet.getWorkingDirectory() + File.separator + imgPath;
new AbstractOverlaysToSVGFileWriter()
.write(roiSets.get(curRoiSet)[curImage],
path.getAbsolutePath(),
w, h, imgPath);
} catch(Exception e) {
handleError(e);
}
}
private void handleError(Exception e) {
log.println(e.getLocalizedMessage());
ij.log().debug(e);
}
}
|
src/main/java/edu/emory/cellbio/ijbat/ui/RoiEditor.java
|
package edu.emory.cellbio.ijbat.ui;
import edu.emory.cellbio.ijbat.SlideSet;
import edu.emory.cellbio.ijbat.dm.ColumnBoundReader;
import edu.emory.cellbio.ijbat.dm.ColumnBoundWriter;
import edu.emory.cellbio.ijbat.dm.DataElement;
import edu.emory.cellbio.ijbat.dm.DataTypeIDService;
import edu.emory.cellbio.ijbat.dm.FileLinkElement;
import edu.emory.cellbio.ijbat.dm.MIME;
import edu.emory.cellbio.ijbat.dm.read.SVGFileToAbstractOverlayReader;
import edu.emory.cellbio.ijbat.dm.write.AbstractOverlaysToSVGFileWriter;
import edu.emory.cellbio.ijbat.ex.ImgLinkException;
import edu.emory.cellbio.ijbat.ex.LinkNotFoundException;
import edu.emory.cellbio.ijbat.ex.OperationCanceledException;
import edu.emory.cellbio.ijbat.ex.RoiLinkException;
import edu.emory.cellbio.ijbat.ex.SlideSetException;
import java.awt.Point;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.WindowAdapter;
import java.awt.event.WindowEvent;
import java.io.File;
import java.lang.reflect.InvocationTargetException;
import java.util.ArrayList;
import java.util.List;
import javax.swing.Box;
import javax.swing.BoxLayout;
import javax.swing.DefaultComboBoxModel;
import javax.swing.JButton;
import javax.swing.JComboBox;
import javax.swing.JFileChooser;
import javax.swing.JFrame;
import javax.swing.JOptionPane;
import javax.swing.JPanel;
import javax.swing.SwingUtilities;
import javax.swing.WindowConstants;
import javax.swing.filechooser.FileNameExtensionFilter;
import net.imagej.Data;
import net.imagej.Dataset;
import net.imagej.ImageJ;
import net.imagej.display.ColorMode;
import net.imagej.display.DataView;
import net.imagej.display.DatasetView;
import net.imagej.display.ImageDisplay;
import net.imagej.display.ImageDisplayService;
import net.imagej.display.OverlayService;
import net.imagej.overlay.AbstractOverlay;
import net.imagej.overlay.Overlay;
import net.imagej.ui.swing.commands.OverlayManager;
import net.imagej.ui.swing.sdi.viewer.SwingSdiImageDisplayViewer;
import net.imagej.ui.swing.viewer.image.SwingImageDisplayViewer;
import org.scijava.command.CommandInfo;
import org.scijava.command.CommandService;
import org.scijava.ui.UserInterface;
import org.scijava.ui.swing.SwingUI;
import org.scijava.ui.swing.viewer.SwingDisplayWindow;
import org.scijava.ui.viewer.DisplayWindow;
/**
* Editor for ROI set files.
*
* @author Benjamin Nanes
*/
public class RoiEditor extends JFrame
implements ActionListener, SlideSetWindow {
     // -- Fields --
     /** Data table whose images are being annotated */
     private SlideSet slideSet;
     /** Service used to find compatible column readers/writers */
     private DataTypeIDService dtid;
     /** ImageJ application context */
     private ImageJ ij;
     /** ImageJ overlay service */
     private OverlayService os;
     /** Swing user-interface handle */
     private UserInterface ui;
     /** Reader for the image column ROIs are drawn on */
     private ColumnBoundReader<? extends DataElement, Dataset> images = null;
     /** Readers for the ROI-set columns (parallel to {@code roiWriters}) */
     private ArrayList<ColumnBoundReader> roiReaders;
     /** Writers for the ROI-set columns (parallel to {@code roiReaders}) */
     private ArrayList<ColumnBoundWriter> roiWriters;
     /** Names of the ROI sets */
     private ArrayList<String> roiSetNames;
     /** ROI sets {@code AbstractOverlay[image#][Roi#]} */
     private ArrayList<AbstractOverlay[][]> roiSets;
     /** Current ROI set index */
     private int curRoiSet = -1;
     /** Current image index */
     private int curImage = 0;
     // Controls:
     private JComboBox roiSetList;
     private JButton addRoiSet;
     // private JButton deleteRoiSet;
     private JButton openROIManager;
     private JComboBox displayMode;
     private JButton changeLevels;
     private JButton exportSVG;
     private JComboBox imageList;
     private JButton goImageNext;
     private JButton goImageBack;
     private JButton saveChanges;
     private JButton undoChanges;
     /** The image display */
     private FastUpdateImageDisplay imageDisplay;
     /** The image window */
     private SwingDisplayWindow imageWindow;
     /** The brightness/contrast dialog */
     private BrightnessContrastRoi bcDialog;
     /** Active flag */
     private boolean active = false;
     /** Busy loading an image flag */
     private boolean loadingImage = false;
     /** The log */
     private SlideSetLog log;
     /** Read-only mode */
     private boolean locked = false;
     // -- Constructor --
     /**
      * @param slideSet data table whose images will be annotated
      * @param dtid service used to find column readers and writers
      * @param ij ImageJ application context
      * @param log Slide Set message log
      * @throws IllegalArgumentException if any argument is null
      */
     public RoiEditor(SlideSet slideSet, DataTypeIDService dtid,
               ImageJ ij, SlideSetLog log) {
          if(slideSet == null || dtid == null || ij == null || log == null)
               throw new IllegalArgumentException("Can't initiate with null elements");
          this.slideSet = slideSet;
          this.dtid = dtid;
          this.ij = ij;
          this.log = log;
          os = ij.get(OverlayService.class);
          ui = ij.ui().getUI(SwingUI.NAME);
          roiSetNames = new ArrayList<String>();
          roiSets = new ArrayList<AbstractOverlay[][]>();
          setDefaultCloseOperation(WindowConstants.DO_NOTHING_ON_CLOSE);
          buildLayout();
          setActionListeners();
     }
     // -- Methods --
     /**
      * Run the ROI editor and block until it is closed by {@link #kill}.
      * Call from a worker thread, not the event dispatch thread.
      */
     public void showAndWait() {
          synchronized(this) {
               active = true;
               try {
                    loadData();
               } catch(SlideSetException ex) {
                    handleError(ex);
                    active = false;
                    return;
               }
               if(!active) return;
               updateControls();
               setVisible(true);
          }
          loadImage(curImage);
          // Park the editor window just left of the image window.
          if(imageWindow != null && imageWindow.isVisible()) {
               Point p = imageWindow.getLocationOnScreen();
               setLocation(Math.max(p.x - getWidth(), 0), Math.max(p.y, 0));
          }
          synchronized(this) {
               // Block until kill() clears the active flag and notifies.
               while(active) {
                    try{ wait(); }
                    catch(InterruptedException e){}
               }
               setVisible(false);
          }
     }
     /** ActionListener entry point; work is forwarded to a worker thread. */
     @Override
     public void actionPerformed(ActionEvent e) {
          handleActionEvent(e);
     }
/** Register a Brightness/Contrast dialog */
public void registerBrightnessContrast(BrightnessContrastRoi bc) {
bcDialog = bc;
if(imageDisplay == null || bcDialog == null) return;
for(DataView v : imageDisplay) {
if(!DatasetView.class.isInstance(v)) continue;
bcDialog.setView((DatasetView)v);
return;
}
}
     /** Activate read-only mode and prevent changes to ROIs. */
     public void lock() {
          locked = true;
     }
     // -- Helper methods --
     /**
      * Build the control window: ROI-set chooser and buttons, display
      * controls, image navigation, and save/undo buttons (top to bottom).
      */
     private void buildLayout() {
          setLayout(new BoxLayout(getContentPane(), BoxLayout.Y_AXIS));
          add(Box.createVerticalStrut(5));
          // ROI set chooser
          roiSetList = new JComboBox();
          add(roiSetList);
          add(Box.createVerticalStrut(5));
          // ROI set buttons
          JPanel rsetButtons = new JPanel();
          rsetButtons.setLayout(new BoxLayout(rsetButtons, BoxLayout.Y_AXIS));
          addRoiSet = new JButton("Add ROI Set");
          // deleteRoiSet = new JButton("Delete");
          openROIManager = new JButton("ROI Manager");
          exportSVG = new JButton("Export SVG");
          Box addRoiSetBox = Box.createHorizontalBox();
          addRoiSetBox.add(Box.createHorizontalGlue());
          addRoiSetBox.add(addRoiSet);
          addRoiSetBox.add(Box.createHorizontalGlue());
          rsetButtons.add(addRoiSetBox);
          rsetButtons.add(Box.createVerticalStrut(5));
          Box openROIManagerBox = Box.createHorizontalBox();
          openROIManagerBox.add(Box.createHorizontalGlue());
          openROIManagerBox.add(openROIManager);
          openROIManagerBox.add(Box.createHorizontalGlue());
          rsetButtons.add(openROIManagerBox);
          rsetButtons.add(Box.createVerticalStrut(5));
          Box exportSVGBox = Box.createHorizontalBox();
          exportSVGBox.add(Box.createHorizontalGlue());
          exportSVGBox.add(exportSVG);
          exportSVGBox.add(Box.createHorizontalGlue());
          rsetButtons.add(exportSVGBox);
          // rsetButtons.add(deleteRoiSet);
          add(rsetButtons);
          add(Box.createVerticalStrut(10));
          // Display mode and levels controls
          JPanel dispButtons = new JPanel();
          dispButtons.setLayout(new BoxLayout(dispButtons, BoxLayout.Y_AXIS));
          displayMode = new JComboBox();
          displayMode.addItem("Composite");
          displayMode.addItem("Grayscale");
          displayMode.addItem("Color");
          Box modeBox = Box.createHorizontalBox();
          modeBox.add(Box.createHorizontalGlue());
          modeBox.add(displayMode);
          modeBox.add(Box.createHorizontalGlue());
          dispButtons.add(modeBox);
          changeLevels = new JButton("Levels");
          Box levBox = Box.createHorizontalBox();
          levBox.add(Box.createHorizontalGlue());
          levBox.add(changeLevels);
          levBox.add(Box.createHorizontalGlue());
          dispButtons.add(Box.createVerticalStrut(5));
          dispButtons.add(levBox);
          add(dispButtons);
          add(Box.createVerticalStrut(10));
          // Image chooser and back/next navigation
          imageList = new JComboBox();
          add(imageList);
          add(Box.createVerticalStrut(5));
          goImageBack = new JButton("<<");
          goImageNext = new JButton(">>");
          JPanel imageButtons = new JPanel();
          imageButtons.setLayout(new BoxLayout(imageButtons, BoxLayout.X_AXIS));
          imageButtons.add(goImageBack);
          imageButtons.add(goImageNext);
          add(imageButtons);
          add(Box.createVerticalStrut(10));
          // Save / undo buttons
          saveChanges = new JButton("Save");
          undoChanges = new JButton("Undo");
          JPanel roiButtons = new JPanel();
          roiButtons.setLayout(new BoxLayout(roiButtons, BoxLayout.X_AXIS));
          roiButtons.add(saveChanges);
          roiButtons.add(undoChanges);
          add(roiButtons);
          add(Box.createVerticalStrut(5));
          pack();
     }
     /**
      * Wire the window-close handler and the action commands for every
      * control; all commands dispatch through {@code handleActionEvent}.
      */
     private void setActionListeners() {
          addWindowListener(new WindowAdapter() {
               @Override
               public void windowClosing(WindowEvent e) { kill(); }
          });
          goImageBack.setActionCommand("imageBack");
          goImageBack.addActionListener(this);
          goImageNext.setActionCommand("imageNext");
          goImageNext.addActionListener(this);
          imageList.setActionCommand("imageListSelection");
          imageList.addActionListener(this);
          addRoiSet.setActionCommand("roiSetNew");
          addRoiSet.addActionListener(this);
          openROIManager.setActionCommand("openROIManager");
          openROIManager.addActionListener(this);
          exportSVG.setActionCommand("exportSVG");
          exportSVG.addActionListener(this);
          roiSetList.setActionCommand("roiSetListSelection");
          roiSetList.addActionListener(this);
          saveChanges.setActionCommand("writeRoiSets");
          saveChanges.addActionListener(this);
          undoChanges.setActionCommand("revertRoiSets");
          undoChanges.addActionListener(this);
          displayMode.setActionCommand("changeColorMode");
          displayMode.addActionListener(this);
          changeLevels.setActionCommand("changeLevels");
          changeLevels.addActionListener(this);
     }
/** Handle an {@code ActionEvent} */
private void handleActionEvent(final ActionEvent e) {
(new Thread() {
@Override
public void run() {
if(loadingImage)
return;
String ac = e.getActionCommand();
ij.log().debug("Action command: " + ac);
if(ac.equals("imageBack"))
setImage(curImage - 1);
else if(ac.equals("imageNext"))
setImage(curImage + 1);
else if(ac.equals("imageListSelection"))
setImage(imageList.getSelectedIndex());
else if(ac.equals("roiSetNew"))
createRoiSet();
else if(ac.equals("roiSetListSelection"))
setROISet(roiSetList.getSelectedIndex());
else if(ac.equals("writeRoiSets"))
writeOverlays();
else if(ac.equals("revertRoiSets"))
revertOverlays();
else if(ac.equals("openROIManager"))
openROIManager();
else if(ac.equals("exportSVG"))
exportSVG();
else if(ac.equals("changeColorMode"))
changeDisplayMode();
else if(ac.equals("changeLevels"))
changeLevels();
}
}).start();
}
     /**
      * Locate the image column on which ROIs will be drawn.
      * Collects all column readers that can produce {@code Dataset}s;
      * aborts with a dialog if there are none, and asks the user to pick
      * one when there are several.  Finishes by loading the overlay data.
      *
      * @throws OperationCanceledException if the table has no image
      *     columns or the user dismisses the column-choice dialog
      */
     private void loadData() throws SlideSetException {
          ArrayList<ColumnBoundReader> iCbrs;
          iCbrs = dtid.getCompatableColumnReaders(Dataset.class, slideSet);
          if(iCbrs == null || iCbrs.isEmpty()) {
               JOptionPane.showMessageDialog(this,
                    "This table does not contain any images. "
                    + "Cannot create ROIs.",
                    "SlideSet - ROI Editor",
                    JOptionPane.ERROR_MESSAGE);
               active = false;
               throw new OperationCanceledException("No images in table.");
          }
          if(iCbrs.size() > 1) {
               int choices = iCbrs.size();
               String[] names = new String[choices];
               for(int i=0; i<choices; i++)
                    names[i] = String.valueOf(i+1)
                         + ": " + iCbrs.get(i).getColumnName();
               Object choice = JOptionPane.showInputDialog(this,
                    "Select images on which ROIs will be drawn:",
                    "SlideSet - ROI Editor",
                    JOptionPane.PLAIN_MESSAGE,
                    null, names, names[0]);
               if(choice == null)
               { throw new OperationCanceledException("No images selected"); }
               // Walk the readers until the one matching the selected
               // label is left in images.
               for(int i=0; i<choices; i++) {
                    images = iCbrs.get(i);
                    if(names[i].equals(choice))
                         break;
               }
          }
          else images = iCbrs.get(0);
          loadOverlays();
     }
     /**
      * Read every ROI-set column from disk into {@code roiSets}, filling
      * in default path/prefix/extension settings for columns that lack
      * them.  Rows whose files are missing or unreadable load as null.
      */
     private void loadOverlays() throws SlideSetException {
          if(roiReaders == null)
               roiReaders = new ArrayList<ColumnBoundReader>();
          if(roiWriters == null)
               roiWriters = new ArrayList<ColumnBoundWriter>();
          dtid.getColumnReadWritePairs(
               AbstractOverlay[].class, slideSet, roiReaders, roiWriters);
          for(int u = 0; u < roiReaders.size(); u++) {
               final int i = roiReaders.get(u).getColumnNum();
               roiSetNames.add(roiReaders.get(u).getColumnName());
               // Default directory, link prefix, and extension for new files
               final String defp = slideSet.getColumnDefaultPath(i);
               if(defp == null || defp.isEmpty())
                    slideSet.setColumnDefaultPath(i, "roi");
               final String dlp = slideSet.getDefaultLinkPrefix(i);
               if(dlp == null || dlp.isEmpty())
                    slideSet.setDefaultLinkPrefix(
                         i, slideSet.getColumnName(i).replaceAll("\\W", "-"));
               final String dlex = slideSet.getDefaultLinkExtension(i);
               if(dlex == null || dlex.isEmpty()) {
                    if(slideSet.getColumnMimeType(i).equals(MIME.SVG))
                         slideSet.setDefaultLinkExtension(i, "svg");
                    else
                         slideSet.setDefaultLinkExtension(i, "roiset");
               }
               // Read each row; failures are logged and leave a null slot.
               AbstractOverlay[][] set = new AbstractOverlay[slideSet.getNumRows()][];
               for(int j=0; j<slideSet.getNumRows(); j++) {
                    try{
                         set[j] = (AbstractOverlay[]) roiReaders.get(u).read(j);
                    } catch(LinkNotFoundException e) {
                         log.println("\nWarning: Could not find ROI set file \""
                              + slideSet.getItemText(i, j) + "\"");
                         set[j] = null;
                    } catch (RoiLinkException e) {
                         log.println("\nError: Could not read ROI set file!");
                         log.println("# This could be because the file specified");
                         log.println("# is not really an ROI set file.");
                         handleError(e);
                         set[j] = null;
                    } catch (Exception ex) {
                         log.println("\nWarning: Unable to read ROI set.");
                         handleError(ex);
                         set[j] = null;
                    }
               }
               roiSets.add(set);
          }
          if(!roiSets.isEmpty())
               curRoiSet = 0;
     }
/**
* Update the state of the controls
* Do NOT call from the event dispatch thread.
*/
private void updateControls() {
try {
SwingUtilities.invokeAndWait( new Thread() {
public void run() {
roiSetList.setModel(
new DefaultComboBoxModel(getRoiSetNames()));
roiSetList.setSelectedIndex(curRoiSet);
imageList.setModel(
new DefaultComboBoxModel(getImageNames()));
imageList.setSelectedIndex(curImage);
}
});
} catch(Exception e) {
throw new IllegalArgumentException(e);
}
}
/**
* Get the names of available ROI sets to put in the list,
* prefixed by the row number to avoid name duplications
* which cause problems with {@code DefaultComboBoxModel}.
*/
private String[] getRoiSetNames() {
String[] names = new String[roiSetNames.size()];
names = roiSetNames.toArray(names);
for(int i=0; i<names.length; i++)
names[i] = String.valueOf(i+1) + ": " + names[i];
return names;
}
/**
* Get the short names of image files to put in the list,
* prefixed by the row number to avoid name duplications
* which cause problems with {@code DefaultComboBoxModel}.
*/
private String[] getImageNames() {
String[] names = new String[slideSet.getNumRows()];
for(int i=0; i<slideSet.getNumRows(); i++)
names[i] = String.valueOf(i+1) + ": " +
new File(slideSet.getItemText(images.getColumnNum(), i)
.toString()).getName();
return names;
}
     /**
      * Load and display the image at {@code imageIndex}.
      * Silently returns if a load is already in progress or the index is
      * out of range; reuses the displayed {@code Dataset} when the index
      * is unchanged.  Unexpected errors close the whole editor.
      */
     private void loadImage(int imageIndex) {
          Dataset ds = null;
          synchronized(this) {
               if( loadingImage
                    || imageIndex < 0
                    || imageIndex >= slideSet.getNumRows())
                    return;
               loadingImage = true;
               // Same image as before: reuse its Dataset rather than
               // re-reading the file from disk.
               if(imageIndex == curImage && imageDisplay != null) {
                    for(DataView dv : imageDisplay) {
                         Data d = dv.getData();
                         if(d instanceof Dataset)
                              ds = (Dataset) d;
                    }
               }
               curImage = imageIndex;
          }
          updateControls();
          if(ds == null) try {
               ds = images.read(imageIndex);
          } catch(LinkNotFoundException e) {
               log.println("\nError: Unable to find image \""
                    + slideSet.getItemText(
                    images.getColumnNum(), imageIndex) + "\"");
               if(imageWindow != null)
                    imageWindow.close();
               loadingImage = false;
               return;
          } catch(ImgLinkException e) {
               log.println("\nError: Unable to load image");
               log.println("# \"" +
                    slideSet.getItemText(
                    images.getColumnNum(), imageIndex) + "\"");
               log.println("# It may not be a valid image file!");
               ij.log().debug(e);
               if(imageWindow != null)
                    imageWindow.close();
               loadingImage = false;
               return;
          } catch(Throwable t) {
               // Anything else is unexpected: shut the editor down.
               log.println("\nFatal error: Unexpected problem loading image!");
               ij.log().debug(t);
               kill();
               return;
          }
          // Replace the old display with a fresh one showing this image.
          if(imageDisplay != null)
               imageDisplay.close();
          imageDisplay = new FastUpdateImageDisplay();
          ij.getContext().inject(imageDisplay);
          imageDisplay.display(ds);
          createImageWindow();
          imageDisplay.update();
          drawOverlays();
          changeDisplayMode();
          registerBrightnessContrast(bcDialog);
          imageWindow.setTitle("ROI Editor");
          loadingImage = false;
     }
/** Update the color mode */
private void changeDisplayMode() {
ColorMode m;
switch(displayMode.getSelectedIndex()) {
default:
case 0:
m = ColorMode.COMPOSITE;
break;
case 1:
m = ColorMode.GRAYSCALE;
break;
case 2:
m = ColorMode.COLOR;
}
for(DataView v : imageDisplay) {
if(!DatasetView.class.isInstance(v)) continue;
((DatasetView)v).setColorMode(m);
}
imageDisplay.update();
}
/** Open the Brightness/Contrast dialog */
private void changeLevels() {
CommandInfo bci = ij.command().getCommand(BrightnessContrastRoi.class);
//CommandInfo bci = ij.command().getCommand(net.imagej.plugins.commands.display.interactive.BrightnessContrast.class);
ij.command().run(bci, true, "roiEditor", this);
}
/** Create the image window */
private void createImageWindow() {
try {
ij.thread().invoke( new Thread() {
@Override
public void run() {
SwingImageDisplayViewer idv = new SwingSdiImageDisplayViewer();
idv.setContext(ij.getContext());
if(!idv.canView(imageDisplay) || !idv.isCompatible(ui))
throw new IllegalArgumentException("Viewer problem");
final DisplayWindow dw = ui.createDisplayWindow(imageDisplay);
if(!(dw instanceof SwingDisplayWindow))
throw new IllegalArgumentException("Must run in a windowed environment!");
imageWindow = (SwingDisplayWindow) dw;
idv.view(imageWindow, imageDisplay);
ij.ui().addDisplayViewer(idv);
imageWindow.addWindowListener(
new WindowAdapter() {
@Override
public void windowClosing(WindowEvent e) {
if(active) kill();
}});
imageWindow.showDisplay(true);
}
});
} catch (InterruptedException e) {
throw new IllegalArgumentException(e);
} catch (InvocationTargetException e) {
throw new IllegalArgumentException(e);
}
imageWindow.setLocationRelativeTo(null);
}
     /**
      * Draw the current ROI set's overlays for the current image onto the
      * display, replacing whatever overlays were shown before.
      */
     private void drawOverlays() {
          if(imageDisplay == null)
               return;
          if(curRoiSet < 0 || curRoiSet >= roiSets.size())
               return;
          if(!ImageDisplay.class.isInstance(imageDisplay))
               throw new IllegalArgumentException("Bad display type.");
          // Clear old overlays, then add the stored ones in bulk.
          imageDisplay.clearOverlaysFast();
          imageDisplay.update();
          Overlay[] overlays = roiSets.get(curRoiSet)[curImage];
          ImageDisplayService ids = ij.get(ImageDisplayService.class);
          if(overlays != null)
               for(int i = 0; i < overlays.length; i++)
                    imageDisplay.addFast(overlays[i], ids);
          imageDisplay.rebuildNow();
          imageDisplay.update();
     }
/**
 * Save overlays drawn on the current image to memory, not to disk.
 * Collects the display's overlays into the in-memory ROI set for the
 * current image; an empty display clears the stored entry. No-op when
 * the editor is read-only or when there is no valid image/ROI set.
 */
private void saveOverlays() {
    if(locked) return; // Don't save changes if read-only
    if(curRoiSet < 0 || curRoiSet >= roiSets.size())
        return;
    if(imageDisplay == null || imageDisplay.isEmpty())
        return;
    if(!ImageDisplay.class.isInstance(imageDisplay)) {
        log.println("\nError: Unable to record overlays.");
        log.println("# There is not a valid display open.");
        // Fix: must abort here — the code previously fell through and
        // attempted the (ImageDisplay) cast below, which would throw.
        return;
    }
    List<Overlay> overlays = os.getOverlays((ImageDisplay) imageDisplay);
    if(overlays.isEmpty()) {
        roiSets.get(curRoiSet)[curImage] = null;
        return;
    }
    ArrayList<AbstractOverlay> overCast =
            new ArrayList<AbstractOverlay>(overlays.size()+2);
    // Keep only AbstractOverlay instances, dropping duplicates
    for(Overlay o : overlays)
        if(AbstractOverlay.class.isInstance(o) &&
                !overCast.contains((AbstractOverlay) o)) //<<< A bit hacky...
            overCast.add((AbstractOverlay) o);
    roiSets.get(curRoiSet)[curImage] =
            overCast.toArray(new AbstractOverlay[overCast.size()]);
}
/**
 * Save all ROI sets to disk.
 * For every ROI set column and every table row, resolves (creating if
 * needed) the destination file link and writes the overlays through the
 * column's bound writer. SVG writers additionally receive the source
 * image path so the SVG can reference its image.
 */
private void writeOverlays() {
    if(locked) { // Don't save changes if read-only. Here we'll let the user know.
        JOptionPane.showMessageDialog(this, "This ROI set is locked. Unable to save changes.", "Slide Set", JOptionPane.ERROR_MESSAGE);
        return;
    }
    saveOverlays();
    if(roiSets.isEmpty())
        return;
    for(int i=0; i<roiSets.size(); i++) {
        for(int row=0; row < slideSet.getNumRows(); row++) {
            final ColumnBoundWriter w = roiWriters.get(i);
            try {
                String dest =
                        slideSet.getItemText(w.getColumnNum(), row);
                // No link for this cell yet: generate the default one
                if(dest == null || dest.isEmpty()) {
                    slideSet.makeDefaultLink(w.getColumnNum(), row);
                    dest =
                            slideSet.getItemText(w.getColumnNum(), row);
                }
                if(w.getWriter()
                        instanceof AbstractOverlaysToSVGFileWriter) {
                    // SVG output embeds a link to the source image
                    final String imgpath
                            = slideSet.resolvePath(
                            slideSet.getItemText(images.getColumnNum(), row));
                    dest = slideSet.resolvePath(dest);
                    final AbstractOverlaysToSVGFileWriter aosvg
                            = (AbstractOverlaysToSVGFileWriter) w.getWriter();
                    // -1, -1: let the writer pick the output dimensions
                    aosvg.write(roiSets.get(i)[row], dest, -1, -1, imgpath);
                }
                else
                    w.write(roiSets.get(i)[row], row);
            } catch(LinkNotFoundException e) {
                log.println("\nError: \""
                        + slideSet.getItemText(w.getColumnNum(), row)
                        + "\"");
                log.println("# is not a valid path, so the");
                log.println("# ROI set cannot be saved!");
            } catch(SlideSetException e) {
                log.println("\nError: Unable to save ROI set!");
                handleError(e);
            }
        }
    }
}
/**
 * With user confirmation, revert all overlays to the last saved version.
 * Reloads the saved overlays from disk and refreshes the current image.
 */
private void revertOverlays() {
    final int choice = JOptionPane.showConfirmDialog(
            this,
            "Revert all regions of interest to last saved version?",
            "ROI Editor", JOptionPane.YES_NO_OPTION);
    if(choice != JOptionPane.YES_OPTION)
        return;
    try {
        loadOverlays();
    } catch(Exception e) {
        handleError(e);
    }
    loadImage(curImage);
}
/**
 * Create a new set of overlays (ROIs).
 * Prompts for a name, adds a matching SVG file-link column to the table,
 * registers reader/writer bindings for the column, creates default file
 * links for every row, and makes the new set the current one.
 */
private void createRoiSet() {
    String name = JOptionPane.showInputDialog(this, "New ROI set name:");
    if(name == null) // user cancelled the dialog
        return;
    name = name.trim();
    name = name.equals("") ? "ROI" : name;
    int colI;
    try {
        colI = slideSet.addColumn(name, FileLinkElement.class, MIME.SVG);
    } catch(Exception e) {
        handleError(e);
        return;
    }
    roiSetNames.add(name);
    roiSets.add(new AbstractOverlay[slideSet.getNumRows()][]);
    roiReaders.add(
            new ColumnBoundReader(slideSet, colI,
            new SVGFileToAbstractOverlayReader()));
    roiWriters.add(
            new ColumnBoundWriter(slideSet, colI,
            new AbstractOverlaysToSVGFileWriter()));
    // Default naming: roi/<name>.svg with non-word characters sanitized
    slideSet.setColumnDefaultPath(colI, "roi");
    slideSet.setDefaultLinkPrefix(colI, name.replaceAll("\\W", "-"));
    slideSet.setDefaultLinkExtension(colI, "svg");
    try {
        for(int i=0; i<slideSet.getNumRows(); i++)
            slideSet.makeDefaultLink(colI, i);
    } catch(SlideSetException e) {
        handleError(e);
    }
    curRoiSet = roiSets.size() - 1;
    updateControls();
    loadImage(curImage);
}
/**
 * Clean up and close the editor.
 * Offers to save unsaved changes (unless read-only), then tears down
 * both the editor and image windows and wakes any thread waiting on
 * this editor instance.
 */
@Override
public void kill() {
    ij.log().debug("Closing ROI editor");
    if(active && (!locked) &&
            JOptionPane.showConfirmDialog(this, "Save changes?",
            "ROI Editor", JOptionPane.YES_NO_OPTION)
            == JOptionPane.YES_OPTION) {
        saveOverlays();
        writeOverlays();
    }
    // Synchronized so waiters observe active == false before waking
    synchronized(this) {
        active = false;
        setVisible(false);
        if(imageWindow != null && imageWindow.isVisible())
            imageWindow.dispose();
        dispose();
        notifyAll();
    }
}
/**
 * Change display to the image with the given {@code index} in the list.
 * Indices wrap: past-the-end selects the first image, negative selects
 * the last. Pending overlay edits are saved before switching.
 */
private void setImage(int index) {
    final int count = imageList.getItemCount();
    int target = index;
    if(target >= count)
        target = 0;
    else if(target < 0)
        target = count - 1;
    if(target == curImage)
        return;
    saveOverlays();
    loadImage(target);
}
/**
 * Load the ROI set with the given {@code index} into the display.
 * Indices wrap around the list; switching saves pending overlay edits
 * first, then refreshes the controls and the current image.
 */
private void setROISet(int index) {
    final int count = roiSetList.getItemCount();
    int target = index;
    if(target >= count)
        target = 0;
    else if(target < 0)
        target = count - 1;
    if(target == curRoiSet)
        return;
    saveOverlays();
    curRoiSet = target;
    updateControls();
    loadImage(curImage);
}
/**
 * Open the ImageJ overlay manager window.
 * Failures are reported to the editor log rather than thrown.
 */
private void openROIManager() {
    final CommandService commandService
            = os.getContext().getService(CommandService.class);
    try {
        commandService.run(OverlayManager.class, true, new Object[0]);
    } catch(Exception e) {
        log.println("\nUnable to open ROI Manager window.");
        handleError(e);
    }
}
/**
 * Export the current image's ROIs to an SVG file chosen by the user.
 * Saves pending edits, prompts for a destination (confirming before
 * overwrite), and writes the overlays of the current ROI set with the
 * display's plane dimensions and a link to the source image.
 */
private void exportSVG() {
    saveOverlays();
    JFileChooser fc = new JFileChooser(slideSet.getWorkingDirectory());
    fc.setDialogType(JFileChooser.SAVE_DIALOG);
    fc.setDialogTitle("Save ROIs as...");
    fc.setFileFilter(new FileNameExtensionFilter("SVG file", "svg"));
    fc.setSelectedFile(new File("ROI" + ".svg"));
    final int r = fc.showDialog(this, "Save");
    if(r != JFileChooser.APPROVE_OPTION)
        return;
    final File path = fc.getSelectedFile();
    if(path == null)
        return;
    if(path.exists()
            && JOptionPane.showConfirmDialog(this,
            "File exists. OK to overwrite?",
            "Slide Set", JOptionPane.OK_CANCEL_OPTION)
            != JOptionPane.OK_OPTION )
        return;
    try {
        // (int) cast replaces the deprecated new Double(...).intValue()
        // boxing; +1 corrects an off-by-one in the reported plane extents
        int w = (int) ((ImageDisplay) imageDisplay)
                .getPlaneExtents().width + 1;
        int h = (int) ((ImageDisplay) imageDisplay)
                .getPlaneExtents().height + 1;
        String imgPath = slideSet.getItemText(
                images.getColumnNum(), curImage);
        if(!(new File(imgPath)).isAbsolute())
            imgPath = slideSet.getWorkingDirectory() + File.separator + imgPath;
        new AbstractOverlaysToSVGFileWriter()
                .write(roiSets.get(curRoiSet)[curImage],
                        path.getAbsolutePath(),
                        w, h, imgPath);
    } catch(Exception e) {
        handleError(e);
    }
}
/**
 * Report an exception to the editor log and the ImageJ debug log.
 * Falls back to {@code e.toString()} when the exception carries no
 * localized message (many runtime exceptions return {@code null}).
 *
 * @param e the exception to report
 */
private void handleError(Exception e) {
    final String msg = e.getLocalizedMessage();
    log.println(msg == null || msg.isEmpty() ? e.toString() : msg);
    ij.log().debug(e);
}
}
|
Improve change tracking in ROI Editor
ROI editor tracks changes, and will not prompt user to save changes if no
changes have been made.
|
src/main/java/edu/emory/cellbio/ijbat/ui/RoiEditor.java
|
Improve change tracking in ROI Editor
|
|
Java
|
bsd-3-clause
|
eb074a7c7b8f1babf63556c089cdf81b4527adb1
| 0
|
ibcn-cloudlet/firefly,ibcn-cloudlet/firefly,ibcn-cloudlet/firefly
|
package be.iminds.iot.things.repository.simple.provider;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.PrintWriter;
import java.io.Writer;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.UUID;
import org.osgi.framework.BundleContext;
import org.osgi.service.component.annotations.Activate;
import org.osgi.service.component.annotations.Component;
import org.osgi.service.component.annotations.Deactivate;
import org.osgi.service.component.annotations.Reference;
import org.osgi.service.component.annotations.ReferenceCardinality;
import org.osgi.service.component.annotations.ReferencePolicy;
import org.osgi.service.event.Event;
import org.osgi.service.event.EventAdmin;
import org.osgi.service.event.EventHandler;
import aQute.lib.converter.TypeReference;
import aQute.lib.json.JSONCodec;
import be.iminds.iot.things.api.Thing;
import be.iminds.iot.things.repository.api.Repository;
import be.iminds.iot.things.repository.api.ThingDTO;
/**
 * In-memory repository of Thing descriptions, persisted to
 * {@code things.txt} across restarts. Tracks which things are currently
 * online by listening to {@code be/iminds/iot/thing/*} events and to
 * {@link Thing} service (un)registrations, and appends every event to an
 * append-only {@code log.txt} audit file.
 */
@Component(property={"event.topics=be/iminds/iot/thing/*"})
public class ThingsRepository implements Repository, EventHandler {

    private final static JSONCodec json = new JSONCodec();

    // All known things, keyed by thing id; written to disk on deactivate
    private Map<UUID, ThingDTO> things = Collections.synchronizedMap(new HashMap<>());
    // Ids (not DTOs) of the things currently online
    private Set<UUID> online = Collections.synchronizedSet(new HashSet<>());

    // Append-only event log
    private Writer logger;

    @Activate
    public void activate(BundleContext context){
        // load thing dtos from file
        try {
            things = (Map<UUID, ThingDTO>) json.dec().from(new File("things.txt")).get(new TypeReference<Map<UUID,ThingDTO>>(){});
        } catch(Exception e){
            System.err.println("Failed to load thing descriptions from file");
        }
        // open file output to log events
        try {
            logger = new PrintWriter(new BufferedWriter(new FileWriter(new File("log.txt"), true)));
            logger.write(">> System online "+new Date()+"\n");
            logger.flush();
        } catch (IOException e) {
            // event logging is best-effort; continue without a log file
        }
    }

    @Deactivate
    public void deactivate(){
        // close event logging file
        try {
            logger.close();
        } catch (IOException ioe) {
            // ignore
        }
        // write thing dtos to file
        try {
            json.enc().indent("\t").to(new File("things.txt")).put(things).close();
        } catch(Exception e){
            System.err.println("Failed to write thing descriptions to file");
        }
    }

    @Override
    public ThingDTO getThing(UUID id) {
        return things.get(id);
    }

    @Override
    public Collection<ThingDTO> getThings() {
        // only return online things
        ArrayList<ThingDTO> result = new ArrayList<>();
        synchronized(online){
            for(UUID id : online){
                ThingDTO t = things.get(id);
                result.add(t);
            }
        }
        return Collections.unmodifiableCollection(result);
    }

    @Override
    public void putThing(ThingDTO thing) {
        things.put(thing.id, thing);
    }

    @Override
    public void handleEvent(Event event) {
        UUID id = (UUID) event.getProperty(Thing.ID);
        ThingDTO thing;
        synchronized(things){
            thing = things.get(id);
            if(thing==null){
                // first time we see this thing: only create it on "online"
                if(event.getTopic().startsWith("be/iminds/iot/thing/online/")){
                    thing = new ThingDTO();
                    thing.id = id;
                    thing.gateway = (UUID) event.getProperty(Thing.GATEWAY);
                    thing.device = (String) event.getProperty(Thing.DEVICE);
                    thing.service = (String) event.getProperty(Thing.SERVICE);
                    thing.type = (String) event.getProperty(Thing.TYPE);
                    thing.name = thing.service;
                    things.put(id, thing);
                }
            } else {
                // update gateway - could be changed
                thing.gateway = (UUID) event.getProperty(Thing.GATEWAY);
            }
        }
        if(thing!=null){
            if(event.getTopic().startsWith("be/iminds/iot/thing/online/")){
                online.add(thing.id);
            } else if(event.getTopic().startsWith("be/iminds/iot/thing/offline/")){
                // fix: the set holds UUIDs — removing the ThingDTO itself was
                // a type-mismatch no-op, so things never went offline
                online.remove(thing.id);
            } else if(event.getTopic().startsWith("be/iminds/iot/thing/change/")){
                online.add(thing.id);
                String name = (String) event.getProperty(Thing.STATE_VAR);
                Object val = event.getProperty(Thing.STATE_VAL);
                if(thing.state == null){
                    thing.state = new HashMap<>();
                }
                thing.state.put(name, val);
            }
        }
        logEvent(event);
    }

    @Reference(cardinality=ReferenceCardinality.MULTIPLE,
            policy=ReferencePolicy.DYNAMIC)
    public void addThing(Thing t, Map<String, Object> properties){
        // mark online
        UUID id = (UUID) properties.get(Thing.ID);
        // also init here in case of missed online event
        ThingDTO thing;
        synchronized(things){
            thing = things.get(id);
            if(thing==null){
                thing = new ThingDTO();
                thing.id = id; // fix: id was left null on this code path
                thing.gateway = (UUID) properties.get(Thing.GATEWAY);
                thing.device = (String) properties.get(Thing.DEVICE);
                thing.service = (String) properties.get(Thing.SERVICE);
                thing.type = (String) properties.get(Thing.TYPE);
                thing.name = thing.service;
                things.put(id, thing);
            } else {
                // update gateway - could be changed
                thing.gateway = (UUID) properties.get(Thing.GATEWAY);
            }
        }
        online.add(id);
        // This does not update UI, also send service online thing event?
        // FIXME ? This could lead to many duplicates though
        ea.postEvent(new Event("be/iminds/iot/thing/online/"+id, properties));
    }

    public void removeThing(Thing t, Map<String, Object> properties){
        // mark offline
        UUID id = (UUID) properties.get(Thing.ID);
        online.remove(id);
        // This does not update UI - as no event will be sent when the gateway
        // is just stopped, we send an event of this service on our own...
        ea.postEvent(new Event("be/iminds/iot/thing/offline/"+id, properties));
    }

    private EventAdmin ea;

    @Reference
    public void setEventAdmin(EventAdmin ea){
        this.ea = ea;
    }

    /** Append a tab-separated record of the event to the audit log. */
    private void logEvent(Event event){
        // derive the event type from the topic prefix
        String type = "change";
        if(event.getTopic().startsWith("be/iminds/iot/thing/online/")){
            type = "online";
        } else if(event.getTopic().startsWith("be/iminds/iot/thing/offline/")){
            type = "offline";
        }
        StringBuilder builder = new StringBuilder();
        builder.append(event.getProperty("timestamp"));
        builder.append("\t");
        builder.append(event.getProperty(Thing.ID));
        builder.append("\t");
        builder.append(event.getProperty(Thing.GATEWAY));
        builder.append("\t");
        builder.append(type);
        if(type.equals("online")){
            builder.append("\t");
            builder.append(event.getProperty(Thing.DEVICE));
            builder.append("\t");
            builder.append(event.getProperty(Thing.SERVICE));
            builder.append("\t");
            builder.append(event.getProperty(Thing.TYPE));
        } else if(type.equals("change")){
            builder.append("\t");
            builder.append(event.getProperty(Thing.STATE_VAR));
            builder.append("\t");
            builder.append(event.getProperty(Thing.STATE_VAL));
        }
        try {
            builder.append("\n");
            logger.write(builder.toString());
            logger.flush();
        } catch(IOException e){
            // ignore
        }
    }
}
|
be.iminds.iot.things.repository.simple.provider/src/be/iminds/iot/things/repository/simple/provider/ThingsRepository.java
|
package be.iminds.iot.things.repository.simple.provider;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.PrintWriter;
import java.io.Writer;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.UUID;
import org.osgi.framework.BundleContext;
import org.osgi.service.component.annotations.Activate;
import org.osgi.service.component.annotations.Component;
import org.osgi.service.component.annotations.Deactivate;
import org.osgi.service.component.annotations.Reference;
import org.osgi.service.component.annotations.ReferenceCardinality;
import org.osgi.service.component.annotations.ReferencePolicy;
import org.osgi.service.event.Event;
import org.osgi.service.event.EventAdmin;
import org.osgi.service.event.EventHandler;
import aQute.lib.converter.TypeReference;
import aQute.lib.json.JSONCodec;
import be.iminds.iot.things.api.Thing;
import be.iminds.iot.things.repository.api.Repository;
import be.iminds.iot.things.repository.api.ThingDTO;
/**
 * In-memory repository of Thing descriptions, persisted to
 * {@code things.txt} across restarts. Tracks which things are currently
 * online by listening to {@code be/iminds/iot/thing/*} events and to
 * {@link Thing} service (un)registrations, and appends every event to an
 * append-only {@code log.txt} audit file.
 */
@Component(property={"event.topics=be/iminds/iot/thing/*"})
public class ThingsRepository implements Repository, EventHandler {

    private final static JSONCodec json = new JSONCodec();

    // All known things, keyed by thing id; written to disk on deactivate
    private Map<UUID, ThingDTO> things = Collections.synchronizedMap(new HashMap<>());
    // Ids (not DTOs) of the things currently online
    private Set<UUID> online = Collections.synchronizedSet(new HashSet<>());

    // Append-only event log
    private Writer logger;

    @Activate
    public void activate(BundleContext context){
        // load thing dtos from file
        try {
            things = (Map<UUID, ThingDTO>) json.dec().from(new File("things.txt")).get(new TypeReference<Map<UUID,ThingDTO>>(){});
        } catch(Exception e){
            System.err.println("Failed to load thing descriptions from file");
        }
        // open file output to log events
        try {
            logger = new PrintWriter(new BufferedWriter(new FileWriter(new File("log.txt"), true)));
            logger.write(">> System online "+new Date()+"\n");
            logger.flush();
        } catch (IOException e) {
            // event logging is best-effort; continue without a log file
        }
    }

    @Deactivate
    public void deactivate(){
        // close event logging file
        try {
            logger.close();
        } catch (IOException ioe) {
            // ignore
        }
        // write thing dtos to file
        try {
            json.enc().indent("\t").to(new File("things.txt")).put(things).close();
        } catch(Exception e){
            System.err.println("Failed to write thing descriptions to file");
        }
    }

    @Override
    public ThingDTO getThing(UUID id) {
        return things.get(id);
    }

    @Override
    public Collection<ThingDTO> getThings() {
        // only return online things
        ArrayList<ThingDTO> result = new ArrayList<>();
        synchronized(online){
            for(UUID id : online){
                result.add(things.get(id));
            }
        }
        return Collections.unmodifiableCollection(result);
    }

    @Override
    public void putThing(ThingDTO thing) {
        things.put(thing.id, thing);
    }

    @Override
    public void handleEvent(Event event) {
        UUID id = (UUID) event.getProperty(Thing.ID);
        ThingDTO thing;
        synchronized(things){
            thing = things.get(id);
            if(thing==null){
                // first time we see this thing: only create it on "online"
                if(event.getTopic().startsWith("be/iminds/iot/thing/online/")){
                    thing = new ThingDTO();
                    thing.id = id;
                    thing.gateway = (UUID) event.getProperty(Thing.GATEWAY);
                    thing.device = (String) event.getProperty(Thing.DEVICE);
                    thing.service = (String) event.getProperty(Thing.SERVICE);
                    thing.type = (String) event.getProperty(Thing.TYPE);
                    thing.name = thing.service;
                    things.put(id, thing);
                }
            } else {
                // update gateway - could be changed
                thing.gateway = (UUID) event.getProperty(Thing.GATEWAY);
            }
        }
        if(thing!=null){
            if(event.getTopic().startsWith("be/iminds/iot/thing/online/")){
                online.add(thing.id);
            } else if(event.getTopic().startsWith("be/iminds/iot/thing/offline/")){
                // fix: the set holds UUIDs — removing the ThingDTO itself was
                // a type-mismatch no-op, so things never went offline
                online.remove(thing.id);
            } else if(event.getTopic().startsWith("be/iminds/iot/thing/change/")){
                online.add(thing.id);
                String name = (String) event.getProperty(Thing.STATE_VAR);
                Object val = event.getProperty(Thing.STATE_VAL);
                if(thing.state == null){
                    thing.state = new HashMap<>();
                }
                thing.state.put(name, val);
            }
        }
        logEvent(event);
    }

    @Reference(cardinality=ReferenceCardinality.MULTIPLE,
            policy=ReferencePolicy.DYNAMIC)
    public void addThing(Thing t, Map<String, Object> properties){
        // mark online
        UUID id = (UUID) properties.get(Thing.ID);
        // also init here in case of missed online event
        ThingDTO thing;
        synchronized(things){
            thing = things.get(id);
            if(thing==null){
                // fix: previously dereferenced the null reference here
                // (guaranteed NPE) — create and register a new DTO instead
                thing = new ThingDTO();
                thing.id = id;
                thing.gateway = (UUID) properties.get(Thing.GATEWAY);
                thing.device = (String) properties.get(Thing.DEVICE);
                thing.service = (String) properties.get(Thing.SERVICE);
                thing.type = (String) properties.get(Thing.TYPE);
                thing.name = thing.service;
                things.put(id, thing);
            } else {
                // update gateway - could be changed
                thing.gateway = (UUID) properties.get(Thing.GATEWAY);
            }
        }
        online.add(id);
        // This does not update UI, also send service online thing event?
        // FIXME ? This could lead to many duplicates though
        ea.postEvent(new Event("be/iminds/iot/thing/online/"+id, properties));
    }

    public void removeThing(Thing t, Map<String, Object> properties){
        // mark offline
        UUID id = (UUID) properties.get(Thing.ID);
        online.remove(id);
        // This does not update UI - as no event will be sent when the gateway
        // is just stopped, we send an event of this service on our own...
        ea.postEvent(new Event("be/iminds/iot/thing/offline/"+id, properties));
    }

    private EventAdmin ea;

    @Reference
    public void setEventAdmin(EventAdmin ea){
        this.ea = ea;
    }

    /** Append a tab-separated record of the event to the audit log. */
    private void logEvent(Event event){
        // derive the event type from the topic prefix
        String type = "change";
        if(event.getTopic().startsWith("be/iminds/iot/thing/online/")){
            type = "online";
        } else if(event.getTopic().startsWith("be/iminds/iot/thing/offline/")){
            type = "offline";
        }
        StringBuilder builder = new StringBuilder();
        builder.append(event.getProperty("timestamp"));
        builder.append("\t");
        builder.append(event.getProperty(Thing.ID));
        builder.append("\t");
        builder.append(event.getProperty(Thing.GATEWAY));
        builder.append("\t");
        builder.append(type);
        if(type.equals("online")){
            builder.append("\t");
            builder.append(event.getProperty(Thing.DEVICE));
            builder.append("\t");
            builder.append(event.getProperty(Thing.SERVICE));
            builder.append("\t");
            builder.append(event.getProperty(Thing.TYPE));
        } else if(type.equals("change")){
            builder.append("\t");
            builder.append(event.getProperty(Thing.STATE_VAR));
            builder.append("\t");
            builder.append(event.getProperty(Thing.STATE_VAL));
        }
        try {
            builder.append("\n");
            logger.write(builder.toString());
            logger.flush();
        } catch(IOException e){
            // ignore
        }
    }
}
|
create new ThingDTO when lookup returns null
|
be.iminds.iot.things.repository.simple.provider/src/be/iminds/iot/things/repository/simple/provider/ThingsRepository.java
|
create new ThingDTO when lookup returns null
|
|
Java
|
mit
|
070243d4041daa2291ce2b17186a76c042508f62
| 0
|
MarfGamer/JRakNet,JRakNet/JRakNet
|
/*
* _ _____ _ _ _ _
* | | | __ \ | | | \ | | | |
* | | | |__) | __ _ | | __ | \| | ___ | |_
* _ | | | _ / / _` | | |/ / | . ` | / _ \ | __|
* | |__| | | | \ \ | (_| | | < | |\ | | __/ | |_
* \____/ |_| \_\ \__,_| |_|\_\ |_| \_| \___| \__|
*
* the MIT License (MIT)
*
* Copyright (c) 2016-2018 Trent Summerlin
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* the above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package com.whirvis.jraknet.server;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.util.concurrent.ConcurrentHashMap;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.whirvis.jraknet.RakNetPacket;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.ChannelInboundHandlerAdapter;
import io.netty.channel.socket.DatagramPacket;
/**
 * Used by the <code>RakNetServer</code> with the sole purpose of sending
 * received packets to the server so they can be handled.
 *
 * @author Trent Summerlin
 */
public class RakNetServerHandler extends ChannelInboundHandlerAdapter {

    private static final Logger log = LoggerFactory.getLogger(RakNetServerHandler.class);

    // Handler data
    private final String loggerName;
    private final RakNetServer server;
    private final ConcurrentHashMap<InetAddress, BlockedAddress> blocked;
    private InetSocketAddress causeAddress;

    /**
     * Constructs a <code>RakNetServerHandler</code> for the specified
     * <code>RakNetServer</code>.
     *
     * @param server
     *            the <code>RakNetServer</code> to send received packets to.
     */
    public RakNetServerHandler(RakNetServer server) {
        // Fix: trailing space so log messages do not run straight into
        // the prefix ("...#123Blocked address" -> "...#123 Blocked address")
        this.loggerName = "server handler #" + server.getGloballyUniqueId() + " ";
        this.server = server;
        this.blocked = new ConcurrentHashMap<InetAddress, BlockedAddress>();
    }

    /**
     * Blocks the specified address with the specified reason for the specified
     * amount time.
     *
     * @param address
     *            the address to block.
     * @param reason
     *            the reason the address was blocked.
     * @param time
     *            how long the address will be blocked in milliseconds.
     */
    public void blockAddress(InetAddress address, String reason, long time) {
        blocked.put(address, new BlockedAddress(System.currentTimeMillis(), time));
        for (RakNetServerListener listener : server.getListeners()) {
            listener.onAddressBlocked(address, reason, time);
        }
        log.info(
                loggerName + "Blocked address " + address + " due to \"" + reason + "\" for " + time + " milliseconds");
    }

    /**
     * Unblocks the specified address.
     *
     * @param address
     *            the address to unblock.
     */
    public void unblockAddress(InetAddress address) {
        blocked.remove(address);
        for (RakNetServerListener listener : server.getListeners()) {
            listener.onAddressUnblocked(address);
        }
        log.info(loggerName + "Unblocked address " + address);
    }

    /**
     * @param address
     *            the address to check.
     * @return whether or not the specified address is blocked.
     */
    public boolean addressBlocked(InetAddress address) {
        return blocked.containsKey(address);
    }

    @Override
    public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception {
        if (msg instanceof DatagramPacket) {
            // Get packet and sender data
            DatagramPacket datagram = (DatagramPacket) msg;
            InetSocketAddress sender = datagram.sender();
            RakNetPacket packet = new RakNetPacket(datagram);

            // If an exception happens it's because of this address
            this.causeAddress = sender;

            // Is the sender blocked?
            if (this.addressBlocked(sender.getAddress())) {
                BlockedAddress status = blocked.get(sender.getAddress());
                if (status.getTime() <= BlockedAddress.PERMANENT_BLOCK) {
                    datagram.content().release(); // No longer needed
                    return; // Permanently blocked
                }
                if (System.currentTimeMillis() - status.getStartTime() < status.getTime()) {
                    datagram.content().release(); // No longer needed
                    return; // Time hasn't expired
                }
                this.unblockAddress(sender.getAddress());
            }

            // Handle the packet and release the buffer
            server.handleMessage(packet, sender);
            datagram.content().readerIndex(0); // Reset position
            log.debug(loggerName + "Sent packet to server and reset Datagram buffer read position");
            for (RakNetServerListener listener : server.getListeners()) {
                listener.handleNettyMessage(datagram.content(), sender);
            }
            datagram.content().release(); // No longer needed
            log.debug(loggerName + "Sent Datagram buffer to server and released it");

            // No exceptions occurred, release the suspect
            this.causeAddress = null;
        }
    }

    @Override
    public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) {
        server.handleHandlerException(this.causeAddress, cause);
    }
}
|
src/main/java/com/whirvis/jraknet/server/RakNetServerHandler.java
|
/*
* _ _____ _ _ _ _
* | | | __ \ | | | \ | | | |
* | | | |__) | __ _ | | __ | \| | ___ | |_
* _ | | | _ / / _` | | |/ / | . ` | / _ \ | __|
* | |__| | | | \ \ | (_| | | < | |\ | | __/ | |_
* \____/ |_| \_\ \__,_| |_|\_\ |_| \_| \___| \__|
*
* the MIT License (MIT)
*
* Copyright (c) 2016-2018 Trent Summerlin
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* the above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package com.whirvis.jraknet.server;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.util.concurrent.ConcurrentHashMap;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.whirvis.jraknet.RakNetPacket;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.ChannelInboundHandlerAdapter;
import io.netty.channel.socket.DatagramPacket;
/**
 * Used by the <code>RakNetServer</code> with the sole purpose of sending
 * received packets to the server so they can be handled.
 *
 * @author Trent Summerlin
 */
public class RakNetServerHandler extends ChannelInboundHandlerAdapter {

    private static final Logger log = LoggerFactory.getLogger(RakNetServerHandler.class);

    // Handler data
    private final String loggerName;
    private final RakNetServer server;
    private final ConcurrentHashMap<InetAddress, BlockedAddress> blocked;
    private InetSocketAddress causeAddress;

    /**
     * Constructs a <code>RakNetServerHandler</code> for the specified
     * <code>RakNetServer</code>.
     *
     * @param server
     *            the <code>RakNetServer</code> to send received packets to.
     */
    public RakNetServerHandler(RakNetServer server) {
        this.loggerName = "server handler #" + server.getGloballyUniqueId();
        this.server = server;
        this.blocked = new ConcurrentHashMap<InetAddress, BlockedAddress>();
    }

    /**
     * Blocks the specified address with the specified reason for the specified
     * amount time.
     *
     * @param address
     *            the address to block.
     * @param reason
     *            the reason the address was blocked.
     * @param time
     *            how long the address will be blocked in milliseconds.
     */
    public void blockAddress(InetAddress address, String reason, long time) {
        blocked.put(address, new BlockedAddress(System.currentTimeMillis(), time));
        for (RakNetServerListener listener : server.getListeners()) {
            listener.onAddressBlocked(address, reason, time);
        }
        log.info(
                loggerName + "Blocked address " + address + " due to \"" + reason + "\" for " + time + " milliseconds");
    }

    /**
     * Unblocks the specified address.
     *
     * @param address
     *            the address to unblock.
     */
    public void unblockAddress(InetAddress address) {
        blocked.remove(address);
        for (RakNetServerListener listener : server.getListeners()) {
            listener.onAddressUnblocked(address);
        }
        log.info(loggerName + "Unblocked address " + address);
    }

    /**
     * @param address
     *            the address to check.
     * @return whether or not the specified address is blocked.
     */
    public boolean addressBlocked(InetAddress address) {
        return blocked.containsKey(address);
    }

    @Override
    public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception {
        if (msg instanceof DatagramPacket) {
            // Get packet and sender data
            DatagramPacket datagram = (DatagramPacket) msg;
            InetSocketAddress sender = datagram.sender();
            RakNetPacket packet = new RakNetPacket(datagram);

            // If an exception happens it's because of this address
            this.causeAddress = sender;

            // Is the sender blocked?
            if (this.addressBlocked(sender.getAddress())) {
                BlockedAddress status = blocked.get(sender.getAddress());
                if (status.getTime() <= BlockedAddress.PERMANENT_BLOCK) {
                    // Fix: release the buffer on this early return; the
                    // datagram was previously leaked for blocked senders
                    datagram.content().release();
                    return; // Permanently blocked
                }
                if (System.currentTimeMillis() - status.getStartTime() < status.getTime()) {
                    datagram.content().release(); // Fix: was leaked here too
                    return; // Time hasn't expired
                }
                this.unblockAddress(sender.getAddress());
            }

            // Handle the packet and release the buffer
            server.handleMessage(packet, sender);
            datagram.content().readerIndex(0); // Reset position
            log.debug(loggerName + "Sent packet to server and reset Datagram buffer read position");
            for (RakNetServerListener listener : server.getListeners()) {
                listener.handleNettyMessage(datagram.content(), sender);
            }
            datagram.content().release(); // No longer needed
            log.debug(loggerName + "Sent Datagram buffer to server and released it");

            // No exceptions occurred, release the suspect
            this.causeAddress = null;
        }
    }

    @Override
    public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) {
        server.handleHandlerException(this.causeAddress, cause);
    }
}
|
Fixed #79
|
src/main/java/com/whirvis/jraknet/server/RakNetServerHandler.java
|
Fixed #79
|
|
Java
|
mit
|
9bcdc6cdd3ed82cbbe6ef91098d5358088aeaa16
| 0
|
classgraph/classgraph,lukehutch/fast-classpath-scanner,lukehutch/fast-classpath-scanner
|
/*
* This file is part of ClassGraph.
*
* Author: R. Kempees
*
* With contributions from @cpierceworld (#414)
*
* Hosted at: https://github.com/classgraph/classgraph
*
* --
*
* The MIT License (MIT)
*
* Copyright (c) 2017 R. Kempees (contributed to the ClassGraph project)
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
* documentation files (the "Software"), to deal in the Software without restriction, including without
* limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
* the Software, and to permit persons to whom the Software is furnished to do so, subject to the following
* conditions:
*
* The above copyright notice and this permission notice shall be included in all copies or substantial
* portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT
* LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO
* EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN
* AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE
* OR OTHER DEALINGS IN THE SOFTWARE.
*/
package nonapi.io.github.classgraph.classloaderhandler;
import java.io.File;
import java.net.URL;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import nonapi.io.github.classgraph.classpath.ClassLoaderOrder;
import nonapi.io.github.classgraph.classpath.ClasspathOrder;
import nonapi.io.github.classgraph.scanspec.ScanSpec;
import nonapi.io.github.classgraph.utils.LogNode;
import nonapi.io.github.classgraph.utils.ReflectionUtils;
/**
* WebsphereLibertyClassLoaderHandler.
*
* <p>
* Used to support WAS Liberty Profile classloading in io.github.classgraph
*
* @author R. Kempees
*/
class WebsphereLibertyClassLoaderHandler implements ClassLoaderHandler {
    /** {@code "com.ibm.ws.classloading.internal."} */
    private static final String PKG_PREFIX = "com.ibm.ws.classloading.internal.";
    /** {@code "com.ibm.ws.classloading.internal.AppClassLoader"} */
    private static final String IBM_APP_CLASS_LOADER = PKG_PREFIX + "AppClassLoader";
    /** {@code "com.ibm.ws.classloading.internal.ThreadContextClassLoader"} */
    private static final String IBM_THREAD_CONTEXT_CLASS_LOADER = PKG_PREFIX + "ThreadContextClassLoader";

    /** Class cannot be constructed. */
    private WebsphereLibertyClassLoaderHandler() {
    }

    /**
     * Check whether this {@link ClassLoaderHandler} can handle a given {@link ClassLoader}.
     *
     * @param classLoaderClass
     *            the {@link ClassLoader} class or one of its superclasses.
     * @param log
     *            the log (not consulted by this handler).
     * @return true if this {@link ClassLoaderHandler} can handle the {@link ClassLoader}.
     */
    public static boolean canHandle(final Class<?> classLoaderClass, final LogNode log) {
        // Match on the fully-qualified class name only -- the WAS Liberty classes are
        // not on the compile-time classpath, so an instanceof test is not possible.
        return IBM_APP_CLASS_LOADER.equals(classLoaderClass.getName())
                || IBM_THREAD_CONTEXT_CLASS_LOADER.equals(classLoaderClass.getName());
    }

    /**
     * Find the {@link ClassLoader} delegation order for a {@link ClassLoader}.
     *
     * @param classLoader
     *            the {@link ClassLoader} to find the order for.
     * @param classLoaderOrder
     *            a {@link ClassLoaderOrder} object to update.
     * @param log
     *            the log
     */
    public static void findClassLoaderOrder(final ClassLoader classLoader, final ClassLoaderOrder classLoaderOrder,
            final LogNode log) {
        // Parent-first delegation, then this classloader itself.
        classLoaderOrder.delegateTo(classLoader.getParent(), /* isParent = */ true, log);
        classLoaderOrder.add(classLoader, log);
    }

    /**
     * Get the paths from a containerClassLoader object.
     *
     * <p>
     * The passed in object should be an instance of "com.ibm.ws.classloading.internal.ContainerClassLoader".
     * <p>
     * Will attempt to use "getContainerURLs" methods to recap the classpath.
     *
     * @param containerClassLoader
     *            the containerClassLoader object
     * @return Collection of path objects as a {@link URL} or {@link String}.
     */
    private static Collection<Object> getPaths(final Object containerClassLoader) {
        if (containerClassLoader == null) {
            return Collections.<Object> emptyList();
        }
        // Expecting this to be an instance of
        // "com.ibm.ws.classloading.internal.ContainerClassLoader$UniversalContainer".
        // Call "getContainerURLs" to get its container's classpath.
        Collection<Object> urls = callGetUrls(containerClassLoader, "getContainerURLs");
        if (urls != null && !urls.isEmpty()) {
            return urls;
        }
        // "getContainerURLs" didn't work, try getting the container object...
        final Object container = ReflectionUtils.getFieldVal(containerClassLoader, "container", false);
        if (container == null) {
            return Collections.<Object> emptyList();
        }
        // Should be an instance of "com.ibm.wsspi.adaptable.module.Container".
        // Call "getURLs" to get its classpath.
        urls = callGetUrls(container, "getURLs");
        if (urls != null && !urls.isEmpty()) {
            return urls;
        }
        // "getURLs" did not work, reverting to previous logic of introspection of the "delegate".
        final Object delegate = ReflectionUtils.getFieldVal(container, "delegate", false);
        if (delegate == null) {
            return Collections.<Object> emptyList();
        }
        final String path = (String) ReflectionUtils.getFieldVal(delegate, "path", false);
        if (path != null && path.length() > 0) {
            return Collections.singletonList((Object) path);
        }
        final Object base = ReflectionUtils.getFieldVal(delegate, "base", false);
        if (base == null) {
            // giving up.
            return Collections.<Object> emptyList();
        }
        // Last resort: the delegate's base may wrap an archive file on disk.
        final Object archiveFile = ReflectionUtils.getFieldVal(base, "archiveFile", false);
        if (archiveFile != null) {
            final File file = (File) archiveFile;
            return Collections.singletonList((Object) file.getAbsolutePath());
        }
        return Collections.<Object> emptyList();
    }

    /**
     * Utility to call a "getURLs" method, flattening "collections of collections" and ignoring
     * "UnsupportedOperationException".
     *
     * All of the "getURLs" methods eventually call "com.ibm.wsspi.adaptable.module.Container#getURLs()".
     *
     * https://www.ibm.com/support/knowledgecenter/SSEQTP_liberty/com.ibm.websphere.javadoc.liberty.doc
     * /com.ibm.websphere.appserver.spi.artifact_1.2-javadoc
     * /com/ibm/wsspi/adaptable/module/Container.html?view=embed#getURLs() "A collection of URLs that represent all
     * of the locations on disk that contribute to this container"
     *
     * @param container
     *            the object to invoke the method on (may be null, in which case an empty list is returned).
     * @param methodName
     *            the name of the zero-argument "getURLs"-style method to invoke reflectively.
     * @return the flattened, de-duplicated collection of URLs, or an empty list if nothing could be obtained.
     */
    @SuppressWarnings("unchecked")
    private static Collection<Object> callGetUrls(final Object container, final String methodName) {
        if (container != null) {
            try {
                final Collection<Object> results = (Collection<Object>) ReflectionUtils.invokeMethod(container,
                        methodName, false);
                if (results != null && !results.isEmpty()) {
                    // HashSet de-duplicates URLs that appear in more than one sub-collection.
                    final Collection<Object> allUrls = new HashSet<>();
                    for (final Object result : results) {
                        if (result instanceof Collection) {
                            // SmartClassPath returns collection of collection of URLs.
                            for (final Object url : ((Collection<Object>) result)) {
                                if (url != null) {
                                    allUrls.add(url);
                                }
                            }
                        } else if (result != null) {
                            allUrls.add(result);
                        }
                    }
                    return allUrls;
                }
            } catch (final UnsupportedOperationException e) {
                /* ignore -- some Liberty container types do not support URL enumeration */
            }
        }
        return Collections.<Object> emptyList();
    }

    /**
     * Find the classpath entries for the associated {@link ClassLoader}.
     *
     * @param classLoader
     *            the {@link ClassLoader} to find the classpath entries order for.
     * @param classpathOrder
     *            a {@link ClasspathOrder} object to update.
     * @param scanSpec
     *            the {@link ScanSpec}.
     * @param log
     *            the log.
     */
    public static void findClasspathOrder(final ClassLoader classLoader, final ClasspathOrder classpathOrder,
            final ScanSpec scanSpec, final LogNode log) {
        Object smartClassPath;
        // ThreadContextClassLoader wraps an "appLoader"; AppClassLoader holds
        // "smartClassPath" directly.
        final Object appLoader = ReflectionUtils.getFieldVal(classLoader, "appLoader", false);
        if (appLoader != null) {
            smartClassPath = ReflectionUtils.getFieldVal(appLoader, "smartClassPath", false);
        } else {
            smartClassPath = ReflectionUtils.getFieldVal(classLoader, "smartClassPath", false);
        }
        if (smartClassPath != null) {
            // "com.ibm.ws.classloading.internal.ContainerClassLoader$SmartClassPath"
            // interface specifies a "getClassPath" to return all urls that makeup its path.
            final Collection<Object> paths = callGetUrls(smartClassPath, "getClassPath");
            if (!paths.isEmpty()) {
                for (final Object path : paths) {
                    classpathOrder.addClasspathEntry(path, classLoader, scanSpec, log);
                }
            } else {
                // "getClassPath" didn't work... reverting to looping over "classPath" elements.
                @SuppressWarnings("unchecked")
                final List<Object> classPathElements = (List<Object>) ReflectionUtils.getFieldVal(smartClassPath,
                        "classPath", false);
                if (classPathElements != null && !classPathElements.isEmpty()) {
                    for (final Object classPathElement : classPathElements) {
                        final Collection<Object> subPaths = getPaths(classPathElement);
                        for (final Object path : subPaths) {
                            classpathOrder.addClasspathEntry(path, classLoader, scanSpec, log);
                        }
                    }
                }
            }
        }
    }
}
|
src/main/java/nonapi/io/github/classgraph/classloaderhandler/WebsphereLibertyClassLoaderHandler.java
|
/*
* This file is part of ClassGraph.
*
* Author: R. Kempees
*
* With contributions from @cpierceworld (#414)
*
* Hosted at: https://github.com/classgraph/classgraph
*
* --
*
* The MIT License (MIT)
*
* Copyright (c) 2017 R. Kempees (contributed to the ClassGraph project)
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
* documentation files (the "Software"), to deal in the Software without restriction, including without
* limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
* the Software, and to permit persons to whom the Software is furnished to do so, subject to the following
* conditions:
*
* The above copyright notice and this permission notice shall be included in all copies or substantial
* portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT
* LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO
* EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN
* AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE
* OR OTHER DEALINGS IN THE SOFTWARE.
*/
package nonapi.io.github.classgraph.classloaderhandler;
import java.io.File;
import java.net.URL;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import nonapi.io.github.classgraph.classpath.ClassLoaderOrder;
import nonapi.io.github.classgraph.classpath.ClasspathOrder;
import nonapi.io.github.classgraph.scanspec.ScanSpec;
import nonapi.io.github.classgraph.utils.LogNode;
import nonapi.io.github.classgraph.utils.ReflectionUtils;
/**
* WebsphereLibertyClassLoaderHandler.
*
* <p>
* Used to support WAS Liberty Profile classloading in io.github.classgraph
*
* @author R. Kempees
*/
class WebsphereLibertyClassLoaderHandler implements ClassLoaderHandler {
    /** {@code "com.ibm.ws.classloading.internal."} */
    private static final String PKG_PREFIX = "com.ibm.ws.classloading.internal.";
    /** {@code "com.ibm.ws.classloading.internal.AppClassLoader"} */
    private static final String IBM_APP_CLASS_LOADER = PKG_PREFIX + "AppClassLoader";
    /** {@code "com.ibm.ws.classloading.internal.ThreadContextClassLoader"} */
    private static final String IBM_THREAD_CONTEXT_CLASS_LOADER = PKG_PREFIX + "ThreadContextClassLoader";

    /** Class cannot be constructed. */
    private WebsphereLibertyClassLoaderHandler() {
    }

    /**
     * Check whether this {@link ClassLoaderHandler} can handle a given {@link ClassLoader}.
     *
     * @param classLoaderClass
     *            the {@link ClassLoader} class or one of its superclasses.
     * @param log
     *            the log (not consulted by this handler).
     * @return true if this {@link ClassLoaderHandler} can handle the {@link ClassLoader}.
     */
    public static boolean canHandle(final Class<?> classLoaderClass, final LogNode log) {
        // Match on the fully-qualified class name only -- the WAS Liberty classes are
        // not on the compile-time classpath, so an instanceof test is not possible.
        return IBM_APP_CLASS_LOADER.equals(classLoaderClass.getName())
                || IBM_THREAD_CONTEXT_CLASS_LOADER.equals(classLoaderClass.getName());
    }

    /**
     * Find the {@link ClassLoader} delegation order for a {@link ClassLoader}.
     *
     * @param classLoader
     *            the {@link ClassLoader} to find the order for.
     * @param classLoaderOrder
     *            a {@link ClassLoaderOrder} object to update.
     * @param log
     *            the log
     */
    public static void findClassLoaderOrder(final ClassLoader classLoader, final ClassLoaderOrder classLoaderOrder,
            final LogNode log) {
        // Parent-first delegation, then this classloader itself.
        classLoaderOrder.delegateTo(classLoader.getParent(), /* isParent = */ true, log);
        classLoaderOrder.add(classLoader, log);
    }

    /**
     * Get the paths from a containerClassLoader object.
     *
     * <p>
     * The passed in object should be an instance of "com.ibm.ws.classloading.internal.ContainerClassLoader".
     * <p>
     * Will attempt to use "getContainerURLs" methods to recap the classpath.
     *
     * @param containerClassLoader
     *            the containerClassLoader object
     * @return Collection of path objects as a {@link URL} or {@link String}.
     */
    private static Collection<Object> getPaths(final Object containerClassLoader) {
        if (containerClassLoader == null) {
            return Collections.<Object> emptyList();
        }
        // Expecting this to be an instance of
        // "com.ibm.ws.classloading.internal.ContainerClassLoader$UniversalContainer".
        // Call "getContainerURLs" to get its container's classpath.
        Collection<Object> urls = callGetUrls(containerClassLoader, "getContainerURLs");
        if (urls != null && !urls.isEmpty()) {
            return urls;
        }
        // "getContainerURLs" didn't work, try getting the container object...
        final Object container = ReflectionUtils.getFieldVal(containerClassLoader, "container", false);
        if (container == null) {
            return Collections.<Object> emptyList();
        }
        // Should be an instance of "com.ibm.wsspi.adaptable.module.Container".
        // Call "getURLs" to get its classpath.
        urls = callGetUrls(container, "getURLs");
        if (urls != null && !urls.isEmpty()) {
            return urls;
        }
        // "getURLs" did not work, reverting to previous logic of introspection of the "delegate".
        final Object delegate = ReflectionUtils.getFieldVal(container, "delegate", false);
        if (delegate == null) {
            return Collections.<Object> emptyList();
        }
        final String path = (String) ReflectionUtils.getFieldVal(delegate, "path", false);
        if (path != null && path.length() > 0) {
            // Fix: use Collections.singletonList rather than a single-element
            // Arrays.asList call (flagged by the "Call to 'Arrays.asList()' with
            // too few arguments" inspection); singletonList is the idiomatic,
            // lighter-weight way to wrap exactly one element.
            return Collections.singletonList((Object) path);
        }
        final Object base = ReflectionUtils.getFieldVal(delegate, "base", false);
        if (base == null) {
            // giving up.
            return Collections.<Object> emptyList();
        }
        // Last resort: the delegate's base may wrap an archive file on disk.
        final Object archiveFile = ReflectionUtils.getFieldVal(base, "archiveFile", false);
        if (archiveFile != null) {
            final File file = (File) archiveFile;
            // Fix: Collections.singletonList instead of single-element Arrays.asList
            // (same inspection as above).
            return Collections.singletonList((Object) file.getAbsolutePath());
        }
        return Collections.<Object> emptyList();
    }

    /**
     * Utility to call a "getURLs" method, flattening "collections of collections" and ignoring
     * "UnsupportedOperationException".
     *
     * All of the "getURLs" methods eventually call "com.ibm.wsspi.adaptable.module.Container#getURLs()".
     *
     * https://www.ibm.com/support/knowledgecenter/SSEQTP_liberty/com.ibm.websphere.javadoc.liberty.doc
     * /com.ibm.websphere.appserver.spi.artifact_1.2-javadoc
     * /com/ibm/wsspi/adaptable/module/Container.html?view=embed#getURLs() "A collection of URLs that represent all
     * of the locations on disk that contribute to this container"
     *
     * @param container
     *            the object to invoke the method on (may be null, in which case an empty list is returned).
     * @param methodName
     *            the name of the zero-argument "getURLs"-style method to invoke reflectively.
     * @return the flattened, de-duplicated collection of URLs, or an empty list if nothing could be obtained.
     */
    @SuppressWarnings("unchecked")
    private static Collection<Object> callGetUrls(final Object container, final String methodName) {
        if (container != null) {
            try {
                final Collection<Object> results = (Collection<Object>) ReflectionUtils.invokeMethod(container,
                        methodName, false);
                if (results != null && !results.isEmpty()) {
                    // HashSet de-duplicates URLs that appear in more than one sub-collection.
                    final Collection<Object> allUrls = new HashSet<>();
                    for (final Object result : results) {
                        if (result instanceof Collection) {
                            // SmartClassPath returns collection of collection of URLs.
                            for (final Object url : ((Collection<Object>) result)) {
                                if (url != null) {
                                    allUrls.add(url);
                                }
                            }
                        } else if (result != null) {
                            allUrls.add(result);
                        }
                    }
                    return allUrls;
                }
            } catch (final UnsupportedOperationException e) {
                /* ignore -- some Liberty container types do not support URL enumeration */
            }
        }
        return Collections.<Object> emptyList();
    }

    /**
     * Find the classpath entries for the associated {@link ClassLoader}.
     *
     * @param classLoader
     *            the {@link ClassLoader} to find the classpath entries order for.
     * @param classpathOrder
     *            a {@link ClasspathOrder} object to update.
     * @param scanSpec
     *            the {@link ScanSpec}.
     * @param log
     *            the log.
     */
    public static void findClasspathOrder(final ClassLoader classLoader, final ClasspathOrder classpathOrder,
            final ScanSpec scanSpec, final LogNode log) {
        Object smartClassPath;
        // ThreadContextClassLoader wraps an "appLoader"; AppClassLoader holds
        // "smartClassPath" directly.
        final Object appLoader = ReflectionUtils.getFieldVal(classLoader, "appLoader", false);
        if (appLoader != null) {
            smartClassPath = ReflectionUtils.getFieldVal(appLoader, "smartClassPath", false);
        } else {
            smartClassPath = ReflectionUtils.getFieldVal(classLoader, "smartClassPath", false);
        }
        if (smartClassPath != null) {
            // "com.ibm.ws.classloading.internal.ContainerClassLoader$SmartClassPath"
            // interface specifies a "getClassPath" to return all urls that makeup its path.
            final Collection<Object> paths = callGetUrls(smartClassPath, "getClassPath");
            if (!paths.isEmpty()) {
                for (final Object path : paths) {
                    classpathOrder.addClasspathEntry(path, classLoader, scanSpec, log);
                }
            } else {
                // "getClassPath" didn't work... reverting to looping over "classPath" elements.
                @SuppressWarnings("unchecked")
                final List<Object> classPathElements = (List<Object>) ReflectionUtils.getFieldVal(smartClassPath,
                        "classPath", false);
                if (classPathElements != null && !classPathElements.isEmpty()) {
                    for (final Object classPathElement : classPathElements) {
                        final Collection<Object> subPaths = getPaths(classPathElement);
                        for (final Object path : subPaths) {
                            classpathOrder.addClasspathEntry(path, classLoader, scanSpec, log);
                        }
                    }
                }
            }
        }
    }
}
|
Call to 'Arrays.asList()' with too few arguments
|
src/main/java/nonapi/io/github/classgraph/classloaderhandler/WebsphereLibertyClassLoaderHandler.java
|
Call to 'Arrays.asList()' with too few arguments
|
|
Java
|
mit
|
325043b51e442b7191d76aceaa1ba83152f32076
| 0
|
seanmonstar/ServiceDroid
|
package com.monstarlab.servicedroid.activity;
import com.monstarlab.servicedroid.R;
import com.monstarlab.servicedroid.service.ReminderService;
import com.monstarlab.servicedroid.util.Changelog;
import android.app.Activity;
import android.app.TabActivity;
import android.content.Intent;
import android.os.Bundle;
import android.widget.TabHost;
public class ServiceDroidActivity extends TabActivity implements TabHost.OnTabChangeListener {

    private TabHost mTabHost;

    /**
     * Called when the activity is first created. Wires up the tab host, adds the
     * three content tabs (time, call book, statistics), shows the changelog on
     * first run, and starts the reminder service.
     */
    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        mTabHost = getTabHost();
        mTabHost.setOnTabChangedListener(this);
        setupTimeActivity();
        setupReturnVisitsActivity();
        setupStatisticsActivity();
        showWhatsNew();
        //setup reminders
        setupReminderService();
    }

    /** Registers a tab with the given tag, label, icon and content activity. */
    private void addTab(String tag, int labelId, int iconId, Class<?> target) {
        Intent content = new Intent(this, target);
        mTabHost.addTab(mTabHost.newTabSpec(tag)
                .setIndicator(getString(labelId), getResources().getDrawable(iconId))
                .setContent(content));
    }

    /** Adds the time-tracking tab. */
    public void setupTimeActivity() {
        addTab("time", R.string.time, R.drawable.clock, TimeActivity.class);
    }

    /** Adds the return-visits (call book) tab. */
    public void setupReturnVisitsActivity() {
        addTab("rvs", R.string.callbook, R.drawable.home, ReturnVisitsActivity.class);
    }

    /** Adds the statistics tab. */
    public void setupStatisticsActivity() {
        addTab("stats", R.string.stats, R.drawable.calendar, StatisticsActivity.class);
    }

    /** Opens the time-tracking screen as a standalone activity. */
    public void launchTimeView() {
        Intent timeIntent = new Intent(this, TimeActivity.class);
        this.startActivity(timeIntent);
    }

    /** Nudges the newly selected tab's activity so it refreshes its content. */
    public void onTabChanged(String tabId) {
        Activity selected = getLocalActivityManager().getActivity(tabId);
        if (selected != null) {
            selected.onWindowFocusChanged(true);
        }
    }

    /** Starts the background service that schedules reminders. */
    private void setupReminderService() {
        Intent reminderIntent = new Intent(this, ReminderService.class);
        startService(reminderIntent);
    }

    /** Shows the changelog dialog the first time this version runs. */
    private void showWhatsNew() {
        Changelog.showFirstTime(this);
    }
}
|
src/com/monstarlab/servicedroid/activity/ServiceDroidActivity.java
|
package com.monstarlab.servicedroid.activity;
import com.monstarlab.servicedroid.R;
import com.monstarlab.servicedroid.service.ReminderService;
import com.monstarlab.servicedroid.util.Changelog;
import android.app.Activity;
import android.app.TabActivity;
import android.content.Intent;
import android.os.Bundle;
import android.widget.TabHost;
public class ServiceDroidActivity extends TabActivity implements TabHost.OnTabChangeListener {
// Host for the three main tabs; assigned in onCreate.
private TabHost mTabHost;
/** Called when the activity is first created. */
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
mTabHost = getTabHost();
mTabHost.setOnTabChangedListener(this);
// Register the three content tabs in display order.
setupTimeActivity();
setupReturnVisitsActivity();
setupStatisticsActivity();
// Show the changelog dialog on first run of a new version.
showWhatsNew();
//setup reminders
setupReminderService();
}
// Adds the time-tracking tab ("time").
public void setupTimeActivity() {
Intent intent = new Intent(this, TimeActivity.class);
mTabHost.addTab(mTabHost.newTabSpec("time")
.setIndicator(getString(R.string.time), getResources().getDrawable(R.drawable.clock))
.setContent(intent));
}
// Adds the return-visits tab ("rvs").
// NOTE(review): this revision labels the tab with R.string.rv; a sibling
// revision uses R.string.callbook instead -- confirm which resource is current.
public void setupReturnVisitsActivity() {
Intent intent = new Intent(this, ReturnVisitsActivity.class);
mTabHost.addTab(mTabHost.newTabSpec("rvs")
.setIndicator(getString(R.string.rv), getResources().getDrawable(R.drawable.home))
.setContent(intent));
}
// Adds the statistics tab ("stats").
public void setupStatisticsActivity() {
Intent intent = new Intent(this, StatisticsActivity.class);
mTabHost.addTab(mTabHost.newTabSpec("stats")
..setIndicator(getString(R.string.stats), getResources().getDrawable(R.drawable.calendar))
.setContent(intent));
}
// Opens the time-tracking screen as a standalone activity.
public void launchTimeView() {
Intent i = new Intent(this, TimeActivity.class);
this.startActivity(i);
}
// Nudges the newly selected tab's activity so it can refresh its content.
public void onTabChanged(String tabId) {
Activity activity = getLocalActivityManager().getActivity(tabId);
if (activity != null) {
activity.onWindowFocusChanged(true);
}
}
// Starts the background service that schedules reminders.
private void setupReminderService() {
Intent i = new Intent(this, ReminderService.class);
startService(i);
}
// Shows the changelog dialog the first time this version runs.
private void showWhatsNew() {
Changelog.showFirstTime(this);
}
}
|
using new callbook string
|
src/com/monstarlab/servicedroid/activity/ServiceDroidActivity.java
|
using new callbook string
|
|
Java
|
epl-1.0
|
0fd5f4c29ccab9601d8c163ea960c43d74b1a9e4
| 0
|
Charling-Huang/birt,rrimmana/birt-1,Charling-Huang/birt,Charling-Huang/birt,sguan-actuate/birt,Charling-Huang/birt,sguan-actuate/birt,Charling-Huang/birt,sguan-actuate/birt,rrimmana/birt-1,sguan-actuate/birt,rrimmana/birt-1,rrimmana/birt-1,sguan-actuate/birt,rrimmana/birt-1
|
/*******************************************************************************
* Copyright (c) 2012 Megha Nidhi Dahal and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Megha Nidhi Dahal - initial API and implementation and/or initial documentation
* Actuate Corporation - more efficient xlsx processing;
* support of timestamp, datetime, time, and date data types
* Actuate Corporation - support defining an Excel input file path or URI as part of the data source definition
*******************************************************************************/
package org.eclipse.birt.report.data.oda.excel.impl.util;
import java.io.BufferedInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Date;
import java.util.LinkedHashMap;
import org.apache.poi.openxml4j.exceptions.InvalidFormatException;
import org.apache.poi.openxml4j.exceptions.OpenXML4JException;
import org.apache.poi.openxml4j.opc.OPCPackage;
import org.apache.poi.ss.usermodel.BuiltinFormats;
import org.apache.poi.xssf.eventusermodel.XSSFReader;
import org.apache.poi.xssf.model.SharedStringsTable;
import org.apache.poi.xssf.model.StylesTable;
import org.apache.poi.xssf.usermodel.XSSFCellStyle;
import org.apache.poi.xssf.usermodel.XSSFRichTextString;
import org.eclipse.birt.report.data.oda.excel.ExcelODAConstants;
import org.xml.sax.Attributes;
import org.xml.sax.ContentHandler;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
import org.xml.sax.XMLReader;
import org.xml.sax.helpers.DefaultHandler;
import org.xml.sax.helpers.XMLReaderFactory;
/**
 * SAX-event-based reader for .xlsx workbooks, built on the POI XSSF event user
 * model so that large sheets can be processed without materializing the whole
 * workbook in memory. Rows are delivered one at a time to an
 * {@link XlsxRowCallBack}.
 */
public class XlsxFileReader {
    final static String PARSER_CLASS_NAME = "org.apache.xerces.parsers.SAXParser"; //$NON-NLS-1$
    /** Sentinel exception message used to abort parsing once the row limit is hit. */
    final static String ROW_LIMIT_REACHED_EX_MSG = "Row Limit Reached"; //$NON-NLS-1$
    final private XSSFReader reader;

    /** Cell data types recognized while parsing sheet XML. */
    enum cDataType {
        BOOL,
        DATE,
        DATETIME,
        FORMULA,
        SSTINDEX,
        TIME,
        NUMBER,
        STATIC
    }

    /**
     * Opens the workbook wrapped by the given stream.
     *
     * @param fis
     *            stream positioned at the start of an OOXML (.xlsx) package.
     * @throws IOException
     *             if the package cannot be read.
     * @throws OpenXML4JException
     *             if the package is not a valid OOXML container.
     */
    public XlsxFileReader(InputStream fis) throws IOException,
            OpenXML4JException {
        OPCPackage pkg = OPCPackage.open(fis);
        reader = new XSSFReader(pkg);
    }

    /**
     * Reads the workbook part and returns the sheets it declares.
     *
     * @return map of sheet name to relationship id, in workbook declaration order.
     * @throws InvalidFormatException
     *             if the workbook part is malformed.
     * @throws IOException
     *             if the workbook part cannot be read.
     * @throws SAXException
     *             if XML parsing fails.
     */
    public LinkedHashMap<String, String> getSheetNames()
            throws InvalidFormatException, IOException, SAXException {
        BufferedInputStream wbData = new BufferedInputStream(reader.getWorkbookData());
        LinkedHashMap<String, String> sheetMap = new LinkedHashMap<String, String>();
        try {
            InputSource wbSource = new InputSource(wbData);
            XMLReader parser = fetchWorkbookParser(sheetMap);
            parser.parse(wbSource);
        } finally {
            if (wbData != null)
                wbData.close();
        }
        return sheetMap;
    }

    /**
     * Streams one sheet through the callback.
     *
     * @param rid
     *            relationship id of the sheet (a value from {@link #getSheetNames()}).
     * @param callback
     *            receiver of each parsed row.
     * @param xlsxRowsToRead
     *            maximum number of rows to read; 0 or negative means unlimited.
     *            When the limit is exceeded the parse is aborted by throwing a
     *            SAXException whose message is {@link #ROW_LIMIT_REACHED_EX_MSG}.
     * @throws InvalidFormatException
     *             if the sheet part is malformed.
     * @throws IOException
     *             if the sheet part cannot be read.
     * @throws SAXException
     *             on XML errors, or to signal the row limit was reached.
     */
    public void processSheet(String rid, XlsxRowCallBack callback, int xlsxRowsToRead)
            throws InvalidFormatException, IOException, SAXException {
        SharedStringsTable sst = reader.getSharedStringsTable();
        StylesTable st = reader.getStylesTable();
        XMLReader parser = fetchSheetParser(st, sst, callback, xlsxRowsToRead);
        BufferedInputStream sheet = new BufferedInputStream(reader.getSheet(rid));
        try {
            InputSource sheetSource = new InputSource(sheet);
            parser.parse(sheetSource);
        } finally {
            if (sheet != null)
                sheet.close();
        }
    }

    /** Builds a SAX parser whose handler converts sheet XML into row callbacks. */
    private XMLReader fetchSheetParser(StylesTable st, SharedStringsTable sst,
            XlsxRowCallBack callback, int xlsxRowsToRead) throws SAXException {
        XMLReader parser = XMLReaderFactory
                .createXMLReader( PARSER_CLASS_NAME );
        ContentHandler handler = new SheetHandler(st, sst, callback, xlsxRowsToRead);
        parser.setContentHandler(handler);
        return parser;
    }

    /** Builds a SAX parser whose handler collects sheet names from the workbook part. */
    private XMLReader fetchWorkbookParser(LinkedHashMap<String, String> sheetMap)
            throws SAXException {
        XMLReader parser = XMLReaderFactory
                .createXMLReader( PARSER_CLASS_NAME );
        ContentHandler handler = new WorkbookHandler(sheetMap);
        parser.setContentHandler(handler);
        return parser;
    }

    /**
     * See org.xml.sax.helpers.DefaultHandler javadocs
     */
    private static class SheetHandler extends DefaultHandler {
        // Type of the cell currently being parsed; reset to NUMBER at each </c>.
        private cDataType cellDataType;
        // Widest row seen so far; rows are padded to this width.
        private int columnCount =1;
        final private SharedStringsTable sst;
        final private StylesTable st;
        final private XlsxRowCallBack callback;
        // Accumulated character data of the current <v> element.
        private String lastContents;
        // Cell values of the row currently being assembled.
        private ArrayList<Object> values;
        private int currentColumn = 0;
        private int xlsxRowsToRead=0;
        private int currentXlsxRowNumber = 0;

        private SheetHandler(StylesTable st, SharedStringsTable sst, XlsxRowCallBack callback, int xlsxRowsToRead) {
            this.sst = sst;
            this.st = st;
            this.callback = callback;
            values = new ArrayList<Object>();
            this.cellDataType = cDataType.NUMBER;
            this.xlsxRowsToRead = xlsxRowsToRead;
        }

        public void startElement(String uri, String localName, String qName,
                Attributes attributes) throws SAXException {
            if (qName.equals("c")) {
                // "t" carries the cell type; "s" carries a style index used to
                // distinguish dates from plain formatted numbers.
                String vCellType = attributes.getValue("t");
                String cellS = attributes.getValue("s");
                if ("b".equals(vCellType))
                    cellDataType = cDataType.BOOL;
                else if ("e".equals(vCellType))
                    cellDataType = cDataType.FORMULA;
                else if ("s".equals(vCellType))
                    cellDataType = cDataType.SSTINDEX;
                else if("str".equals(vCellType))
                    cellDataType = cDataType.STATIC;
                else if (cellS != null) {
                    //number with formatting or date
                    int styleIndex = Integer.parseInt(cellS);
                    XSSFCellStyle style = st.getStyleAt(styleIndex);
                    short formatIndex = style.getDataFormat();
                    String formatString = style.getDataFormatString();
                    if (formatString == null)
                        formatString = BuiltinFormats.getBuiltinFormat(formatIndex);
                    if( org.apache.poi.ss.usermodel.DateUtil.isADateFormat(formatIndex, formatString) ){
                        cellDataType = cDataType.DATETIME;
                    }else{
                        cellDataType = cDataType.NUMBER;
                    }
                }
                else
                    cellDataType = cDataType.NUMBER;
                String r = attributes.getValue("r");
                currentColumn = getColumnNumber( r );
                //expand the number of columns if needed in existing rows
                if( currentColumn+1 > columnCount){
                    callback.columnExpansion(currentColumn+1);
                    //clean up current row
                    int newvals = (currentColumn+1) - columnCount;
                    for( int ii=0; ii<newvals;ii++){
                        values.add(ExcelODAConstants.EMPTY_STRING);
                    }
                    columnCount = currentColumn+1;
                }
            }
            //empty cells are not in the xml so we have
            //create them in the row
            if (qName.equals("row")) {
                for( int i=0;i<columnCount; i++){
                    values.add(i, ExcelODAConstants.EMPTY_STRING);
                }
            }
            lastContents = ExcelODAConstants.EMPTY_STRING;
        }

        public void endElement(String uri, String localName, String name)
                throws SAXException {
            if (name.equals("row")) {
                callback.handleRow(values);
                values.clear();
                currentColumn = -1;
                currentXlsxRowNumber++;
                if( xlsxRowsToRead > 0 ){
                    if( currentXlsxRowNumber > xlsxRowsToRead){
                        // SAX offers no clean early-exit; abort via a sentinel exception.
                        throw new SAXException( ROW_LIMIT_REACHED_EX_MSG );
                    }
                }
                return;
            } else if (name.equals("c")) {
                cellDataType = cDataType.NUMBER;
                return;
            } else if (name.equals("v")) {
                String val = ExcelODAConstants.EMPTY_STRING;
                // Process the last contents as required.
                // Do now, as characters() may be called more than once
                if (cellDataType == cDataType.SSTINDEX) {
                    int idx;
                    idx = Integer.parseInt(lastContents);
                    val = new XSSFRichTextString(sst.getEntryAt(idx))
                            .toString();
                } else if (cellDataType == cDataType.STATIC
                        || cellDataType == cDataType.NUMBER) {
                    val = lastContents;
                }else if( cellDataType == cDataType.DATETIME || cellDataType == cDataType.DATE || cellDataType == cDataType.TIME ){
                    // Dates are stored as serial numbers; convert to epoch millis.
                    Date myjavadate = org.apache.poi.ss.usermodel.DateUtil.getJavaDate(Double.parseDouble(lastContents));
                    long millis = myjavadate.getTime();
                    val = Long.toString(millis);
                }else if( cellDataType == cDataType.BOOL ){
                    // Booleans are encoded as "1"/"0" in the cell value.
                    // Fix: use Boolean.toString(...) instead of the deprecated
                    // new Boolean(...) constructor (needless boxing).
                    if( lastContents.compareTo("1") == 0){
                        val = Boolean.toString(true);
                    }else if( lastContents.compareTo("0") == 0 ){
                        val = Boolean.toString(false);
                    }
                }
                // v => contents of a cell
                if (val != null) {
                    if( currentColumn != -1){
                        // Fix: List.set replaces the element in place; the previous
                        // remove(i)/add(i, val) pair did two array shifts to the
                        // same effect.
                        values.set(currentColumn, val);
                    }
                }
            }
        }

        public void characters(char[] ch, int start, int length)
                throws SAXException {
            // May be invoked multiple times per <v>; accumulate until endElement.
            lastContents += new String(ch, start, length);
        }

        /**
         * Converts a cell reference such as "BC12" to a zero-based column index
         * by dropping the trailing row digits and decoding the letters base-26.
         */
        private int getColumnNumber(String colname) {
            int tmpcol = 0;
            String drpNumber = colname;
            for (int ch = 0; ch < colname.length(); ++ch) {
                if (!Character.isLetter(colname.charAt(ch))) {
                    drpNumber = colname.substring(0, ch);
                    break;
                }
            }
            int sum = 0;
            for (int ii = 0; ii < drpNumber.length(); ii++) {
                tmpcol = (drpNumber.charAt(ii) - 'A') + 1;
                sum = sum * 26 + tmpcol;
            }
            return sum - 1;
        }
    }

    /**
     * See org.xml.sax.helpers.DefaultHandler javadocs
     */
    private static class WorkbookHandler extends DefaultHandler {
        final private LinkedHashMap<String, String> sheetMap;

        private WorkbookHandler(LinkedHashMap<String, String> sheetMap) {
            this.sheetMap = sheetMap;
        }

        public void startElement(String uri, String localName, String qName,
                Attributes attributes) throws SAXException {
            // <sheet r:id="rId1" name="Sheet1" />
            if (qName.equals("sheet")) {
                String rid = attributes.getValue("r:id");
                String sheetName = attributes.getValue("name");
                sheetMap.put(sheetName, rid);
            }
        }
    }
}
|
data/org.eclipse.birt.report.data.oda.excel/src/org/eclipse/birt/report/data/oda/excel/impl/util/XlsxFileReader.java
|
/*******************************************************************************
* Copyright (c) 2012 Megha Nidhi Dahal and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Megha Nidhi Dahal - initial API and implementation and/or initial documentation
* Actuate Corporation - more efficient xlsx processing;
* support of timestamp, datetime, time, and date data types
* Actuate Corporation - support defining an Excel input file path or URI as part of the data source definition
*******************************************************************************/
package org.eclipse.birt.report.data.oda.excel.impl.util;
import java.io.BufferedInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Date;
import java.util.LinkedHashMap;
import org.apache.poi.openxml4j.exceptions.InvalidFormatException;
import org.apache.poi.openxml4j.exceptions.OpenXML4JException;
import org.apache.poi.openxml4j.opc.OPCPackage;
import org.apache.poi.ss.usermodel.BuiltinFormats;
import org.apache.poi.xssf.eventusermodel.XSSFReader;
import org.apache.poi.xssf.model.SharedStringsTable;
import org.apache.poi.xssf.model.StylesTable;
import org.apache.poi.xssf.usermodel.XSSFCellStyle;
import org.apache.poi.xssf.usermodel.XSSFRichTextString;
import org.eclipse.birt.report.data.oda.excel.ExcelODAConstants;
import org.xml.sax.Attributes;
import org.xml.sax.ContentHandler;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
import org.xml.sax.XMLReader;
import org.xml.sax.helpers.DefaultHandler;
import org.xml.sax.helpers.XMLReaderFactory;
/**
 * Event-driven (SAX) reader for .xlsx workbooks. Streams sheet XML through
 * Apache POI's {@link XSSFReader} instead of building a full object model,
 * so very large workbooks can be read with bounded memory.
 */
public class XlsxFileReader {
    /** SAX parser implementation class used for workbook and sheet XML. */
    final static String PARSER_CLASS_NAME = "org.apache.xerces.parsers.SAXParser"; //$NON-NLS-1$
    /** Message carried by the SAXException used to abort parsing at the row limit. */
    final static String ROW_LIMIT_REACHED_EX_MSG = "Row Limit Reached"; //$NON-NLS-1$

    final private XSSFReader reader;

    /** Data-type categories a sheet cell can resolve to while parsing. */
    enum cDataType {
        BOOL,
        DATE,
        DATETIME,
        FORMULA,
        SSTINDEX,
        TIME,
        NUMBER,
        STATIC
    }

    /**
     * Opens the given stream as an OOXML package and prepares an event reader.
     *
     * @param fis stream positioned at the start of an .xlsx file
     * @throws IOException if the stream cannot be read
     * @throws OpenXML4JException if the content is not a valid OOXML package
     */
    public XlsxFileReader(InputStream fis) throws IOException,
            OpenXML4JException {
        OPCPackage pkg = OPCPackage.open(fis);
        reader = new XSSFReader(pkg);
    }

    /**
     * Parses workbook.xml and returns sheet names mapped to their relationship
     * ids, preserving workbook order.
     *
     * @return map of sheet name to relationship id (for {@link #processSheet})
     */
    public LinkedHashMap<String, String> getSheetNames()
            throws InvalidFormatException, IOException, SAXException {
        BufferedInputStream wbData = new BufferedInputStream(reader.getWorkbookData());
        LinkedHashMap<String, String> sheetMap = new LinkedHashMap<String, String>();
        try {
            InputSource wbSource = new InputSource(wbData);
            XMLReader parser = fetchWorkbookParser(sheetMap);
            parser.parse(wbSource);
        } finally {
            // wbData is always non-null here; close unconditionally.
            wbData.close();
        }
        return sheetMap;
    }

    /**
     * Streams the sheet identified by the given relationship id, invoking the
     * callback once per row.
     *
     * @param rid workbook relationship id of the sheet (see {@link #getSheetNames()})
     * @param callback receives each materialized row
     * @param xlsxRowsToRead maximum number of rows to read; a value <= 0 means
     *        unlimited. When the limit is exceeded, parsing is aborted via a
     *        SAXException whose message is {@link #ROW_LIMIT_REACHED_EX_MSG}.
     */
    public void processSheet(String rid, XlsxRowCallBack callback, int xlsxRowsToRead)
            throws InvalidFormatException, IOException, SAXException {
        SharedStringsTable sst = reader.getSharedStringsTable();
        StylesTable st = reader.getStylesTable();
        XMLReader parser = fetchSheetParser(st, sst, callback, xlsxRowsToRead);
        BufferedInputStream sheet = new BufferedInputStream(reader.getSheet(rid));
        try {
            InputSource sheetSource = new InputSource(sheet);
            parser.parse(sheetSource);
        } finally {
            // sheet is always non-null here; close unconditionally.
            sheet.close();
        }
    }

    /** Builds a SAX reader wired to a {@link SheetHandler}. */
    private XMLReader fetchSheetParser(StylesTable st, SharedStringsTable sst,
            XlsxRowCallBack callback, int xlsxRowsToRead) throws SAXException {
        XMLReader parser = XMLReaderFactory
                .createXMLReader( PARSER_CLASS_NAME );
        ContentHandler handler = new SheetHandler(st, sst, callback, xlsxRowsToRead);
        parser.setContentHandler(handler);
        return parser;
    }

    /** Builds a SAX reader wired to a {@link WorkbookHandler}. */
    private XMLReader fetchWorkbookParser(LinkedHashMap<String, String> sheetMap)
            throws SAXException {
        XMLReader parser = XMLReaderFactory
                .createXMLReader( PARSER_CLASS_NAME );
        ContentHandler handler = new WorkbookHandler(sheetMap);
        parser.setContentHandler(handler);
        return parser;
    }

    /**
     * SAX handler that materializes each sheet row into a list of string
     * values and hands it to the {@link XlsxRowCallBack}.
     * See org.xml.sax.helpers.DefaultHandler javadocs.
     */
    private static class SheetHandler extends DefaultHandler {
        private cDataType cellDataType;
        // Widest row seen so far; new rows are pre-filled to this width.
        private int columnCount = 1;
        final private SharedStringsTable sst;
        final private StylesTable st;
        final private XlsxRowCallBack callback;
        // Accumulated character data of the current element.
        private String lastContents;
        private ArrayList<Object> values;
        // Zero-based column of the cell being parsed; -1 means "no cell".
        private int currentColumn = 0;
        private int xlsxRowsToRead = 0;
        private int currentXlsxRowNumber = 0;

        private SheetHandler(StylesTable st, SharedStringsTable sst, XlsxRowCallBack callback, int xlsxRowsToRead) {
            this.sst = sst;
            this.st = st;
            this.callback = callback;
            values = new ArrayList<Object>();
            this.cellDataType = cDataType.NUMBER;
            this.xlsxRowsToRead = xlsxRowsToRead;
        }

        public void startElement(String uri, String localName, String qName,
                Attributes attributes) throws SAXException {
            if (qName.equals("c")) {
                // Cell element: derive its data type from the "t" (type)
                // attribute, falling back to the "s" (style) attribute for
                // formatted numbers / dates.
                String vCellType = attributes.getValue("t");
                String cellS = attributes.getValue("s");
                if ("b".equals(vCellType))
                    cellDataType = cDataType.BOOL;
                else if ("e".equals(vCellType))
                    cellDataType = cDataType.FORMULA;
                else if ("s".equals(vCellType))
                    cellDataType = cDataType.SSTINDEX;
                else if ("str".equals(vCellType))
                    cellDataType = cDataType.STATIC;
                else if (cellS != null) {
                    // Number with formatting, possibly a date.
                    int styleIndex = Integer.parseInt(cellS);
                    XSSFCellStyle style = st.getStyleAt(styleIndex);
                    short formatIndex = style.getDataFormat();
                    String formatString = style.getDataFormatString();
                    if (formatString == null)
                        formatString = BuiltinFormats.getBuiltinFormat(formatIndex);
                    if (org.apache.poi.ss.usermodel.DateUtil.isADateFormat(formatIndex, formatString)) {
                        cellDataType = cDataType.DATETIME;
                    } else {
                        cellDataType = cDataType.NUMBER;
                    }
                } else
                    cellDataType = cDataType.NUMBER;

                String r = attributes.getValue("r");
                currentColumn = getColumnNumber(r);
                // Expand row width if this cell lies beyond the widest row
                // seen so far; pad the current row with empty strings.
                if (currentColumn + 1 > columnCount) {
                    callback.columnExpansion(currentColumn + 1);
                    int newvals = (currentColumn + 1) - columnCount;
                    for (int ii = 0; ii < newvals; ii++) {
                        values.add(ExcelODAConstants.EMPTY_STRING);
                    }
                    columnCount = currentColumn + 1;
                }
            }
            // Empty cells are omitted from the XML, so pre-fill each row with
            // empty strings; present cells overwrite their slot in endElement.
            if (qName.equals("row")) {
                for (int i = 0; i < columnCount; i++) {
                    values.add(i, ExcelODAConstants.EMPTY_STRING);
                }
            }
            lastContents = ExcelODAConstants.EMPTY_STRING;
        }

        public void endElement(String uri, String localName, String name)
                throws SAXException {
            if (name.equals("row")) {
                callback.handleRow(values);
                values.clear();
                currentColumn = -1;
                currentXlsxRowNumber++;
                if (xlsxRowsToRead > 0) {
                    if (currentXlsxRowNumber > xlsxRowsToRead) {
                        // Abort parsing; callers recognize this message.
                        throw new SAXException(ROW_LIMIT_REACHED_EX_MSG);
                    }
                }
                return;
            } else if (name.equals("c")) {
                cellDataType = cDataType.NUMBER;
                return;
            } else if (name.equals("v")) {
                String val = ExcelODAConstants.EMPTY_STRING;
                // Convert the accumulated contents now, as characters() may
                // be called more than once per element.
                if (cellDataType == cDataType.SSTINDEX) {
                    int idx = Integer.parseInt(lastContents);
                    val = new XSSFRichTextString(sst.getEntryAt(idx))
                            .toString();
                } else if (cellDataType == cDataType.STATIC
                        || cellDataType == cDataType.NUMBER) {
                    val = lastContents;
                } else if (cellDataType == cDataType.DATETIME || cellDataType == cDataType.DATE || cellDataType == cDataType.TIME) {
                    // Excel serial date -> epoch milliseconds as a string.
                    Date myjavadate = org.apache.poi.ss.usermodel.DateUtil.getJavaDate(Double.parseDouble(lastContents));
                    long millis = myjavadate.getTime();
                    val = Long.toString(millis);
                } else if (cellDataType == cDataType.BOOL) {
                    // Map "1"/"0" to canonical boolean strings; anything else
                    // stays empty. (Avoids the deprecated Boolean constructor.)
                    if ("1".equals(lastContents)) {
                        val = Boolean.TRUE.toString();
                    } else if ("0".equals(lastContents)) {
                        val = Boolean.FALSE.toString();
                    }
                }
                // v => contents of a cell
                if (val != null) {
                    if (currentColumn != -1) {
                        values.remove(currentColumn);
                        values.add(currentColumn, val);
                    }
                }
            }
        }

        public void characters(char[] ch, int start, int length)
                throws SAXException {
            lastContents += new String(ch, start, length);
        }

        /**
         * Converts the letter prefix of a cell reference (e.g. "AB12") into a
         * zero-based column index. The letters form a bijective base-26
         * number: "A" -> 0, "Z" -> 25, "AA" -> 26, "BA" -> 52, ...
         * <p>
         * Fix: the previous implementation computed {@code position*26 +
         * letter}, which returned wrong indices for any multi-letter column
         * whose first letter was past 'A' (e.g. "BA" mapped to 26 instead of
         * 52), so wide sheets lost columns.
         */
        private int getColumnNumber(String colname) {
            int tmpcol = 0;
            for (int ch = 0; ch < colname.length(); ++ch) {
                char c = colname.charAt(ch);
                if (!Character.isLetter(c)) {
                    break; // digits begin the row number
                }
                tmpcol = tmpcol * 26 + (c - 'A' + 1);
            }
            return tmpcol - 1;
        }
    }

    /**
     * SAX handler that collects &lt;sheet name="..." r:id="..."/&gt; entries
     * from workbook.xml into the supplied map.
     * See org.xml.sax.helpers.DefaultHandler javadocs.
     */
    private static class WorkbookHandler extends DefaultHandler {
        final private LinkedHashMap<String, String> sheetMap;

        private WorkbookHandler(LinkedHashMap<String, String> sheetMap) {
            this.sheetMap = sheetMap;
        }

        public void startElement(String uri, String localName, String qName,
                Attributes attributes) throws SAXException {
            // <sheet r:id="rId1" name="Sheet1" />
            if (qName.equals("sheet")) {
                String rid = attributes.getValue("r:id");
                String sheetName = attributes.getValue("name");
                sheetMap.put(sheetName, rid);
            }
        }
    }
}
|
fix 49898 Cannot find all 112 columns in xlsx by OOTB Excel Connector
|
data/org.eclipse.birt.report.data.oda.excel/src/org/eclipse/birt/report/data/oda/excel/impl/util/XlsxFileReader.java
|
fix 49898 Cannot find all 112 columns in xlsx by OOTB Excel Connector
|
|
Java
|
agpl-3.0
|
0f541f5be7cb40b309aae0735fbb40950d32c223
| 0
|
esofthead/mycollab,onlylin/mycollab,MyCollab/mycollab,esofthead/mycollab,MyCollab/mycollab,aglne/mycollab,maduhu/mycollab,uniteddiversity/mycollab,maduhu/mycollab,esofthead/mycollab,uniteddiversity/mycollab,aglne/mycollab,onlylin/mycollab,MyCollab/mycollab,maduhu/mycollab,aglne/mycollab,onlylin/mycollab,aglne/mycollab,onlylin/mycollab,MyCollab/mycollab,uniteddiversity/mycollab,MyCollab/mycollab,uniteddiversity/mycollab,esofthead/mycollab,maduhu/mycollab
|
/**
* This file is part of mycollab-services-community.
*
* mycollab-services-community is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* mycollab-services-community is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with mycollab-services-community. If not, see <http://www.gnu.org/licenses/>.
*/
package com.esofthead.mycollab.module.wiki.service;
import java.util.List;
import javax.jcr.RepositoryException;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import com.esofthead.mycollab.common.i18n.WikiI18nEnum;
import com.esofthead.mycollab.module.wiki.domain.Page;
import com.esofthead.mycollab.module.wiki.domain.PageVersion;
import com.esofthead.mycollab.test.MyCollabClassRunner;
import com.esofthead.mycollab.test.service.ServiceTest;
/**
 * Integration tests for {@link WikiService}: page creation, listing,
 * update-in-place, and version history.
 */
@RunWith(MyCollabClassRunner.class)
public class WikiServiceTest extends ServiceTest {
    @Autowired
    private WikiService wikiService;

    /** Creates one public page at 1/page/document_1 before every test. */
    @Before
    public void setup() {
        Page page = new Page();
        page.setCreatedUser("hainguyen@esofthead.com");
        page.setCategory("abc");
        page.setPath("1/page/document_1");
        page.setStatus(WikiI18nEnum.status_public.name());
        page.setSubject("Hello world");
        page.setContent("My name is <b>Hai Nguyen</b>");
        wikiService.savePage(page, "hainguyen@esofthead.com");
    }

    /** Removes every resource created by the tests. */
    @After
    public void teardown() {
        wikiService.removeResource("");
    }

    @Test
    public void testGetWikiPages() {
        List<Page> pages = wikiService.getPages("1/page",
                "hainguyen@esofthead.com");
        Assert.assertEquals(1, pages.size());
        Page page = pages.get(0);
        Assert.assertEquals("abc", page.getCategory());
    }

    /**
     * Persists a second, private page used by {@link #testGetResources()}.
     * Fix: the original set the status twice (public, then private); the
     * first call was dead code and has been removed — the page is private.
     */
    private void savePage2() {
        Page page = new Page();
        page.setCreatedUser("hainguyen@esofthead.com");
        page.setCategory("abc");
        page.setPath("1/page/document_2");
        page.setStatus(WikiI18nEnum.status_private.name());
        page.setSubject("Hello world 2");
        page.setContent("My name is <b>Bao Han</b>");
        wikiService.savePage(page, "hainguyen@esofthead.com");
    }

    @Test
    public void testGetResources() {
        savePage2();
        List<Page> pages = wikiService.getPages("1/page",
                "hainguyen@esofthead.com");
        Assert.assertEquals(2, pages.size());
    }

    /** Saving to an existing path must update the page, not create a new one. */
    @Test
    public void testUpdatePage() throws RepositoryException {
        Page page = new Page();
        page.setCreatedUser("hainguyen@esofthead.com");
        page.setCategory("abc");
        page.setPath("1/page/document_1");
        page.setStatus(WikiI18nEnum.status_public.name());
        page.setSubject("Hello world 2");
        page.setContent("My name is <b>Bao Han</b>");
        wikiService.savePage(page, "hainguyen@esofthead.com");
        List<Page> pages = wikiService.getPages("1/page",
                "hainguyen@esofthead.com");
        Assert.assertEquals(1, pages.size());
        page = pages.get(0);
        Assert.assertEquals("Hello world 2", page.getSubject());
    }

    /** Each save of the same path must add a retrievable version. */
    @Test
    public void testGetVersions() {
        Page page = new Page();
        page.setCreatedUser("hainguyen@esofthead.com");
        page.setCategory("abc");
        page.setPath("1/page/document_1");
        page.setStatus(WikiI18nEnum.status_public.name());
        page.setSubject("Hello world 2");
        page.setContent("My name is <b>Bao Han</b>");
        wikiService.savePage(page, "hainguyen@esofthead.com");
        page.setSubject("Hello world 3");
        wikiService.savePage(page, "hainguyen@esofthead.com");
        List<PageVersion> versions = wikiService
                .getPageVersions("1/page/document_1");
        Assert.assertEquals(2, versions.size());
        page = wikiService.getPageByVersion("1/page/document_1", "1.0");
        Assert.assertEquals("Hello world 2", page.getSubject());
    }
}
|
mycollab-services-community/src/test/java/com/esofthead/mycollab/module/wiki/service/WikiServiceTest.java
|
/**
* This file is part of mycollab-services-community.
*
* mycollab-services-community is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* mycollab-services-community is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with mycollab-services-community. If not, see <http://www.gnu.org/licenses/>.
*/
package com.esofthead.mycollab.module.wiki.service;
import java.util.List;
import javax.jcr.RepositoryException;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import com.esofthead.mycollab.common.i18n.WikiI18nEnum;
import com.esofthead.mycollab.module.wiki.domain.Page;
import com.esofthead.mycollab.module.wiki.domain.PageVersion;
import com.esofthead.mycollab.test.MyCollabClassRunner;
import com.esofthead.mycollab.test.service.ServiceTest;
/**
 * Integration tests for {@link WikiService}: page creation, listing,
 * update-in-place, and version history.
 */
@RunWith(MyCollabClassRunner.class)
public class WikiServiceTest extends ServiceTest {
    @Autowired
    private WikiService wikiService;

    /** Creates one public page at 1/page/document_1 before every test. */
    @Before
    public void setup() {
        Page page = new Page();
        page.setCreatedUser("hainguyen@esofthead.com");
        page.setCategory("abc");
        page.setPath("1/page/document_1");
        page.setStatus(WikiI18nEnum.status_public.name());
        page.setSubject("Hello world");
        page.setContent("My name is <b>Hai Nguyen</b>");
        wikiService.savePage(page, "hainguyen@esofthead.com");
    }

    /** Removes every resource created by the tests. */
    @After
    public void teardown() {
        wikiService.removeResource("");
    }

    @Test
    public void testGetWikiPages() {
        List<Page> pages = wikiService.getPages("1/page",
                "hainguyen@esofthead.com");
        Assert.assertEquals(1, pages.size());
        Page page = pages.get(0);
        Assert.assertEquals("abc", page.getCategory());
    }

    /**
     * Persists a second, private page used by {@link #testGetResources()}.
     * Fix: the original set the status twice (public, then private); the
     * first call was dead code and has been removed — the page is private.
     */
    private void savePage2() {
        Page page = new Page();
        page.setCreatedUser("hainguyen@esofthead.com");
        page.setCategory("abc");
        page.setPath("1/page/document_2");
        page.setStatus(WikiI18nEnum.status_private.name());
        page.setSubject("Hello world 2");
        page.setContent("My name is <b>Bao Han</b>");
        wikiService.savePage(page, "hainguyen@esofthead.com");
    }

    @Test
    public void testGetResources() {
        savePage2();
        List<Page> pages = wikiService.getPages("1/page",
                "hainguyen@esofthead.com");
        Assert.assertEquals(2, pages.size());
    }

    /** Saving to an existing path must update the page, not create a new one. */
    @Test
    public void testUpdatePage() throws RepositoryException {
        Page page = new Page();
        page.setCreatedUser("hainguyen@esofthead.com");
        page.setCategory("abc");
        page.setPath("1/page/document_1");
        page.setStatus(WikiI18nEnum.status_public.name());
        page.setSubject("Hello world 2");
        page.setContent("My name is <b>Bao Han</b>");
        wikiService.savePage(page, "hainguyen@esofthead.com");
        List<Page> pages = wikiService.getPages("1/page",
                "hainguyen@esofthead.com");
        Assert.assertEquals(1, pages.size());
        page = pages.get(0);
        Assert.assertEquals("Hello world 2", page.getSubject());
    }

    /** Each save of the same path must add a retrievable version. */
    @Test
    public void testGetVersions() {
        Page page = new Page();
        page.setCreatedUser("hainguyen@esofthead.com");
        page.setCategory("abc");
        page.setPath("1/page/document_1");
        page.setStatus(WikiI18nEnum.status_public.name());
        page.setSubject("Hello world 2");
        page.setContent("My name is <b>Bao Han</b>");
        wikiService.savePage(page, "hainguyen@esofthead.com");
        page.setSubject("Hello world 3");
        wikiService.savePage(page, "hainguyen@esofthead.com");
        List<PageVersion> versions = wikiService
                .getPageVersions("1/page/document_1");
        Assert.assertEquals(3, versions.size());
        page = wikiService.getPageByVersion("1/page/document_1", "1.0");
        Assert.assertEquals("Hello world 2", page.getSubject());
    }
}
|
Fixed unit test
|
mycollab-services-community/src/test/java/com/esofthead/mycollab/module/wiki/service/WikiServiceTest.java
|
Fixed unit test
|
|
Java
|
lgpl-2.1
|
e826e9e3cd594a39827231aa792743e5adad0a56
| 0
|
geotools/geotools,geotools/geotools,geotools/geotools,geotools/geotools
|
/*
* GeoTools - The Open Source Java GIS Toolkit
* http://geotools.org
*
* (C) 2009-2011, Open Source Geospatial Foundation (OSGeo)
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation;
* version 2.1 of the License.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*/
package org.geotools.data.complex;
import java.io.IOException;
import java.util.logging.Logger;
import org.geotools.data.FeatureSource;
import org.geotools.data.Query;
import org.geotools.data.complex.config.AppSchemaDataAccessConfigurator;
import org.geotools.data.complex.filter.ComplexFilterSplitter;
import org.geotools.data.joining.JoiningQuery;
import org.geotools.filter.FidFilterImpl;
import org.geotools.filter.FilterCapabilities;
import org.geotools.filter.NestedAttributeExpression;
import org.geotools.filter.visitor.DefaultFilterVisitor;
import org.geotools.jdbc.JDBCFeatureSource;
import org.opengis.filter.Filter;
import org.opengis.filter.expression.PropertyName;
/**
 * Factory that selects the appropriate IMappingFeatureIterator implementation
 * for a given feature type mapping and query.
 *
 * @author Russell Petty (GeoScience Victoria)
 *
 *
 *
 * @source $URL$
 *         http://svn.osgeo.org/geotools/trunk/modules/unsupported/app-schema/app-schema/src/main
 *         /java/org/geotools/data/complex/MappingFeatureIteratorFactory.java $
 */
public class MappingFeatureIteratorFactory {
    protected static final Logger LOGGER = org.geotools.util.logging.Logging
            .getLogger("org.geotools.data.complex");

    /**
     * Filter visitor that records whether the visited filter references any
     * nested attribute (i.e. contains a NestedAttributeExpression).
     */
    protected static class CheckIfNestedFilterVisitor extends DefaultFilterVisitor {
        public boolean hasNestedAttributes = false;

        public Object visit( PropertyName expression, Object data ) {
            if (expression instanceof NestedAttributeExpression) {
                hasNestedAttributes = true;
            }
            return data;
        }
    }

    /**
     * Builds the iterator for the given mapping/query combination:
     * <ul>
     * <li>XmlFeatureTypeMapping -> XmlMappingFeatureIterator;</li>
     * <li>joining enabled -> DataAccessMappingFeatureIterator, optionally
     * wrapped in a PostFilteringMappingFeatureIterator for the filter part the
     * datastore could not evaluate;</li>
     * <li>otherwise -> Filtering- or DataAccessMappingFeatureIterator,
     * depending on whether the unrolled filter touches nested attributes.</li>
     * </ul>
     *
     * @param unrolledFilter if non-null, a filter already unrolled against the
     *        mapping; it replaces the query's own filter
     * @throws IOException if the underlying data access fails
     * @throws IllegalArgumentException if joining is enabled but the mapped
     *         source is not a JDBCFeatureSource
     */
    public static IMappingFeatureIterator getInstance(AppSchemaDataAccess store,
            FeatureTypeMapping mapping, Query query, Filter unrolledFilter) throws IOException {
        if (mapping instanceof XmlFeatureTypeMapping) {
            return new XmlMappingFeatureIterator(store, mapping, query);
        }
        if (AppSchemaDataAccessConfigurator.isJoining()) {
            // Joining requires a JoiningQuery; wrap a plain query if needed.
            if (!(query instanceof JoiningQuery)) {
                query = new JoiningQuery(query);
            }
            FeatureSource mappedSource = mapping.getSource();
            FilterCapabilities capabilities = null;
            if (mappedSource instanceof JDBCFeatureSource) {
                capabilities = ((JDBCFeatureSource) mappedSource).getDataStore().getFilterCapabilities();
            }
            else {
                throw new IllegalArgumentException("Joining Queries only work on JDBC Feature Source!");
            }
            IMappingFeatureIterator iterator;
            if (unrolledFilter != null) {
                // Caller supplied an already-unrolled filter: apply it to the
                // unrolled query and run the outer query unfiltered.
                query.setFilter(Filter.INCLUDE);
                Query unrolledQuery = store.unrollQuery(query, mapping);
                unrolledQuery.setFilter(unrolledFilter);
                iterator = new DataAccessMappingFeatureIterator(store, mapping, query, false, unrolledQuery);
            } else {
                // Split the filter into the part the JDBC store can evaluate
                // (pre) and the part evaluated here afterwards (post).
                Filter filter = query.getFilter();
                ComplexFilterSplitter splitter = new ComplexFilterSplitter( capabilities , mapping );
                filter.accept(splitter, null);
                query.setFilter(splitter.getFilterPre());
                filter = splitter.getFilterPost();
                int maxFeatures = Query.DEFAULT_MAX;
                if (filter != null && filter != Filter.INCLUDE) {
                    // Post-filtering happens after the store query, so the
                    // feature limit must be applied after filtering too.
                    maxFeatures = query.getMaxFeatures();
                    query.setMaxFeatures(Query.DEFAULT_MAX);
                }
                iterator = new DataAccessMappingFeatureIterator(store, mapping, query, false);
                if (filter != null && filter != Filter.INCLUDE) {
                    iterator = new PostFilteringMappingFeatureIterator(iterator, filter, maxFeatures);
                }
            }
            return iterator;
        } else {
            if (query.getFilter() != null) {
                Query unrolledQuery = store.unrollQuery(query, mapping);
                Filter filter = unrolledQuery.getFilter();
                CheckIfNestedFilterVisitor visitor = new CheckIfNestedFilterVisitor();
                filter.accept(visitor, null);
                if (visitor.hasNestedAttributes) {
                    // Filter touches nested attributes: fetch unfiltered and
                    // evaluate the filter per feature while iterating.
                    unrolledQuery.setFilter(Filter.INCLUDE);
                    return new FilteringMappingFeatureIterator(store, mapping, query, unrolledQuery, filter);
                } else if (!filter.equals(Filter.INCLUDE) && !filter.equals(Filter.EXCLUDE)
                        && !(filter instanceof FidFilterImpl)) {
                    // normal filters
                    return new DataAccessMappingFeatureIterator(store, mapping, query, true, unrolledQuery);
                }
            }
            return new DataAccessMappingFeatureIterator(store, mapping, query, false);
        }
    }
}
|
modules/extension/app-schema/app-schema/src/main/java/org/geotools/data/complex/MappingFeatureIteratorFactory.java
|
/*
* GeoTools - The Open Source Java GIS Toolkit
* http://geotools.org
*
* (C) 2009-2011, Open Source Geospatial Foundation (OSGeo)
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation;
* version 2.1 of the License.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*/
package org.geotools.data.complex;
import java.io.IOException;
import java.util.logging.Logger;
import org.geotools.data.FeatureSource;
import org.geotools.data.Query;
import org.geotools.data.complex.config.AppSchemaDataAccessConfigurator;
import org.geotools.data.complex.filter.ComplexFilterSplitter;
import org.geotools.data.joining.JoiningQuery;
import org.geotools.filter.FidFilterImpl;
import org.geotools.filter.FilterCapabilities;
import org.geotools.filter.NestedAttributeExpression;
import org.geotools.filter.visitor.DefaultFilterVisitor;
import org.geotools.jdbc.JDBCFeatureSource;
import org.opengis.filter.Filter;
import org.opengis.filter.expression.PropertyName;
/**
 * Factory that selects the appropriate IMappingFeatureIterator implementation
 * for a given feature type mapping and query.
 *
 * @author Russell Petty (GeoScience Victoria)
 *
 *
 *
 * @source $URL$
 *         http://svn.osgeo.org/geotools/trunk/modules/unsupported/app-schema/app-schema/src/main
 *         /java/org/geotools/data/complex/MappingFeatureIteratorFactory.java $
 */
public class MappingFeatureIteratorFactory {
    protected static final Logger LOGGER = org.geotools.util.logging.Logging
            .getLogger("org.geotools.data.complex");

    /**
     * Filter visitor that records whether the visited filter references any
     * nested attribute (i.e. contains a NestedAttributeExpression).
     */
    protected static class CheckIfNestedFilterVisitor extends DefaultFilterVisitor {
        public boolean hasNestedAttributes = false;

        public Object visit( PropertyName expression, Object data ) {
            if (expression instanceof NestedAttributeExpression) {
                hasNestedAttributes = true;
            }
            return data;
        }
    }

    /**
     * Builds the iterator for the given mapping/query combination:
     * <ul>
     * <li>XmlFeatureTypeMapping -> XmlMappingFeatureIterator;</li>
     * <li>joining enabled -> DataAccessMappingFeatureIterator, optionally
     * wrapped in a PostFilteringMappingFeatureIterator for the filter part the
     * datastore could not evaluate;</li>
     * <li>otherwise -> Filtering- or DataAccessMappingFeatureIterator,
     * depending on whether the unrolled filter touches nested attributes.</li>
     * </ul>
     *
     * @param unrolledFilter if non-null, a filter already unrolled against the
     *        mapping; it replaces the query's own filter
     * @throws IOException if the underlying data access fails
     * @throws IllegalArgumentException if joining is enabled but the mapped
     *         source is not a JDBCFeatureSource, or if the filter splitter
     *         left nested attributes in the datastore-side filter
     */
    public static IMappingFeatureIterator getInstance(AppSchemaDataAccess store,
            FeatureTypeMapping mapping, Query query, Filter unrolledFilter) throws IOException {
        if (mapping instanceof XmlFeatureTypeMapping) {
            return new XmlMappingFeatureIterator(store, mapping, query);
        }
        if (AppSchemaDataAccessConfigurator.isJoining()) {
            // Joining requires a JoiningQuery; wrap a plain query if needed.
            if (!(query instanceof JoiningQuery)) {
                query = new JoiningQuery(query);
            }
            FeatureSource mappedSource = mapping.getSource();
            FilterCapabilities capabilities = null;
            if (mappedSource instanceof JDBCFeatureSource) {
                capabilities = ((JDBCFeatureSource) mappedSource).getDataStore().getFilterCapabilities();
            }
            else {
                throw new IllegalArgumentException("Joining Queries only work on JDBC Feature Source!");
            }
            IMappingFeatureIterator iterator;
            if (unrolledFilter != null) {
                // Caller supplied an already-unrolled filter: apply it to the
                // unrolled query and run the outer query unfiltered.
                query.setFilter(Filter.INCLUDE);
                Query unrolledQuery = store.unrollQuery(query, mapping);
                unrolledQuery.setFilter(unrolledFilter);
                iterator = new DataAccessMappingFeatureIterator(store, mapping, query, false, unrolledQuery);
            } else {
                // Split the filter into the part the JDBC store can evaluate
                // (pre) and the part evaluated here afterwards (post).
                Filter filter = query.getFilter();
                ComplexFilterSplitter splitter = new ComplexFilterSplitter( capabilities , mapping );
                filter.accept(splitter, null);
                // Sanity check: the datastore-side filter must not reference
                // nested attributes; the splitter should have moved those to
                // the post-filter.
                CheckIfNestedFilterVisitor visitor = new CheckIfNestedFilterVisitor();
                splitter.getFilterPre().accept(visitor, null);
                if (visitor.hasNestedAttributes) {
                    throw new IllegalArgumentException("Internal Error: filter was not split properly.");
                }
                query.setFilter(splitter.getFilterPre());
                filter = splitter.getFilterPost();
                int maxFeatures = Query.DEFAULT_MAX;
                if (filter != null && filter != Filter.INCLUDE) {
                    // Post-filtering happens after the store query, so the
                    // feature limit must be applied after filtering too.
                    maxFeatures = query.getMaxFeatures();
                    query.setMaxFeatures(Query.DEFAULT_MAX);
                }
                iterator = new DataAccessMappingFeatureIterator(store, mapping, query, false);
                if (filter != null && filter != Filter.INCLUDE) {
                    iterator = new PostFilteringMappingFeatureIterator(iterator, filter, maxFeatures);
                }
            }
            return iterator;
        } else {
            if (query.getFilter() != null) {
                Query unrolledQuery = store.unrollQuery(query, mapping);
                Filter filter = unrolledQuery.getFilter();
                CheckIfNestedFilterVisitor visitor = new CheckIfNestedFilterVisitor();
                filter.accept(visitor, null);
                if (visitor.hasNestedAttributes) {
                    // Filter touches nested attributes: fetch unfiltered and
                    // evaluate the filter per feature while iterating.
                    unrolledQuery.setFilter(Filter.INCLUDE);
                    return new FilteringMappingFeatureIterator(store, mapping, query, unrolledQuery, filter);
                } else if (!filter.equals(Filter.INCLUDE) && !filter.equals(Filter.EXCLUDE)
                        && !(filter instanceof FidFilterImpl)) {
                    // normal filters
                    return new DataAccessMappingFeatureIterator(store, mapping, query, true, unrolledQuery);
                }
            }
            return new DataAccessMappingFeatureIterator(store, mapping, query, false);
        }
    }
}
|
Removing some obsolete code in joining
git-svn-id: b0f10281c9a1a817905b9aa75a7907aa928f8a7d@37439 e5c1c795-43da-0310-a71f-fac65c449510
|
modules/extension/app-schema/app-schema/src/main/java/org/geotools/data/complex/MappingFeatureIteratorFactory.java
|
Removing some obsolete code in joining
|
|
Java
|
apache-2.0
|
50137aa691ab9f51b405d1ea4708fcb8e8e4d56d
| 0
|
Natio/Places,Natio/Places,Natio/Places
|
package com.gcw.sapienza.places.utils;
import android.content.Context;
import android.os.AsyncTask;
import android.os.Looper;
import android.util.Log;
import com.gcw.sapienza.places.BuildConfig;
import com.gcw.sapienza.places.R;
import com.gcw.sapienza.places.model.Flag;
import com.parse.ParseException;
import com.parse.ParseFile;
import com.parse.ProgressCallback;
import com.parse.SaveCallback;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.util.HashMap;
/**
* This class allows to asynchronously upload a Flag and to have completion callbacks
*
*
* Usage:
*
* File video;
* File audio;
* File picture;
*
* FlagUploader up = new FlagUploader();
* up.setVideoFile(video);
* up.setAudioFile(audio);
* up.setPictureFile(picture);
*
* uploader.upload(new FlagUploaderCallbacks{...});
*
*
* How It Works
*
* When upload() is called FlagUploader.loadFileLoop() is automatically invoked.
* FlagUploader.loadFileLoop() is the responsible of ParseFile creation and loading.
*
* 1) loadFileLoop() checks that this.files is not empty
* 2) loadFileLoop() removes a File from this.files (that is the Files to upload storage)and schedules a FileLoaderTask that loads the file
* in memory and creates a ParseFile.
* 3) When the ParseFile is configured FileLoaderTask calls FlagUploader onParseFileInMemoryLoadDone(ParseFile parse_file)
* 4) onParseFileInMemoryLoadDone starts the ParseFile upload; when the upload is finished control returns to 1.
*
* 5) if this.files is empty loadFlag() will be called and will upload the flag to Parse.com
* 6) in case of success onFinish() will be called otherwise onError()
*
*
* Created by paolo on 12/01/15.
*/
public class FlagUploader {
private static final String TAG = "FlagUploader";
private static final String AUDIO_KEY = Flag.AUDIO_KEY;
private static final String VIDEO_KEY = Flag.VIDEO_KEY;
private static final String PICTURE_KEY = Flag.PICTURE_KEY;
private final Flag flag;
private HashMap<String, File> files;
private HashMap<String, File> usedFiled;
private boolean isUploading;
private FlagUploaderCallbacks callbacks;
// private boolean deleteFilesOnFinish = false;
private String currentFileKey = null;
private final Context context;
/**
 * Creates a new, idle uploader for the given flag.
 *
 * @param f   flag to persist once all of its media files have been uploaded
 * @param ctx context used to look up user-visible progress messages
 */
public FlagUploader(Flag f, Context ctx){
    this.context = ctx;
    this.flag = f;
    this.isUploading = false;
    this.files = new HashMap<>();
    this.usedFiled = new HashMap<>();
}
/*
public void setDeletesFilesOnFinish(boolean deletes){
this.deleteFilesOnFinish = deletes;
}
*/
/**
 * Tells whether {@link #upload} has already been invoked on this instance.
 *
 * @return true once uploading has started, false otherwise
 */
public boolean isUploading(){
    return isUploading;
}
/**
 * Registers the video file to upload.
 *
 * @param video file to upload as a video
 * @throws IllegalStateException if uploading has already started
 */
public void setVideoFile(File video){
    if (this.isUploading) {
        throw new IllegalStateException("Cannot set a file while uploading");
    }
    files.put(VIDEO_KEY, video);
}
/**
 * Registers the audio recording to upload.
 *
 * @param audio file to upload as an audio recording
 * @throws IllegalStateException if uploading has already started
 */
public void setAudioFile(File audio){
    if (this.isUploading) {
        throw new IllegalStateException("Cannot set a file while uploading");
    }
    files.put(AUDIO_KEY, audio);
}
/**
 * Registers the picture file to upload.
 *
 * @param picture file to upload as a picture
 * @throws IllegalStateException if uploading has already started
 */
public void setPictureFile(File picture){
    if (this.isUploading) {
        throw new IllegalStateException("Cannot set a file while uploading");
    }
    files.put(PICTURE_KEY, picture);
}
/**
 * Starts the asynchronous upload of every registered file, followed by the
 * flag itself. Results are reported through the supplied callbacks.
 *
 * @param cbk callbacks, MUST NOT be null
 * @throws IllegalArgumentException if cbk is null
 */
public void upload( FlagUploaderCallbacks cbk){
    if (cbk == null) {
        throw new IllegalArgumentException("callbacks parameter must not be null");
    }
    this.isUploading = true;
    this.callbacks = cbk;
    this.loadFileLoop();
}
/**
 * Uploads the registered files one at a time. Each completed upload
 * re-enters this method (via onParseFileInMemoryLoadDone); once no files
 * remain, the flag itself is saved by loadFlag().
 *
 * Fix: removed a leftover debug statement (Log.d(TAG, "CIAo")).
 */
private void loadFileLoop(){
    // Must run on the main thread; debug builds enforce this invariant.
    if(BuildConfig.DEBUG && Looper.getMainLooper().getThread() != Thread.currentThread()){
        throw new RuntimeException("Something went wrong with threads");
    }
    if(this.files.isEmpty()){
        // Nothing left to upload: persist the flag itself.
        this.loadFlag();
        return;
    }
    String key = this.getNextKeyFromFilesMap();
    this.currentFileKey = key;
    // Move the file from "pending" to "used" so onFinish() can clean up.
    File current_file = this.files.remove(key);
    this.usedFiled.put(key, current_file);
    new FileLoaderTask().execute(current_file);
}
/**
 * Reports a failure to the client. Once called, the upload stops and no
 * flag is created on parse.com.
 *
 * @param e error description
 */
private void onError(Exception e){
    callbacks.onError(e);
}
/**
 * Called by FileLoaderTask when a ParseFile has been built in memory.
 * Attaches the file to the flag under the key chosen by loadFileLoop()
 * and starts its upload; on success control returns to loadFileLoop().
 *
 * @param parse_file parse file to upload; null signals a load failure and
 *        aborts the whole upload via onError()
 */
private void onParseFileInMemoryLoadDone(ParseFile parse_file){
    if(parse_file == null){
        this.onError(new Exception("Error loading file. Flag cannot be placed"));
        return;
    }
    // Attach under the current key (audio/video/picture) and reset it.
    this.flag.put(this.currentFileKey, parse_file);
    final String message_to_user = this.getUserMessageForKey(this.currentFileKey);
    this.currentFileKey = null;
    // Report 0% right away so the UI shows this file's progress message.
    FlagUploader.this.callbacks.onPercentage(0, message_to_user);
    parse_file.saveInBackground(new SaveCallback() {
        @Override
        public void done(ParseException e) {
            if(e != null){
                FlagUploader.this.onError(e);
            }
            else{
                // This file is done: continue with the next one (or the flag).
                FlagUploader.this.loadFileLoop();
            }
        }
    }, new ProgressCallback() {
        @Override
        public void done(Integer integer) {
            FlagUploader.this.callbacks.onPercentage(integer, message_to_user);
        }
    });
}
/**
 * Saves the flag itself; called once every registered file has been
 * uploaded. Ends in onFinish() on success or onError() on failure.
 */
private void loadFlag(){
    FlagUploader.this.callbacks.onPercentage(100, "Loading last bits :)");
    this.flag.saveInBackground(new SaveCallback() {
        @Override
        public void done(ParseException e) {
            if(e != null){
                FlagUploader.this.onError(e);
            }
            else{
                FlagUploader.this.onFinish();
            }
        }
    });
}
/**
 * Called when the flag and all of its files have been uploaded. Notifies
 * the client, then deletes the temporary audio recording — the only file
 * this class owns — and clears the bookkeeping map.
 *
 * Fix: removed the commented-out "delete all files" dead code and the
 * stale javadoc that referenced the removed deleteOnFinish flag.
 */
private void onFinish(){
    FlagUploader.this.callbacks.onSuccess();
    File f = this.usedFiled.get(AUDIO_KEY);
    if(f != null){
        Log.d(TAG, "Deleted file: "+f.getName()+ " ? " + f.delete());
    }
    this.usedFiled.clear();
}
/**
*
* @param key file key
* @return a string representing the message to the user
*/
private String getUserMessageForKey(String key){
switch (key) {
case AUDIO_KEY:
return this.context.getString(R.string.upload_audio_progress);
case PICTURE_KEY:
return this.context.getString(R.string.upload_picture_progress);
case VIDEO_KEY:
return this.context.getString(R.string.upload_video_progress);
}
return null;
}
/**
*
* @return the correct key for the current upload round
*/
private String getNextKeyFromFilesMap(){
if(this.files.containsKey(AUDIO_KEY)){
return AUDIO_KEY;
}
else if(this.files.containsKey(PICTURE_KEY)){
return PICTURE_KEY;
}
else if(this.files.containsKey(VIDEO_KEY)){
return VIDEO_KEY;
}
return null;
}
public interface FlagUploaderCallbacks{
/**
* Progress of current upload
* @param percentage value between 0 and 100
* @param text_to_show text to display to the user
*/
void onPercentage(int percentage, String text_to_show);
/**
* Called when an error occurs. #onSucces() wont be called in case of error
* @param e error
*/
void onError(Exception e);
/**
* Called when everything is uploaded
*/
void onSuccess();
}
private class FileLoaderTask extends AsyncTask<File, Integer, ParseFile>{
@Override
protected ParseFile doInBackground(File... params) {
if(params.length == 0){
return null;
}
File file = params[0];
try{
return new ParseFile(file.getName(), this.loadFileInMemory(file));
} catch (IOException e) {
e.printStackTrace();
return null;
}
}
private byte[] loadFileInMemory(File f) throws IOException{
FileInputStream is = new FileInputStream(f);
ByteArrayOutputStream outStream = new ByteArrayOutputStream();
byte[] buff = new byte[Utils.CHUNK_SIZE];
int i;
while ((i = is.read(buff, 0, buff.length)) > 0){
outStream.write(buff, 0, i);
}
return outStream.toByteArray();
}
@Override
protected void onPostExecute(ParseFile file) {
FlagUploader.this.onParseFileInMemoryLoadDone(file);
}
}
}
|
app/src/main/java/com/gcw/sapienza/places/utils/FlagUploader.java
|
package com.gcw.sapienza.places.utils;
import android.content.Context;
import android.os.AsyncTask;
import android.os.Looper;
import android.util.Log;
import com.gcw.sapienza.places.BuildConfig;
import com.gcw.sapienza.places.R;
import com.gcw.sapienza.places.model.Flag;
import com.parse.ParseException;
import com.parse.ParseFile;
import com.parse.ProgressCallback;
import com.parse.SaveCallback;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.util.HashMap;
/**
* This class allows to asynchronously upload a Flag and to have completion callbacks
*
*
* Usage:
*
* File video;
* File audio;
* File picture;
*
 * FlagUploader up = new FlagUploader(flag, context);
 * up.setVideoFile(video);
 * up.setAudioFile(audio);
 * up.setPictureFile(picture);
 *
 * up.upload(new FlagUploaderCallbacks(){...});
*
*
* How It Works
*
 * When upload() is called, FlagUploader.loadFileLoop() is automatically invoked.
 * FlagUploader.loadFileLoop() is responsible for ParseFile creation and loading.
 *
 * 1) loadFileLoop() checks that this.files is not empty
 * 2) loadFileLoop() removes a File from this.files (the storage of Files to upload) and schedules a FileLoaderTask that loads the file
 * in memory and creates a ParseFile.
 * 3) When the ParseFile is configured, FileLoaderTask calls FlagUploader.onParseFileInMemoryLoadDone(ParseFile parse_file)
 * 4) onParseFileInMemoryLoadDone starts the ParseFile upload; when the upload is finished, control returns to 1.
 *
 * 5) if this.files is empty, loadFlag() will be called and will upload the flag to Parse.com
 * 6) in case of success onFinish() will be called, otherwise onError()
*
*
* Created by paolo on 12/01/15.
*/
public class FlagUploader {
    private static final String TAG = "FlagUploader";
    private static final String AUDIO_KEY = Flag.AUDIO_KEY;
    private static final String VIDEO_KEY = Flag.VIDEO_KEY;
    private static final String PICTURE_KEY = Flag.PICTURE_KEY;
    /** Flag that receives the uploaded ParseFiles and is saved last. */
    private final Flag flag;
    /** Files still queued for upload, keyed by AUDIO_KEY / VIDEO_KEY / PICTURE_KEY. */
    private final HashMap<String, File> files;
    /** Files already consumed by an upload round; consulted for cleanup in onFinish(). */
    private final HashMap<String, File> usedFiles;
    private boolean isUploading;
    private FlagUploaderCallbacks callbacks;
    /** Key of the file currently uploading; null between upload rounds. */
    private String currentFileKey = null;
    private final Context context;
    /**
     * Creates the uploader.
     * @param f the flag to upload
     * @param ctx context used to resolve localized progress messages
     */
    public FlagUploader(Flag f, Context ctx){
        this.flag = f;
        this.isUploading = false;
        this.files = new HashMap<>();
        this.usedFiles = new HashMap<>();
        this.context = ctx;
    }
    /**
     * @return true if method upload has been already called, false otherwise
     */
    public boolean isUploading(){
        return this.isUploading;
    }
    /**
     * Sets the file to upload as a video.
     * @throws java.lang.IllegalStateException if you call this method after having started uploading
     * @param video file to upload as a video; ignored when null
     */
    public void setVideoFile(File video){
        this.checkNotUploading();
        if(video != null){
            this.files.put(VIDEO_KEY, video);
        }
    }
    /**
     * Sets the file to upload as an audio recording.
     * @throws java.lang.IllegalStateException if you call this method after having started uploading
     * @param audio file to upload as an audio rec; ignored when null
     */
    public void setAudioFile(File audio){
        this.checkNotUploading();
        if(audio != null){
            this.files.put(AUDIO_KEY, audio);
        }
    }
    /**
     * Sets the file to upload as a picture.
     * @throws java.lang.IllegalStateException if you call this method after having started uploading
     * @param picture file to upload as a picture; ignored when null
     */
    public void setPictureFile(File picture){
        this.checkNotUploading();
        if(picture != null){
            this.files.put(PICTURE_KEY, picture);
        }
    }
    /** Guards the setters: files cannot change once the upload has started. */
    private void checkNotUploading(){
        if(this.isUploading()){
            throw new IllegalStateException("Cannot set a file while uploading");
        }
    }
    /**
     * Starts the asynchronous upload of all configured files and then the Flag.
     * @param cbk callbacks MUST NOT be null
     * @throws IllegalArgumentException if cbk is null
     */
    public void upload( FlagUploaderCallbacks cbk){
        if(cbk == null){
            throw new IllegalArgumentException("callbacks parameter must not be null");
        }
        this.callbacks = cbk;
        this.isUploading = true;
        this.loadFileLoop();
    }
    /**
     * When this method is called it starts to upload a file asynchronously.
     * When a file is uploaded this method will be called again and a new upload will start until
     * there is nothing left to upload; at that point the Flag itself is saved.
     */
    private void loadFileLoop(){
        // Must run on the main thread: the state mutated below is not synchronized
        // and the Parse callbacks that re-enter this method are delivered on it.
        if(BuildConfig.DEBUG && Looper.getMainLooper().getThread() != Thread.currentThread()){
            throw new RuntimeException("Something went wrong with threads");
        }
        if(this.files.isEmpty()){
            this.loadFlag();
            return;
        }
        String key = this.getNextKeyFromFilesMap();
        this.currentFileKey = key;
        // Move the file from the pending map to the used map so that onFinish()
        // can find it for cleanup once the whole upload has succeeded.
        File current_file = this.files.remove(key);
        this.usedFiles.put(key, current_file);
        new FileLoaderTask().execute(current_file);
    }
    /**
     * Called when an error occurs. If this method is called the upload is stopped and no data will be created on parse.com
     * @param e error description
     */
    private void onError(Exception e){
        this.callbacks.onError(e);
    }
    /**
     * Called when a ParseFile is successfully loaded in memory.
     * Attaches it to the Flag and starts its upload; on completion the upload
     * loop is re-entered so files go up strictly one at a time.
     * @param parse_file parse file to upload; null signals a load failure and aborts
     */
    private void onParseFileInMemoryLoadDone(ParseFile parse_file){
        if(parse_file == null){
            this.onError(new Exception("Error loading file. Flag cannot be placed"));
            return;
        }
        this.flag.put(this.currentFileKey, parse_file);
        // Resolve the message before clearing currentFileKey: the ProgressCallback
        // below outlives this method call.
        final String message_to_user = this.getUserMessageForKey(this.currentFileKey);
        this.currentFileKey = null;
        FlagUploader.this.callbacks.onPercentage(0, message_to_user);
        parse_file.saveInBackground(new SaveCallback() {
            @Override
            public void done(ParseException e) {
                if(e != null){
                    FlagUploader.this.onError(e);
                }
                else{
                    FlagUploader.this.loadFileLoop();
                }
            }
        }, new ProgressCallback() {
            @Override
            public void done(Integer integer) {
                FlagUploader.this.callbacks.onPercentage(integer, message_to_user);
            }
        });
    }
    /**
     * When all files are uploaded, it uploads the flag itself to Parse.com.
     */
    private void loadFlag(){
        FlagUploader.this.callbacks.onPercentage(100, "Loading last bits :)");
        this.flag.saveInBackground(new SaveCallback() {
            @Override
            public void done(ParseException e) {
                if(e != null){
                    FlagUploader.this.onError(e);
                }
                else{
                    FlagUploader.this.onFinish();
                }
            }
        });
    }
    /**
     * Called when everything is uploaded: notifies the client of success and
     * deletes the temporary audio recording, if one was used.
     */
    private void onFinish(){
        FlagUploader.this.callbacks.onSuccess();
        // Only the audio recording is a temporary file owned by this class.  It may
        // legitimately be absent (flag shared without audio), so guard against null
        // before dereferencing -- the unguarded call used to crash the app here.
        File f = this.usedFiles.get(AUDIO_KEY);
        if(f != null){
            Log.d(TAG, "Deleted file: "+f.getName()+ " ? " + f.delete());
        }
        this.usedFiles.clear();
    }
    /**
     * Maps a file key to the localized progress message shown to the user.
     * @param key file key; must not be null
     * @return a string representing the message to the user, or null for an unknown key
     */
    private String getUserMessageForKey(String key){
        switch (key) {
            case AUDIO_KEY:
                return this.context.getString(R.string.upload_audio_progress);
            case PICTURE_KEY:
                return this.context.getString(R.string.upload_picture_progress);
            case VIDEO_KEY:
                return this.context.getString(R.string.upload_video_progress);
        }
        return null;
    }
    /**
     * Picks the key of the next queued file in the fixed order audio, picture, video.
     * @return the correct key for the current upload round, or null when nothing is queued
     */
    private String getNextKeyFromFilesMap(){
        if(this.files.containsKey(AUDIO_KEY)){
            return AUDIO_KEY;
        }
        else if(this.files.containsKey(PICTURE_KEY)){
            return PICTURE_KEY;
        }
        else if(this.files.containsKey(VIDEO_KEY)){
            return VIDEO_KEY;
        }
        return null;
    }
    public interface FlagUploaderCallbacks{
        /**
         * Progress of current upload
         * @param percentage value between 0 and 100
         * @param text_to_show text to display to the user
         */
        void onPercentage(int percentage, String text_to_show);
        /**
         * Called when an error occurs. {@link #onSuccess()} won't be called in case of error.
         * @param e error
         */
        void onError(Exception e);
        /**
         * Called when everything is uploaded
         */
        void onSuccess();
    }
    /**
     * Loads a File fully into memory off the UI thread and wraps it in a
     * ParseFile, handing the result back to the uploader on the UI thread.
     */
    private class FileLoaderTask extends AsyncTask<File, Integer, ParseFile>{
        @Override
        protected ParseFile doInBackground(File... params) {
            if(params.length == 0){
                return null;
            }
            File file = params[0];
            try{
                return new ParseFile(file.getName(), this.loadFileInMemory(file));
            } catch (IOException e) {
                e.printStackTrace();
                return null;
            }
        }
        /**
         * Reads the whole file into a byte array, logging bytes-based progress.
         * @param f file to read
         * @return the file contents
         * @throws IOException if reading fails
         */
        private byte[] loadFileInMemory(File f) throws IOException{
            // try-with-resources: the stream was previously never closed (leak).
            try (FileInputStream is = new FileInputStream(f)) {
                ByteArrayOutputStream outStream = new ByteArrayOutputStream();
                byte[] buff = new byte[Utils.CHUNK_SIZE];
                long totalBytes = f.length();
                long readBytes = 0;
                int i;
                while ((i = is.read(buff, 0, buff.length)) > 0)
                {
                    outStream.write(buff, 0, i);
                    readBytes += i;
                    // Progress is bytes read over file length; the old ratio divided
                    // a chunk count by the byte length, which was meaningless.
                    if(totalBytes > 0){
                        Log.d(TAG, "Loading "+f.getName()+" "+(readBytes * 100.0 / totalBytes));
                    }
                }
                return outStream.toByteArray();
            }
        }
        @Override
        protected void onPostExecute(ParseFile file) {
            FlagUploader.this.onParseFileInMemoryLoadDone(file);
        }
    }
}
|
fixed a bug that caused the app to crash when sharing without audio
|
app/src/main/java/com/gcw/sapienza/places/utils/FlagUploader.java
|
fixed a bug that caused the app to crash when sharing without audio
|
|
Java
|
apache-2.0
|
941b12938a0181927980d13c20249199f0512b8b
| 0
|
safarmer/bazel,dslomov/bazel-windows,davidzchen/bazel,aehlig/bazel,werkt/bazel,ulfjack/bazel,ButterflyNetwork/bazel,davidzchen/bazel,werkt/bazel,dslomov/bazel,twitter-forks/bazel,akira-baruah/bazel,perezd/bazel,katre/bazel,dslomov/bazel-windows,cushon/bazel,werkt/bazel,bazelbuild/bazel,dslomov/bazel-windows,bazelbuild/bazel,ButterflyNetwork/bazel,perezd/bazel,katre/bazel,bazelbuild/bazel,twitter-forks/bazel,ulfjack/bazel,dslomov/bazel,ulfjack/bazel,meteorcloudy/bazel,davidzchen/bazel,twitter-forks/bazel,werkt/bazel,meteorcloudy/bazel,aehlig/bazel,cushon/bazel,aehlig/bazel,katre/bazel,aehlig/bazel,perezd/bazel,aehlig/bazel,davidzchen/bazel,ulfjack/bazel,twitter-forks/bazel,bazelbuild/bazel,meteorcloudy/bazel,ButterflyNetwork/bazel,meteorcloudy/bazel,katre/bazel,dslomov/bazel,twitter-forks/bazel,ulfjack/bazel,ulfjack/bazel,akira-baruah/bazel,ButterflyNetwork/bazel,bazelbuild/bazel,twitter-forks/bazel,ButterflyNetwork/bazel,safarmer/bazel,safarmer/bazel,perezd/bazel,perezd/bazel,dslomov/bazel,meteorcloudy/bazel,werkt/bazel,cushon/bazel,katre/bazel,perezd/bazel,cushon/bazel,cushon/bazel,akira-baruah/bazel,akira-baruah/bazel,dslomov/bazel,aehlig/bazel,dslomov/bazel-windows,davidzchen/bazel,akira-baruah/bazel,davidzchen/bazel,akira-baruah/bazel,perezd/bazel,twitter-forks/bazel,davidzchen/bazel,safarmer/bazel,ButterflyNetwork/bazel,dslomov/bazel-windows,cushon/bazel,safarmer/bazel,aehlig/bazel,safarmer/bazel,dslomov/bazel,werkt/bazel,meteorcloudy/bazel,katre/bazel,meteorcloudy/bazel,dslomov/bazel,dslomov/bazel-windows,bazelbuild/bazel,ulfjack/bazel
|
// Copyright 2017 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.rules.android;
import static com.google.common.base.Verify.verifyNotNull;
import static com.google.common.collect.Iterables.getOnlyElement;
import static com.google.common.truth.Truth.assertThat;
import static com.google.common.truth.Truth.assertWithMessage;
import static com.google.devtools.build.lib.actions.util.ActionsTestUtil.prettyArtifactNames;
import static com.google.devtools.build.lib.rules.java.JavaCompileActionTestHelper.getClasspath;
import static com.google.devtools.build.lib.rules.java.JavaCompileActionTestHelper.getCompileTimeDependencyArtifacts;
import static com.google.devtools.build.lib.rules.java.JavaCompileActionTestHelper.getDirectJars;
import static com.google.devtools.build.lib.rules.java.JavaCompileActionTestHelper.getJavacArguments;
import static com.google.devtools.build.lib.rules.java.JavaCompileActionTestHelper.getProcessorpath;
import static com.google.devtools.build.lib.rules.java.JavaCompileActionTestHelper.getStrictJavaDepsMode;
import com.google.common.base.Function;
import com.google.common.base.Joiner;
import com.google.common.base.Splitter;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.common.truth.Truth;
import com.google.devtools.build.lib.actions.Action;
import com.google.devtools.build.lib.actions.Artifact;
import com.google.devtools.build.lib.actions.util.ActionsTestUtil;
import com.google.devtools.build.lib.analysis.ConfiguredTarget;
import com.google.devtools.build.lib.analysis.OutputGroupInfo;
import com.google.devtools.build.lib.analysis.actions.FileWriteAction;
import com.google.devtools.build.lib.analysis.actions.SpawnAction;
import com.google.devtools.build.lib.analysis.config.BuildConfiguration.StrictDepsMode;
import com.google.devtools.build.lib.analysis.configuredtargets.FileConfiguredTarget;
import com.google.devtools.build.lib.analysis.configuredtargets.OutputFileConfiguredTarget;
import com.google.devtools.build.lib.cmdline.Label;
import com.google.devtools.build.lib.cmdline.RepositoryName;
import com.google.devtools.build.lib.collect.nestedset.NestedSet;
import com.google.devtools.build.lib.rules.android.AndroidLibraryAarInfo.Aar;
import com.google.devtools.build.lib.rules.java.JavaCompilationArgsProvider;
import com.google.devtools.build.lib.rules.java.JavaCompilationInfoProvider;
import com.google.devtools.build.lib.rules.java.JavaExportsProvider;
import com.google.devtools.build.lib.rules.java.JavaInfo;
import com.google.devtools.build.lib.rules.java.JavaRuleOutputJarsProvider;
import com.google.devtools.build.lib.rules.java.JavaSemantics;
import com.google.devtools.build.lib.skyframe.ConfiguredTargetAndData;
import com.google.devtools.build.lib.vfs.PathFragment;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.Set;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
/**
* Tests for {@link AndroidLibrary}.
*/
@RunWith(JUnit4.class)
public class AndroidLibraryTest extends AndroidBuildViewTestCase {
  // Smoke test: a minimal android_library with a single source analyzes cleanly.
  @Test
  public void testSimpleLibrary() throws Exception {
    scratch.file("java/android/BUILD", "android_library(name = 'a', srcs = ['A.java'])");
    getConfiguredTarget("//java/android:a");
  }
  // With coverage enabled, baseline coverage spans the target's and its deps' sources.
  @Test
  public void testBaselineCoverageArtifacts() throws Exception {
    useConfiguration("--collect_code_coverage");
    ConfiguredTarget target =
        scratchConfiguredTarget(
            "java/a",
            "a",
            "android_library(",
            "    name='a',",
            "    srcs=['A.java'],",
            "    deps=[':b'],",
            ")",
            "android_library(",
            "    name='b',",
            "    srcs=['B.java'],",
            ")");
    assertThat(baselineCoverageArtifactBasenames(target)).containsExactly("A.java", "B.java");
  }
  // regression test for #3169099
  // Only .java/.srcjar sources (directly or via genrule outputs) are accepted in srcs.
  @Test
  public void testLibrarySrcs() throws Exception {
    scratch.file("java/srcs/a.foo", "foo");
    scratch.file(
        "java/srcs/BUILD",
        "android_library(",
        "    name = 'valid',",
        "    srcs = [",
        "        'a.java',",
        "        'b.srcjar',",
        "        ':gvalid',",
        "        ':gmix',",
        "    ],",
        ")",
        "android_library(",
        "    name = 'invalid',",
        "    srcs = [",
        "        'a.foo',",
        "        ':ginvalid',",
        "    ],",
        ")",
        "android_library(",
        "    name = 'mix',",
        "    srcs = [",
        "        'a.java',",
        "        'a.foo',",
        "    ],",
        ")",
        "genrule(name = 'gvalid', srcs = ['a.java'], outs = ['b.java'], cmd = '')",
        "genrule(name = 'ginvalid', srcs = ['a.java'], outs = ['b.foo'], cmd = '')",
        "genrule(name = 'gmix', srcs = ['a.java'], outs = ['c.java', 'c.foo'], cmd = '')");
    assertSrcsValidityForRuleType("//java/srcs", "android_library",
        ".java or .srcjar");
  }
  // regression test for #3169095
  // An unrecognized .xmb source must produce an analysis error, not a crash.
  @Test
  public void testXmbInSrcsDoesNotThrow() throws Exception {
    reporter.removeHandler(failFastHandler);
    scratchConfiguredTarget("java/xmb", "a", "android_library(name = 'a', srcs = ['a.xmb'])");
  }
  // A multi-segment idl_import_root containing '/' is accepted.
  @Test
  public void testSlashInIdlImportRoot() throws Exception {
    scratchConfiguredTarget(
        "java/com/google/android",
        "avocado",
        "android_library(",
        "    name = 'avocado',",
        "    idl_parcelables = ['tropical/fruit/Avocado.aidl'],",
        "    idl_import_root = 'tropical/fruit',",
        ")");
  }
  // idl_import_root without any idl sources/parcelables is an error.
  @Test
  public void testAndroidLibraryWithIdlImportAndNoIdls() throws Exception {
    checkError(
        "java/com/google/android",
        "lib",
        "Neither idl_srcs nor idl_parcelables were specified, "
            + "but 'idl_import_root' attribute was set",
        "android_library(",
        "    name = 'lib',",
        "    srcs = ['Dummy.java'],",
        "    idl_import_root = 'src',",
        ")");
  }
  // idl_import_root with idl_srcs alone is valid.
  @Test
  public void testAndroidLibraryWithIdlImportAndIdlSrcs() throws Exception {
    scratchConfiguredTarget(
        "java/com/google/android",
        "lib",
        "android_library(",
        "    name = 'lib',",
        "    idl_srcs = ['Dummy.aidl'],",
        "    idl_import_root = 'src',",
        ")");
  }
  // idl_import_root with idl_parcelables alone is valid.
  @Test
  public void testAndroidLibraryWithIdlImportAndIdlParcelables() throws Exception {
    scratchConfiguredTarget(
        "java/com/google/android",
        "lib",
        "android_library(",
        "    name = 'lib',",
        "    idl_parcelables = ['src/android/DummyParcelable.aidl'],",
        "    idl_import_root = 'src',",
        ")");
  }
  // idl_import_root with both idl_srcs and idl_parcelables is valid.
  @Test
  public void testAndroidLibraryWithIdlImportAndBothIdlTypes() throws Exception {
    scratchConfiguredTarget(
        "java/com/google/android",
        "lib",
        "android_library(",
        "    name = 'lib',",
        "    idl_srcs = ['src/android/Dummy.aidl'],",
        "    idl_parcelables = ['src/android/DummyParcelable.aidl'],",
        "    idl_import_root = 'src',",
        ")");
  }
  // Explicitly empty idl lists together with idl_import_root still analyze.
  @Test
  public void testAndroidLibraryWithIdlImportAndEmptyLists() throws Exception {
    scratchConfiguredTarget(
        "java/com/google/android",
        "lib",
        "android_library(",
        "    name = 'lib',",
        "    idl_srcs = [],",
        "    idl_parcelables = [],",
        "    idl_import_root = 'src',",
        ")");
  }
  // idl_preprocessed alongside idl_srcs is accepted.
  @Test
  public void testAndroidLibraryWithIdlPreprocessed() throws Exception {
    scratchConfiguredTarget(
        "java/com/google/android",
        "lib",
        "android_library(",
        "    name = 'lib',",
        "    idl_srcs = ['src/android/Dummy.aidl'],",
        "    idl_preprocessed = ['src/android/DummyParcelable.aidl'],",
        ")");
  }
  // The javac command line carries the originating target label.
  @Test
  public void testCommandLineContainsTargetLabelAndRuleKind() throws Exception {
    scratch.file("java/android/BUILD", "android_library(name = 'a', srcs = ['A.java'])");
    SpawnAction javacAction = (SpawnAction) getGeneratingActionForLabel("//java/android:liba.jar");
    String commandLine = Iterables.toString(getJavacArguments(javacAction));
    assertThat(commandLine).contains("--target_label, //java/android:a");
  }
  // --strict_java_deps=OFF propagates to the compile action.
  @Test
  public void testStrictAndroidDepsOff() throws Exception {
    useConfiguration("--strict_java_deps=OFF");
    scratch.file(
        "java/android/strict/BUILD",
        "android_library(",
        "    name = 'b',",
        "    srcs = ['B.java'],",
        ")");
    Artifact artifact = getFileConfiguredTarget("//java/android/strict:libb.jar").getArtifact();
    SpawnAction compileAction = (SpawnAction) getGeneratingAction(artifact);
    assertThat(getStrictJavaDepsMode(compileAction)).isEqualTo(StrictDepsMode.OFF);
  }
  // Default strictness is ERROR.
  @Test
  public void testStrictAndroidDepsOn() throws Exception {
    scratch.file(
        "java/android/strict/BUILD",
        "android_library(",
        "    name = 'b',",
        "    srcs = ['B.java'],",
        ")");
    Artifact artifact = getFileConfiguredTarget("//java/android/strict:libb.jar").getArtifact();
    SpawnAction compileAction = (SpawnAction) getGeneratingAction(artifact);
    assertThat(getStrictJavaDepsMode(compileAction)).isEqualTo(StrictDepsMode.ERROR);
  }
  // --strict_android_deps=WARN propagates to the compile action.
  @Test
  public void testStrictAndroidDepsWarn() throws Exception {
    useConfiguration("--strict_android_deps=WARN");
    scratch.file(
        "java/android/strict/BUILD",
        "android_library(",
        "    name = 'b',",
        "    srcs = ['B.java'],",
        ")");
    Artifact artifact = getFileConfiguredTarget("//java/android/strict:libb.jar").getArtifact();
    SpawnAction compileAction = (SpawnAction) getGeneratingAction(artifact);
    assertThat(getStrictJavaDepsMode(compileAction)).isEqualTo(StrictDepsMode.WARN);
  }
  // Default fix-deps tool is 'add_dep'.
  @Test
  public void testFixDepsToolEmpty() throws Exception {
    scratch.file("java/android/BUILD", "android_library(name = 'b', srcs = ['B.java'])");
    List<String> commandLine =
        getGeneratingSpawnActionArgs(
            getFileConfiguredTarget("//java/android:libb.jar").getArtifact());
    assertThat(commandLine).containsAllOf("--experimental_fix_deps_tool", "add_dep").inOrder();
  }
  // --experimental_fix_deps_tool overrides the default tool name.
  @Test
  public void testFixDepsTool() throws Exception {
    useConfiguration("--experimental_fix_deps_tool=auto_fixer");
    scratch.file("java/android/BUILD", "android_library(name = 'b', srcs = ['B.java'])");
    List<String> commandLine =
        getGeneratingSpawnActionArgs(
            getFileConfiguredTarget("//java/android:libb.jar").getArtifact());
    assertThat(commandLine).containsAllOf("--experimental_fix_deps_tool", "auto_fixer").inOrder();
  }
  // A java_plugin in 'plugins' contributes its processor and jars to the compile action.
  @Test
  public void testJavaPluginProcessorPath() throws Exception {
    scratch.file(
        "java/test/BUILD",
        "java_library(",
        "    name = 'plugin_dep',",
        "    srcs = [ 'ProcessorDep.java'],",
        ")",
        "java_plugin(",
        "    name = 'plugin',",
        "    srcs = ['AnnotationProcessor.java'],",
        "    processor_class = 'com.google.process.stuff',",
        "    deps = [ ':plugin_dep' ],",
        ")",
        "android_library(",
        "    name = 'to_be_processed',",
        "    plugins = [':plugin'],",
        "    srcs = ['ToBeProcessed.java'],",
        ")");
    ConfiguredTarget target = getConfiguredTarget("//java/test:to_be_processed");
    OutputFileConfiguredTarget output = (OutputFileConfiguredTarget)
        getFileConfiguredTarget("//java/test:libto_be_processed.jar");
    SpawnAction javacAction = (SpawnAction) getGeneratingAction(output.getArtifact());
    assertThat(getProcessorNames(javacAction)).contains("com.google.process.stuff");
    assertThat(getProcessorNames(javacAction)).hasSize(1);
    assertThat(
            ActionsTestUtil.baseArtifactNames(
                getInputs(javacAction, getProcessorpath(javacAction))))
        .containsExactly("libplugin.jar", "libplugin_dep.jar");
    assertThat(
            actionsTestUtil()
                .predecessorClosureOf(getFilesToBuild(target), JavaSemantics.JAVA_SOURCE))
        .isEqualTo("ToBeProcessed.java AnnotationProcessor.java ProcessorDep.java");
  }
  // Same test as above, enabling the plugin through the command line.
  @Test
  public void testPluginCommandLine() throws Exception {
    scratch.file(
        "java/test/BUILD",
        "java_library(",
        "    name = 'plugin_dep',",
        "    srcs = [ 'ProcessorDep.java'],",
        ")",
        "java_plugin(",
        "    name = 'plugin',",
        "    srcs = ['AnnotationProcessor.java'],",
        "    processor_class = 'com.google.process.stuff',",
        "    deps = [ ':plugin_dep' ],",
        ")",
        "android_library(",
        "    name = 'to_be_processed',",
        "    srcs = ['ToBeProcessed.java'],",
        ")");
    useConfiguration("--plugin=//java/test:plugin");
    ConfiguredTarget target = getConfiguredTarget("//java/test:to_be_processed");
    OutputFileConfiguredTarget output =
        (OutputFileConfiguredTarget) getFileConfiguredTarget("//java/test:libto_be_processed.jar");
    SpawnAction javacAction = (SpawnAction) getGeneratingAction(output.getArtifact());
    assertThat(getProcessorNames(javacAction)).contains("com.google.process.stuff");
    assertThat(getProcessorNames(javacAction)).hasSize(1);
    assertThat(
            ActionsTestUtil.baseArtifactNames(
                getInputs(javacAction, getProcessorpath(javacAction))))
        .containsExactly("libplugin.jar", "libplugin_dep.jar");
    assertThat(
            actionsTestUtil()
                .predecessorClosureOf(getFilesToBuild(target), JavaSemantics.JAVA_SOURCE))
        .isEqualTo("ToBeProcessed.java AnnotationProcessor.java ProcessorDep.java");
  }
  // A plain java_library is rejected in the 'plugins' attribute.
  @Test
  public void testInvalidPlugin() throws Exception {
    checkError(
        "java/test",
        "lib",
        // error:
        getErrorMsgMisplacedRules(
            "plugins",
            "android_library",
            "//java/test:lib",
            "java_library",
            "//java/test:not_a_plugin"),
        // BUILD file:
        "java_library(",
        "    name = 'not_a_plugin',",
        "    srcs = ['NotAPlugin.java'],",
        ")",
        "android_library(",
        "    name = 'lib',",
        "    plugins = [':not_a_plugin'],",
        "    srcs = ['Lib.java'],",
        ")");
  }
  // With the allow flag on, srcs-less deps only produce a deprecation warning.
  @Test
  public void testDisallowDepsWithoutSrcsWarning() throws Exception {
    useConfiguration("--experimental_allow_android_library_deps_without_srcs=true");
    checkWarning(
        "android/deps",
        "b",
        // message:
        "android_library will be deprecating the use of deps to export targets implicitly",
        // build file
        "android_library(",
        "    name = 'a',",
        "    srcs = ['a.java'],",
        ")",
        "android_library(",
        "    name = 'b',",
        "    deps = [':a'],",
        ")");
  }
  // By default, srcs-less deps are an error.
  @Test
  public void testDisallowDepsWithoutSrcsError() throws Exception {
    checkError(
        "android/deps",
        "b",
        // message:
        "android_library will be deprecating the use of deps to export targets implicitly",
        // build file
        "android_library(",
        "    name = 'a',",
        "    srcs = ['a.java'],",
        ")",
        "android_library(",
        "    name = 'b',",
        "    deps = [':a'],",
        ")");
  }
  // Targets with local resources are exempt from the srcs-less deps restriction.
  @Test
  public void testAlwaysAllowDepsWithoutSrcsIfLocalResources() throws Exception {
    scratch.file(
        "java/android/BUILD",
        "android_library(",
        "    name = 'a',",
        "    srcs = ['a.java'],",
        ")",
        "android_library(",
        "    name = 'r',",
        "    manifest = 'AndroidManifest.xml',",
        "    resource_files = glob(['res/**']),",
        "    deps = [':a'],",
        ")");
    scratch.file("java/android/res/values/strings.xml",
        "<resources><string name = 'hello'>Hello Android!</string></resources>");
    useConfiguration("--experimental_allow_android_library_deps_without_srcs=false");
    getConfiguredTarget("//java/android:r");
    assertNoEvents();
  }
  // Sources reached via an exports-only alias are still part of the build closure.
  @Test
  public void testTransitiveDependencyThroughExports() throws Exception {
    scratch.file(
        "java/test/BUILD",
        "android_library(",
        "    name = 'somelib',",
        "    srcs = ['Lib.java'],",
        "    deps = [':somealias'],",
        ")",
        "android_library(",
        "    name = 'somealias',",
        "    exports = [':somedep'],",
        ")",
        "android_library(",
        "    name = 'somedep',",
        "    srcs = ['Dependency.java'],",
        "    deps = [ ':otherdep' ],",
        ")",
        "android_library(",
        "    name = 'otherdep',",
        "    srcs = ['OtherDependency.java'],",
        ")");
    ConfiguredTarget libTarget = getConfiguredTarget("//java/test:somelib");
    assertThat(actionsTestUtil().predecessorClosureAsCollection(getFilesToBuild(libTarget),
        JavaSemantics.JAVA_SOURCE)).containsExactly(
        "Lib.java", "Dependency.java", "OtherDependency.java");
    assertNoEvents();
  }
  // Strict deps: exports expose direct deps' hjars but not their transitive deps'.
  @Test
  public void testTransitiveStrictDeps() throws Exception {
    scratch.file(
        "java/peach/BUILD",
        "android_library(",
        "    name='a',",
        "    exports=[':b'],",
        ")",
        "android_library(",
        "    name='b',",
        "    srcs=['B.java'],",
        "    deps=[':c'],",
        ")",
        "android_library(",
        "    name='c',",
        "    srcs=['C.java'],",
        ")");
    useConfiguration("--strict_java_deps=ERROR");
    ConfiguredTarget a = getConfiguredTarget("//java/peach:a");
    Iterable<String> compileTimeJars =
        ActionsTestUtil.baseArtifactNames(
            JavaInfo.getProvider(JavaCompilationArgsProvider.class, a).getDirectCompileTimeJars());
    assertThat(compileTimeJars).contains("libb-hjar.jar");
    assertThat(compileTimeJars).doesNotContain("libc-hjar.jar");
    assertNoEvents();
  }
  // --java_deps: only rules with srcs emit a .jdeps output.
  @Test
  public void testEmitOutputDeps() throws Exception {
    scratch.file(
        "java/deps/BUILD",
        "android_library(",
        "    name = 'a',",
        "    exports = [':b'],",
        ")",
        "android_library(",
        "    name = 'b',",
        "    srcs = ['B.java'],",
        ")");
    useConfiguration("--java_deps");
    SpawnAction aAction = (SpawnAction) getGeneratingActionForLabel("//java/deps:liba.jar");
    List<String> aOutputs = prettyArtifactNames(aAction.getOutputs());
    assertThat(aOutputs).doesNotContain("java/deps/liba.jdeps");
    SpawnAction bAction = (SpawnAction) getGeneratingActionForLabel("//java/deps:libb.jar");
    List<String> bOutputs = prettyArtifactNames(bAction.getOutputs());
    assertThat(bOutputs).contains("java/deps/libb.jdeps");
    assertNoEvents();
  }
  // Compile-time .jdeps inputs include deps' exports, transitively through srcs-less rules.
  @Test
  public void testDependencyArtifactsWithExports() throws Exception {
    scratch.file(
        "java/classpath/BUILD",
        "android_library(",
        "    name = 'a',",
        "    srcs = ['A.java'],",
        "    deps = [",
        "        ':b',",
        "        ':c',",
        "    ],",
        ")",
        "android_library(",
        "    name = 'b',",
        "    exports = [':d'],",
        ")",
        "android_library(",
        "    name = 'c',",
        "    srcs = ['C.java'],",
        "    exports = [':e'],",
        ")",
        "android_library(",
        "    name = 'd',",
        "    srcs = ['D.java'],",
        ")",
        "android_library(",
        "    name = 'e',",
        "    srcs = ['E.java'],",
        ")");
    SpawnAction aAction = (SpawnAction) getGeneratingActionForLabel("//java/classpath:liba.jar");
    List<String> deps =
        prettyArtifactNames(getInputs(aAction, getCompileTimeDependencyArtifacts(aAction)));
    assertThat(deps)
        .containsExactly(
            "java/classpath/libc-hjar.jdeps",
            "java/classpath/libd-hjar.jdeps",
            "java/classpath/libe-hjar.jdeps");
    assertNoEvents();
  }
  // Using deps as implicit exports (no srcs) is disallowed.
  @Test
  public void testSrcsLessExportsAreDisallowed() throws Exception {
    checkError(
        "java/deps",
        "b",
        "android_library will be deprecating the use of deps to export targets implicitly",
        "android_library(",
        "    name = 'a',",
        "    srcs = ['a.java'],",
        ")",
        "android_library(",
        "    name = 'b',",
        "    deps = ['a'],",
        ")");
  }
  // Exported jars appear as direct jars of the consuming compile action.
  @Test
  public void testExportsWithStrictJavaDepsFlag() throws Exception {
    scratch.file(
        "java/exports/BUILD",
        "android_library(",
        "    name = 'a',",
        "    srcs = ['a.java'],",
        ")",
        "android_library(",
        "    name = 'b',",
        "    srcs = ['b.java'],",
        "    exports = ['a'],",
        ")",
        "android_library(",
        "    name = 'c',",
        "    srcs = ['c.java'],",
        "    deps = [':b'],",
        ")");
    useConfiguration("--strict_java_deps=WARN");
    SpawnAction javacAction = (SpawnAction) getGeneratingActionForLabel("//java/exports:libc.jar");
    assertThat(prettyArtifactNames(getInputs(javacAction, getDirectJars(javacAction))))
        .containsExactly("java/exports/libb-hjar.jar", "java/exports/liba-hjar.jar");
    assertNoEvents();
  }
@Test
public void testExportsRunfiles() throws Exception {
scratch.file(
"java/exports/BUILD",
"android_library(",
" name = 'a',",
" srcs = ['a.java'],",
" data = ['data.txt'],",
")",
"android_library(",
" name = 'b',",
" srcs = ['b.java'],",
" exports = [':a'],",
")");
ConfiguredTarget bTarget = getConfiguredTarget("//java/exports:b");
assertThat(Arrays.asList("data.txt", "liba.jar", "libb.jar"))
.isEqualTo(ActionsTestUtil.baseArtifactNames(getDefaultRunfiles(bTarget).getArtifacts()));
assertNoEvents();
}
/**
 * JavaExportsProvider must carry the full transitive closure of exports: dummy exports
 * dummy2, which exports dummy3, which exports dummy4 — all three appear.
 */
@Test
public void testTransitiveExports() throws Exception {
  scratch.file(
      "java/com/google/exports/BUILD",
      "android_library(",
      "    name = 'dummy',",
      "    srcs = ['dummy.java'],",
      "    exports = [':dummy2'],",
      ")",
      "android_library(",
      "    name = 'dummy2',",
      "    srcs = ['dummy2.java'],",
      "    exports = [':dummy3'],",
      ")",
      "android_library(",
      "    name = 'dummy3',",
      "    srcs = ['dummy3.java'],",
      "    exports = [':dummy4'],",
      ")",
      "android_library(",
      "    name = 'dummy4',",
      "    srcs = ['dummy4.java'],",
      ")");
  ConfiguredTarget target = getConfiguredTarget("//java/com/google/exports:dummy");
  List<Label> exports =
      ImmutableList.copyOf(
          JavaInfo.getProvider(JavaExportsProvider.class, target).getTransitiveExports());
  assertThat(exports)
      .containsExactly(
          Label.parseAbsolute("//java/com/google/exports:dummy2", ImmutableMap.of()),
          Label.parseAbsolute("//java/com/google/exports:dummy3", ImmutableMap.of()),
          Label.parseAbsolute("//java/com/google/exports:dummy4", ImmutableMap.of()));
  assertNoEvents();
}
/** Smoke test: an android_library with only idl_srcs analyzes without errors. */
@Test
public void testSimpleIdl() throws Exception {
  scratch.file(
      "java/android/BUILD",
      "android_library(",
      "    name = 'idl',",
      "    idl_srcs = ['a.aidl'],",
      ")");
  getConfiguredTarget("//java/android:idl");
  assertNoEvents();
}
/**
 * Referencing a cross-package .aidl file directly in idl_srcs must be reported as an
 * error telling the user to move the file or depend on a rule in that package.
 */
@Test
public void testIdlSrcsFromAnotherPackageFails() throws Exception {
  // The event is expected, so disable the fail-fast handler before analysis.
  reporter.removeHandler(failFastHandler);
  scratch.file("java/android/a/BUILD",
      "exports_files(['A.aidl'])");
  scratch.file(
      "java/android/BUILD",
      "android_library(",
      "    name = 'idl',",
      "    idl_srcs = ['//java/android/a:A.aidl'],",
      ")");
  getConfiguredTarget("//java/android:idl");
  assertContainsEvent("do not import '//java/android/a:A.aidl' directly. You should either"
      + " move the file to this package or depend on an appropriate rule there");
}
/**
 * The aidl compilation step produces the idl class jar and source jar from a single
 * action, and that action is invoked with the genfiles path of each generated .java file.
 */
@Test
public void testIdlClassJarAction() throws Exception {
  scratch.file(
      "java/android/BUILD",
      "android_library(",
      "    name = 'idl',",
      "    idl_srcs = [",
      "        'a.aidl',",
      "        'b.aidl',",
      "        'c.aidl',",
      "    ],",
      ")");
  ConfiguredTarget idlTarget =
      getConfiguredTarget("//java/android:idl");
  NestedSet<Artifact> outputGroup =
      getOutputGroup(idlTarget, AndroidIdlHelper.IDL_JARS_OUTPUT_GROUP);
  SpawnAction classJarAction = (SpawnAction) actionsTestUtil().getActionForArtifactEndingWith(
      actionsTestUtil().artifactClosureOf(outputGroup), "libidl-idl.jar");
  SpawnAction sourceJarAction = (SpawnAction) actionsTestUtil().getActionForArtifactEndingWith(
      actionsTestUtil().artifactClosureOf(outputGroup), "libidl-idl.srcjar");
  // Class jar and source jar come from the same action.
  assertThat(sourceJarAction).isSameAs(classJarAction);
  PathFragment genfilesPath =
      getTargetConfiguration()
          .getGenfilesDirectory(RepositoryName.MAIN)
          .getExecPath()
          .getRelative("java/android/idl_aidl/java/android");
  // Each aidl source yields a generated .java under genfiles that the action references.
  assertThat(classJarAction.getArguments()).containsAllOf(
      genfilesPath.getRelative("a.java").getPathString(),
      genfilesPath.getRelative("b.java").getPathString(),
      genfilesPath.getRelative("c.java").getPathString());
}
/**
 * The IDL jars output group is transitive: it contains the idl jar/srcjar of the target
 * itself and of its android_library dependencies.
 */
@Test
public void testIdlOutputGroupTransitivity() throws Exception {
  scratch.file(
      "java/android/BUILD",
      "android_library(",
      "    name = 'lib',",
      "    idl_srcs = ['a.aidl'],",
      "    deps = [':dep'],",
      ")",
      "android_library(",
      "    name = 'dep',",
      "    idl_srcs = ['b.aidl'],",
      ")");
  ConfiguredTarget idlTarget =
      getConfiguredTarget("//java/android:lib");
  NestedSet<Artifact> outputGroup =
      getOutputGroup(idlTarget, AndroidIdlHelper.IDL_JARS_OUTPUT_GROUP);
  // Flatten the nested set to root-relative path strings for comparison.
  List<String> asString = Lists.newArrayList();
  for (Artifact artifact : outputGroup) {
    asString.add(artifact.getRootRelativePathString());
  }
  assertThat(asString).containsAllOf(
      "java/android/libdep-idl.jar",
      "java/android/libdep-idl.srcjar",
      "java/android/liblib-idl.jar",
      "java/android/liblib-idl.srcjar"
  );
}
/**
 * idl_parcelables outside a java/ or javatests/ source root cannot be mapped to a Java
 * package for imports, so analysis must fail with a clear message.
 */
@Test
public void testNoJavaDir() throws Exception {
  checkError(
      "android/hello",
      "idl",
      // message:
      "Cannot determine java/javatests root for import android/hello/Import.aidl",
      // build file:
      "android_library(",
      "    name = 'idl',",
      "    srcs = ['Import.java'],",
      "    idl_parcelables = ['Import.aidl'],",
      ")");
}
/**
 * exported_plugins apply to direct dependents only: consuming_lib (direct dep of the
 * exporting lib) runs the processor, leaf_lib (transitive) does not.
 */
@Test
public void testExportedPluginsAreInherited() throws Exception {
  scratch.file(
      "java/test/BUILD",
      "java_plugin(",
      "    name = 'plugin',",
      "    srcs = [ 'Plugin.java' ],",
      "    processor_class = 'com.google.process.stuff',",
      ")",
      "android_library(",
      "    name = 'exporting_lib',",
      "    srcs = [ 'ExportingLib.java' ],",
      "    exported_plugins = [ ':plugin' ],",
      ")",
      "android_library(",
      "    name = 'consuming_lib',",
      "    srcs = [ 'ConsumingLib.java' ],",
      "    deps = [ ':exporting_lib' ],",
      ")",
      "android_library(",
      "    name = 'leaf_lib',",
      "    srcs = [ 'LeafLib.java' ],",
      "    deps = [ ':consuming_lib' ],",
      ")");
  getConfiguredTarget("//java/test:consuming_lib");
  getConfiguredTarget("//java/test:leaf_lib");
  // libconsuming_lib should include the plugin, since it directly depends on exporting_lib
  assertThat(getProcessorNames("//java/test:libconsuming_lib.jar"))
      .containsExactly("com.google.process.stuff");
  // but libleaf_lib should not, because its dependency is transitive.
  assertThat(getProcessorNames("//java/test:libleaf_lib.jar")).isEmpty();
}
/**
 * Binaries whose libraries compile AIDL sources must pick up the proguard specs of the
 * SDK's aidl_lib; binaries without AIDL-generated sources must not.
 */
@Test
public void testAidlLibAddsProguardSpecs() throws Exception {
  // Custom SDK whose aidl_lib carries a proguard spec (aidl_lib.cfg).
  scratch.file(
      "sdk/BUILD",
      "android_sdk(",
      "    name = 'sdk',",
      "    aapt = 'aapt',",
      "    adb = 'adb',",
      "    aidl = 'aidl',",
      "    aidl_lib = ':aidl_lib',",
      "    android_jar = 'android.jar',",
      "    apksigner = 'apksigner',",
      "    dx = 'dx',",
      "    framework_aidl = 'framework_aidl',",
      "    main_dex_classes = 'main_dex_classes',",
      "    main_dex_list_creator = 'main_dex_list_creator',",
      "    proguard = 'proguard',",
      "    shrinked_android_jar = 'shrinked_android_jar',",
      "    zipalign = 'zipalign',",
      ")",
      "java_library(",
      "    name = 'aidl_lib',",
      "    srcs = ['AidlLib.java'],",
      "    proguard_specs = ['aidl_lib.cfg'],",
      ")");
  scratch.file(
      "java/com/google/android/hello/BUILD",
      "android_library(",
      "    name = 'library',",
      "    srcs = ['MainActivity.java'],",
      "    idl_srcs = ['IMyInterface.aidl'],",
      ")",
      "android_library(",
      "    name = 'library_no_idl',",
      "    srcs = ['MainActivity.java'],",
      ")",
      "android_binary(",
      "    name = 'binary',",
      "    deps = [':library'],",
      "    manifest = 'AndroidManifest.xml',",
      "    proguard_specs = ['proguard-spec.pro'],",
      ")",
      "android_binary(",
      "    name = 'binary_no_idl',",
      "    deps = [':library_no_idl'],",
      "    manifest = 'AndroidManifest.xml',",
      "    proguard_specs = ['proguard-spec.pro'],",
      ")");
  useConfiguration("--android_sdk=//sdk:sdk");
  // Targets with AIDL-generated sources also get AIDL support lib Proguard specs
  ConfiguredTarget binary = getConfiguredTarget("//java/com/google/android/hello:binary");
  Action action = actionsTestUtil().getActionForArtifactEndingWith(
      getFilesToBuild(binary), "_proguard.jar");
  // The validated spec ("..._valid") must be an input to the proguard action.
  assertThat(
          ActionsTestUtil.getFirstArtifactEndingWith(
              action.getInputs(), "sdk/aidl_lib.cfg_valid"))
      .isNotNull();
  // Targets without AIDL-generated sources don't care
  ConfiguredTarget binaryNoIdl =
      getConfiguredTarget("//java/com/google/android/hello:binary_no_idl");
  Action actionNoIdl = actionsTestUtil().getActionForArtifactEndingWith(
      getFilesToBuild(binaryNoIdl), "_proguard.jar");
  assertThat(
          ActionsTestUtil.getFirstArtifactEndingWith(
              actionNoIdl.getInputs(), "sdk/aidl_lib.cfg_valid"))
      .isNull();
}
/**
 * Extracts the asset directories encoded in the value that follows {@code flag} in
 * {@code actualArgs}. The value is a comma-separated list of dependencies; each entry has
 * at least four semicolon-separated fields, and the second field holds '#'-separated
 * asset paths.
 */
private List<String> getDependentAssetDirs(String flag, List<String> actualArgs) {
  assertThat(actualArgs).contains(flag);
  String flagValue = actualArgs.get(actualArgs.indexOf(flag) + 1);
  ImmutableList.Builder<String> assetDirs = ImmutableList.builder();
  for (String dependency : Splitter.on(',').split(flagValue)) {
    // NOTE(review): this asserts the format of the whole flag value rather than the
    // individual entry — presumably intentional, but worth confirming.
    assertThat(flagValue).matches("[^;]*;[^;]*;[^;]*;.*");
    assetDirs.add(dependency.split(";")[1].split("#"));
  }
  return assetDirs.build();
}
/**
 * A resource file in a subdirectory of the package ('b_/res/...') yields a primary
 * resource directory rooted at that subdirectory.
 */
@Test
public void testResourcesMultipleDirectoriesFromPackage() throws Exception {
  scratch.file(
      "c/b/m/a/BUILD",
      "android_library(",
      "    name = 'r',",
      "    manifest = 'AndroidManifest.xml',",
      "    custom_package = 'com.google.android.apps.a',",
      "    resource_files = [",
      "        'b_/res/values/strings.xml',",
      "    ],",
      ")");
  scratch.file("c/b/m/a/b_/res",
      "<resources><string name = 'hello'>Hello Android!</string></resources>");
  ConfiguredTarget resource = getConfiguredTarget("//c/b/m/a:r");
  List<String> args = getGeneratingSpawnActionArgs(getResourceArtifact(resource));
  assertPrimaryResourceDirs(ImmutableList.of("c/b/m/a/b_/res"), args);
}
/** A plain res/ glob produces a single primary resource directory at java/android/res. */
@Test
public void testSimpleResources() throws Exception {
  scratch.file(
      "java/android/BUILD",
      "android_library(",
      "    name = 'r',",
      "    manifest = 'AndroidManifest.xml',",
      "    resource_files = glob(['res/**']),",
      ")");
  scratch.file("java/android/res/values/strings.xml",
      "<resources><string name = 'hello'>Hello Android!</string></resources>");
  ConfiguredTarget resource = getConfiguredTarget("//java/android:r");
  List<String> args = getGeneratingSpawnActionArgs(getResourceArtifact(resource));
  assertPrimaryResourceDirs(ImmutableList.of("java/android/res"), args);
}
/**
 * Resources under configuration-qualified directories (values-en) still collapse into one
 * primary resource directory (res), not one per qualifier.
 */
@Test
public void testResourcesWithConfigurationQualifier() throws Exception {
  scratch.file(
      "java/android/BUILD",
      "android_library(",
      "    name = 'r',",
      "    manifest = 'AndroidManifest.xml',",
      "    resource_files = glob(['res/**']),",
      ")");
  scratch.file("java/android/res/values-en/strings.xml",
      "<resources><string name = 'hello'>Hello Android!</string></resources>");
  scratch.file("java/android/res/values/strings.xml",
      "<resources><string name = 'hello'>Hello Android!</string></resources>");
  ConfiguredTarget resource = getConfiguredTarget("//java/android:r");
  List<String> args = getGeneratingSpawnActionArgs(getResourceArtifact(resource));
  assertPrimaryResourceDirs(ImmutableList.of("java/android/res"), args);
}
/**
 * resource_files may reference an exported file in another package; the resource
 * directory is then derived from that package's path (java/other/res).
 */
@Test
public void testResourcesInOtherPackage_exported() throws Exception {
  scratch.file(
      "java/android/BUILD",
      "android_library(",
      "    name = 'r',",
      "    manifest = 'AndroidManifest.xml',",
      "    resource_files = ['//java/other:res/values/strings.xml'],",
      ")");
  scratch.file("java/other/BUILD",
      "exports_files(['res/values/strings.xml'])");
  ConfiguredTarget resource = getConfiguredTarget("//java/android:r");
  List<String> args = getGeneratingSpawnActionArgs(getResourceArtifact(resource));
  assertPrimaryResourceDirs(ImmutableList.of("java/other/res"), args);
  assertNoEvents();
}
/** Same as the exported-file case, but the cross-package resource comes via a filegroup. */
@Test
public void testResourcesInOtherPackage_filegroup() throws Exception {
  scratch.file(
      "java/android/BUILD",
      "android_library(",
      "    name = 'r',",
      "    manifest = 'AndroidManifest.xml',",
      "    resource_files = ['//java/other:fg'],",
      ")");
  scratch.file(
      "java/other/BUILD",
      "filegroup(",
      "    name = 'fg',",
      "    srcs = ['res/values/strings.xml'],",
      ")");
  ConfiguredTarget resource = getConfiguredTarget("//java/android:r");
  List<String> args = getGeneratingSpawnActionArgs(getResourceArtifact(resource));
  assertPrimaryResourceDirs(ImmutableList.of("java/other/res"), args);
  assertNoEvents();
}
// Regression test for b/11924769
/**
 * A local filegroup whose srcs point at another package's exported file must still derive
 * the resource directory from the file's real location (java/other/res).
 */
@Test
public void testResourcesInOtherPackage_filegroupWithExternalSources() throws Exception {
  scratch.file(
      "java/android/BUILD",
      "android_library(",
      "    name = 'r',",
      "    manifest = 'AndroidManifest.xml',",
      "    resource_files = [':fg'],",
      ")",
      "filegroup(",
      "    name = 'fg',",
      "    srcs = ['//java/other:res/values/strings.xml'],",
      ")");
  scratch.file("java/other/BUILD",
      "exports_files(['res/values/strings.xml'])");
  ConfiguredTarget resource = getConfiguredTarget("//java/android:r");
  List<String> args = getGeneratingSpawnActionArgs(getResourceArtifact(resource));
  assertPrimaryResourceDirs(ImmutableList.of("java/other/res"), args);
  assertNoEvents();
}
// Regression test for b/11924769
/**
 * Resources reached through a chain of two filegroups (local fg -> //java/other:fg) must
 * still resolve to the underlying file's directory.
 */
@Test
public void testResourcesInOtherPackage_doubleFilegroup() throws Exception {
  scratch.file(
      "java/android/BUILD",
      "android_library(",
      "    name = 'r',",
      "    manifest = 'AndroidManifest.xml',",
      "    resource_files = [':fg'],",
      ")",
      "filegroup(",
      "    name = 'fg',",
      "    srcs = ['//java/other:fg'],",
      ")");
  scratch.file(
      "java/other/BUILD",
      "filegroup(",
      "    name = 'fg',",
      "    srcs = ['res/values/strings.xml'],",
      ")");
  ConfiguredTarget resource = getConfiguredTarget("//java/android:r");
  List<String> args = getGeneratingSpawnActionArgs(getResourceArtifact(resource));
  assertPrimaryResourceDirs(ImmutableList.of("java/other/res"), args);
  assertNoEvents();
}
/** Declaring resource_files without a manifest must be an analysis error. */
@Test
public void testManifestMissingFails() throws Exception {
  checkError(
      "java/android",
      "r",
      "is required when resource_files or assets are defined.",
      "filegroup(name = 'b')",
      "android_library(",
      "    name = 'r',",
      "    resource_files = [':b'],",
      ")");
}
/**
 * A file directly under res/ (not inside a recognized resource type directory such as
 * values/ or raw/) must be rejected with a message listing the valid directory types.
 */
@Test
public void testResourcesDoesNotMatchDirectoryLayout_BadFile() throws Exception {
  checkError(
      "java/android",
      "r",
      "'java/android/res/somefile.xml' is not in the expected resource directory structure of"
          + " <resource directory>/{"
          + Joiner.on(',').join(AndroidResources.RESOURCE_DIRECTORY_TYPES)
          + "}",
      "android_library(",
      "    name = 'r',",
      "    manifest = 'AndroidManifest.xml',",
      "    resource_files = [",
      "        'res/somefile.xml',",
      "        'r/t/f/m/raw/fold',",
      "    ],",
      ")");
}
/**
 * A file inside res/ whose subdirectory ('other') is not a recognized resource type must
 * be rejected with the same directory-structure message.
 */
@Test
public void testResourcesDoesNotMatchDirectoryLayout_BadDirectory() throws Exception {
  checkError(
      "java/android",
      "r",
      "'java/android/res/other/somefile.xml' is not in the expected resource directory structure"
          + " of <resource directory>/{"
          + Joiner.on(',').join(AndroidResources.RESOURCE_DIRECTORY_TYPES)
          + "}",
      "android_library(",
      "    name = 'r',",
      "    manifest = 'AndroidManifest.xml',",
      "    resource_files = [",
      "        'res/other/somefile.xml',",
      "        'r/t/f/m/raw/fold',",
      "    ],",
      ")");
}
/**
 * All resource_files must share one resource root; mixing res/... with r/t/f/m/... must
 * fail with a message naming both the offending file and the derived root.
 */
@Test
public void testResourcesNotUnderCommonDirectoryFails() throws Exception {
  checkError(
      "java/android",
      "r",
      "'java/android/r/t/f/m/raw/fold' (generated by '//java/android:r/t/f/m/raw/fold') is not"
          + " in the same directory 'res' (derived from java/android/res/raw/speed). All"
          + " resources must share a common directory.",
      "android_library(",
      "    name = 'r',",
      "    manifest = 'AndroidManifest.xml',",
      "    resource_files = [",
      "        'res/raw/speed',",
      "        'r/t/f/m/raw/fold',",
      "    ],",
      ")");
}
/** assets_dir without a matching assets attribute must be rejected. */
@Test
public void testAssetsDirAndNoAssetsFails() throws Exception {
  checkError(
      "cpp/android",
      "r",
      "'assets' and 'assets_dir' should be either both empty or both non-empty",
      "android_library(",
      "    name = 'r',",
      "    manifest = 'AndroidManifest.xml',",
      "    assets_dir = 'assets',",
      ")");
}
/** Every entry in assets must live beneath assets_dir; 'r/t/f/m' does not, so fail. */
@Test
public void testAssetsNotUnderAssetsDirFails() throws Exception {
  checkError(
      "java/android",
      "r",
      "'java/android/r/t/f/m' (generated by '//java/android:r/t/f/m') is not beneath 'assets'",
      "android_library(",
      "    name = 'r',",
      "    manifest = 'AndroidManifest.xml',",
      "    assets_dir = 'assets',",
      "    assets = [",
      "        'assets/valuable',",
      "        'r/t/f/m',",
      "    ]",
      ")");
}
/** assets without assets_dir must be rejected (the converse of the previous test). */
@Test
public void testAssetsAndNoAssetsDirFails() throws Exception {
  scratch.file("java/android/assets/values/strings.xml",
      "<resources><string name = 'hello'>Hello Android!</string></resources>");
  checkError(
      "java/android",
      "r",
      "'assets' and 'assets_dir' should be either both empty or both non-empty",
      "android_library(",
      "    name = 'r',",
      "    manifest = 'AndroidManifest.xml',",
      "    assets = glob(['assets/**']),",
      ")");
}
/** The generated r.srcjar must be rooted in the configuration's bin directory. */
@Test
public void testFileLocation() throws Exception {
  scratch.file(
      "java/android/BUILD",
      "android_library(",
      "    name = 'r',",
      "    manifest = 'AndroidManifest.xml',",
      ")");
  ConfiguredTarget foo = getConfiguredTarget("//java/android:r");
  assertThat(
          ActionsTestUtil.getFirstArtifactEndingWith(getFilesToBuild(foo), "r.srcjar").getRoot())
      .isEqualTo(getTargetConfiguration().getBinDirectory(RepositoryName.MAIN));
}
// regression test for #3294893
/**
 * A BUILD file outside any java/javatests root must produce a helpful error (suggesting
 * custom_package) rather than crash during package derivation.
 */
@Test
public void testNoJavaPathFoundDoesNotThrow() throws Exception {
  checkError(
      "third_party/java_src/android/app",
      "r",
      "The location of your BUILD file determines the Java package used for Android resource "
          + "processing. A directory named \"java\" or \"javatests\" will be used as your Java "
          + "source root and the path of your BUILD file relative to the Java source root will "
          + "be used as the package for Android resource processing. The Java source root could "
          + "not be determined for \"third_party/java_src/android/app\". Move your BUILD file "
          + "under a java or javatests directory, or set the 'custom_package' attribute.",
      "licenses(['notice'])",
      "android_library(",
      "    name = 'r',",
      "    manifest = 'AndroidManifest.xml',",
      ")");
}
/**
 * custom_package must be forwarded to resource processing as --packageForR, overriding
 * the package derived from the BUILD file path.
 */
@Test
public void testWithRenameManifestPackage() throws Exception {
  scratch.file(
      "a/r/BUILD",
      "android_library(",
      "    name = 'r',",
      "    srcs = ['Foo.java'],",
      "    custom_package = 'com.google.android.bar',",
      "    manifest = 'AndroidManifest.xml',",
      "    resource_files = ['res/values/strings.xml'],",
      ")");
  ConfiguredTarget r = getConfiguredTarget("//a/r:r");
  assertNoEvents();
  List<String> args = getGeneratingSpawnActionArgs(getResourceArtifact(r));
  assertContainsSublist(args,
      ImmutableList.of("--packageForR", "com.google.android.bar"));
}
/**
 * Resource processing runs with --debug by default (fastbuild) and without it under
 * --compilation_mode=opt.
 */
@Test
public void testDebugConfiguration() throws Exception {
  scratch.file(
      "java/apps/android/BUILD",
      "android_library(",
      "    name = 'r',",
      "    manifest = 'AndroidManifest.xml',",
      ")");
  checkDebugMode("//java/apps/android:r", true);
  useConfiguration("--compilation_mode=opt");
  checkDebugMode("//java/apps/android:r", false);
}
/**
 * neverlink dependencies are excluded from AndroidResourcesInfo: foo's transitive
 * resources contain lib and bar (reached through lib) but not lib_neverlink, and a
 * neverlink target itself exposes no direct or transitive resources.
 */
@Test
public void testNeverlinkResources_AndroidResourcesInfo() throws Exception {
  scratch.file(
      "java/apps/android/BUILD",
      "android_library(",
      "    name = 'foo',",
      "    manifest = 'AndroidManifest.xml',",
      "    deps = [",
      "        ':lib',",
      "        ':lib_neverlink',",
      "    ],",
      ")",
      "android_library(",
      "    name = 'lib_neverlink',",
      "    neverlink = 1,",
      "    manifest = 'AndroidManifest.xml',",
      "    deps = [':bar'],",
      ")",
      "android_library(",
      "    name = 'lib',",
      "    manifest = 'AndroidManifest.xml',",
      "    deps = [':bar'],",
      ")",
      "android_library(",
      "    name = 'bar',",
      "    manifest = 'AndroidManifest.xml',",
      ")");
  // Compare providers by owning label only.
  Function<ValidatedAndroidResources, Label> getLabel = ValidatedAndroidResources::getLabel;
  ConfiguredTarget foo = getConfiguredTarget("//java/apps/android:foo");
  assertThat(
          Iterables.transform(
              foo.get(AndroidResourcesInfo.PROVIDER).getTransitiveAndroidResources(), getLabel))
      .containsExactly(
          Label.parseAbsolute("//java/apps/android:lib", ImmutableMap.of()),
          Label.parseAbsolute("//java/apps/android:bar", ImmutableMap.of()));
  assertThat(
          Iterables.transform(
              foo.get(AndroidResourcesInfo.PROVIDER).getDirectAndroidResources(), getLabel))
      .containsExactly(Label.parseAbsolute("//java/apps/android:foo", ImmutableMap.of()));
  ConfiguredTarget lib = getConfiguredTarget("//java/apps/android:lib");
  assertThat(
          Iterables.transform(
              lib.get(AndroidResourcesInfo.PROVIDER).getTransitiveAndroidResources(), getLabel))
      .containsExactly(Label.parseAbsolute("//java/apps/android:bar", ImmutableMap.of()));
  assertThat(
          Iterables.transform(
              lib.get(AndroidResourcesInfo.PROVIDER).getDirectAndroidResources(), getLabel))
      .containsExactly(Label.parseAbsolute("//java/apps/android:lib", ImmutableMap.of()));
  // The neverlink target contributes nothing, not even its own resources.
  ConfiguredTarget libNeverlink = getConfiguredTarget("//java/apps/android:lib_neverlink");
  assertThat(libNeverlink.get(AndroidResourcesInfo.PROVIDER).getTransitiveAndroidResources())
      .isEmpty();
  assertThat(libNeverlink.get(AndroidResourcesInfo.PROVIDER).getDirectAndroidResources())
      .isEmpty();
}
/**
 * A neverlink export's resource jar appears on the compile-time classpath but is omitted
 * from the runtime classpath; a regular export appears on both.
 */
@Test
public void testNeverlinkResources_compileAndRuntimeJars() throws Exception {
  scratch.file(
      "java/apps/android/BUILD",
      "android_library(",
      "    name = 'foo',",
      "    manifest = 'AndroidManifest.xml',",
      "    exports = [",
      "        ':lib',",
      "        ':lib_neverlink',",
      "    ],",
      ")",
      "android_library(",
      "    name = 'lib_neverlink',",
      "    neverlink = 1,",
      "    manifest = 'AndroidManifest.xml',",
      ")",
      "android_library(",
      "    name = 'lib',",
      "    manifest = 'AndroidManifest.xml',",
      ")");
  ConfiguredTarget foo = getConfiguredTarget("//java/apps/android:foo");
  ConfiguredTarget lib = getConfiguredTarget("//java/apps/android:lib");
  ConfiguredTarget libNeverlink = getConfiguredTarget("//java/apps/android:lib_neverlink");
  NestedSet<Artifact> neverLinkFilesToBuild = getFilesToBuild(libNeverlink);
  NestedSet<Artifact> libFilesToBuild = getFilesToBuild(lib);
  JavaCompilationArgsProvider argsProvider =
      JavaInfo.getProvider(JavaCompilationArgsProvider.class, foo);
  // Compile-time: both resource jars are present.
  assertThat(argsProvider.getDirectCompileTimeJars())
      .contains(
          ActionsTestUtil.getFirstArtifactEndingWith(
              actionsTestUtil().artifactClosureOf(neverLinkFilesToBuild),
              "lib_neverlink_resources.jar"));
  assertThat(argsProvider.getDirectCompileTimeJars())
      .contains(
          ActionsTestUtil.getFirstArtifactEndingWith(
              actionsTestUtil().artifactClosureOf(libFilesToBuild), "lib_resources.jar"));
  // Runtime: only the non-neverlink resource jar is present.
  assertThat(argsProvider.getRuntimeJars())
      .doesNotContain(
          ActionsTestUtil.getFirstArtifactEndingWith(
              actionsTestUtil().artifactClosureOf(neverLinkFilesToBuild),
              "lib_neverlink_resources.jar"));
  assertThat(argsProvider.getRuntimeJars())
      .contains(
          ActionsTestUtil.getFirstArtifactEndingWith(
              actionsTestUtil().artifactClosureOf(libFilesToBuild), "lib_resources.jar"));
}
/**
 * Resource processing for android_library is split into separate parse, merge, and
 * validate actions (identified here by their mnemonics), rather than one monolithic
 * action; the validator additionally emits R.txt.
 */
@Test
public void testResourceMergeAndProcessParallel() throws Exception {
  // Test that for android_library, we can divide the resource processing action into
  // smaller actions.
  scratch.file(
      "java/android/app/foo/BUILD",
      "android_library(",
      "    name = 'r',",
      "    manifest = 'AndroidManifest.xml',",
      "    resource_files = glob(['res/**']),",
      ")");
  scratch.file(
      "java/android/app/foo/res/values/strings.xml",
      "<resources>",
      "<string name='hello'>Aloha!</string>",
      "<string name='goodbye'>Aloha!</string>",
      "</resources>");
  ConfiguredTarget target = getConfiguredTarget("//java/android/app/foo:r");
  NestedSet<Artifact> filesToBuild = getFilesToBuild(target);
  Set<Artifact> artifacts = actionsTestUtil().artifactClosureOf(filesToBuild);
  ValidatedAndroidResources resources =
      Iterables.getOnlyElement(
          target.get(AndroidResourcesInfo.PROVIDER).getDirectAndroidResources());
  // Locate each stage's action via the artifact it generates.
  SpawnAction resourceParserAction =
      (SpawnAction)
          actionsTestUtil()
              .getActionForArtifactEndingWith(artifacts,
                  "/" + resources.getSymbols().getFilename());
  SpawnAction resourceClassJarAction =
      (SpawnAction)
          actionsTestUtil()
              .getActionForArtifactEndingWith(artifacts,
                  "/" + resources.getJavaClassJar().getFilename());
  SpawnAction resourceSrcJarAction =
      (SpawnAction)
          actionsTestUtil()
              .getActionForArtifactEndingWith(artifacts,
                  "/" + resources.getJavaSourceJar().getFilename());
  assertThat(resourceParserAction.getMnemonic()).isEqualTo("AndroidResourceParser");
  assertThat(resourceClassJarAction.getMnemonic()).isEqualTo("AndroidResourceMerger");
  assertThat(resourceSrcJarAction.getMnemonic()).isEqualTo("AndroidResourceValidator");
  // Validator also generates an R.txt.
  assertThat(resourceSrcJarAction.getOutputs()).contains(resources.getRTxt());
}
/**
 * Asserts that the action generating {@code target}'s r.srcjar passes {@code --debug}
 * exactly when {@code isDebug} is true (checking param-file args if present).
 */
private void checkDebugMode(String target, boolean isDebug) throws Exception {
  ConfiguredTarget configuredTarget = getConfiguredTarget(target);
  SpawnAction srcJarAction =
      (SpawnAction)
          actionsTestUtil()
              .getActionForArtifactEndingWith(getFilesToBuild(configuredTarget), "r.srcjar");
  boolean hasDebugFlag =
      ImmutableList.copyOf(paramFileArgsOrActionArgs(srcJarAction)).contains("--debug");
  assertThat(hasDebugFlag).isEqualTo(isDebug);
}
/**
 * When no manifest is given, one is generated whose package is derived from the path
 * after the java/ root ("android"), from custom_package when set ("foo"), or from the
 * dotted directory path when outside a java root ("third_party.android").
 */
@Test
public void testGeneratedManifestPackage() throws Exception {
  scratch.file(
      "java/android/BUILD",
      "android_library(",
      "    name = 'l',",
      "    srcs = ['foo.java'],",
      ")",
      "android_library(",
      "    name = 'l2',",
      "    custom_package = 'foo',",
      "    srcs = ['foo.java'],",
      ")");
  scratch.file(
      "third_party/android/BUILD",
      "licenses(['notice'])",
      "android_library(",
      "    name = 'l',",
      "    srcs = ['foo.java'],",
      ")");
  // Package derived from the java source root.
  ConfiguredTarget target = getConfiguredTarget("//java/android:l");
  Artifact manifest = getBinArtifact("_generated/l/AndroidManifest.xml", target);
  FileWriteAction action = (FileWriteAction) getGeneratingAction(manifest);
  assertThat(action.getFileContents()).contains("package=\"android\"");
  // custom_package overrides the derived package.
  target = getConfiguredTarget("//java/android:l2");
  manifest = getBinArtifact("_generated/l2/AndroidManifest.xml", target);
  action = (FileWriteAction) getGeneratingAction(manifest);
  assertThat(action.getFileContents()).contains("package=\"foo\"");
  // Outside a java root, the directory path is dotted into a package name.
  target = getConfiguredTarget("//third_party/android:l");
  manifest = getBinArtifact("_generated/l/AndroidManifest.xml", target);
  action = (FileWriteAction) getGeneratingAction(manifest);
  assertThat(action.getFileContents()).contains("package=\"third_party.android\"");
}
/**
 * When idl_srcs come from a genrule, the aidl action must get import roots for both the
 * source tree (-Ijava) and the genfiles tree where the generated .aidl lives.
 */
@Test
public void testGeneratedIdlSrcs() throws Exception {
  scratch.file(
      "java/android/BUILD",
      "genrule(",
      "    name = 'idl',",
      "    outs = ['MyInterface.aidl'],",
      "    cmd = 'touch $@',",
      ")",
      "android_library(",
      "    name = 'lib',",
      "    idl_srcs = [':idl'],",
      "    idl_parcelables = ['MyParcelable.aidl'],",
      ")");
  ConfiguredTarget target = getConfiguredTarget("//java/android:lib");
  PathFragment genfilesJavaPath =
      getTargetConfiguration()
          .getGenfilesDirectory(RepositoryName.MAIN)
          .getExecPath()
          .getRelative("java");
  SpawnAction action = (SpawnAction) actionsTestUtil().getActionForArtifactEndingWith(
      actionsTestUtil().artifactClosureOf(getFilesToBuild(target)), "MyInterface.java");
  assertThat(action.getArguments())
      .containsAllOf("-Ijava", "-I" + genfilesJavaPath.getPathString());
}
/**
 * Two android_library targets in one package may list the same .aidl file without
 * producing conflicting actions or analysis errors.
 */
@Test
public void testMultipleLibsSameIdls() throws Exception {
  scratch.file(
      "java/android/BUILD",
      "android_library(",
      "    name = 'idl1',",
      "    idl_srcs = ['MyInterface.aidl'],",
      ")",
      "android_library(",
      "    name = 'idl2',",
      "    idl_srcs = ['MyInterface.aidl'],",
      ")");
  getConfiguredTarget("//java/android:idl1");
  getConfiguredTarget("//java/android:idl2");
}
/**
 * AndroidIdeInfoProvider exposes the source manifest and the processed (generated)
 * manifest for a library with resources, assets, and idl sources.
 */
@Test
public void testIdeInfoProvider() throws Exception {
  scratch.file(
      "java/android/BUILD",
      "genrule(",
      "    name='genrule',",
      "    srcs=[],",
      "    outs=['assets/genrule.so'],",
      "    cmd='',",
      ")",
      "android_library(",
      "    name = 'r',",
      "    manifest = 'AndroidManifest.xml',",
      "    idl_srcs = [ 'MyInterface.aidl' ],",
      "    resource_files = glob(['res/**']),",
      "    assets_dir = 'assets',",
      "    assets = glob(['assets/**']) + [':genrule']",
      ")");
  scratch.file("java/android/res/values/strings.xml",
      "<resources><string name = 'hello'>Hello Android!</string></resources>");
  scratch.file("java/android/assets/values/orc.txt",
      "Nabu nabu!");
  ConfiguredTarget target = getConfiguredTarget("//java/android:r");
  final AndroidIdeInfoProvider provider = target.get(AndroidIdeInfoProvider.PROVIDER);
  Set<Artifact> artifactClosure = actionsTestUtil().artifactClosureOf(getFilesToBuild(target));
  // The provider's manifest is the checked-in AndroidManifest.xml.
  assertThat(provider.getManifest())
      .isEqualTo(
          ActionsTestUtil.getFirstArtifactEndingWith(
              artifactClosure, "java/android/AndroidManifest.xml"));
  ValidatedAndroidResources resources =
      getOnlyElement(
          getConfiguredTarget("//java/android:r")
              .get(AndroidResourcesInfo.PROVIDER)
              .getDirectAndroidResources());
  // The generated manifest matches the one tracked by the resource provider.
  assertThat(provider.getGeneratedManifest()).isEqualTo(resources.getManifest());
}
/**
 * Same as {@link #testIdeInfoProvider} but for a package outside any java/ source root;
 * the IDE info provider must still surface the source and generated manifests.
 */
@Test
public void testIdeInfoProviderOutsideJavaRoot() throws Exception {
  // Note: rootPath already ends with '/', so relative paths are appended without one.
  String rootPath = "research/handwriting/java/com/google/research/handwriting/";
  scratch.file(
      rootPath + "BUILD",
      "genrule(",
      "    name='genrule',",
      "    srcs=[],",
      "    outs=['assets/genrule.so'],",
      "    cmd='',",
      ")",
      "android_library(",
      "    name = 'r',",
      "    manifest = 'AndroidManifest.xml',",
      "    idl_srcs = [ 'MyInterface.aidl' ],",
      "    resource_files = glob(['res/**']),",
      "    assets_dir = 'assets',",
      "    assets = glob(['assets/**']) + [':genrule']",
      ")");
  scratch.file(rootPath + "res/values/strings.xml",
      "<resources><string name = 'hello'>Hello Android!</string></resources>");
  // Was rootPath + "/assets/..." which produced a double slash in the scratch path.
  scratch.file(rootPath + "assets/values/orc.txt",
      "Nabu nabu!");
  ConfiguredTarget target = getConfiguredTarget(
      "//research/handwriting/java/com/google/research/handwriting:r");
  final AndroidIdeInfoProvider provider = target.get(AndroidIdeInfoProvider.PROVIDER);
  Set<Artifact> artifactClosure = actionsTestUtil().artifactClosureOf(getFilesToBuild(target));
  assertThat(provider.getManifest())
      .isEqualTo(
          ActionsTestUtil.getFirstArtifactEndingWith(
              artifactClosure, "handwriting/AndroidManifest.xml"));
  ValidatedAndroidResources resources =
      getOnlyElement(
          getConfiguredTarget("//research/handwriting/java/com/google/research/handwriting:r")
              .get(AndroidResourcesInfo.PROVIDER)
              .getDirectAndroidResources());
  assertThat(provider.getGeneratedManifest()).isEqualTo(resources.getManifest());
}
/**
 * AndroidIdeInfoProvider works when idl_srcs are themselves generated by a genrule,
 * still exposing the source and generated manifests.
 */
@Test
public void testIdeInfoProviderGeneratedIdl() throws Exception {
  scratch.file(
      "java/android/BUILD",
      "genrule(",
      "    name='genrule',",
      "    srcs=[],",
      "    outs=['assets/genrule.so'],",
      "    cmd='',",
      ")",
      "genrule(",
      "    name = 'idl',",
      "    outs = ['MyGeneratedInterface.aidl'],",
      "    cmd = 'touch $@',",
      ")",
      "android_library(",
      "    name = 'r',",
      "    manifest = 'AndroidManifest.xml',",
      "    idl_srcs = [ ':idl' ],",
      "    idl_parcelables = [ 'MyInterface.aidl' ],",
      "    resource_files = glob(['res/**']),",
      "    assets_dir = 'assets',",
      "    assets = glob(['assets/**']) + [':genrule']",
      ")");
  scratch.file("java/android/res/values/strings.xml",
      "<resources><string name = 'hello'>Hello Android!</string></resources>");
  scratch.file("java/android/assets/values/orc.txt",
      "Nabu nabu!");
  ConfiguredTarget target = getConfiguredTarget("//java/android:r");
  final AndroidIdeInfoProvider provider = target.get(AndroidIdeInfoProvider.PROVIDER);
  Set<Artifact> artifactClosure = actionsTestUtil().artifactClosureOf(getFilesToBuild(target));
  assertThat(provider.getManifest())
      .isEqualTo(
          ActionsTestUtil.getFirstArtifactEndingWith(
              artifactClosure, "java/android/AndroidManifest.xml"));
  ValidatedAndroidResources resources =
      getOnlyElement(
          getConfiguredTarget("//java/android:r")
              .get(AndroidResourcesInfo.PROVIDER)
              .getDirectAndroidResources());
  assertThat(provider.getGeneratedManifest()).isEqualTo(resources.getManifest());
}
/**
 * srcs containing non-Java files (a .xmb message file via a filegroup) must not crash
 * analysis.
 */
@Test
public void testAndroidLibraryWithMessagesDoNotCrash() throws Exception {
  scratch.file(
      "java/com/google/atest/BUILD",
      "filegroup(",
      "    name = 'sources',",
      "    srcs = [",
      "        'source.java',",
      "        'message.xmb',",
      "    ],",
      ")",
      "android_library(",
      "    name = 'alib',",
      "    srcs = [':sources'],",
      ")");
  getConfiguredTarget("//java/com/google/atest:alib");
}
/**
 * A direct android_library dependency contributes its resource and asset directories to
 * the dependent's resource-processing invocation (--directData for assets).
 */
@Test
public void testMultipleDirectDependentResourceDirectories() throws Exception {
  scratch.file(
      "java/android/resources/d1/BUILD",
      "android_library(",
      "    name = 'd1',",
      "    manifest = 'AndroidManifest.xml',",
      "    resource_files = ['d1-res/values/strings.xml'],",
      "    assets = ['assets-d1/some/random/file'],",
      "    assets_dir = 'assets-d1',",
      "    deps = ['//java/android/resources/d2:d2'],",
      ")");
  scratch.file(
      "java/android/resources/d2/BUILD",
      "android_library(",
      "    name = 'd2',",
      "    manifest = 'AndroidManifest.xml',",
      "    assets = ['assets-d2/some/random/file'],",
      "    assets_dir = 'assets-d2',",
      "    resource_files = ['d2-res/values/strings.xml'],",
      ")");
  ConfiguredTarget resource = getConfiguredTarget("//java/android/resources/d1:d1");
  List<String> args = getGeneratingSpawnActionArgs(getResourceArtifact(resource));
  assertPrimaryResourceDirs(ImmutableList.of("java/android/resources/d1/d1-res"), args);
  assertThat(getDirectDependentResourceDirs(args)).contains("java/android/resources/d2/d2-res");
  // Assets are processed by a separate (decoupled) action; check its args too.
  List<String> assetArgs = getGeneratingSpawnActionArgs(getDecoupledAssetArtifact(resource));
  assertThat(getDependentAssetDirs("--directData", assetArgs))
      .contains("java/android/resources/d2/assets-d2");
  assertNoEvents();
}
/**
 * Transitive android_library dependencies contribute their resource/asset directories at
 * the transitive level: d2 (direct) shows up under --directData, d3 (transitive) under
 * --data.
 */
@Test
public void testTransitiveDependentResourceDirectories() throws Exception {
  scratch.file(
      "java/android/resources/d1/BUILD",
      "android_library(",
      "    name = 'd1',",
      "    manifest = 'AndroidManifest.xml',",
      "    resource_files = ['d1-res/values/strings.xml'],",
      "    assets = ['assets-d1/some/random/file'],",
      "    assets_dir = 'assets-d1',",
      "    deps = ['//java/android/resources/d2:d2']",
      ")");
  scratch.file(
      "java/android/resources/d2/BUILD",
      "android_library(",
      "    name = 'd2',",
      "    manifest = 'AndroidManifest.xml',",
      "    assets = ['assets-d2/some/random/file'],",
      "    assets_dir = 'assets-d2',",
      "    resource_files = ['d2-res/values/strings.xml'],",
      "    deps = ['//java/android/resources/d3:d3'],",
      ")");
  scratch.file(
      "java/android/resources/d3/BUILD",
      "android_library(",
      "    name = 'd3',",
      "    manifest = 'AndroidManifest.xml',",
      "    assets = ['assets-d3/some/random/file'],",
      "    assets_dir = 'assets-d3',",
      "    resource_files = ['d3-res/values/strings.xml'],",
      ")");
  ConfiguredTarget resource = getConfiguredTarget("//java/android/resources/d1:d1");
  List<String> args = getGeneratingSpawnActionArgs(getResourceArtifact(resource));
  assertPrimaryResourceDirs(ImmutableList.of("java/android/resources/d1/d1-res"), args);
  // Use the statically imported assertThat for consistency with the rest of this file
  // (was the qualified Truth.assertThat).
  assertThat(getDirectDependentResourceDirs(args))
      .contains("java/android/resources/d2/d2-res");
  assertThat(getTransitiveDependentResourceDirs(args))
      .contains("java/android/resources/d3/d3-res");
  List<String> assetArgs = getGeneratingSpawnActionArgs(getDecoupledAssetArtifact(resource));
  assertThat(getDependentAssetDirs("--directData", assetArgs))
      .contains("java/android/resources/d2/assets-d2");
  assertThat(getDependentAssetDirs("--data", assetArgs))
      .contains("java/android/resources/d3/assets-d3");
  assertNoEvents();
}
@Test
public void testCustomJavacopts() throws Exception {
  // A javacopts entry on the rule must be forwarded verbatim to the javac command line.
  scratch.file(
      "java/android/BUILD",
      "android_library(",
      " name = 'a',",
      " srcs = ['A.java'],",
      " javacopts = ['-g:lines,source'],",
      ")");
  SpawnAction compileAction =
      (SpawnAction) getGeneratingActionForLabel("//java/android:liba.jar");
  Iterable<String> javacArgs = getJavacArguments(compileAction);
  assertThat(javacArgs).contains("-g:lines,source");
}
// Regression test for b/23079127
@Test
public void testSrcjarStrictDeps() throws Exception {
  // Even when the direct dependency's srcs are a .srcjar, strict deps should place
  // only that dependency's header jar on :a's direct compile-time classpath.
  scratch.file(
      "java/strict/BUILD",
      "android_library(",
      " name='a',",
      " srcs=['A.java'],",
      " deps=[':b'],",
      ")",
      "android_library(",
      " name='b',",
      " srcs=['b.srcjar'],",
      " deps=[':c'],",
      ")",
      "android_library(",
      " name='c',",
      " srcs=['C.java'],",
      ")");
  SpawnAction compileAction =
      (SpawnAction) getGeneratingActionForLabel("//java/strict:liba.jar");
  Iterable<String> directDeps =
      prettyArtifactNames(getInputs(compileAction, getDirectJars(compileAction)));
  assertThat(directDeps).containsExactly("java/strict/libb-hjar.jar");
}
@Test
public void testDisallowPrecompiledJars() throws Exception {
// An android_library must not accept a precompiled .jar in srcs, even when it is
// routed in indirectly through a filegroup; analysis should fail with this error.
checkError(
"java/precompiled",
"library",
// messages:
"does not produce any android_library srcs files (expected .java or .srcjar)",
// build file:
"android_library(",
" name = 'library',",
" srcs = [':jar'],",
")",
"filegroup(",
" name = 'jar',",
" srcs = ['lib.jar'],",
")");
}
@Test
public void hjarPredecessors() throws Exception {
  // With header compilation on, :a must compile against b's hjar .jdeps, never
  // against the full libb .jdeps.
  scratch.file(
      "java/test/BUILD",
      "android_library(",
      " name = 'a',",
      " srcs = ['A.java'],",
      " deps = [':b'],",
      ")",
      "android_library(",
      " name = 'b',",
      " srcs = ['B.java'],",
      ")");
  useConfiguration("--java_header_compilation");
  Action compileAction = getGeneratingActionForLabel("//java/test:liba.jar");
  List<String> actionInputs = prettyArtifactNames(compileAction.getInputs());
  assertThat(actionInputs).contains("java/test/libb-hjar.jdeps");
  assertThat(actionInputs).doesNotContain("java/test/libb.jdeps");
}
@Test
public void resourcesFromRuntimeDepsAreIncluded() throws Exception {
  scratch.file(
      "java/android/BUILD",
      "android_library(",
      " name = 'dummyParentLibrary',",
      " deps = [':dummyLibraryOne',",
      " ':dummyLibraryTwo'],",
      " srcs = ['libraryParent.java'],",
      ")",
      "android_library(",
      " name = 'dummyLibraryOne',",
      " exports_manifest = 1,",
      " manifest = 'AndroidManifest.xml',",
      " resource_files = ['res/drawable/dummyResource1.png'],",
      " srcs = ['libraryOne.java'],",
      ")",
      "android_library(",
      " name = 'dummyLibraryTwo',",
      " exports_manifest = 1,",
      " neverlink = 1,",
      " manifest = 'AndroidManifest.xml',",
      " resource_files = ['res/drawable/dummyResource2.png'],",
      " deps = ['dummyLibraryNested'],",
      " srcs = ['libraryTwo.java'],",
      ")",
      "android_library(",
      " name = 'dummyLibraryNested',",
      " exports_manifest = 1,",
      " manifest = 'AndroidManifest.xml',",
      " resource_files = ['res/drawable/dummyResource1.png'],",
      " srcs = ['libraryOne.java'],",
      ")");
  // A library exporting its own resources provides an AAR.
  ConfiguredTarget libraryOne = getConfiguredTarget("//java/android:dummyLibraryOne");
  assertThat(libraryOne.get(AndroidLibraryAarInfo.PROVIDER)).isNotNull();
  // A neverlink library provides no AAR of its own.
  ConfiguredTarget libraryTwo = getConfiguredTarget("//java/android:dummyLibraryTwo");
  assertThat(libraryTwo.get(AndroidLibraryAarInfo.PROVIDER)).isNull();
  // The parent aggregates exactly one transitive AAR.
  ConfiguredTarget parent = getConfiguredTarget("//java/android:dummyParentLibrary");
  AndroidLibraryAarInfo parentProvider = parent.get(AndroidLibraryAarInfo.PROVIDER);
  assertThat(parentProvider).isNotNull();
  assertThat(parentProvider.getTransitiveAars()).hasSize(1);
}
@Test
public void aapt2ArtifactGenerationWhenSdkIsDefined() throws Exception {
// Declare an android_sdk that includes an aapt2 binary; this is what enables
// aapt2 artifact generation for the android_library targets below.
scratch.file(
"sdk/BUILD",
"android_sdk(",
" name = 'sdk',",
" aapt = 'aapt',",
" aapt2 = 'aapt2',",
" adb = 'adb',",
" aidl = 'aidl',",
" android_jar = 'android.jar',",
" apksigner = 'apksigner',",
" dx = 'dx',",
" framework_aidl = 'framework_aidl',",
" main_dex_classes = 'main_dex_classes',",
" main_dex_list_creator = 'main_dex_list_creator',",
" proguard = 'proguard',",
" shrinked_android_jar = 'shrinked_android_jar',",
" zipalign = 'zipalign',",
")");
// a depends on b; both declare resources.
scratch.file(
"java/a/BUILD",
"android_library(",
" name = 'a', ",
" srcs = ['A.java'],",
" deps = [':b'],",
" manifest = 'a/AndroidManifest.xml',",
" resource_files = ['res/values/a.xml'],",
")",
"android_library(",
" name = 'b', ",
" srcs = ['B.java'],",
" manifest = 'b/AndroidManifest.xml',",
" resource_files = ['res/values/b.xml'],",
")");
useConfiguration("--android_sdk=//sdk:sdk");
ConfiguredTargetAndData a = getConfiguredTargetAndData("//java/a:a");
ConfiguredTargetAndData b = getConfiguredTargetAndDataDirectPrerequisite(a, "//java/a:b");
ConfiguredTargetAndData sdk = getConfiguredTargetAndDataDirectPrerequisite(a, "//sdk:sdk");
// With aapt2 available, an action compiling a's symbols must exist...
SpawnAction compileAction =
getGeneratingSpawnAction(
getImplicitOutputArtifact(
a.getConfiguredTarget(),
a.getConfiguration(),
AndroidRuleClasses.ANDROID_COMPILED_SYMBOLS));
assertThat(compileAction).isNotNull();
// ...as must the aapt2 link action that produces the library APK.
SpawnAction linkAction =
getGeneratingSpawnAction(
getImplicitOutputArtifact(
a.getConfiguredTarget(),
a.getConfiguration(),
AndroidRuleClasses.ANDROID_RESOURCES_AAPT2_LIBRARY_APK));
assertThat(linkAction).isNotNull();
// The link action consumes the SDK's android.jar plus a's and b's compiled symbols...
assertThat(linkAction.getInputs())
.containsAllOf(
sdk.getConfiguredTarget().get(AndroidSdkProvider.PROVIDER).getAndroidJar(),
getImplicitOutputArtifact(
a.getConfiguredTarget(),
a.getConfiguration(),
AndroidRuleClasses.ANDROID_COMPILED_SYMBOLS),
getImplicitOutputArtifact(
b.getConfiguredTarget(),
a.getConfiguration(),
AndroidRuleClasses.ANDROID_COMPILED_SYMBOLS));
// ...and produces the aapt2 R.txt and source jar outputs for a.
assertThat(linkAction.getOutputs())
.containsAllOf(
getImplicitOutputArtifact(
a.getConfiguredTarget(),
a.getConfiguration(),
AndroidRuleClasses.ANDROID_RESOURCES_AAPT2_R_TXT),
getImplicitOutputArtifact(
a.getConfiguredTarget(),
a.getConfiguration(),
AndroidRuleClasses.ANDROID_RESOURCES_AAPT2_SOURCE_JAR));
}
@Test
public void aapt2ArtifactGenerationSkippedWhenSdkIsNotDefined() throws Exception {
  // Without an aapt2-capable android_sdk configured, neither the compiled-symbols
  // artifact nor the aapt2 link artifact should have a generating action.
  scratch.file(
      "java/a/BUILD",
      "android_library(",
      " name = 'a', ",
      " srcs = ['A.java'],",
      " manifest = 'a/AndroidManifest.xml',",
      " resource_files = ['res/values/a.xml'],",
      ")");
  ConfiguredTarget target = getConfiguredTarget("//java/a:a");
  assertThat(
          getGeneratingSpawnAction(
              getImplicitOutputArtifact(target, AndroidRuleClasses.ANDROID_COMPILED_SYMBOLS)))
      .isNull();
  assertThat(
          getGeneratingSpawnAction(
              getImplicitOutputArtifact(
                  target, AndroidRuleClasses.ANDROID_RESOURCES_AAPT2_LIBRARY_APK)))
      .isNull();
}
@Test
public void compileDataBindingOutputWhenDataBindingEnabled() throws Exception {
// An SDK with aapt2 is required for the compiled-symbols action to exist.
scratch.file(
"sdk/BUILD",
"android_sdk(",
" name = 'sdk',",
" aapt = 'aapt',",
" aapt2 = 'aapt2',",
" adb = 'adb',",
" aidl = 'aidl',",
" android_jar = 'android.jar',",
" apksigner = 'apksigner',",
" dx = 'dx',",
" framework_aidl = 'framework_aidl',",
" main_dex_classes = 'main_dex_classes',",
" main_dex_list_creator = 'main_dex_list_creator',",
" proguard = 'proguard',",
" shrinked_android_jar = 'shrinked_android_jar',",
" zipalign = 'zipalign',",
")");
scratch.file(
"java/a/BUILD",
"android_library(",
" name = 'a', ",
" srcs = ['A.java'],",
" enable_data_binding = 1,",
" manifest = 'a/AndroidManifest.xml',",
" resource_files = ['res/values/a.xml']",
")");
useConfiguration("--android_sdk=//sdk:sdk");
ConfiguredTarget a = getConfiguredTarget("//java/a:a");
SpawnAction compileAction =
getGeneratingSpawnAction(
getImplicitOutputArtifact(a, AndroidRuleClasses.ANDROID_COMPILED_SYMBOLS));
assertThat(compileAction).isNotNull();
// With enable_data_binding set, the resource compile action must be instructed to
// emit the data binding info output.
Iterable<String> args = paramFileArgsOrActionArgs(compileAction);
assertThat(args).contains("--dataBindingInfoOut");
}
@Test
public void testUseManifestFromResourceApk() throws Exception {
  scratch.file(
      "java/a/BUILD",
      "android_library(",
      " name = 'a', ",
      " srcs = ['A.java'],",
      " manifest = 'a/AndroidManifest.xml',",
      " resource_files = ['res/values/a.xml'],",
      ")");
  // The AAR must reference the processed manifest, not the raw source manifest.
  AndroidLibraryAarInfo aarInfo =
      getConfiguredTarget("//java/a:a").get(AndroidLibraryAarInfo.PROVIDER);
  assertThat(aarInfo).isNotNull();
  String manifestPath = aarInfo.getAar().getManifest().getPath().toString();
  assertThat(manifestPath).contains("processed_manifest");
}
@Test
public void testAndroidLibrary_SrcsLessDepsHostConfigurationNoOverride() throws Exception {
  scratch.file(
      "java/srclessdeps/BUILD",
      "android_library(",
      " name = 'dep_for_foo',",
      " srcs = ['a.java'],",
      ")",
      "android_library(",
      " name = 'foo',",
      " deps = [':dep_for_foo'],",
      ")",
      "genrule(",
      " name = 'some_genrule',",
      " tools = [':foo'],",
      " outs = ['some_outs'],",
      " cmd = '$(location :foo) do_something $@',",
      ")");
  useConfiguration("--experimental_allow_android_library_deps_without_srcs");
  // genrule builds its tools using the host configuration; the flag must carry
  // over to the host-configured android_library as well.
  ConfiguredTarget genrule = getConfiguredTarget("//java/srclessdeps:some_genrule");
  ConfiguredTarget foo = getDirectPrerequisite(genrule, "//java/srclessdeps:foo");
  AndroidConfiguration androidConfig =
      getConfiguration(foo).getFragment(AndroidConfiguration.class);
  assertThat(androidConfig.allowSrcsLessAndroidLibraryDeps(getRuleContext(foo))).isTrue();
}
@Test
public void testAndroidLibraryValidatesProguardSpec() throws Exception {
  scratch.file(
      "java/com/google/android/hello/BUILD",
      "android_library(",
      " name = 'l2',",
      " srcs = ['MoreMaps.java'],",
      " proguard_specs = ['library_spec.cfg'],",
      ")",
      "android_binary(",
      " name = 'b',",
      " srcs = ['HelloApp.java'],",
      " manifest = 'AndroidManifest.xml',",
      " deps = [':l2'],",
      " proguard_specs = ['proguard-spec.pro'],",
      ")");
  Set<Artifact> transitiveArtifacts =
      actionsTestUtil()
          .artifactClosureOf(
              getFilesToBuild(getConfiguredTarget("//java/com/google/android/hello:b")));
  Action action =
      actionsTestUtil()
          .getActionForArtifactEndingWith(transitiveArtifacts, "library_spec.cfg_valid");
  // The binary's own spec is not validated through this mechanism.
  assertWithMessage("proguard validate action was spawned for binary target.")
      .that(
          actionsTestUtil()
              .getActionForArtifactEndingWith(transitiveArtifacts, "proguard-spec.pro_valid"))
      .isNull();
  // Fix: guard against an NPE on action.getInputs() below and report a meaningful
  // failure if the library's validation action is missing entirely (mirrors the
  // sibling test testAndroidLibraryValidatesProguardSpecWithoutBinary).
  assertWithMessage("Proguard validate action was not spawned.").that(action).isNotNull();
  // Fix: message now describes what is actually asserted (the input check), matching
  // the wording used in the sibling test.
  assertWithMessage("Proguard validate action was spawned without correct input.")
      .that(prettyArtifactNames(action.getInputs()))
      .contains("java/com/google/android/hello/library_spec.cfg");
}
@Test
public void testAndroidLibraryValidatesProguardSpecWithoutBinary() throws Exception {
  scratch.file(
      "java/com/google/android/hello/BUILD",
      "android_library(",
      " name = 'l2',",
      " srcs = ['MoreMaps.java'],",
      " proguard_specs = ['library_spec.cfg'],",
      ")",
      "android_library(",
      " name = 'l3',",
      " srcs = ['MoreMaps.java'],",
      " deps = [':l2'],",
      ")");
  // Both the library declaring the spec (l2) and a library that merely depends on
  // it (l3) must expose the validation action via the hidden top-level output group.
  for (String label :
      new String[] {
        "//java/com/google/android/hello:l2", "//java/com/google/android/hello:l3"
      }) {
    Action validateAction =
        actionsTestUtil()
            .getActionForArtifactEndingWith(
                getOutputGroup(getConfiguredTarget(label), OutputGroupInfo.HIDDEN_TOP_LEVEL),
                "library_spec.cfg_valid");
    assertWithMessage("Proguard validate action was not spawned.")
        .that(validateAction)
        .isNotNull();
    assertWithMessage("Proguard validate action was spawned without correct input.")
        .that(prettyArtifactNames(validateAction.getInputs()))
        .contains("java/com/google/android/hello/library_spec.cfg");
  }
}
@Test
public void testForwardedDeps() throws Exception {
  scratch.file(
      "java/fwdeps/BUILD",
      "android_library(",
      " name = 'a',",
      " srcs = ['a.java'],",
      ")",
      "android_library(",
      " name = 'b1',",
      " exports = [':a'],",
      ")",
      "android_library(",
      " name = 'b2',",
      " srcs = [],",
      " exports = [':a'],",
      ")",
      "android_library(",
      " name = 'c1',",
      " srcs = ['c1.java'],",
      " deps = [':b1'],",
      ")",
      "android_library(",
      " name = 'c2',",
      " srcs = ['c2.java'],",
      " deps = [':b2'],",
      ")");
  // Whether the forwarding library omits srcs entirely (b1) or declares an empty
  // list (b2), the exported dep's hjar must reach the dependent's classpath.
  for (String label : new String[] {"//java/fwdeps:c1", "//java/fwdeps:c2"}) {
    ConfiguredTarget dependent = getConfiguredTarget(label);
    Iterable<String> classpath =
        ActionsTestUtil.baseArtifactNames(
            JavaInfo.getProvider(JavaCompilationInfoProvider.class, dependent)
                .getCompilationClasspath());
    assertThat(classpath).containsExactly("liba-hjar.jar");
  }
  assertNoEvents();
}
@Test
public void testExportsAreIndirectNotDirect() throws Exception {
  scratch.file(
      "java/exports/BUILD",
      "android_library(",
      " name = 'a',",
      " srcs = ['a.java'],",
      ")",
      "android_library(",
      " name = 'b',",
      " srcs = ['b.java'],",
      " exports = ['a'],",
      ")",
      "android_library(",
      " name = 'c',",
      " srcs = ['c.java'],",
      " deps = [':b'],",
      ")");
  ConfiguredTarget targetA = getConfiguredTarget("//java/exports:a");
  ConfiguredTarget targetB = getConfiguredTarget("//java/exports:b");
  ConfiguredTarget targetC = getConfiguredTarget("//java/exports:c");
  // b exports a but does not itself compile against it...
  ImmutableList<Artifact> classpathB =
      ImmutableList.copyOf(
          JavaInfo.getProvider(JavaCompilationInfoProvider.class, targetB)
              .getCompilationClasspath());
  ImmutableList<Artifact> classpathC =
      ImmutableList.copyOf(
          JavaInfo.getProvider(JavaCompilationInfoProvider.class, targetC)
              .getCompilationClasspath());
  assertThat(classpathB).isEmpty();
  // ...while c (which deps on b) sees both a's and b's direct compile-time jars.
  assertThat(classpathC)
      .containsAllIn(
          JavaInfo.getProvider(JavaCompilationArgsProvider.class, targetA)
              .getDirectCompileTimeJars());
  assertThat(classpathC)
      .containsAllIn(
          JavaInfo.getProvider(JavaCompilationArgsProvider.class, targetB)
              .getDirectCompileTimeJars());
  assertNoEvents();
}
@Test
public void testAndroidJavacoptsCanBeOverridden() throws Exception {
  // A per-rule javacopts entry must override/extend the Android defaults and show
  // up on the javac command line.
  scratch.file(
      "java/android/BUILD",
      "android_library(",
      " name = 'a',",
      " srcs = ['A.java'],",
      " javacopts = ['-g:lines,source'],",
      ")");
  SpawnAction compileAction =
      (SpawnAction) getGeneratingActionForLabel("//java/android:liba.jar");
  String javacCommandLine = Iterables.toString(getJavacArguments(compileAction));
  assertThat(javacCommandLine).contains("-g:lines,source");
}
@Test
public void testAarGeneration_LocalResources() throws Exception {
  scratch.file(
      "java/android/aartest/BUILD",
      "android_library(",
      " name = 'aartest',",
      " deps = ['dep'],",
      " manifest = 'AndroidManifest.xml',",
      " resource_files = ['res/values/strings.xml'],",
      " assets = ['assets/some/random/file'],",
      " assets_dir = 'assets',",
      ")",
      "android_library(",
      " name = 'dep',",
      " manifest = 'AndroidManifest.xml',",
      " resource_files = ['dep/res/values/strings.xml'],",
      ")");
  ConfiguredTarget target = getConfiguredTarget("//java/android/aartest:aartest");
  Artifact aar = getBinArtifact("aartest.aar", target);
  SpawnAction aarAction =
      (SpawnAction)
          actionsTestUtil()
              .getActionForArtifactEndingWith(
                  actionsTestUtil().artifactClosureOf(aar), "aartest.aar");
  assertThat(aarAction).isNotNull();
  // The aar should bundle the processed manifest, R.txt, the local resources and
  // assets, and the class jar.
  assertThat(prettyArtifactNames(getNonToolInputs(aarAction)))
      .containsAllOf(
          "java/android/aartest/aartest_processed_manifest/AndroidManifest.xml",
          "java/android/aartest/aartest_symbols/R.txt",
          "java/android/aartest/res/values/strings.xml",
          "java/android/aartest/assets/some/random/file",
          "java/android/aartest/libaartest.jar");
}
@Test
public void testAarGeneration_NoResources() throws Exception {
  scratch.file(
      "java/android/aartest/BUILD",
      "android_library(",
      " name = 'aartest',",
      " exports = ['dep'],",
      ")",
      "android_library(",
      " name = 'dep',",
      " manifest = 'AndroidManifest.xml',",
      " resource_files = ['dep/res/values/strings.xml'],",
      ")");
  ConfiguredTarget target = getConfiguredTarget("//java/android/aartest:aartest");
  Artifact aar = getBinArtifact("aartest.aar", target);
  SpawnAction aarAction =
      (SpawnAction)
          actionsTestUtil()
              .getActionForArtifactEndingWith(
                  actionsTestUtil().artifactClosureOf(aar), "aartest.aar");
  assertThat(aarAction).isNotNull();
  // Without local resources the aar still bundles the processed manifest, the
  // R.txt, and the class jar.
  assertThat(prettyArtifactNames(getNonToolInputs(aarAction)))
      .containsAllOf(
          "java/android/aartest/aartest_processed_manifest/AndroidManifest.xml",
          "java/android/aartest/aartest_symbols/R.txt",
          "java/android/aartest/libaartest.jar");
}
@Test
public void testAarProvider_localResources() throws Exception {
  scratch.file(
      "java/android/BUILD",
      "android_library(",
      " name = 'test',",
      " inline_constants = 0,",
      " manifest = 'AndroidManifest.xml',",
      " resource_files = ['res/values/strings.xml'],",
      " deps = [",
      " ':t1',",
      " ':t2',",
      " ],",
      ")",
      "android_library(",
      " name = 't1',",
      " inline_constants = 0,",
      " manifest = 'AndroidManifest.xml',",
      " resource_files = ['res/values/strings.xml'],",
      ")",
      "android_library(",
      " name = 't2',",
      " inline_constants = 0,",
      " manifest = 'AndroidManifest.xml',",
      " resource_files = ['res/values/strings.xml'],",
      ")");
  ConfiguredTarget testTarget = getConfiguredTarget("//java/android:test");
  ConfiguredTarget t1Target = getConfiguredTarget("//java/android:t1");
  ConfiguredTarget t2Target = getConfiguredTarget("//java/android:t2");
  AndroidLibraryAarInfo aarInfo = testTarget.get(AndroidLibraryAarInfo.PROVIDER);
  // Expected aars: each target's own aar plus its processed manifest.
  Aar expectedTestAar =
      Aar.create(
          getBinArtifact("test.aar", testTarget),
          getBinArtifact("test_processed_manifest/AndroidManifest.xml", testTarget));
  Aar expectedT1Aar =
      Aar.create(
          getBinArtifact("t1.aar", t1Target),
          getBinArtifact("t1_processed_manifest/AndroidManifest.xml", t1Target));
  Aar expectedT2Aar =
      Aar.create(
          getBinArtifact("t2.aar", t2Target),
          getBinArtifact("t2_processed_manifest/AndroidManifest.xml", t2Target));
  assertThat(aarInfo.getAar()).isEqualTo(expectedTestAar);
  assertThat(aarInfo.getTransitiveAars())
      .containsExactly(expectedTestAar, expectedT1Aar, expectedT2Aar);
}
@Test
public void testAarProvider_noResources() throws Exception {
  scratch.file(
      "java/android/BUILD",
      "android_library(",
      " name = 'test',",
      " exports = [':transitive'],",
      ")",
      "android_library(",
      " name = 'transitive',",
      " manifest = 'AndroidManifest.xml',",
      " resource_files = ['res/values/strings.xml'],",
      ")");
  ConfiguredTarget testTarget = getConfiguredTarget("//java/android:test");
  ConfiguredTarget transitiveTarget = getConfiguredTarget("//java/android:transitive");
  AndroidLibraryAarInfo aarInfo = testTarget.get(AndroidLibraryAarInfo.PROVIDER);
  Aar expectedTransitiveAar =
      Aar.create(
          getBinArtifact("transitive.aar", transitiveTarget),
          getBinArtifact("transitive_processed_manifest/AndroidManifest.xml", transitiveTarget));
  // A resource-less library contributes no aar of its own but still forwards the
  // exported library's aar transitively.
  assertThat(aarInfo.getAar()).isNull();
  assertThat(aarInfo.getTransitiveAars()).containsExactly(expectedTransitiveAar);
}
@Test
public void nativeHeaderOutputs() throws Exception {
  scratch.file(
      "java/com/google/jni/BUILD", //
      "android_library(",
      " name = 'jni',",
      " srcs = [",
      " 'Foo.java',",
      " 'Bar.java',",
      " ],",
      ")");
  FileConfiguredTarget jarTarget = getFileConfiguredTarget("//java/com/google/jni:libjni.jar");
  SpawnAction compileAction = (SpawnAction) getGeneratingAction(jarTarget.getArtifact());
  String headerJarPath =
      outputPath(compileAction, "java/com/google/jni/libjni-native-header.jar");
  // The javac invocation must be told where to write the native header jar...
  Iterable<String> javacArgs = paramFileArgsForAction(compileAction);
  assertThat(Joiner.on(' ').join(javacArgs))
      .contains(Joiner.on(' ').join("--native_header_output", headerJarPath));
  // ...and the rule's output-jars provider must advertise that same artifact.
  Artifact nativeHeaderJar =
      JavaInfo.getProvider(
              JavaRuleOutputJarsProvider.class, getConfiguredTarget("//java/com/google/jni"))
          .getNativeHeaders();
  assertThat(nativeHeaderJar.getExecPathString()).isEqualTo(headerJarPath);
}
/**
 * Returns the exec path of the first output of {@code action} whose path ends with
 * {@code suffix}. Fails (via {@code verifyNotNull}) when no such output exists, using
 * the suffix as the failure message.
 */
private static String outputPath(Action action, String suffix) {
  // Fix: removed a leftover System.err.println(action.getOutputs()) debug statement
  // that spammed the test log on every call.
  Artifact artifact = ActionsTestUtil.getFirstArtifactEndingWith(action.getOutputs(), suffix);
  return verifyNotNull(artifact, suffix).getExecPath().getPathString();
}
@Test
public void skylarkJavaInfoToAndroidLibraryAttributes() throws Exception {
// A Starlark rule that simply re-exposes its dep's JavaInfo; used below both as a
// 'deps' entry (mya) and as an 'exports' entry (myb) of an android_library.
scratch.file(
"foo/extension.bzl",
"def _impl(ctx):",
" dep_params = ctx.attr.dep[JavaInfo]",
" return [dep_params]",
"my_rule = rule(",
" _impl,",
" attrs = {",
" 'dep': attr.label(),",
" },",
")");
scratch.file(
"foo/BUILD",
"load(':extension.bzl', 'my_rule')",
"android_library(",
" name = 'al_bottom_for_deps',",
" srcs = ['java/A.java'],",
")",
"android_library(",
" name = 'jl_bottom_for_exports',",
" srcs = ['java/A2.java'],",
")",
"my_rule(",
" name = 'mya',",
" dep = ':al_bottom_for_deps',",
")",
"my_rule(",
" name = 'myb',",
" dep = ':jl_bottom_for_exports',",
")",
"android_library(",
" name = 'lib_foo',",
" srcs = ['java/B.java'],",
" deps = [':mya'],",
" exports = [':myb'],",
")");
// Check that the source jars of both bottom libraries (reached through the
// Starlark-provided JavaInfo via deps and via exports) appear in lib_foo's
// transitive source-jars output group, alongside lib_foo's own source jar.
ConfiguredTarget target = getConfiguredTarget("//foo:lib_foo");
Collection<Artifact> transitiveSrcJars =
OutputGroupInfo.get(target).getOutputGroup(JavaSemantics.SOURCE_JARS_OUTPUT_GROUP)
.toCollection();
assertThat(ActionsTestUtil.baseArtifactNames(transitiveSrcJars)).containsExactly(
"libjl_bottom_for_exports-src.jar",
"libal_bottom_for_deps-src.jar",
"liblib_foo-src.jar");
}
@Test
public void testLocalResourcesFirstInJavaCompilationClasspath() throws Exception {
  scratch.file(
      "java/foo/BUILD",
      "android_library(",
      " name='dep',",
      " srcs=['dep.java'], ",
      " resource_files=['res/values/dep.xml'],",
      " manifest='AndroidManifest.xml',",
      ")",
      "android_library(",
      " name='lib',",
      " srcs=['lib.java'],",
      " resource_files=['res/values/lib.xml'],",
      " manifest='AndroidManifest.xml',",
      " deps=[':dep']",
      ")");
  SpawnAction compileAction =
      (SpawnAction)
          getGeneratingAction(getFileConfiguredTarget("//java/foo:liblib.jar").getArtifact());
  // The local resource jar must come before dependency jars, both on the
  // direct-jars list and on the full classpath.
  assertThat(prettyArtifactNames(getInputs(compileAction, getDirectJars(compileAction))))
      .containsExactly(
          "java/foo/lib_resources.jar", "java/foo/dep_resources.jar", "java/foo/libdep-hjar.jar")
      .inOrder();
  assertThat(prettyArtifactNames(getInputs(compileAction, getClasspath(compileAction))))
      .containsExactly(
          "java/foo/lib_resources.jar", "java/foo/dep_resources.jar", "java/foo/libdep-hjar.jar")
      .inOrder();
}
@Test
public void testAndroidCcLinkParamsProvider() throws Exception {
  scratch.file(
      "java/foo/BUILD",
      "cc_library(",
      " name='cc_dep',",
      " srcs=['dep.cc'],",
      " linkopts = ['-CC_DEP'],",
      ")",
      "android_library(",
      " name='lib',",
      " srcs=['lib.java'],",
      " deps=[':cc_dep']",
      ")");
  // Linkopts declared on a cc_library dep must flow through the android_library's
  // AndroidCcLinkParamsProvider.
  ConfiguredTarget lib = getConfiguredTarget("//java/foo:lib");
  Iterable<String> linkopts =
      lib.get(AndroidCcLinkParamsProvider.PROVIDER)
          .getLinkParams()
          .getDynamicModeParamsForDynamicLibrary()
          .flattenedLinkopts();
  assertThat(linkopts).containsExactly("-CC_DEP").inOrder();
}
}
|
src/test/java/com/google/devtools/build/lib/rules/android/AndroidLibraryTest.java
|
// Copyright 2017 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.rules.android;
import static com.google.common.base.Verify.verifyNotNull;
import static com.google.common.collect.Iterables.getOnlyElement;
import static com.google.common.truth.Truth.assertThat;
import static com.google.common.truth.Truth.assertWithMessage;
import static com.google.devtools.build.lib.actions.util.ActionsTestUtil.prettyArtifactNames;
import static com.google.devtools.build.lib.rules.java.JavaCompileActionTestHelper.getClasspath;
import static com.google.devtools.build.lib.rules.java.JavaCompileActionTestHelper.getCompileTimeDependencyArtifacts;
import static com.google.devtools.build.lib.rules.java.JavaCompileActionTestHelper.getDirectJars;
import static com.google.devtools.build.lib.rules.java.JavaCompileActionTestHelper.getJavacArguments;
import static com.google.devtools.build.lib.rules.java.JavaCompileActionTestHelper.getProcessorpath;
import static com.google.devtools.build.lib.rules.java.JavaCompileActionTestHelper.getStrictJavaDepsMode;
import com.google.common.base.Function;
import com.google.common.base.Joiner;
import com.google.common.base.Splitter;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.common.truth.Truth;
import com.google.devtools.build.lib.actions.Action;
import com.google.devtools.build.lib.actions.Artifact;
import com.google.devtools.build.lib.actions.util.ActionsTestUtil;
import com.google.devtools.build.lib.analysis.ConfiguredTarget;
import com.google.devtools.build.lib.analysis.OutputGroupInfo;
import com.google.devtools.build.lib.analysis.actions.FileWriteAction;
import com.google.devtools.build.lib.analysis.actions.SpawnAction;
import com.google.devtools.build.lib.analysis.config.BuildConfiguration.StrictDepsMode;
import com.google.devtools.build.lib.analysis.configuredtargets.FileConfiguredTarget;
import com.google.devtools.build.lib.analysis.configuredtargets.OutputFileConfiguredTarget;
import com.google.devtools.build.lib.cmdline.Label;
import com.google.devtools.build.lib.cmdline.RepositoryName;
import com.google.devtools.build.lib.collect.nestedset.NestedSet;
import com.google.devtools.build.lib.rules.android.AndroidLibraryAarInfo.Aar;
import com.google.devtools.build.lib.rules.java.JavaCompilationArgsProvider;
import com.google.devtools.build.lib.rules.java.JavaCompilationInfoProvider;
import com.google.devtools.build.lib.rules.java.JavaExportsProvider;
import com.google.devtools.build.lib.rules.java.JavaInfo;
import com.google.devtools.build.lib.rules.java.JavaRuleOutputJarsProvider;
import com.google.devtools.build.lib.rules.java.JavaSemantics;
import com.google.devtools.build.lib.skyframe.ConfiguredTargetAndData;
import com.google.devtools.build.lib.vfs.PathFragment;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.Set;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
/**
* Tests for {@link AndroidLibrary}.
*/
@RunWith(JUnit4.class)
public class AndroidLibraryTest extends AndroidBuildViewTestCase {
@Test
public void testSimpleLibrary() throws Exception {
  // Smoke test: a minimal android_library should analyze cleanly.
  scratch.file("java/android/BUILD",
      "android_library(name = 'a',",
      " srcs = ['A.java'],",
      " )");
  // Fix: assert on the analysis result instead of silently discarding it, so a
  // null configured target is reported as an explicit test failure.
  assertThat(getConfiguredTarget("//java/android:a")).isNotNull();
}
@Test
public void testBaselineCoverageArtifacts() throws Exception {
  useConfiguration("--collect_code_coverage");
  // Baseline coverage must span the target's own sources and its deps' sources.
  ConfiguredTarget target =
      scratchConfiguredTarget(
          "java/a",
          "a",
          "android_library(name='a', srcs=['A.java'], deps=[':b'])",
          "android_library(name='b', srcs=['B.java'])");
  assertThat(baselineCoverageArtifactBasenames(target)).containsExactly("A.java", "B.java");
}
// regression test for #3169099
@Test
public void testLibrarySrcs() throws Exception {
// 'srcs' may contain .java/.srcjar files or rules generating them; the .foo files
// and the .foo-producing genrule below must be rejected.
scratch.file("java/srcs/a.foo", "foo");
scratch.file("java/srcs/BUILD",
"android_library(name = 'valid', srcs = ['a.java', 'b.srcjar', ':gvalid', ':gmix'])",
"android_library(name = 'invalid', srcs = ['a.foo', ':ginvalid'])",
"android_library(name = 'mix', srcs = ['a.java', 'a.foo'])",
"genrule(name = 'gvalid', srcs = ['a.java'], outs = ['b.java'], cmd = '')",
"genrule(name = 'ginvalid', srcs = ['a.java'], outs = ['b.foo'], cmd = '')",
"genrule(name = 'gmix', srcs = ['a.java'], outs = ['c.java', 'c.foo'], cmd = '')"
);
assertSrcsValidityForRuleType("//java/srcs", "android_library",
".java or .srcjar");
}
// regression test for #3169095
@Test
public void testXmbInSrcsDoesNotThrow() throws Exception {
// Only checks that analysis does not crash on an .xmb src; errors are expected,
// so the fail-fast handler is removed first.
reporter.removeHandler(failFastHandler);
scratchConfiguredTarget("java/xmb", "a", "android_library(name = 'a', srcs = ['a.xmb'])");
}
@Test
public void testSlashInIdlImportRoot() throws Exception {
// A multi-segment idl_import_root containing '/' must be accepted.
scratchConfiguredTarget("java/com/google/android", "avocado",
"android_library(name='avocado',",
" idl_parcelables=['tropical/fruit/Avocado.aidl'],",
" idl_import_root='tropical/fruit')");
}
@Test
public void testAndroidLibraryWithIdlImportAndNoIdls() throws Exception {
// Setting idl_import_root without any idl_srcs/idl_parcelables is an
// analysis-time error with this exact message.
checkError("java/com/google/android", "lib",
"Neither idl_srcs nor idl_parcelables were specified, "
+ "but 'idl_import_root' attribute was set",
"android_library(name = 'lib',",
" srcs = ['Dummy.java'],",
" idl_import_root = 'src')");
}
@Test
public void testAndroidLibraryWithIdlImportAndIdlSrcs() throws Exception {
// idl_import_root is valid when idl_srcs are present.
scratchConfiguredTarget("java/com/google/android", "lib",
"android_library(name = 'lib',",
" idl_srcs = ['Dummy.aidl'],",
" idl_import_root = 'src')");
}
@Test
public void testAndroidLibraryWithIdlImportAndIdlParcelables() throws Exception {
// idl_import_root is valid when idl_parcelables are present.
scratchConfiguredTarget("java/com/google/android", "lib",
"android_library(name = 'lib',",
" idl_parcelables = ['src/android/DummyParcelable.aidl'],",
" idl_import_root = 'src')");
}
@Test
public void testAndroidLibraryWithIdlImportAndBothIdlTypes() throws Exception {
// idl_import_root is valid when both idl_srcs and idl_parcelables are present.
scratchConfiguredTarget("java/com/google/android", "lib",
"android_library(name = 'lib',",
" idl_srcs = ['src/android/Dummy.aidl'],",
" idl_parcelables = ['src/android/DummyParcelable.aidl'],",
" idl_import_root = 'src')");
}
@Test
public void testAndroidLibraryWithIdlImportAndEmptyLists() throws Exception {
// Explicitly specified (but empty) idl lists are expected to analyze without
// error, unlike omitting the attributes entirely (see the NoIdls test above).
scratchConfiguredTarget("java/com/google/android", "lib",
"android_library(name = 'lib',",
" idl_srcs = [],",
" idl_parcelables = [],",
" idl_import_root = 'src')");
}
@Test
public void testAndroidLibraryWithIdlPreprocessed() throws Exception {
// idl_preprocessed files may accompany idl_srcs without error.
scratchConfiguredTarget(
"java/com/google/android",
"lib",
"android_library(name = 'lib',",
" idl_srcs = ['src/android/Dummy.aidl'],",
" idl_preprocessed = ['src/android/DummyParcelable.aidl'])");
}
@Test
public void testCommandLineContainsTargetLabelAndRuleKind() throws Exception {
  // The compiled target's label must be passed to javac via --target_label.
  scratch.file("java/android/BUILD",
      "android_library(name = 'a', srcs = ['A.java'])");
  SpawnAction compileAction =
      (SpawnAction) getGeneratingActionForLabel("//java/android:liba.jar");
  String javacCommandLine = Iterables.toString(getJavacArguments(compileAction));
  assertThat(javacCommandLine).contains("--target_label, //java/android:a");
}
@Test
public void testStrictAndroidDepsOff() throws Exception {
  // --strict_java_deps=OFF must propagate to the javac action's strict-deps mode.
  useConfiguration("--strict_java_deps=OFF");
  scratch.file("java/android/strict/BUILD",
      "android_library(name = 'b', srcs = ['B.java'])");
  Artifact jar = getFileConfiguredTarget("//java/android/strict:libb.jar").getArtifact();
  SpawnAction javacAction = (SpawnAction) getGeneratingAction(jar);
  assertThat(getStrictJavaDepsMode(javacAction)).isEqualTo(StrictDepsMode.OFF);
}
@Test
public void testStrictAndroidDepsOn() throws Exception {
  // With no flag set, strict deps defaults to ERROR for android_library.
  scratch.file("java/android/strict/BUILD",
      "android_library(name = 'b', srcs = ['B.java'])");
  Artifact jar = getFileConfiguredTarget("//java/android/strict:libb.jar").getArtifact();
  SpawnAction javacAction = (SpawnAction) getGeneratingAction(jar);
  assertThat(getStrictJavaDepsMode(javacAction)).isEqualTo(StrictDepsMode.ERROR);
}
@Test
public void testStrictAndroidDepsWarn() throws Exception {
  // --strict_android_deps=WARN must propagate to the javac action's strict-deps mode.
  useConfiguration("--strict_android_deps=WARN");
  scratch.file("java/android/strict/BUILD",
      "android_library(name = 'b', srcs = ['B.java'])");
  Artifact jar = getFileConfiguredTarget("//java/android/strict:libb.jar").getArtifact();
  SpawnAction javacAction = (SpawnAction) getGeneratingAction(jar);
  assertThat(getStrictJavaDepsMode(javacAction)).isEqualTo(StrictDepsMode.WARN);
}
@Test
public void testFixDepsToolEmpty() throws Exception {
  // With no explicit flag, the default deps-fixing tool is 'add_dep'.
  scratch.file("java/android/BUILD", "android_library(name = 'b', srcs = ['B.java'])");
  Artifact jar = getFileConfiguredTarget("//java/android:libb.jar").getArtifact();
  List<String> commandLine = getGeneratingSpawnActionArgs(jar);
  assertThat(commandLine).containsAllOf("--experimental_fix_deps_tool", "add_dep").inOrder();
}
@Test
public void testFixDepsTool() throws Exception {
  // An explicit --experimental_fix_deps_tool must be forwarded to the action.
  useConfiguration("--experimental_fix_deps_tool=auto_fixer");
  scratch.file("java/android/BUILD", "android_library(name = 'b', srcs = ['B.java'])");
  Artifact jar = getFileConfiguredTarget("//java/android:libb.jar").getArtifact();
  List<String> commandLine = getGeneratingSpawnActionArgs(jar);
  assertThat(commandLine).containsAllOf("--experimental_fix_deps_tool", "auto_fixer").inOrder();
}
@Test
public void testJavaPluginProcessorPath() throws Exception {
// A java_plugin listed in 'plugins' must contribute its processor_class and its
// jar closure to the javac invocation of the annotated android_library.
scratch.file("java/test/BUILD",
"java_library(name = 'plugin_dep',",
" srcs = [ 'ProcessorDep.java'])",
"java_plugin(name = 'plugin',",
" srcs = ['AnnotationProcessor.java'],",
" processor_class = 'com.google.process.stuff',",
" deps = [ ':plugin_dep' ])",
"android_library(name = 'to_be_processed',",
" plugins = [':plugin'],",
" srcs = ['ToBeProcessed.java'])");
ConfiguredTarget target = getConfiguredTarget("//java/test:to_be_processed");
OutputFileConfiguredTarget output = (OutputFileConfiguredTarget)
getFileConfiguredTarget("//java/test:libto_be_processed.jar");
SpawnAction javacAction = (SpawnAction) getGeneratingAction(output.getArtifact());
// Exactly one processor runs: the one named by the plugin's processor_class.
assertThat(getProcessorNames(javacAction)).contains("com.google.process.stuff");
assertThat(getProcessorNames(javacAction)).hasSize(1);
// The processor path carries the plugin jar and its dependency jar.
assertThat(
ActionsTestUtil.baseArtifactNames(
getInputs(javacAction, getProcessorpath(javacAction))))
.containsExactly("libplugin.jar", "libplugin_dep.jar");
assertThat(
actionsTestUtil()
.predecessorClosureOf(getFilesToBuild(target), JavaSemantics.JAVA_SOURCE))
.isEqualTo("ToBeProcessed.java AnnotationProcessor.java ProcessorDep.java");
}
// Same test as above, enabling the plugin through the command line (--plugin=) instead of
// the BUILD-file `plugins` attribute; the resulting processor wiring must be identical.
@Test
public void testPluginCommandLine() throws Exception {
scratch.file("java/test/BUILD",
"java_library(name = 'plugin_dep',",
"    srcs = [ 'ProcessorDep.java'])",
"java_plugin(name = 'plugin',",
"    srcs = ['AnnotationProcessor.java'],",
"    processor_class = 'com.google.process.stuff',",
"    deps = [ ':plugin_dep' ])",
"android_library(name = 'to_be_processed',",
"    srcs = ['ToBeProcessed.java'])");
useConfiguration("--plugin=//java/test:plugin");
ConfiguredTarget target = getConfiguredTarget("//java/test:to_be_processed");
OutputFileConfiguredTarget output =
(OutputFileConfiguredTarget) getFileConfiguredTarget("//java/test:libto_be_processed.jar");
SpawnAction javacAction = (SpawnAction) getGeneratingAction(output.getArtifact());
assertThat(getProcessorNames(javacAction)).contains("com.google.process.stuff");
assertThat(getProcessorNames(javacAction)).hasSize(1);
assertThat(
ActionsTestUtil.baseArtifactNames(
getInputs(javacAction, getProcessorpath(javacAction))))
.containsExactly("libplugin.jar", "libplugin_dep.jar");
assertThat(
actionsTestUtil()
.predecessorClosureOf(getFilesToBuild(target), JavaSemantics.JAVA_SOURCE))
.isEqualTo("ToBeProcessed.java AnnotationProcessor.java ProcessorDep.java");
}
// Listing a plain java_library in `plugins` is rejected with a misplaced-rule error.
@Test
public void testInvalidPlugin() throws Exception {
checkError("java/test", "lib",
// error:
getErrorMsgMisplacedRules("plugins", "android_library",
"//java/test:lib", "java_library", "//java/test:not_a_plugin"),
// BUILD file:
"java_library(name = 'not_a_plugin',",
"    srcs = [ 'NotAPlugin.java'])",
"android_library(name = 'lib',",
"    plugins = [':not_a_plugin'],",
"    srcs = ['Lib.java'])");
}
// With the allow flag set, a srcs-less android_library with deps only produces a
// deprecation warning rather than an error.
@Test
public void testDisallowDepsWithoutSrcsWarning() throws Exception {
useConfiguration("--experimental_allow_android_library_deps_without_srcs=true");
checkWarning("android/deps", "b",
// message:
"android_library will be deprecating the use of deps to export targets implicitly",
// build file
"android_library(name = 'a', srcs = ['a.java'])",
"android_library(name = 'b', deps = [':a'])");
}
// Without the allow flag (default), a srcs-less android_library with deps is an error.
@Test
public void testDisallowDepsWithoutSrcsError() throws Exception {
checkError("android/deps", "b",
// message:
"android_library will be deprecating the use of deps to export targets implicitly",
// build file
"android_library(name = 'a', srcs = ['a.java'])",
"android_library(name = 'b', deps = [':a'])");
}
// A srcs-less library that declares local resources is always allowed to have deps,
// even when the deps-without-srcs allowance flag is explicitly off.
@Test
public void testAlwaysAllowDepsWithoutSrcsIfLocalResources() throws Exception {
scratch.file("java/android/BUILD",
"android_library(name = 'a', srcs = ['a.java'])",
"android_library(name = 'r',",
"                manifest = 'AndroidManifest.xml',",
"                resource_files = glob(['res/**']),",
"                deps = [':a'])");
scratch.file("java/android/res/values/strings.xml",
"<resources><string name = 'hello'>Hello Android!</string></resources>");
useConfiguration("--experimental_allow_android_library_deps_without_srcs=false");
getConfiguredTarget("//java/android:r");
assertNoEvents();
}
// Sources reached only through an exports-only "alias" library are still part of the
// predecessor closure of the top-level library's outputs.
@Test
public void testTransitiveDependencyThroughExports() throws Exception {
scratch.file("java/test/BUILD",
"android_library(name = 'somelib',",
"    srcs = ['Lib.java'],",
"    deps = [':somealias'])",
"android_library(name = 'somealias',",
"    exports = [':somedep'])",
"android_library(name = 'somedep',",
"    srcs = ['Dependency.java'],",
"    deps = [ ':otherdep' ])",
"android_library(name = 'otherdep',",
"    srcs = ['OtherDependency.java'])");
ConfiguredTarget libTarget = getConfiguredTarget("//java/test:somelib");
assertThat(actionsTestUtil().predecessorClosureAsCollection(getFilesToBuild(libTarget),
JavaSemantics.JAVA_SOURCE)).containsExactly(
"Lib.java", "Dependency.java", "OtherDependency.java");
assertNoEvents();
}
// Under strict deps, an exported dep (b) is on the direct compile-time classpath of the
// exporter (a), but b's own dep (c) is not.
@Test
public void testTransitiveStrictDeps() throws Exception {
scratch.file("java/peach/BUILD",
"android_library(name='a', exports=[':b'])",
"android_library(name='b', srcs=['B.java'], deps=[':c'])",
"android_library(name='c', srcs=['C.java'])");
useConfiguration("--strict_java_deps=ERROR");
ConfiguredTarget a = getConfiguredTarget("//java/peach:a");
Iterable<String> compileTimeJars =
ActionsTestUtil.baseArtifactNames(
JavaInfo.getProvider(JavaCompilationArgsProvider.class, a).getDirectCompileTimeJars());
assertThat(compileTimeJars).contains("libb-hjar.jar");
assertThat(compileTimeJars).doesNotContain("libc-hjar.jar");
assertNoEvents();
}
// With --java_deps, a library that compiles sources emits a .jdeps output; an
// exports-only library (no srcs) does not.
@Test
public void testEmitOutputDeps() throws Exception {
scratch.file("java/deps/BUILD",
"android_library(name = 'a', exports = [':b'])",
"android_library(name = 'b', srcs = ['B.java'])");
useConfiguration("--java_deps");
SpawnAction aAction = (SpawnAction) getGeneratingActionForLabel("//java/deps:liba.jar");
List<String> aOutputs = prettyArtifactNames(aAction.getOutputs());
assertThat(aOutputs).doesNotContain("java/deps/liba.jdeps");
SpawnAction bAction = (SpawnAction) getGeneratingActionForLabel("//java/deps:libb.jar");
List<String> bOutputs = prettyArtifactNames(bAction.getOutputs());
assertThat(bOutputs).contains("java/deps/libb.jdeps");
assertNoEvents();
}
// The compile-time dependency artifacts (.jdeps of header jars) of a target include those
// of compiled direct deps and of libraries they export — but nothing from an exports-only
// library itself (b contributes d's jdeps, not its own).
@Test
public void testDependencyArtifactsWithExports() throws Exception {
scratch.file("java/classpath/BUILD",
"android_library(name = 'a', srcs = ['A.java'], deps = [':b', ':c'])",
"android_library(name = 'b', exports = [':d'])",
"android_library(name = 'c', srcs = ['C.java'], exports = [':e'])",
"android_library(name = 'd', srcs = ['D.java'])",
"android_library(name = 'e', srcs = ['E.java'])");
SpawnAction aAction = (SpawnAction) getGeneratingActionForLabel("//java/classpath:liba.jar");
List<String> deps =
prettyArtifactNames(getInputs(aAction, getCompileTimeDependencyArtifacts(aAction)));
assertThat(deps)
.containsExactly(
"java/classpath/libc-hjar.jdeps",
"java/classpath/libd-hjar.jdeps",
"java/classpath/libe-hjar.jdeps");
assertNoEvents();
}
// By default, using deps without srcs (implicit export) is rejected with the
// deprecation error.
@Test
public void testSrcsLessExportsAreDisallowed() throws Exception {
checkError(
"java/deps",
"b",
"android_library will be deprecating the use of deps to export targets implicitly",
"android_library(name = 'a', srcs = ['a.java'])",
"android_library(name = 'b', deps = ['a'])"
);
}
// Under strict deps, compiling c against b also puts b's export (a) on the direct
// classpath as a header jar.
@Test
public void testExportsWithStrictJavaDepsFlag() throws Exception {
scratch.file("java/exports/BUILD",
"android_library(name = 'a', srcs = ['a.java'])",
"android_library(name = 'b', srcs = ['b.java'], exports = ['a'])",
"android_library(name = 'c', srcs = ['c.java'], deps = [':b'])");
useConfiguration("--strict_java_deps=WARN");
SpawnAction javacAction = (SpawnAction) getGeneratingActionForLabel("//java/exports:libc.jar");
assertThat(prettyArtifactNames(getInputs(javacAction, getDirectJars(javacAction))))
.containsExactly("java/exports/libb-hjar.jar", "java/exports/liba-hjar.jar");
assertNoEvents();
}
// An exported library contributes its runfiles (its jar and its data files) to the
// default runfiles of the exporting target.
@Test
public void testExportsRunfiles() throws Exception {
scratch.file("java/exports/BUILD",
"android_library(name = 'a', srcs = ['a.java'], data = ['data.txt'])",
"android_library(name = 'b', srcs = ['b.java'], exports = [':a'])");
ConfiguredTarget bTarget = getConfiguredTarget("//java/exports:b");
// Truth convention is assertThat(actual).isEqualTo(expected); the operands were
// reversed here, which made failure messages misleading. Assert on the actual value.
assertThat(ActionsTestUtil.baseArtifactNames(getDefaultRunfiles(bTarget).getArtifacts()))
.isEqualTo(Arrays.asList("data.txt", "liba.jar", "libb.jar"));
assertNoEvents();
}
// JavaExportsProvider's transitive exports contain every label reachable through
// chained `exports` attributes, not just the direct export.
@Test
public void testTransitiveExports() throws Exception {
scratch.file("java/com/google/exports/BUILD",
"android_library(name = 'dummy',",
"    srcs = ['dummy.java'],",
"    exports = [':dummy2'])",
"android_library(name = 'dummy2',",
"    srcs = ['dummy2.java'],",
"    exports = [':dummy3'])",
"android_library(name = 'dummy3',",
"    srcs = ['dummy3.java'],",
"    exports = [':dummy4'])",
"android_library(name = 'dummy4',",
"    srcs = ['dummy4.java'])");
ConfiguredTarget target = getConfiguredTarget("//java/com/google/exports:dummy");
List<Label> exports =
ImmutableList.copyOf(
JavaInfo.getProvider(JavaExportsProvider.class, target).getTransitiveExports());
assertThat(exports)
.containsExactly(
Label.parseAbsolute("//java/com/google/exports:dummy2", ImmutableMap.of()),
Label.parseAbsolute("//java/com/google/exports:dummy3", ImmutableMap.of()),
Label.parseAbsolute("//java/com/google/exports:dummy4", ImmutableMap.of()));
assertNoEvents();
}
// An android_library with only idl_srcs analyzes cleanly, producing no events.
@Test
public void testSimpleIdl() throws Exception {
scratch.file(
"java/android/BUILD",
"android_library(name = 'idl',",
"                idl_srcs = ['a.aidl'])");
getConfiguredTarget("//java/android:idl");
assertNoEvents();
}
// idl_srcs may not reference files in another package directly; the error suggests moving
// the file or depending on a rule in that package.
@Test
public void testIdlSrcsFromAnotherPackageFails() throws Exception {
// Expecting an error event, so disable the fail-fast handler first.
reporter.removeHandler(failFastHandler);
scratch.file("java/android/a/BUILD",
"exports_files(['A.aidl'])");
scratch.file("java/android/BUILD",
"android_library(name = 'idl',",
"                idl_srcs = ['//java/android/a:A.aidl'])");
getConfiguredTarget("//java/android:idl");
assertContainsEvent("do not import '//java/android/a:A.aidl' directly. You should either"
+ " move the file to this package or depend on an appropriate rule there");
}
// The AIDL class jar and source jar are produced by one and the same action, and that
// action's command line lists the generated .java file for each .aidl source.
@Test
public void testIdlClassJarAction() throws Exception {
scratch.file("java/android/BUILD",
"android_library(name = 'idl',",
"                idl_srcs = ['a.aidl', 'b.aidl', 'c.aidl'])");
ConfiguredTarget idlTarget =
getConfiguredTarget("//java/android:idl");
NestedSet<Artifact> outputGroup =
getOutputGroup(idlTarget, AndroidIdlHelper.IDL_JARS_OUTPUT_GROUP);
SpawnAction classJarAction = (SpawnAction) actionsTestUtil().getActionForArtifactEndingWith(
actionsTestUtil().artifactClosureOf(outputGroup), "libidl-idl.jar");
SpawnAction sourceJarAction = (SpawnAction) actionsTestUtil().getActionForArtifactEndingWith(
actionsTestUtil().artifactClosureOf(outputGroup), "libidl-idl.srcjar");
// Same action instance generates both jars.
assertThat(sourceJarAction).isSameAs(classJarAction);
PathFragment genfilesPath =
getTargetConfiguration()
.getGenfilesDirectory(RepositoryName.MAIN)
.getExecPath()
.getRelative("java/android/idl_aidl/java/android");
assertThat(classJarAction.getArguments()).containsAllOf(
genfilesPath.getRelative("a.java").getPathString(),
genfilesPath.getRelative("b.java").getPathString(),
genfilesPath.getRelative("c.java").getPathString());
}
// The IDL jars output group is transitive: it contains the idl jar/srcjar of the target
// itself and of its android_library deps.
@Test
public void testIdlOutputGroupTransitivity() throws Exception {
scratch.file("java/android/BUILD",
"android_library(name = 'lib',",
"                idl_srcs = ['a.aidl'],",
"                deps = [':dep'])",
"android_library(name = 'dep',",
"                idl_srcs = ['b.aidl'])");
ConfiguredTarget idlTarget =
getConfiguredTarget("//java/android:lib");
NestedSet<Artifact> outputGroup =
getOutputGroup(idlTarget, AndroidIdlHelper.IDL_JARS_OUTPUT_GROUP);
List<String> asString = Lists.newArrayList();
for (Artifact artifact : outputGroup) {
asString.add(artifact.getRootRelativePathString());
}
assertThat(asString).containsAllOf(
"java/android/libdep-idl.jar",
"java/android/libdep-idl.srcjar",
"java/android/liblib-idl.jar",
"java/android/liblib-idl.srcjar"
);
}
// idl_parcelables outside a java/javatests root fail with a clear "cannot determine
// java root" error.
@Test
public void testNoJavaDir() throws Exception {
checkError("android/hello", "idl",
// message:
"Cannot determine java/javatests root for import android/hello/Import.aidl",
// build file:
"android_library(name = 'idl',",
"                srcs = ['Import.java'],",
"                idl_parcelables = ['Import.aidl'])");
}
// exported_plugins apply to direct dependents only: consuming_lib (direct dep of the
// exporter) runs the processor; leaf_lib (transitive) does not.
@Test
public void testExportedPluginsAreInherited() throws Exception {
scratch.file(
"java/test/BUILD",
"java_plugin(name = 'plugin',",
"    srcs = [ 'Plugin.java' ],",
"    processor_class = 'com.google.process.stuff')",
"android_library(name = 'exporting_lib',",
"    srcs = [ 'ExportingLib.java' ],",
"    exported_plugins = [ ':plugin' ])",
"android_library(name = 'consuming_lib',",
"    srcs = [ 'ConsumingLib.java' ],",
"    deps = [ ':exporting_lib' ])",
"android_library(name = 'leaf_lib',",
"    srcs = [ 'LeafLib.java' ],",
"    deps = [ ':consuming_lib' ])");
getConfiguredTarget("//java/test:consuming_lib");
getConfiguredTarget("//java/test:leaf_lib");
// libconsuming_lib should include the plugin, since it directly depends on exporting_lib
assertThat(getProcessorNames("//java/test:libconsuming_lib.jar"))
.containsExactly("com.google.process.stuff");
// but libleaf_lib should not, because its dependency is transitive.
assertThat(getProcessorNames("//java/test:libleaf_lib.jar")).isEmpty();
}
// Binaries whose deps have AIDL-generated sources pick up the SDK aidl_lib's Proguard
// specs (seen as a validated "aidl_lib.cfg_valid" input to the proguard action); binaries
// without AIDL sources do not.
@Test
public void testAidlLibAddsProguardSpecs() throws Exception {
scratch.file(
"sdk/BUILD",
"android_sdk(name = 'sdk',",
"            aapt = 'aapt',",
"            adb = 'adb',",
"            aidl = 'aidl',",
"            aidl_lib = ':aidl_lib',",
"            android_jar = 'android.jar',",
"            apksigner = 'apksigner',",
"            dx = 'dx',",
"            framework_aidl = 'framework_aidl',",
"            main_dex_classes = 'main_dex_classes',",
"            main_dex_list_creator = 'main_dex_list_creator',",
"            proguard = 'proguard',",
"            shrinked_android_jar = 'shrinked_android_jar',",
"            zipalign = 'zipalign')",
"java_library(name = 'aidl_lib',",
"             srcs = ['AidlLib.java'],",
"             proguard_specs = ['aidl_lib.cfg'])");
scratch.file("java/com/google/android/hello/BUILD",
"android_library(name = 'library',",
"                srcs = ['MainActivity.java'],",
"                idl_srcs = ['IMyInterface.aidl'])",
"android_library(name = 'library_no_idl',",
"                srcs = ['MainActivity.java'])",
"android_binary(name = 'binary',",
"               deps = [':library'],",
"               manifest = 'AndroidManifest.xml',",
"               proguard_specs = ['proguard-spec.pro'])",
"android_binary(name = 'binary_no_idl',",
"               deps = [':library_no_idl'],",
"               manifest = 'AndroidManifest.xml',",
"               proguard_specs = ['proguard-spec.pro'])");
useConfiguration("--android_sdk=//sdk:sdk");
// Targets with AIDL-generated sources also get AIDL support lib Proguard specs
ConfiguredTarget binary = getConfiguredTarget("//java/com/google/android/hello:binary");
Action action = actionsTestUtil().getActionForArtifactEndingWith(
getFilesToBuild(binary), "_proguard.jar");
assertThat(
ActionsTestUtil.getFirstArtifactEndingWith(
action.getInputs(), "sdk/aidl_lib.cfg_valid"))
.isNotNull();
// Targets without AIDL-generated sources don't care
ConfiguredTarget binaryNoIdl =
getConfiguredTarget("//java/com/google/android/hello:binary_no_idl");
Action actionNoIdl = actionsTestUtil().getActionForArtifactEndingWith(
getFilesToBuild(binaryNoIdl), "_proguard.jar");
assertThat(
ActionsTestUtil.getFirstArtifactEndingWith(
actionNoIdl.getInputs(), "sdk/aidl_lib.cfg_valid"))
.isNull();
}
// Extracts asset directory paths from the value that follows {@code flag} on a command
// line. Each comma-separated dependency entry appears to be encoded as
// ';'-separated fields whose second field holds '#'-joined paths — TODO(review): confirm
// this encoding against the producer of the flag value.
private List<String> getDependentAssetDirs(String flag, List<String> actualArgs) {
assertThat(actualArgs).contains(flag);
// The flag's value is the argument immediately following the flag itself.
String actualFlagValue = actualArgs.get(actualArgs.indexOf(flag) + 1);
ImmutableList.Builder<String> actualPaths = ImmutableList.builder();
for (String resourceDependency : Splitter.on(',').split(actualFlagValue)) {
// Sanity-check that the value has at least four ';'-separated fields.
assertThat(actualFlagValue).matches("[^;]*;[^;]*;[^;]*;.*");
// split("#") yields an array; added via the builder's varargs add(E...), so every
// '#'-separated path in field [1] lands in the result.
actualPaths.add(resourceDependency.split(";")[1].split("#"));
}
return actualPaths.build();
}
// A resource file under a non-"res"-rooted subdirectory (b_/res) is still resolved to
// the correct primary resource directory.
@Test
public void testResourcesMultipleDirectoriesFromPackage() throws Exception {
scratch.file("c/b/m/a/BUILD",
"android_library(name = 'r',",
"                manifest = 'AndroidManifest.xml',",
"                custom_package = 'com.google.android.apps.a',",
"                resource_files = [",
"                  'b_/res/values/strings.xml',",
"                ]",
"                )");
scratch.file("c/b/m/a/b_/res",
"<resources><string name = 'hello'>Hello Android!</string></resources>");
ConfiguredTarget resource = getConfiguredTarget("//c/b/m/a:r");
List<String> args = getGeneratingSpawnActionArgs(getResourceArtifact(resource));
assertPrimaryResourceDirs(ImmutableList.of("c/b/m/a/b_/res"), args);
}
// A simple res/ glob resolves to a single primary resource directory.
@Test
public void testSimpleResources() throws Exception {
scratch.file("java/android/BUILD",
"android_library(name = 'r',",
"                manifest = 'AndroidManifest.xml',",
"                resource_files = glob(['res/**']),",
"                )");
scratch.file("java/android/res/values/strings.xml",
"<resources><string name = 'hello'>Hello Android!</string></resources>");
ConfiguredTarget resource = getConfiguredTarget("//java/android:r");
List<String> args = getGeneratingSpawnActionArgs(getResourceArtifact(resource));
assertPrimaryResourceDirs(ImmutableList.of("java/android/res"), args);
}
// Configuration-qualified directories (values-en) still map to the single res/ root.
@Test
public void testResourcesWithConfigurationQualifier() throws Exception {
scratch.file("java/android/BUILD",
"android_library(name = 'r',",
"                manifest = 'AndroidManifest.xml',",
"                resource_files = glob(['res/**']),",
"                )");
scratch.file("java/android/res/values-en/strings.xml",
"<resources><string name = 'hello'>Hello Android!</string></resources>");
scratch.file("java/android/res/values/strings.xml",
"<resources><string name = 'hello'>Hello Android!</string></resources>");
ConfiguredTarget resource = getConfiguredTarget("//java/android:r");
List<String> args = getGeneratingSpawnActionArgs(getResourceArtifact(resource));
assertPrimaryResourceDirs(ImmutableList.of("java/android/res"), args);
}
// resource_files may reference files exported from another package; the primary resource
// dir is then rooted in that package.
@Test
public void testResourcesInOtherPackage_exported() throws Exception {
scratch.file("java/android/BUILD",
"android_library(name = 'r',",
"                manifest = 'AndroidManifest.xml',",
"                resource_files = ['//java/other:res/values/strings.xml'],",
"                )");
scratch.file("java/other/BUILD",
"exports_files(['res/values/strings.xml'])");
ConfiguredTarget resource = getConfiguredTarget("//java/android:r");
List<String> args = getGeneratingSpawnActionArgs(getResourceArtifact(resource));
assertPrimaryResourceDirs(ImmutableList.of("java/other/res"), args);
assertNoEvents();
}
// resource_files may reference a filegroup in another package; resources resolve to the
// filegroup's package.
@Test
public void testResourcesInOtherPackage_filegroup() throws Exception {
scratch.file("java/android/BUILD",
"android_library(name = 'r',",
"                manifest = 'AndroidManifest.xml',",
"                resource_files = ['//java/other:fg'],",
"                )");
scratch.file("java/other/BUILD",
"filegroup(name = 'fg',",
"          srcs = ['res/values/strings.xml'],",
")");
ConfiguredTarget resource = getConfiguredTarget("//java/android:r");
List<String> args = getGeneratingSpawnActionArgs(getResourceArtifact(resource));
assertPrimaryResourceDirs(ImmutableList.of("java/other/res"), args);
assertNoEvents();
}
// Regression test for b/11924769: a local filegroup whose srcs point at files exported
// from another package resolves resources to that other package's res/ root.
@Test
public void testResourcesInOtherPackage_filegroupWithExternalSources() throws Exception {
scratch.file("java/android/BUILD",
"android_library(name = 'r',",
"                manifest = 'AndroidManifest.xml',",
"                resource_files = [':fg'],",
"                )",
"filegroup(name = 'fg',",
"          srcs = ['//java/other:res/values/strings.xml'])");
scratch.file("java/other/BUILD",
"exports_files(['res/values/strings.xml'])");
ConfiguredTarget resource = getConfiguredTarget("//java/android:r");
List<String> args = getGeneratingSpawnActionArgs(getResourceArtifact(resource));
assertPrimaryResourceDirs(ImmutableList.of("java/other/res"), args);
assertNoEvents();
}
// Regression test for b/11924769: resources reached through a filegroup that itself
// wraps another package's filegroup still resolve to the originating package.
@Test
public void testResourcesInOtherPackage_doubleFilegroup() throws Exception {
scratch.file("java/android/BUILD",
"android_library(name = 'r',",
"                manifest = 'AndroidManifest.xml',",
"                resource_files = [':fg'],",
"                )",
"filegroup(name = 'fg',",
"          srcs = ['//java/other:fg'])");
scratch.file("java/other/BUILD",
"filegroup(name = 'fg',",
"          srcs = ['res/values/strings.xml'],",
")");
ConfiguredTarget resource = getConfiguredTarget("//java/android:r");
List<String> args = getGeneratingSpawnActionArgs(getResourceArtifact(resource));
assertPrimaryResourceDirs(ImmutableList.of("java/other/res"), args);
assertNoEvents();
}
// Declaring resource_files without a manifest is an analysis error.
@Test
public void testManifestMissingFails() throws Exception {
checkError("java/android", "r",
"is required when resource_files or assets are defined.",
"filegroup(name = 'b')",
"android_library(name = 'r',",
"                resource_files = [':b'],",
"                )");
}
// A resource file directly under the resource root (not inside a recognized type
// directory such as values/, raw/, ...) is rejected.
@Test
public void testResourcesDoesNotMatchDirectoryLayout_BadFile() throws Exception {
checkError("java/android", "r",
"'java/android/res/somefile.xml' is not in the expected resource directory structure of"
+ " <resource directory>/{"
+ Joiner.on(',').join(AndroidResources.RESOURCE_DIRECTORY_TYPES) + "}",
"android_library(name = 'r',",
"                manifest = 'AndroidManifest.xml',",
"                resource_files = ['res/somefile.xml', 'r/t/f/m/raw/fold']",
"                )");
}
// A resource file under an unrecognized subdirectory name (res/other/) is rejected.
@Test
public void testResourcesDoesNotMatchDirectoryLayout_BadDirectory() throws Exception {
checkError("java/android", "r",
"'java/android/res/other/somefile.xml' is not in the expected resource directory structure"
+ " of <resource directory>/{"
+ Joiner.on(',').join(AndroidResources.RESOURCE_DIRECTORY_TYPES) + "}",
"android_library(name = 'r',",
"                manifest = 'AndroidManifest.xml',",
"                resource_files = ['res/other/somefile.xml', 'r/t/f/m/raw/fold']",
"                )");
}
// All resource_files must share one common resource directory; mixed roots are an error.
@Test
public void testResourcesNotUnderCommonDirectoryFails() throws Exception {
checkError("java/android", "r",
"'java/android/r/t/f/m/raw/fold' (generated by '//java/android:r/t/f/m/raw/fold') is not"
+ " in the same directory 'res' (derived from java/android/res/raw/speed). All"
+ " resources must share a common directory.",
"android_library(name = 'r',",
"                manifest = 'AndroidManifest.xml',",
"                resource_files = ['res/raw/speed', 'r/t/f/m/raw/fold']",
"                )");
}
// assets_dir without assets is an error — both must be set or both empty.
@Test
public void testAssetsDirAndNoAssetsFails() throws Exception {
checkError("cpp/android", "r",
"'assets' and 'assets_dir' should be either both empty or both non-empty",
"android_library(name = 'r',",
"                manifest = 'AndroidManifest.xml',",
"                assets_dir = 'assets',",
"                )");
}
// Every asset must live beneath the declared assets_dir.
@Test
public void testAssetsNotUnderAssetsDirFails() throws Exception {
checkError("java/android", "r",
"'java/android/r/t/f/m' (generated by '//java/android:r/t/f/m') is not beneath 'assets'",
"android_library(name = 'r',",
"                manifest = 'AndroidManifest.xml',",
"                assets_dir = 'assets',",
"                assets = ['assets/valuable', 'r/t/f/m']",
"                )");
}
// assets without assets_dir is an error — both must be set or both empty.
@Test
public void testAssetsAndNoAssetsDirFails() throws Exception {
scratch.file("java/android/assets/values/strings.xml",
"<resources><string name = 'hello'>Hello Android!</string></resources>");
checkError("java/android", "r",
"'assets' and 'assets_dir' should be either both empty or both non-empty",
"android_library(name = 'r',",
"                manifest = 'AndroidManifest.xml',",
"                assets = glob(['assets/**']),",
"                )");
}
// The generated R source jar is rooted in the bin directory of the target configuration.
@Test
public void testFileLocation() throws Exception {
scratch.file("java/android/BUILD",
"android_library(name = 'r',",
"                manifest = 'AndroidManifest.xml',",
"                )");
ConfiguredTarget foo = getConfiguredTarget("//java/android:r");
assertThat(
ActionsTestUtil.getFirstArtifactEndingWith(getFilesToBuild(foo), "r.srcjar").getRoot())
.isEqualTo(getTargetConfiguration().getBinDirectory(RepositoryName.MAIN));
}
// regression test for #3294893: a BUILD file outside any java/javatests root produces a
// helpful error (suggesting 'custom_package') instead of throwing.
@Test
public void testNoJavaPathFoundDoesNotThrow() throws Exception {
checkError("third_party/java_src/android/app", "r",
"The location of your BUILD file determines the Java package used for Android resource "
+ "processing. A directory named \"java\" or \"javatests\" will be used as your Java "
+ "source root and the path of your BUILD file relative to the Java source root will "
+ "be used as the package for Android resource processing. The Java source root could "
+ "not be determined for \"third_party/java_src/android/app\". Move your BUILD file "
+ "under a java or javatests directory, or set the 'custom_package' attribute.",
"licenses(['notice'])",
"android_library(name = 'r',",
"                manifest = 'AndroidManifest.xml',",
"                )");
}
// custom_package is passed to the resource processor as --packageForR.
@Test
public void testWithRenameManifestPackage() throws Exception {
scratch.file("a/r/BUILD",
"android_library(name = 'r',",
"                srcs = ['Foo.java'],",
"                custom_package = 'com.google.android.bar',",
"                manifest = 'AndroidManifest.xml',",
"                resource_files = ['res/values/strings.xml'],",
"                )");
ConfiguredTarget r = getConfiguredTarget("//a/r:r");
assertNoEvents();
List<String> args = getGeneratingSpawnActionArgs(getResourceArtifact(r));
assertContainsSublist(args,
ImmutableList.of("--packageForR", "com.google.android.bar"));
}
// Resource processing runs with --debug by default, and without it under
// --compilation_mode=opt.
@Test
public void testDebugConfiguration() throws Exception {
scratch.file("java/apps/android/BUILD",
"android_library(name = 'r',",
"                manifest = 'AndroidManifest.xml',",
"                )");
checkDebugMode("//java/apps/android:r", true);
useConfiguration("--compilation_mode=opt");
checkDebugMode("//java/apps/android:r", false);
}
// A neverlink library does not contribute resources through AndroidResourcesInfo: its
// subtree (lib_neverlink -> bar) is pruned from the consumer's transitive resources, and
// the neverlink target itself exports empty resource sets.
@Test
public void testNeverlinkResources_AndroidResourcesInfo() throws Exception {
scratch.file("java/apps/android/BUILD",
"android_library(name = 'foo',",
"                manifest = 'AndroidManifest.xml',",
"                deps = [':lib', ':lib_neverlink'])",
"android_library(name = 'lib_neverlink',",
"                neverlink = 1,",
"                manifest = 'AndroidManifest.xml',",
"                deps = [':bar'])",
"android_library(name = 'lib',",
"                manifest = 'AndroidManifest.xml',",
"                deps = [':bar'])",
"android_library(name = 'bar',",
"                manifest = 'AndroidManifest.xml')");
Function<ValidatedAndroidResources, Label> getLabel = ValidatedAndroidResources::getLabel;
ConfiguredTarget foo = getConfiguredTarget("//java/apps/android:foo");
// foo sees lib and bar transitively (bar via lib, not via the neverlink path).
assertThat(
Iterables.transform(
foo.get(AndroidResourcesInfo.PROVIDER).getTransitiveAndroidResources(), getLabel))
.containsExactly(
Label.parseAbsolute("//java/apps/android:lib", ImmutableMap.of()),
Label.parseAbsolute("//java/apps/android:bar", ImmutableMap.of()));
assertThat(
Iterables.transform(
foo.get(AndroidResourcesInfo.PROVIDER).getDirectAndroidResources(), getLabel))
.containsExactly(Label.parseAbsolute("//java/apps/android:foo", ImmutableMap.of()));
ConfiguredTarget lib = getConfiguredTarget("//java/apps/android:lib");
assertThat(
Iterables.transform(
lib.get(AndroidResourcesInfo.PROVIDER).getTransitiveAndroidResources(), getLabel))
.containsExactly(Label.parseAbsolute("//java/apps/android:bar", ImmutableMap.of()));
assertThat(
Iterables.transform(
lib.get(AndroidResourcesInfo.PROVIDER).getDirectAndroidResources(), getLabel))
.containsExactly(Label.parseAbsolute("//java/apps/android:lib", ImmutableMap.of()));
// The neverlink library itself advertises no resources at all.
ConfiguredTarget libNeverlink = getConfiguredTarget("//java/apps/android:lib_neverlink");
assertThat(libNeverlink.get(AndroidResourcesInfo.PROVIDER).getTransitiveAndroidResources())
.isEmpty();
assertThat(libNeverlink.get(AndroidResourcesInfo.PROVIDER).getDirectAndroidResources())
.isEmpty();
}
// A neverlink export's resource jar is on the compile-time classpath but excluded from
// the runtime classpath; a normal export's resource jar is on both.
@Test
public void testNeverlinkResources_compileAndRuntimeJars() throws Exception {
scratch.file("java/apps/android/BUILD",
"android_library(name = 'foo',",
"                manifest = 'AndroidManifest.xml',",
"                exports = [':lib', ':lib_neverlink'],)",
"android_library(name = 'lib_neverlink',",
"                neverlink = 1,",
"                manifest = 'AndroidManifest.xml',)",
"android_library(name = 'lib',",
"                manifest = 'AndroidManifest.xml',)");
ConfiguredTarget foo = getConfiguredTarget("//java/apps/android:foo");
ConfiguredTarget lib = getConfiguredTarget("//java/apps/android:lib");
ConfiguredTarget libNeverlink = getConfiguredTarget("//java/apps/android:lib_neverlink");
NestedSet<Artifact> neverLinkFilesToBuild = getFilesToBuild(libNeverlink);
NestedSet<Artifact> libFilesToBuild = getFilesToBuild(lib);
JavaCompilationArgsProvider argsProvider =
JavaInfo.getProvider(JavaCompilationArgsProvider.class, foo);
// Both resource jars are available at compile time...
assertThat(argsProvider.getDirectCompileTimeJars())
.contains(
ActionsTestUtil.getFirstArtifactEndingWith(
actionsTestUtil().artifactClosureOf(neverLinkFilesToBuild),
"lib_neverlink_resources.jar"));
assertThat(argsProvider.getDirectCompileTimeJars())
.contains(
ActionsTestUtil.getFirstArtifactEndingWith(
actionsTestUtil().artifactClosureOf(libFilesToBuild), "lib_resources.jar"));
// ...but only the non-neverlink jar makes it into the runtime classpath.
assertThat(argsProvider.getRuntimeJars())
.doesNotContain(
ActionsTestUtil.getFirstArtifactEndingWith(
actionsTestUtil().artifactClosureOf(neverLinkFilesToBuild),
"lib_neverlink_resources.jar"));
assertThat(argsProvider.getRuntimeJars())
.contains(
ActionsTestUtil.getFirstArtifactEndingWith(
actionsTestUtil().artifactClosureOf(libFilesToBuild), "lib_resources.jar"));
}
// Resource processing for android_library is decomposed into separate parse, merge, and
// validate actions (identified by mnemonic), rather than one monolithic action.
@Test
public void testResourceMergeAndProcessParallel() throws Exception {
// Test that for android_library, we can divide the resource processing action into
// smaller actions.
scratch.file(
"java/android/app/foo/BUILD",
"android_library(name = 'r',",
"                manifest = 'AndroidManifest.xml',",
"                resource_files = glob(['res/**']),",
"                )");
scratch.file(
"java/android/app/foo/res/values/strings.xml",
"<resources>",
"<string name='hello'>Aloha!</string>",
"<string name='goodbye'>Aloha!</string>",
"</resources>");
ConfiguredTarget target = getConfiguredTarget("//java/android/app/foo:r");
NestedSet<Artifact> filesToBuild = getFilesToBuild(target);
Set<Artifact> artifacts = actionsTestUtil().artifactClosureOf(filesToBuild);
ValidatedAndroidResources resources =
Iterables.getOnlyElement(
target.get(AndroidResourcesInfo.PROVIDER).getDirectAndroidResources());
// Locate each stage's action by the output artifact it generates.
SpawnAction resourceParserAction =
(SpawnAction)
actionsTestUtil()
.getActionForArtifactEndingWith(artifacts,
"/" + resources.getSymbols().getFilename());
SpawnAction resourceClassJarAction =
(SpawnAction)
actionsTestUtil()
.getActionForArtifactEndingWith(artifacts,
"/" + resources.getJavaClassJar().getFilename());
SpawnAction resourceSrcJarAction =
(SpawnAction)
actionsTestUtil()
.getActionForArtifactEndingWith(artifacts,
"/" + resources.getJavaSourceJar().getFilename());
assertThat(resourceParserAction.getMnemonic()).isEqualTo("AndroidResourceParser");
assertThat(resourceClassJarAction.getMnemonic()).isEqualTo("AndroidResourceMerger");
assertThat(resourceSrcJarAction.getMnemonic()).isEqualTo("AndroidResourceValidator");
// Validator also generates an R.txt.
assertThat(resourceSrcJarAction.getOutputs()).contains(resources.getRTxt());
}
// Asserts that the resource action for {@code target}'s r.srcjar does (or does not,
// per {@code isDebug}) carry the --debug flag on its command line.
private void checkDebugMode(String target, boolean isDebug) throws Exception {
ConfiguredTarget foo = getConfiguredTarget(target);
SpawnAction action = (SpawnAction) actionsTestUtil().getActionForArtifactEndingWith(
getFilesToBuild(foo), "r.srcjar");
assertThat(ImmutableList.copyOf(paramFileArgsOrActionArgs(action)).contains("--debug"))
.isEqualTo(isDebug);
}
// The generated AndroidManifest.xml package is derived from the java root-relative path,
// overridden by custom_package, and prefixed for third_party locations.
@Test
public void testGeneratedManifestPackage() throws Exception {
scratch.file("java/android/BUILD",
"android_library(name = 'l',",
"                srcs = ['foo.java'])",
"android_library(name = 'l2',",
"                custom_package = 'foo',",
"                srcs = ['foo.java'])");
scratch.file("third_party/android/BUILD",
"licenses(['notice'])",
"android_library(name = 'l',",
"                srcs = ['foo.java'])");
// Default: package derived from path under the java root.
ConfiguredTarget target = getConfiguredTarget("//java/android:l");
Artifact manifest = getBinArtifact("_generated/l/AndroidManifest.xml", target);
FileWriteAction action = (FileWriteAction) getGeneratingAction(manifest);
assertThat(action.getFileContents()).contains("package=\"android\"");
// custom_package wins over the derived package.
target = getConfiguredTarget("//java/android:l2");
manifest = getBinArtifact("_generated/l2/AndroidManifest.xml", target);
action = (FileWriteAction) getGeneratingAction(manifest);
assertThat(action.getFileContents()).contains("package=\"foo\"");
// third_party targets get a dotted path-based package.
target = getConfiguredTarget("//third_party/android:l");
manifest = getBinArtifact("_generated/l/AndroidManifest.xml", target);
action = (FileWriteAction) getGeneratingAction(manifest);
assertThat(action.getFileContents()).contains("package=\"third_party.android\"");
}
// idl_srcs produced by a genrule are supported: the aidl invocation gets import roots
// for both the source tree (-Ijava) and genfiles (-I<genfiles>/java).
@Test
public void testGeneratedIdlSrcs() throws Exception {
scratch.file("java/android/BUILD",
"genrule(name = 'idl',",
"        outs = ['MyInterface.aidl'],",
"        cmd = 'touch $@')",
"android_library(name = 'lib',",
"                idl_srcs = [':idl'],",
"                idl_parcelables = ['MyParcelable.aidl'])");
ConfiguredTarget target = getConfiguredTarget("//java/android:lib");
PathFragment genfilesJavaPath =
getTargetConfiguration()
.getGenfilesDirectory(RepositoryName.MAIN)
.getExecPath()
.getRelative("java");
SpawnAction action = (SpawnAction) actionsTestUtil().getActionForArtifactEndingWith(
actionsTestUtil().artifactClosureOf(getFilesToBuild(target)), "MyInterface.java");
assertThat(action.getArguments())
.containsAllOf("-Ijava", "-I" + genfilesJavaPath.getPathString());
}
// Verifies that two android_library rules may list the same .aidl file in idl_srcs without
// producing an analysis error (no assertions needed — analysis failure would throw).
@Test
public void testMultipleLibsSameIdls() throws Exception {
  scratch.file("java/android/BUILD",
      "android_library(name = 'idl1',",
      "    idl_srcs = ['MyInterface.aidl'])",
      "android_library(name = 'idl2',",
      "    idl_srcs = ['MyInterface.aidl'])");
  getConfiguredTarget("//java/android:idl1");
  getConfiguredTarget("//java/android:idl2");
}
// Verifies AndroidIdeInfoProvider for a library rooted under java/: the source manifest and
// the generated (processed) manifest are both surfaced to the IDE provider.
@Test
public void testIdeInfoProvider() throws Exception {
  scratch.file("java/android/BUILD",
      "genrule(name='genrule', srcs=[], outs=['assets/genrule.so'], cmd='')",
      "android_library(name = 'r',",
      "    manifest = 'AndroidManifest.xml',",
      "    idl_srcs = [ 'MyInterface.aidl' ],",
      "    resource_files = glob(['res/**']),",
      "    assets_dir = 'assets',",
      "    assets = glob(['assets/**']) + [':genrule']",
      "    )");
  scratch.file("java/android/res/values/strings.xml",
      "<resources><string name = 'hello'>Hello Android!</string></resources>");
  scratch.file("java/android/assets/values/orc.txt",
      "Nabu nabu!");
  ConfiguredTarget target = getConfiguredTarget("//java/android:r");
  final AndroidIdeInfoProvider provider = target.get(AndroidIdeInfoProvider.PROVIDER);
  Set<Artifact> artifactClosure = actionsTestUtil().artifactClosureOf(getFilesToBuild(target));
  assertThat(provider.getManifest())
      .isEqualTo(
          ActionsTestUtil.getFirstArtifactEndingWith(
              artifactClosure, "java/android/AndroidManifest.xml"));
  ValidatedAndroidResources resources =
      getOnlyElement(
          getConfiguredTarget("//java/android:r")
              .get(AndroidResourcesInfo.PROVIDER)
              .getDirectAndroidResources());
  assertThat(provider.getGeneratedManifest()).isEqualTo(resources.getManifest());
}
// Same as testIdeInfoProvider, but for a package whose path has no top-level "java" root,
// exercising manifest lookup relative to a deep non-standard source root.
@Test
public void testIdeInfoProviderOutsideJavaRoot() throws Exception {
  String rootPath = "research/handwriting/java/com/google/research/handwriting/";
  scratch.file(rootPath + "BUILD",
      "genrule(name='genrule', srcs=[], outs=['assets/genrule.so'], cmd='')",
      "android_library(name = 'r',",
      "    manifest = 'AndroidManifest.xml',",
      "    idl_srcs = [ 'MyInterface.aidl' ],",
      "    resource_files = glob(['res/**']),",
      "    assets_dir = 'assets',",
      "    assets = glob(['assets/**']) + [':genrule']",
      "    )");
  scratch.file(rootPath + "res/values/strings.xml",
      "<resources><string name = 'hello'>Hello Android!</string></resources>");
  scratch.file(rootPath + "/assets/values/orc.txt",
      "Nabu nabu!");
  ConfiguredTarget target = getConfiguredTarget(
      "//research/handwriting/java/com/google/research/handwriting:r");
  final AndroidIdeInfoProvider provider = target.get(AndroidIdeInfoProvider.PROVIDER);
  Set<Artifact> artifactClosure = actionsTestUtil().artifactClosureOf(getFilesToBuild(target));
  assertThat(provider.getManifest())
      .isEqualTo(
          ActionsTestUtil.getFirstArtifactEndingWith(
              artifactClosure, "handwriting/AndroidManifest.xml"));
  ValidatedAndroidResources resources =
      getOnlyElement(
          getConfiguredTarget("//research/handwriting/java/com/google/research/handwriting:r")
              .get(AndroidResourcesInfo.PROVIDER)
              .getDirectAndroidResources());
  assertThat(provider.getGeneratedManifest()).isEqualTo(resources.getManifest());
}
// Same as testIdeInfoProvider, but with idl_srcs produced by a genrule rather than checked in,
// verifying the IDE provider still reports both manifests correctly.
@Test
public void testIdeInfoProviderGeneratedIdl() throws Exception {
  scratch.file("java/android/BUILD",
      "genrule(name='genrule', srcs=[], outs=['assets/genrule.so'], cmd='')",
      "genrule(name = 'idl',",
      "    outs = ['MyGeneratedInterface.aidl'],",
      "    cmd = 'touch $@')",
      "android_library(name = 'r',",
      "    manifest = 'AndroidManifest.xml',",
      "    idl_srcs = [ ':idl' ],",
      "    idl_parcelables = [ 'MyInterface.aidl' ],",
      "    resource_files = glob(['res/**']),",
      "    assets_dir = 'assets',",
      "    assets = glob(['assets/**']) + [':genrule']",
      "    )");
  scratch.file("java/android/res/values/strings.xml",
      "<resources><string name = 'hello'>Hello Android!</string></resources>");
  scratch.file("java/android/assets/values/orc.txt",
      "Nabu nabu!");
  ConfiguredTarget target = getConfiguredTarget("//java/android:r");
  final AndroidIdeInfoProvider provider = target.get(AndroidIdeInfoProvider.PROVIDER);
  Set<Artifact> artifactClosure = actionsTestUtil().artifactClosureOf(getFilesToBuild(target));
  assertThat(provider.getManifest())
      .isEqualTo(
          ActionsTestUtil.getFirstArtifactEndingWith(
              artifactClosure, "java/android/AndroidManifest.xml"));
  ValidatedAndroidResources resources =
      getOnlyElement(
          getConfiguredTarget("//java/android:r")
              .get(AndroidResourcesInfo.PROVIDER)
              .getDirectAndroidResources());
  assertThat(provider.getGeneratedManifest()).isEqualTo(resources.getManifest());
}
// Regression-style smoke test: an android_library whose srcs filegroup mixes .java and
// .xmb message files must analyze without crashing.
@Test
public void testAndroidLibraryWithMessagesDoNotCrash() throws Exception {
  scratch.file("java/com/google/atest/BUILD",
      "filegroup(name = 'sources',",
      "    srcs = ['source.java', 'message.xmb'])",
      "android_library(name = 'alib',",
      "    srcs = [':sources'])");
  getConfiguredTarget("//java/com/google/atest:alib");
}
// Verifies that a direct dependency's resource and asset directories are passed to the
// resource-processing action as direct data (--directData) alongside the primary dirs.
@Test
public void testMultipleDirectDependentResourceDirectories() throws Exception {
  scratch.file(
      "java/android/resources/d1/BUILD",
      "android_library(name = 'd1',",
      "    manifest = 'AndroidManifest.xml',",
      "    resource_files = ['d1-res/values/strings.xml'],",
      "    assets = ['assets-d1/some/random/file'],",
      "    assets_dir = 'assets-d1',",
      "    deps = ['//java/android/resources/d2:d2'])");
  scratch.file(
      "java/android/resources/d2/BUILD",
      "android_library(name = 'd2',",
      "    manifest = 'AndroidManifest.xml',",
      "    assets = ['assets-d2/some/random/file'],",
      "    assets_dir = 'assets-d2',",
      "    resource_files = ['d2-res/values/strings.xml'],",
      "    )");
  ConfiguredTarget resource = getConfiguredTarget("//java/android/resources/d1:d1");
  List<String> args = getGeneratingSpawnActionArgs(getResourceArtifact(resource));
  assertPrimaryResourceDirs(ImmutableList.of("java/android/resources/d1/d1-res"), args);
  assertThat(getDirectDependentResourceDirs(args)).contains("java/android/resources/d2/d2-res");
  List<String> assetArgs = getGeneratingSpawnActionArgs(getDecoupledAssetArtifact(resource));
  assertThat(getDependentAssetDirs("--directData", assetArgs))
      .contains("java/android/resources/d2/assets-d2");
  assertNoEvents();
}
// Verifies the d1 -> d2 -> d3 chain: d2's dirs appear as direct data (--directData) while
// d3's dirs appear as transitive data (--data) in the resource and asset actions.
@Test
public void testTransitiveDependentResourceDirectories() throws Exception {
  scratch.file(
      "java/android/resources/d1/BUILD",
      "android_library(name = 'd1',",
      "    manifest = 'AndroidManifest.xml',",
      "    resource_files = ['d1-res/values/strings.xml'],",
      "    assets = ['assets-d1/some/random/file'],",
      "    assets_dir = 'assets-d1',",
      "    deps = ['//java/android/resources/d2:d2'])");
  scratch.file(
      "java/android/resources/d2/BUILD",
      "android_library(name = 'd2',",
      "    manifest = 'AndroidManifest.xml',",
      "    assets = ['assets-d2/some/random/file'],",
      "    assets_dir = 'assets-d2',",
      "    resource_files = ['d2-res/values/strings.xml'],",
      "    deps = ['//java/android/resources/d3:d3'],",
      "    )");
  scratch.file(
      "java/android/resources/d3/BUILD",
      "android_library(name = 'd3',",
      "    manifest = 'AndroidManifest.xml',",
      "    assets = ['assets-d3/some/random/file'],",
      "    assets_dir = 'assets-d3',",
      "    resource_files = ['d3-res/values/strings.xml'],",
      "    )");
  ConfiguredTarget resource = getConfiguredTarget("//java/android/resources/d1:d1");
  List<String> args = getGeneratingSpawnActionArgs(getResourceArtifact(resource));
  assertPrimaryResourceDirs(ImmutableList.of("java/android/resources/d1/d1-res"), args);
  Truth.assertThat(getDirectDependentResourceDirs(args))
      .contains("java/android/resources/d2/d2-res");
  Truth.assertThat(getTransitiveDependentResourceDirs(args))
      .contains("java/android/resources/d3/d3-res");
  List<String> assetArgs = getGeneratingSpawnActionArgs(getDecoupledAssetArtifact(resource));
  Truth.assertThat(getDependentAssetDirs("--directData", assetArgs))
      .contains("java/android/resources/d2/assets-d2");
  Truth.assertThat(getDependentAssetDirs("--data", assetArgs))
      .contains("java/android/resources/d3/assets-d3");
  assertNoEvents();
}
// Verifies that per-target javacopts are forwarded to the javac command line.
@Test
public void testCustomJavacopts() throws Exception {
  scratch.file("java/android/BUILD",
      "android_library(name = 'a',",
      "    srcs = ['A.java'],",
      "    javacopts = ['-g:lines,source'],",
      "    )");
  SpawnAction javacAction = (SpawnAction) getGeneratingActionForLabel("//java/android:liba.jar");
  assertThat(getJavacArguments(javacAction)).contains("-g:lines,source");
}
// Regression test for b/23079127
// A dep built only from a .srcjar must still contribute its header jar (libb-hjar.jar) as the
// sole direct classpath entry of its reverse dependency — strict-deps must not leak ':c'.
@Test
public void testSrcjarStrictDeps() throws Exception {
  scratch.file("java/strict/BUILD",
      "android_library(name='a', srcs=['A.java'], deps=[':b'])",
      "android_library(name='b', srcs=['b.srcjar'], deps=[':c'])",
      "android_library(name='c', srcs=['C.java'])");
  SpawnAction javacAction = (SpawnAction) getGeneratingActionForLabel("//java/strict:liba.jar");
  assertThat(prettyArtifactNames(getInputs(javacAction, getDirectJars(javacAction))))
      .containsExactly("java/strict/libb-hjar.jar");
}
@Test
public void testDisallowPrecompiledJars() throws Exception {
checkError("java/precompiled", "library",
// messages:
"does not produce any android_library srcs files (expected .java or .srcjar)",
// build file:
"android_library(name = 'library',",
" srcs = [':jar'])",
"filegroup(name = 'jar',",
" srcs = ['lib.jar'])");
}
// With --java_header_compilation, compilation of ':a' must depend on the header jar's jdeps
// (libb-hjar.jdeps) instead of the full compile's jdeps (libb.jdeps).
@Test
public void hjarPredecessors() throws Exception {
  scratch.file(
      "java/test/BUILD",
      "android_library(name = 'a', srcs = ['A.java'], deps = [':b'])",
      "android_library(name = 'b', srcs = ['B.java'])");
  useConfiguration("--java_header_compilation");
  Action a = getGeneratingActionForLabel("//java/test:liba.jar");
  List<String> inputs = prettyArtifactNames(a.getInputs());
  assertThat(inputs).doesNotContain("java/test/libb.jdeps");
  assertThat(inputs).contains("java/test/libb-hjar.jdeps");
}
// Verifies AAR provider propagation: a resource-bearing library exposes a provider, a
// neverlink library does not, and the parent sees exactly one transitive AAR (the neverlink
// branch — including its nested dep — is excluded).
@Test
public void resourcesFromRuntimeDepsAreIncluded() throws Exception {
  scratch.file(
      "java/android/BUILD",
      "android_library(name = 'dummyParentLibrary',",
      "    deps = [':dummyLibraryOne', ':dummyLibraryTwo'],",
      "    srcs = ['libraryParent.java'])",
      "",
      "android_library(name = 'dummyLibraryOne',",
      "    exports_manifest = 1,",
      "    manifest = 'AndroidManifest.xml',",
      "    resource_files = ['res/drawable/dummyResource1.png'],",
      "    srcs = ['libraryOne.java'])",
      "",
      "android_library(name = 'dummyLibraryTwo',",
      "    exports_manifest = 1,",
      "    neverlink = 1,",
      "    manifest = 'AndroidManifest.xml',",
      "    resource_files = ['res/drawable/dummyResource2.png'],",
      "    deps = ['dummyLibraryNested'],",
      "    srcs = ['libraryTwo.java'])",
      "",
      "android_library(name = 'dummyLibraryNested',",
      "    exports_manifest = 1,",
      "    manifest = 'AndroidManifest.xml',",
      "    resource_files = ['res/drawable/dummyResource1.png'],",
      "    srcs = ['libraryOne.java'])");
  ConfiguredTarget target = getConfiguredTarget("//java/android:dummyLibraryOne");
  AndroidLibraryAarInfo provider = target.get(AndroidLibraryAarInfo.PROVIDER);
  assertThat(provider).isNotNull();
  // neverlink libraries produce no AAR provider.
  target = getConfiguredTarget("//java/android:dummyLibraryTwo");
  provider = target.get(AndroidLibraryAarInfo.PROVIDER);
  assertThat(provider).isNull();
  target = getConfiguredTarget("//java/android:dummyParentLibrary");
  provider = target.get(AndroidLibraryAarInfo.PROVIDER);
  assertThat(provider).isNotNull();
  assertThat(provider.getTransitiveAars()).hasSize(1);
}
// When the configured android_sdk declares aapt2, libraries generate aapt2 compile and link
// actions; the link action consumes the SDK's android.jar plus the compiled symbols of the
// target and its dep, and produces the aapt2 R.txt and source jar.
@Test
public void aapt2ArtifactGenerationWhenSdkIsDefined() throws Exception {
  scratch.file(
      "sdk/BUILD",
      "android_sdk(",
      "    name = 'sdk',",
      "    aapt = 'aapt',",
      "    aapt2 = 'aapt2',",
      "    adb = 'adb',",
      "    aidl = 'aidl',",
      "    android_jar = 'android.jar',",
      "    apksigner = 'apksigner',",
      "    dx = 'dx',",
      "    framework_aidl = 'framework_aidl',",
      "    main_dex_classes = 'main_dex_classes',",
      "    main_dex_list_creator = 'main_dex_list_creator',",
      "    proguard = 'proguard',",
      "    shrinked_android_jar = 'shrinked_android_jar',",
      "    zipalign = 'zipalign')");
  scratch.file(
      "java/a/BUILD",
      "android_library(",
      "  name = 'a', ",
      "  srcs = ['A.java'],",
      "  deps = [':b'],",
      "  manifest = 'a/AndroidManifest.xml',",
      "  resource_files = [ 'res/values/a.xml' ]",
      ")",
      "android_library(",
      "  name = 'b', ",
      "  srcs = ['B.java'],",
      "  manifest = 'b/AndroidManifest.xml',",
      "  resource_files = [ 'res/values/b.xml' ]",
      ")");
  useConfiguration("--android_sdk=//sdk:sdk");
  ConfiguredTargetAndData a = getConfiguredTargetAndData("//java/a:a");
  ConfiguredTargetAndData b = getConfiguredTargetAndDataDirectPrerequisite(a, "//java/a:b");
  ConfiguredTargetAndData sdk = getConfiguredTargetAndDataDirectPrerequisite(a, "//sdk:sdk");
  SpawnAction compileAction =
      getGeneratingSpawnAction(
          getImplicitOutputArtifact(
              a.getConfiguredTarget(),
              a.getConfiguration(),
              AndroidRuleClasses.ANDROID_COMPILED_SYMBOLS));
  assertThat(compileAction).isNotNull();
  SpawnAction linkAction =
      getGeneratingSpawnAction(
          getImplicitOutputArtifact(
              a.getConfiguredTarget(),
              a.getConfiguration(),
              AndroidRuleClasses.ANDROID_RESOURCES_AAPT2_LIBRARY_APK));
  assertThat(linkAction).isNotNull();
  assertThat(linkAction.getInputs())
      .containsAllOf(
          sdk.getConfiguredTarget().get(AndroidSdkProvider.PROVIDER).getAndroidJar(),
          getImplicitOutputArtifact(
              a.getConfiguredTarget(),
              a.getConfiguration(),
              AndroidRuleClasses.ANDROID_COMPILED_SYMBOLS),
          getImplicitOutputArtifact(
              b.getConfiguredTarget(),
              a.getConfiguration(),
              AndroidRuleClasses.ANDROID_COMPILED_SYMBOLS));
  assertThat(linkAction.getOutputs())
      .containsAllOf(
          getImplicitOutputArtifact(
              a.getConfiguredTarget(),
              a.getConfiguration(),
              AndroidRuleClasses.ANDROID_RESOURCES_AAPT2_R_TXT),
          getImplicitOutputArtifact(
              a.getConfiguredTarget(),
              a.getConfiguration(),
              AndroidRuleClasses.ANDROID_RESOURCES_AAPT2_SOURCE_JAR));
}
// Without an aapt2-capable SDK in the configuration, no aapt2 compile or link actions are
// registered for the library.
@Test
public void aapt2ArtifactGenerationSkippedWhenSdkIsNotDefined() throws Exception {
  scratch.file(
      "java/a/BUILD",
      "android_library(",
      "  name = 'a', ",
      "  srcs = ['A.java'],",
      "  manifest = 'a/AndroidManifest.xml',",
      "  resource_files = [ 'res/values/a.xml' ]",
      ")");
  ConfiguredTarget a = getConfiguredTarget("//java/a:a");
  SpawnAction compileAction =
      getGeneratingSpawnAction(
          getImplicitOutputArtifact(a, AndroidRuleClasses.ANDROID_COMPILED_SYMBOLS));
  assertThat(compileAction).isNull();
  SpawnAction linkAction =
      getGeneratingSpawnAction(
          getImplicitOutputArtifact(a, AndroidRuleClasses.ANDROID_RESOURCES_AAPT2_LIBRARY_APK));
  assertThat(linkAction).isNull();
}
// With enable_data_binding = 1 and an aapt2 SDK, the aapt2 compile action must be passed
// --dataBindingInfoOut.
@Test
public void compileDataBindingOutputWhenDataBindingEnabled() throws Exception {
  scratch.file(
      "sdk/BUILD",
      "android_sdk(",
      "    name = 'sdk',",
      "    aapt = 'aapt',",
      "    aapt2 = 'aapt2',",
      "    adb = 'adb',",
      "    aidl = 'aidl',",
      "    android_jar = 'android.jar',",
      "    apksigner = 'apksigner',",
      "    dx = 'dx',",
      "    framework_aidl = 'framework_aidl',",
      "    main_dex_classes = 'main_dex_classes',",
      "    main_dex_list_creator = 'main_dex_list_creator',",
      "    proguard = 'proguard',",
      "    shrinked_android_jar = 'shrinked_android_jar',",
      "    zipalign = 'zipalign')");
  scratch.file(
      "java/a/BUILD",
      "android_library(",
      "  name = 'a', ",
      "  srcs = ['A.java'],",
      "  enable_data_binding = 1,",
      "  manifest = 'a/AndroidManifest.xml',",
      "  resource_files = [ 'res/values/a.xml' ]",
      ")");
  useConfiguration("--android_sdk=//sdk:sdk");
  ConfiguredTarget a = getConfiguredTarget("//java/a:a");
  SpawnAction compileAction =
      getGeneratingSpawnAction(
          getImplicitOutputArtifact(a, AndroidRuleClasses.ANDROID_COMPILED_SYMBOLS));
  assertThat(compileAction).isNotNull();
  Iterable<String> args = paramFileArgsOrActionArgs(compileAction);
  assertThat(args).contains("--dataBindingInfoOut");
}
// The AAR's manifest must be the processed manifest (path contains "processed_manifest"),
// not the raw source manifest.
@Test
public void testUseManifestFromResourceApk() throws Exception {
  scratch.file(
      "java/a/BUILD",
      "android_library(",
      "  name = 'a', ",
      "  srcs = ['A.java'],",
      "  manifest = 'a/AndroidManifest.xml',",
      "  resource_files = [ 'res/values/a.xml' ]",
      ")");
  ConfiguredTarget target = getConfiguredTarget("//java/a:a");
  AndroidLibraryAarInfo provider = target.get(AndroidLibraryAarInfo.PROVIDER);
  assertThat(provider).isNotNull();
  assertThat(provider
      .getAar()
      .getManifest()
      .getPath()
      .toString()).contains("processed_manifest");
}
// The --experimental_allow_android_library_deps_without_srcs flag must also hold for targets
// built in the host configuration (here: a genrule tool dependency).
@Test
public void testAndroidLibrary_SrcsLessDepsHostConfigurationNoOverride() throws Exception {
  scratch.file(
      "java/srclessdeps/BUILD",
      "android_library(name = 'dep_for_foo',",
      "    srcs = ['a.java'],",
      "    )",
      "android_library(name = 'foo',",
      "    deps = [':dep_for_foo'],",
      "    )",
      "genrule(name = 'some_genrule',",
      "    tools = [':foo'],",
      "    outs = ['some_outs'],",
      "    cmd = '$(location :foo) do_something $@',",
      "    )");
  useConfiguration("--experimental_allow_android_library_deps_without_srcs");
  // genrule builds its tools using the host configuration.
  ConfiguredTarget genruleTarget = getConfiguredTarget("//java/srclessdeps:some_genrule");
  ConfiguredTarget target = getDirectPrerequisite(genruleTarget, "//java/srclessdeps:foo");
  assertThat(
          getConfiguration(target)
              .getFragment(AndroidConfiguration.class)
              .allowSrcsLessAndroidLibraryDeps(getRuleContext(target)))
      .isTrue();
}
// A library's proguard_specs get a validation action; the binary's own spec does not get one
// in the library's artifact closure, and the validation action reads the library spec.
@Test
public void testAndroidLibraryValidatesProguardSpec() throws Exception {
  scratch.file("java/com/google/android/hello/BUILD",
      "android_library(name = 'l2',",
      "    srcs = ['MoreMaps.java'],",
      "    proguard_specs = ['library_spec.cfg'])",
      "android_binary(name = 'b',",
      "    srcs = ['HelloApp.java'],",
      "    manifest = 'AndroidManifest.xml',",
      "    deps = [':l2'],",
      "    proguard_specs = ['proguard-spec.pro'])");
  Set<Artifact> transitiveArtifacts =
      actionsTestUtil()
          .artifactClosureOf(
              getFilesToBuild(getConfiguredTarget("//java/com/google/android/hello:b")));
  Action action =
      actionsTestUtil()
          .getActionForArtifactEndingWith(transitiveArtifacts, "library_spec.cfg_valid");
  assertWithMessage("proguard validate action was spawned for binary target.")
      .that(
          actionsTestUtil()
              .getActionForArtifactEndingWith(transitiveArtifacts, "proguard-spec.pro_valid"))
      .isNull();
  assertWithMessage("Proguard validate action was not spawned.")
      .that(prettyArtifactNames(action.getInputs()))
      .contains("java/com/google/android/hello/library_spec.cfg");
}
// Proguard spec validation actions are produced even when no android_binary consumes the
// library, both on the declaring target and transitively via HIDDEN_TOP_LEVEL outputs.
@Test
public void testAndroidLibraryValidatesProguardSpecWithoutBinary() throws Exception {
  scratch.file("java/com/google/android/hello/BUILD",
      "android_library(name = 'l2',",
      "    srcs = ['MoreMaps.java'],",
      "    proguard_specs = ['library_spec.cfg'])",
      "android_library(name = 'l3',",
      "    srcs = ['MoreMaps.java'],",
      "    deps = [':l2'])");
  Action action =
      actionsTestUtil()
          .getActionForArtifactEndingWith(
              getOutputGroup(
                  getConfiguredTarget("//java/com/google/android/hello:l2"),
                  OutputGroupInfo.HIDDEN_TOP_LEVEL),
              "library_spec.cfg_valid");
  assertWithMessage("Proguard validate action was not spawned.").that(action).isNotNull();
  assertWithMessage("Proguard validate action was spawned without correct input.")
      .that(prettyArtifactNames(action.getInputs()))
      .contains("java/com/google/android/hello/library_spec.cfg");
  Action transitiveAction =
      actionsTestUtil()
          .getActionForArtifactEndingWith(
              getOutputGroup(
                  getConfiguredTarget("//java/com/google/android/hello:l3"),
                  OutputGroupInfo.HIDDEN_TOP_LEVEL),
              "library_spec.cfg_valid");
  assertWithMessage("Proguard validate action was not spawned.")
      .that(transitiveAction)
      .isNotNull();
  assertWithMessage("Proguard validate action was spawned without correct input.")
      .that(prettyArtifactNames(transitiveAction.getInputs()))
      .contains("java/com/google/android/hello/library_spec.cfg");
}
// Exported deps are forwarded: consumers of a pure-exports library (with or without empty
// srcs) compile directly against the exported library's header jar.
@Test
public void testForwardedDeps() throws Exception {
  scratch.file("java/fwdeps/BUILD",
      "android_library(name = 'a', srcs = ['a.java'])",
      "android_library(name = 'b1', exports = [':a'])",
      "android_library(name = 'b2', srcs = [], exports = [':a'])",
      "android_library(name = 'c1', srcs = ['c1.java'], deps = [':b1'])",
      "android_library(name = 'c2', srcs = ['c2.java'], deps = [':b2'])");
  ConfiguredTarget c1Target = getConfiguredTarget("//java/fwdeps:c1");
  ConfiguredTarget c2Target = getConfiguredTarget("//java/fwdeps:c2");
  Iterable<String> c1Jars =
      ActionsTestUtil.baseArtifactNames(
          JavaInfo.getProvider(JavaCompilationInfoProvider.class, c1Target)
              .getCompilationClasspath());
  Iterable<String> c2Jars =
      ActionsTestUtil.baseArtifactNames(
          JavaInfo.getProvider(JavaCompilationInfoProvider.class, c2Target)
              .getCompilationClasspath());
  assertThat(c1Jars).containsExactly("liba-hjar.jar");
  assertThat(c2Jars).containsExactly("liba-hjar.jar");
  assertNoEvents();
}
// Exports do not land on the exporter's own compilation classpath ('b' compiles with an empty
// classpath) but do reach the downstream consumer ('c' sees both 'a' and 'b' compile jars).
@Test
public void testExportsAreIndirectNotDirect() throws Exception {
  scratch.file("java/exports/BUILD",
      "android_library(name = 'a', srcs = ['a.java'])",
      "android_library(name = 'b', srcs = ['b.java'], exports = ['a'])",
      "android_library(name = 'c', srcs = ['c.java'], deps = [':b'])");
  ConfiguredTarget aTarget = getConfiguredTarget("//java/exports:a");
  ConfiguredTarget bTarget = getConfiguredTarget("//java/exports:b");
  ConfiguredTarget cTarget = getConfiguredTarget("//java/exports:c");
  ImmutableList<Artifact> bClasspath =
      ImmutableList.copyOf(
          JavaInfo.getProvider(JavaCompilationInfoProvider.class, bTarget)
              .getCompilationClasspath());
  ImmutableList<Artifact> cClasspath =
      ImmutableList.copyOf(
          JavaInfo.getProvider(JavaCompilationInfoProvider.class, cTarget)
              .getCompilationClasspath());
  assertThat(bClasspath).isEmpty();
  assertThat(cClasspath)
      .containsAllIn(
          JavaInfo.getProvider(JavaCompilationArgsProvider.class, aTarget)
              .getDirectCompileTimeJars());
  assertThat(cClasspath)
      .containsAllIn(
          JavaInfo.getProvider(JavaCompilationArgsProvider.class, bTarget)
              .getDirectCompileTimeJars());
  assertNoEvents();
}
// User-specified javacopts must survive into the final javac command line (i.e. not be
// clobbered by the Android defaults).
@Test
public void testAndroidJavacoptsCanBeOverridden() throws Exception {
  scratch.file("java/android/BUILD",
      "android_library(name = 'a',",
      "    srcs = ['A.java'],",
      "    javacopts = ['-g:lines,source'],",
      "    )");
  SpawnAction javacAction = (SpawnAction) getGeneratingActionForLabel("//java/android:liba.jar");
  String commandLine = Iterables.toString(getJavacArguments(javacAction));
  assertThat(commandLine).contains("-g:lines,source");
}
// A library with local resources packages its processed manifest, R.txt, resources, assets,
// and class jar into the generated .aar.
@Test
public void testAarGeneration_LocalResources() throws Exception {
  scratch.file("java/android/aartest/BUILD",
      "android_library(name = 'aartest',",
      "    deps = ['dep'],",
      "    manifest = 'AndroidManifest.xml',",
      "    resource_files = ['res/values/strings.xml'],",
      "    assets = ['assets/some/random/file'],",
      "    assets_dir = 'assets')",
      "android_library(name = 'dep',",
      "    manifest = 'AndroidManifest.xml',",
      "    resource_files = ['dep/res/values/strings.xml'])");
  ConfiguredTarget target = getConfiguredTarget("//java/android/aartest:aartest");
  Artifact aar = getBinArtifact("aartest.aar", target);
  SpawnAction action = (SpawnAction) actionsTestUtil().getActionForArtifactEndingWith(
      actionsTestUtil().artifactClosureOf(aar), "aartest.aar");
  assertThat(action).isNotNull();
  assertThat(prettyArtifactNames(getNonToolInputs(action)))
      .containsAllOf(
          "java/android/aartest/aartest_processed_manifest/AndroidManifest.xml",
          "java/android/aartest/aartest_symbols/R.txt",
          "java/android/aartest/res/values/strings.xml",
          "java/android/aartest/assets/some/random/file",
          "java/android/aartest/libaartest.jar");
}
// A resource-less (exports-only) library still produces an .aar containing its processed
// manifest, R.txt, and class jar.
@Test
public void testAarGeneration_NoResources() throws Exception {
  scratch.file("java/android/aartest/BUILD",
      "android_library(name = 'aartest',",
      "    exports = ['dep'])",
      "android_library(name = 'dep',",
      "    manifest = 'AndroidManifest.xml',",
      "    resource_files = ['dep/res/values/strings.xml'])");
  ConfiguredTarget target = getConfiguredTarget("//java/android/aartest:aartest");
  Artifact aar = getBinArtifact("aartest.aar", target);
  SpawnAction action = (SpawnAction) actionsTestUtil().getActionForArtifactEndingWith(
      actionsTestUtil().artifactClosureOf(aar), "aartest.aar");
  assertThat(action).isNotNull();
  assertThat(prettyArtifactNames(getNonToolInputs(action)))
      .containsAllOf(
          "java/android/aartest/aartest_processed_manifest/AndroidManifest.xml",
          "java/android/aartest/aartest_symbols/R.txt",
          "java/android/aartest/libaartest.jar");
}
// AndroidLibraryAarInfo for a resource-bearing library reports its own AAR plus the AARs of
// its direct resource-bearing deps as the transitive set.
@Test
public void testAarProvider_localResources() throws Exception {
  scratch.file("java/android/BUILD",
      "android_library(name = 'test',",
      "    inline_constants = 0,",
      "    manifest = 'AndroidManifest.xml',",
      "    resource_files = ['res/values/strings.xml'],",
      "    deps = [':t1', ':t2'])",
      "android_library(name = 't1',",
      "    inline_constants = 0,",
      "    manifest = 'AndroidManifest.xml',",
      "    resource_files = ['res/values/strings.xml'])",
      "android_library(name = 't2',",
      "    inline_constants = 0,",
      "    manifest = 'AndroidManifest.xml',",
      "    resource_files = ['res/values/strings.xml'])");
  ConfiguredTarget target = getConfiguredTarget("//java/android:test");
  ConfiguredTarget t1Target = getConfiguredTarget("//java/android:t1");
  ConfiguredTarget t2Target = getConfiguredTarget("//java/android:t2");
  final AndroidLibraryAarInfo provider = target.get(AndroidLibraryAarInfo.PROVIDER);
  final Aar test =
      Aar.create(
          getBinArtifact("test.aar", target),
          getBinArtifact("test_processed_manifest/AndroidManifest.xml", target));
  final Aar t1 =
      Aar.create(
          getBinArtifact("t1.aar", t1Target),
          getBinArtifact("t1_processed_manifest/AndroidManifest.xml", t1Target));
  final Aar t2 =
      Aar.create(
          getBinArtifact("t2.aar", t2Target),
          getBinArtifact("t2_processed_manifest/AndroidManifest.xml", t2Target));
  assertThat(provider.getAar()).isEqualTo(test);
  assertThat(provider.getTransitiveAars()).containsExactly(test, t1, t2);
}
// A resource-less library has no AAR of its own (getAar() is null) but still forwards the
// exported dep's AAR transitively.
@Test
public void testAarProvider_noResources() throws Exception {
  scratch.file("java/android/BUILD",
      "android_library(name = 'test',",
      "    exports = [':transitive'])",
      "android_library(name = 'transitive',",
      "    manifest = 'AndroidManifest.xml',",
      "    resource_files = ['res/values/strings.xml'])");
  ConfiguredTarget target = getConfiguredTarget("//java/android:test");
  final AndroidLibraryAarInfo provider = target.get(AndroidLibraryAarInfo.PROVIDER);
  ConfiguredTarget transitiveTarget = getConfiguredTarget("//java/android:transitive");
  final Aar transitive =
      Aar.create(
          getBinArtifact("transitive.aar", transitiveTarget),
          getBinArtifact("transitive_processed_manifest/AndroidManifest.xml", transitiveTarget));
  assertThat(provider.getAar()).isNull();
  assertThat(provider.getTransitiveAars()).containsExactly(transitive);
}
// The javac invocation must request a native-header jar via --native_header_output, and the
// same artifact must be exposed through JavaRuleOutputJarsProvider.getNativeHeaders().
@Test
public void nativeHeaderOutputs() throws Exception {
  scratch.file(
      "java/com/google/jni/BUILD", //
      "android_library(",
      "    name = 'jni',",
      "    srcs = ['Foo.java', 'Bar.java'],",
      ")");
  FileConfiguredTarget target = getFileConfiguredTarget("//java/com/google/jni:libjni.jar");
  SpawnAction action = (SpawnAction) getGeneratingAction(target.getArtifact());
  String outputPath = outputPath(action, "java/com/google/jni/libjni-native-header.jar");
  Iterable<String> result = paramFileArgsForAction(action);
  assertThat(Joiner.on(' ').join(result))
      .contains(Joiner.on(' ').join("--native_header_output", outputPath));
  Artifact nativeHeaderOutput =
      JavaInfo.getProvider(
              JavaRuleOutputJarsProvider.class, getConfiguredTarget("//java/com/google/jni"))
          .getNativeHeaders();
  assertThat(nativeHeaderOutput.getExecPathString()).isEqualTo(outputPath);
}
/**
 * Returns the exec path of the output of {@code action} whose path ends with {@code suffix}.
 *
 * <p>Fails (via {@code verifyNotNull}) if no such output exists, including the suffix in the
 * failure message for easier diagnosis.
 */
private static String outputPath(Action action, String suffix) {
  // Removed leftover debug statement that dumped action.getOutputs() to stderr on every call.
  Artifact artifact = ActionsTestUtil.getFirstArtifactEndingWith(action.getOutputs(), suffix);
  return verifyNotNull(artifact, suffix).getExecPath().getPathString();
}
// A Starlark rule that merely re-exports a dep's JavaInfo can feed android_library deps and
// exports; the transitive source-jar output group must include all three libraries' -src jars.
@Test
public void skylarkJavaInfoToAndroidLibraryAttributes() throws Exception {
  scratch.file(
      "foo/extension.bzl",
      "def _impl(ctx):",
      "  dep_params = ctx.attr.dep[JavaInfo]",
      "  return [dep_params]",
      "my_rule = rule(",
      "  _impl,",
      "  attrs = {",
      "    'dep': attr.label(),",
      "  },",
      ")");
  scratch.file(
      "foo/BUILD",
      "load(':extension.bzl', 'my_rule')",
      "android_library(",
      "  name = 'al_bottom_for_deps',",
      "  srcs = ['java/A.java'],",
      ")",
      "android_library(",
      "  name = 'jl_bottom_for_exports',",
      "  srcs = ['java/A2.java'],",
      ")",
      "my_rule(",
      "  name = 'mya',",
      "  dep = ':al_bottom_for_deps',",
      ")",
      "my_rule(",
      "  name = 'myb',",
      "  dep = ':jl_bottom_for_exports',",
      ")",
      "android_library(",
      "  name = 'lib_foo',",
      "  srcs = ['java/B.java'],",
      "  deps = [':mya'],",
      "  exports = [':myb'],",
      ")");
  // Test that all bottom jars are on the runtime classpath of lib_android.
  ConfiguredTarget target = getConfiguredTarget("//foo:lib_foo");
  Collection<Artifact> transitiveSrcJars =
      OutputGroupInfo.get(target).getOutputGroup(JavaSemantics.SOURCE_JARS_OUTPUT_GROUP)
          .toCollection();
  assertThat(ActionsTestUtil.baseArtifactNames(transitiveSrcJars)).containsExactly(
      "libjl_bottom_for_exports-src.jar",
      "libal_bottom_for_deps-src.jar",
      "liblib_foo-src.jar");
}
// Resource class jars (lib_resources.jar, dep_resources.jar) must precede dependency header
// jars on both the direct and full javac classpaths, in order.
@Test
public void testLocalResourcesFirstInJavaCompilationClasspath() throws Exception {
  scratch.file(
      "java/foo/BUILD",
      "android_library(",
      "  name='dep',",
      "  srcs=['dep.java'], ",
      "  resource_files=['res/values/dep.xml'],",
      "  manifest='AndroidManifest.xml')",
      "android_library(",
      "  name='lib',",
      "  srcs=['lib.java'],",
      "  resource_files=['res/values/lib.xml'],",
      "  manifest='AndroidManifest.xml',",
      "  deps=[':dep'])");
  SpawnAction javacAction =
      (SpawnAction)
          getGeneratingAction(getFileConfiguredTarget("//java/foo:liblib.jar").getArtifact());
  assertThat(prettyArtifactNames(getInputs(javacAction, getDirectJars(javacAction))))
      .containsExactly(
          "java/foo/lib_resources.jar", "java/foo/dep_resources.jar", "java/foo/libdep-hjar.jar")
      .inOrder();
  assertThat(prettyArtifactNames(getInputs(javacAction, getClasspath(javacAction))))
      .containsExactly(
          "java/foo/lib_resources.jar", "java/foo/dep_resources.jar", "java/foo/libdep-hjar.jar")
      .inOrder();
}
// linkopts from a cc_library dep must flow through AndroidCcLinkParamsProvider's dynamic-mode
// link params for dynamic libraries.
@Test
public void testAndroidCcLinkParamsProvider() throws Exception {
  scratch.file(
      "java/foo/BUILD",
      "cc_library(",
      "  name='cc_dep',",
      "  srcs=['dep.cc'],",
      "  linkopts = ['-CC_DEP'],",
      ")",
      "android_library(",
      "  name='lib',",
      "  srcs=['lib.java'],",
      "  deps=[':cc_dep'])");
  ConfiguredTarget target = getConfiguredTarget("//java/foo:lib");
  assertThat(
          target
              .get(AndroidCcLinkParamsProvider.PROVIDER)
              .getLinkParams()
              .getDynamicModeParamsForDynamicLibrary()
              .flattenedLinkopts())
      .containsExactly("-CC_DEP")
      .inOrder();
}
}
|
Reformat the build rules in the AndroidLibraryTest.java
RELNOTES: none.
PiperOrigin-RevId: 217405037
|
src/test/java/com/google/devtools/build/lib/rules/android/AndroidLibraryTest.java
|
Reformat the build rules in the AndroidLibraryTest.java
|
|
Java
|
apache-2.0
|
8d4bd84f896a3000951f3af743c0a5d6ed22f45d
| 0
|
CloudifySource/cloudify-widget,CloudifySource/cloudify-widget,CloudifySource/cloudify-widget,CloudifySource/cloudify-widget
|
package beans.scripts;
import java.io.File;
import java.io.IOException;
import javax.inject.Inject;
import models.ServerNode;
import org.apache.commons.exec.CommandLine;
import org.apache.commons.exec.DefaultExecuteResultHandler;
import org.apache.commons.exec.ExecuteException;
import org.apache.commons.exec.ExecuteResultHandler;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang3.StringUtils;
import org.jclouds.compute.ComputeServiceContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import beans.api.ExecutorFactory;
import beans.config.ServerConfig.CloudBootstrapConfiguration;
import play.i18n.Messages;
import server.ApplicationContext;
import server.ProcExecutor;
import server.exceptions.ServerException;
import utils.CloudifyUtils;
import utils.Utils;
public class BasicScriptExecutor implements ScriptExecutor{
// Supplies per-server-node process executors; injected by the DI container.
@Inject
private ExecutorFactory executorFactory;
// Class-scoped SLF4J logger for this executor implementation.
private static Logger logger = LoggerFactory.getLogger( BasicScriptExecutor.class );
// Logs construction so deployments can confirm which ScriptExecutor implementation is active.
public BasicScriptExecutor(){
    logger.info( "---Initializing BasicScriptExecutor---" );
}
/**
* Used for bootstrapping
* @param cmdLine
* @param serverNode
* @param jCloudsContext
* @param cloudFolder
* @param cloudBootstrapConfiguration
* @param isHandlePrivateKey
*/
@Override
public void runBootstrapScript( CommandLine cmdLine, ServerNode serverNode,
ComputeServiceContext jCloudsContext, File cloudFolder,
CloudBootstrapConfiguration cloudBootstrapConfiguration,
boolean isHandlePrivateKey ) {
try{
//Command line for bootstrapping remote cloud.
DefaultExecuteResultHandler resultHandler = new DefaultExecuteResultHandler();
ProcExecutor bootstrapExecutor = executorFactory.getBootstrapExecutor( serverNode );
logger.info( "Executing command line: " + cmdLine );
bootstrapExecutor.execute( cmdLine, ApplicationContext.get().conf().server.environment.getEnvironment(), resultHandler );
logger.info( "waiting for output" );
resultHandler.waitFor();
logger.info( "finished waiting , exit value is [{}]", resultHandler.getExitValue() );
logger.info( "> serverNode ID:", serverNode.getId() );
String cachedOutput = Utils.getCachedOutput( serverNode );
logger.info( "> cachedOutput:", cachedOutput );
String output = Utils.getOrDefault( cachedOutput, "" );
logger.info( "> output:", output );
if ( resultHandler.getException() != null ) {
logger.info( "we have exceptions, checking for known issues" );
if ( output.contains( "found existing management machines" ) ) {
logger.info( "found 'found existing management machines' - issuing cloudify already exists message" );
throw new ServerException( Messages.get( "cloudify.already.exists" ) );
}
logger.info( "Command execution ended with errors: {}", output );
throw new RuntimeException( "Failed to bootstrap cloudify machine: "
+ output, resultHandler.getException() );
}
logger.info( "finished handling errors, extracting IP" );
String publicIp = Utils.extractIpFromBootstrapOutput( output );
if ( StringUtils.isEmpty( publicIp ) ) {
logger.warn( "No public ip address found in bootstrap output. " + output );
throw new RuntimeException( "Bootstrap failed. No IP address found in bootstrap output."
+ output, resultHandler.getException() );
}
logger.info( "ip is [{}], saving to serverNode", publicIp );
if( isHandlePrivateKey ){
String privateKey = CloudifyUtils.getCloudPrivateKey( cloudFolder );
if ( StringUtils.isEmpty( privateKey ) ) {
throw new RuntimeException( "Bootstrap failed. No pem file found in cloud directory." );
}
logger.info( "found PEM string" );
serverNode.setPrivateKey( privateKey );
}
logger.info( "Bootstrap cloud command ended successfully" );
logger.info( "updating server node with new info" );
serverNode.setPublicIP( publicIp );
serverNode.save();
logger.info("server node updated and saved");
}
catch( Exception e ) {
serverNode.errorEvent("Invalid Credentials").save();
throw new RuntimeException("Unable to bootstrap cloud", e);
}
finally {
if( cloudFolder != null && cloudBootstrapConfiguration.removeCloudFolder ) {
FileUtils.deleteQuietly( cloudFolder );
}
if (jCloudsContext != null) {
jCloudsContext.close();
}
serverNode.setStopped(true);
}
}
/**
* used for running install and uninstall of applications
* @param cmdLine
* @param server
*/
@Override
public void runInstallationManagementScript( CommandLine cmdLine, ServerNode server ){
try {
ProcExecutor executor = executorFactory.getDeployExecutor( server );
ExecuteResultHandler resultHandler = executorFactory.getResultHandler(cmdLine.toString());
logger.info( "executing command [{}]", cmdLine );
executor.execute( cmdLine,
ApplicationContext.get().conf().server.environment.getEnvironment(), resultHandler );
logger.info( "The process instanceId: {}", executor.getId() );
}
catch ( ExecuteException e ) {
logger.error( "Failed to execute process. Exit value: " + e.getExitValue(), e );
throw new ServerException( "Failed to execute process. Exit value: " + e.getExitValue(), e );
}
catch ( IOException e ) {
logger.error( "Failed to execute process", e );
throw new ServerException( "Failed to execute process.", e );
}
}
}
|
app/beans/scripts/BasicScriptExecutor.java
|
package beans.scripts;
import java.io.File;
import java.io.IOException;
import javax.inject.Inject;
import models.ServerNode;
import org.apache.commons.exec.CommandLine;
import org.apache.commons.exec.DefaultExecuteResultHandler;
import org.apache.commons.exec.ExecuteException;
import org.apache.commons.exec.ExecuteResultHandler;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang3.StringUtils;
import org.jclouds.compute.ComputeServiceContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import beans.api.ExecutorFactory;
import beans.config.ServerConfig.CloudBootstrapConfiguration;
import play.i18n.Messages;
import server.ApplicationContext;
import server.ProcExecutor;
import server.exceptions.ServerException;
import utils.CloudifyUtils;
import utils.Utils;
public class BasicScriptExecutor implements ScriptExecutor{
// Factory for process executors; field-injected via @Inject.
@Inject
private ExecutorFactory executorFactory;
private static Logger logger = LoggerFactory.getLogger( BasicScriptExecutor.class );
public BasicScriptExecutor(){
logger.info( "---Initializing BasicScriptExecutor---" );
}
/**
* Used for bootstrapping: executes the bootstrap command line synchronously,
* waits for completion, extracts the public IP (and optionally the cloud
* private key) from the cached process output and saves them on the server node.
* On any failure the server node is marked with an error event and a
* RuntimeException is thrown.
* @param cmdLine the bootstrap command to execute
* @param serverNode the server node updated with the bootstrap results
* @param jCloudsContext jclouds context; closed in the finally block when non-null
* @param cloudFolder local cloud configuration folder; deleted quietly afterwards
*                    when cloudBootstrapConfiguration.removeCloudFolder is set
* @param cloudBootstrapConfiguration bootstrap configuration flags
* @param isHandlePrivateKey when true, the generated PEM private key is read from
*                           cloudFolder and stored on the server node
*/
@Override
public void runBootstrapScript( CommandLine cmdLine, ServerNode serverNode,
ComputeServiceContext jCloudsContext, File cloudFolder,
CloudBootstrapConfiguration cloudBootstrapConfiguration,
boolean isHandlePrivateKey ) {
try{
//Command line for bootstrapping remote cloud.
DefaultExecuteResultHandler resultHandler = new DefaultExecuteResultHandler();
ProcExecutor bootstrapExecutor = executorFactory.getBootstrapExecutor( serverNode );
logger.info( "Executing command line: " + cmdLine );
bootstrapExecutor.execute( cmdLine, ApplicationContext.get().conf().server.environment.getEnvironment(), resultHandler );
logger.info( "waiting for output" );
// Block until the bootstrap process terminates.
resultHandler.waitFor();
logger.info( "finished waiting , exit value is [{}]", resultHandler.getExitValue() );
// Process output is cached per server node elsewhere; default to "" when absent.
String output = Utils.getOrDefault( Utils.getCachedOutput( serverNode ), "" );
if ( resultHandler.getException() != null ) {
logger.info( "we have exceptions, checking for known issues" );
// Known benign failure mode: management machines already bootstrapped.
if ( output.contains( "found existing management machines" ) ) {
logger.info( "found 'found existing management machines' - issuing cloudify already exists message" );
throw new ServerException( Messages.get( "cloudify.already.exists" ) );
}
logger.info( "Command execution ended with errors: {}", output );
throw new RuntimeException( "Failed to bootstrap cloudify machine: "
+ output, resultHandler.getException() );
}
logger.info( "finished handling errors, extracting IP" );
String publicIp = Utils.extractIpFromBootstrapOutput( output );
if ( StringUtils.isEmpty( publicIp ) ) {
logger.warn( "No public ip address found in bootstrap output. " + output );
throw new RuntimeException( "Bootstrap failed. No IP address found in bootstrap output."
+ output, resultHandler.getException() );
}
logger.info( "ip is [{}], saving to serverNode", publicIp );
if( isHandlePrivateKey ){
// The bootstrap is expected to leave a PEM file in the cloud folder.
String privateKey = CloudifyUtils.getCloudPrivateKey( cloudFolder );
if ( StringUtils.isEmpty( privateKey ) ) {
throw new RuntimeException( "Bootstrap failed. No pem file found in cloud directory." );
}
logger.info( "found PEM string" );
serverNode.setPrivateKey( privateKey );
}
logger.info( "Bootstrap cloud command ended successfully" );
logger.info( "updating server node with new info" );
serverNode.setPublicIP( publicIp );
serverNode.save();
logger.info("server node updated and saved");
}
catch( Exception e ) {
// Any failure marks the node with an error event before rethrowing.
serverNode.errorEvent("Invalid Credentials").save();
throw new RuntimeException("Unable to bootstrap cloud", e);
}
finally {
// Cleanup runs on both success and failure paths.
if( cloudFolder != null && cloudBootstrapConfiguration.removeCloudFolder ) {
FileUtils.deleteQuietly( cloudFolder );
}
if (jCloudsContext != null) {
jCloudsContext.close();
}
serverNode.setStopped(true);
}
}
/**
* Used for running install and uninstall of applications. The command is
* executed asynchronously; completion is reported to the result handler.
* @param cmdLine the install/uninstall command to execute
* @param server the server node on which the command is run
*/
@Override
public void runInstallationManagementScript( CommandLine cmdLine, ServerNode server ){
try {
ProcExecutor executor = executorFactory.getDeployExecutor( server );
ExecuteResultHandler resultHandler = executorFactory.getResultHandler(cmdLine.toString());
logger.info( "executing command [{}]", cmdLine );
executor.execute( cmdLine,
ApplicationContext.get().conf().server.environment.getEnvironment(), resultHandler );
logger.info( "The process instanceId: {}", executor.getId() );
}
catch ( ExecuteException e ) {
logger.error( "Failed to execute process. Exit value: " + e.getExitValue(), e );
throw new ServerException( "Failed to execute process. Exit value: " + e.getExitValue(), e );
}
catch ( IOException e ) {
logger.error( "Failed to execute process", e );
throw new ServerException( "Failed to execute process.", e );
}
}
}
|
Added additional logs.
|
app/beans/scripts/BasicScriptExecutor.java
|
Added additional logs.
|
|
Java
|
apache-2.0
|
e9127800c4efe8ad9601c7b1152783cd12a55bc2
| 0
|
GerritCodeReview/plugins_hooks
|
// Copyright (C) 2018 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.googlesource.gerrit.plugins.hooks;
import com.google.gerrit.reviewdb.client.Branch;
import com.google.gerrit.reviewdb.client.PatchSet;
import com.google.gerrit.server.IdentifiedUser;
import com.google.gerrit.server.git.CodeReviewCommit;
import com.google.gerrit.server.git.validators.MergeValidationException;
import com.google.gerrit.server.git.validators.MergeValidationListener;
import com.google.gerrit.server.project.ProjectState;
import com.google.inject.Inject;
import com.google.inject.Singleton;
import org.eclipse.jgit.lib.Repository;
@Singleton
public class Submit implements MergeValidationListener {
  /** Hook executed synchronously before a change is merged. */
  private final Hook hook;

  private final HookFactory hookFactory;

  @Inject
  Submit(HookFactory hookFactory) {
    this.hookFactory = hookFactory;
    this.hook = hookFactory.createSync("submitHook", "submit");
  }

  /**
   * Runs the configured "submit" hook for the change being merged and blocks
   * the merge when the hook exits with a non-zero status.
   */
  @Override
  public void onPreMerge(
      Repository repo,
      CodeReviewCommit commit,
      ProjectState destProject,
      Branch.NameKey destBranch,
      PatchSet.Id patchSetId,
      IdentifiedUser caller)
      throws MergeValidationException {
    String project = destProject.getProject().getName();

    HookArgs hookArgs = hookFactory.createArgs();
    hookArgs.add("--change", patchSetId.changeId.get());
    hookArgs.add("--project", project);
    hookArgs.add("--branch", destBranch.get());
    hookArgs.add("--submitter", caller.getNameEmail());
    hookArgs.add("--submitter-username", caller.getUserName().orElse(null));
    hookArgs.add("--patchset", patchSetId.get());
    hookArgs.add("--commit", commit.getId().name());

    HookResult outcome = hook.execute(project, hookArgs);
    boolean hookFailed = outcome != null && outcome.getExitValue() != 0;
    if (hookFailed) {
      throw new MergeValidationException(outcome.toString());
    }
  }
}
|
src/main/java/com/googlesource/gerrit/plugins/hooks/Submit.java
|
// Copyright (C) 2018 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.googlesource.gerrit.plugins.hooks;
import com.google.gerrit.reviewdb.client.Branch;
import com.google.gerrit.reviewdb.client.Change;
import com.google.gerrit.reviewdb.client.PatchSet;
import com.google.gerrit.server.IdentifiedUser;
import com.google.gerrit.server.git.CodeReviewCommit;
import com.google.gerrit.server.git.validators.MergeValidationException;
import com.google.gerrit.server.git.validators.MergeValidationListener;
import com.google.gerrit.server.project.ProjectState;
import com.google.inject.Inject;
import com.google.inject.Singleton;
import org.eclipse.jgit.lib.Repository;
@Singleton
public class Submit implements MergeValidationListener {
  /** Hook executed synchronously before a change is merged. */
  private final Hook hook;

  private final HookFactory hookFactory;

  @Inject
  Submit(HookFactory hookFactory) {
    this.hookFactory = hookFactory;
    this.hook = hookFactory.createSync("submitHook", "submit");
  }

  /**
   * Runs the configured "submit" hook for the change being merged; a non-zero
   * hook exit status vetoes the merge.
   */
  @Override
  public void onPreMerge(
      Repository repo,
      CodeReviewCommit commit,
      ProjectState destProject,
      Branch.NameKey destBranch,
      Change.Id changeId,
      PatchSet.Id patchSetId,
      IdentifiedUser caller)
      throws MergeValidationException {
    String project = destProject.getProject().getName();

    HookArgs hookArgs = hookFactory.createArgs();
    hookArgs.add("--change", changeId.get());
    hookArgs.add("--project", project);
    hookArgs.add("--branch", destBranch.get());
    hookArgs.add("--submitter", caller.getNameEmail());
    hookArgs.add("--submitter-username", caller.getUserName().orElse(null));
    hookArgs.add("--patchset", patchSetId.get());
    hookArgs.add("--commit", commit.getId().name());

    HookResult outcome = hook.execute(project, hookArgs);
    if (outcome == null || outcome.getExitValue() == 0) {
      return; // hook absent or succeeded — allow the merge
    }
    throw new MergeValidationException(outcome.toString());
  }
}
|
Submit: Get the change Id from the patch set Id
Bug: Issue 11905
Change-Id: I9ca4cccfc6b285846aa6743ff40dfe4809360225
|
src/main/java/com/googlesource/gerrit/plugins/hooks/Submit.java
|
Submit: Get the change Id from the patch set Id
|
|
Java
|
apache-2.0
|
e5103af8794536a543ef704b4c539bcc069fa5d8
| 0
|
sameeraroshan/visjs,sameeraroshan/visjs
|
package org.vaadin.visjs.networkDiagram;
import org.vaadin.visjs.networkDiagram.event.NetworkEvent;
import org.vaadin.visjs.networkDiagram.event.node.*;
import org.vaadin.visjs.networkDiagram.listener.GraphListener;
import org.vaadin.visjs.networkDiagram.options.Options;
import org.vaadin.visjs.networkDiagram.util.Constants;
import com.google.gson.Gson;
import com.vaadin.annotations.JavaScript;
import com.vaadin.annotations.StyleSheet;
import com.vaadin.ui.AbstractJavaScriptComponent;
import com.vaadin.ui.JavaScriptFunction;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import java.util.ArrayList;
import java.util.List;
/**
* Created by roshans on 10/10/14.
*/
@JavaScript({"js/vis.min.js", "js/networkDiagram-connector.js"})
@StyleSheet({"css/vis.css", "css/networkDiagram.css"})
public class NetworkDiagram extends AbstractJavaScriptComponent {
    // Per-node listeners; each incoming event carries node ids that are matched
    // against listener.getNode().getId() before the listener is fired.
    private final List<Node.NodeSelectListener> nodeSelectListeners = new ArrayList<>();
    private final List<Node.NodeClickListener> nodeClickListeners = new ArrayList<>();
    private final List<Node.NodeDoubleClickListener> nodeDoubleClickListeners = new ArrayList<>();
    private final List<Node.NodeHoverListener> nodeHoverListeners = new ArrayList<>();
    private final List<Node.NodeBlurListener> nodeBlurListeners = new ArrayList<>();
    private final List<Node.NodeDragStartListener> nodeDragStartListeners = new ArrayList<>();
    private final List<Node.NodeDragEndListener> nodeDragEndListeners = new ArrayList<>();

    // Graph-wide listeners: at most one of each kind.
    private ResizeListener resizeListener;
    private StabilizationStartListener stabilizationStartListener;
    private StabilizedListener stabilizedListener;
    private ViewChangedListener viewChangedListener;
    private ZoomListener zoomListener;

    // Gson is stateless and thread-safe, so a single shared instance is cached
    // instead of one per component (was a non-final instance field).
    private static final Gson gson = new Gson();

    /**
     * Creates the diagram, registers all client-to-server callback functions and
     * initialises the client side with the serialized options.
     *
     * @param options vis.js network options serialized to JSON for the connector
     */
    public NetworkDiagram(Options options) {
        super();
        addFunction(Constants.ON_SELECT, new JavaScriptFunction() {
            @Override
            public void call(final JSONArray properties) throws JSONException {
                SelectEvent event = EventGenerator.getNodeSelectEvent(properties);
                fireNodeSelectEvent(event);
            }
        });
        addFunction(Constants.ON_CLICK, new JavaScriptFunction() {
            @Override
            public void call(final JSONArray properties) throws JSONException {
                ClickEvent event = EventGenerator.getNodeClickEvent(properties);
                fireNodeClickEvent(event);
            }
        });
        addFunction(Constants.ON_DOUBLE_CLICK, new JavaScriptFunction() {
            @Override
            public void call(final JSONArray properties) throws JSONException {
                DoubleClickEvent event = EventGenerator.getNodeDoubleClickEvent(properties);
                fireNodeDoubleClickEvent(event);
            }
        });
        addFunction(Constants.ON_HOVER_NODE, new JavaScriptFunction() {
            @Override
            public void call(final JSONArray properties) throws JSONException {
                HoverEvent event = EventGenerator.getNodeHoverEvent(properties);
                fireNodeHoverEvent(event);
            }
        });
        addFunction(Constants.ON_BLUR_NODE, new JavaScriptFunction() {
            @Override
            public void call(final JSONArray properties) throws JSONException {
                BlurEvent event = EventGenerator.getNodeBlurEvent(properties);
                fireNodeBlurEvent(event);
            }
        });
        addFunction(Constants.ON_DRAG_START, new JavaScriptFunction() {
            @Override
            public void call(final JSONArray properties) throws JSONException {
                DragStartEvent event = EventGenerator.getNodeDragStartEvent(properties);
                fireNodeDragStartEvent(event);
            }
        });
        addFunction(Constants.ON_DRAG_END, new JavaScriptFunction() {
            @Override
            public void call(final JSONArray properties) throws JSONException {
                DragEndEvent event = EventGenerator.getNodeDragEndEvent(properties);
                fireNodeDragEndEvent(event);
            }
        });
        // The graph-wide callbacks below are registered but intentionally inert;
        // the fire* calls are commented out pending event payload support.
        addFunction(Constants.ON_START_STABILIZATION, new JavaScriptFunction() {
            @Override
            public void call(final JSONArray properties) throws JSONException {
                //System.out.println("onStartStabilization" + properties);
                //fireGraphStabilizationStartEvent();
            }
        });
        addFunction(Constants.ON_STABILIZED, new JavaScriptFunction() {
            @Override
            public void call(final JSONArray properties) throws JSONException {
                //System.out.println("onStabilized" + properties);
                //fireGraphStabilizedEvent();
            }
        });
        addFunction(Constants.ON_VIEW_CHANGED, new JavaScriptFunction() {
            @Override
            public void call(final JSONArray properties) throws JSONException {
                //System.out.println("onViewChanged" + properties);
                //fireGraphViewChangedEvent();
            }
        });
        addFunction(Constants.ON_ZOOM, new JavaScriptFunction() {
            @Override
            public void call(final JSONArray properties) throws JSONException {
                //System.out.println("onZoom" + properties);
                //fireGraphZoomEvent();
            }
        });
        addFunction(Constants.ON_RESIZE, new JavaScriptFunction() {
            @Override
            public void call(final JSONArray properties) throws JSONException {
                //System.out.println("onResize" + properties);
                //fireGraphResizeEvent();
            }
        });
        callFunction("init", gson.toJson(options));
    }

    /** Typed accessor for the shared state bean. */
    @Override
    public NetworkDiagramState getState() {
        return (NetworkDiagramState) super.getState();
    }

    /** Pushes new vis.js options to the client. */
    public void updateOptions(Options options) {
        getState().updates++;
        callFunction("updateOptions", gson.toJson(options));
    }

    /** Adds one or more nodes to the client-side diagram. */
    public void addNode(Node... node) {
        getState().updates++;
        callFunction("addNodes", gson.toJson(node));
    }

    /** Adds a list of nodes to the client-side diagram. */
    public void addNodes(List<Node> nodes) {
        getState().updates++;
        callFunction("addNodes", gson.toJson(nodes));
    }

    /** Adds a list of edges to the client-side diagram. */
    public void addEdges(List<Edge> edges) {
        getState().updates++;
        callFunction("addEdges", gson.toJson(edges));
    }

    /** Adds one or more edges to the client-side diagram. */
    public void addEdge(Edge... edges) {
        getState().updates++;
        callFunction("addEdges", gson.toJson(edges));
    }

    /** Removes one or more nodes from the client-side diagram. */
    public void removeNode(Node... node) {
        getState().updates++;
        callFunction("removeNode", gson.toJson(node));
    }

    /** Removes one or more edges from the client-side diagram. */
    public void removeEdge(Edge... edges) {
        getState().updates++;
        callFunction("removeEdge", gson.toJson(edges));
    }

    /** Updates one or more nodes on the client-side diagram. */
    public void updateNode(Node... node) {
        getState().updates++;
        callFunction("updateNode", gson.toJson(node));
    }

    /** Updates one or more edges on the client-side diagram. */
    public void updateEdge(Edge... edges) {
        getState().updates++;
        callFunction("updateEdge", gson.toJson(edges));
    }

    /** @deprecated use {@link #updateEdges(List)} instead. */
    @Deprecated
    public void updateEdge(List<Edge> edges) {
        updateEdges(edges);
    }

    // NOTE(review): unlike the varargs mutators above, the List-based update and
    // clear/destroy methods below do not increment getState().updates — confirm
    // whether that is intentional.
    public void updateEdges(List<Edge> edges) {
        callFunction("updateEdge", gson.toJson(edges));
    }

    /** @deprecated use {@link #updateNodes(List)} instead. */
    @Deprecated
    public void updateNode(List<Node> nodes) {
        updateNodes(nodes);
    }

    public void updateNodes(List<Node> nodes) {
        callFunction("updateNode", gson.toJson(nodes));
    }

    /** Removes all nodes from the client-side diagram. */
    public void clearNodes() {
        callFunction("clearNodes");
    }

    /** Removes all edges from the client-side diagram. */
    public void clearEdges() {
        callFunction("clearEdges");
    }

    /** Destroys the client-side vis.js network instance. */
    public void destroyNetwork() {
        callFunction("destroyNetwork");
    }

    /** Clears both edges and nodes. */
    public void clear() {
        clearEdges();
        clearNodes();
    }

    public void drawConnections() {
        callFunction("drawConnections");
    }

    public void addNodeSelectListener(Node.NodeSelectListener listener) {
        nodeSelectListeners.add(listener);
    }

    public void removeNodeSelectListener(Node.NodeSelectListener listener) {
        nodeSelectListeners.remove(listener);
    }

    public void removeNodeClickListeners(Node.NodeClickListener listener) {
        nodeClickListeners.remove(listener);
    }

    public void addNodeClickListener(Node.NodeClickListener nodeClickListener) {
        this.nodeClickListeners.add(nodeClickListener);
    }

    public void removeNodeDoubleClickListener(Node.NodeDoubleClickListener listener) {
        nodeDoubleClickListeners.remove(listener);
    }

    public void addNodeDoubleClickListener(Node.NodeDoubleClickListener listener) {
        nodeDoubleClickListeners.add(listener);
    }

    public void removeNodeHoverListener(Node.NodeHoverListener listener) {
        nodeHoverListeners.remove(listener);
    }

    public void addNodeHoverListener(Node.NodeHoverListener listener) {
        this.nodeHoverListeners.add(listener);
    }

    public void removeNodeBlurListener(Node.NodeBlurListener listener) {
        nodeBlurListeners.remove(listener);
    }

    public void addNodeBlurListener(Node.NodeBlurListener listener) {
        this.nodeBlurListeners.add(listener);
    }

    public void removeNodeDragStartListener(Node.NodeDragStartListener listener) {
        nodeDragStartListeners.remove(listener);
    }

    public void addNodeDragStartListener(Node.NodeDragStartListener listener) {
        this.nodeDragStartListeners.add(listener);
    }

    public void removeNodeDragEndListener(Node.NodeDragEndListener listener) {
        nodeDragEndListeners.remove(listener);
    }

    public void addNodeDragEndListener(Node.NodeDragEndListener listener) {
        this.nodeDragEndListeners.add(listener);
    }

    //adding and removing graph listeners
    public void addResizeListener(ResizeListener resizeListener) {
        this.resizeListener = resizeListener;
    }

    public void addStabilizationStartListener(StabilizationStartListener stabilizationStartListener) {
        this.stabilizationStartListener = stabilizationStartListener;
    }

    public void addStabilizedListener(StabilizedListener stabilizedListener) {
        this.stabilizedListener = stabilizedListener;
    }

    public void addViewChangedListener(ViewChangedListener viewChangedListener) {
        this.viewChangedListener = viewChangedListener;
    }

    public void addZoomListener(ZoomListener zoomListener) {
        this.zoomListener = zoomListener;
    }

    public void removeResizeListener() {
        this.resizeListener = null;
    }

    public void removeStabilizationStartListener() {
        this.stabilizationStartListener = null;
    }

    public void removeStabilizedListener() {
        this.stabilizedListener = null;
    }

    public void removeViewChangedListener() {
        this.viewChangedListener = null;
    }

    public void removeZoomListener() {
        this.zoomListener = null;
    }

    //listeners for entire graph
    public static abstract class ResizeListener extends GraphListener {
    }

    public static abstract class StabilizationStartListener extends GraphListener {
    }

    public static abstract class StabilizedListener extends GraphListener {
    }

    public static abstract class ViewChangedListener extends GraphListener {
    }

    public static abstract class ZoomListener extends GraphListener {
    }

    public void fireGraphResizeEvent(NetworkEvent event) {
        if (resizeListener != null) {
            resizeListener.onFired(event);
        }
    }

    public void fireGraphStabilizationStartEvent(NetworkEvent event) {
        if (stabilizationStartListener != null) {
            stabilizationStartListener.onFired(event);
        }
    }

    public void fireGraphStabilizedEvent(NetworkEvent event) {
        if (stabilizedListener != null) {
            stabilizedListener.onFired(event);
        }
    }

    public void fireGraphViewChangedEvent(NetworkEvent event) {
        if (viewChangedListener != null) {
            viewChangedListener.onFired(event);
        }
    }

    public void fireGraphZoomEvent(NetworkEvent event) {
        if (zoomListener != null) {
            zoomListener.onFired(event);
        }
    }

    // Each fire*Event method dispatches only to listeners whose bound node id
    // matches one of the ids carried by the event.
    public void fireNodeSelectEvent(SelectEvent event) {
        for (String nodeID : event.getNodeIds()) {
            for (Node.NodeSelectListener listener : nodeSelectListeners) {
                if (listener.getNode().getId().equals(nodeID)) {
                    listener.onFired(event);
                }
            }
        }
    }

    public void fireNodeClickEvent(ClickEvent event) {
        for (String nodeID : event.getNodeIds()) {
            for (Node.NodeClickListener listener : nodeClickListeners) {
                if (listener.getNode().getId().equals(nodeID)) {
                    listener.onFired(event);
                }
            }
        }
    }

    public void fireNodeDoubleClickEvent(DoubleClickEvent event) {
        for (String nodeID : event.getNodeIds()) {
            for (Node.NodeDoubleClickListener listener : nodeDoubleClickListeners) {
                if (listener.getNode().getId().equals(nodeID)) {
                    listener.onFired(event);
                }
            }
        }
    }

    public void fireNodeHoverEvent(HoverEvent event) {
        for (String nodeID : event.getNodeIds()) {
            for (Node.NodeHoverListener listener : nodeHoverListeners) {
                if (listener.getNode().getId().equals(nodeID)) {
                    listener.onFired(event);
                }
            }
        }
    }

    public void fireNodeBlurEvent(BlurEvent event) {
        for (String nodeID : event.getNodeIds()) {
            for (Node.NodeBlurListener listener : nodeBlurListeners) {
                if (listener.getNode().getId().equals(nodeID)) {
                    listener.onFired(event);
                }
            }
        }
    }

    public void fireNodeDragStartEvent(DragStartEvent event) {
        for (String nodeID : event.getNodeIds()) {
            for (Node.NodeDragStartListener listener : nodeDragStartListeners) {
                if (listener.getNode().getId().equals(nodeID)) {
                    listener.onFired(event);
                }
            }
        }
    }

    public void fireNodeDragEndEvent(DragEndEvent event) {
        for (String nodeID : event.getNodeIds()) {
            for (Node.NodeDragEndListener listener : nodeDragEndListeners) {
                if (listener.getNode().getId().equals(nodeID)) {
                    listener.onFired(event);
                }
            }
        }
    }
}
|
src/main/java/org/vaadin/visjs/networkDiagram/NetworkDiagram.java
|
package org.vaadin.visjs.networkDiagram;
import org.vaadin.visjs.networkDiagram.event.NetworkEvent;
import org.vaadin.visjs.networkDiagram.event.node.*;
import org.vaadin.visjs.networkDiagram.listener.GraphListener;
import org.vaadin.visjs.networkDiagram.options.Options;
import org.vaadin.visjs.networkDiagram.util.Constants;
import com.google.gson.Gson;
import com.vaadin.annotations.JavaScript;
import com.vaadin.annotations.StyleSheet;
import com.vaadin.ui.AbstractJavaScriptComponent;
import com.vaadin.ui.JavaScriptFunction;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import java.util.ArrayList;
import java.util.List;
/**
* Created by roshans on 10/10/14.
*/
@JavaScript({"js/vis.min.js", "js/networkDiagram-connector.js"})
@StyleSheet({"css/vis.css", "css/networkDiagram.css"})
public class NetworkDiagram extends AbstractJavaScriptComponent {
private List<Node.NodeSelectListener> nodeSelectListeners = new ArrayList<>();
private List<Node.NodeClickListener> nodeClickListeners = new ArrayList<>();
private List<Node.NodeDoubleClickListener> nodeDoubleClickListeners = new ArrayList<>();
private List<Node.NodeHoverListener> nodeHoverListeners = new ArrayList<>();
private List<Node.NodeBlurListener> nodeBlurListeners = new ArrayList<>();
private List<Node.NodeDragStartListener> nodeDragStartListeners = new ArrayList<>();
private List<Node.NodeDragEndListener> nodeDragEndListeners = new ArrayList<>();
private ResizeListener resizeListener;
private StabilizationStartListener stabilizationStartListener;
private StabilizedListener stabilizedListener;
private ViewChangedListener viewChangedListener;
private ZoomListener zoomListener;
public NetworkDiagram(Options options) {
super();
addFunction(Constants.ON_SELECT, new JavaScriptFunction() {
@Override
public void call(final JSONArray properties) throws JSONException {
SelectEvent event = EventGenerator.getNodeSelectEvent(properties);
fireNodeSelectEvent(event);
}
});
addFunction(Constants.ON_CLICK, new JavaScriptFunction() {
@Override
public void call(final JSONArray properties) throws JSONException {
ClickEvent event = EventGenerator.getNodeClickEvent(properties);
fireNodeClickEvent(event);
}
});
addFunction(Constants.ON_DOUBLE_CLICK, new JavaScriptFunction() {
@Override
public void call(final JSONArray properties) throws JSONException {
DoubleClickEvent event = EventGenerator.getNodeDoubleClickEvent(properties);
fireNodeDoubleClickEvent(event);
}
});
addFunction(Constants.ON_HOVER_NODE, new JavaScriptFunction() {
@Override
public void call(final JSONArray properties) throws JSONException {
HoverEvent event = EventGenerator.getNodeHoverEvent(properties);
fireNodeHoverEvent(event);
}
});
addFunction(Constants.ON_BLUR_NODE, new JavaScriptFunction() {
@Override
public void call(final JSONArray properties) throws JSONException {
BlurEvent event = EventGenerator.getNodeBlurEvent(properties);
fireNodeBlurEvent(event);
}
});
addFunction(Constants.ON_DRAG_START, new JavaScriptFunction() {
@Override
public void call(final JSONArray properties) throws JSONException {
DragStartEvent event = EventGenerator.getNodeDragStartEvent(properties);
fireNodeDragStartEvent(event);
}
});
addFunction(Constants.ON_DRAG_END, new JavaScriptFunction() {
@Override
public void call(final JSONArray properties) throws JSONException {
DragEndEvent event = EventGenerator.getNodeDragEndEvent(properties);
fireNodeDragEndEvent(event);
}
});
addFunction(Constants.ON_START_STABILIZATION, new JavaScriptFunction() {
@Override
public void call(final JSONArray properties) throws JSONException {
//System.out.println("onStartStabilization" + properties);
//fireGraphStabilizationStartEvent();
}
});
addFunction(Constants.ON_STABILIZED, new JavaScriptFunction() {
@Override
public void call(final JSONArray properties) throws JSONException {
//System.out.println("onStabilized" + properties);
//fireGraphStabilizedEvent();
}
});
addFunction(Constants.ON_VIEW_CHANGED, new JavaScriptFunction() {
@Override
public void call(final JSONArray properties) throws JSONException {
//System.out.println("onViewChanged" + properties);
//fireGraphViewChangedEvent();
}
});
addFunction(Constants.ON_ZOOM, new JavaScriptFunction() {
@Override
public void call(final JSONArray properties) throws JSONException {
//System.out.println("onZoom" + properties);
//fireGraphZoomEvent();
}
});
addFunction(Constants.ON_RESIZE, new JavaScriptFunction() {
@Override
public void call(final JSONArray properties) throws JSONException {
//System.out.println("onResize" + properties);
//fireGraphResizeEvent();
}
});
Gson gson = new Gson();
String json = gson.toJson(options);
callFunction("init", json);
}
public NetworkDiagramState getState() {
return (NetworkDiagramState) super.getState();
}
public void updateOptions(Options options) {
getState().updates++;
Gson gson = new Gson();
String json = gson.toJson(options);
callFunction("updateOptions", json);
}
public void addNode(Node... node) {
getState().updates++;
Gson gson = new Gson();
String json = gson.toJson(node);
callFunction("addNodes", json);
}
public void addNodes(List<Node> nodes) {
getState().updates++;
Gson gson = new Gson();
String json = gson.toJson(nodes);
callFunction("addNodes", json);
}
public void addEdges(List<Edge> edges) {
getState().updates++;
Gson gson = new Gson();
String json = gson.toJson(edges);
callFunction("addEdges", json);
}
public void addEdge(Edge... edges) {
getState().updates++;
Gson gson = new Gson();
String json = gson.toJson(edges);
callFunction("addEdges", json);
}
public void removeNode(Node... node) {
getState().updates++;
Gson gson = new Gson();
String json = gson.toJson(node);
callFunction("removeNode", json);
}
public void removeEdge(Edge... edges) {
getState().updates++;
Gson gson = new Gson();
String json = gson.toJson(edges);
callFunction("removeEdge", json);
}
public void updateNode(Node... node) {
getState().updates++;
Gson gson = new Gson();
String json = gson.toJson(node);
callFunction("updateNode", json);
}
public void updateEdge(Edge... edges) {
getState().updates++;
Gson gson = new Gson();
String json = gson.toJson(edges);
callFunction("updateEdge", json);
}
/**
 * @deprecated use {@link #updateEdges(List)} instead.
 */
@Deprecated
public void updateEdge(List<Edge> edges) {
    updateEdges(edges);
}

/**
 * Updates a list of existing edges on the client side.
 *
 * @param edges edges to update
 */
public void updateEdges(List<Edge> edges) {
    // Bump the state counter so the framework marks this component dirty,
    // consistent with every other mutator in this class (the varargs
    // updateEdge, addNodes, removeNode, etc. all do this).
    getState().updates++;
    Gson gson = new Gson();
    String json = gson.toJson(edges);
    callFunction("updateEdge", json);
}

/**
 * @deprecated use {@link #updateNodes(List)} instead.
 */
@Deprecated
public void updateNode(List<Node> nodes) {
    updateNodes(nodes);
}

/**
 * Updates a list of existing nodes on the client side.
 *
 * @param nodes nodes to update
 */
public void updateNodes(List<Node> nodes) {
    // Same dirty-marking as all other mutators in this class.
    getState().updates++;
    Gson gson = new Gson();
    String json = gson.toJson(nodes);
    callFunction("updateNode", json);
}
/** Removes all nodes on the client side via the "clearNodes" JS function. */
public void clearNodes(){
callFunction("clearNodes");
}
/** Removes all edges on the client side via the "clearEdges" JS function. */
public void clearEdges(){
callFunction("clearEdges");
}
/** Destroys the client-side network instance entirely. */
public void destroyNetwork(){
callFunction("destroyNetwork");
}
/**
 * Clears the whole diagram. Edges are cleared before nodes so no edge is
 * left referencing a removed node.
 */
public void clear(){
clearEdges();
clearNodes();
}
/** Asks the client side to redraw the connections. */
public void drawConnections() {
callFunction("drawConnections");
}
/** Registers a listener fired when its node is selected. */
public void addNodeSelectListener(Node.NodeSelectListener listener) {
    this.nodeSelectListeners.add(listener);
}

/** Unregisters a previously added node-select listener. */
public void removeNodeSelectListener(Node.NodeSelectListener listener) {
    this.nodeSelectListeners.remove(listener);
}

/** Unregisters a previously added node-click listener. */
public void removeNodeClickListeners(Node.NodeClickListener listener) {
    this.nodeClickListeners.remove(listener);
}

/** Registers a listener fired when its node is clicked. */
public void addNodeClickListener(Node.NodeClickListener nodeClickListener) {
    this.nodeClickListeners.add(nodeClickListener);
}

/** Unregisters a previously added node double-click listener. */
public void removeNodeDoubleClickListener(Node.NodeDoubleClickListener listener) {
    this.nodeDoubleClickListeners.remove(listener);
}

/** Registers a listener fired when its node is double-clicked. */
public void addNodeDoubleClickListener(Node.NodeDoubleClickListener listener) {
    this.nodeDoubleClickListeners.add(listener);
}

/** Unregisters a previously added node-hover listener. */
public void removeNodeHoverListener(Node.NodeHoverListener listener) {
    this.nodeHoverListeners.remove(listener);
}

/** Registers a listener fired when its node is hovered. */
public void addNodeHoverListener(Node.NodeHoverListener listener) {
    this.nodeHoverListeners.add(listener);
}

/** Unregisters a previously added node-blur listener. */
public void removeNodeBlurListener(Node.NodeBlurListener listener) {
    this.nodeBlurListeners.remove(listener);
}

/** Registers a listener fired when the pointer leaves its node. */
public void addNodeBlurListener(Node.NodeBlurListener listener) {
    this.nodeBlurListeners.add(listener);
}

/** Unregisters a previously added drag-start listener. */
public void removeNodeDragStartListener(Node.NodeDragStartListener listener) {
    this.nodeDragStartListeners.remove(listener);
}

/** Registers a listener fired when a drag of its node starts. */
public void addNodeDragStartListener(Node.NodeDragStartListener listener) {
    this.nodeDragStartListeners.add(listener);
}

/** Unregisters a previously added drag-end listener. */
public void removeNodeDragEndListener(Node.NodeDragEndListener listener) {
    this.nodeDragEndListeners.remove(listener);
}

/** Registers a listener fired when a drag of its node ends. */
public void addNodeDragEndListener(Node.NodeDragEndListener listener) {
    this.nodeDragEndListeners.add(listener);
}
// Registration of whole-graph listeners. Note: each event type holds a single
// listener field, so adding a new listener replaces any previous one.

/** Sets (replacing any previous) the graph resize listener. */
public void addResizeListener(ResizeListener resizeListener) {
    this.resizeListener = resizeListener;
}

/** Sets (replacing any previous) the stabilization-start listener. */
public void addStabilizationStartListener(StabilizationStartListener stabilizationStartListener) {
    this.stabilizationStartListener = stabilizationStartListener;
}

/** Sets (replacing any previous) the stabilized listener. */
public void addStabilizedListener(StabilizedListener stabilizedListener) {
    this.stabilizedListener = stabilizedListener;
}

/** Sets (replacing any previous) the view-changed listener. */
public void addViewChangedListener(ViewChangedListener viewChangedListener) {
    this.viewChangedListener = viewChangedListener;
}

/** Sets (replacing any previous) the zoom listener. */
public void addZoomListener(ZoomListener zoomListener) {
    this.zoomListener = zoomListener;
}

/** Clears the graph resize listener. */
public void removeResizeListener() {
    this.resizeListener = null;
}

/** Clears the stabilization-start listener. */
public void removeStabilizationStartListener() {
    this.stabilizationStartListener = null;
}

/** Clears the stabilized listener. */
public void removeStabilizedListener() {
    this.stabilizedListener = null;
}

/** Clears the view-changed listener. */
public void removeViewChangedListener() {
    this.viewChangedListener = null;
}

/** Clears the zoom listener. */
public void removeZoomListener() {
    this.zoomListener = null;
}
//listeners for entire graph
// Marker subclasses of GraphListener, one per whole-graph event type.
public static abstract class ResizeListener extends GraphListener {
}
public static abstract class StabilizationStartListener extends GraphListener {
}
public static abstract class StabilizedListener extends GraphListener {
}
public static abstract class ViewChangedListener extends GraphListener {
}
public static abstract class ZoomListener extends GraphListener {
}
/** Dispatches a resize event to the registered listener, if any. */
public void fireGraphResizeEvent(NetworkEvent event) {
    if (resizeListener == null) {
        return;
    }
    resizeListener.onFired(event);
}

/** Dispatches a stabilization-start event to the registered listener, if any. */
public void fireGraphStabilizationStartEvent(NetworkEvent event) {
    if (stabilizationStartListener == null) {
        return;
    }
    stabilizationStartListener.onFired(event);
}

/** Dispatches a stabilized event to the registered listener, if any. */
public void fireGraphStabilizedEvent(NetworkEvent event) {
    if (stabilizedListener == null) {
        return;
    }
    stabilizedListener.onFired(event);
}

/** Dispatches a view-changed event to the registered listener, if any. */
public void fireGraphViewChangedEvent(NetworkEvent event) {
    if (viewChangedListener == null) {
        return;
    }
    viewChangedListener.onFired(event);
}

/** Dispatches a zoom event to the registered listener, if any. */
public void fireGraphZoomEvent(NetworkEvent event) {
    if (zoomListener == null) {
        return;
    }
    zoomListener.onFired(event);
}
/**
 * Dispatches a select event to every listener whose node id appears in the
 * event's node-id list.
 */
public void fireNodeSelectEvent(SelectEvent event) {
    for (String id : event.getNodeIds()) {
        for (Node.NodeSelectListener l : nodeSelectListeners) {
            if (l.getNode().getId().equals(id)) {
                l.onFired(event);
            }
        }
    }
}

/** Dispatches a click event to every listener whose node id matches the event. */
public void fireNodeClickEvent(ClickEvent event) {
    for (String id : event.getNodeIds()) {
        for (Node.NodeClickListener l : nodeClickListeners) {
            if (l.getNode().getId().equals(id)) {
                l.onFired(event);
            }
        }
    }
}

/** Dispatches a double-click event to every listener whose node id matches the event. */
public void fireNodeDoubleClickEvent(DoubleClickEvent event) {
    for (String id : event.getNodeIds()) {
        for (Node.NodeDoubleClickListener l : nodeDoubleClickListeners) {
            if (l.getNode().getId().equals(id)) {
                l.onFired(event);
            }
        }
    }
}

/** Dispatches a hover event to every listener whose node id matches the event. */
public void fireNodeHoverEvent(HoverEvent event) {
    for (String id : event.getNodeIds()) {
        for (Node.NodeHoverListener l : nodeHoverListeners) {
            if (l.getNode().getId().equals(id)) {
                l.onFired(event);
            }
        }
    }
}

/** Dispatches a blur event to every listener whose node id matches the event. */
public void fireNodeBlurEvent(BlurEvent event) {
    for (String id : event.getNodeIds()) {
        for (Node.NodeBlurListener l : nodeBlurListeners) {
            if (l.getNode().getId().equals(id)) {
                l.onFired(event);
            }
        }
    }
}

/** Dispatches a drag-start event to every listener whose node id matches the event. */
public void fireNodeDragStartEvent(DragStartEvent event) {
    for (String id : event.getNodeIds()) {
        for (Node.NodeDragStartListener l : nodeDragStartListeners) {
            if (l.getNode().getId().equals(id)) {
                l.onFired(event);
            }
        }
    }
}

/** Dispatches a drag-end event to every listener whose node id matches the event. */
public void fireNodeDragEndEvent(DragEndEvent event) {
    for (String id : event.getNodeIds()) {
        for (Node.NodeDragEndListener l : nodeDragEndListeners) {
            if (l.getNode().getId().equals(id)) {
                l.onFired(event);
            }
        }
    }
}
}
|
removed new gson creation for all events
|
src/main/java/org/vaadin/visjs/networkDiagram/NetworkDiagram.java
|
removed new gson creation for all events
|
|
Java
|
apache-2.0
|
efe455510d8a88d35d65b58bc54cd8853f974bc2
| 0
|
nsoft/jesterj
|
/*
* Copyright 2013-2016 Needham Software LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jesterj.ingest.model.impl;
import com.coremedia.iso.Hex;
import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.ForwardingListMultimap;
import com.google.common.collect.Multimap;
import com.google.common.collect.Multiset;
import net.jini.core.entry.Entry;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.core.appender.AppenderLoggingException;
import org.jesterj.ingest.Main;
import org.jesterj.ingest.model.Document;
import org.jesterj.ingest.model.Plan;
import org.jesterj.ingest.model.Scanner;
import org.jesterj.ingest.model.Status;
import org.jesterj.ingest.model.Step;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
* A container for the file data and associated metadata. MetaData for which the key and the value
* are of type {@link java.lang.String} should be submitted as a field and value to the index. Multiple
* values for the same field are supported and addition order is maintained. The file data
* will be discarded by default, and if it is to be indexed, it should be processed
* and the text result added as a string value by a step in a plan that processes this item.
*
* @see ForwardingListMultimap
*/
public class DocumentImpl implements Document {
// document id field.
private final String idField;
Logger log = LogManager.getLogger();
private final ArrayListMultimap<String, String> delegate = ArrayListMultimap.create();
private byte[] rawData;
private Status status = Status.PROCESSING;
private String statusMessage = "";
private final Operation operation;
private final String sourceScannerName;
public DocumentImpl(byte[] rawData, String id, Plan plan, Operation operation, Scanner source) {
this.rawData = rawData;
this.operation = operation;
this.sourceScannerName = source.getName();
this.idField = plan.getDocIdField();
this.delegate.put(idField, id);
if (this.rawData != null) {
this.delegate.put(DOC_RAW_SIZE, String.valueOf(this.rawData.length));
}
}
@Override
public Multiset<String> keys() {
return delegate.keys();
}
@Override
public boolean putAll(@Nullable java.lang.String key, Iterable<? extends String> values) {
return delegate.putAll(key, values);
}
@Override
public boolean put(@Nonnull java.lang.String key, @Nonnull java.lang.String value) {
if (getIdField().equals(key)) {
ArrayList<String> values = new ArrayList<>();
values.add(value);
List<String> prev = replaceValues(this.idField, values);
return prev == null || prev.size() != 1 || !prev.get(0).equals(value);
} else {
return delegate.put(key, value);
}
}
@Override
public boolean putAll(Multimap<? extends String, ? extends String> multimap) {
return delegate.putAll(multimap);
}
@Override
public Set<String> keySet() {
return delegate.keySet();
}
@Override
public boolean containsEntry(@Nullable java.lang.Object key, @Nullable java.lang.Object value) {
return delegate.containsEntry(key, value);
}
@Override
public boolean remove(@Nullable java.lang.Object key, @Nullable java.lang.Object value) {
return delegate.remove(key, value);
}
@Override
public boolean containsValue(@Nullable java.lang.Object value) {
return delegate.containsValue(value);
}
@Override
public Collection<Map.Entry<String, String>> entries() {
return delegate.entries();
}
@Override
public boolean isEmpty() {
return delegate.isEmpty();
}
@Override
public void clear() {
delegate.clear();
}
@Override
public Map<String, Collection<String>> asMap() {
return delegate.asMap();
}
@Override
public List<String> replaceValues(@Nullable java.lang.String key, Iterable<? extends String> values) {
return delegate.replaceValues(key, values);
}
@Override
public Collection<String> values() {
return delegate.values();
}
@Override
public boolean containsKey(@Nullable java.lang.Object key) {
return delegate.containsKey(key);
}
@Override
public List<String> get(@Nullable java.lang.String key) {
return delegate.get(key);
}
@Override
public int size() {
return delegate.size();
}
@Override
public List<String> removeAll(@Nullable java.lang.Object key) {
return delegate.removeAll(key);
}
@Override
public byte[] getRawData() {
return rawData;
}
@Override
public void setRawData(byte[] rawData) {
this.rawData = rawData;
}
@Override
public Status getStatus() {
return this.status;
}
@Override
public void setStatus(Status status, String statusMessage) {
this.statusMessage = statusMessage;
setStatus(status);
}
@Override
public void setStatus(Status status) {
this.status = status;
try {
log.info(status.getMarker(), statusMessage);
} catch (AppenderLoggingException e) {
if (Main.isNotShuttingDown()) {
log.error("Could not contact our internal Cassandra!!!" + e);
}
}
}
@Override
public String getStatusMessage() {
return statusMessage;
}
@Override
public void setStatusMessage(String message) {
this.statusMessage = message;
}
@Override
public Entry toEntry(Step next) {
return new DocumentEntry(this, next);
}
@Override
public ArrayListMultimap<String, String> getDelegate() {
return delegate;
}
@Override
public String getId() {
return get(getIdField()).get(0);
}
@Override
public String getHash() {
try {
MessageDigest md = MessageDigest.getInstance("MD5");
md.update(getDelegateString().getBytes(StandardCharsets.UTF_8));
if (getRawData() != null) {
md.update(getRawData());
}
return Hex.encodeHex(md.digest());
} catch (NoSuchAlgorithmException e) {
e.printStackTrace();
throw new RuntimeException(e);
}
}
String getDelegateString() {
return delegate.toString();
}
@Override
public String getIdField() {
return idField;
}
@Override
public Operation getOperation() {
return operation;
}
public String getSourceScannerName() {
return sourceScannerName;
}
@Override
public String getFirstValue(String fieldName) {
List<String> values = get(fieldName);
return values == null || values.size() == 0 ? null : values.get(0);
}
/**
* A serializable form of an item that can be placed in a JavaSpace. The nextStepName is the property on which
* steps query JavaSpaces to retrieve entries.
*/
public static class DocumentEntry implements Entry {
public ArrayListMultimap<String, String> contents;
public String scannerName;
public Status status;
public String statusMessage;
public RawData data;
public String nextStepName;
public String operation;
DocumentEntry(Document document, Step destination) {
this.scannerName = document.getSourceScannerName();
this.contents = document.getDelegate();
this.status = document.getStatus();
this.statusMessage = document.getStatusMessage();
this.data = new RawData();
this.data.data = document.getRawData();
this.nextStepName = destination.getName();
this.operation = document.getOperation().toString();
}
}
// may want to associate encoding or parsing related information in the future...
public static class RawData {
public byte[] data;
}
@Override
public String toString() {
return "DocumentImpl{" +
"id=" + getId() +
", delegate=" + delegate +
", status=" + status +
", statusMessage='" + statusMessage + '\'' +
", operation=" + operation +
", sourceScannerName='" + sourceScannerName + '\'' +
", idField='" + idField + '\'' +
'}';
}
}
|
code/ingest/src/main/java/org/jesterj/ingest/model/impl/DocumentImpl.java
|
/*
* Copyright 2013-2016 Needham Software LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jesterj.ingest.model.impl;
import com.coremedia.iso.Hex;
import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.ForwardingListMultimap;
import com.google.common.collect.Multimap;
import com.google.common.collect.Multiset;
import net.jini.core.entry.Entry;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.core.appender.AppenderLoggingException;
import org.jesterj.ingest.Main;
import org.jesterj.ingest.model.Document;
import org.jesterj.ingest.model.Plan;
import org.jesterj.ingest.model.Scanner;
import org.jesterj.ingest.model.Status;
import org.jesterj.ingest.model.Step;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.io.IOException;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Set;
/*
* Created with IntelliJ IDEA.
* User: gus
* Date: 11/10/13
*/
/**
* A container for the file data and associated metadata. MetaData for which the key and the value
* are of type {@link java.lang.String} should be submitted as a field and value to the index. Multiple
* values for the same field are supported and addition order is maintained. The file data
* will be discarded by default, and if it is to be indexed, it should be processed
* and the text result added as a string value by a step in a plan that processes this item.
*
* @see ForwardingListMultimap
*/
public class DocumentImpl implements Document {

  // Name of the field that carries the document id (configured on the Plan).
  private final String idField;

  Logger log = LogManager.getLogger();

  // Field name -> ordered list of values. Insertion order per key is preserved.
  private ArrayListMultimap<String, String> delegate = ArrayListMultimap.create();

  private byte[] rawData;
  private Status status = Status.PROCESSING;
  private String statusMessage = "";
  private Operation operation;
  private String sourceScannerName;

  /**
   * Creates a document.
   *
   * @param rawData   raw bytes of the source document; may be null
   * @param id        the document id, stored under the plan's id field
   * @param plan      plan used to look up the id field name
   * @param operation operation this document represents
   * @param source    scanner that produced this document
   */
  public DocumentImpl(byte[] rawData, String id, Plan plan, Operation operation, Scanner source) {
    this.rawData = rawData;
    this.operation = operation;
    this.sourceScannerName = source.getName();
    this.idField = plan.getDocIdField();
    this.delegate.put(idField, id);
    if (this.rawData != null) {
      this.delegate.put(DOC_RAW_SIZE, String.valueOf(this.rawData.length));
    }
  }

  /**
   * Copy constructor. Creates a deep copy of raw data, so may be memory intensive.
   *
   * @param doc The original document to be copied.
   */
  public DocumentImpl(Document doc) {
    this(doc, true);
  }

  /**
   * Create a copy of a document but do not copy the raw data or existing mappings. Useful in creating
   * child documents or documents calculated from other documents.
   *
   * @param doc The document to copy
   * @param deep whether or not to copy the mappings and raw content or only the document info.
   */
  public DocumentImpl(Document doc, boolean deep) {
    // Guard against a null raw-data array: the primary constructor allows
    // rawData to be null, so a deep copy must not assume it is present.
    if (deep && doc.getRawData() != null) {
      byte[] duplicate = new byte[doc.getRawData().length];
      System.arraycopy(doc.getRawData(), 0, duplicate, 0, doc.getRawData().length);
      this.rawData = duplicate;
    } else {
      rawData = new byte[]{};
    }
    this.operation = doc.getOperation();
    if (deep) {
      this.delegate = ArrayListMultimap.create(doc.getDelegate());
    } else {
      this.delegate = ArrayListMultimap.create();
    }
    this.sourceScannerName = doc.getSourceScannerName();
    this.idField = doc.getIdField();
    this.status = doc.getStatus();
    this.statusMessage = doc.getStatusMessage();
  }

  @Override
  public Multiset<String> keys() {
    return delegate.keys();
  }

  @Override
  public boolean putAll(@Nullable java.lang.String key, Iterable<? extends String> values) {
    return delegate.putAll(key, values);
  }

  /**
   * Adds a value for a field. The id field is special-cased: it always holds
   * exactly one value, so putting to it replaces the existing id.
   *
   * @return true if the multimap changed
   */
  @Override
  public boolean put(@Nonnull java.lang.String key, @Nonnull java.lang.String value) {
    if (getIdField().equals(key)) {
      ArrayList<String> values = new ArrayList<>();
      values.add(value);
      List<String> prev = replaceValues(this.idField, values);
      return prev == null || prev.size() != 1 || !prev.get(0).equals(value);
    } else {
      return delegate.put(key, value);
    }
  }

  @Override
  public boolean putAll(Multimap<? extends String, ? extends String> multimap) {
    return delegate.putAll(multimap);
  }

  @Override
  public Set<String> keySet() {
    return delegate.keySet();
  }

  @Override
  public boolean containsEntry(@Nullable java.lang.Object key, @Nullable java.lang.Object value) {
    return delegate.containsEntry(key, value);
  }

  @Override
  public boolean remove(@Nullable java.lang.Object key, @Nullable java.lang.Object value) {
    return delegate.remove(key, value);
  }

  @Override
  public boolean containsValue(@Nullable java.lang.Object value) {
    return delegate.containsValue(value);
  }

  @Override
  public Collection<Map.Entry<String, String>> entries() {
    return delegate.entries();
  }

  @Override
  public boolean isEmpty() {
    return delegate.isEmpty();
  }

  @Override
  public void clear() {
    delegate.clear();
  }

  @Override
  public Map<String, Collection<String>> asMap() {
    return delegate.asMap();
  }

  @Override
  public List<String> replaceValues(@Nullable java.lang.String key, Iterable<? extends String> values) {
    return delegate.replaceValues(key, values);
  }

  @Override
  public Collection<String> values() {
    return delegate.values();
  }

  @Override
  public boolean containsKey(@Nullable java.lang.Object key) {
    return delegate.containsKey(key);
  }

  @Override
  public List<String> get(@Nullable java.lang.String key) {
    return delegate.get(key);
  }

  @Override
  public int size() {
    return delegate.size();
  }

  @Override
  public List<String> removeAll(@Nullable java.lang.Object key) {
    return delegate.removeAll(key);
  }

  @Override
  public byte[] getRawData() {
    return rawData;
  }

  @Override
  public void setRawData(byte[] rawData) {
    this.rawData = rawData;
  }

  @Override
  public Status getStatus() {
    return this.status;
  }

  @Override
  public void setStatus(Status status, String statusMessage) {
    this.statusMessage = statusMessage;
    setStatus(status);
  }

  /**
   * Sets the status and logs it with the status marker. Logging failures from
   * the Cassandra-backed appender are tolerated during shutdown.
   */
  @Override
  public void setStatus(Status status) {
    this.status = status;
    try {
      log.info(status.getMarker(), statusMessage);
    } catch (AppenderLoggingException e) {
      if (Main.isNotShuttingDown()) {
        log.error("Could not contact our internal Cassandra!!!" + e);
      }
    }
  }

  @Override
  public String getStatusMessage() {
    return statusMessage;
  }

  @Override
  public void setStatusMessage(String message) {
    this.statusMessage = message;
  }

  @Override
  public Entry toEntry(Step next) {
    return new DocumentEntry(this, next);
  }

  @Override
  public ArrayListMultimap<String, String> getDelegate() {
    return delegate;
  }

  @Override
  public String getId() {
    return get(getIdField()).get(0);
  }

  /**
   * Computes an MD5 hash over the field contents and (if present) the raw data.
   * Used for change detection, not security.
   */
  @Override
  public String getHash() {
    try {
      MessageDigest md = MessageDigest.getInstance("MD5");
      try {
        md.update(getDelegateString().getBytes("UTF-8"));
        if (getRawData() != null) {
          md.update(getRawData());
        }
      } catch (IOException e) {
        // "UTF-8" is always supported, so this is unreachable in practice.
        // Fail loudly rather than silently returning a hash of incomplete
        // data (the old behavior printed the trace and fell through).
        throw new RuntimeException(e);
      }
      return Hex.encodeHex(md.digest());
    } catch (NoSuchAlgorithmException e) {
      // MD5 is guaranteed on all JVMs; cause is preserved in the rethrow.
      throw new RuntimeException(e);
    }
  }

  String getDelegateString() {
    return delegate.toString();
  }

  @Override
  public String getIdField() {
    return idField;
  }

  @Override
  public Operation getOperation() {
    return operation;
  }

  public String getSourceScannerName() {
    return sourceScannerName;
  }

  /**
   * Returns the first value for a field, or null when the field is absent.
   */
  @Override
  public String getFirstValue(String fieldName) {
    List<String> values = get(fieldName);
    return values == null || values.size() == 0 ? null : values.get(0);
  }

  /**
   * A serializable form of an item that can be placed in a JavaSpace. The nextStepName is the property on which
   * steps query JavaSpaces to retrieve entries.
   */
  public static class DocumentEntry implements Entry {
    public ArrayListMultimap<String, String> contents;
    public String scannerName;
    public Status status;
    public String statusMessage;
    public RawData data;
    public String nextStepName;
    public String operation;

    DocumentEntry(Document document, Step destination) {
      this.scannerName = document.getSourceScannerName();
      this.contents = document.getDelegate();
      this.status = document.getStatus();
      this.statusMessage = document.getStatusMessage();
      this.data = new RawData();
      this.data.data = document.getRawData();
      this.nextStepName = destination.getName();
      this.operation = document.getOperation().toString();
    }
  }

  // may want to associate encoding or parsing related information in the future...
  public static class RawData {
    public byte[] data;
  }

  @Override
  public String toString() {
    return "DocumentImpl{" +
        "id=" + getId() +
        ", delegate=" + delegate +
        ", status=" + status +
        ", statusMessage='" + statusMessage + '\'' +
        ", operation=" + operation +
        ", sourceScannerName='" + sourceScannerName + '\'' +
        ", idField='" + idField + '\'' +
        '}';
  }
}
|
code cleanup
|
code/ingest/src/main/java/org/jesterj/ingest/model/impl/DocumentImpl.java
|
code cleanup
|
|
Java
|
apache-2.0
|
2ae4748eb558aaf863adcd8e3183ef8d55fb2e62
| 0
|
rabix/bunny,rabix/bunny,rabix/bunny,rabix/bunny,rabix/bunny
|
package org.rabix.bindings.sb.processor.callback;
import java.io.File;
import java.util.Map;
import java.util.Set;
import org.rabix.bindings.filemapper.FileMapper;
import org.rabix.bindings.model.FileValue;
import org.rabix.bindings.sb.bean.SBJob;
import org.rabix.bindings.sb.processor.SBPortProcessor;
import org.rabix.bindings.sb.processor.SBPortProcessorException;
/**
 * Convenience wrapper around {@link SBPortProcessor} that runs the various
 * port-processor callbacks over a job's inputs and outputs, translating
 * low-level failures into contextualized {@link SBPortProcessorException}s.
 */
public class SBPortProcessorHelper {

  private final SBJob sbJob;
  private final SBPortProcessor portProcessor;

  public SBPortProcessorHelper(SBJob sbJob) {
    this.sbJob = sbJob;
    this.portProcessor = new SBPortProcessor(sbJob);
  }

  /**
   * Collects the file values referenced by the inputs, optionally remapping
   * file paths first via the supplied mapper.
   */
  public Set<FileValue> getInputFiles(Map<String, Object> inputs, FileMapper fileMapper, Map<String, Object> config) throws SBPortProcessorException {
    if (fileMapper != null) {
      SBFilePathMapProcessorCallback fileMapperCallback = new SBFilePathMapProcessorCallback(fileMapper, config);
      inputs = portProcessor.processInputs(inputs, fileMapperCallback);
    }
    SBFileValueProcessorCallback callback = new SBFileValueProcessorCallback(sbJob, null, true);
    try {
      portProcessor.processInputs(inputs, callback);
    } catch (SBPortProcessorException e) {
      throw new SBPortProcessorException("Failed to get input files.", e);
    }
    return callback.getFileValues();
  }

  /** Collects the file values referenced by the outputs on the visible ports. */
  public Set<FileValue> getOutputFiles(Map<String, Object> outputs, Set<String> visiblePorts) throws SBPortProcessorException {
    SBFileValueProcessorCallback callback = new SBFileValueProcessorCallback(sbJob, visiblePorts, false);
    try {
      portProcessor.processOutputs(outputs, callback);
    } catch (SBPortProcessorException e) {
      throw new SBPortProcessorException("Failed to get output files.", e);
    }
    return callback.getFileValues();
  }

  /** Returns a flat set of all file paths found in the inputs. */
  public Set<String> flattenInputFilePaths(Map<String, Object> inputs) throws SBPortProcessorException {
    SBFilePathFlattenProcessorCallback callback = new SBFilePathFlattenProcessorCallback();
    try {
      portProcessor.processInputs(inputs, callback);
    } catch (SBPortProcessorException e) {
      throw new SBPortProcessorException("Failed to flatten input file paths.", e);
    }
    return callback.getFlattenedPaths();
  }

  /** Returns a flat set of all file values found in the inputs. */
  public Set<FileValue> flattenInputFiles(Map<String, Object> inputs) throws SBPortProcessorException {
    SBFileValueFlattenProcessorCallback callback = new SBFileValueFlattenProcessorCallback(null);
    try {
      portProcessor.processInputs(inputs, callback);
    } catch (SBPortProcessorException e) {
      throw new SBPortProcessorException("Failed to flatten input file paths.", e);
    }
    return callback.getFlattenedFileData();
  }

  /** Returns a flat set of all file values found in the outputs on the visible ports. */
  public Set<FileValue> flattenOutputFiles(Map<String, Object> outputs, Set<String> visiblePorts) throws SBPortProcessorException {
    SBFileValueFlattenProcessorCallback callback = new SBFileValueFlattenProcessorCallback(visiblePorts);
    try {
      portProcessor.processOutputs(outputs, callback);
    } catch (SBPortProcessorException e) {
      throw new SBPortProcessorException("Failed to flatten output file paths.", e);
    }
    return callback.getFlattenedFileData();
  }

  /** Replaces file values in the inputs with the supplied updated values. */
  public Map<String, Object> updateInputFiles(Map<String, Object> inputs, Set<FileValue> fileValues) throws SBPortProcessorException {
    try {
      return portProcessor.processInputs(inputs, new SBFileValueUpdateProcessorCallback(fileValues));
    } catch (SBPortProcessorException e) {
      // Message corrected: this method updates input files, it does not set sizes.
      throw new SBPortProcessorException("Failed to update input files.", e);
    }
  }

  /** Replaces file values in the outputs with the supplied updated values. */
  public Map<String, Object> updateOutputFiles(Map<String, Object> outputs, Set<FileValue> fileValues) throws SBPortProcessorException {
    try {
      return portProcessor.processOutputs(outputs, new SBFileValueUpdateProcessorCallback(fileValues));
    } catch (SBPortProcessorException e) {
      // Message corrected: was a copy-paste of the input-file-size message.
      throw new SBPortProcessorException("Failed to update output files.", e);
    }
  }

  /** Returns a flat set of all file paths found in the outputs. */
  public Set<String> flattenOutputFilePaths(Map<String, Object> outputs) throws SBPortProcessorException {
    SBFilePathFlattenProcessorCallback callback = new SBFilePathFlattenProcessorCallback();
    try {
      portProcessor.processOutputs(outputs, callback);
    } catch (SBPortProcessorException e) {
      throw new SBPortProcessorException("Failed to flatten output file paths.", e);
    }
    return callback.getFlattenedPaths();
  }

  /** Returns a flat set of all raw file-data maps found in the inputs. */
  public Set<Map<String, Object>> flattenInputFileData(Map<String, Object> inputs) throws SBPortProcessorException {
    SBFileDataFlattenProcessorCallback callback = new SBFileDataFlattenProcessorCallback();
    try {
      portProcessor.processInputs(inputs, callback);
    } catch (SBPortProcessorException e) {
      throw new SBPortProcessorException("Failed to flatten input file data.", e);
    }
    return callback.getFlattenedFileData();
  }

  /** Returns a flat set of all raw file-data maps found in the outputs. */
  public Set<Map<String, Object>> flattenOutputFileData(Map<String, Object> outputs)
      throws SBPortProcessorException {
    SBFileDataFlattenProcessorCallback callback = new SBFileDataFlattenProcessorCallback();
    try {
      portProcessor.processOutputs(outputs, callback);
    } catch (SBPortProcessorException e) {
      throw new SBPortProcessorException("Failed to flatten output file data.", e);
    }
    return callback.getFlattenedFileData();
  }

  /** Populates file-size information on the input file values. */
  public Map<String, Object> setFileSize(Map<String, Object> inputs) throws SBPortProcessorException {
    try {
      return portProcessor.processInputs(inputs, new SBFileSizeProcessorCallback());
    } catch (SBPortProcessorException e) {
      throw new SBPortProcessorException("Failed to set input file size", e);
    }
  }

  /** Propagates metadata from inputs onto outputs. */
  public Map<String, Object> fixOutputMetadata(Map<String, Object> inputs, Map<String, Object> outputs) throws SBPortProcessorException {
    try {
      SBMetadataCallback callback = new SBMetadataCallback(inputs);
      Map<String, Object> fixedOutputs = portProcessor.processOutputs(outputs, callback);
      fixedOutputs = portProcessor.processOutputs(fixedOutputs, callback); // call twice on purpose
      return fixedOutputs;
    } catch (SBPortProcessorException e) {
      throw new SBPortProcessorException("Failed to fix metadata", e);
    }
  }

  /** Loads file contents into the input file values. */
  public Map<String, Object> loadInputContents(Map<String, Object> inputs) throws SBPortProcessorException {
    try {
      return portProcessor.processInputs(inputs, new SBLoadContentsPortProcessorCallback());
    } catch (SBPortProcessorException e) {
      throw new SBPortProcessorException("Failed to load input contents.", e);
    }
  }

  /** Stages input files into the given working directory. */
  public Map<String, Object> stageInputFiles(Map<String, Object> inputs, File workingDir)
      throws SBPortProcessorException {
    try {
      return portProcessor.processInputs(inputs, new SBStageInputProcessorCallback(workingDir));
    } catch (SBPortProcessorException e) {
      throw new SBPortProcessorException("Failed to stage inputs.", e);
    }
  }
}
|
rabix-bindings-sb/src/main/java/org/rabix/bindings/sb/processor/callback/SBPortProcessorHelper.java
|
package org.rabix.bindings.sb.processor.callback;
import java.io.File;
import java.util.Map;
import java.util.Set;
import org.rabix.bindings.filemapper.FileMapper;
import org.rabix.bindings.model.FileValue;
import org.rabix.bindings.sb.bean.SBJob;
import org.rabix.bindings.sb.processor.SBPortProcessor;
import org.rabix.bindings.sb.processor.SBPortProcessorException;
/**
 * Helper around {@link SBPortProcessor} that runs the various file-oriented
 * port-processing callbacks (collecting, flattening, updating and staging
 * file values) over a job's input and output port values.
 */
public class SBPortProcessorHelper {

    private final SBJob sbJob;
    private final SBPortProcessor portProcessor;

    public SBPortProcessorHelper(SBJob sbJob) {
        this.sbJob = sbJob;
        this.portProcessor = new SBPortProcessor(sbJob);
    }

    /**
     * Collects all {@link FileValue}s referenced by the input port values.
     * If a {@link FileMapper} is supplied, input file paths are remapped
     * first and the mapped inputs are scanned.
     *
     * @param inputs     input port values
     * @param fileMapper optional path mapper (may be null)
     * @param config     mapper configuration, passed through to the mapper callback
     * @return the set of input file values
     * @throws SBPortProcessorException if processing fails
     */
    public Set<FileValue> getInputFiles(Map<String, Object> inputs, FileMapper fileMapper, Map<String, Object> config) throws SBPortProcessorException {
        if (fileMapper != null) {
            SBFilePathMapProcessorCallback fileMapperCallback = new SBFilePathMapProcessorCallback(fileMapper, config);
            inputs = portProcessor.processInputs(inputs, fileMapperCallback);
        }
        SBFileValueProcessorCallback callback = new SBFileValueProcessorCallback(sbJob, null, true);
        try {
            portProcessor.processInputs(inputs, callback);
        } catch (SBPortProcessorException e) {
            throw new SBPortProcessorException("Failed to get input files.", e);
        }
        return callback.getFileValues();
    }

    /**
     * Collects all {@link FileValue}s referenced by the output port values,
     * restricted to the given visible ports (null means all ports).
     */
    public Set<FileValue> getOutputFiles(Map<String, Object> outputs, Set<String> visiblePorts) throws SBPortProcessorException {
        SBFileValueProcessorCallback callback = new SBFileValueProcessorCallback(sbJob, visiblePorts, true);
        try {
            portProcessor.processOutputs(outputs, callback);
        } catch (SBPortProcessorException e) {
            throw new SBPortProcessorException("Failed to get output files.", e);
        }
        return callback.getFileValues();
    }

    /**
     * Returns a flat set of all file paths referenced by the input port values.
     */
    public Set<String> flattenInputFilePaths(Map<String, Object> inputs) throws SBPortProcessorException {
        SBFilePathFlattenProcessorCallback callback = new SBFilePathFlattenProcessorCallback();
        try {
            portProcessor.processInputs(inputs, callback);
        } catch (SBPortProcessorException e) {
            throw new SBPortProcessorException("Failed to flatten input file paths.", e);
        }
        return callback.getFlattenedPaths();
    }

    /**
     * Returns a flat set of all {@link FileValue}s referenced by the input port values.
     */
    public Set<FileValue> flattenInputFiles(Map<String, Object> inputs) throws SBPortProcessorException {
        SBFileValueFlattenProcessorCallback callback = new SBFileValueFlattenProcessorCallback(null);
        try {
            portProcessor.processInputs(inputs, callback);
        } catch (SBPortProcessorException e) {
            // message fixed: this method flattens file values, not paths
            throw new SBPortProcessorException("Failed to flatten input files.", e);
        }
        return callback.getFlattenedFileData();
    }

    /**
     * Returns a flat set of all {@link FileValue}s referenced by the output
     * port values, restricted to the given visible ports (null means all).
     */
    public Set<FileValue> flattenOutputFiles(Map<String, Object> outputs, Set<String> visiblePorts) throws SBPortProcessorException {
        SBFileValueFlattenProcessorCallback callback = new SBFileValueFlattenProcessorCallback(visiblePorts);
        try {
            portProcessor.processOutputs(outputs, callback);
        } catch (SBPortProcessorException e) {
            // message fixed: this method flattens file values, not paths
            throw new SBPortProcessorException("Failed to flatten output files.", e);
        }
        return callback.getFlattenedFileData();
    }

    /**
     * Replaces file values inside the input port values with the matching
     * entries from {@code fileValues}.
     */
    public Map<String, Object> updateInputFiles(Map<String, Object> inputs, Set<FileValue> fileValues) throws SBPortProcessorException {
        try {
            return portProcessor.processInputs(inputs, new SBFileValueUpdateProcessorCallback(fileValues));
        } catch (SBPortProcessorException e) {
            // message fixed: previously said "Failed to set input file size" (copy-paste)
            throw new SBPortProcessorException("Failed to update input files.", e);
        }
    }

    /**
     * Replaces file values inside the output port values with the matching
     * entries from {@code fileValues}.
     */
    public Map<String, Object> updateOutputFiles(Map<String, Object> outputs, Set<FileValue> fileValues) throws SBPortProcessorException {
        try {
            return portProcessor.processOutputs(outputs, new SBFileValueUpdateProcessorCallback(fileValues));
        } catch (SBPortProcessorException e) {
            // message fixed: previously said "Failed to set input file size" (copy-paste)
            throw new SBPortProcessorException("Failed to update output files.", e);
        }
    }

    /**
     * Returns a flat set of all file paths referenced by the output port values.
     */
    public Set<String> flattenOutputFilePaths(Map<String, Object> outputs) throws SBPortProcessorException {
        SBFilePathFlattenProcessorCallback callback = new SBFilePathFlattenProcessorCallback();
        try {
            portProcessor.processOutputs(outputs, callback);
        } catch (SBPortProcessorException e) {
            throw new SBPortProcessorException("Failed to flatten output file paths.", e);
        }
        return callback.getFlattenedPaths();
    }

    /**
     * Returns a flat set of the raw file-data maps referenced by the input port values.
     */
    public Set<Map<String, Object>> flattenInputFileData(Map<String, Object> inputs) throws SBPortProcessorException {
        SBFileDataFlattenProcessorCallback callback = new SBFileDataFlattenProcessorCallback();
        try {
            portProcessor.processInputs(inputs, callback);
        } catch (SBPortProcessorException e) {
            throw new SBPortProcessorException("Failed to flatten input file data.", e);
        }
        return callback.getFlattenedFileData();
    }

    /**
     * Returns a flat set of the raw file-data maps referenced by the output port values.
     */
    public Set<Map<String, Object>> flattenOutputFileData(Map<String, Object> outputs)
            throws SBPortProcessorException {
        SBFileDataFlattenProcessorCallback callback = new SBFileDataFlattenProcessorCallback();
        try {
            portProcessor.processOutputs(outputs, callback);
        } catch (SBPortProcessorException e) {
            throw new SBPortProcessorException("Failed to flatten output file data.", e);
        }
        return callback.getFlattenedFileData();
    }

    /**
     * Populates the size attribute on file values inside the input port values.
     */
    public Map<String, Object> setFileSize(Map<String, Object> inputs) throws SBPortProcessorException {
        try {
            return portProcessor.processInputs(inputs, new SBFileSizeProcessorCallback());
        } catch (SBPortProcessorException e) {
            throw new SBPortProcessorException("Failed to set input file size", e);
        }
    }

    /**
     * Propagates metadata from input file values onto the output file values.
     * The processor is intentionally applied twice (second pass picks up
     * metadata resolved during the first pass).
     */
    public Map<String, Object> fixOutputMetadata(Map<String, Object> inputs, Map<String, Object> outputs) throws SBPortProcessorException {
        try {
            SBMetadataCallback callback = new SBMetadataCallback(inputs);
            Map<String, Object> fixedOutputs = portProcessor.processOutputs(outputs, callback);
            fixedOutputs = portProcessor.processOutputs(fixedOutputs, callback); // call twice on purpose
            return fixedOutputs;
        } catch (SBPortProcessorException e) {
            throw new SBPortProcessorException("Failed to fix metadata", e);
        }
    }

    /**
     * Loads file contents into the input file values (for ports that request it).
     */
    public Map<String, Object> loadInputContents(Map<String, Object> inputs) throws SBPortProcessorException {
        try {
            return portProcessor.processInputs(inputs, new SBLoadContentsPortProcessorCallback());
        } catch (SBPortProcessorException e) {
            throw new SBPortProcessorException("Failed to load input contents.", e);
        }
    }

    /**
     * Stages input files into the given working directory.
     *
     * @param inputs     input port values
     * @param workingDir directory to stage files into
     */
    public Map<String, Object> stageInputFiles(Map<String, Object> inputs, File workingDir)
            throws SBPortProcessorException {
        try {
            return portProcessor.processInputs(inputs, new SBStageInputProcessorCallback(workingDir));
        } catch (SBPortProcessorException e) {
            throw new SBPortProcessorException("Failed to stage inputs.", e);
        }
    }
}
|
fix sb bindings
|
rabix-bindings-sb/src/main/java/org/rabix/bindings/sb/processor/callback/SBPortProcessorHelper.java
|
fix sb bindings
|
|
Java
|
apache-2.0
|
4fe0244646843b3ad306feb6281a50aa743c3add
| 0
|
leeyazhou/sharding-jdbc,apache/incubator-shardingsphere,apache/incubator-shardingsphere,leeyazhou/sharding-jdbc,leeyazhou/sharding-jdbc,apache/incubator-shardingsphere,apache/incubator-shardingsphere,leeyazhou/sharding-jdbc
|
/*
* Copyright 2016-2018 shardingsphere.io.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* </p>
*/
package io.shardingsphere.transaction.manager.xa;
import com.mysql.jdbc.jdbc2.optional.MysqlXADataSource;
import io.shardingsphere.core.constant.DatabaseType;
import org.hamcrest.Matchers;
import org.junit.Test;
import javax.sql.XADataSource;
import static org.hamcrest.MatcherAssert.assertThat;
/**
 * Tests that {@link XADataSourceFactory} builds an XADataSource for each
 * supported database type. Previously three of the four methods assigned the
 * result to an unused local and asserted nothing; they now assert the result
 * is non-null so a factory regression actually fails the test.
 */
public class XADataSourceFactoryTest {

    @Test
    public void assertCreateH2XADataSource() {
        XADataSource xaDataSource = XADataSourceFactory.build(DatabaseType.H2);
        assertThat(xaDataSource, Matchers.notNullValue());
    }

    @Test
    public void assertCreateMysqlXADataSource() {
        XADataSource xaDataSource = XADataSourceFactory.build(DatabaseType.MySQL);
        assertThat(xaDataSource, Matchers.<XADataSource>instanceOf(MysqlXADataSource.class));
    }

    @Test
    public void assertCreatePGXADataSource() {
        XADataSource xaDataSource = XADataSourceFactory.build(DatabaseType.PostgreSQL);
        assertThat(xaDataSource, Matchers.notNullValue());
    }

    @Test
    public void assertCreateMSSQLXADataSource() {
        XADataSource xaDataSource = XADataSourceFactory.build(DatabaseType.SQLServer);
        assertThat(xaDataSource, Matchers.notNullValue());
    }
}
|
sharding-transaction/src/test/java/io/shardingsphere/transaction/manager/xa/XADataSourceFactoryTest.java
|
/*
* Copyright 2016-2018 shardingsphere.io.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* </p>
*/
package io.shardingsphere.transaction.manager.xa;
// Empty placeholder: test methods for XADataSourceFactory had not yet been added
// at this revision (see the later version of this class for the actual cases).
public class XADataSourceFactoryTest {
}
|
#1363 Add test case for XADataSourceFactory.
|
sharding-transaction/src/test/java/io/shardingsphere/transaction/manager/xa/XADataSourceFactoryTest.java
|
#1363 Add test case for XADataSourceFactory.
|
|
Java
|
apache-2.0
|
9203583a11e8dbab25c2bb8460855222f66aea5f
| 0
|
nla/openwayback,JesseWeinstein/openwayback,kris-sigur/openwayback,zubairkhatri/openwayback,emijrp/openwayback,kris-sigur/openwayback,nla/openwayback,zubairkhatri/openwayback,JesseWeinstein/openwayback,emijrp/openwayback,bitzl/openwayback,bitzl/openwayback,nlnwa/openwayback,nlnwa/openwayback,ukwa/openwayback,chasehd/openwayback,SpiralsSeminaire/openwayback,bitzl/openwayback,emijrp/openwayback,SpiralsSeminaire/openwayback,nla/openwayback,nla/openwayback,emijrp/openwayback,nlnwa/openwayback,iipc/openwayback,efundamentals/openwayback,SpiralsSeminaire/openwayback,chasehd/openwayback,ukwa/openwayback,sul-dlss/openwayback,kris-sigur/openwayback,bitzl/openwayback,efundamentals/openwayback,JesseWeinstein/openwayback,ukwa/openwayback,efundamentals/openwayback,iipc/openwayback,nlnwa/openwayback,sul-dlss/openwayback,sul-dlss/openwayback,JesseWeinstein/openwayback,zubairkhatri/openwayback,SpiralsSeminaire/openwayback,emijrp/openwayback,iipc/openwayback,nlnwa/openwayback,efundamentals/openwayback,efundamentals/openwayback,chasehd/openwayback,zubairkhatri/openwayback,MohammedElsayyed/openwayback,JesseWeinstein/openwayback,kris-sigur/openwayback,MohammedElsayyed/openwayback,bitzl/openwayback,kris-sigur/openwayback,nla/openwayback,SpiralsSeminaire/openwayback,MohammedElsayyed/openwayback
|
/*
* This file is part of the Wayback archival access software
* (http://archive-access.sourceforge.net/projects/wayback/).
*
* Licensed to the Internet Archive (IA) by one or more individual
* contributors.
*
* The IA licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.archive.wayback.accesscontrol.staticmap;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.util.Map;
import org.archive.wayback.UrlCanonicalizer;
import org.archive.wayback.core.CaptureSearchResult;
import org.archive.wayback.util.ObjectFilter;
import org.archive.wayback.util.url.AggressiveUrlCanonicalizer;
import junit.framework.TestCase;
/**
*
*
* @author brad
* @version $Date$, $Revision$
*/
/**
 * Tests {@link StaticMapExclusionFilter} against exclusion rules loaded from a
 * temporary file via {@link StaticMapExclusionFilterFactory}.
 *
 * @author brad
 * @version $Date$, $Revision$
 */
public class StaticMapExclusionFilterTest extends TestCase {

    File tmpFile = null;
    StaticMapExclusionFilterFactory factory = null;
    UrlCanonicalizer canonicalizer = new AggressiveUrlCanonicalizer();

    protected void setUp() throws Exception {
        super.setUp();
        factory = new StaticMapExclusionFilterFactory();
        tmpFile = File.createTempFile("static-map", ".tmp");
        // Properties p = new Properties();
        // p.put("resourceindex.exclusionpath", tmpFile.getAbsolutePath());
        // factory.init(p);
    }

    /*
     * @see TestCase#tearDown()
     */
    protected void tearDown() throws Exception {
        super.tearDown();
        if (tmpFile != null && tmpFile.exists()) {
            tmpFile.delete();
        }
    }

    /**
     * Real-world exclusion rules: host massaging, www-prefix stripping,
     * trailing slashes, ports, and sub-paths.
     *
     * @throws Exception
     */
    public void testRealWorld() throws Exception {
        String bases[] = { "pho-c.co.jp/~clever",
                "sf.net/pop/Roger",
                "www.eva-stu.vn",
                "mins.com.br/",
                "24.ne.jp",
                "24.ne.jp/~nekko"};
        ObjectFilter<CaptureSearchResult> filter = getFilter(bases);
        assertFalse("unmassaged",isBlocked(filter,"24.ne.jp.idpnt.com/robots.txt"));
        assertTrue("massage",isBlocked(filter,"http://24.ne.jp:80/"));
        assertTrue("unmassaged",isBlocked(filter,"http://www.pho-c.co.jp/~clever"));
        assertTrue("massage",isBlocked(filter,"http://24.ne.jp"));
        assertTrue("unmassaged",isBlocked(filter,"http://www.pho-c.co.jp/~clever"));
        assertTrue("massaged",isBlocked(filter,"http://pho-c.co.jp/~clever"));
        assertTrue("trailing-slash",isBlocked(filter,"http://pho-c.co.jp/~clever/"));
        assertTrue("subpath",isBlocked(filter,"http://pho-c.co.jp/~clever/foo.txt"));
        assertTrue("full-port",isBlocked(filter,"http://www.mins.com.br:80"));
        assertTrue("tail-slash-port",isBlocked(filter,"http://www.mins.com.br:80/"));
        assertTrue("full",isBlocked(filter,"http://www.mins.com.br"));
        assertTrue("tail-slash",isBlocked(filter,"http://www.mins.com.br/"));
        assertTrue("full-massage",isBlocked(filter,"http://mins.com.br"));
        assertTrue("tail-slash-massage",isBlocked(filter,"http://mins.com.br/"));
        assertTrue("massage",isBlocked(filter,"http://mins.com.br/foo.txt"));
        assertTrue("subpath",isBlocked(filter,"http://www13.mins.com.br/~clever/foo.txt"));
        assertTrue("massage",isBlocked(filter,"24.ne.jp"));
        assertTrue("full",isBlocked(filter,"http://www.mins.com.br"));
        assertTrue("subpath",isBlocked(filter,"www.24.ne.jp"));
        assertTrue("tail-slash-massage",isBlocked(filter,"http://mins.com.br/"));
        assertTrue("subpath",isBlocked(filter,"http://www.24.ne.jp:80/"));
        assertTrue(isBlocked(filter,"http://sf.net/pop/Roger"));
        assertTrue(isBlocked(filter,"http://sf.net/pop/Roger/"));
        assertTrue(isBlocked(filter,"http://sf.net/pop/Roger//"));
        assertFalse(isBlocked(filter,"http://sf.net/pop/"));
        assertTrue(isBlocked(filter,"http://sf.net/pop/Roger/2"));
        assertTrue(isBlocked(filter,"http://sf.net/pop/Roger/23"));
        assertTrue(isBlocked(filter,"http://www.sf.net/pop/Roger"));
        assertTrue(isBlocked(filter,"http://www1.sf.net/pop/Roger"));
        assertTrue(isBlocked(filter,"http://www23.sf.net/pop/Roger"));
        assertTrue(isBlocked(filter,"http://www23.eva-stu.vn/"));
        assertTrue(isBlocked(filter,"http://www23.eva-stu.vn"));
        assertTrue(isBlocked(filter,"http://eva-stu.vn"));
        assertTrue(isBlocked(filter,"http://www.eva-stu.vn/"));
        assertTrue(isBlocked(filter,"http://eva-stu.vn/"));
        assertTrue(isBlocked(filter,"http://www.eva-stu.vn/foo.txt"));
        assertTrue(isBlocked(filter,"http://www2.eva-stu.vn/foo/bar.txt"));
        assertTrue(isBlocked(filter,"http://eva-stu.vn/foo/bar.txt"));
    }

    /**
     * Rules given with full URL prefixes should still block both the
     * www and bare-host forms, but not similar-looking other hosts.
     *
     * @throws Exception
     */
    public void testBaseNoPrefix() throws Exception {
        String bases[] = {"http://www.peagreenboat.com/",
                "http://peagreenboat.com/"};
        ObjectFilter<CaptureSearchResult> filter = getFilter(bases);
        assertTrue("unmassaged",isBlocked(filter,"http://www.peagreenboat.com"));
        assertTrue("unmassaged",isBlocked(filter,"http://peagreenboat.com"));
        assertFalse("other1",isBlocked(filter,"http://peagreenboatt.com"));
        assertFalse("other2",isBlocked(filter,"http://peagreenboat.org"));
        assertFalse("other3",isBlocked(filter,"http://www.peagreenboat.org"));
        // there is a problem with the SURTTokenizer... deal with ports!
        // assertFalse("other4",isBlocked(filter,"http://www.peagreenboat.com:8080"));
        assertTrue("subpath",isBlocked(filter,"http://www.peagreenboat.com/foo"));
        assertTrue("emptypath",isBlocked(filter,"http://www.peagreenboat.com/"));
    }

    /** Returns true if the filter excludes a capture with the given original URL. */
    private boolean isBlocked(ObjectFilter<CaptureSearchResult> filter, String url) {
        CaptureSearchResult result = new CaptureSearchResult();
        result.setOriginalUrl(url);
        return filter.filterObject(result) == ObjectFilter.FILTER_EXCLUDE;
    }

    /** Writes the rules to the temp file, loads them, and builds a filter. */
    private ObjectFilter<CaptureSearchResult> getFilter(String lines[])
            throws IOException {
        setTmpContents(lines);
        Map<String,Object> map = factory.loadFile(tmpFile.getAbsolutePath());
        return new StaticMapExclusionFilter(map,canonicalizer);
    }

    /**
     * Overwrites the temp file with the given lines, one per line.
     * The writer is now closed in a finally block so a write failure
     * no longer leaks the file handle.
     */
    private void setTmpContents(String[] lines) throws IOException {
        if (tmpFile != null && tmpFile.exists()) {
            tmpFile.delete();
        }
        FileWriter writer = new FileWriter(tmpFile);
        try {
            StringBuilder sb = new StringBuilder();
            for (int i = 0; i < lines.length; i++) {
                sb.append(lines[i]).append("\n");
            }
            writer.write(sb.toString());
        } finally {
            writer.close();
        }
        //factory.reloadFile();
    }
}
|
wayback-core/src/test/java/org/archive/wayback/accesscontrol/staticmap/StaticMapExclusionFilterTest.java
|
/*
* This file is part of the Wayback archival access software
* (http://archive-access.sourceforge.net/projects/wayback/).
*
* Licensed to the Internet Archive (IA) by one or more individual
* contributors.
*
* The IA licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.archive.wayback.accesscontrol.staticmap;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.util.Map;
import org.archive.wayback.core.CaptureSearchResult;
import org.archive.wayback.util.ObjectFilter;
import junit.framework.TestCase;
/**
*
*
* @author brad
* @version $Date$, $Revision$
*/
/**
 * Tests {@link StaticMapExclusionFilter} against exclusion rules loaded from a
 * temporary file via {@link StaticMapExclusionFilterFactory}.
 *
 * @author brad
 * @version $Date$, $Revision$
 */
public class StaticMapExclusionFilterTest extends TestCase {

    File tmpFile = null;
    StaticMapExclusionFilterFactory factory = null;

    protected void setUp() throws Exception {
        super.setUp();
        factory = new StaticMapExclusionFilterFactory();
        tmpFile = File.createTempFile("static-map", ".tmp");
        // Properties p = new Properties();
        // p.put("resourceindex.exclusionpath", tmpFile.getAbsolutePath());
        // factory.init(p);
    }

    /*
     * @see TestCase#tearDown()
     */
    protected void tearDown() throws Exception {
        super.tearDown();
        if (tmpFile != null && tmpFile.exists()) {
            tmpFile.delete();
        }
    }

    /**
     * Rules given with full URL prefixes should still block both the
     * www and bare-host forms, but not similar-looking other hosts.
     *
     * @throws Exception
     */
    public void testBaseNoPrefix() throws Exception {
        String bases[] = {"http://www.peagreenboat.com/",
                "http://peagreenboat.com/"};
        ObjectFilter<CaptureSearchResult> filter = getFilter(bases);
        assertTrue("unmassaged",isBlocked(filter,"http://www.peagreenboat.com"));
        assertTrue("unmassaged",isBlocked(filter,"http://peagreenboat.com"));
        assertFalse("other1",isBlocked(filter,"http://peagreenboatt.com"));
        assertFalse("other2",isBlocked(filter,"http://peagreenboat.org"));
        assertFalse("other3",isBlocked(filter,"http://www.peagreenboat.org"));
        // there is a problem with the SURTTokenizer... deal with ports!
        // assertFalse("other4",isBlocked(filter,"http://www.peagreenboat.com:8080"));
        assertTrue("subpath",isBlocked(filter,"http://www.peagreenboat.com/foo"));
        assertTrue("emptypath",isBlocked(filter,"http://www.peagreenboat.com/"));
    }

    /** Returns true if the filter excludes a capture with the given original URL. */
    private boolean isBlocked(ObjectFilter<CaptureSearchResult> filter, String url) {
        CaptureSearchResult result = new CaptureSearchResult();
        result.setOriginalUrl(url);
        return filter.filterObject(result) == ObjectFilter.FILTER_EXCLUDE;
    }

    /** Writes the rules to the temp file, loads them, and builds a filter. */
    private ObjectFilter<CaptureSearchResult> getFilter(String lines[])
            throws IOException {
        setTmpContents(lines);
        Map<String,Object> map = factory.loadFile(tmpFile.getAbsolutePath());
        return new StaticMapExclusionFilter(map);
    }

    /**
     * Overwrites the temp file with the given lines, one per line.
     * The writer is now closed in a finally block so a write failure
     * no longer leaks the file handle.
     */
    private void setTmpContents(String[] lines) throws IOException {
        if (tmpFile != null && tmpFile.exists()) {
            tmpFile.delete();
        }
        FileWriter writer = new FileWriter(tmpFile);
        try {
            StringBuilder sb = new StringBuilder();
            for (int i = 0; i < lines.length; i++) {
                sb.append(lines[i]).append("\n");
            }
            writer.write(sb.toString());
        } finally {
            writer.close();
        }
        //factory.reloadFile();
    }
}
|
added tests for real-world examples
git-svn-id: ca6d9ebf75caaf710f0e3a4ee74a890c456d4c90@3387 69e27eb3-6e27-0410-b9c6-fffd7e226fab
|
wayback-core/src/test/java/org/archive/wayback/accesscontrol/staticmap/StaticMapExclusionFilterTest.java
|
added tests for real-world examples
|
|
Java
|
apache-2.0
|
f5bf64a0a2313489c3f39c01efaedf04c62285f8
| 0
|
savanibharat/concourse,cinchapi/concourse,mAzurkovic/concourse,chiranjeevjain/concourse,cinchapi/concourse,Qunzer/concourse,remiemalik/concourse,bigtreeljc/concourse,vrnithinkumar/concourse,karthikprabhu17/concourse,remiemalik/concourse,JerJohn15/concourse,karthikprabhu17/concourse,mAzurkovic/concourse,dubex/concourse,remiemalik/concourse,Qunzer/concourse,bigtreeljc/concourse,kylycht/concourse,MattiasZurkovic/concourse,vrnithinkumar/concourse,dubex/concourse,MattiasZurkovic/concourse,remiemalik/concourse,chiranjeevjain/concourse,aabdin01/concourse,hcuffy/concourse,Qunzer/concourse,vrnithinkumar/concourse,remiemalik/concourse,prateek135/concourse,aabdin01/concourse,karthikprabhu17/concourse,chiranjeevjain/concourse,MattiasZurkovic/concourse,remiemalik/concourse,Qunzer/concourse,savanibharat/concourse,prateek135/concourse,JerJohn15/concourse,hcuffy/concourse,chiranjeevjain/concourse,bigtreeljc/concourse,hcuffy/concourse,chiranjeevjain/concourse,kylycht/concourse,dubex/concourse,aabdin01/concourse,mAzurkovic/concourse,savanibharat/concourse,dubex/concourse,mAzurkovic/concourse,hcuffy/concourse,savanibharat/concourse,kylycht/concourse,prateek135/concourse,aabdin01/concourse,MattiasZurkovic/concourse,chiranjeevjain/concourse,karthikprabhu17/concourse,MattiasZurkovic/concourse,karthikprabhu17/concourse,cinchapi/concourse,kylycht/concourse,JerJohn15/concourse,bigtreeljc/concourse,dubex/concourse,aabdin01/concourse,dubex/concourse,savanibharat/concourse,aabdin01/concourse,karthikprabhu17/concourse,kylycht/concourse,MattiasZurkovic/concourse,hcuffy/concourse,prateek135/concourse,vrnithinkumar/concourse,hcuffy/concourse,prateek135/concourse,bigtreeljc/concourse,vrnithinkumar/concourse,bigtreeljc/concourse,mAzurkovic/concourse,cinchapi/concourse,JerJohn15/concourse,JerJohn15/concourse,savanibharat/concourse,prateek135/concourse,mAzurkovic/concourse,JerJohn15/concourse,Qunzer/concourse,cinchapi/concourse,vrnithinkumar/concourse,cinchapi/concourse,kylycht/concourse,Qunzer/concou
rse
|
/**
* Autogenerated by Thrift Compiler (0.9.2)
*
* DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
*
* @generated
*/
package org.cinchapi.concourse.thrift;
import org.apache.thrift.scheme.IScheme;
import org.apache.thrift.scheme.SchemeFactory;
import org.apache.thrift.scheme.StandardScheme;
import org.apache.thrift.scheme.TupleScheme;
import org.apache.thrift.protocol.TTupleProtocol;
import org.apache.thrift.protocol.TProtocolException;
import org.apache.thrift.EncodingUtils;
import org.apache.thrift.TException;
import org.apache.thrift.async.AsyncMethodCallback;
import org.apache.thrift.server.AbstractNonblockingServer.*;
import org.cinchapi.concourse.util.ByteBuffers;
import org.cinchapi.concourse.util.Convert;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.util.HashMap;
import java.util.EnumMap;
import java.util.Set;
import java.util.HashSet;
import java.util.EnumSet;
import java.util.Collections;
import java.util.BitSet;
import java.nio.ByteBuffer;
import java.util.Arrays;
import javax.annotation.Generated;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@SuppressWarnings({ "cast", "rawtypes", "serial", "unchecked", "unused" })
/**
* A lightweight wrapper for a typed Object that has been encoded
* as binary data.
*/
@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-2-22")
public class TObject implements
org.apache.thrift.TBase<TObject, TObject._Fields>,
java.io.Serializable,
Cloneable,
Comparable<TObject> {
/**
* Represents a null object that can be passed across the wire.
*/
public static final TObject NULL = new TObject();
static {
NULL.setType(Type.NULL);
NULL.setData(ByteBuffer.allocate(1));
}
// isset id assignments
public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
private static final org.apache.thrift.protocol.TField DATA_FIELD_DESC = new org.apache.thrift.protocol.TField(
"data", org.apache.thrift.protocol.TType.STRING, (short) 1);
private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct(
"TObject");
private static final org.apache.thrift.protocol.TField TYPE_FIELD_DESC = new org.apache.thrift.protocol.TField(
"type", org.apache.thrift.protocol.TType.I32, (short) 2);
static {
schemes.put(StandardScheme.class, new TObjectStandardSchemeFactory());
schemes.put(TupleScheme.class, new TObjectTupleSchemeFactory());
}
static {
Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(
_Fields.class);
tmpMap.put(_Fields.DATA, new org.apache.thrift.meta_data.FieldMetaData(
"data", org.apache.thrift.TFieldRequirementType.REQUIRED,
new org.apache.thrift.meta_data.FieldValueMetaData(
org.apache.thrift.protocol.TType.STRING, true)));
tmpMap.put(_Fields.TYPE, new org.apache.thrift.meta_data.FieldMetaData(
"type", org.apache.thrift.TFieldRequirementType.REQUIRED,
new org.apache.thrift.meta_data.EnumMetaData(
org.apache.thrift.protocol.TType.ENUM, Type.class)));
metaDataMap = Collections.unmodifiableMap(tmpMap);
org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(
TObject.class, metaDataMap);
}
public ByteBuffer data; // required
/**
*
* @see Type
*/
public Type type; // required
public TObject() {
this.type = Type.STRING;
}
public TObject(ByteBuffer data, Type type) {
this.data = data;
this.type = type;
}
/**
* Performs a deep copy on <i>other</i>.
*/
public TObject(TObject other) {
if(other.isSetData()) {
this.data = org.apache.thrift.TBaseHelper.copyBinary(other.data);
}
if(other.isSetType()) {
this.type = other.type;
}
}
public ByteBuffer bufferForData() {
return ByteBuffers.asReadOnlyBuffer(data);
}
@Override
public void clear() {
this.data = null;
this.type = Type.STRING;
}
@Override
public int compareTo(TObject other) {
if(!getClass().equals(other.getClass())) {
return getClass().getName().compareTo(other.getClass().getName());
}
int lastComparison = 0;
lastComparison = Boolean.valueOf(isSetData()).compareTo(
other.isSetData());
if(lastComparison != 0) {
return lastComparison;
}
if(isSetData()) {
lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.data,
other.data);
if(lastComparison != 0) {
return lastComparison;
}
}
lastComparison = Boolean.valueOf(isSetType()).compareTo(
other.isSetType());
if(lastComparison != 0) {
return lastComparison;
}
if(isSetType()) {
lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.type,
other.type);
if(lastComparison != 0) {
return lastComparison;
}
}
return 0;
}
public TObject deepCopy() {
return new TObject(this);
}
@Override
public boolean equals(Object obj) {
if(obj instanceof TObject) {
TObject other = (TObject) obj;
return bufferForData().equals(other.bufferForData())
&& getInternalType() == other.getInternalType();
}
return false;
}
public _Fields fieldForId(int fieldId) {
return _Fields.findByThriftId(fieldId);
}
public byte[] getData() {
setData(org.apache.thrift.TBaseHelper.rightSize(data));
return data == null ? null : data.array();
}
public Object getFieldValue(_Fields field) {
switch (field) {
case DATA:
return getData();
case TYPE:
return getType();
}
throw new IllegalStateException();
}
/**
*
* @see Type
*/
public Type getType() {
return this.type;
}
@Override
public int hashCode() {
return Arrays.hashCode(new int[] { data.hashCode(),
getInternalType().ordinal() });
}
/**
* Returns true if field corresponding to fieldID is set (has been assigned
* a value) and false otherwise
*/
public boolean isSet(_Fields field) {
if(field == null) {
throw new IllegalArgumentException();
}
switch (field) {
case DATA:
return isSetData();
case TYPE:
return isSetType();
}
throw new IllegalStateException();
}
/**
* Returns true if field data is set (has been assigned a value) and false
* otherwise
*/
public boolean isSetData() {
return this.data != null;
}
/**
* Returns true if field type is set (has been assigned a value) and false
* otherwise
*/
public boolean isSetType() {
return this.type != null;
}
/**
* Return {@code true} if this TObject and {@code other} have the same
* {@code type} and are equal.
*
* @param other
* @return {@code true} if this matches {@code other}.
*/
public boolean matches(TObject other) {
return type == other.type && equals(other);
}
public void read(org.apache.thrift.protocol.TProtocol iprot)
throws org.apache.thrift.TException {
schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
}
public TObject setData(byte[] data) {
setData(data == null ? (ByteBuffer) null : ByteBuffer.wrap(data));
return this;
}
public TObject setData(ByteBuffer data) {
this.data = data;
return this;
}
public void setDataIsSet(boolean value) {
if(!value) {
this.data = null;
}
}
public void setFieldValue(_Fields field, Object value) {
switch (field) {
case DATA:
if(value == null) {
unsetData();
}
else {
setData((ByteBuffer) value);
}
break;
case TYPE:
if(value == null) {
unsetType();
}
else {
setType((Type) value);
}
break;
}
}
/**
*
* @see Type
*/
public TObject setType(Type type) {
this.type = type;
return this;
}
public void setTypeIsSet(boolean value) {
if(!value) {
this.type = null;
}
}
@Override
public String toString() {
return Convert.thriftToJava(this).toString();
}
public void unsetData() {
this.data = null;
}
public void unsetType() {
this.type = null;
}
public void validate() throws org.apache.thrift.TException {
// check for required fields
if(data == null) {
throw new org.apache.thrift.protocol.TProtocolException(
"Required field 'data' was not present! Struct: "
+ toString());
}
if(type == null) {
throw new org.apache.thrift.protocol.TProtocolException(
"Required field 'type' was not present! Struct: "
+ toString());
}
// check for sub-struct validity
}
public void write(org.apache.thrift.protocol.TProtocol oprot)
throws org.apache.thrift.TException {
schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
}
private void readObject(java.io.ObjectInputStream in)
throws java.io.IOException, ClassNotFoundException {
try {
read(new org.apache.thrift.protocol.TCompactProtocol(
new org.apache.thrift.transport.TIOStreamTransport(in)));
}
catch (org.apache.thrift.TException te) {
throw new java.io.IOException(te);
}
}
private void writeObject(java.io.ObjectOutputStream out)
throws java.io.IOException {
try {
write(new org.apache.thrift.protocol.TCompactProtocol(
new org.apache.thrift.transport.TIOStreamTransport(out)));
}
catch (org.apache.thrift.TException te) {
throw new java.io.IOException(te);
}
}
/**
* Return the {@link Type} that is used for internal operations.
*
* @return the internal type
*/
protected Type getInternalType() { // visible for testing
if(type == Type.TAG) {
return Type.STRING;
}
else {
return getType();
}
}
/**
* The set of fields this struct contains, along with convenience methods
* for finding and manipulating them.
*/
public enum _Fields implements org.apache.thrift.TFieldIdEnum {
DATA((short) 1, "data"),
/**
*
* @see Type
*/
TYPE((short) 2, "type");
/**
* Find the _Fields constant that matches name, or null if its not
* found.
*/
public static _Fields findByName(String name) {
return byName.get(name);
}
/**
* Find the _Fields constant that matches fieldId, or null if its not
* found.
*/
public static _Fields findByThriftId(int fieldId) {
switch (fieldId) {
case 1: // DATA
return DATA;
case 2: // TYPE
return TYPE;
default:
return null;
}
}
/**
* Find the _Fields constant that matches fieldId, throwing an exception
* if it is not found.
*/
public static _Fields findByThriftIdOrThrow(int fieldId) {
_Fields fields = findByThriftId(fieldId);
if(fields == null)
throw new IllegalArgumentException("Field " + fieldId
+ " doesn't exist!");
return fields;
}
private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
static {
for (_Fields field : EnumSet.allOf(_Fields.class)) {
byName.put(field.getFieldName(), field);
}
}
private final String _fieldName;
private final short _thriftId;
_Fields(short thriftId, String fieldName) {
_thriftId = thriftId;
_fieldName = fieldName;
}
public String getFieldName() {
return _fieldName;
}
public short getThriftFieldId() {
return _thriftId;
}
}
private static class TObjectStandardScheme extends StandardScheme<TObject> {
public void read(org.apache.thrift.protocol.TProtocol iprot,
TObject struct) throws org.apache.thrift.TException {
org.apache.thrift.protocol.TField schemeField;
iprot.readStructBegin();
while (true) {
schemeField = iprot.readFieldBegin();
if(schemeField.type == org.apache.thrift.protocol.TType.STOP) {
break;
}
switch (schemeField.id) {
case 1: // DATA
if(schemeField.type == org.apache.thrift.protocol.TType.STRING) {
struct.data = iprot.readBinary();
struct.setDataIsSet(true);
}
else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot,
schemeField.type);
}
break;
case 2: // TYPE
if(schemeField.type == org.apache.thrift.protocol.TType.I32) {
struct.type = Type.findByValue(iprot.readI32());
struct.setTypeIsSet(true);
}
else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot,
schemeField.type);
}
break;
default:
org.apache.thrift.protocol.TProtocolUtil.skip(iprot,
schemeField.type);
}
iprot.readFieldEnd();
}
iprot.readStructEnd();
// check for required fields of primitive type, which can't be
// checked in the validate method
struct.validate();
}
public void write(org.apache.thrift.protocol.TProtocol oprot,
TObject struct) throws org.apache.thrift.TException {
struct.validate();
oprot.writeStructBegin(STRUCT_DESC);
if(struct.data != null) {
oprot.writeFieldBegin(DATA_FIELD_DESC);
oprot.writeBinary(struct.data);
oprot.writeFieldEnd();
}
if(struct.type != null) {
oprot.writeFieldBegin(TYPE_FIELD_DESC);
oprot.writeI32(struct.type.getValue());
oprot.writeFieldEnd();
}
oprot.writeFieldStop();
oprot.writeStructEnd();
}
}
private static class TObjectStandardSchemeFactory implements SchemeFactory {
public TObjectStandardScheme getScheme() {
return new TObjectStandardScheme();
}
}
private static class TObjectTupleScheme extends TupleScheme<TObject> {
@Override
public void read(org.apache.thrift.protocol.TProtocol prot,
TObject struct) throws org.apache.thrift.TException {
TTupleProtocol iprot = (TTupleProtocol) prot;
struct.data = iprot.readBinary();
struct.setDataIsSet(true);
struct.type = Type.findByValue(iprot.readI32());
struct.setTypeIsSet(true);
}
@Override
public void write(org.apache.thrift.protocol.TProtocol prot,
TObject struct) throws org.apache.thrift.TException {
TTupleProtocol oprot = (TTupleProtocol) prot;
oprot.writeBinary(struct.data);
oprot.writeI32(struct.type.getValue());
}
}
private static class TObjectTupleSchemeFactory implements SchemeFactory {
public TObjectTupleScheme getScheme() {
return new TObjectTupleScheme();
}
}
}
|
concourse/src/main/java/org/cinchapi/concourse/thrift/TObject.java
|
/**
* Autogenerated by Thrift Compiler (0.9.2)
*
* DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
*
* @generated
*/
package org.cinchapi.concourse.thrift;
import org.apache.thrift.scheme.IScheme;
import org.apache.thrift.scheme.SchemeFactory;
import org.apache.thrift.scheme.StandardScheme;
import org.apache.thrift.scheme.TupleScheme;
import org.apache.thrift.protocol.TTupleProtocol;
import org.apache.thrift.protocol.TProtocolException;
import org.apache.thrift.EncodingUtils;
import org.apache.thrift.TException;
import org.apache.thrift.async.AsyncMethodCallback;
import org.apache.thrift.server.AbstractNonblockingServer.*;
import org.cinchapi.concourse.util.ByteBuffers;
import org.cinchapi.concourse.util.Convert;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.util.HashMap;
import java.util.EnumMap;
import java.util.Set;
import java.util.HashSet;
import java.util.EnumSet;
import java.util.Collections;
import java.util.BitSet;
import java.nio.ByteBuffer;
import java.util.Arrays;
import javax.annotation.Generated;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@SuppressWarnings({ "cast", "rawtypes", "serial", "unchecked", "unused" })
/**
* A lightweight wrapper for a typed Object that has been encoded
* as binary data.
*/
@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-2-22")
public class TObject implements
org.apache.thrift.TBase<TObject, TObject._Fields>,
java.io.Serializable,
Cloneable,
Comparable<TObject> {
/**
 * Represents a null object that can be passed across the wire.
 */
public static final TObject NULL = new TObject();
static {
    NULL.setType(Type.NULL);
    // Both 'data' and 'type' are required fields, and several members
    // (hashCode(), the scheme write paths, validate()) dereference 'data'
    // directly. Give the NULL sentinel a small non-null payload so that
    // hashing or serializing it cannot throw a NullPointerException or
    // fail required-field validation.
    NULL.setData(ByteBuffer.allocate(1));
}
// isset id assignments
public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
private static final org.apache.thrift.protocol.TField DATA_FIELD_DESC = new org.apache.thrift.protocol.TField(
"data", org.apache.thrift.protocol.TType.STRING, (short) 1);
private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct(
"TObject");
private static final org.apache.thrift.protocol.TField TYPE_FIELD_DESC = new org.apache.thrift.protocol.TField(
"type", org.apache.thrift.protocol.TType.I32, (short) 2);
static {
schemes.put(StandardScheme.class, new TObjectStandardSchemeFactory());
schemes.put(TupleScheme.class, new TObjectTupleSchemeFactory());
}
static {
Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(
_Fields.class);
tmpMap.put(_Fields.DATA, new org.apache.thrift.meta_data.FieldMetaData(
"data", org.apache.thrift.TFieldRequirementType.REQUIRED,
new org.apache.thrift.meta_data.FieldValueMetaData(
org.apache.thrift.protocol.TType.STRING, true)));
tmpMap.put(_Fields.TYPE, new org.apache.thrift.meta_data.FieldMetaData(
"type", org.apache.thrift.TFieldRequirementType.REQUIRED,
new org.apache.thrift.meta_data.EnumMetaData(
org.apache.thrift.protocol.TType.ENUM, Type.class)));
metaDataMap = Collections.unmodifiableMap(tmpMap);
org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(
TObject.class, metaDataMap);
}
public ByteBuffer data; // required
/**
*
* @see Type
*/
public Type type; // required
public TObject() {
this.type = Type.STRING;
}
public TObject(ByteBuffer data, Type type) {
this.data = data;
this.type = type;
}
/**
* Performs a deep copy on <i>other</i>.
*/
public TObject(TObject other) {
if(other.isSetData()) {
this.data = org.apache.thrift.TBaseHelper.copyBinary(other.data);
}
if(other.isSetType()) {
this.type = other.type;
}
}
/**
 * Return a view of the binary payload that callers cannot use to mutate
 * this struct (presumably a read-only duplicate — see
 * {@code ByteBuffers.asReadOnlyBuffer}).
 * <p>
 * NOTE(review): passes {@code data} through unchecked, so this fails when
 * the field has not been set — TODO confirm callers check isSetData().
 */
public ByteBuffer bufferForData() {
    return ByteBuffers.asReadOnlyBuffer(data);
}
@Override
public void clear() {
this.data = null;
this.type = Type.STRING;
}
@Override
public int compareTo(TObject other) {
if(!getClass().equals(other.getClass())) {
return getClass().getName().compareTo(other.getClass().getName());
}
int lastComparison = 0;
lastComparison = Boolean.valueOf(isSetData()).compareTo(
other.isSetData());
if(lastComparison != 0) {
return lastComparison;
}
if(isSetData()) {
lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.data,
other.data);
if(lastComparison != 0) {
return lastComparison;
}
}
lastComparison = Boolean.valueOf(isSetType()).compareTo(
other.isSetType());
if(lastComparison != 0) {
return lastComparison;
}
if(isSetType()) {
lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.type,
other.type);
if(lastComparison != 0) {
return lastComparison;
}
}
return 0;
}
public TObject deepCopy() {
return new TObject(this);
}
@Override
public boolean equals(Object obj) {
    // Equality is defined over the binary payload and the *internal* type,
    // so a TAG compares equal to a STRING with the same bytes (see
    // getInternalType()). Any non-TObject argument is unequal.
    if(obj instanceof TObject) {
        TObject other = (TObject) obj;
        return bufferForData().equals(other.bufferForData())
                && getInternalType() == other.getInternalType();
    }
    return false;
}
public _Fields fieldForId(int fieldId) {
return _Fields.findByThriftId(fieldId);
}
public byte[] getData() {
setData(org.apache.thrift.TBaseHelper.rightSize(data));
return data == null ? null : data.array();
}
public Object getFieldValue(_Fields field) {
switch (field) {
case DATA:
return getData();
case TYPE:
return getType();
}
throw new IllegalStateException();
}
/**
*
* @see Type
*/
public Type getType() {
return this.type;
}
@Override
public int hashCode() {
    // Consistent with equals(): both the payload and the internal type
    // participate. NOTE(review): dereferences 'data' directly, so this
    // throws NullPointerException when the data field is unset.
    return Arrays.hashCode(new int[] { data.hashCode(),
            getInternalType().ordinal() });
}
/**
* Returns true if field corresponding to fieldID is set (has been assigned
* a value) and false otherwise
*/
public boolean isSet(_Fields field) {
if(field == null) {
throw new IllegalArgumentException();
}
switch (field) {
case DATA:
return isSetData();
case TYPE:
return isSetType();
}
throw new IllegalStateException();
}
/**
* Returns true if field data is set (has been assigned a value) and false
* otherwise
*/
public boolean isSetData() {
return this.data != null;
}
/**
* Returns true if field type is set (has been assigned a value) and false
* otherwise
*/
public boolean isSetType() {
return this.type != null;
}
/**
* Return {@code true} if this TObject and {@code other} have the same
* {@code type} and are equal.
*
* @param other
* @return {@code true} if this matches {@code other}.
*/
public boolean matches(TObject other) {
return type == other.type && equals(other);
}
public void read(org.apache.thrift.protocol.TProtocol iprot)
throws org.apache.thrift.TException {
schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
}
public TObject setData(byte[] data) {
setData(data == null ? (ByteBuffer) null : ByteBuffer.wrap(data));
return this;
}
public TObject setData(ByteBuffer data) {
this.data = data;
return this;
}
public void setDataIsSet(boolean value) {
if(!value) {
this.data = null;
}
}
public void setFieldValue(_Fields field, Object value) {
switch (field) {
case DATA:
if(value == null) {
unsetData();
}
else {
setData((ByteBuffer) value);
}
break;
case TYPE:
if(value == null) {
unsetType();
}
else {
setType((Type) value);
}
break;
}
}
/**
*
* @see Type
*/
public TObject setType(Type type) {
this.type = type;
return this;
}
public void setTypeIsSet(boolean value) {
if(!value) {
this.type = null;
}
}
@Override
public String toString() {
return Convert.thriftToJava(this).toString();
}
public void unsetData() {
this.data = null;
}
public void unsetType() {
this.type = null;
}
/**
 * Verify that both required fields ('data' and 'type') have been set.
 *
 * @throws org.apache.thrift.protocol.TProtocolException if a required
 *         field is missing
 */
public void validate() throws org.apache.thrift.TException {
    // check for required fields
    if(data == null) {
        throw new org.apache.thrift.protocol.TProtocolException(
                "Required field 'data' was not present! Struct: "
                        + toString());
    }
    if(type == null) {
        throw new org.apache.thrift.protocol.TProtocolException(
                "Required field 'type' was not present! Struct: "
                        + toString());
    }
    // check for sub-struct validity
}
public void write(org.apache.thrift.protocol.TProtocol oprot)
throws org.apache.thrift.TException {
schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
}
private void readObject(java.io.ObjectInputStream in)
throws java.io.IOException, ClassNotFoundException {
try {
read(new org.apache.thrift.protocol.TCompactProtocol(
new org.apache.thrift.transport.TIOStreamTransport(in)));
}
catch (org.apache.thrift.TException te) {
throw new java.io.IOException(te);
}
}
private void writeObject(java.io.ObjectOutputStream out)
throws java.io.IOException {
try {
write(new org.apache.thrift.protocol.TCompactProtocol(
new org.apache.thrift.transport.TIOStreamTransport(out)));
}
catch (org.apache.thrift.TException te) {
throw new java.io.IOException(te);
}
}
/**
* Return the {@link Type} that is used for internal operations.
*
* @return the internal type
*/
protected Type getInternalType() { // visible for testing
if(type == Type.TAG) {
return Type.STRING;
}
else {
return getType();
}
}
/**
* The set of fields this struct contains, along with convenience methods
* for finding and manipulating them.
*/
public enum _Fields implements org.apache.thrift.TFieldIdEnum {
DATA((short) 1, "data"),
/**
*
* @see Type
*/
TYPE((short) 2, "type");
/**
* Find the _Fields constant that matches name, or null if its not
* found.
*/
public static _Fields findByName(String name) {
return byName.get(name);
}
/**
* Find the _Fields constant that matches fieldId, or null if its not
* found.
*/
public static _Fields findByThriftId(int fieldId) {
switch (fieldId) {
case 1: // DATA
return DATA;
case 2: // TYPE
return TYPE;
default:
return null;
}
}
/**
* Find the _Fields constant that matches fieldId, throwing an exception
* if it is not found.
*/
public static _Fields findByThriftIdOrThrow(int fieldId) {
_Fields fields = findByThriftId(fieldId);
if(fields == null)
throw new IllegalArgumentException("Field " + fieldId
+ " doesn't exist!");
return fields;
}
private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
static {
for (_Fields field : EnumSet.allOf(_Fields.class)) {
byName.put(field.getFieldName(), field);
}
}
private final String _fieldName;
private final short _thriftId;
_Fields(short thriftId, String fieldName) {
_thriftId = thriftId;
_fieldName = fieldName;
}
public String getFieldName() {
return _fieldName;
}
public short getThriftFieldId() {
return _thriftId;
}
}
private static class TObjectStandardScheme extends StandardScheme<TObject> {
public void read(org.apache.thrift.protocol.TProtocol iprot,
TObject struct) throws org.apache.thrift.TException {
org.apache.thrift.protocol.TField schemeField;
iprot.readStructBegin();
while (true) {
schemeField = iprot.readFieldBegin();
if(schemeField.type == org.apache.thrift.protocol.TType.STOP) {
break;
}
switch (schemeField.id) {
case 1: // DATA
if(schemeField.type == org.apache.thrift.protocol.TType.STRING) {
struct.data = iprot.readBinary();
struct.setDataIsSet(true);
}
else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot,
schemeField.type);
}
break;
case 2: // TYPE
if(schemeField.type == org.apache.thrift.protocol.TType.I32) {
struct.type = Type.findByValue(iprot.readI32());
struct.setTypeIsSet(true);
}
else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot,
schemeField.type);
}
break;
default:
org.apache.thrift.protocol.TProtocolUtil.skip(iprot,
schemeField.type);
}
iprot.readFieldEnd();
}
iprot.readStructEnd();
// check for required fields of primitive type, which can't be
// checked in the validate method
struct.validate();
}
public void write(org.apache.thrift.protocol.TProtocol oprot,
TObject struct) throws org.apache.thrift.TException {
struct.validate();
oprot.writeStructBegin(STRUCT_DESC);
if(struct.data != null) {
oprot.writeFieldBegin(DATA_FIELD_DESC);
oprot.writeBinary(struct.data);
oprot.writeFieldEnd();
}
if(struct.type != null) {
oprot.writeFieldBegin(TYPE_FIELD_DESC);
oprot.writeI32(struct.type.getValue());
oprot.writeFieldEnd();
}
oprot.writeFieldStop();
oprot.writeStructEnd();
}
}
/**
 * Factory that supplies {@link TObjectStandardScheme} instances to the
 * Thrift runtime (registered in the schemes map).
 */
private static class TObjectStandardSchemeFactory implements SchemeFactory {
    public TObjectStandardScheme getScheme() {
        return new TObjectStandardScheme();
    }
}
/**
 * Compact tuple encoding for TObject. Both fields are required (see the
 * field metadata), so they are read/written back-to-back in a fixed order
 * with no per-field headers or presence bitset.
 */
private static class TObjectTupleScheme extends TupleScheme<TObject> {
    @Override
    public void read(org.apache.thrift.protocol.TProtocol prot,
            TObject struct) throws org.apache.thrift.TException {
        TTupleProtocol iprot = (TTupleProtocol) prot;
        // Fixed order: data (binary) first, then type (i32 enum value).
        struct.data = iprot.readBinary();
        struct.setDataIsSet(true);
        struct.type = Type.findByValue(iprot.readI32());
        struct.setTypeIsSet(true);
    }
    @Override
    public void write(org.apache.thrift.protocol.TProtocol prot,
            TObject struct) throws org.apache.thrift.TException {
        TTupleProtocol oprot = (TTupleProtocol) prot;
        // Must mirror read(): data first, then the enum's integer value.
        oprot.writeBinary(struct.data);
        oprot.writeI32(struct.type.getValue());
    }
}
/**
 * Factory that supplies {@link TObjectTupleScheme} instances to the
 * Thrift runtime (registered in the schemes map).
 */
private static class TObjectTupleSchemeFactory implements SchemeFactory {
    public TObjectTupleScheme getScheme() {
        return new TObjectTupleScheme();
    }
}
}
|
add small bytebuffer to TObject#NULL to prevent NPE issues on serialization
|
concourse/src/main/java/org/cinchapi/concourse/thrift/TObject.java
|
add small bytebuffer to TObject#NULL to prevent NPE issues on serialization
|
|
Java
|
apache-2.0
|
bb5b149b556a6f295c3bad597a66edf83c2e73eb
| 0
|
greenrobot/EventBus
|
/*
* Copyright (C) 2012-2016 Markus Junginger, greenrobot (http://greenrobot.org)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.greenrobot.eventbus;
import android.annotation.TargetApi;
import org.greenrobot.eventbus.meta.SubscriberInfo;
import org.greenrobot.eventbus.meta.SubscriberInfoIndex;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.logging.Level;
class SubscriberMethodFinder {
/*
* In newer class files, compilers may add methods. Those are called bridge or synthetic methods.
* EventBus must ignore both. There modifiers are not public but defined in the Java class file format:
* http://docs.oracle.com/javase/specs/jvms/se7/html/jvms-4.html#jvms-4.6-200-A.1
*/
private static final int BRIDGE = 0x40;
private static final int SYNTHETIC = 0x1000;
private static final int MODIFIERS_IGNORE = Modifier.ABSTRACT | Modifier.STATIC | BRIDGE | SYNTHETIC;
private static final Map<Class<?>, List<SubscriberMethod>> METHOD_CACHE = new ConcurrentHashMap<>();
private List<SubscriberInfoIndex> subscriberInfoIndexes;
private final boolean strictMethodVerification;
private final boolean ignoreGeneratedIndex;
private static final int POOL_SIZE = 4;
private static final FindState[] FIND_STATE_POOL = new FindState[POOL_SIZE];
SubscriberMethodFinder(List<SubscriberInfoIndex> subscriberInfoIndexes, boolean strictMethodVerification,
boolean ignoreGeneratedIndex) {
this.subscriberInfoIndexes = subscriberInfoIndexes;
this.strictMethodVerification = strictMethodVerification;
this.ignoreGeneratedIndex = ignoreGeneratedIndex;
}
/**
 * Return all @Subscribe methods of the given class and its super classes,
 * using the per-class cache when possible.
 *
 * @throws EventBusException when no subscriber method is found
 */
List<SubscriberMethod> findSubscriberMethods(Class<?> subscriberClass) {
    // Fast path: reuse a previously computed (and cached) result.
    List<SubscriberMethod> cached = METHOD_CACHE.get(subscriberClass);
    if (cached != null) {
        return cached;
    }
    // Either pure reflection or the generated index, depending on config.
    List<SubscriberMethod> subscriberMethods = ignoreGeneratedIndex
            ? findUsingReflection(subscriberClass)
            : findUsingInfo(subscriberClass);
    if (subscriberMethods.isEmpty()) {
        throw new EventBusException("Subscriber " + subscriberClass
                + " and its super classes have no public methods with the @Subscribe annotation");
    }
    METHOD_CACHE.put(subscriberClass, subscriberMethods);
    return subscriberMethods;
}
private List<SubscriberMethod> findUsingInfo(Class<?> subscriberClass) {
FindState findState = prepareFindState();
findState.initForSubscriber(subscriberClass);
while (findState.clazz != null) {
findState.subscriberInfo = getSubscriberInfo(findState);
if (findState.subscriberInfo != null) {
SubscriberMethod[] array = findState.subscriberInfo.getSubscriberMethods();
for (SubscriberMethod subscriberMethod : array) {
if (findState.checkAdd(subscriberMethod.method, subscriberMethod.eventType)) {
findState.subscriberMethods.add(subscriberMethod);
}
}
} else {
findUsingReflectionInSingleClass(findState);
}
findState.moveToSuperclass();
}
return getMethodsAndRelease(findState);
}
private List<SubscriberMethod> getMethodsAndRelease(FindState findState) {
List<SubscriberMethod> subscriberMethods = new ArrayList<>(findState.subscriberMethods);
findState.recycle();
synchronized (FIND_STATE_POOL) {
for (int i = 0; i < POOL_SIZE; i++) {
if (FIND_STATE_POOL[i] == null) {
FIND_STATE_POOL[i] = findState;
break;
}
}
}
return subscriberMethods;
}
private FindState prepareFindState() {
synchronized (FIND_STATE_POOL) {
for (int i = 0; i < POOL_SIZE; i++) {
FindState state = FIND_STATE_POOL[i];
if (state != null) {
FIND_STATE_POOL[i] = null;
return state;
}
}
}
return new FindState();
}
private SubscriberInfo getSubscriberInfo(FindState findState) {
if (findState.subscriberInfo != null && findState.subscriberInfo.getSuperSubscriberInfo() != null) {
SubscriberInfo superclassInfo = findState.subscriberInfo.getSuperSubscriberInfo();
if (findState.clazz == superclassInfo.getSubscriberClass()) {
return superclassInfo;
}
}
if (subscriberInfoIndexes != null) {
for (SubscriberInfoIndex index : subscriberInfoIndexes) {
SubscriberInfo info = index.getSubscriberInfo(findState.clazz);
if (info != null) {
return info;
}
}
}
return null;
}
private List<SubscriberMethod> findUsingReflection(Class<?> subscriberClass) {
FindState findState = prepareFindState();
findState.initForSubscriber(subscriberClass);
while (findState.clazz != null) {
findUsingReflectionInSingleClass(findState);
findState.moveToSuperclass();
}
return getMethodsAndRelease(findState);
}
/**
 * Scan a single class (findState.clazz) via reflection and add every valid
 * @Subscribe method to findState.
 *
 * @throws EventBusException when strict verification is on and a method is
 *         annotated but not a legal subscriber method
 */
private void findUsingReflectionInSingleClass(FindState findState) {
    Method[] methods;
    try {
        // This is faster than getMethods, especially when subscribers are fat classes like Activities
        methods = findState.clazz.getDeclaredMethods();
    } catch (Throwable th) {
        // Workaround for java.lang.NoClassDefFoundError, see https://github.com/greenrobot/EventBus/issues/149
        try {
            methods = findState.clazz.getMethods();
        } catch (LinkageError error) { // super class of NoClassDefFoundError to be a bit more broad...
            String msg = "Could not inspect methods of " + findState.clazz.getName() +
                    ". Please consider using EventBus annotation processor to avoid reflection.";
            throwLinkageError(error, msg);
            return;
        }
        // getMethods() already includes inherited public methods, so the
        // superclass walk can stop after this class.
        findState.skipSuperClasses = true;
    }
    for (Method method : methods) {
        int modifiers = method.getModifiers();
        if ((modifiers & Modifier.PUBLIC) != 0 && (modifiers & MODIFIERS_IGNORE) == 0) {
            Class<?>[] parameterTypes = method.getParameterTypes();
            if (parameterTypes.length == 1) {
                Subscribe subscribeAnnotation = method.getAnnotation(Subscribe.class);
                if (subscribeAnnotation != null) {
                    Class<?> eventType = parameterTypes[0];
                    // checkAdd filters overridden/duplicate methods.
                    if (findState.checkAdd(method, eventType)) {
                        ThreadMode threadMode = subscribeAnnotation.threadMode();
                        findState.subscriberMethods.add(new SubscriberMethod(method, eventType, threadMode,
                                subscribeAnnotation.priority(), subscribeAnnotation.sticky()));
                    }
                }
            } else if (strictMethodVerification && method.isAnnotationPresent(Subscribe.class)) {
                String methodName = method.getDeclaringClass().getName() + "." + method.getName();
                // Fixed message: a space was missing before "must".
                throw new EventBusException("@Subscribe method " + methodName +
                        " must have exactly 1 parameter but has " + parameterTypes.length);
            }
        } else if (strictMethodVerification && method.isAnnotationPresent(Subscribe.class)) {
            String methodName = method.getDeclaringClass().getName() + "." + method.getName();
            // Fixed message grammar: "a illegal" -> "an illegal".
            throw new EventBusException(methodName +
                    " is an illegal @Subscribe method: must be public, non-static, and non-abstract");
        }
    }
}
/**
 * Rethrow the given LinkageError, wrapping it with a more helpful message
 * when the platform supports LinkageError(String, Throwable) (Java 7 /
 * Android API 19); otherwise log the extra context and rethrow as-is.
 */
@TargetApi(19)
private void throwLinkageError(LinkageError error, String msg) {
    LinkageError toThrow = error;
    try {
        // Wrapping only works with Java 7 / Android API 19
        toThrow = new LinkageError(msg, error);
    } catch (Throwable ex) {
        // Can not wrap, log additional info instead
        Logger.Default.get().log(Level.SEVERE, msg);
    }
    throw toThrow;
}
static void clearCaches() {
METHOD_CACHE.clear();
}
static class FindState {
final List<SubscriberMethod> subscriberMethods = new ArrayList<>();
final Map<Class, Object> anyMethodByEventType = new HashMap<>();
final Map<String, Class> subscriberClassByMethodKey = new HashMap<>();
final StringBuilder methodKeyBuilder = new StringBuilder(128);
Class<?> subscriberClass;
Class<?> clazz;
boolean skipSuperClasses;
SubscriberInfo subscriberInfo;
void initForSubscriber(Class<?> subscriberClass) {
this.subscriberClass = clazz = subscriberClass;
skipSuperClasses = false;
subscriberInfo = null;
}
void recycle() {
subscriberMethods.clear();
anyMethodByEventType.clear();
subscriberClassByMethodKey.clear();
methodKeyBuilder.setLength(0);
subscriberClass = null;
clazz = null;
skipSuperClasses = false;
subscriberInfo = null;
}
boolean checkAdd(Method method, Class<?> eventType) {
// 2 level check: 1st level with event type only (fast), 2nd level with complete signature when required.
// Usually a subscriber doesn't have methods listening to the same event type.
Object existing = anyMethodByEventType.put(eventType, method);
if (existing == null) {
return true;
} else {
if (existing instanceof Method) {
if (!checkAddWithMethodSignature((Method) existing, eventType)) {
// Paranoia check
throw new IllegalStateException();
}
// Put any non-Method object to "consume" the existing Method
anyMethodByEventType.put(eventType, this);
}
return checkAddWithMethodSignature(method, eventType);
}
}
private boolean checkAddWithMethodSignature(Method method, Class<?> eventType) {
methodKeyBuilder.setLength(0);
methodKeyBuilder.append(method.getName());
methodKeyBuilder.append('>').append(eventType.getName());
String methodKey = methodKeyBuilder.toString();
Class<?> methodClass = method.getDeclaringClass();
Class<?> methodClassOld = subscriberClassByMethodKey.put(methodKey, methodClass);
if (methodClassOld == null || methodClassOld.isAssignableFrom(methodClass)) {
// Only add if not already found in a sub class
return true;
} else {
// Revert the put, old class is further down the class hierarchy
subscriberClassByMethodKey.put(methodKey, methodClassOld);
return false;
}
}
/**
 * Advance clazz to its superclass, or to null when the walk is over.
 * System (java/javax/android/androidx) classes terminate the walk: they
 * contain no subscribers, and skipping them avoids needless reflection
 * and some ClassNotFoundException cases (see FAQ for background).
 */
void moveToSuperclass() {
    if (skipSuperClasses) {
        clazz = null;
    } else {
        Class<?> superclass = clazz.getSuperclass();
        String name = superclass.getName();
        boolean isSystemClass = name.startsWith("java.") || name.startsWith("javax.")
                || name.startsWith("android.") || name.startsWith("androidx.");
        clazz = isSystemClass ? null : superclass;
    }
}
}
}
|
EventBus/src/org/greenrobot/eventbus/SubscriberMethodFinder.java
|
/*
* Copyright (C) 2012-2016 Markus Junginger, greenrobot (http://greenrobot.org)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.greenrobot.eventbus;
import org.greenrobot.eventbus.meta.SubscriberInfo;
import org.greenrobot.eventbus.meta.SubscriberInfoIndex;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
class SubscriberMethodFinder {
/*
* In newer class files, compilers may add methods. Those are called bridge or synthetic methods.
* EventBus must ignore both. There modifiers are not public but defined in the Java class file format:
* http://docs.oracle.com/javase/specs/jvms/se7/html/jvms-4.html#jvms-4.6-200-A.1
*/
private static final int BRIDGE = 0x40;
private static final int SYNTHETIC = 0x1000;
private static final int MODIFIERS_IGNORE = Modifier.ABSTRACT | Modifier.STATIC | BRIDGE | SYNTHETIC;
private static final Map<Class<?>, List<SubscriberMethod>> METHOD_CACHE = new ConcurrentHashMap<>();
private List<SubscriberInfoIndex> subscriberInfoIndexes;
private final boolean strictMethodVerification;
private final boolean ignoreGeneratedIndex;
private static final int POOL_SIZE = 4;
private static final FindState[] FIND_STATE_POOL = new FindState[POOL_SIZE];
SubscriberMethodFinder(List<SubscriberInfoIndex> subscriberInfoIndexes, boolean strictMethodVerification,
boolean ignoreGeneratedIndex) {
this.subscriberInfoIndexes = subscriberInfoIndexes;
this.strictMethodVerification = strictMethodVerification;
this.ignoreGeneratedIndex = ignoreGeneratedIndex;
}
List<SubscriberMethod> findSubscriberMethods(Class<?> subscriberClass) {
List<SubscriberMethod> subscriberMethods = METHOD_CACHE.get(subscriberClass);
if (subscriberMethods != null) {
return subscriberMethods;
}
if (ignoreGeneratedIndex) {
subscriberMethods = findUsingReflection(subscriberClass);
} else {
subscriberMethods = findUsingInfo(subscriberClass);
}
if (subscriberMethods.isEmpty()) {
throw new EventBusException("Subscriber " + subscriberClass
+ " and its super classes have no public methods with the @Subscribe annotation");
} else {
METHOD_CACHE.put(subscriberClass, subscriberMethods);
return subscriberMethods;
}
}
private List<SubscriberMethod> findUsingInfo(Class<?> subscriberClass) {
FindState findState = prepareFindState();
findState.initForSubscriber(subscriberClass);
while (findState.clazz != null) {
findState.subscriberInfo = getSubscriberInfo(findState);
if (findState.subscriberInfo != null) {
SubscriberMethod[] array = findState.subscriberInfo.getSubscriberMethods();
for (SubscriberMethod subscriberMethod : array) {
if (findState.checkAdd(subscriberMethod.method, subscriberMethod.eventType)) {
findState.subscriberMethods.add(subscriberMethod);
}
}
} else {
findUsingReflectionInSingleClass(findState);
}
findState.moveToSuperclass();
}
return getMethodsAndRelease(findState);
}
private List<SubscriberMethod> getMethodsAndRelease(FindState findState) {
List<SubscriberMethod> subscriberMethods = new ArrayList<>(findState.subscriberMethods);
findState.recycle();
synchronized (FIND_STATE_POOL) {
for (int i = 0; i < POOL_SIZE; i++) {
if (FIND_STATE_POOL[i] == null) {
FIND_STATE_POOL[i] = findState;
break;
}
}
}
return subscriberMethods;
}
private FindState prepareFindState() {
synchronized (FIND_STATE_POOL) {
for (int i = 0; i < POOL_SIZE; i++) {
FindState state = FIND_STATE_POOL[i];
if (state != null) {
FIND_STATE_POOL[i] = null;
return state;
}
}
}
return new FindState();
}
private SubscriberInfo getSubscriberInfo(FindState findState) {
if (findState.subscriberInfo != null && findState.subscriberInfo.getSuperSubscriberInfo() != null) {
SubscriberInfo superclassInfo = findState.subscriberInfo.getSuperSubscriberInfo();
if (findState.clazz == superclassInfo.getSubscriberClass()) {
return superclassInfo;
}
}
if (subscriberInfoIndexes != null) {
for (SubscriberInfoIndex index : subscriberInfoIndexes) {
SubscriberInfo info = index.getSubscriberInfo(findState.clazz);
if (info != null) {
return info;
}
}
}
return null;
}
private List<SubscriberMethod> findUsingReflection(Class<?> subscriberClass) {
FindState findState = prepareFindState();
findState.initForSubscriber(subscriberClass);
while (findState.clazz != null) {
findUsingReflectionInSingleClass(findState);
findState.moveToSuperclass();
}
return getMethodsAndRelease(findState);
}
/**
 * Scan a single class (findState.clazz) via reflection and add every valid
 * @Subscribe method to findState.
 *
 * @throws EventBusException when the class cannot be inspected at all, or
 *         when strict verification is on and an annotated method is not a
 *         legal subscriber method
 */
private void findUsingReflectionInSingleClass(FindState findState) {
    Method[] methods;
    try {
        // This is faster than getMethods, especially when subscribers are fat classes like Activities
        methods = findState.clazz.getDeclaredMethods();
    } catch (Throwable th) {
        // Workaround for java.lang.NoClassDefFoundError, see https://github.com/greenrobot/EventBus/issues/149
        try {
            methods = findState.clazz.getMethods();
        } catch (LinkageError error) { // super class of NoClassDefFoundError to be a bit more broad...
            // Previously this error escaped with no context; wrap it with an
            // actionable message instead of a bare NoClassDefFoundError.
            throw new EventBusException("Could not inspect methods of "
                    + findState.clazz.getName()
                    + ". Please consider using EventBus annotation processor to avoid reflection.",
                    error);
        }
        // getMethods() already includes inherited public methods, so the
        // superclass walk can stop after this class.
        findState.skipSuperClasses = true;
    }
    for (Method method : methods) {
        int modifiers = method.getModifiers();
        if ((modifiers & Modifier.PUBLIC) != 0 && (modifiers & MODIFIERS_IGNORE) == 0) {
            Class<?>[] parameterTypes = method.getParameterTypes();
            if (parameterTypes.length == 1) {
                Subscribe subscribeAnnotation = method.getAnnotation(Subscribe.class);
                if (subscribeAnnotation != null) {
                    Class<?> eventType = parameterTypes[0];
                    // checkAdd filters overridden/duplicate methods.
                    if (findState.checkAdd(method, eventType)) {
                        ThreadMode threadMode = subscribeAnnotation.threadMode();
                        findState.subscriberMethods.add(new SubscriberMethod(method, eventType, threadMode,
                                subscribeAnnotation.priority(), subscribeAnnotation.sticky()));
                    }
                }
            } else if (strictMethodVerification && method.isAnnotationPresent(Subscribe.class)) {
                String methodName = method.getDeclaringClass().getName() + "." + method.getName();
                // Fixed message: a space was missing before "must".
                throw new EventBusException("@Subscribe method " + methodName +
                        " must have exactly 1 parameter but has " + parameterTypes.length);
            }
        } else if (strictMethodVerification && method.isAnnotationPresent(Subscribe.class)) {
            String methodName = method.getDeclaringClass().getName() + "." + method.getName();
            // Fixed message grammar: "a illegal" -> "an illegal".
            throw new EventBusException(methodName +
                    " is an illegal @Subscribe method: must be public, non-static, and non-abstract");
        }
    }
}
/** Clears the shared subscriber-method cache; primarily useful in tests. */
static void clearCaches() {
    METHOD_CACHE.clear();
}
/**
 * Mutable scratch state used while scanning a subscriber class hierarchy for
 * {@code @Subscribe} methods. Instances are recycled, so {@link #initForSubscriber}
 * and {@link #recycle} must fully reset every field.
 */
static class FindState {
    // Methods collected so far for the subscriber currently being scanned.
    final List<SubscriberMethod> subscriberMethods = new ArrayList<>();
    // 1st-level duplicate check: event type -> the single Method seen for it, replaced by a
    // non-Method marker object once a second method for the same event type is encountered.
    final Map<Class, Object> anyMethodByEventType = new HashMap<>();
    // 2nd-level duplicate check: "methodName>eventTypeName" -> declaring class found so far.
    final Map<String, Class> subscriberClassByMethodKey = new HashMap<>();
    // Reused builder to avoid a fresh allocation per method-key computation.
    final StringBuilder methodKeyBuilder = new StringBuilder(128);
    Class<?> subscriberClass;
    // The class currently being scanned; walks up the hierarchy, null ends the walk.
    Class<?> clazz;
    boolean skipSuperClasses;
    SubscriberInfo subscriberInfo;

    /** Resets this state to begin scanning {@code subscriberClass}. */
    void initForSubscriber(Class<?> subscriberClass) {
        this.subscriberClass = clazz = subscriberClass;
        skipSuperClasses = false;
        subscriberInfo = null;
    }

    /** Clears all collections and references so the instance can be pooled and reused. */
    void recycle() {
        subscriberMethods.clear();
        anyMethodByEventType.clear();
        subscriberClassByMethodKey.clear();
        methodKeyBuilder.setLength(0);
        subscriberClass = null;
        clazz = null;
        skipSuperClasses = false;
        subscriberInfo = null;
    }

    /**
     * Returns true if {@code method} should be added for {@code eventType}, filtering out
     * methods already found in a subclass with the same signature.
     */
    boolean checkAdd(Method method, Class<?> eventType) {
        // 2 level check: 1st level with event type only (fast), 2nd level with complete signature when required.
        // Usually a subscriber doesn't have methods listening to the same event type.
        Object existing = anyMethodByEventType.put(eventType, method);
        if (existing == null) {
            return true;
        } else {
            if (existing instanceof Method) {
                if (!checkAddWithMethodSignature((Method) existing, eventType)) {
                    // Paranoia check
                    throw new IllegalStateException();
                }
                // Put any non-Method object to "consume" the existing Method
                anyMethodByEventType.put(eventType, this);
            }
            return checkAddWithMethodSignature(method, eventType);
        }
    }

    /**
     * Full-signature check: keeps only the most-derived declaration of a given
     * method-name/event-type pair.
     */
    private boolean checkAddWithMethodSignature(Method method, Class<?> eventType) {
        methodKeyBuilder.setLength(0);
        methodKeyBuilder.append(method.getName());
        methodKeyBuilder.append('>').append(eventType.getName());

        String methodKey = methodKeyBuilder.toString();
        Class<?> methodClass = method.getDeclaringClass();
        Class<?> methodClassOld = subscriberClassByMethodKey.put(methodKey, methodClass);
        if (methodClassOld == null || methodClassOld.isAssignableFrom(methodClass)) {
            // Only add if not already found in a sub class
            return true;
        } else {
            // Revert the put, old class is further down the class hierarchy
            subscriberClassByMethodKey.put(methodKey, methodClassOld);
            return false;
        }
    }

    /** Advances {@link #clazz} to its superclass, or null to terminate the scan. */
    void moveToSuperclass() {
        if (skipSuperClasses) {
            clazz = null;
        } else {
            clazz = clazz.getSuperclass();
            // Fixed: getSuperclass() returns null for Object, interfaces, and primitives;
            // without this guard clazz.getName() below would throw a NullPointerException
            // when a subscriber has no superclass left to scan.
            if (clazz != null) {
                String clazzName = clazz.getName();
                // Skip system classes, this degrades performance.
                // Also we might avoid some ClassNotFoundException (see FAQ for background).
                if (clazzName.startsWith("java.") || clazzName.startsWith("javax.") ||
                        clazzName.startsWith("android.") || clazzName.startsWith("androidx.")) {
                    clazz = null;
                }
            }
        }
    }
}
}
|
Provide additional info if Class.getMethods() fails
|
EventBus/src/org/greenrobot/eventbus/SubscriberMethodFinder.java
|
Provide additional info if Class.getMethods() fails
|
|
Java
|
apache-2.0
|
d8d9087877c01f1786271726a541fb3eeda7f939
| 0
|
chamikaramj/beam,tgroh/beam,rangadi/incubator-beam,robertwb/incubator-beam,lukecwik/incubator-beam,robertwb/incubator-beam,robertwb/incubator-beam,amarouni/incubator-beam,RyanSkraba/beam,lukecwik/incubator-beam,charlesccychen/beam,lukecwik/incubator-beam,yk5/beam,sammcveety/incubator-beam,jbonofre/incubator-beam,staslev/beam,manuzhang/incubator-beam,wtanaka/beam,apache/beam,robertwb/incubator-beam,chamikaramj/beam,rangadi/beam,markflyhigh/incubator-beam,amarouni/incubator-beam,lukecwik/incubator-beam,charlesccychen/beam,manuzhang/beam,wangyum/beam,charlesccychen/beam,apache/beam,robertwb/incubator-beam,tgroh/beam,wangyum/beam,iemejia/incubator-beam,rangadi/beam,RyanSkraba/beam,tgroh/incubator-beam,markflyhigh/incubator-beam,chamikaramj/beam,rangadi/beam,markflyhigh/incubator-beam,apache/beam,chamikaramj/beam,rangadi/incubator-beam,robertwb/incubator-beam,chamikaramj/beam,robertwb/incubator-beam,eljefe6a/incubator-beam,apache/beam,lukecwik/incubator-beam,apache/beam,RyanSkraba/beam,jbonofre/beam,apache/beam,jbonofre/beam,apache/beam,markflyhigh/incubator-beam,peihe/incubator-beam,chamikaramj/beam,manuzhang/incubator-beam,staslev/beam,wtanaka/beam,manuzhang/beam,rangadi/beam,tgroh/beam,chamikaramj/beam,charlesccychen/beam,sammcveety/incubator-beam,tgroh/incubator-beam,robertwb/incubator-beam,charlesccychen/beam,chamikaramj/beam,apache/beam,sammcveety/incubator-beam,robertwb/incubator-beam,peihe/incubator-beam,lukecwik/incubator-beam,eljefe6a/incubator-beam,RyanSkraba/beam,apache/beam,rangadi/beam,jbonofre/beam,staslev/incubator-beam,wtanaka/beam,RyanSkraba/beam,jbonofre/incubator-beam,markflyhigh/incubator-beam,chamikaramj/beam,peihe/incubator-beam,staslev/incubator-beam,mxm/incubator-beam,charlesccychen/beam,iemejia/incubator-beam,chamikaramj/beam,wangyum/beam,rangadi/beam,lukecwik/incubator-beam,yk5/beam,markflyhigh/incubator-beam,apache/beam,charlesccychen/incubator-beam,RyanSkraba/beam,lukecwik/incubator-beam,robertwb/incubator-beam,apache/beam,charlesccychen/incu
bator-beam,charlesccychen/beam,RyanSkraba/beam,tgroh/beam,lukecwik/incubator-beam,manuzhang/beam,jbonofre/beam,rangadi/incubator-beam,wangyum/beam,rangadi/beam,charlesccychen/incubator-beam,yk5/beam,markflyhigh/incubator-beam,lukecwik/incubator-beam,eljefe6a/incubator-beam,mxm/incubator-beam,staslev/beam
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.beam.runners.direct;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Supplier;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import java.util.Collection;
import java.util.Collections;
import java.util.EnumSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.beam.runners.core.SplittableParDoViaKeyedWorkItems;
import org.apache.beam.runners.core.construction.PTransformMatchers;
import org.apache.beam.runners.core.construction.PTransformTranslation;
import org.apache.beam.runners.core.construction.SplittableParDo;
import org.apache.beam.runners.direct.DirectRunner.DirectPipelineResult;
import org.apache.beam.runners.direct.TestStreamEvaluatorFactory.DirectTestStreamFactory;
import org.apache.beam.sdk.Pipeline;
import org.apache.beam.sdk.Pipeline.PipelineExecutionException;
import org.apache.beam.sdk.PipelineResult;
import org.apache.beam.sdk.PipelineRunner;
import org.apache.beam.sdk.io.Read;
import org.apache.beam.sdk.metrics.MetricResults;
import org.apache.beam.sdk.metrics.MetricsEnvironment;
import org.apache.beam.sdk.options.PipelineOptions;
import org.apache.beam.sdk.runners.PTransformOverride;
import org.apache.beam.sdk.transforms.PTransform;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.transforms.ParDo.MultiOutput;
import org.apache.beam.sdk.util.UserCodeException;
import org.apache.beam.sdk.values.PCollection;
import org.joda.time.Duration;
/**
* A {@link PipelineRunner} that executes a {@link Pipeline} within the process that constructed the
* {@link Pipeline}.
*
* <p>The {@link DirectRunner} is suitable for running a {@link Pipeline} on small scale, example,
* and test data, and should be used for ensuring that processing logic is correct. It also
* is appropriate for executing unit tests and performs additional work to ensure that behavior
* contained within a {@link Pipeline} does not break assumptions within the Beam model, to improve
* the ability to execute a {@link Pipeline} at scale on a distributed backend.
*/
public class DirectRunner extends PipelineRunner<DirectPipelineResult> {

  /**
   * Runtime model-enforcement checks that the {@link DirectRunner} can apply while executing a
   * {@link Pipeline}. Each value decides, per {@link PCollection}, whether it applies.
   */
  enum Enforcement {
    /** Checks that elements can be encoded by their {@link PCollection}'s coder. */
    ENCODABILITY {
      @Override
      public boolean appliesTo(PCollection<?> collection, DirectGraph graph) {
        // Encodability is a requirement of every PCollection, so this always applies.
        return true;
      }
    },
    /** Checks that user code does not mutate input elements. */
    IMMUTABILITY {
      @Override
      public boolean appliesTo(PCollection<?> collection, DirectGraph graph) {
        // Only output of UDF-executing transforms needs mutation checking.
        return CONTAINS_UDF.contains(graph.getProducer(collection).getTransform().getClass());
      }
    };

    /**
     * The set of {@link PTransform PTransforms} that execute a UDF. Useful for some enforcements.
     */
    private static final Set<Class<? extends PTransform>> CONTAINS_UDF =
        ImmutableSet.of(
            Read.Bounded.class, Read.Unbounded.class, ParDo.SingleOutput.class, MultiOutput.class);

    /** Returns whether this enforcement applies to the given {@link PCollection}. */
    public abstract boolean appliesTo(PCollection<?> collection, DirectGraph graph);

    ////////////////////////////////////////////////////////////////////////////////////////////////
    // Utilities for creating enforcements

    /** Returns the set of enforcements enabled by the provided {@link DirectOptions}. */
    static Set<Enforcement> enabled(DirectOptions options) {
      EnumSet<Enforcement> enabled = EnumSet.noneOf(Enforcement.class);
      if (options.isEnforceEncodability()) {
        enabled.add(ENCODABILITY);
      }
      if (options.isEnforceImmutability()) {
        enabled.add(IMMUTABILITY);
      }
      return Collections.unmodifiableSet(enabled);
    }

    /**
     * Creates the {@link BundleFactory} used during execution, wrapped with checking factories
     * for each enabled enforcement.
     */
    static BundleFactory bundleFactoryFor(
        Set<Enforcement> enforcements, DirectGraph graph) {
      // Cloning each element through its coder exercises encodability as a side effect.
      BundleFactory bundleFactory =
          enforcements.contains(Enforcement.ENCODABILITY)
              ? CloningBundleFactory.create()
              : ImmutableListBundleFactory.create();
      if (enforcements.contains(Enforcement.IMMUTABILITY)) {
        bundleFactory = ImmutabilityCheckingBundleFactory.create(bundleFactory, graph);
      }
      return bundleFactory;
    }

    /**
     * Builds the model-enforcement factories applied during execution, keyed by the transform
     * class they apply to. Currently only ParDo variants receive the immutability enforcement.
     */
    @SuppressWarnings("rawtypes")
    private static Map<Class<? extends PTransform>, Collection<ModelEnforcementFactory>>
        defaultModelEnforcements(Set<Enforcement> enabledEnforcements) {
      ImmutableMap.Builder<Class<? extends PTransform>, Collection<ModelEnforcementFactory>>
          enforcements = ImmutableMap.builder();
      ImmutableList.Builder<ModelEnforcementFactory> enabledParDoEnforcements =
          ImmutableList.builder();
      if (enabledEnforcements.contains(Enforcement.IMMUTABILITY)) {
        enabledParDoEnforcements.add(ImmutabilityEnforcementFactory.create());
      }
      Collection<ModelEnforcementFactory> parDoEnforcements = enabledParDoEnforcements.build();
      enforcements.put(ParDo.SingleOutput.class, parDoEnforcements);
      enforcements.put(MultiOutput.class, parDoEnforcements);
      return enforcements.build();
    }
  }

  ////////////////////////////////////////////////////////////////////////////////////////////////

  private final DirectOptions options;
  private final Set<Enforcement> enabledEnforcements;
  // Supplies the Clock used by the evaluation context; replaceable for tests via setClockSupplier.
  private Supplier<Clock> clockSupplier = new NanosOffsetClockSupplier();

  /**
   * Construct a {@link DirectRunner} from the provided options.
   */
  public static DirectRunner fromOptions(PipelineOptions options) {
    return new DirectRunner(options.as(DirectOptions.class));
  }

  private DirectRunner(DirectOptions options) {
    this.options = options;
    this.enabledEnforcements = Enforcement.enabled(options);
  }

  /**
   * Returns the {@link PipelineOptions} used to create this {@link DirectRunner}.
   */
  public DirectOptions getPipelineOptions() {
    return options;
  }

  Supplier<Clock> getClockSupplier() {
    return clockSupplier;
  }

  // Package-private hook for tests that need deterministic time.
  void setClockSupplier(Supplier<Clock> supplier) {
    this.clockSupplier = supplier;
  }

  @Override
  public DirectPipelineResult run(Pipeline pipeline) {
    // Replace composite/unsupported transforms with DirectRunner-specific implementations
    // before traversing the graph.
    pipeline.replaceAll(defaultTransformOverrides());
    MetricsEnvironment.setMetricsSupported(true);
    DirectGraphVisitor graphVisitor = new DirectGraphVisitor();
    pipeline.traverseTopologically(graphVisitor);

    @SuppressWarnings("rawtypes")
    KeyedPValueTrackingVisitor keyedPValueVisitor = KeyedPValueTrackingVisitor.create();
    pipeline.traverseTopologically(keyedPValueVisitor);

    DisplayDataValidator.validatePipeline(pipeline);
    DisplayDataValidator.validateOptions(getPipelineOptions());

    DirectGraph graph = graphVisitor.getGraph();
    EvaluationContext context =
        EvaluationContext.create(
            getPipelineOptions(),
            clockSupplier.get(),
            Enforcement.bundleFactoryFor(enabledEnforcements, graph),
            graph,
            keyedPValueVisitor.getKeyedPValues());

    RootProviderRegistry rootInputProvider = RootProviderRegistry.defaultRegistry(context);
    TransformEvaluatorRegistry registry = TransformEvaluatorRegistry.defaultRegistry(context);
    PipelineExecutor executor =
        ExecutorServiceParallelExecutor.create(
            options.getTargetParallelism(), graph,
            rootInputProvider,
            registry,
            Enforcement.defaultModelEnforcements(enabledEnforcements),
            context);
    executor.start(graph.getRootTransforms());

    DirectPipelineResult result = new DirectPipelineResult(executor, context);
    if (options.isBlockOnRun()) {
      try {
        result.waitUntilFinish();
      } catch (UserCodeException userException) {
        // Unwrap user exceptions so the reported stack points at the user's code.
        throw new PipelineExecutionException(userException.getCause());
      } catch (Throwable t) {
        if (t instanceof RuntimeException) {
          throw (RuntimeException) t;
        }
        throw new RuntimeException(t);
      }
    }
    return result;
  }

  /**
   * The default set of transform overrides to use in the {@link DirectRunner}.
   *
   * <p>The order in which overrides is applied is important, as some overrides are expanded into a
   * composite. If the composite contains {@link PTransform PTransforms} which are also overridden,
   * these PTransforms must occur later in the iteration order. {@link ImmutableMap} has an
   * iteration order based on the order at which elements are added to it.
   */
  @SuppressWarnings("rawtypes")
  @VisibleForTesting
  List<PTransformOverride> defaultTransformOverrides() {
    return ImmutableList.<PTransformOverride>builder()
        .add(
            PTransformOverride.of(
                PTransformMatchers.writeWithRunnerDeterminedSharding(),
                new WriteWithShardingFactory())) /* Uses a view internally. */
        .add(
            PTransformOverride.of(
                PTransformMatchers.urnEqualTo(PTransformTranslation.CREATE_VIEW_TRANSFORM_URN),
                new ViewOverrideFactory())) /* Uses pardos and GBKs */
        .add(
            PTransformOverride.of(
                PTransformMatchers.urnEqualTo(PTransformTranslation.TEST_STREAM_TRANSFORM_URN),
                new DirectTestStreamFactory(this))) /* primitive */
        // SplittableParMultiDo is implemented in terms of nonsplittable simple ParDos and extra
        // primitives
        .add(
            PTransformOverride.of(
                PTransformMatchers.splittableParDo(), new ParDoMultiOverrideFactory()))
        // state and timer pardos are implemented in terms of simple ParDos and extra primitives
        .add(
            PTransformOverride.of(
                PTransformMatchers.stateOrTimerParDo(), new ParDoMultiOverrideFactory()))
        .add(
            PTransformOverride.of(
                PTransformMatchers.urnEqualTo(
                    SplittableParDo.SPLITTABLE_PROCESS_KEYED_ELEMENTS_URN),
                new SplittableParDoViaKeyedWorkItems.OverrideFactory()))
        .add(
            PTransformOverride.of(
                PTransformMatchers.urnEqualTo(SplittableParDo.SPLITTABLE_GBKIKWI_URN),
                new DirectGBKIntoKeyedWorkItemsOverrideFactory())) /* Returns a GBKO */
        .add(
            PTransformOverride.of(
                PTransformMatchers.urnEqualTo(PTransformTranslation.GROUP_BY_KEY_TRANSFORM_URN),
                new DirectGroupByKeyOverrideFactory())) /* returns two chained primitives. */
        .build();
  }

  /**
   * The result of running a {@link Pipeline} with the {@link DirectRunner}.
   */
  public static class DirectPipelineResult implements PipelineResult {
    private final PipelineExecutor executor;
    private final EvaluationContext evaluationContext;
    private State state;

    private DirectPipelineResult(
        PipelineExecutor executor,
        EvaluationContext evaluationContext) {
      this.executor = executor;
      this.evaluationContext = evaluationContext;
      // Only ever constructed after the executor has started.
      this.state = State.RUNNING;
    }

    @Override
    public State getState() {
      return state;
    }

    @Override
    public MetricResults metrics() {
      return evaluationContext.getMetrics();
    }

    /**
     * {@inheritDoc}.
     *
     * <p>If the pipeline terminates abnormally by throwing an {@link Exception}, this will rethrow
     * the original {@link Exception}. Future calls to {@link #getState()} will return {@link
     * org.apache.beam.sdk.PipelineResult.State#FAILED}.
     */
    @Override
    public State waitUntilFinish() {
      // Duration.ZERO means "wait indefinitely" here — see waitUntilFinish(Duration).
      return waitUntilFinish(Duration.ZERO);
    }

    @Override
    public State cancel() {
      this.state = executor.getPipelineState();
      // Only stop the executor if the pipeline has not already reached a terminal state.
      if (!this.state.isTerminal()) {
        executor.stop();
        this.state = executor.getPipelineState();
      }
      return executor.getPipelineState();
    }

    /**
     * {@inheritDoc}.
     *
     * <p>If the pipeline terminates abnormally by throwing an {@link Exception}, this will rethrow
     * the original {@link Exception}. Future calls to {@link #getState()} will return {@link
     * org.apache.beam.sdk.PipelineResult.State#FAILED}.
     */
    @Override
    public State waitUntilFinish(Duration duration) {
      State startState = this.state;
      if (!startState.isTerminal()) {
        try {
          state = executor.waitUntilFinish(duration);
        } catch (UserCodeException uce) {
          // Emulates the behavior of Pipeline#run(), where a stack trace caused by a
          // UserCodeException is truncated and replaced with the stack starting at the call to
          // waitToFinish
          throw new Pipeline.PipelineExecutionException(uce.getCause());
        } catch (Exception e) {
          if (e instanceof InterruptedException) {
            // Preserve the interrupt for callers further up the stack.
            Thread.currentThread().interrupt();
          }
          if (e instanceof RuntimeException) {
            throw (RuntimeException) e;
          }
          throw new RuntimeException(e);
        }
      }
      return this.state;
    }
  }

  /**
   * A {@link Supplier} that creates a {@link NanosOffsetClock}.
   */
  private static class NanosOffsetClockSupplier implements Supplier<Clock> {
    @Override
    public Clock get() {
      return NanosOffsetClock.create();
    }
  }
}
|
runners/direct-java/src/main/java/org/apache/beam/runners/direct/DirectRunner.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.beam.runners.direct;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Supplier;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import java.util.Collection;
import java.util.Collections;
import java.util.EnumSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.beam.runners.core.SplittableParDoViaKeyedWorkItems;
import org.apache.beam.runners.core.construction.PTransformMatchers;
import org.apache.beam.runners.core.construction.SplittableParDo;
import org.apache.beam.runners.direct.DirectRunner.DirectPipelineResult;
import org.apache.beam.runners.direct.TestStreamEvaluatorFactory.DirectTestStreamFactory;
import org.apache.beam.sdk.Pipeline;
import org.apache.beam.sdk.Pipeline.PipelineExecutionException;
import org.apache.beam.sdk.PipelineResult;
import org.apache.beam.sdk.PipelineRunner;
import org.apache.beam.sdk.io.Read;
import org.apache.beam.sdk.metrics.MetricResults;
import org.apache.beam.sdk.metrics.MetricsEnvironment;
import org.apache.beam.sdk.options.PipelineOptions;
import org.apache.beam.sdk.runners.PTransformOverride;
import org.apache.beam.sdk.testing.TestStream;
import org.apache.beam.sdk.transforms.GroupByKey;
import org.apache.beam.sdk.transforms.PTransform;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.transforms.ParDo.MultiOutput;
import org.apache.beam.sdk.transforms.View.CreatePCollectionView;
import org.apache.beam.sdk.util.UserCodeException;
import org.apache.beam.sdk.values.PCollection;
import org.joda.time.Duration;
/**
* A {@link PipelineRunner} that executes a {@link Pipeline} within the process that constructed the
* {@link Pipeline}.
*
* <p>The {@link DirectRunner} is suitable for running a {@link Pipeline} on small scale, example,
* and test data, and should be used for ensuring that processing logic is correct. It also
* is appropriate for executing unit tests and performs additional work to ensure that behavior
* contained within a {@link Pipeline} does not break assumptions within the Beam model, to improve
* the ability to execute a {@link Pipeline} at scale on a distributed backend.
*/
public class DirectRunner extends PipelineRunner<DirectPipelineResult> {

  /**
   * Runtime model-enforcement checks the {@link DirectRunner} can apply while executing a
   * {@link Pipeline}. Each value decides, per {@link PCollection}, whether it applies.
   */
  enum Enforcement {
    /** Checks that elements can be encoded by their {@link PCollection}'s coder. */
    ENCODABILITY {
      @Override
      public boolean appliesTo(PCollection<?> collection, DirectGraph graph) {
        // Encodability is a requirement of every PCollection, so this always applies.
        return true;
      }
    },
    /** Checks that user code does not mutate input elements. */
    IMMUTABILITY {
      @Override
      public boolean appliesTo(PCollection<?> collection, DirectGraph graph) {
        // Only output of UDF-executing transforms needs mutation checking.
        return CONTAINS_UDF.contains(graph.getProducer(collection).getTransform().getClass());
      }
    };

    /**
     * The set of {@link PTransform PTransforms} that execute a UDF. Useful for some enforcements.
     */
    private static final Set<Class<? extends PTransform>> CONTAINS_UDF =
        ImmutableSet.of(
            Read.Bounded.class, Read.Unbounded.class, ParDo.SingleOutput.class, MultiOutput.class);

    /** Returns whether this enforcement applies to the given {@link PCollection}. */
    public abstract boolean appliesTo(PCollection<?> collection, DirectGraph graph);

    ////////////////////////////////////////////////////////////////////////////////////////////////
    // Utilities for creating enforcements

    /** Returns the set of enforcements enabled by the provided {@link DirectOptions}. */
    static Set<Enforcement> enabled(DirectOptions options) {
      EnumSet<Enforcement> enabled = EnumSet.noneOf(Enforcement.class);
      if (options.isEnforceEncodability()) {
        enabled.add(ENCODABILITY);
      }
      if (options.isEnforceImmutability()) {
        enabled.add(IMMUTABILITY);
      }
      return Collections.unmodifiableSet(enabled);
    }

    /**
     * Creates the {@link BundleFactory} used during execution, wrapped with checking factories
     * for each enabled enforcement.
     */
    static BundleFactory bundleFactoryFor(
        Set<Enforcement> enforcements, DirectGraph graph) {
      // Cloning each element through its coder exercises encodability as a side effect.
      BundleFactory bundleFactory =
          enforcements.contains(Enforcement.ENCODABILITY)
              ? CloningBundleFactory.create()
              : ImmutableListBundleFactory.create();
      if (enforcements.contains(Enforcement.IMMUTABILITY)) {
        bundleFactory = ImmutabilityCheckingBundleFactory.create(bundleFactory, graph);
      }
      return bundleFactory;
    }

    /**
     * Builds the model-enforcement factories applied during execution, keyed by the transform
     * class they apply to. Currently only ParDo variants receive the immutability enforcement.
     */
    @SuppressWarnings("rawtypes")
    private static Map<Class<? extends PTransform>, Collection<ModelEnforcementFactory>>
        defaultModelEnforcements(Set<Enforcement> enabledEnforcements) {
      ImmutableMap.Builder<Class<? extends PTransform>, Collection<ModelEnforcementFactory>>
          enforcements = ImmutableMap.builder();
      ImmutableList.Builder<ModelEnforcementFactory> enabledParDoEnforcements =
          ImmutableList.builder();
      if (enabledEnforcements.contains(Enforcement.IMMUTABILITY)) {
        enabledParDoEnforcements.add(ImmutabilityEnforcementFactory.create());
      }
      Collection<ModelEnforcementFactory> parDoEnforcements = enabledParDoEnforcements.build();
      enforcements.put(ParDo.SingleOutput.class, parDoEnforcements);
      enforcements.put(MultiOutput.class, parDoEnforcements);
      return enforcements.build();
    }
  }

  ////////////////////////////////////////////////////////////////////////////////////////////////

  private final DirectOptions options;
  private final Set<Enforcement> enabledEnforcements;
  // Supplies the Clock used by the evaluation context; replaceable for tests via setClockSupplier.
  private Supplier<Clock> clockSupplier = new NanosOffsetClockSupplier();

  /**
   * Construct a {@link DirectRunner} from the provided options.
   */
  public static DirectRunner fromOptions(PipelineOptions options) {
    return new DirectRunner(options.as(DirectOptions.class));
  }

  private DirectRunner(DirectOptions options) {
    this.options = options;
    this.enabledEnforcements = Enforcement.enabled(options);
  }

  /**
   * Returns the {@link PipelineOptions} used to create this {@link DirectRunner}.
   */
  public DirectOptions getPipelineOptions() {
    return options;
  }

  Supplier<Clock> getClockSupplier() {
    return clockSupplier;
  }

  // Package-private hook for tests that need deterministic time.
  void setClockSupplier(Supplier<Clock> supplier) {
    this.clockSupplier = supplier;
  }

  @Override
  public DirectPipelineResult run(Pipeline pipeline) {
    // Replace composite/unsupported transforms with DirectRunner-specific implementations
    // before traversing the graph.
    pipeline.replaceAll(defaultTransformOverrides());
    MetricsEnvironment.setMetricsSupported(true);
    DirectGraphVisitor graphVisitor = new DirectGraphVisitor();
    pipeline.traverseTopologically(graphVisitor);

    @SuppressWarnings("rawtypes")
    KeyedPValueTrackingVisitor keyedPValueVisitor = KeyedPValueTrackingVisitor.create();
    pipeline.traverseTopologically(keyedPValueVisitor);

    DisplayDataValidator.validatePipeline(pipeline);
    DisplayDataValidator.validateOptions(getPipelineOptions());

    DirectGraph graph = graphVisitor.getGraph();
    EvaluationContext context =
        EvaluationContext.create(
            getPipelineOptions(),
            clockSupplier.get(),
            Enforcement.bundleFactoryFor(enabledEnforcements, graph),
            graph,
            keyedPValueVisitor.getKeyedPValues());

    RootProviderRegistry rootInputProvider = RootProviderRegistry.defaultRegistry(context);
    TransformEvaluatorRegistry registry = TransformEvaluatorRegistry.defaultRegistry(context);
    PipelineExecutor executor =
        ExecutorServiceParallelExecutor.create(
            options.getTargetParallelism(), graph,
            rootInputProvider,
            registry,
            Enforcement.defaultModelEnforcements(enabledEnforcements),
            context);
    executor.start(graph.getRootTransforms());

    DirectPipelineResult result = new DirectPipelineResult(executor, context);
    if (options.isBlockOnRun()) {
      try {
        result.waitUntilFinish();
      } catch (UserCodeException userException) {
        // Unwrap user exceptions so the reported stack points at the user's code.
        throw new PipelineExecutionException(userException.getCause());
      } catch (Throwable t) {
        if (t instanceof RuntimeException) {
          throw (RuntimeException) t;
        }
        throw new RuntimeException(t);
      }
    }
    return result;
  }

  /**
   * The default set of transform overrides to use in the {@link DirectRunner}.
   *
   * <p>The order in which overrides is applied is important, as some overrides are expanded into a
   * composite. If the composite contains {@link PTransform PTransforms} which are also overridden,
   * these PTransforms must occur later in the iteration order. {@link ImmutableMap} has an
   * iteration order based on the order at which elements are added to it.
   */
  @SuppressWarnings("rawtypes")
  @VisibleForTesting
  List<PTransformOverride> defaultTransformOverrides() {
    return ImmutableList.<PTransformOverride>builder()
        .add(
            PTransformOverride.of(
                PTransformMatchers.writeWithRunnerDeterminedSharding(),
                new WriteWithShardingFactory())) /* Uses a view internally. */
        .add(
            PTransformOverride.of(
                PTransformMatchers.classEqualTo(CreatePCollectionView.class),
                new ViewOverrideFactory())) /* Uses pardos and GBKs */
        .add(
            PTransformOverride.of(
                PTransformMatchers.classEqualTo(TestStream.class),
                new DirectTestStreamFactory(this))) /* primitive */
        // SplittableParMultiDo is implemented in terms of nonsplittable simple ParDos and extra
        // primitives
        .add(
            PTransformOverride.of(
                PTransformMatchers.splittableParDoMulti(), new ParDoMultiOverrideFactory()))
        // state and timer pardos are implemented in terms of simple ParDos and extra primitives
        .add(
            PTransformOverride.of(
                PTransformMatchers.stateOrTimerParDoMulti(), new ParDoMultiOverrideFactory()))
        .add(
            PTransformOverride.of(
                PTransformMatchers.classEqualTo(SplittableParDo.ProcessKeyedElements.class),
                new SplittableParDoViaKeyedWorkItems.OverrideFactory()))
        .add(
            PTransformOverride.of(
                PTransformMatchers.classEqualTo(
                    SplittableParDoViaKeyedWorkItems.GBKIntoKeyedWorkItems.class),
                new DirectGBKIntoKeyedWorkItemsOverrideFactory())) /* Returns a GBKO */
        .add(
            PTransformOverride.of(
                PTransformMatchers.classEqualTo(GroupByKey.class),
                new DirectGroupByKeyOverrideFactory())) /* returns two chained primitives. */
        .build();
  }

  /**
   * The result of running a {@link Pipeline} with the {@link DirectRunner}.
   */
  public static class DirectPipelineResult implements PipelineResult {
    private final PipelineExecutor executor;
    private final EvaluationContext evaluationContext;
    private State state;

    private DirectPipelineResult(
        PipelineExecutor executor,
        EvaluationContext evaluationContext) {
      this.executor = executor;
      this.evaluationContext = evaluationContext;
      // Only ever constructed after the executor has started.
      this.state = State.RUNNING;
    }

    @Override
    public State getState() {
      return state;
    }

    @Override
    public MetricResults metrics() {
      return evaluationContext.getMetrics();
    }

    /**
     * {@inheritDoc}.
     *
     * <p>If the pipeline terminates abnormally by throwing an {@link Exception}, this will rethrow
     * the original {@link Exception}. Future calls to {@link #getState()} will return {@link
     * org.apache.beam.sdk.PipelineResult.State#FAILED}.
     */
    @Override
    public State waitUntilFinish() {
      // Duration.ZERO means "wait indefinitely" here — see waitUntilFinish(Duration).
      return waitUntilFinish(Duration.ZERO);
    }

    @Override
    public State cancel() {
      this.state = executor.getPipelineState();
      // Only stop the executor if the pipeline has not already reached a terminal state.
      if (!this.state.isTerminal()) {
        executor.stop();
        this.state = executor.getPipelineState();
      }
      return executor.getPipelineState();
    }

    /**
     * {@inheritDoc}.
     *
     * <p>If the pipeline terminates abnormally by throwing an {@link Exception}, this will rethrow
     * the original {@link Exception}. Future calls to {@link #getState()} will return {@link
     * org.apache.beam.sdk.PipelineResult.State#FAILED}.
     */
    @Override
    public State waitUntilFinish(Duration duration) {
      State startState = this.state;
      if (!startState.isTerminal()) {
        try {
          state = executor.waitUntilFinish(duration);
        } catch (UserCodeException uce) {
          // Emulates the behavior of Pipeline#run(), where a stack trace caused by a
          // UserCodeException is truncated and replaced with the stack starting at the call to
          // waitToFinish
          throw new Pipeline.PipelineExecutionException(uce.getCause());
        } catch (Exception e) {
          if (e instanceof InterruptedException) {
            // Preserve the interrupt for callers further up the stack.
            Thread.currentThread().interrupt();
          }
          if (e instanceof RuntimeException) {
            throw (RuntimeException) e;
          }
          throw new RuntimeException(e);
        }
      }
      return this.state;
    }
  }

  /**
   * A {@link Supplier} that creates a {@link NanosOffsetClock}.
   */
  private static class NanosOffsetClockSupplier implements Supplier<Clock> {
    @Override
    public Clock get() {
      return NanosOffsetClock.create();
    }
  }
}
|
DirectRunner override matchers using Runner API
|
runners/direct-java/src/main/java/org/apache/beam/runners/direct/DirectRunner.java
|
DirectRunner override matchers using Runner API
|
|
Java
|
apache-2.0
|
b4dc87147904a01b8dcbd6c2846019242dd3c47d
| 0
|
asedunov/intellij-community,ivan-fedorov/intellij-community,alphafoobar/intellij-community,FHannes/intellij-community,amith01994/intellij-community,Lekanich/intellij-community,ibinti/intellij-community,slisson/intellij-community,hurricup/intellij-community,izonder/intellij-community,akosyakov/intellij-community,jagguli/intellij-community,robovm/robovm-studio,suncycheng/intellij-community,suncycheng/intellij-community,Lekanich/intellij-community,semonte/intellij-community,FHannes/intellij-community,wreckJ/intellij-community,gnuhub/intellij-community,asedunov/intellij-community,ol-loginov/intellij-community,vvv1559/intellij-community,orekyuu/intellij-community,youdonghai/intellij-community,FHannes/intellij-community,joewalnes/idea-community,samthor/intellij-community,ivan-fedorov/intellij-community,apixandru/intellij-community,Distrotech/intellij-community,Distrotech/intellij-community,vladmm/intellij-community,MichaelNedzelsky/intellij-community,kool79/intellij-community,vvv1559/intellij-community,lucafavatella/intellij-community,joewalnes/idea-community,adedayo/intellij-community,petteyg/intellij-community,SerCeMan/intellij-community,TangHao1987/intellij-community,ftomassetti/intellij-community,michaelgallacher/intellij-community,MichaelNedzelsky/intellij-community,ThiagoGarciaAlves/intellij-community,allotria/intellij-community,xfournet/intellij-community,ol-loginov/intellij-community,ibinti/intellij-community,ol-loginov/intellij-community,robovm/robovm-studio,pwoodworth/intellij-community,gnuhub/intellij-community,suncycheng/intellij-community,hurricup/intellij-community,jagguli/intellij-community,fengbaicanhe/intellij-community,blademainer/intellij-community,fengbaicanhe/intellij-community,samthor/intellij-community,diorcety/intellij-community,idea4bsd/idea4bsd,pwoodworth/intellij-community,caot/intellij-community,kdwink/intellij-community,akosyakov/intellij-community,fitermay/intellij-community,ahb0327/intellij-community,semonte/intellij-community,petteyg/intell
ij-community,vvv1559/intellij-community,jagguli/intellij-community,pwoodworth/intellij-community,tmpgit/intellij-community,asedunov/intellij-community,caot/intellij-community,FHannes/intellij-community,supersven/intellij-community,orekyuu/intellij-community,tmpgit/intellij-community,alphafoobar/intellij-community,blademainer/intellij-community,amith01994/intellij-community,clumsy/intellij-community,ivan-fedorov/intellij-community,ol-loginov/intellij-community,wreckJ/intellij-community,Lekanich/intellij-community,jexp/idea2,signed/intellij-community,ivan-fedorov/intellij-community,MichaelNedzelsky/intellij-community,ibinti/intellij-community,ivan-fedorov/intellij-community,allotria/intellij-community,hurricup/intellij-community,robovm/robovm-studio,hurricup/intellij-community,FHannes/intellij-community,hurricup/intellij-community,allotria/intellij-community,idea4bsd/idea4bsd,ThiagoGarciaAlves/intellij-community,jexp/idea2,semonte/intellij-community,jagguli/intellij-community,idea4bsd/idea4bsd,SerCeMan/intellij-community,MER-GROUP/intellij-community,diorcety/intellij-community,fengbaicanhe/intellij-community,jexp/idea2,suncycheng/intellij-community,diorcety/intellij-community,joewalnes/idea-community,supersven/intellij-community,suncycheng/intellij-community,vvv1559/intellij-community,blademainer/intellij-community,amith01994/intellij-community,amith01994/intellij-community,orekyuu/intellij-community,Lekanich/intellij-community,semonte/intellij-community,clumsy/intellij-community,blademainer/intellij-community,asedunov/intellij-community,kool79/intellij-community,asedunov/intellij-community,MichaelNedzelsky/intellij-community,MER-GROUP/intellij-community,gnuhub/intellij-community,SerCeMan/intellij-community,signed/intellij-community,adedayo/intellij-community,amith01994/intellij-community,youdonghai/intellij-community,semonte/intellij-community,ernestp/consulo,ivan-fedorov/intellij-community,signed/intellij-community,diorcety/intellij-community,ryano144/intellij-commu
nity,apixandru/intellij-community,adedayo/intellij-community,mglukhikh/intellij-community,mglukhikh/intellij-community,asedunov/intellij-community,hurricup/intellij-community,caot/intellij-community,Lekanich/intellij-community,diorcety/intellij-community,robovm/robovm-studio,ftomassetti/intellij-community,retomerz/intellij-community,idea4bsd/idea4bsd,fengbaicanhe/intellij-community,izonder/intellij-community,alphafoobar/intellij-community,idea4bsd/idea4bsd,FHannes/intellij-community,signed/intellij-community,allotria/intellij-community,clumsy/intellij-community,MER-GROUP/intellij-community,jexp/idea2,wreckJ/intellij-community,muntasirsyed/intellij-community,ryano144/intellij-community,fitermay/intellij-community,retomerz/intellij-community,alphafoobar/intellij-community,adedayo/intellij-community,petteyg/intellij-community,apixandru/intellij-community,ftomassetti/intellij-community,vladmm/intellij-community,semonte/intellij-community,alphafoobar/intellij-community,ol-loginov/intellij-community,tmpgit/intellij-community,ol-loginov/intellij-community,tmpgit/intellij-community,FHannes/intellij-community,asedunov/intellij-community,caot/intellij-community,salguarnieri/intellij-community,ahb0327/intellij-community,fitermay/intellij-community,samthor/intellij-community,youdonghai/intellij-community,ryano144/intellij-community,ol-loginov/intellij-community,da1z/intellij-community,pwoodworth/intellij-community,jagguli/intellij-community,holmes/intellij-community,hurricup/intellij-community,lucafavatella/intellij-community,gnuhub/intellij-community,retomerz/intellij-community,slisson/intellij-community,youdonghai/intellij-community,Distrotech/intellij-community,hurricup/intellij-community,jexp/idea2,michaelgallacher/intellij-community,diorcety/intellij-community,fnouama/intellij-community,nicolargo/intellij-community,ryano144/intellij-community,joewalnes/idea-community,lucafavatella/intellij-community,kool79/intellij-community,salguarnieri/intellij-community,tmpgit/intellij-
community,kdwink/intellij-community,kool79/intellij-community,blademainer/intellij-community,semonte/intellij-community,MichaelNedzelsky/intellij-community,adedayo/intellij-community,lucafavatella/intellij-community,ivan-fedorov/intellij-community,kool79/intellij-community,xfournet/intellij-community,nicolargo/intellij-community,kdwink/intellij-community,robovm/robovm-studio,diorcety/intellij-community,signed/intellij-community,consulo/consulo,kdwink/intellij-community,orekyuu/intellij-community,clumsy/intellij-community,ibinti/intellij-community,youdonghai/intellij-community,apixandru/intellij-community,mglukhikh/intellij-community,xfournet/intellij-community,ol-loginov/intellij-community,hurricup/intellij-community,samthor/intellij-community,slisson/intellij-community,Lekanich/intellij-community,vladmm/intellij-community,kool79/intellij-community,ThiagoGarciaAlves/intellij-community,michaelgallacher/intellij-community,kdwink/intellij-community,adedayo/intellij-community,muntasirsyed/intellij-community,idea4bsd/idea4bsd,da1z/intellij-community,fnouama/intellij-community,fengbaicanhe/intellij-community,alphafoobar/intellij-community,blademainer/intellij-community,jagguli/intellij-community,blademainer/intellij-community,orekyuu/intellij-community,MichaelNedzelsky/intellij-community,muntasirsyed/intellij-community,clumsy/intellij-community,da1z/intellij-community,mglukhikh/intellij-community,idea4bsd/idea4bsd,samthor/intellij-community,ahb0327/intellij-community,semonte/intellij-community,holmes/intellij-community,da1z/intellij-community,vvv1559/intellij-community,caot/intellij-community,holmes/intellij-community,joewalnes/idea-community,adedayo/intellij-community,ryano144/intellij-community,gnuhub/intellij-community,Lekanich/intellij-community,diorcety/intellij-community,kdwink/intellij-community,vladmm/intellij-community,lucafavatella/intellij-community,FHannes/intellij-community,ivan-fedorov/intellij-community,fengbaicanhe/intellij-community,holmes/intellij-commun
ity,ryano144/intellij-community,asedunov/intellij-community,youdonghai/intellij-community,tmpgit/intellij-community,holmes/intellij-community,gnuhub/intellij-community,supersven/intellij-community,orekyuu/intellij-community,ivan-fedorov/intellij-community,retomerz/intellij-community,robovm/robovm-studio,blademainer/intellij-community,allotria/intellij-community,caot/intellij-community,allotria/intellij-community,lucafavatella/intellij-community,retomerz/intellij-community,salguarnieri/intellij-community,fnouama/intellij-community,ivan-fedorov/intellij-community,TangHao1987/intellij-community,michaelgallacher/intellij-community,diorcety/intellij-community,ryano144/intellij-community,signed/intellij-community,da1z/intellij-community,tmpgit/intellij-community,asedunov/intellij-community,youdonghai/intellij-community,alphafoobar/intellij-community,vvv1559/intellij-community,pwoodworth/intellij-community,muntasirsyed/intellij-community,ibinti/intellij-community,suncycheng/intellij-community,xfournet/intellij-community,xfournet/intellij-community,apixandru/intellij-community,alphafoobar/intellij-community,kool79/intellij-community,da1z/intellij-community,joewalnes/idea-community,SerCeMan/intellij-community,supersven/intellij-community,supersven/intellij-community,SerCeMan/intellij-community,signed/intellij-community,supersven/intellij-community,ernestp/consulo,SerCeMan/intellij-community,TangHao1987/intellij-community,petteyg/intellij-community,da1z/intellij-community,nicolargo/intellij-community,kdwink/intellij-community,slisson/intellij-community,retomerz/intellij-community,dslomov/intellij-community,jexp/idea2,hurricup/intellij-community,apixandru/intellij-community,petteyg/intellij-community,vladmm/intellij-community,fitermay/intellij-community,wreckJ/intellij-community,slisson/intellij-community,jagguli/intellij-community,consulo/consulo,ol-loginov/intellij-community,fitermay/intellij-community,petteyg/intellij-community,MichaelNedzelsky/intellij-community,ahb0327/in
tellij-community,robovm/robovm-studio,wreckJ/intellij-community,amith01994/intellij-community,da1z/intellij-community,caot/intellij-community,robovm/robovm-studio,xfournet/intellij-community,FHannes/intellij-community,MichaelNedzelsky/intellij-community,muntasirsyed/intellij-community,slisson/intellij-community,holmes/intellij-community,tmpgit/intellij-community,petteyg/intellij-community,salguarnieri/intellij-community,TangHao1987/intellij-community,vladmm/intellij-community,TangHao1987/intellij-community,clumsy/intellij-community,izonder/intellij-community,allotria/intellij-community,gnuhub/intellij-community,ryano144/intellij-community,ibinti/intellij-community,nicolargo/intellij-community,fitermay/intellij-community,caot/intellij-community,vvv1559/intellij-community,izonder/intellij-community,alphafoobar/intellij-community,clumsy/intellij-community,ibinti/intellij-community,mglukhikh/intellij-community,clumsy/intellij-community,TangHao1987/intellij-community,amith01994/intellij-community,joewalnes/idea-community,signed/intellij-community,TangHao1987/intellij-community,youdonghai/intellij-community,vladmm/intellij-community,supersven/intellij-community,ivan-fedorov/intellij-community,Distrotech/intellij-community,fnouama/intellij-community,Lekanich/intellij-community,allotria/intellij-community,youdonghai/intellij-community,xfournet/intellij-community,semonte/intellij-community,blademainer/intellij-community,TangHao1987/intellij-community,MER-GROUP/intellij-community,ThiagoGarciaAlves/intellij-community,MichaelNedzelsky/intellij-community,dslomov/intellij-community,gnuhub/intellij-community,joewalnes/idea-community,ThiagoGarciaAlves/intellij-community,slisson/intellij-community,suncycheng/intellij-community,izonder/intellij-community,kool79/intellij-community,robovm/robovm-studio,blademainer/intellij-community,gnuhub/intellij-community,vvv1559/intellij-community,apixandru/intellij-community,samthor/intellij-community,nicolargo/intellij-community,da1z/intellij-com
munity,TangHao1987/intellij-community,caot/intellij-community,fengbaicanhe/intellij-community,supersven/intellij-community,ibinti/intellij-community,orekyuu/intellij-community,MER-GROUP/intellij-community,suncycheng/intellij-community,retomerz/intellij-community,fengbaicanhe/intellij-community,ryano144/intellij-community,slisson/intellij-community,pwoodworth/intellij-community,Distrotech/intellij-community,fnouama/intellij-community,allotria/intellij-community,kdwink/intellij-community,nicolargo/intellij-community,pwoodworth/intellij-community,TangHao1987/intellij-community,dslomov/intellij-community,tmpgit/intellij-community,kool79/intellij-community,vladmm/intellij-community,wreckJ/intellij-community,supersven/intellij-community,fitermay/intellij-community,ftomassetti/intellij-community,Lekanich/intellij-community,Lekanich/intellij-community,wreckJ/intellij-community,youdonghai/intellij-community,akosyakov/intellij-community,ftomassetti/intellij-community,Distrotech/intellij-community,apixandru/intellij-community,da1z/intellij-community,wreckJ/intellij-community,supersven/intellij-community,orekyuu/intellij-community,suncycheng/intellij-community,Lekanich/intellij-community,samthor/intellij-community,salguarnieri/intellij-community,michaelgallacher/intellij-community,vvv1559/intellij-community,SerCeMan/intellij-community,retomerz/intellij-community,samthor/intellij-community,mglukhikh/intellij-community,ftomassetti/intellij-community,holmes/intellij-community,muntasirsyed/intellij-community,consulo/consulo,apixandru/intellij-community,youdonghai/intellij-community,MER-GROUP/intellij-community,apixandru/intellij-community,ftomassetti/intellij-community,vladmm/intellij-community,nicolargo/intellij-community,wreckJ/intellij-community,Distrotech/intellij-community,hurricup/intellij-community,Distrotech/intellij-community,retomerz/intellij-community,adedayo/intellij-community,ahb0327/intellij-community,mglukhikh/intellij-community,adedayo/intellij-community,pwoodworth/
intellij-community,jagguli/intellij-community,ahb0327/intellij-community,hurricup/intellij-community,alphafoobar/intellij-community,petteyg/intellij-community,consulo/consulo,MichaelNedzelsky/intellij-community,akosyakov/intellij-community,alphafoobar/intellij-community,vladmm/intellij-community,semonte/intellij-community,tmpgit/intellij-community,muntasirsyed/intellij-community,apixandru/intellij-community,ernestp/consulo,akosyakov/intellij-community,wreckJ/intellij-community,ibinti/intellij-community,da1z/intellij-community,ahb0327/intellij-community,allotria/intellij-community,nicolargo/intellij-community,ernestp/consulo,Distrotech/intellij-community,pwoodworth/intellij-community,michaelgallacher/intellij-community,ol-loginov/intellij-community,michaelgallacher/intellij-community,fnouama/intellij-community,allotria/intellij-community,xfournet/intellij-community,ThiagoGarciaAlves/intellij-community,ahb0327/intellij-community,signed/intellij-community,michaelgallacher/intellij-community,lucafavatella/intellij-community,kdwink/intellij-community,clumsy/intellij-community,muntasirsyed/intellij-community,fnouama/intellij-community,caot/intellij-community,muntasirsyed/intellij-community,clumsy/intellij-community,ThiagoGarciaAlves/intellij-community,dslomov/intellij-community,MER-GROUP/intellij-community,nicolargo/intellij-community,ibinti/intellij-community,MER-GROUP/intellij-community,akosyakov/intellij-community,salguarnieri/intellij-community,adedayo/intellij-community,holmes/intellij-community,FHannes/intellij-community,retomerz/intellij-community,dslomov/intellij-community,wreckJ/intellij-community,akosyakov/intellij-community,holmes/intellij-community,da1z/intellij-community,fitermay/intellij-community,signed/intellij-community,salguarnieri/intellij-community,suncycheng/intellij-community,signed/intellij-community,supersven/intellij-community,blademainer/intellij-community,idea4bsd/idea4bsd,petteyg/intellij-community,ftomassetti/intellij-community,semonte/intelli
j-community,apixandru/intellij-community,michaelgallacher/intellij-community,idea4bsd/idea4bsd,vladmm/intellij-community,Distrotech/intellij-community,dslomov/intellij-community,fitermay/intellij-community,muntasirsyed/intellij-community,fnouama/intellij-community,ftomassetti/intellij-community,ol-loginov/intellij-community,mglukhikh/intellij-community,akosyakov/intellij-community,apixandru/intellij-community,xfournet/intellij-community,ivan-fedorov/intellij-community,samthor/intellij-community,nicolargo/intellij-community,TangHao1987/intellij-community,TangHao1987/intellij-community,blademainer/intellij-community,ThiagoGarciaAlves/intellij-community,consulo/consulo,holmes/intellij-community,signed/intellij-community,fengbaicanhe/intellij-community,amith01994/intellij-community,salguarnieri/intellij-community,pwoodworth/intellij-community,samthor/intellij-community,ahb0327/intellij-community,muntasirsyed/intellij-community,MER-GROUP/intellij-community,vvv1559/intellij-community,MichaelNedzelsky/intellij-community,tmpgit/intellij-community,dslomov/intellij-community,petteyg/intellij-community,SerCeMan/intellij-community,SerCeMan/intellij-community,kool79/intellij-community,SerCeMan/intellij-community,petteyg/intellij-community,asedunov/intellij-community,ThiagoGarciaAlves/intellij-community,suncycheng/intellij-community,dslomov/intellij-community,SerCeMan/intellij-community,clumsy/intellij-community,vvv1559/intellij-community,lucafavatella/intellij-community,MER-GROUP/intellij-community,suncycheng/intellij-community,michaelgallacher/intellij-community,amith01994/intellij-community,izonder/intellij-community,tmpgit/intellij-community,allotria/intellij-community,alphafoobar/intellij-community,fitermay/intellij-community,FHannes/intellij-community,lucafavatella/intellij-community,slisson/intellij-community,mglukhikh/intellij-community,holmes/intellij-community,pwoodworth/intellij-community,da1z/intellij-community,izonder/intellij-community,hurricup/intellij-community,ME
R-GROUP/intellij-community,ryano144/intellij-community,mglukhikh/intellij-community,kool79/intellij-community,akosyakov/intellij-community,jagguli/intellij-community,retomerz/intellij-community,muntasirsyed/intellij-community,akosyakov/intellij-community,vvv1559/intellij-community,lucafavatella/intellij-community,ahb0327/intellij-community,dslomov/intellij-community,semonte/intellij-community,gnuhub/intellij-community,kdwink/intellij-community,FHannes/intellij-community,pwoodworth/intellij-community,kool79/intellij-community,signed/intellij-community,lucafavatella/intellij-community,amith01994/intellij-community,consulo/consulo,clumsy/intellij-community,xfournet/intellij-community,idea4bsd/idea4bsd,kdwink/intellij-community,lucafavatella/intellij-community,xfournet/intellij-community,mglukhikh/intellij-community,izonder/intellij-community,jagguli/intellij-community,slisson/intellij-community,Lekanich/intellij-community,idea4bsd/idea4bsd,semonte/intellij-community,ernestp/consulo,adedayo/intellij-community,salguarnieri/intellij-community,xfournet/intellij-community,jagguli/intellij-community,dslomov/intellij-community,xfournet/intellij-community,amith01994/intellij-community,jexp/idea2,fengbaicanhe/intellij-community,youdonghai/intellij-community,vladmm/intellij-community,Distrotech/intellij-community,ol-loginov/intellij-community,nicolargo/intellij-community,amith01994/intellij-community,fitermay/intellij-community,robovm/robovm-studio,izonder/intellij-community,robovm/robovm-studio,fnouama/intellij-community,salguarnieri/intellij-community,orekyuu/intellij-community,ibinti/intellij-community,asedunov/intellij-community,ftomassetti/intellij-community,wreckJ/intellij-community,fitermay/intellij-community,fnouama/intellij-community,adedayo/intellij-community,MichaelNedzelsky/intellij-community,ibinti/intellij-community,vvv1559/intellij-community,jagguli/intellij-community,orekyuu/intellij-community,gnuhub/intellij-community,ThiagoGarciaAlves/intellij-community,kdwink/
intellij-community,salguarnieri/intellij-community,slisson/intellij-community,salguarnieri/intellij-community,akosyakov/intellij-community,fnouama/intellij-community,retomerz/intellij-community,robovm/robovm-studio,nicolargo/intellij-community,ryano144/intellij-community,dslomov/intellij-community,samthor/intellij-community,ThiagoGarciaAlves/intellij-community,izonder/intellij-community,fengbaicanhe/intellij-community,dslomov/intellij-community,youdonghai/intellij-community,ahb0327/intellij-community,MER-GROUP/intellij-community,mglukhikh/intellij-community,apixandru/intellij-community,orekyuu/intellij-community,retomerz/intellij-community,michaelgallacher/intellij-community,ThiagoGarciaAlves/intellij-community,fitermay/intellij-community,ibinti/intellij-community,diorcety/intellij-community,idea4bsd/idea4bsd,caot/intellij-community,caot/intellij-community,ryano144/intellij-community,idea4bsd/idea4bsd,SerCeMan/intellij-community,izonder/intellij-community,ftomassetti/intellij-community,supersven/intellij-community,diorcety/intellij-community,allotria/intellij-community,petteyg/intellij-community,asedunov/intellij-community,izonder/intellij-community,fengbaicanhe/intellij-community,gnuhub/intellij-community,lucafavatella/intellij-community,ahb0327/intellij-community,holmes/intellij-community,samthor/intellij-community,FHannes/intellij-community,slisson/intellij-community,joewalnes/idea-community,diorcety/intellij-community,mglukhikh/intellij-community,orekyuu/intellij-community,jexp/idea2,ftomassetti/intellij-community,ernestp/consulo,akosyakov/intellij-community,fnouama/intellij-community,asedunov/intellij-community,Distrotech/intellij-community,michaelgallacher/intellij-community
|
package com.intellij.psi.impl.compiled;
import com.intellij.lang.ASTNode;
import com.intellij.lang.LanguageDialect;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.fileEditor.FileDocumentManager;
import com.intellij.openapi.fileTypes.FileType;
import com.intellij.openapi.fileTypes.StdFileTypes;
import com.intellij.openapi.roots.OrderEntry;
import com.intellij.openapi.roots.OrderRootType;
import com.intellij.openapi.roots.ProjectFileIndex;
import com.intellij.openapi.roots.ProjectRootManager;
import com.intellij.openapi.util.Key;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.pom.java.LanguageLevel;
import com.intellij.psi.*;
import com.intellij.psi.search.PsiElementProcessor;
import com.intellij.psi.impl.PsiFileEx;
import com.intellij.psi.impl.PsiManagerImpl;
import com.intellij.psi.impl.cache.RepositoryManager;
import com.intellij.psi.impl.source.SourceTreeToPsiMap;
import com.intellij.psi.impl.source.tree.TreeElement;
import com.intellij.util.ArrayUtil;
import com.intellij.util.IncorrectOperationException;
import com.intellij.util.cls.BytePointer;
import com.intellij.util.cls.ClsFormatException;
import com.intellij.util.cls.ClsUtil;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.List;
/**
 * PSI representation of a compiled {@code .class} file ({@link #getFileType()} is
 * {@code StdFileTypes.CLASS}). The file exposes exactly one top-level {@link PsiClass}
 * (see the assertion in {@link #getClasses()}) and, on demand, a decompiled-text
 * "mirror" tree built in {@link #initializeMirror()} that backs {@link #getText()}.
 * <p>
 * Thread-safety notes grounded in the code: the lazily-initialized fields
 * ({@code myClass}, {@code myPackageStatement}, {@code myLanguageLevel},
 * {@code myContentsUnloaded}) are {@code volatile}; repository-id access and mirror
 * installation are serialized on {@code PsiLock.LOCK}.
 */
public class ClsFileImpl extends ClsRepositoryPsiElement implements PsiJavaFile, PsiFileEx {
  private static final Logger LOG = Logger.getInstance("#com.intellij.psi.impl.compiled.ClsFileImpl");

  // Lazily-created PSI for the single top-level class in this .class file; reset in unloadContent().
  private volatile ClsClassImpl myClass = null;
  // Lazily-created package statement; one whose getPackageName() is null denotes the default
  // package, in which case getPackageStatement() reports "no statement" (returns null).
  private volatile ClsPackageStatementImpl myPackageStatement = null;
  // Key under which the backing Document is stashed on the mirror tree (see initializeMirror()).
  private static final Key<Document> DOCUMENT_IN_MIRROR_KEY = Key.create("DOCUMENT_IN_MIRROR_KEY");
  // True for throwaway instances created by decompile(); makes isValid() unconditionally true.
  private final boolean myIsForDecompiling;
  private final FileViewProvider myViewProvider;
  // Cached result of getLanguageLevelInner(); null until first getLanguageLevel() call.
  private volatile LanguageLevel myLanguageLevel = null;
  // Set by unloadContent(); forces getClasses() to rebuild from raw class-file bytes
  // instead of the repository on the next call.
  private volatile boolean myContentsUnloaded;
  // Class-file format major version 50 corresponds to JDK 6; versions below this may have
  // been rewritten by Retroweaver (see getLanguageLevel(VirtualFile, LanguageLevel)).
  private static final int MAX_CLASS_FILE_MAJOR_VERSION = 50;

  /**
   * @param forDecompiling when true the instance is a transient file used only to produce
   *                       decompiled text (see {@link #decompile}); it reports itself valid
   *                       regardless of the underlying virtual file.
   */
  private ClsFileImpl(@NotNull PsiManagerImpl manager, @NotNull FileViewProvider viewProvider, boolean forDecompiling) {
    // -2 is the "repository id not yet computed" sentinel; see getRepositoryId().
    super(manager, -2);
    myIsForDecompiling = forDecompiling;
    myViewProvider = viewProvider;
  }

  public ClsFileImpl(PsiManagerImpl manager, FileViewProvider viewProvider) {
    this(manager, viewProvider, false);
  }

  /** True only when the class PSI has been created and has itself loaded its contents. */
  public boolean isContentsLoaded() {
    return myClass != null && myClass.isContentsLoaded();
  }

  /**
   * Drops all lazily-built state (class PSI, package statement, cached language level,
   * mirror tree) and marks the contents as unloaded so the next {@link #getClasses()}
   * re-reads the raw class file rather than the repository.
   */
  public void unloadContent() {
    myLanguageLevel = null;
    if (myClass != null) {
      myClass.invalidate();
      myClass = null;
    }
    if (myPackageStatement != null) {
      myPackageStatement.invalidate();
      myPackageStatement = null;
    }
    myMirror = null;
    myContentsUnloaded = true;
  }

  /**
   * Returns the repository id for this file, computing and caching it on first call.
   * Sentinels: -2 = not yet computed (set in the constructor), -1 = no repository available.
   */
  public long getRepositoryId() {
    synchronized (PsiLock.LOCK) {
      long id = super.getRepositoryId();
      if (id == -2) {
        RepositoryManager repositoryManager = getRepositoryManager();
        if (repositoryManager != null) {
          id = repositoryManager.getFileId(getVirtualFile());
        }
        else {
          id = -1;
        }
        super.setRepositoryId(id);
      }
      return id;
    }
  }

  /** True once getRepositoryId() has resolved the -2 sentinel to a real id (or -1). */
  public boolean isRepositoryIdInitialized() {
    return super.getRepositoryId() != -2;
  }

  @NotNull
  public VirtualFile getVirtualFile() {
    return myViewProvider.getVirtualFile();
  }

  // No file-system children to offer the processor; report "continue processing".
  public boolean processChildren(final PsiElementProcessor<PsiFileSystemItem> processor) {
    return true;
  }

  public PsiDirectory getParent() {
    return getContainingDirectory();
  }

  public PsiDirectory getContainingDirectory() {
    VirtualFile parentFile = getVirtualFile().getParent();
    if (parentFile == null) return null;
    return getManager().findDirectory(parentFile);
  }

  public PsiFile getContainingFile() {
    if (!isValid()) throw new PsiInvalidElementAccessException(this);
    return this;
  }

  /** Decompiling-only instances are always valid; otherwise validity follows the virtual file. */
  public boolean isValid() {
    if (myIsForDecompiling) return true;
    VirtualFile vFile = getVirtualFile();
    return vFile.isValid();
  }

  public String getName() {
    return getVirtualFile().getName();
  }

  @NotNull
  public PsiElement[] getChildren() {
    return getClasses(); // TODO : package statement?
  }

  /**
   * Lazily materializes and returns the single top-level class of this file. When a
   * repository id is available and the contents were not explicitly unloaded, the class
   * PSI comes from the repository; otherwise it is parsed from the raw class-file bytes.
   */
  @NotNull
  public PsiClass[] getClasses() {
    long id = getRepositoryId();
    if (myClass == null) {
      if (id >= 0 && !myContentsUnloaded) {
        long[] classIds = getRepositoryManager().getFileView().getClasses(id);
        // A .class file is expected to contribute exactly one top-level class.
        LOG.assertTrue(classIds.length == 1, "Wrong number of compiled classes in repository: " + classIds.length);
        myClass = (ClsClassImpl)getRepositoryElementsManager().findOrCreatePsiElementById(classIds[0]);
      }
      else {
        myClass = new ClsClassImpl(myManager, this, new ClassFileData(getVirtualFile()));
        myContentsUnloaded = false;
      }
    }
    return new PsiClass[]{myClass};
  }

  /**
   * Returns the package statement, or null when the class sits in the default package
   * (i.e. the lazily-built statement has a null package name).
   */
  public PsiPackageStatement getPackageStatement() {
    if (myPackageStatement == null) {
      myPackageStatement = new ClsPackageStatementImpl(this);
    }
    return myPackageStatement.getPackageName() != null ? myPackageStatement : null;
  }

  /** Package name of the contained class; empty string for the default package. */
  @NotNull
  public String getPackageName() {
    PsiPackageStatement statement = getPackageStatement();
    if (statement == null) {
      return "";
    }
    else {
      return statement.getPackageName();
    }
  }

  // Compiled files carry no import section.
  public PsiImportList getImportList() {
    return null;
  }

  public boolean importClass(PsiClass aClass) {
    throw new UnsupportedOperationException("Cannot add imports to compiled classes");
  }

  @NotNull
  public PsiElement[] getOnDemandImports(boolean includeImplicit, boolean checkIncludes) {
    return PsiJavaCodeReferenceElement.EMPTY_ARRAY;
  }

  @NotNull
  public PsiClass[] getSingleClassImports(boolean checkIncludes) {
    return PsiClass.EMPTY_ARRAY;
  }

  @NotNull
  public String[] getImplicitlyImportedPackages() {
    return ArrayUtil.EMPTY_STRING_ARRAY;
  }

  @NotNull
  public PsiJavaCodeReferenceElement[] getImplicitlyImportedPackageReferences() {
    return PsiJavaCodeReferenceElement.EMPTY_ARRAY;
  }

  public PsiJavaCodeReferenceElement findImportReferenceTo(PsiClass aClass) {
    return null;
  }

  /** Language level derived from the class-file version, cached after the first call. */
  @NotNull
  public LanguageLevel getLanguageLevel() {
    //TODO: repository for language level
    if (myLanguageLevel == null) {
      myLanguageLevel = getLanguageLevelInner();
    }
    return myLanguageLevel;
  }

  private LanguageLevel getLanguageLevelInner() {
    return getLanguageLevel(getVirtualFile(), getManager().getEffectiveLanguageLevel());
  }

  /**
   * Reads the class-file header's major_version (u2 at offset 6, after the 4-byte magic
   * and the 2-byte minor_version) and maps it to a {@link LanguageLevel}. Falls back to
   * {@code defaultLanguageLevel} when the file is not a well-formed class file.
   */
  public static LanguageLevel getLanguageLevel(VirtualFile vFile, LanguageLevel defaultLanguageLevel) {
    try {
      final ClassFileData classFileData = new ClassFileData(vFile);
      final BytePointer ptr = new BytePointer(classFileData.getData(), 6);
      int majorVersion = ClsUtil.readU2(ptr);
      if (majorVersion < MAX_CLASS_FILE_MAJOR_VERSION) {
        /*check if the class file was processed by retroweaver, it has written an attribute of format
        (int retroweaver build number, int original class file version, long timestamp of weaving) */
        final int attributesOffset = classFileData.getOffsetOfAttributesSection();
        final BytePointer attrPtr = classFileData.findAttribute(attributesOffset, "net.sourceforge.Retroweaver");
        if (attrPtr != null) {
          // NOTE(review): this advances and reads from `ptr` (which points just past the
          // header version, i.e. into the constant pool), not from `attrPtr`, which is the
          // located Retroweaver attribute. That looks like it reads the wrong bytes —
          // confirm whether `attrPtr` was intended here.
          ptr.offset += 4; //skip retroweaver build number
          majorVersion = ClsUtil.readU2(ptr);
        }
      }
      return calcLanguageLevelBasedOnVersion(majorVersion);
    }
    catch (ClsFormatException e) {
      if (LOG.isDebugEnabled()) {
        LOG.debug(e);
      }
      return defaultLanguageLevel;
    }
  }

  // Class-file major versions: 48 = JDK 1.4, 49 = JDK 5; anything older maps to 1.3.
  private static LanguageLevel calcLanguageLevelBasedOnVersion(final int majorVersion) {
    if (majorVersion < 48) return LanguageLevel.JDK_1_3;
    if (majorVersion < 49) return LanguageLevel.JDK_1_4;
    return LanguageLevel.JDK_1_5;
  }

  public PsiElement setName(@NotNull String name) throws IncorrectOperationException {
    throw new IncorrectOperationException(CAN_NOT_MODIFY_MESSAGE);
  }

  public void checkSetName(String name) throws IncorrectOperationException {
    throw new IncorrectOperationException(CAN_NOT_MODIFY_MESSAGE);
  }

  public boolean isDirectory() {
    return false;
  }

  /**
   * Appends the decompiled-text rendering of this file: a header banner, the package
   * statement (if any), then the single top-level class.
   */
  public void appendMirrorText(final int indentLevel, final StringBuffer buffer) {
    buffer.append(PsiBundle.message("psi.decompiled.text.header"));
    goNextLine(indentLevel, buffer);
    goNextLine(indentLevel, buffer);
    final PsiPackageStatement packageStatement = getPackageStatement();
    if (packageStatement != null) {
      ((ClsElementImpl)packageStatement).appendMirrorText(0, buffer);
      goNextLine(indentLevel, buffer);
      goNextLine(indentLevel, buffer);
    }
    PsiClass aClass = getClasses()[0];
    ((ClsClassImpl)aClass).appendMirrorText(0, buffer);
  }

  /**
   * Installs the parsed decompiled tree as this file's mirror and recursively wires the
   * package statement and class(es) to their mirror counterparts.
   */
  public void setMirror(TreeElement element) {
    LOG.assertTrue(myMirror == null);
    myMirror = element;
    PsiElement mirrorFile = SourceTreeToPsiMap.treeElementToPsi(myMirror);
    if (mirrorFile instanceof PsiJavaFile) {
      PsiPackageStatement packageStatementMirror = ((PsiJavaFile)mirrorFile).getPackageStatement();
      final PsiPackageStatement packageStatement = getPackageStatement();
      if (packageStatementMirror != null && packageStatement != null) {
        ((ClsElementImpl)packageStatement).setMirror((TreeElement)SourceTreeToPsiMap.psiElementToTree(packageStatementMirror));
      }
      PsiClass[] classes = getClasses();
      PsiClass[] mirrorClasses = ((PsiJavaFile)mirrorFile).getClasses();
      LOG.assertTrue(classes.length == mirrorClasses.length);
      // The if repeats the assertion's condition so the loop stays safe even when
      // assertions are disabled and the lengths actually differ.
      if (classes.length == mirrorClasses.length) {
        for (int i = 0; i < classes.length; i++) {
          ((ClsElementImpl)classes[i]).setMirror((TreeElement)SourceTreeToPsiMap.psiElementToTree(mirrorClasses[i]));
        }
      }
    }
  }

  /** Text of the decompiled mirror; forces the mirror to be built. */
  public String getText() {
    initializeMirror();
    return myMirror.getText();
  }

  @NotNull
  public char[] textToCharArray() {
    initializeMirror();
    return myMirror.textToCharArray();
  }

  /**
   * Tries to locate the corresponding source file (package path + SourceFile attribute
   * name) in the source roots of the order entries that own this class file; returns the
   * source PSI file when found, otherwise this compiled file itself.
   */
  public PsiElement getNavigationElement() {
    String packageName = getPackageName();
    // NOTE(review): myClass is dereferenced directly here; it is only assigned inside
    // getClasses(), so this assumes getClasses() ran earlier on this instance — confirm
    // callers guarantee that, otherwise this can NPE.
    String sourceFileName = myClass.getSourceFileName();
    String relativeFilePath = packageName.length() == 0 ?
    sourceFileName :
    packageName.replace('.', '/') + '/' + sourceFileName;
    final VirtualFile vFile = getContainingFile().getVirtualFile();
    ProjectFileIndex projectFileIndex = ProjectRootManager.getInstance(getProject()).getFileIndex();
    final List<OrderEntry> orderEntries = projectFileIndex.getOrderEntriesForFile(vFile);
    for (OrderEntry orderEntry : orderEntries) {
      VirtualFile[] files = orderEntry.getFiles(OrderRootType.SOURCES);
      for (VirtualFile file : files) {
        VirtualFile source = file.findFileByRelativePath(relativeFilePath);
        if (source != null) {
          PsiFile psiSource = getManager().findFile(source);
          if (psiSource instanceof PsiJavaFile) {
            return psiSource;
          }
        }
      }
    }
    return this;
  }

  /**
   * Lazily builds the mirror: parses the decompiled document text into a PSI file, then
   * installs it under PsiLock.LOCK with a second null-check (double-checked install; the
   * parse deliberately happens before the lock is taken — see the in-code warning).
   */
  private void initializeMirror() {
    if (myMirror == null) {
      FileDocumentManager documentManager = FileDocumentManager.getInstance();
      Document document = documentManager.getDocument(getVirtualFile());
      String text = document.getText();
      String ext = StdFileTypes.JAVA.getDefaultExtension();
      PsiClass aClass = getClasses()[0];
      String fileName = aClass.getName() + "." + ext;
      PsiManager manager = getManager();
      PsiFile mirror = manager.getElementFactory().createFileFromText(fileName, text);
      ASTNode mirrorTreeElement = SourceTreeToPsiMap.psiElementToTree(mirror);
      //IMPORTANT: do not take lock too early - FileDocumentManager.getInstance().saveToString() can run write action...
      synchronized (PsiLock.LOCK) {
        if (myMirror == null) {
          setMirror((TreeElement)mirrorTreeElement);
          // Keep the document reachable from the mirror so it is not GC'd from under it.
          myMirror.putUserData(DOCUMENT_IN_MIRROR_KEY, document);
        }
      }
    }
  }

  public long getModificationStamp() {
    return getVirtualFile().getModificationStamp();
  }

  public void accept(@NotNull PsiElementVisitor visitor) {
    visitor.visitJavaFile(this);
  }

  @NonNls
  public String toString() {
    return "PsiFile:" + getName();
  }

  // null — presumably, per the historical PsiFile contract, meaning this file IS the
  // original (there is no non-physical copy to point back to); verify against the
  // PsiFile.getOriginalFile() javadoc of this platform version.
  public PsiFile getOriginalFile() {
    return null;
  }

  @NotNull
  public FileType getFileType() {
    return StdFileTypes.CLASS;
  }

  @NotNull
  public PsiFile[] getPsiRoots() {
    return new PsiFile[]{this};
  }

  @NotNull
  public FileViewProvider getViewProvider() {
    return myViewProvider;
  }

  // Compiled files never change from under PSI; nothing to react to.
  public void subtreeChanged() {
  }

  /**
   * Produces the decompiled text of {@code file}. Reuses the cached PSI file from the
   * view provider when available; otherwise builds a transient ClsFileImpl with
   * forDecompiling=true (so it is valid even outside project content).
   */
  public static String decompile(PsiManager manager, VirtualFile file) {
    final FileViewProvider provider = ((PsiManagerImpl)manager).getFileManager().findCachedViewProvider(file);
    ClsFileImpl psiFile = null;
    if (provider != null) {
      psiFile = (ClsFileImpl)provider.getPsi(provider.getBaseLanguage());
    }
    if (psiFile == null) {
      psiFile = new ClsFileImpl((PsiManagerImpl)manager, new SingleRootFileViewProvider(manager, file), true);
    }
    StringBuffer buffer = new StringBuffer();
    psiFile.appendMirrorText(0, buffer);
    return buffer.toString();
  }

  @Nullable
  public LanguageDialect getLanguageDialect() {
    return null;
  }
}
|
source/com/intellij/psi/impl/compiled/ClsFileImpl.java
|
package com.intellij.psi.impl.compiled;
import com.intellij.lang.ASTNode;
import com.intellij.lang.LanguageDialect;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.fileEditor.FileDocumentManager;
import com.intellij.openapi.fileTypes.FileType;
import com.intellij.openapi.fileTypes.StdFileTypes;
import com.intellij.openapi.roots.OrderEntry;
import com.intellij.openapi.roots.OrderRootType;
import com.intellij.openapi.roots.ProjectFileIndex;
import com.intellij.openapi.roots.ProjectRootManager;
import com.intellij.openapi.util.Key;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.pom.java.LanguageLevel;
import com.intellij.psi.*;
import com.intellij.psi.search.PsiElementProcessor;
import com.intellij.psi.impl.PsiFileEx;
import com.intellij.psi.impl.PsiManagerImpl;
import com.intellij.psi.impl.cache.RepositoryManager;
import com.intellij.psi.impl.source.SourceTreeToPsiMap;
import com.intellij.psi.impl.source.tree.TreeElement;
import com.intellij.util.ArrayUtil;
import com.intellij.util.IncorrectOperationException;
import com.intellij.util.cls.BytePointer;
import com.intellij.util.cls.ClsFormatException;
import com.intellij.util.cls.ClsUtil;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.List;
/**
 * PSI representation of a compiled Java class file (.class).
 *
 * <p>The file exposes exactly one top-level {@link PsiClass} (see {@link #getClasses()}),
 * built either from the repository cache or by parsing the raw class-file bytes.
 * For presentation it maintains a decompiled "mirror" source tree that individual
 * Cls elements are matched against (see {@link #setMirror} / {@link #initializeMirror}).
 */
public class ClsFileImpl extends ClsRepositoryPsiElement implements PsiJavaFile, PsiFileEx {
  private static final Logger LOG = Logger.getInstance("#com.intellij.psi.impl.compiled.ClsFileImpl");
  // Lazily created single top-level class of this file; null until getClasses() runs
  // or after unloadContent().
  private ClsClassImpl myClass = null;
  // Lazily created package statement; kept even when the file is in the default package
  // (getPackageStatement() then filters it out by its null package name).
  private ClsPackageStatementImpl myPackageStatement = null;
  // Links the mirror tree back to the Document it was created from.
  private static final Key<Document> DOCUMENT_IN_MIRROR_KEY = Key.create("DOCUMENT_IN_MIRROR_KEY");
  // Decompiling-only instances (see decompile()) bypass validity checks tied to the VFS.
  private final boolean myIsForDecompiling;
  private final FileViewProvider myViewProvider;
  // Cached language level derived from the class-file version; reset by unloadContent().
  private LanguageLevel myLanguageLevel = null;
  // Set after unloadContent() so getClasses() rebuilds from raw bytes instead of the repository.
  private boolean myContentsUnloaded;
  // Class-file major version 50 corresponds to Java 6; anything below may carry a
  // Retroweaver attribute recording the original (pre-weaving) version.
  private static final int MAX_CLASS_FILE_MAJOR_VERSION = 50;

  private ClsFileImpl(@NotNull PsiManagerImpl manager, @NotNull FileViewProvider viewProvider, boolean forDecompiling) {
    // -2 is the "repository id not yet computed" sentinel; resolved lazily in getRepositoryId().
    super(manager, -2);
    myIsForDecompiling = forDecompiling;
    myViewProvider = viewProvider;
  }

  public ClsFileImpl(PsiManagerImpl manager, FileViewProvider viewProvider) {
    this(manager, viewProvider, false);
  }

  public boolean isContentsLoaded() {
    return myClass != null && myClass.isContentsLoaded();
  }

  /** Drops all lazily built state (class, package statement, mirror, language level). */
  public void unloadContent() {
    myLanguageLevel = null;
    if (myClass != null) {
      myClass.invalidate();
      myClass = null;
    }
    if (myPackageStatement != null) {
      myPackageStatement.invalidate();
      myPackageStatement = null;
    }
    myMirror = null;
    myContentsUnloaded = true;
  }

  /**
   * Returns the repository id for this file, computing and caching it on first access.
   * -2 means "not yet initialized", -1 means "no repository available".
   */
  public long getRepositoryId() {
    synchronized (PsiLock.LOCK) {
      long id = super.getRepositoryId();
      if (id == -2) {
        RepositoryManager repositoryManager = getRepositoryManager();
        if (repositoryManager != null) {
          id = repositoryManager.getFileId(getVirtualFile());
        }
        else {
          id = -1;
        }
        super.setRepositoryId(id);
      }
      return id;
    }
  }

  public boolean isRepositoryIdInitialized() {
    return super.getRepositoryId() != -2;
  }

  @NotNull
  public VirtualFile getVirtualFile() {
    return myViewProvider.getVirtualFile();
  }

  // A class file has no child file-system items to process.
  public boolean processChildren(final PsiElementProcessor<PsiFileSystemItem> processor) {
    return true;
  }

  public PsiDirectory getParent() {
    return getContainingDirectory();
  }

  public PsiDirectory getContainingDirectory() {
    VirtualFile parentFile = getVirtualFile().getParent();
    if (parentFile == null) return null;
    return getManager().findDirectory(parentFile);
  }

  public PsiFile getContainingFile() {
    if (!isValid()) throw new PsiInvalidElementAccessException(this);
    return this;
  }

  public boolean isValid() {
    // Decompiling-only instances are never backed by a live VFS file, so treat them as valid.
    if (myIsForDecompiling) return true;
    VirtualFile vFile = getVirtualFile();
    return vFile.isValid();
  }

  public String getName() {
    return getVirtualFile().getName();
  }

  @NotNull
  public PsiElement[] getChildren() {
    return getClasses(); // TODO : package statement?
  }

  /**
   * Returns the single top-level class of this file, materializing it on first call
   * from the repository (when available and contents were not unloaded) or from the
   * raw class-file bytes otherwise.
   */
  @NotNull
  public PsiClass[] getClasses() {
    long id = getRepositoryId();
    if (myClass == null) {
      if (id >= 0 && !myContentsUnloaded) {
        long[] classIds = getRepositoryManager().getFileView().getClasses(id);
        LOG.assertTrue(classIds.length == 1, "Wrong number of compiled classes in repository: " + classIds.length);
        myClass = (ClsClassImpl)getRepositoryElementsManager().findOrCreatePsiElementById(classIds[0]);
      }
      else {
        myClass = new ClsClassImpl(myManager, this, new ClassFileData(getVirtualFile()));
        myContentsUnloaded = false;
      }
    }
    return new PsiClass[]{myClass};
  }

  /** Returns the package statement, or null when the class is in the default package. */
  public PsiPackageStatement getPackageStatement() {
    if (myPackageStatement == null) {
      myPackageStatement = new ClsPackageStatementImpl(this);
    }
    return myPackageStatement.getPackageName() != null ? myPackageStatement : null;
  }

  @NotNull
  public String getPackageName() {
    PsiPackageStatement statement = getPackageStatement();
    if (statement == null) {
      return "";
    }
    else {
      return statement.getPackageName();
    }
  }

  // Compiled files carry no import list and cannot be modified.
  public PsiImportList getImportList() {
    return null;
  }

  public boolean importClass(PsiClass aClass) {
    throw new UnsupportedOperationException("Cannot add imports to compiled classes");
  }

  @NotNull
  public PsiElement[] getOnDemandImports(boolean includeImplicit, boolean checkIncludes) {
    return PsiJavaCodeReferenceElement.EMPTY_ARRAY;
  }

  @NotNull
  public PsiClass[] getSingleClassImports(boolean checkIncludes) {
    return PsiClass.EMPTY_ARRAY;
  }

  @NotNull
  public String[] getImplicitlyImportedPackages() {
    return ArrayUtil.EMPTY_STRING_ARRAY;
  }

  @NotNull
  public PsiJavaCodeReferenceElement[] getImplicitlyImportedPackageReferences() {
    return PsiJavaCodeReferenceElement.EMPTY_ARRAY;
  }

  public PsiJavaCodeReferenceElement findImportReferenceTo(PsiClass aClass) {
    return null;
  }

  @NotNull
  public LanguageLevel getLanguageLevel() {
    //TODO: repository for language level
    if (myLanguageLevel == null) {
      myLanguageLevel = getLanguageLevelInner();
    }
    return myLanguageLevel;
  }

  private LanguageLevel getLanguageLevelInner() {
    return getLanguageLevel(getVirtualFile(), getManager().getEffectiveLanguageLevel());
  }

  /**
   * Derives the language level from the class-file major version stored at byte offset 6.
   * Falls back to {@code defaultLanguageLevel} when the file cannot be parsed.
   */
  public static LanguageLevel getLanguageLevel(VirtualFile vFile, LanguageLevel defaultLanguageLevel) {
    try {
      final ClassFileData classFileData = new ClassFileData(vFile);
      final BytePointer ptr = new BytePointer(classFileData.getData(), 6);
      int majorVersion = ClsUtil.readU2(ptr);
      if (majorVersion < MAX_CLASS_FILE_MAJOR_VERSION) {
        /*check if the class file was processed by retroweaver, it has written an attribute of format
          (int retroweaver build number, int original class file version, long timestamp of weaving) */
        final int attributesOffset = classFileData.getOffsetOfAttributesSection();
        final BytePointer attrPtr = classFileData.findAttribute(attributesOffset, "net.sourceforge.Retroweaver");
        if (attrPtr != null) {
          // NOTE(review): the attribute data is located via attrPtr, yet the original
          // version is read through ptr below — looks like attrPtr was intended; confirm.
          ptr.offset += 4; //skip retroweaver build number
          majorVersion = ClsUtil.readU2(ptr);
        }
      }
      return calcLanguageLevelBasedOnVersion(majorVersion);
    }
    catch (ClsFormatException e) {
      if (LOG.isDebugEnabled()) {
        LOG.debug(e);
      }
      return defaultLanguageLevel;
    }
  }

  // Class-file major version 48 = Java 1.4, 49 = Java 5; anything older maps to 1.3.
  private static LanguageLevel calcLanguageLevelBasedOnVersion(final int majorVersion) {
    if (majorVersion < 48) return LanguageLevel.JDK_1_3;
    if (majorVersion < 49) return LanguageLevel.JDK_1_4;
    return LanguageLevel.JDK_1_5;
  }

  public PsiElement setName(@NotNull String name) throws IncorrectOperationException {
    throw new IncorrectOperationException(CAN_NOT_MODIFY_MESSAGE);
  }

  public void checkSetName(String name) throws IncorrectOperationException {
    throw new IncorrectOperationException(CAN_NOT_MODIFY_MESSAGE);
  }

  public boolean isDirectory() {
    return false;
  }

  /**
   * Renders the decompiled text of this file: header, optional package statement,
   * then the single top-level class.
   */
  public void appendMirrorText(final int indentLevel, final StringBuffer buffer) {
    buffer.append(PsiBundle.message("psi.decompiled.text.header"));
    goNextLine(indentLevel, buffer);
    goNextLine(indentLevel, buffer);
    final PsiPackageStatement packageStatement = getPackageStatement();
    if (packageStatement != null) {
      ((ClsElementImpl)packageStatement).appendMirrorText(0, buffer);
      goNextLine(indentLevel, buffer);
      goNextLine(indentLevel, buffer);
    }
    PsiClass aClass = getClasses()[0];
    ((ClsClassImpl)aClass).appendMirrorText(0, buffer);
  }

  /**
   * Attaches the decompiled source tree and recursively matches the package statement
   * and classes of this compiled file against their counterparts in the mirror file.
   */
  public void setMirror(TreeElement element) {
    LOG.assertTrue(myMirror == null);
    myMirror = element;
    PsiElement mirrorFile = SourceTreeToPsiMap.treeElementToPsi(myMirror);
    if (mirrorFile instanceof PsiJavaFile) {
      PsiPackageStatement packageStatementMirror = ((PsiJavaFile)mirrorFile).getPackageStatement();
      final PsiPackageStatement packageStatement = getPackageStatement();
      if (packageStatementMirror != null && packageStatement != null) {
        ((ClsElementImpl)packageStatement).setMirror((TreeElement)SourceTreeToPsiMap.psiElementToTree(packageStatementMirror));
      }
      PsiClass[] classes = getClasses();
      PsiClass[] mirrorClasses = ((PsiJavaFile)mirrorFile).getClasses();
      LOG.assertTrue(classes.length == mirrorClasses.length);
      if (classes.length == mirrorClasses.length) {
        for (int i = 0; i < classes.length; i++) {
          ((ClsElementImpl)classes[i]).setMirror((TreeElement)SourceTreeToPsiMap.psiElementToTree(mirrorClasses[i]));
        }
      }
    }
  }

  public String getText() {
    initializeMirror();
    return myMirror.getText();
  }

  @NotNull
  public char[] textToCharArray() {
    initializeMirror();
    return myMirror.textToCharArray();
  }

  /**
   * Tries to locate the corresponding .java source in the order entries' source roots
   * (by package path + SourceFile attribute); falls back to this compiled file.
   */
  public PsiElement getNavigationElement() {
    String packageName = getPackageName();
    // NOTE(review): myClass is dereferenced without a null check; this presumably relies
    // on getPackageName()/callers having materialized it already — confirm.
    String sourceFileName = myClass.getSourceFileName();
    String relativeFilePath = packageName.length() == 0 ?
                              sourceFileName :
                              packageName.replace('.', '/') + '/' + sourceFileName;
    final VirtualFile vFile = getContainingFile().getVirtualFile();
    ProjectFileIndex projectFileIndex = ProjectRootManager.getInstance(getProject()).getFileIndex();
    final List<OrderEntry> orderEntries = projectFileIndex.getOrderEntriesForFile(vFile);
    for (OrderEntry orderEntry : orderEntries) {
      VirtualFile[] files = orderEntry.getFiles(OrderRootType.SOURCES);
      for (VirtualFile file : files) {
        VirtualFile source = file.findFileByRelativePath(relativeFilePath);
        if (source != null) {
          PsiFile psiSource = getManager().findFile(source);
          if (psiSource instanceof PsiJavaFile) {
            return psiSource;
          }
        }
      }
    }
    return this;
  }

  /**
   * Builds the mirror tree from the decompiled document text on first use.
   * Uses check/act double-checking around PsiLock; see the note below about lock timing.
   */
  private void initializeMirror() {
    if (myMirror == null) {
      FileDocumentManager documentManager = FileDocumentManager.getInstance();
      Document document = documentManager.getDocument(getVirtualFile());
      // NOTE(review): getDocument may return null for an invalid/oversized file — confirm
      // callers guarantee a document here, otherwise this NPEs on document.getText().
      String text = document.getText();
      String ext = StdFileTypes.JAVA.getDefaultExtension();
      PsiClass aClass = getClasses()[0];
      String fileName = aClass.getName() + "." + ext;
      PsiManager manager = getManager();
      PsiFile mirror = manager.getElementFactory().createFileFromText(fileName, text);
      ASTNode mirrorTreeElement = SourceTreeToPsiMap.psiElementToTree(mirror);
      //IMPORTANT: do not take lock too early - FileDocumentManager.getInstance().saveToString() can run write action...
      synchronized (PsiLock.LOCK) {
        if (myMirror == null) {
          setMirror((TreeElement)mirrorTreeElement);
          myMirror.putUserData(DOCUMENT_IN_MIRROR_KEY, document);
        }
      }
    }
  }

  public long getModificationStamp() {
    return getVirtualFile().getModificationStamp();
  }

  public void accept(@NotNull PsiElementVisitor visitor) {
    visitor.visitJavaFile(this);
  }

  @NonNls
  public String toString() {
    return "PsiFile:" + getName();
  }

  public PsiFile getOriginalFile() {
    return null;
  }

  @NotNull
  public FileType getFileType() {
    return StdFileTypes.CLASS;
  }

  @NotNull
  public PsiFile[] getPsiRoots() {
    return new PsiFile[]{this};
  }

  @NotNull
  public FileViewProvider getViewProvider() {
    return myViewProvider;
  }

  public void subtreeChanged() {
  }

  /**
   * Produces the decompiled text for {@code file}, reusing a cached PSI file when one
   * exists and otherwise creating a throwaway decompiling-only instance.
   */
  public static String decompile(PsiManager manager, VirtualFile file) {
    final FileViewProvider provider = ((PsiManagerImpl)manager).getFileManager().findCachedViewProvider(file);
    ClsFileImpl psiFile = null;
    if (provider != null) {
      psiFile = (ClsFileImpl)provider.getPsi(provider.getBaseLanguage());
    }
    if (psiFile == null) {
      psiFile = new ClsFileImpl((PsiManagerImpl)manager, new SingleRootFileViewProvider(manager, file), true);
    }
    StringBuffer buffer = new StringBuffer();
    psiFile.appendMirrorText(0, buffer);
    return buffer.toString();
  }

  @Nullable
  public LanguageDialect getLanguageDialect() {
    return null;
  }
}
|
volatiled just in case
|
source/com/intellij/psi/impl/compiled/ClsFileImpl.java
|
volatiled just in case
|
|
Java
|
apache-2.0
|
9c3d30283d7087125d0ba7ce274547f40880b4f6
| 0
|
178220709/HelloScala,178220709/HelloScala,178220709/HelloScala
|
package jsonsong.spider.controllers;
import jsonsong.spider.dao.CarDao;
import jsonsong.spider.dto.CarResponse;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import java.util.Calendar;
import java.util.Date;
import java.util.GregorianCalendar;
import java.util.TimeZone;
@RestController
public class ReportController {
    @Autowired
    private CarDao carDao;

    /**
     * Reports the cars captured during the 24-hour window ending one day ago:
     * the window runs from (now - 2 days) to (now - 1 day), in the JVM's
     * default time zone.
     */
    @RequestMapping(value = "/report/car")
    public CarResponse getCarReport() {
        // Walk the calendar backwards one day at a time to obtain both window edges.
        Calendar cursor = GregorianCalendar.getInstance();
        cursor.add(Calendar.DATE, -1);
        final Date windowEnd = cursor.getTime();
        cursor.add(Calendar.DATE, -1);
        final Date windowStart = cursor.getTime();

        final CarResponse response = new CarResponse();
        response.setCars(carDao.getCarByTime(windowStart, windowEnd));
        return response;
    }
}
|
ScalaSpider/src/main/java/jsonsong/spider/controllers/ReportController.java
|
package jsonsong.spider.controllers;
import jsonsong.spider.dao.CarDao;
import jsonsong.spider.dto.CarResponse;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import java.util.Calendar;
import java.util.Date;
import java.util.GregorianCalendar;
import java.util.TimeZone;
@RestController
public class ReportController {
    @Autowired
    private CarDao carRepository;

    /**
     * Reports the cars captured during the 24-hour window ending one day ago,
     * evaluated in the GMT+8 time zone: from (now - 2 days) to (now - 1 day).
     */
    @RequestMapping(value = "/report/car")
    public CarResponse GetCarReport() {
        CarResponse response = new CarResponse();
        // Fix: use a zone-specific Calendar instead of TimeZone.setDefault("GMT+8").
        // Mutating the JVM-wide default time zone is a global side effect that
        // races with every other thread and silently changes unrelated date math.
        Calendar todayStart = GregorianCalendar.getInstance(TimeZone.getTimeZone("GMT+8"));
        todayStart.add(Calendar.DATE, -1);
        Date endTime = todayStart.getTime();
        todayStart.add(Calendar.DATE, -1);
        Date startTime = todayStart.getTime();
        response.setCars(carRepository.getCarByTime(startTime, endTime));
        return response;
    }
}
|
rename
|
ScalaSpider/src/main/java/jsonsong/spider/controllers/ReportController.java
|
rename
|
|
Java
|
apache-2.0
|
54e826def605ea2371c187418e0a95f8d218e395
| 0
|
nknize/elasticsearch,robin13/elasticsearch,uschindler/elasticsearch,gingerwizard/elasticsearch,gingerwizard/elasticsearch,gingerwizard/elasticsearch,nknize/elasticsearch,robin13/elasticsearch,uschindler/elasticsearch,uschindler/elasticsearch,robin13/elasticsearch,gingerwizard/elasticsearch,gingerwizard/elasticsearch,robin13/elasticsearch,HonzaKral/elasticsearch,uschindler/elasticsearch,scorpionvicky/elasticsearch,uschindler/elasticsearch,robin13/elasticsearch,GlenRSmith/elasticsearch,HonzaKral/elasticsearch,GlenRSmith/elasticsearch,nknize/elasticsearch,scorpionvicky/elasticsearch,HonzaKral/elasticsearch,scorpionvicky/elasticsearch,gingerwizard/elasticsearch,GlenRSmith/elasticsearch,gingerwizard/elasticsearch,HonzaKral/elasticsearch,GlenRSmith/elasticsearch,GlenRSmith/elasticsearch,scorpionvicky/elasticsearch,nknize/elasticsearch,scorpionvicky/elasticsearch,nknize/elasticsearch
|
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.ml.inference.loadingservice;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.cluster.ClusterChangedEvent;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.ClusterStateListener;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.cache.Cache;
import org.elasticsearch.common.cache.CacheBuilder;
import org.elasticsearch.common.cache.RemovalNotification;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.set.Sets;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.ingest.IngestMetadata;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.xpack.core.ml.inference.TrainedModelConfig;
import org.elasticsearch.xpack.core.ml.inference.trainedmodel.ClassificationConfig;
import org.elasticsearch.xpack.core.ml.inference.trainedmodel.InferenceConfig;
import org.elasticsearch.xpack.core.ml.inference.trainedmodel.RegressionConfig;
import org.elasticsearch.xpack.core.ml.inference.trainedmodel.TargetType;
import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper;
import org.elasticsearch.xpack.ml.MachineLearning;
import org.elasticsearch.xpack.ml.inference.ingest.InferenceProcessor;
import org.elasticsearch.xpack.ml.inference.persistence.TrainedModelProvider;
import org.elasticsearch.xpack.ml.notifications.InferenceAuditor;
import java.io.IOException;
import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Queue;
import java.util.Set;
import java.util.concurrent.TimeUnit;
/**
* This is a thread safe model loading service.
*
* It will cache local models that are referenced by processors in memory (as long as it is instantiated on an ingest node).
*
* If more than one processor references the same model, that model will only be cached once.
*/
/**
 * This is a thread safe model loading service.
 *
 * It will cache local models that are referenced by processors in memory (as long as it is instantiated on an ingest node).
 *
 * If more than one processor references the same model, that model will only be cached once.
 */
public class ModelLoadingService implements ClusterStateListener {

    /**
     * The maximum size of the local model cache here in the loading service
     *
     * Once the limit is reached, LRU models are evicted in favor of new models
     */
    public static final Setting<ByteSizeValue> INFERENCE_MODEL_CACHE_SIZE =
        Setting.memorySizeSetting("xpack.ml.inference_model.cache_size",
            "40%",
            Setting.Property.NodeScope);

    /**
     * How long should a model stay in the cache since its last access
     *
     * If nothing references a model via getModel for this configured timeValue, it will be evicted.
     *
     * Specifically, in the ingest scenario, a processor will call getModel whenever it needs to run inference. So, if a processor is not
     * executed for an extended period of time, the model will be evicted and will have to be loaded again when getModel is called.
     *
     */
    public static final Setting<TimeValue> INFERENCE_MODEL_CACHE_TTL =
        Setting.timeSetting("xpack.ml.inference_model.time_to_live",
            new TimeValue(5, TimeUnit.MINUTES),
            new TimeValue(1, TimeUnit.MILLISECONDS),
            Setting.Property.NodeScope);

    private static final Logger logger = LogManager.getLogger(ModelLoadingService.class);
    // LRU cache of pipeline-referenced models, bounded by memory weight and TTL.
    private final Cache<String, LocalModel<? extends InferenceConfig>> localModelCache;
    // Model ids currently referenced by at least one ingest pipeline.
    // Guarded by `synchronized (loadingListeners)`.
    private final Set<String> referencedModels = new HashSet<>();
    // Per-model listeners waiting for an in-flight load; a key being present means
    // "this model is loading". Guarded by `synchronized (loadingListeners)`.
    private final Map<String, Queue<ActionListener<Model<? extends InferenceConfig>>>> loadingListeners = new HashMap<>();
    private final TrainedModelProvider provider;
    // Models whose eviction has already been audited; avoids repeated audit noise.
    private final Set<String> shouldNotAudit;
    private final ThreadPool threadPool;
    private final InferenceAuditor auditor;
    private final ByteSizeValue maxCacheSize;
    private final NamedXContentRegistry namedXContentRegistry;

    /**
     * @param trainedModelProvider source of trained model configurations (the ML index)
     * @param auditor              audit sink for load/eviction notifications
     * @param threadPool           used to run model loads off the cluster-state thread
     * @param clusterService       subscribed to for ingest-pipeline changes
     * @param namedXContentRegistry registry needed to parse stored model definitions
     * @param settings             node settings supplying cache size and TTL
     */
    public ModelLoadingService(TrainedModelProvider trainedModelProvider,
                               InferenceAuditor auditor,
                               ThreadPool threadPool,
                               ClusterService clusterService,
                               NamedXContentRegistry namedXContentRegistry,
                               Settings settings) {
        this.provider = trainedModelProvider;
        this.threadPool = threadPool;
        this.maxCacheSize = INFERENCE_MODEL_CACHE_SIZE.get(settings);
        this.auditor = auditor;
        this.shouldNotAudit = new HashSet<>();
        this.namedXContentRegistry = namedXContentRegistry;
        this.localModelCache = CacheBuilder.<String, LocalModel<? extends InferenceConfig>>builder()
            .setMaximumWeight(this.maxCacheSize.getBytes())
            .weigher((id, localModel) -> localModel.ramBytesUsed())
            // explicit declaration of the listener lambda necessary for Eclipse IDE 4.14
            .removalListener(notification -> cacheEvictionListener(notification))
            .setExpireAfterAccess(INFERENCE_MODEL_CACHE_TTL.get(settings))
            .build();
        clusterService.addListener(this);
    }

    /**
     * Gets the model referenced by `modelId` and responds to the listener.
     *
     * This method first checks the local LRU cache for the model. If it is present, it is returned from cache.
     *
     * If it is not present, one of the following occurs:
     *
     *  - If the model is referenced by a pipeline and is currently being loaded, the `modelActionListener`
     *    is added to the list of listeners to be alerted when the model is fully loaded.
     *  - If the model is referenced by a pipeline and is currently NOT being loaded, a new load attempt is made and the resulting
     *    model will attempt to be cached for future reference
     *  - If the models is NOT referenced by a pipeline, the model is simply loaded from the index and given to the listener.
     *    It is not cached.
     *
     * @param modelId the model to get
     * @param modelActionListener the listener to alert when the model has been retrieved.
     */
    public void getModel(String modelId, ActionListener<Model<? extends InferenceConfig>> modelActionListener) {
        LocalModel<? extends InferenceConfig> cachedModel = localModelCache.get(modelId);
        if (cachedModel != null) {
            modelActionListener.onResponse(cachedModel);
            logger.trace("[{}] loaded from cache", modelId);
            return;
        }
        if (loadModelIfNecessary(modelId, modelActionListener) == false) {
            // If we the model is not loaded and we did not kick off a new loading attempt, this means that we may be getting called
            // by a simulated pipeline
            logger.trace("[{}] not actively loading, eager loading without cache", modelId);
            provider.getTrainedModel(modelId, true, ActionListener.wrap(
                trainedModelConfig -> {
                    trainedModelConfig.ensureParsedDefinition(namedXContentRegistry);
                    // Older model configs may lack an explicit inference config; derive one
                    // from the trained model's target type in that case.
                    InferenceConfig inferenceConfig = trainedModelConfig.getInferenceConfig() == null ?
                        inferenceConfigFromTargetType(trainedModelConfig.getModelDefinition().getTrainedModel().targetType()) :
                        trainedModelConfig.getInferenceConfig();
                    modelActionListener.onResponse(new LocalModel<>(
                        trainedModelConfig.getModelId(),
                        trainedModelConfig.getModelDefinition(),
                        trainedModelConfig.getInput(),
                        trainedModelConfig.getDefaultFieldMap(),
                        inferenceConfig));
                },
                modelActionListener::onFailure
            ));
        } else {
            logger.trace("[{}] is loading or loaded, added new listener to queue", modelId);
        }
    }

    /**
     * Returns true if the model is loaded and the listener has been given the cached model
     * Returns true if the model is CURRENTLY being loaded and the listener was added to be notified when it is loaded
     * Returns false if the model is not loaded or actively being loaded
     */
    private boolean loadModelIfNecessary(String modelId, ActionListener<Model<? extends InferenceConfig>> modelActionListener) {
        synchronized (loadingListeners) {
            // Re-check the cache under the lock: a concurrent load may have completed
            // between the caller's cache miss and acquiring this monitor.
            Model<? extends InferenceConfig> cachedModel = localModelCache.get(modelId);
            if (cachedModel != null) {
                modelActionListener.onResponse(cachedModel);
                return true;
            }
            // It is referenced by a pipeline, but the cache does not contain it
            if (referencedModels.contains(modelId)) {
                // If the loaded model is referenced there but is not present,
                // that means the previous load attempt failed or the model has been evicted
                // Attempt to load and cache the model if necessary
                if (loadingListeners.computeIfPresent(
                    modelId,
                    (storedModelKey, listenerQueue) -> addFluently(listenerQueue, modelActionListener)) == null) {
                    logger.trace("[{}] attempting to load and cache", modelId);
                    loadingListeners.put(modelId, addFluently(new ArrayDeque<>(), modelActionListener));
                    loadModel(modelId);
                }
                return true;
            }
            // if the cachedModel entry is null, but there are listeners present, that means it is being loaded
            return loadingListeners.computeIfPresent(modelId,
                (storedModelKey, listenerQueue) -> addFluently(listenerQueue, modelActionListener)) != null;
        } // synchronized (loadingListeners)
    }

    // Asynchronously fetches the model config and routes success/failure to the handlers below.
    private void loadModel(String modelId) {
        provider.getTrainedModel(modelId, true, ActionListener.wrap(
            trainedModelConfig -> {
                logger.debug("[{}] successfully loaded model", modelId);
                handleLoadSuccess(modelId, trainedModelConfig);
            },
            failure -> {
                logger.warn(new ParameterizedMessage("[{}] failed to load model", modelId), failure);
                handleLoadFailure(modelId, failure);
            }
        ));
    }

    /**
     * Caches the freshly loaded model (unless the load was canceled meanwhile) and
     * notifies every listener queued for it. Listeners are notified OUTSIDE the lock
     * to avoid invoking alien code while holding the monitor.
     */
    private void handleLoadSuccess(String modelId, TrainedModelConfig trainedModelConfig) throws IOException {
        Queue<ActionListener<Model<? extends InferenceConfig>>> listeners;
        trainedModelConfig.ensureParsedDefinition(namedXContentRegistry);
        InferenceConfig inferenceConfig = trainedModelConfig.getInferenceConfig() == null ?
            inferenceConfigFromTargetType(trainedModelConfig.getModelDefinition().getTrainedModel().targetType()) :
            trainedModelConfig.getInferenceConfig();
        LocalModel<? extends InferenceConfig> loadedModel = new LocalModel<>(
            trainedModelConfig.getModelId(),
            trainedModelConfig.getModelDefinition(),
            trainedModelConfig.getInput(),
            trainedModelConfig.getDefaultFieldMap(),
            inferenceConfig);
        synchronized (loadingListeners) {
            listeners = loadingListeners.remove(modelId);
            // If there is no loadingListener that means the loading was canceled and the listener was already notified as such
            // Consequently, we should not store the retrieved model
            if (listeners == null) {
                return;
            }
            localModelCache.put(modelId, loadedModel);
            shouldNotAudit.remove(modelId);
        } // synchronized (loadingListeners)
        for (ActionListener<Model<? extends InferenceConfig>> listener = listeners.poll(); listener != null; listener = listeners.poll()) {
            listener.onResponse(loadedModel);
        }
    }

    /**
     * Drains the listener queue for a failed load and fails each listener.
     * As above, listeners are invoked outside the lock.
     */
    private void handleLoadFailure(String modelId, Exception failure) {
        Queue<ActionListener<Model<? extends InferenceConfig>>> listeners;
        synchronized (loadingListeners) {
            listeners = loadingListeners.remove(modelId);
            if (listeners == null) {
                return;
            }
        } // synchronized (loadingListeners)
        // If we failed to load and there were listeners present, that means that this model is referenced by a processor
        // Alert the listeners to the failure
        for (ActionListener<Model<? extends InferenceConfig>> listener = listeners.poll(); listener != null; listener = listeners.poll()) {
            listener.onFailure(failure);
        }
    }

    // Cache removal callback: audit capacity-based evictions (TTL/explicit removals are silent).
    private void cacheEvictionListener(RemovalNotification<String, LocalModel<? extends InferenceConfig>> notification) {
        if (notification.getRemovalReason() == RemovalNotification.RemovalReason.EVICTED) {
            String msg = new ParameterizedMessage(
                "model cache entry evicted." +
                    "current cache [{}] current max [{}] model size [{}]. " +
                    "If this is undesired, consider updating setting [{}] or [{}].",
                new ByteSizeValue(localModelCache.weight()).getStringRep(),
                maxCacheSize.getStringRep(),
                new ByteSizeValue(notification.getValue().ramBytesUsed()).getStringRep(),
                INFERENCE_MODEL_CACHE_SIZE.getKey(),
                INFERENCE_MODEL_CACHE_TTL.getKey()).getFormattedMessage();
            auditIfNecessary(notification.getKey(), msg);
        }
    }

    /**
     * Reconciles the set of pipeline-referenced models with the cache whenever ingest
     * metadata changes: cancels loads for no-longer-referenced models, invalidates their
     * cache entries, and kicks off loads for newly referenced models.
     */
    @Override
    public void clusterChanged(ClusterChangedEvent event) {
        // If ingest data has not changed or if the current node is not an ingest node, don't bother caching models
        if (event.changedCustomMetadataSet().contains(IngestMetadata.TYPE) == false ||
            event.state().nodes().getLocalNode().isIngestNode() == false) {
            return;
        }
        ClusterState state = event.state();
        IngestMetadata currentIngestMetadata = state.metadata().custom(IngestMetadata.TYPE);
        Set<String> allReferencedModelKeys = getReferencedModelKeys(currentIngestMetadata);
        if (allReferencedModelKeys.equals(referencedModels)) {
            return;
        }
        // The listeners still waiting for a model and we are canceling the load?
        List<Tuple<String, List<ActionListener<Model<? extends InferenceConfig>>>>> drainWithFailure = new ArrayList<>();
        Set<String> referencedModelsBeforeClusterState = null;
        Set<String> loadingModelBeforeClusterState = null;
        Set<String> removedModels = null;
        synchronized (loadingListeners) {
            referencedModelsBeforeClusterState = new HashSet<>(referencedModels);
            if (logger.isTraceEnabled()) {
                loadingModelBeforeClusterState = new HashSet<>(loadingListeners.keySet());
            }
            // If we had models still loading here but are no longer referenced
            // we should remove them from loadingListeners and alert the listeners
            for (String modelId : loadingListeners.keySet()) {
                if (allReferencedModelKeys.contains(modelId) == false) {
                    drainWithFailure.add(Tuple.tuple(modelId, new ArrayList<>(loadingListeners.remove(modelId))));
                }
            }
            removedModels = Sets.difference(referencedModelsBeforeClusterState, allReferencedModelKeys);
            // Remove all cached models that are not referenced by any processors
            removedModels.forEach(localModelCache::invalidate);
            // Remove the models that are no longer referenced
            referencedModels.removeAll(removedModels);
            shouldNotAudit.removeAll(removedModels);
            // Remove all that are still referenced, i.e. the intersection of allReferencedModelKeys and referencedModels
            allReferencedModelKeys.removeAll(referencedModels);
            referencedModels.addAll(allReferencedModelKeys);
            // Populate loadingListeners key so we know that we are currently loading the model
            for (String modelId : allReferencedModelKeys) {
                loadingListeners.put(modelId, new ArrayDeque<>());
            }
        } // synchronized (loadingListeners)
        if (logger.isTraceEnabled()) {
            if (loadingListeners.keySet().equals(loadingModelBeforeClusterState) == false) {
                logger.trace("cluster state event changed loading models: before {} after {}", loadingModelBeforeClusterState,
                    loadingListeners.keySet());
            }
            if (referencedModels.equals(referencedModelsBeforeClusterState) == false) {
                logger.trace("cluster state event changed referenced models: before {} after {}", referencedModelsBeforeClusterState,
                    referencedModels);
            }
        }
        for (Tuple<String, List<ActionListener<Model<? extends InferenceConfig>>>> modelAndListeners : drainWithFailure) {
            final String msg = new ParameterizedMessage(
                "Cancelling load of model [{}] as it is no longer referenced by a pipeline",
                modelAndListeners.v1()).getFormat();
            for (ActionListener<Model<? extends InferenceConfig>> listener : modelAndListeners.v2()) {
                listener.onFailure(new ElasticsearchException(msg));
            }
        }
        removedModels.forEach(this::auditUnreferencedModel);
        loadModels(allReferencedModelKeys);
    }

    // Audits the message once per model; subsequent occurrences are only traced/logged.
    private void auditIfNecessary(String modelId, String msg) {
        if (shouldNotAudit.contains(modelId)) {
            logger.trace("[{}] {}", modelId, msg);
            return;
        }
        auditor.warning(modelId, msg);
        shouldNotAudit.add(modelId);
        logger.warn("[{}] {}", modelId, msg);
    }

    // Kicks off loads for the given models on a utility thread.
    private void loadModels(Set<String> modelIds) {
        if (modelIds.isEmpty()) {
            return;
        }
        // Execute this on a utility thread as when the callbacks occur we don't want them tying up the cluster listener thread pool
        threadPool.executor(MachineLearning.UTILITY_THREAD_POOL_NAME).execute(() -> {
            for (String modelId : modelIds) {
                auditNewReferencedModel(modelId);
                this.loadModel(modelId);
            }
        });
    }

    private void auditNewReferencedModel(String modelId) {
        auditor.info(modelId, "referenced by ingest processors. Attempting to load model into cache");
    }

    private void auditUnreferencedModel(String modelId) {
        auditor.info(modelId, "no longer referenced by any processors");
    }

    // Adds to the queue and returns it, enabling use inside Map.computeIfPresent lambdas.
    private static <T> Queue<T> addFluently(Queue<T> queue, T object) {
        queue.add(object);
        return queue;
    }

    /**
     * Scans all ingest pipelines for inference processors and collects the model ids
     * they reference. Returns an empty (mutable) set when there is no ingest metadata.
     */
    private static Set<String> getReferencedModelKeys(IngestMetadata ingestMetadata) {
        Set<String> allReferencedModelKeys = new HashSet<>();
        if (ingestMetadata == null) {
            return allReferencedModelKeys;
        }
        ingestMetadata.getPipelines().forEach((pipelineId, pipelineConfiguration) -> {
            Object processors = pipelineConfiguration.getConfigAsMap().get("processors");
            if (processors instanceof List<?>) {
                for(Object processor : (List<?>)processors) {
                    if (processor instanceof Map<?, ?>) {
                        Object processorConfig = ((Map<?, ?>)processor).get(InferenceProcessor.TYPE);
                        if (processorConfig instanceof Map<?, ?>) {
                            Object modelId = ((Map<?, ?>)processorConfig).get(InferenceProcessor.MODEL_ID);
                            if (modelId != null) {
                                assert modelId instanceof String;
                                allReferencedModelKeys.add(modelId.toString());
                            }
                        }
                    }
                }
            }
        });
        return allReferencedModelKeys;
    }

    // Maps a trained model's target type to a default (empty-params) inference config.
    private static InferenceConfig inferenceConfigFromTargetType(TargetType targetType) {
        switch(targetType) {
            case REGRESSION:
                return RegressionConfig.EMPTY_PARAMS;
            case CLASSIFICATION:
                return ClassificationConfig.EMPTY_PARAMS;
            default:
                throw ExceptionsHelper.badRequestException("unsupported target type [{}]", targetType);
        }
    }
}
|
x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/loadingservice/ModelLoadingService.java
|
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.ml.inference.loadingservice;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.cluster.ClusterChangedEvent;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.ClusterStateListener;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.cache.Cache;
import org.elasticsearch.common.cache.CacheBuilder;
import org.elasticsearch.common.cache.RemovalNotification;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.set.Sets;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.ingest.IngestMetadata;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.xpack.core.ml.inference.TrainedModelConfig;
import org.elasticsearch.xpack.core.ml.inference.trainedmodel.ClassificationConfig;
import org.elasticsearch.xpack.core.ml.inference.trainedmodel.InferenceConfig;
import org.elasticsearch.xpack.core.ml.inference.trainedmodel.RegressionConfig;
import org.elasticsearch.xpack.core.ml.inference.trainedmodel.TargetType;
import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper;
import org.elasticsearch.xpack.ml.MachineLearning;
import org.elasticsearch.xpack.ml.inference.ingest.InferenceProcessor;
import org.elasticsearch.xpack.ml.inference.persistence.TrainedModelProvider;
import org.elasticsearch.xpack.ml.notifications.InferenceAuditor;
import java.io.IOException;
import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Queue;
import java.util.Set;
import java.util.concurrent.TimeUnit;
/**
 * This is a thread safe model loading service.
 *
 * It will cache local models that are referenced by processors in memory (as long as it is instantiated on an ingest node).
 *
 * If more than one processor references the same model, that model will only be cached once.
 */
public class ModelLoadingService implements ClusterStateListener {

    /**
     * The maximum size of the local model cache here in the loading service
     *
     * Once the limit is reached, LRU models are evicted in favor of new models
     */
    public static final Setting<ByteSizeValue> INFERENCE_MODEL_CACHE_SIZE =
        Setting.memorySizeSetting("xpack.ml.inference_model.cache_size",
            "40%",
            Setting.Property.NodeScope);

    /**
     * How long should a model stay in the cache since its last access
     *
     * If nothing references a model via getModel for this configured timeValue, it will be evicted.
     *
     * Specifically, in the ingest scenario, a processor will call getModel whenever it needs to run inference. So, if a processor is not
     * executed for an extended period of time, the model will be evicted and will have to be loaded again when getModel is called.
     */
    public static final Setting<TimeValue> INFERENCE_MODEL_CACHE_TTL =
        Setting.timeSetting("xpack.ml.inference_model.time_to_live",
            new TimeValue(5, TimeUnit.MINUTES),
            new TimeValue(1, TimeUnit.MILLISECONDS),
            Setting.Property.NodeScope);

    private static final Logger logger = LogManager.getLogger(ModelLoadingService.class);
    private final Cache<String, LocalModel<? extends InferenceConfig>> localModelCache;
    // Model ids currently referenced by ingest pipelines; guarded by the loadingListeners monitor
    private final Set<String> referencedModels = new HashSet<>();
    // Listeners awaiting a model that is currently loading; its monitor also guards referencedModels and shouldNotAudit
    private final Map<String, Queue<ActionListener<Model<? extends InferenceConfig>>>> loadingListeners = new HashMap<>();
    private final TrainedModelProvider provider;
    // Model ids whose eviction notifications should be downgraded from an audit warning to trace logging
    private final Set<String> shouldNotAudit;
    private final ThreadPool threadPool;
    private final InferenceAuditor auditor;
    private final ByteSizeValue maxCacheSize;
    private final NamedXContentRegistry namedXContentRegistry;

    public ModelLoadingService(TrainedModelProvider trainedModelProvider,
                               InferenceAuditor auditor,
                               ThreadPool threadPool,
                               ClusterService clusterService,
                               NamedXContentRegistry namedXContentRegistry,
                               Settings settings) {
        this.provider = trainedModelProvider;
        this.threadPool = threadPool;
        this.maxCacheSize = INFERENCE_MODEL_CACHE_SIZE.get(settings);
        this.auditor = auditor;
        this.shouldNotAudit = new HashSet<>();
        this.namedXContentRegistry = namedXContentRegistry;
        this.localModelCache = CacheBuilder.<String, LocalModel<? extends InferenceConfig>>builder()
            .setMaximumWeight(this.maxCacheSize.getBytes())
            .weigher((id, localModel) -> localModel.ramBytesUsed())
            .removalListener(this::cacheEvictionListener)
            .setExpireAfterAccess(INFERENCE_MODEL_CACHE_TTL.get(settings))
            .build();
        clusterService.addListener(this);
    }

    /**
     * Gets the model referenced by `modelId` and responds to the listener.
     *
     * This method first checks the local LRU cache for the model. If it is present, it is returned from cache.
     *
     * If it is not present, one of the following occurs:
     *
     *  - If the model is referenced by a pipeline and is currently being loaded, the `modelActionListener`
     *    is added to the list of listeners to be alerted when the model is fully loaded.
     *  - If the model is referenced by a pipeline and is currently NOT being loaded, a new load attempt is made and the resulting
     *    model will attempt to be cached for future reference
     *  - If the models is NOT referenced by a pipeline, the model is simply loaded from the index and given to the listener.
     *    It is not cached.
     *
     * @param modelId the model to get
     * @param modelActionListener the listener to alert when the model has been retrieved.
     */
    public void getModel(String modelId, ActionListener<Model<? extends InferenceConfig>> modelActionListener) {
        LocalModel<? extends InferenceConfig> cachedModel = localModelCache.get(modelId);
        if (cachedModel != null) {
            modelActionListener.onResponse(cachedModel);
            logger.trace("[{}] loaded from cache", modelId);
            return;
        }
        if (loadModelIfNecessary(modelId, modelActionListener) == false) {
            // If the model is not loaded and we did not kick off a new loading attempt, this means that we may be getting called
            // by a simulated pipeline
            logger.trace("[{}] not actively loading, eager loading without cache", modelId);
            provider.getTrainedModel(modelId, true, ActionListener.wrap(
                trainedModelConfig -> {
                    trainedModelConfig.ensureParsedDefinition(namedXContentRegistry);
                    // Models stored before the inference config existed fall back to a default derived from the target type
                    InferenceConfig inferenceConfig = trainedModelConfig.getInferenceConfig() == null ?
                        inferenceConfigFromTargetType(trainedModelConfig.getModelDefinition().getTrainedModel().targetType()) :
                        trainedModelConfig.getInferenceConfig();
                    modelActionListener.onResponse(new LocalModel<>(
                        trainedModelConfig.getModelId(),
                        trainedModelConfig.getModelDefinition(),
                        trainedModelConfig.getInput(),
                        trainedModelConfig.getDefaultFieldMap(),
                        inferenceConfig));
                },
                modelActionListener::onFailure
            ));
        } else {
            logger.trace("[{}] is loading or loaded, added new listener to queue", modelId);
        }
    }

    /**
     * Returns true if the model is loaded and the listener has been given the cached model
     * Returns true if the model is CURRENTLY being loaded and the listener was added to be notified when it is loaded
     * Returns false if the model is not loaded or actively being loaded
     */
    private boolean loadModelIfNecessary(String modelId, ActionListener<Model<? extends InferenceConfig>> modelActionListener) {
        synchronized (loadingListeners) {
            // Re-check the cache under the lock: a concurrent load may have completed since getModel's lock-free check
            Model<? extends InferenceConfig> cachedModel = localModelCache.get(modelId);
            if (cachedModel != null) {
                modelActionListener.onResponse(cachedModel);
                return true;
            }
            // It is referenced by a pipeline, but the cache does not contain it
            if (referencedModels.contains(modelId)) {
                // If the loaded model is referenced there but is not present,
                // that means the previous load attempt failed or the model has been evicted
                // Attempt to load and cache the model if necessary
                if (loadingListeners.computeIfPresent(
                    modelId,
                    (storedModelKey, listenerQueue) -> addFluently(listenerQueue, modelActionListener)) == null) {
                    logger.trace("[{}] attempting to load and cache", modelId);
                    loadingListeners.put(modelId, addFluently(new ArrayDeque<>(), modelActionListener));
                    loadModel(modelId);
                }
                return true;
            }
            // if the cachedModel entry is null, but there are listeners present, that means it is being loaded
            return loadingListeners.computeIfPresent(modelId,
                (storedModelKey, listenerQueue) -> addFluently(listenerQueue, modelActionListener)) != null;
        } // synchronized (loadingListeners)
    }

    // Kicks off an async fetch of the model definition; success/failure handlers drain the waiting listeners
    private void loadModel(String modelId) {
        provider.getTrainedModel(modelId, true, ActionListener.wrap(
            trainedModelConfig -> {
                logger.debug("[{}] successfully loaded model", modelId);
                handleLoadSuccess(modelId, trainedModelConfig);
            },
            failure -> {
                logger.warn(new ParameterizedMessage("[{}] failed to load model", modelId), failure);
                handleLoadFailure(modelId, failure);
            }
        ));
    }

    private void handleLoadSuccess(String modelId, TrainedModelConfig trainedModelConfig) throws IOException {
        Queue<ActionListener<Model<? extends InferenceConfig>>> listeners;
        trainedModelConfig.ensureParsedDefinition(namedXContentRegistry);
        InferenceConfig inferenceConfig = trainedModelConfig.getInferenceConfig() == null ?
            inferenceConfigFromTargetType(trainedModelConfig.getModelDefinition().getTrainedModel().targetType()) :
            trainedModelConfig.getInferenceConfig();
        LocalModel<? extends InferenceConfig> loadedModel = new LocalModel<>(
            trainedModelConfig.getModelId(),
            trainedModelConfig.getModelDefinition(),
            trainedModelConfig.getInput(),
            trainedModelConfig.getDefaultFieldMap(),
            inferenceConfig);
        synchronized (loadingListeners) {
            listeners = loadingListeners.remove(modelId);
            // If there is no loadingListener that means the loading was canceled and the listener was already notified as such
            // Consequently, we should not store the retrieved model
            if (listeners == null) {
                return;
            }
            localModelCache.put(modelId, loadedModel);
            shouldNotAudit.remove(modelId);
        } // synchronized (loadingListeners)
        // Notify outside the lock so listener callbacks cannot deadlock back into this service
        for (ActionListener<Model<? extends InferenceConfig>> listener = listeners.poll(); listener != null; listener = listeners.poll()) {
            listener.onResponse(loadedModel);
        }
    }

    private void handleLoadFailure(String modelId, Exception failure) {
        Queue<ActionListener<Model<? extends InferenceConfig>>> listeners;
        synchronized (loadingListeners) {
            listeners = loadingListeners.remove(modelId);
            if (listeners == null) {
                return;
            }
        } // synchronized (loadingListeners)
        // If we failed to load and there were listeners present, that means that this model is referenced by a processor
        // Alert the listeners to the failure
        for (ActionListener<Model<? extends InferenceConfig>> listener = listeners.poll(); listener != null; listener = listeners.poll()) {
            listener.onFailure(failure);
        }
    }

    // Invoked by the cache when an entry is removed; only capacity evictions are worth surfacing to the user
    private void cacheEvictionListener(RemovalNotification<String, LocalModel<? extends InferenceConfig>> notification) {
        if (notification.getRemovalReason() == RemovalNotification.RemovalReason.EVICTED) {
            String msg = new ParameterizedMessage(
                "model cache entry evicted." +
                    "current cache [{}] current max [{}] model size [{}]. " +
                    "If this is undesired, consider updating setting [{}] or [{}].",
                new ByteSizeValue(localModelCache.weight()).getStringRep(),
                maxCacheSize.getStringRep(),
                new ByteSizeValue(notification.getValue().ramBytesUsed()).getStringRep(),
                INFERENCE_MODEL_CACHE_SIZE.getKey(),
                INFERENCE_MODEL_CACHE_TTL.getKey()).getFormattedMessage();
            auditIfNecessary(notification.getKey(), msg);
        }
    }

    @Override
    public void clusterChanged(ClusterChangedEvent event) {
        // If ingest data has not changed or if the current node is not an ingest node, don't bother caching models
        if (event.changedCustomMetadataSet().contains(IngestMetadata.TYPE) == false ||
            event.state().nodes().getLocalNode().isIngestNode() == false) {
            return;
        }
        ClusterState state = event.state();
        IngestMetadata currentIngestMetadata = state.metadata().custom(IngestMetadata.TYPE);
        Set<String> allReferencedModelKeys = getReferencedModelKeys(currentIngestMetadata);
        if (allReferencedModelKeys.equals(referencedModels)) {
            return;
        }
        // Listeners still waiting for a model whose load we are about to cancel; failed outside the lock below
        List<Tuple<String, List<ActionListener<Model<? extends InferenceConfig>>>>> drainWithFailure = new ArrayList<>();
        Set<String> referencedModelsBeforeClusterState = null;
        Set<String> loadingModelBeforeClusterState = null;
        Set<String> removedModels = null;
        synchronized (loadingListeners) {
            referencedModelsBeforeClusterState = new HashSet<>(referencedModels);
            if (logger.isTraceEnabled()) {
                loadingModelBeforeClusterState = new HashSet<>(loadingListeners.keySet());
            }
            // If we had models still loading here but are no longer referenced
            // we should remove them from loadingListeners and alert the listeners
            for (String modelId : loadingListeners.keySet()) {
                if (allReferencedModelKeys.contains(modelId) == false) {
                    drainWithFailure.add(Tuple.tuple(modelId, new ArrayList<>(loadingListeners.remove(modelId))));
                }
            }
            removedModels = Sets.difference(referencedModelsBeforeClusterState, allReferencedModelKeys);
            // Remove all cached models that are not referenced by any processors
            removedModels.forEach(localModelCache::invalidate);
            // Remove the models that are no longer referenced
            referencedModels.removeAll(removedModels);
            shouldNotAudit.removeAll(removedModels);
            // Remove all that are still referenced, i.e. the intersection of allReferencedModelKeys and referencedModels
            allReferencedModelKeys.removeAll(referencedModels);
            referencedModels.addAll(allReferencedModelKeys);
            // Populate loadingListeners key so we know that we are currently loading the model
            for (String modelId : allReferencedModelKeys) {
                loadingListeners.put(modelId, new ArrayDeque<>());
            }
        } // synchronized (loadingListeners)
        // NOTE(review): loadingListeners is read here without holding its lock; trace-only, but racy reads are possible
        if (logger.isTraceEnabled()) {
            if (loadingListeners.keySet().equals(loadingModelBeforeClusterState) == false) {
                logger.trace("cluster state event changed loading models: before {} after {}", loadingModelBeforeClusterState,
                    loadingListeners.keySet());
            }
            if (referencedModels.equals(referencedModelsBeforeClusterState) == false) {
                logger.trace("cluster state event changed referenced models: before {} after {}", referencedModelsBeforeClusterState,
                    referencedModels);
            }
        }
        for (Tuple<String, List<ActionListener<Model<? extends InferenceConfig>>>> modelAndListeners : drainWithFailure) {
            // BUG FIX: getFormat() returns the raw pattern with the "{}" placeholder unsubstituted;
            // getFormattedMessage() is required so the failure message actually contains the model id.
            final String msg = new ParameterizedMessage(
                "Cancelling load of model [{}] as it is no longer referenced by a pipeline",
                modelAndListeners.v1()).getFormattedMessage();
            for (ActionListener<Model<? extends InferenceConfig>> listener : modelAndListeners.v2()) {
                listener.onFailure(new ElasticsearchException(msg));
            }
        }
        removedModels.forEach(this::auditUnreferencedModel);
        loadModels(allReferencedModelKeys);
    }

    // Audits a message for the model at most once; repeats are logged at trace only
    private void auditIfNecessary(String modelId, String msg) {
        if (shouldNotAudit.contains(modelId)) {
            logger.trace("[{}] {}", modelId, msg);
            return;
        }
        auditor.warning(modelId, msg);
        shouldNotAudit.add(modelId);
        logger.warn("[{}] {}", modelId, msg);
    }

    private void loadModels(Set<String> modelIds) {
        if (modelIds.isEmpty()) {
            return;
        }
        // Execute this on a utility thread as when the callbacks occur we don't want them tying up the cluster listener thread pool
        threadPool.executor(MachineLearning.UTILITY_THREAD_POOL_NAME).execute(() -> {
            for (String modelId : modelIds) {
                auditNewReferencedModel(modelId);
                this.loadModel(modelId);
            }
        });
    }

    private void auditNewReferencedModel(String modelId) {
        auditor.info(modelId, "referenced by ingest processors. Attempting to load model into cache");
    }

    private void auditUnreferencedModel(String modelId) {
        auditor.info(modelId, "no longer referenced by any processors");
    }

    // Adds to the queue and returns it, so it can be used inside Map.computeIfPresent
    private static <T> Queue<T> addFluently(Queue<T> queue, T object) {
        queue.add(object);
        return queue;
    }

    // Scans every ingest pipeline for inference processors and collects the model ids they reference
    private static Set<String> getReferencedModelKeys(IngestMetadata ingestMetadata) {
        Set<String> allReferencedModelKeys = new HashSet<>();
        if (ingestMetadata == null) {
            return allReferencedModelKeys;
        }
        ingestMetadata.getPipelines().forEach((pipelineId, pipelineConfiguration) -> {
            Object processors = pipelineConfiguration.getConfigAsMap().get("processors");
            if (processors instanceof List<?>) {
                for (Object processor : (List<?>) processors) {
                    if (processor instanceof Map<?, ?>) {
                        Object processorConfig = ((Map<?, ?>) processor).get(InferenceProcessor.TYPE);
                        if (processorConfig instanceof Map<?, ?>) {
                            Object modelId = ((Map<?, ?>) processorConfig).get(InferenceProcessor.MODEL_ID);
                            if (modelId != null) {
                                assert modelId instanceof String;
                                allReferencedModelKeys.add(modelId.toString());
                            }
                        }
                    }
                }
            }
        });
        return allReferencedModelKeys;
    }

    // Derives a default inference config for models persisted before the config field existed
    private static InferenceConfig inferenceConfigFromTargetType(TargetType targetType) {
        switch (targetType) {
            case REGRESSION:
                return RegressionConfig.EMPTY_PARAMS;
            case CLASSIFICATION:
                return ClassificationConfig.EMPTY_PARAMS;
            default:
                throw ExceptionsHelper.badRequestException("unsupported target type [{}]", targetType);
        }
    }
}
|
Fix Eclipse compile problem in ModelLoadingService (#54670)
Current Eclipse 4.14.0 cannot deal with the direct lambda notation, changing to
an explicit one.
|
x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/loadingservice/ModelLoadingService.java
|
Fix Eclipse compile problem in ModelLoadingService (#54670)
|
|
Java
|
apache-2.0
|
bafc79ea3285af6f330e2fdaa72144103f520a79
| 0
|
burris/dwr,burris/dwr
|
/*
* Copyright 2005 Joe Walker
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.ltd.getahead.dwr.util;
import java.io.IOException;
import java.io.PrintWriter;
import java.io.Writer;
import javax.servlet.ServletOutputStream;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpServletResponseWrapper;
/**
 * An {@link HttpServletResponse} wrapper that silently discards status, error and
 * redirect calls while copying all body output to a caller-supplied {@link Writer}.
 * Used by ExecutionContext to forward results back via javascript.
 * @author Joe Walker [joe at getahead dot ltd dot uk]
 */
public final class SwallowingHttpServletResponse extends HttpServletResponseWrapper implements HttpServletResponse {
    /** The log stream */
    private static final Logger log = Logger.getLogger(SwallowingHttpServletResponse.class);

    /** The forwarding output stream */
    private final ServletOutputStream oout;

    /** The forwarding print writer */
    private final PrintWriter pout;

    /** The ignored buffer size */
    private int bufferSize;

    /**
     * @param response The real HttpServletResponse
     * @param sout The place we copy responses to
     */
    public SwallowingHttpServletResponse(HttpServletResponse response, Writer sout) {
        super(response);
        pout = new PrintWriter(sout);
        oout = new WriterOutputStream(sout);
        // Ignored, but we might as well start with a realistic value in case
        // anyone wants to work with the buffer size.
        bufferSize = response.getBufferSize();
    }

    /**
     * @see javax.servlet.ServletResponseWrapper#getOutputStream()
     */
    public ServletOutputStream getOutputStream() {
        return oout;
    }

    /**
     * @see javax.servlet.ServletResponseWrapper#getWriter()
     */
    public PrintWriter getWriter() {
        return pout;
    }

    /**
     * @see javax.servlet.ServletResponseWrapper#flushBuffer()
     */
    public void flushBuffer() throws IOException {
        pout.flush();
    }

    /**
     * Swallowed: errors are logged and ignored.
     * @see javax.servlet.http.HttpServletResponse#sendError(int, java.lang.String)
     */
    public void sendError(int sc, String msg) {
        log.warn("Ignoring call to sendError(" + sc + ", " + msg + ')'); //$NON-NLS-1$ //$NON-NLS-2$
    }

    /**
     * Swallowed: errors are logged and ignored.
     * @see javax.servlet.http.HttpServletResponse#sendError(int)
     */
    public void sendError(int sc) {
        log.warn("Ignoring call to sendError(" + sc + ')'); //$NON-NLS-1$
    }

    /**
     * Swallowed: redirects are logged and ignored.
     * @see javax.servlet.http.HttpServletResponse#sendRedirect(java.lang.String)
     */
    public void sendRedirect(String location) {
        log.warn("Ignoring call to sendRedirect(" + location + ')'); //$NON-NLS-1$
    }

    /**
     * Swallowed: status changes are logged and ignored.
     * @see javax.servlet.http.HttpServletResponse#setStatus(int)
     */
    public void setStatus(int sc) {
        log.warn("Ignoring call to setStatus(" + sc + ')'); //$NON-NLS-1$
    }

    /**
     * Swallowed: status changes are logged and ignored.
     * @see javax.servlet.http.HttpServletResponse#setStatus(int, java.lang.String)
     */
    public void setStatus(int sc, String sm) {
        log.warn("Ignoring call to setStatus(" + sc + ", " + sm + ')'); //$NON-NLS-1$ //$NON-NLS-2$
    }

    /**
     * @see javax.servlet.ServletResponse#setContentLength(int)
     */
    public void setContentLength(int i) {
        // The content length of the original document is not likely to be the
        // same as the content length of the new document.
    }

    /**
     * Always false, so callers may still set headers/status on the real response.
     * @see javax.servlet.ServletResponseWrapper#isCommitted()
     */
    public boolean isCommitted() {
        return false;
    }

    /**
     * Deliberately a no-op: there is nothing to reset on the capture writer.
     * @see javax.servlet.ServletResponseWrapper#reset()
     */
    public void reset() {
    }

    /**
     * Deliberately a no-op: there is nothing to reset on the capture writer.
     * @see javax.servlet.ServletResponseWrapper#resetBuffer()
     */
    public void resetBuffer() {
    }

    /**
     * @see javax.servlet.ServletResponseWrapper#setBufferSize(int)
     */
    public void setBufferSize(int bufferSize) {
        // We're not writing data to the original source so setting the buffer
        // size on it isn't really important.
        this.bufferSize = bufferSize;
    }

    /**
     * @see javax.servlet.ServletResponseWrapper#getBufferSize()
     */
    public int getBufferSize() {
        return bufferSize;
    }
}
|
java/uk/ltd/getahead/dwr/util/SwallowingHttpServletResponse.java
|
/*
* Copyright 2005 Joe Walker
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.ltd.getahead.dwr.util;
import java.io.IOException;
import java.io.PrintWriter;
import java.io.Writer;
import javax.servlet.ServletOutputStream;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpServletResponseWrapper;
/**
 * An HttpServletResponse wrapper that swallows status/error/redirect calls while
 * copying all body output to a caller-supplied Writer.
 * Used by ExecutionContext to forward results back via javascript.
 * @author Joe Walker [joe at getahead dot ltd dot uk]
 */
public final class SwallowingHttpServletResponse extends HttpServletResponseWrapper implements HttpServletResponse
{
    /**
     * @param response The real HttpServletResponse
     * @param sout The place we copy responses to
     */
    public SwallowingHttpServletResponse(HttpServletResponse response, Writer sout)
    {
        super(response);
        pout = new PrintWriter(sout);
        oout = new WriterOutputStream(sout);
        // Ignored, but we might as well start with a realistic value in case
        // anyone wants to work with the buffer size.
        bufferSize = response.getBufferSize();
    }

    /* (non-Javadoc)
     * @see javax.servlet.ServletResponseWrapper#getOutputStream()
     */
    public ServletOutputStream getOutputStream()
    {
        return oout;
    }

    /* (non-Javadoc)
     * @see javax.servlet.ServletResponseWrapper#getWriter()
     */
    public PrintWriter getWriter()
    {
        return pout;
    }

    /* (non-Javadoc)
     * @see javax.servlet.ServletResponseWrapper#flushBuffer()
     */
    public void flushBuffer() throws IOException
    {
        pout.flush();
    }

    /* (non-Javadoc)
     * @see javax.servlet.http.HttpServletResponse#sendError(int, java.lang.String)
     */
    public void sendError(int sc, String msg)
    {
        log.warn("Ignoring call to sendError(" + sc + ", " + msg + ')'); //$NON-NLS-1$ //$NON-NLS-2$
    }

    /* (non-Javadoc)
     * @see javax.servlet.http.HttpServletResponse#sendError(int)
     */
    public void sendError(int sc)
    {
        log.warn("Ignoring call to sendError(" + sc + ')'); //$NON-NLS-1$
    }

    /* (non-Javadoc)
     * @see javax.servlet.http.HttpServletResponse#sendRedirect(java.lang.String)
     */
    public void sendRedirect(String location)
    {
        log.warn("Ignoring call to sendRedirect(" + location + ')'); //$NON-NLS-1$
    }

    /* (non-Javadoc)
     * @see javax.servlet.http.HttpServletResponse#setStatus(int)
     */
    public void setStatus(int sc)
    {
        log.warn("Ignoring call to setStatus(" + sc + ')'); //$NON-NLS-1$
    }

    /* (non-Javadoc)
     * @see javax.servlet.http.HttpServletResponse#setStatus(int, java.lang.String)
     */
    public void setStatus(int sc, String sm)
    {
        log.warn("Ignoring call to setStatus(" + sc + ", " + sm + ')'); //$NON-NLS-1$ //$NON-NLS-2$
    }

    /* (non-Javadoc)
     * @see javax.servlet.ServletResponse#setContentLength(int)
     */
    public void setContentLength(int i)
    {
        // The content length of the original document is not likely to be the
        // same as the content length of the new document.
    }

    /* (non-Javadoc)
     * @see javax.servlet.ServletResponseWrapper#isCommitted()
     */
    public boolean isCommitted()
    {
        return false;
    }

    /* (non-Javadoc)
     * @see javax.servlet.ServletResponseWrapper#reset()
     */
    public void reset()
    {
        // BUG FIX: previously threw IllegalStateException, which broke
        // RequestDispatcher.forward() — a forward legitimately resets the
        // response buffer before writing. A swallowing response must ignore it.
    }

    /* (non-Javadoc)
     * @see javax.servlet.ServletResponseWrapper#resetBuffer()
     */
    public void resetBuffer()
    {
        // BUG FIX: must be a no-op for the same reason as reset() above.
    }

    /* (non-Javadoc)
     * @see javax.servlet.ServletResponseWrapper#setBufferSize(int)
     */
    public void setBufferSize(int bufferSize)
    {
        // We're not writing data to the original source so setting the buffer
        // size on it isn't really important.
        this.bufferSize = bufferSize;
    }

    /* (non-Javadoc)
     * @see javax.servlet.ServletResponseWrapper#getBufferSize()
     */
    public int getBufferSize()
    {
        return bufferSize;
    }

    /**
     * The log stream
     */
    private static final Logger log = Logger.getLogger(SwallowingHttpServletResponse.class);

    /**
     * The forwarding output stream
     */
    private final ServletOutputStream oout;

    /**
     * The forwarding output stream
     */
    private final PrintWriter pout;

    /**
     * The ignored buffer size
     */
    private int bufferSize;
}
|
fix for illegal state exception on forward to string
git-svn-id: ba1d8d5a2a2c535e023d6080c1e5c29aa0f5364e@397 3a8262b2-faa5-11dc-8610-ff947880b6b2
|
java/uk/ltd/getahead/dwr/util/SwallowingHttpServletResponse.java
|
fix for illegal state exception on forward to string
|
|
Java
|
apache-2.0
|
f6718a088ba1af70ed8c3c79477253602a287b59
| 0
|
eayun/ovirt-engine,eayun/ovirt-engine,zerodengxinchao/ovirt-engine,eayun/ovirt-engine,zerodengxinchao/ovirt-engine,yapengsong/ovirt-engine,yingyun001/ovirt-engine,walteryang47/ovirt-engine,yingyun001/ovirt-engine,OpenUniversity/ovirt-engine,zerodengxinchao/ovirt-engine,yingyun001/ovirt-engine,eayun/ovirt-engine,walteryang47/ovirt-engine,OpenUniversity/ovirt-engine,zerodengxinchao/ovirt-engine,yingyun001/ovirt-engine,yapengsong/ovirt-engine,OpenUniversity/ovirt-engine,yingyun001/ovirt-engine,OpenUniversity/ovirt-engine,yapengsong/ovirt-engine,eayun/ovirt-engine,zerodengxinchao/ovirt-engine,walteryang47/ovirt-engine,OpenUniversity/ovirt-engine,yapengsong/ovirt-engine,yapengsong/ovirt-engine,walteryang47/ovirt-engine,walteryang47/ovirt-engine
|
package org.ovirt.engine.core.vdsbroker.vdsbroker;
import java.nio.file.Paths;
import java.text.DateFormat;
import java.text.DecimalFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Calendar;
import java.util.Collections;
import java.util.Comparator;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.concurrent.TimeUnit;
import org.apache.commons.lang.StringUtils;
import org.ovirt.engine.core.common.AuditLogType;
import org.ovirt.engine.core.common.FeatureSupported;
import org.ovirt.engine.core.common.businessentities.AutoNumaBalanceStatus;
import org.ovirt.engine.core.common.businessentities.CpuStatistics;
import org.ovirt.engine.core.common.businessentities.DiskImageDynamic;
import org.ovirt.engine.core.common.businessentities.Entities;
import org.ovirt.engine.core.common.businessentities.GraphicsInfo;
import org.ovirt.engine.core.common.businessentities.GraphicsType;
import org.ovirt.engine.core.common.businessentities.KdumpStatus;
import org.ovirt.engine.core.common.businessentities.LUNs;
import org.ovirt.engine.core.common.businessentities.NumaNodeStatistics;
import org.ovirt.engine.core.common.businessentities.SessionState;
import org.ovirt.engine.core.common.businessentities.StoragePool;
import org.ovirt.engine.core.common.businessentities.StorageType;
import org.ovirt.engine.core.common.businessentities.VDS;
import org.ovirt.engine.core.common.businessentities.VDSDomainsData;
import org.ovirt.engine.core.common.businessentities.VMStatus;
import org.ovirt.engine.core.common.businessentities.VdsNumaNode;
import org.ovirt.engine.core.common.businessentities.VdsTransparentHugePagesState;
import org.ovirt.engine.core.common.businessentities.VmBalloonInfo;
import org.ovirt.engine.core.common.businessentities.VmBlockJob;
import org.ovirt.engine.core.common.businessentities.VmBlockJobType;
import org.ovirt.engine.core.common.businessentities.VmDynamic;
import org.ovirt.engine.core.common.businessentities.VmExitReason;
import org.ovirt.engine.core.common.businessentities.VmExitStatus;
import org.ovirt.engine.core.common.businessentities.VmGuestAgentInterface;
import org.ovirt.engine.core.common.businessentities.VmJob;
import org.ovirt.engine.core.common.businessentities.VmJobState;
import org.ovirt.engine.core.common.businessentities.VmJobType;
import org.ovirt.engine.core.common.businessentities.VmNumaNode;
import org.ovirt.engine.core.common.businessentities.VmPauseStatus;
import org.ovirt.engine.core.common.businessentities.VmRngDevice;
import org.ovirt.engine.core.common.businessentities.VmStatistics;
import org.ovirt.engine.core.common.businessentities.network.Bond;
import org.ovirt.engine.core.common.businessentities.network.InterfaceStatus;
import org.ovirt.engine.core.common.businessentities.network.Network;
import org.ovirt.engine.core.common.businessentities.network.NetworkBootProtocol;
import org.ovirt.engine.core.common.businessentities.network.Nic;
import org.ovirt.engine.core.common.businessentities.network.VdsInterfaceType;
import org.ovirt.engine.core.common.businessentities.network.VdsNetworkInterface;
import org.ovirt.engine.core.common.businessentities.network.VdsNetworkStatistics;
import org.ovirt.engine.core.common.businessentities.network.Vlan;
import org.ovirt.engine.core.common.businessentities.network.VmNetworkInterface;
import org.ovirt.engine.core.common.config.Config;
import org.ovirt.engine.core.common.config.ConfigValues;
import org.ovirt.engine.core.common.utils.EnumUtils;
import org.ovirt.engine.core.common.utils.Pair;
import org.ovirt.engine.core.common.utils.SizeConverter;
import org.ovirt.engine.core.compat.Guid;
import org.ovirt.engine.core.compat.RpmVersion;
import org.ovirt.engine.core.dal.dbbroker.DbFacade;
import org.ovirt.engine.core.dal.dbbroker.auditloghandling.AuditLogDirector;
import org.ovirt.engine.core.dal.dbbroker.auditloghandling.AuditLogableBase;
import org.ovirt.engine.core.utils.NetworkUtils;
import org.ovirt.engine.core.utils.NumaUtils;
import org.ovirt.engine.core.utils.SerializationFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* This class encapsulate the knowledge of how to create objects from the VDS RPC protocol response.
* This class has methods that receive XmlRpcStruct and construct the following Classes: VmDynamic VdsDynamic VdsStatic.
*/
@SuppressWarnings({ "unchecked", "rawtypes" })
public class VdsBrokerObjectsBuilder {
private static final Logger log = LoggerFactory.getLogger(VdsBrokerObjectsBuilder.class);
private final static int VNC_START_PORT = 5900;
private final static double NANO_SECONDS = 1000000000;
private static final Comparator<VdsNumaNode> numaNodeComparator = new Comparator<VdsNumaNode>() {
@Override
public int compare(VdsNumaNode arg0, VdsNumaNode arg1) {
return arg0.getIndex() < arg1.getIndex() ? -1 : 1;
}
};
public static VmDynamic buildVMDynamicDataFromList(Map<String, Object> xmlRpcStruct) {
VmDynamic vmdynamic = new VmDynamic();
if (xmlRpcStruct.containsKey(VdsProperties.vm_guid)) {
vmdynamic.setId(new Guid((String) xmlRpcStruct.get(VdsProperties.vm_guid)));
}
if (xmlRpcStruct.containsKey(VdsProperties.status)) {
vmdynamic.setStatus(convertToVmStatus((String) xmlRpcStruct.get(VdsProperties.status)));
}
return vmdynamic;
}
public static VmDynamic buildVMDynamicData(Map<String, Object> xmlRpcStruct) {
VmDynamic vmdynamic = new VmDynamic();
updateVMDynamicData(vmdynamic, xmlRpcStruct);
return vmdynamic;
}
public static StoragePool buildStoragePool(Map<String, Object> xmlRpcStruct) {
StoragePool sPool = new StoragePool();
if (xmlRpcStruct.containsKey("type")) {
sPool.setIsLocal(StorageType.valueOf(xmlRpcStruct.get("type").toString()).isLocal());
}
sPool.setName(AssignStringValue(xmlRpcStruct, "name"));
Integer masterVersion = AssignIntValue(xmlRpcStruct, "master_ver");
if (masterVersion != null) {
sPool.setMasterDomainVersion(masterVersion);
}
return sPool;
}
/**
 * Builds a fully populated {@link VmStatistics} from a VDSM stats struct.
 */
public static VmStatistics buildVMStatisticsData(Map<String, Object> xmlRpcStruct) {
    VmStatistics result = new VmStatistics();
    updateVMStatisticsData(result, xmlRpcStruct);
    return result;
}
/**
 * Extracts the LUN-backed disks of a VM from a VDSM response.
 *
 * @return map from LUN id to a {@link LUNs} object carrying the device size
 *         in GB when VDSM reported the disk's true size
 */
public static Map<String, LUNs> buildVmLunDisksData(Map<String, Object> xmlRpcStruct) {
    Map<String, LUNs> lunsMap = new HashMap<>();
    Map<String, Object> disks = (Map<String, Object>) xmlRpcStruct.get(VdsProperties.vm_disks);
    if (disks == null) {
        return lunsMap;
    }
    for (Object diskAsObj : disks.values()) {
        Map<String, Object> disk = (Map<String, Object>) diskAsObj;
        String lunGuid = AssignStringValue(disk, VdsProperties.lun_guid);
        if (StringUtils.isEmpty(lunGuid)) {
            // not a LUN-backed disk
            continue;
        }
        LUNs lun = new LUNs();
        lun.setLUN_id(lunGuid);
        if (disk.containsKey(VdsProperties.disk_true_size)) {
            long sizeInBytes = AssignLongValue(disk, VdsProperties.disk_true_size);
            lun.setDeviceSize(SizeConverter.convert(
                    sizeInBytes, SizeConverter.SizeUnit.BYTES, SizeConverter.SizeUnit.GB).intValue());
        }
        lunsMap.put(lunGuid, lun);
    }
    return lunsMap;
}
/**
 * Populates the given {@link VmDynamic} with everything VDSM reports about a
 * running VM: identity and status, graphics/display data, guest-agent data
 * (host name, current user, IP, FQDN, installed apps), exit
 * code/message/reason, pause state, watchdog events, current CD and guest
 * CPU count. Fields absent from the struct are left untouched unless noted.
 */
public static void updateVMDynamicData(VmDynamic vm, Map<String, Object> xmlRpcStruct) {
    if (xmlRpcStruct.containsKey(VdsProperties.vm_guid)) {
        vm.setId(new Guid((String) xmlRpcStruct.get(VdsProperties.vm_guid)));
    }
    if (xmlRpcStruct.containsKey(VdsProperties.session)) {
        String session = (String) xmlRpcStruct.get(VdsProperties.session);
        try {
            vm.setSession(SessionState.valueOf(session));
        } catch (Exception e) {
            // unknown session values are logged and otherwise ignored
            log.error("Illegal vm session '{}'.", session);
        }
    }
    if (xmlRpcStruct.containsKey(VdsProperties.kvmEnable)) {
        vm.setKvmEnable(Boolean.parseBoolean((String) xmlRpcStruct.get(VdsProperties.kvmEnable)));
    }
    if (xmlRpcStruct.containsKey(VdsProperties.acpiEnable)) {
        vm.setAcpiEnable(Boolean.parseBoolean((String) xmlRpcStruct.get(VdsProperties.acpiEnable)));
    }
    if (xmlRpcStruct.containsKey(VdsProperties.win2kHackEnable)) {
        vm.setWin2kHackEnable(Boolean.parseBoolean((String) xmlRpcStruct.get(VdsProperties.win2kHackEnable)));
    }
    if (xmlRpcStruct.containsKey(VdsProperties.status)) {
        vm.setStatus(convertToVmStatus((String) xmlRpcStruct.get(VdsProperties.status)));
    }
    // prefer the modern displayInfo structure; fall back to the legacy
    // vm.conf-style display fields when it is absent
    boolean hasGraphicsInfo = updateGraphicsInfo(vm, xmlRpcStruct);
    if (!hasGraphicsInfo) {
        updateGraphicsInfoFromConf(vm, xmlRpcStruct);
    }
    if (xmlRpcStruct.containsKey((VdsProperties.utc_diff))) {
        String utc_diff = xmlRpcStruct.get(VdsProperties.utc_diff).toString();
        // Integer.parseInt rejects a leading '+' sign, so strip it first
        if (utc_diff.startsWith("+")) {
            utc_diff = utc_diff.substring(1);
        }
        try {
            vm.setUtcDiff(Integer.parseInt(utc_diff));
        } catch (NumberFormatException e) {
            log.error("Illegal vm offset (utc_diff) '{}'.", utc_diff);
        }
    }
    if (xmlRpcStruct.containsKey(VdsProperties.hash)) {
        String hash = (String) xmlRpcStruct.get(VdsProperties.hash);
        try {
            vm.setHash(hash);
        } catch (Exception e) {
            log.error("Illegal vm hash '{}'.", hash);
        }
    }
    // vm disks
    if (xmlRpcStruct.containsKey(VdsProperties.vm_disks)) {
        initDisks(xmlRpcStruct, vm);
    }
    // ------------- vm internal (guest agent) data -------------
    vm.setGuestLastLoginTime(AssignDateTImeFromEpoch(xmlRpcStruct, VdsProperties.guest_last_login_time));
    vm.setVmHost(AssignStringValue(xmlRpcStruct, VdsProperties.vm_host));
    String guestUserName = AssignStringValue(xmlRpcStruct, VdsProperties.guest_cur_user_name);
    vm.setGuestCurrentUserName(guestUserName);
    initAppsList(xmlRpcStruct, vm);
    vm.setGuestOs(AssignStringValue(xmlRpcStruct, VdsProperties.guest_os));
    if (xmlRpcStruct.containsKey(VdsProperties.VM_FQDN)) {
        vm.setVmFQDN(AssignStringValue(xmlRpcStruct, VdsProperties.VM_FQDN));
        String fqdn = vm.getVmFQDN().trim();
        // a loopback FQDN carries no useful information - drop it
        if ("localhost".equalsIgnoreCase(fqdn) || "localhost.localdomain".equalsIgnoreCase(fqdn)) {
            vm.setVmFQDN(null);
        }
        else {
            vm.setVmFQDN(fqdn);
        }
    }
    vm.setVmIp(AssignStringValue(xmlRpcStruct, VdsProperties.VM_IP));
    if (vm.getVmIp() != null) {
        // likewise, a loopback address is not meaningful to clients
        if (vm.getVmIp().startsWith("127.0.")) {
            vm.setVmIp(null);
        } else {
            vm.setVmIp(vm.getVmIp().trim());
        }
    }
    if (xmlRpcStruct.containsKey(VdsProperties.exit_code)) {
        String exitCodeStr = xmlRpcStruct.get(VdsProperties.exit_code).toString();
        vm.setExitStatus(VmExitStatus.forValue(Integer.parseInt(exitCodeStr)));
    }
    if (xmlRpcStruct.containsKey(VdsProperties.exit_message)) {
        String exitMsg = (String) xmlRpcStruct.get(VdsProperties.exit_message);
        vm.setExitMessage(exitMsg);
    }
    if (xmlRpcStruct.containsKey(VdsProperties.exit_reason)) {
        String exitReasonStr = xmlRpcStruct.get(VdsProperties.exit_reason).toString();
        vm.setExitReason(VmExitReason.forValue(Integer.parseInt(exitReasonStr)));
    } else {
        vm.setExitReason(VmExitReason.Unknown);
    }
    // if monitorResponse returns negative it means its erroneous
    if (xmlRpcStruct.containsKey(VdsProperties.monitorResponse)) {
        int response = Integer.parseInt(xmlRpcStruct.get(VdsProperties.monitorResponse).toString());
        if (response < 0) {
            vm.setStatus(VMStatus.NotResponding);
        }
    }
    if (xmlRpcStruct.containsKey(VdsProperties.clientIp)) {
        vm.setClientIp(xmlRpcStruct.get(VdsProperties.clientIp).toString());
    }
    // pause status defaults to NONE when absent or unparsable
    VmPauseStatus pauseStatus = VmPauseStatus.NONE;
    if (xmlRpcStruct.containsKey(VdsProperties.pauseCode)) {
        String pauseCodeStr = (String) xmlRpcStruct.get(VdsProperties.pauseCode);
        try {
            pauseStatus = VmPauseStatus.valueOf(pauseCodeStr);
        } catch (IllegalArgumentException ex) {
            log.error("Error in parsing vm pause status. Setting value to NONE");
            pauseStatus = VmPauseStatus.NONE;
        }
    }
    vm.setPauseStatus(pauseStatus);
    if (xmlRpcStruct.containsKey(VdsProperties.watchdogEvent)) {
        Map<String, Object> watchdogStruct = (Map<String, Object>) xmlRpcStruct.get(VdsProperties.watchdogEvent);
        double time = Double.parseDouble(watchdogStruct.get(VdsProperties.time).toString());
        // vdsm may not send the action http://gerrit.ovirt.org/14134
        String action =
                watchdogStruct.containsKey(VdsProperties.action) ? watchdogStruct.get(VdsProperties.action)
                        .toString() : null;
        vm.setLastWatchdogEvent((long) time);
        vm.setLastWatchdogAction(action);
    }
    if (xmlRpcStruct.containsKey(VdsProperties.CDRom)) {
        // keep only the ISO file name, not its full path on the host
        String isoName = Paths.get((String) xmlRpcStruct.get(VdsProperties.CDRom)).getFileName().toString();
        vm.setCurrentCd(isoName);
    }
    if (xmlRpcStruct.containsKey(VdsProperties.GUEST_CPU_COUNT)) {
        vm.setGuestCpuCount(AssignIntValue(xmlRpcStruct, VdsProperties.GUEST_CPU_COUNT));
    }
}
/**
 * Updates graphics runtime information from the displayInfo VDSM structure,
 * when present.
 *
 * @param vm VmDynamic to update
 * @param xmlRpcStruct data from VDSM
 * @return true when displayInfo existed in the struct, false otherwise
 */
private static boolean updateGraphicsInfo(VmDynamic vm, Map<String, Object> xmlRpcStruct) {
    Object displayInfo = xmlRpcStruct.get(VdsProperties.displayInfo);
    if (displayInfo == null) {
        return false;
    }
    for (Object entry : (Object[]) displayInfo) {
        Map<String, String> display = (Map<String, String>) entry;
        GraphicsType type = GraphicsType.fromString(display.get(VdsProperties.type));
        GraphicsInfo info = new GraphicsInfo();
        info.setIp(display.get(VdsProperties.ipAddress))
                .setPort(parseIntegerOrNull(display.get(VdsProperties.port)))
                .setTlsPort(parseIntegerOrNull(display.get(VdsProperties.tlsPort)));
        // a device with neither a plain nor a TLS port is not reachable - skip it
        if (info.getPort() == null && info.getTlsPort() == null) {
            continue;
        }
        vm.getGraphicsInfos().put(type, info);
    }
    return true;
}
/**
 * Updates graphics runtime information from the legacy vm.conf-style VDSM
 * structure. Used with legacy VDSMs that have no notion of a graphics
 * device.
 *
 * @param vm VmDynamic to update
 * @param xmlRpcStruct data from VDSM
 */
private static void updateGraphicsInfoFromConf(VmDynamic vm, Map<String, Object> xmlRpcStruct) {
    GraphicsType vmGraphicsType = parseGraphicsType(xmlRpcStruct);
    if (vmGraphicsType == null) {
        log.warn("Can't set graphics data from XML.");
        return;
    }
    GraphicsInfo graphicsInfo = new GraphicsInfo();
    // NOTE: the log placeholders below were MessageFormat-style "{0}", which
    // SLF4J never substitutes; fixed to SLF4J's "{}" form.
    if (xmlRpcStruct.containsKey(VdsProperties.display_port)) {
        try {
            graphicsInfo.setPort(Integer.parseInt(xmlRpcStruct.get(VdsProperties.display_port).toString()));
        } catch (NumberFormatException e) {
            log.error("vm display_port value illegal : {}", xmlRpcStruct.get(VdsProperties.display_port));
        }
    } else if (xmlRpcStruct.containsKey(VdsProperties.display)) {
        try {
            // older VDSM reports a VNC display number; translate it to a port
            graphicsInfo
                    .setPort(VNC_START_PORT + Integer.parseInt(xmlRpcStruct.get(VdsProperties.display).toString()));
        } catch (NumberFormatException e) {
            log.error("vm display value illegal : {}", xmlRpcStruct.get(VdsProperties.display));
        }
    }
    if (xmlRpcStruct.containsKey(VdsProperties.display_secure_port)) {
        try {
            graphicsInfo
                    .setTlsPort(Integer.parseInt(xmlRpcStruct.get(VdsProperties.display_secure_port).toString()));
        } catch (NumberFormatException e) {
            log.error("vm display_secure_port value illegal : {}",
                    xmlRpcStruct.get(VdsProperties.display_secure_port));
        }
    }
    if (xmlRpcStruct.containsKey((VdsProperties.displayIp))) {
        graphicsInfo.setIp((String) xmlRpcStruct.get(VdsProperties.displayIp));
    }
    vm.getGraphicsInfos().put(vmGraphicsType, graphicsInfo);
}
/**
 * Retrieves the graphics type from the legacy display type field.
 * <p>
 * The original implementation relied on a swallowed NPE for the missing-key
 * case; this version checks for null explicitly instead of using exceptions
 * for control flow.
 *
 * @return the graphics type derived from the struct, or null when the field
 *         is missing or holds an unrecognized value
 */
private static GraphicsType parseGraphicsType(Map<String, Object> xmlRpcStruct) {
    Object displayType = xmlRpcStruct.get(VdsProperties.displayType);
    if (displayType == null) {
        // legacy structs may omit the display type entirely
        return null;
    }
    switch (displayType.toString()) {
    case VdsProperties.VNC:
        return GraphicsType.VNC;
    case VdsProperties.QXL:
        return GraphicsType.SPICE;
    default:
        return null;
    }
}
/**
 * Parses the given string as a decimal integer.
 *
 * @param s string to parse, may be null
 * @return the parsed value, or null when the string is null or not a valid
 *         integer
 */
private static Integer parseIntegerOrNull(String s) {
    try {
        return Integer.valueOf(s);
    } catch (NumberFormatException e) {
        // Integer.valueOf(null) also throws NumberFormatException, so the
        // null case is covered; no need for the previous broad catch.
        return null;
    }
}
/**
 * Populates the given {@link VmStatistics} from a VDSM stats struct:
 * elapsed time, per-NIC network statistics, disk usage, CPU and memory
 * usage, balloon info, migration progress, VM jobs and NUMA run-time info.
 */
public static void updateVMStatisticsData(VmStatistics vm, Map<String, Object> xmlRpcStruct) {
    if (xmlRpcStruct.containsKey(VdsProperties.vm_guid)) {
        vm.setId(new Guid((String) xmlRpcStruct.get(VdsProperties.vm_guid)));
    }
    vm.setelapsed_time(AssignDoubleValue(xmlRpcStruct, VdsProperties.elapsed_time));
    // ------------- vm network statistics -----------------------
    if (xmlRpcStruct.containsKey(VdsProperties.VM_NETWORK)) {
        Map networkStruct = (Map) xmlRpcStruct.get(VdsProperties.VM_NETWORK);
        vm.setInterfaceStatistics(new ArrayList<VmNetworkInterface>());
        for (Object tempNic : networkStruct.values()) {
            Map nic = (Map) tempNic;
            VmNetworkInterface stats = new VmNetworkInterface();
            vm.getInterfaceStatistics().add(stats);
            if (nic.containsKey(VdsProperties.VM_INTERFACE_NAME)) {
                // the name may be reported as a non-string; treat that as absent
                stats.setName((String) ((nic.get(VdsProperties.VM_INTERFACE_NAME) instanceof String) ? nic
                        .get(VdsProperties.VM_INTERFACE_NAME) : null));
            }
            Double rx_rate = AssignDoubleValue(nic, VdsProperties.rx_rate);
            Double rx_dropped = AssignDoubleValue(nic, VdsProperties.rx_dropped);
            Double tx_rate = AssignDoubleValue(nic, VdsProperties.tx_rate);
            Double tx_dropped = AssignDoubleValue(nic, VdsProperties.tx_dropped);
            // missing rates default to 0
            stats.getStatistics().setReceiveRate(rx_rate != null ? rx_rate : 0);
            stats.getStatistics().setReceiveDropRate(rx_dropped != null ? rx_dropped : 0);
            stats.getStatistics().setTransmitRate(tx_rate != null ? tx_rate : 0);
            stats.getStatistics().setTransmitDropRate(tx_dropped != null ? tx_dropped : 0);
            stats.setMacAddress((String) ((nic.get(VdsProperties.MAC_ADDR) instanceof String) ? nic
                    .get(VdsProperties.MAC_ADDR) : null));
            stats.setSpeed(AssignIntValue(nic, VdsProperties.INTERFACE_SPEED));
        }
    }
    if (xmlRpcStruct.containsKey(VdsProperties.VM_DISKS_USAGE)) {
        initDisksUsage(xmlRpcStruct, vm);
    }
    // ------------- vm cpu statistics -----------------------
    vm.setcpu_sys(AssignDoubleValue(xmlRpcStruct, VdsProperties.cpu_sys));
    vm.setcpu_user(AssignDoubleValue(xmlRpcStruct, VdsProperties.cpu_user));
    // ------------- vm memory statistics -----------------------
    vm.setusage_mem_percent(AssignIntValue(xmlRpcStruct, VdsProperties.vm_usage_mem_percent));
    vm.setVmBalloonInfo(getBalloonInfo(xmlRpcStruct));
    // ------------- vm migration statistics -----------------------
    Integer migrationProgress = AssignIntValue(xmlRpcStruct, VdsProperties.vm_migration_progress_percent);
    vm.setMigrationProgressPercent(migrationProgress != null ? migrationProgress : 0);
    // ------------- vm jobs -------------
    vm.setVmJobs(getVmJobs(vm.getId(), xmlRpcStruct));
    // ------------- vm numa nodes runtime info -------------------------
    if (xmlRpcStruct.containsKey(VdsProperties.VM_NUMA_NODES_RUNTIME_INFO)) {
        updateVmNumaNodesRuntimeInfo(vm, xmlRpcStruct);
    }
}
/**
 * Builds the VM balloon info from the VDSM struct. The balloon device is
 * marked enabled only when the struct reports all four balloon values
 * (reported by VDSM 3.3 and later).
 */
private static VmBalloonInfo getBalloonInfo(Map<String, Object> xmlRpcStruct) {
    VmBalloonInfo result = new VmBalloonInfo();
    Map<String, Object> balloonInfo = (Map<String, Object>) xmlRpcStruct.get(VdsProperties.vm_balloonInfo);
    if (balloonInfo == null || balloonInfo.isEmpty()) {
        result.setBalloonDeviceEnabled(false);
        return result;
    }
    result.setCurrentMemory(AssignLongValue(balloonInfo, VdsProperties.vm_balloon_cur));
    result.setBalloonMaxMemory(AssignLongValue(balloonInfo, VdsProperties.vm_balloon_max));
    result.setBalloonTargetMemory(AssignLongValue(balloonInfo, VdsProperties.vm_balloon_target));
    result.setBalloonMinMemory(AssignLongValue(balloonInfo, VdsProperties.vm_balloon_min));
    // only if all 4 properties are found is the balloon considered enabled
    if (balloonInfo.size() >= 4) {
        result.setBalloonDeviceEnabled(true);
    }
    return result;
}
/**
 * Extracts the VM jobs reported by VDSM.
 *
 * @return the list of jobs, or null when the struct carries no job info
 */
private static List<VmJob> getVmJobs(Guid vmId, Map<String, Object> xmlRpcStruct) {
    if (!xmlRpcStruct.containsKey(VdsProperties.vmJobs)) {
        return null;
    }
    Map<String, Object> jobsStruct = (Map<String, Object>) xmlRpcStruct.get(VdsProperties.vmJobs);
    List<VmJob> result = new ArrayList<VmJob>();
    for (Object jobStruct : jobsStruct.values()) {
        result.add(buildVmJobData(vmId, (Map<String, Object>) jobStruct));
    }
    return result;
}
/**
 * Builds a single {@link VmJob} (a {@link VmBlockJob} for block jobs) from
 * its VDSM struct; unknown job types become {@code VmJobType.UNKNOWN}.
 */
private static VmJob buildVmJobData(Guid vmId, Map<String, Object> xmlRpcStruct) {
    VmJobType jobType = VmJobType.getByName(AssignStringValue(xmlRpcStruct, VdsProperties.vmJobType));
    if (jobType == null) {
        jobType = VmJobType.UNKNOWN;
    }
    VmJob job;
    if (jobType == VmJobType.BLOCK) {
        VmBlockJob blockJob = new VmBlockJob();
        blockJob.setBlockJobType(VmBlockJobType.getByName(AssignStringValue(xmlRpcStruct, VdsProperties.vmBlockJobType)));
        blockJob.setCursorCur(AssignLongValue(xmlRpcStruct, VdsProperties.vmJobCursorCur));
        blockJob.setCursorEnd(AssignLongValue(xmlRpcStruct, VdsProperties.vmJobCursorEnd));
        blockJob.setBandwidth(AssignLongValue(xmlRpcStruct, VdsProperties.vmJobBandwidth));
        blockJob.setImageGroupId(new Guid(AssignStringValue(xmlRpcStruct, VdsProperties.vmJobImageUUID)));
        job = blockJob;
    } else {
        job = new VmJob();
    }
    job.setVmId(vmId);
    job.setId(new Guid(AssignStringValue(xmlRpcStruct, VdsProperties.vmJobId)));
    job.setJobState(VmJobState.NORMAL);
    job.setJobType(jobType);
    return job;
}
/**
 * Populates a host's dynamic (capability) data from a VDSM
 * getVdsCapabilities-style struct: CPU topology, memory, package versions,
 * supported features, HBA inventory, SELinux mode, and live
 * snapshot/merge support.
 */
public static void updateVDSDynamicData(VDS vds, Map<String, Object> xmlRpcStruct) {
    vds.setSupportedClusterLevels(AssignStringValueFromArray(xmlRpcStruct, VdsProperties.supported_cluster_levels));
    updateNetworkData(vds, xmlRpcStruct);
    updateNumaNodesData(vds, xmlRpcStruct);
    vds.setCpuThreads(AssignIntValue(xmlRpcStruct, VdsProperties.cpuThreads));
    vds.setCpuCores(AssignIntValue(xmlRpcStruct, VdsProperties.cpu_cores));
    vds.setCpuSockets(AssignIntValue(xmlRpcStruct, VdsProperties.cpu_sockets));
    vds.setCpuModel(AssignStringValue(xmlRpcStruct, VdsProperties.cpu_model));
    vds.setOnlineCpus(AssignStringValue(xmlRpcStruct, VdsProperties.online_cpus));
    vds.setCpuSpeedMh(AssignDoubleValue(xmlRpcStruct, VdsProperties.cpu_speed_mh));
    vds.setPhysicalMemMb(AssignIntValue(xmlRpcStruct, VdsProperties.physical_mem_mb));
    vds.setKvmEnabled(AssignBoolValue(xmlRpcStruct, VdsProperties.kvm_enabled));
    vds.setReservedMem(AssignIntValue(xmlRpcStruct, VdsProperties.reservedMem));
    Integer guestOverhead = AssignIntValue(xmlRpcStruct, VdsProperties.guestOverhead);
    vds.setGuestOverhead(guestOverhead != null ? guestOverhead : 0);
    vds.setCpuFlags(AssignStringValue(xmlRpcStruct, VdsProperties.cpu_flags));
    UpdatePackagesVersions(vds, xmlRpcStruct);
    vds.setSupportedEngines(AssignStringValueFromArray(xmlRpcStruct, VdsProperties.supported_engines));
    vds.setIScsiInitiatorName(AssignStringValue(xmlRpcStruct, VdsProperties.iSCSIInitiatorName));
    vds.setSupportedEmulatedMachines(AssignStringValueFromArray(xmlRpcStruct, VdsProperties.emulatedMachines));
    setRngSupportedSourcesToVds(vds, xmlRpcStruct);
    String hooksStr = ""; // default value if hooks is not in the xml rpc struct
    if (xmlRpcStruct.containsKey(VdsProperties.hooks)) {
        hooksStr = xmlRpcStruct.get(VdsProperties.hooks).toString();
    }
    vds.setHooksStr(hooksStr);
    // parse out the HBAs available in this host. Unlike the other fields,
    // this entry used to be dereferenced unconditionally, which threw an NPE
    // when VDSM did not report an HBA inventory - guard against that and
    // fall back to an empty map.
    Map<String, List<Map<String, String>>> hbas = new HashMap<>();
    Map<String, Object[]> hbaInventory = (Map<String, Object[]>) xmlRpcStruct.get(VdsProperties.HBAInventory);
    if (hbaInventory != null) {
        for (Map.Entry<String, Object[]> el : hbaInventory.entrySet()) {
            List<Map<String, String>> devicesList = new ArrayList<Map<String, String>>();
            for (Object device : el.getValue()) {
                devicesList.add((Map<String, String>) device);
            }
            hbas.put(el.getKey(), devicesList);
        }
    }
    vds.setHBAs(hbas);
    vds.setBootTime(AssignLongValue(xmlRpcStruct, VdsProperties.bootTime));
    vds.setKdumpStatus(KdumpStatus.valueOfNumber(AssignIntValue(xmlRpcStruct, VdsProperties.KDUMP_STATUS)));
    Map<String, Object> selinux = (Map<String, Object>) xmlRpcStruct.get(VdsProperties.selinux);
    if (selinux != null) {
        vds.setSELinuxEnforceMode(AssignIntValue(selinux, VdsProperties.selinux_mode));
    } else {
        vds.setSELinuxEnforceMode(null);
    }
    if (xmlRpcStruct.containsKey(VdsProperties.liveSnapshotSupport)) {
        vds.setLiveSnapshotSupport(AssignBoolValue(xmlRpcStruct, VdsProperties.liveSnapshotSupport));
    } else {
        vds.setLiveSnapshotSupport(true); // for backward compatibility's sake
    }
    if (xmlRpcStruct.containsKey(VdsProperties.liveMergeSupport)) {
        vds.setLiveMergeSupport(AssignBoolValue(xmlRpcStruct, VdsProperties.liveMergeSupport));
    } else {
        vds.setLiveMergeSupport(false);
    }
}
/**
 * Replaces the host's supported RNG sources with those reported in the
 * struct (an empty set when VDSM reported none).
 */
private static void setRngSupportedSourcesToVds(VDS vds, Map<String, Object> xmlRpcStruct) {
    vds.getSupportedRngSources().clear();
    String rngSourcesCsv = AssignStringValueFromArray(xmlRpcStruct, VdsProperties.rngSources);
    if (rngSourcesCsv == null) {
        return;
    }
    vds.getSupportedRngSources().addAll(VmRngDevice.csvToSourcesSet(rngSourcesCsv.toUpperCase()));
}
/**
 * Raises a time-drift audit alert when the host clock deviates from the
 * engine clock by more than the configured threshold. No-op unless host
 * time-drift validation is enabled in the engine configuration.
 */
public static void checkTimeDrift(VDS vds, Map<String, Object> xmlRpcStruct) {
    Boolean isHostTimeDriftEnabled = Config.getValue(ConfigValues.EnableHostTimeDrift);
    if (!isHostTimeDriftEnabled) {
        return;
    }
    Date hostDate = AssignDatetimeValue(xmlRpcStruct, VdsProperties.hostDatetime);
    if (hostDate == null) {
        log.error("Time Drift validation: failed to get Host or Engine time.");
        return;
    }
    Integer maxTimeDriftAllowed = Config.getValue(ConfigValues.HostTimeDriftInSec);
    Long timeDrift =
            TimeUnit.MILLISECONDS.toSeconds(Math.abs(hostDate.getTime() - System.currentTimeMillis()));
    if (timeDrift > maxTimeDriftAllowed) {
        AuditLogableBase logable = new AuditLogableBase(vds.getId());
        logable.addCustomValue("Actual", timeDrift.toString());
        logable.addCustomValue("Max", maxTimeDriftAllowed.toString());
        AuditLogDirector.log(logable, AuditLogType.VDS_TIME_DRIFT_ALERT);
    }
}
/**
 * Serializes the per-disk usage array reported by VDSM into the VM
 * statistics as unformatted JSON; no-op when the struct has no usage data.
 */
private static void initDisksUsage(Map<String, Object> vmStruct, VmStatistics vm) {
    Object[] disksUsage = (Object[]) vmStruct.get(VdsProperties.VM_DISKS_USAGE);
    if (disksUsage == null) {
        return;
    }
    ArrayList<Object> disksUsageList = new ArrayList<Object>(Arrays.asList(disksUsage));
    vm.setDisksUsage(SerializationFactory.getSerializer().serializeUnformattedJson(disksUsageList));
}
/**
 * Extracts version information of key host packages (vdsm, qemu-kvm,
 * libvirt, spice-server, kernel, glusterfs) from either the legacy flat
 * "packages" list or the newer "packages2" dictionary, whichever is present.
 */
private static void UpdatePackagesVersions(VDS vds, Map<String, Object> xmlRpcStruct) {
    vds.setVersionName(AssignStringValue(xmlRpcStruct, VdsProperties.version_name));
    vds.setSoftwareVersion(AssignStringValue(xmlRpcStruct, VdsProperties.software_version));
    vds.setBuildName(AssignStringValue(xmlRpcStruct, VdsProperties.build_name));
    if (xmlRpcStruct.containsKey(VdsProperties.host_os)) {
        vds.setHostOs(GetPackageVersionFormated(
                (Map<String, Object>) xmlRpcStruct.get(VdsProperties.host_os), true));
    }
    if (xmlRpcStruct.containsKey(VdsProperties.packages)) {
        // packages is an array of xmlRpcStruct (that each is a name, ver,
        // release.. of a package)
        for (Object hostPackageMap : (Object[]) xmlRpcStruct.get(VdsProperties.packages)) {
            Map<String, Object> hostPackage = (Map<String, Object>) hostPackageMap;
            String packageName = AssignStringValue(hostPackage, VdsProperties.package_name);
            if (VdsProperties.kvmPackageName.equals(packageName)) {
                vds.setKvmVersion(GetPackageVersionFormated(hostPackage, false));
            } else if (VdsProperties.spicePackageName.equals(packageName)) {
                vds.setSpiceVersion(GetPackageVersionFormated(hostPackage, false));
            } else if (VdsProperties.kernelPackageName.equals(packageName)) {
                vds.setKernelVersion(GetPackageVersionFormated(hostPackage, false));
            }
        }
    } else if (xmlRpcStruct.containsKey(VdsProperties.packages2)) {
        // packages2 (VDSM >= 2.3) maps package name -> {version, release}
        Map<String, Object> packages = (Map<String, Object>) xmlRpcStruct.get(VdsProperties.packages2);
        if (packages.containsKey(VdsProperties.vdsmPackageName)) {
            Map<String, Object> vdsm = (Map<String, Object>) packages.get(VdsProperties.vdsmPackageName);
            vds.setVersion(getPackageRpmVersion("vdsm", vdsm));
        }
        if (packages.containsKey(VdsProperties.qemuKvmPackageName)) {
            Map<String, Object> kvm = (Map<String, Object>) packages.get(VdsProperties.qemuKvmPackageName);
            vds.setKvmVersion(getPackageVersionFormated2(kvm));
        }
        if (packages.containsKey(VdsProperties.libvirtPackageName)) {
            Map<String, Object> libvirt = (Map<String, Object>) packages.get(VdsProperties.libvirtPackageName);
            vds.setLibvirtVersion(getPackageRpmVersion("libvirt", libvirt));
        }
        if (packages.containsKey(VdsProperties.spiceServerPackageName)) {
            Map<String, Object> spice = (Map<String, Object>) packages.get(VdsProperties.spiceServerPackageName);
            vds.setSpiceVersion(getPackageVersionFormated2(spice));
        }
        if (packages.containsKey(VdsProperties.kernelPackageName)) {
            Map<String, Object> kernel = (Map<String, Object>) packages.get(VdsProperties.kernelPackageName);
            vds.setKernelVersion(getPackageVersionFormated2(kernel));
        }
        if (packages.containsKey(VdsProperties.GLUSTER_PACKAGE_NAME)) {
            Map<String, Object> gluster = (Map<String, Object>) packages.get(VdsProperties.GLUSTER_PACKAGE_NAME);
            vds.setGlusterVersion(getPackageRpmVersion("glusterfs", gluster));
        }
    }
}
// Version 2 of GetPackageVersionFormated:
// since VDSM 2.3 we get a dictionary rather than a flat list.
// From then on the package names (of spice, kernel, qemu and libvirt) are
// the same for both VDSM and the engine.
// (VDSM used to report the rpm package names, so in RHEL6 when those changed
// it broke our interface.)
/**
 * Formats "version - release" from a packages2-style dictionary entry,
 * omitting whichever part is missing.
 */
private static String getPackageVersionFormated2(Map<String, Object> hostPackage) {
    String version = (hostPackage.get(VdsProperties.package_version) != null) ? (String) hostPackage
            .get(VdsProperties.package_version) : null;
    String release = (hostPackage.get(VdsProperties.package_release) != null) ? (String) hostPackage
            .get(VdsProperties.package_release) : null;
    StringBuilder formatted = new StringBuilder();
    if (!StringUtils.isEmpty(version)) {
        formatted.append(version);
    }
    if (!StringUtils.isEmpty(release)) {
        if (formatted.length() > 0) {
            formatted.append(" - ").append(release);
        } else {
            formatted.append(release);
        }
    }
    return formatted.toString();
}
/**
 * Builds an {@link RpmVersion} of the form "name-version-release" from a
 * packages2-style dictionary entry, omitting missing parts.
 */
private static RpmVersion getPackageRpmVersion(String packageName, Map<String, Object> hostPackage) {
    String packageVersion = (hostPackage.get(VdsProperties.package_version) != null) ? (String) hostPackage
            .get(VdsProperties.package_version) : null;
    String packageRelease = (hostPackage.get(VdsProperties.package_release) != null) ? (String) hostPackage
            .get(VdsProperties.package_release) : null;
    StringBuilder sb = new StringBuilder();
    if (!StringUtils.isEmpty(packageName)) {
        sb.append(packageName);
    }
    // The locals were previously named "hasPackageVersion"/"hasPackageRelease"
    // while actually holding the *emptiness* of each part; renamed to match
    // their meaning. Behavior is unchanged.
    boolean versionEmpty = StringUtils.isEmpty(packageVersion);
    boolean releaseEmpty = StringUtils.isEmpty(packageRelease);
    if (!versionEmpty || !releaseEmpty) {
        sb.append("-");
    }
    if (!versionEmpty) {
        sb.append(packageVersion);
    }
    if (!releaseEmpty) {
        if (sb.length() > 0) {
            sb.append(String.format("-%1$s", packageRelease));
        } else {
            sb.append(packageRelease);
        }
    }
    return new RpmVersion(sb.toString());
}
/**
 * Copies the DMI/SMBIOS hardware information reported by VDSM onto the host:
 * manufacturer, product name, version, serial number, UUID and family.
 */
public static void updateHardwareSystemInformation(Map<String, Object> hwInfo, VDS vds){
    vds.setHardwareUUID(AssignStringValue(hwInfo, VdsProperties.hwUUID));
    vds.setHardwareSerialNumber(AssignStringValue(hwInfo, VdsProperties.hwSerialNumber));
    vds.setHardwareManufacturer(AssignStringValue(hwInfo, VdsProperties.hwManufacturer));
    vds.setHardwareProductName(AssignStringValue(hwInfo, VdsProperties.hwProductName));
    vds.setHardwareFamily(AssignStringValue(hwInfo, VdsProperties.hwFamily));
    vds.setHardwareVersion(AssignStringValue(hwInfo, VdsProperties.hwVersion));
}
/**
 * Formats "name - version - release" from a legacy package struct, skipping
 * empty parts; the name is included only when {@code getName} is true.
 */
private static String GetPackageVersionFormated(Map<String, Object> hostPackage, boolean getName) {
    String packageName = AssignStringValue(hostPackage, VdsProperties.package_name);
    String packageVersion = AssignStringValue(hostPackage, VdsProperties.package_version);
    String packageRelease = AssignStringValue(hostPackage, VdsProperties.package_release);
    StringBuilder sb = new StringBuilder();
    if (getName && !StringUtils.isEmpty(packageName)) {
        sb.append(packageName);
    }
    appendPackagePart(sb, packageVersion);
    appendPackagePart(sb, packageRelease);
    return sb.toString();
}

/** Appends a non-empty part, preceded by " - " when the builder already has content. */
private static void appendPackagePart(StringBuilder sb, String part) {
    if (StringUtils.isEmpty(part)) {
        return;
    }
    if (sb.length() > 0) {
        sb.append(" - ");
    }
    sb.append(part);
}
/**
 * Populates a host's run-time statistics from a VDSM getVdsStats struct:
 * memory/CPU/swap/KSM usage, per-interface network statistics, VM counts,
 * storage-domain data, hosted-engine HA state and NUMA statistics.
 */
public static void updateVDSStatisticsData(VDS vds, Map<String, Object> xmlRpcStruct) {
    // ------------- vds memory usage ---------------------------
    vds.setUsageMemPercent(AssignIntValue(xmlRpcStruct, VdsProperties.mem_usage));
    // ------------- vds network statistics ---------------------
    Map<String, Object> interfaces = (Map<String, Object>) xmlRpcStruct.get(VdsProperties.NETWORK);
    if (interfaces != null) {
        int networkUsage = 0;
        Map<String, VdsNetworkInterface> nicsByName = Entities.entitiesByName(vds.getInterfaces());
        for (Entry<String, Object> entry : interfaces.entrySet()) {
            // only update NICs the engine already knows about on this host
            if (nicsByName.containsKey(entry.getKey())) {
                VdsNetworkInterface iface = nicsByName.get(entry.getKey());
                iface.setVdsId(vds.getId());
                Map<String, Object> dict = (Map<String, Object>) entry.getValue();
                Double rx_rate = AssignDoubleValue(dict, VdsProperties.rx_rate);
                Double rx_dropped = AssignDoubleValue(dict, VdsProperties.rx_dropped);
                Double tx_rate = AssignDoubleValue(dict, VdsProperties.tx_rate);
                Double tx_dropped = AssignDoubleValue(dict, VdsProperties.tx_dropped);
                // missing rates default to 0
                iface.getStatistics().setReceiveRate(rx_rate != null ? rx_rate : 0);
                iface.getStatistics().setReceiveDropRate(rx_dropped != null ? rx_dropped : 0);
                iface.getStatistics().setTransmitRate(tx_rate != null ? tx_rate : 0);
                iface.getStatistics().setTransmitDropRate(tx_dropped != null ? tx_dropped : 0);
                iface.setSpeed(AssignIntValue(dict, VdsProperties.INTERFACE_SPEED));
                iface.getStatistics().setStatus(AssignInterfaceStatusValue(dict, VdsProperties.iface_status));
                // host network usage is the busiest interface; VLANs and bond
                // slaves are skipped to avoid counting traffic twice
                if (!NetworkUtils.isVlan(iface) && !iface.isBondSlave()) {
                    networkUsage = (int) Math.max(networkUsage, computeInterfaceUsage(iface));
                }
            }
        }
        vds.setUsageNetworkPercent(networkUsage);
    }
    // ----------- vds cpu statistics info ---------------------
    vds.setCpuSys(AssignDoubleValue(xmlRpcStruct, VdsProperties.cpu_sys));
    vds.setCpuUser(AssignDoubleValue(xmlRpcStruct, VdsProperties.cpu_user));
    if (vds.getCpuSys() != null && vds.getCpuUser() != null) {
        vds.setUsageCpuPercent((int) (vds.getCpuSys() + vds.getCpuUser()));
    }
    // CPU load reported by VDSM is in uptime-style format, i.e. normalized
    // to unity, so that say an 8% load is reported as 0.08
    Double d = AssignDoubleValue(xmlRpcStruct, VdsProperties.cpu_load);
    d = (d != null) ? d : 0;
    vds.setCpuLoad(d.doubleValue() * 100.0);
    vds.setCpuIdle(AssignDoubleValue(xmlRpcStruct, VdsProperties.cpu_idle));
    vds.setMemAvailable(AssignLongValue(xmlRpcStruct, VdsProperties.mem_available));
    vds.setMemFree(AssignLongValue(xmlRpcStruct, VdsProperties.memFree));
    vds.setMemShared(AssignLongValue(xmlRpcStruct, VdsProperties.mem_shared));
    vds.setSwapFree(AssignLongValue(xmlRpcStruct, VdsProperties.swap_free));
    vds.setSwapTotal(AssignLongValue(xmlRpcStruct, VdsProperties.swap_total));
    vds.setKsmCpuPercent(AssignIntValue(xmlRpcStruct, VdsProperties.ksm_cpu_percent));
    vds.setKsmPages(AssignLongValue(xmlRpcStruct, VdsProperties.ksm_pages));
    vds.setKsmState(AssignBoolValue(xmlRpcStruct, VdsProperties.ksm_state));
    // dynamic data got from GetVdsStats
    if (xmlRpcStruct.containsKey(VdsProperties.transparent_huge_pages_state)) {
        vds.setTransparentHugePagesState(EnumUtils.valueOf(VdsTransparentHugePagesState.class, xmlRpcStruct
                .get(VdsProperties.transparent_huge_pages_state).toString(), true));
    }
    if (xmlRpcStruct.containsKey(VdsProperties.anonymous_transparent_huge_pages)) {
        vds.setAnonymousHugePages(AssignIntValue(xmlRpcStruct, VdsProperties.anonymous_transparent_huge_pages));
    }
    vds.setNetConfigDirty(AssignBoolValue(xmlRpcStruct, VdsProperties.netConfigDirty));
    vds.setImagesLastCheck(AssignDoubleValue(xmlRpcStruct, VdsProperties.images_last_check));
    vds.setImagesLastDelay(AssignDoubleValue(xmlRpcStruct, VdsProperties.images_last_delay));
    Integer vm_count = AssignIntValue(xmlRpcStruct, VdsProperties.vm_count);
    vds.setVmCount(vm_count == null ? 0 : vm_count);
    vds.setVmActive(AssignIntValue(xmlRpcStruct, VdsProperties.vm_active));
    vds.setVmMigrating(AssignIntValue(xmlRpcStruct, VdsProperties.vm_migrating));
    updateVDSDomainData(vds, xmlRpcStruct);
    updateLocalDisksUsage(vds, xmlRpcStruct);
    // hosted engine HA state
    Integer haScore = null;
    Boolean haIsConfigured = null;
    Boolean haIsActive = null;
    Boolean haGlobalMaint = null;
    Boolean haLocalMaint = null;
    if (xmlRpcStruct.containsKey(VdsProperties.ha_stats)) {
        Map<String, Object> haStats = (Map<String, Object>) xmlRpcStruct.get(VdsProperties.ha_stats);
        if (haStats != null) {
            haScore = AssignIntValue(haStats, VdsProperties.ha_stats_score);
            haIsConfigured = AssignBoolValue(haStats, VdsProperties.ha_stats_is_configured);
            haIsActive = AssignBoolValue(haStats, VdsProperties.ha_stats_is_active);
            haGlobalMaint = AssignBoolValue(haStats, VdsProperties.ha_stats_global_maintenance);
            haLocalMaint = AssignBoolValue(haStats, VdsProperties.ha_stats_local_maintenance);
        }
    } else {
        haScore = AssignIntValue(xmlRpcStruct, VdsProperties.ha_score);
        // prior to 3.4, haScore was returned if ha was installed; assume active if > 0
        if (haScore != null) {
            haIsConfigured = true;
            haIsActive = (haScore > 0);
        }
    }
    vds.setHighlyAvailableScore(haScore != null ? haScore : 0);
    vds.setHighlyAvailableIsConfigured(haIsConfigured != null ? haIsConfigured : false);
    vds.setHighlyAvailableIsActive(haIsActive != null ? haIsActive : false);
    vds.setHighlyAvailableGlobalMaintenance(haGlobalMaint != null ? haGlobalMaint : false);
    vds.setHighlyAvailableLocalMaintenance(haLocalMaint != null ? haLocalMaint : false);
    vds.setBootTime(AssignLongValue(xmlRpcStruct, VdsProperties.bootTime));
    updateNumaStatisticsData(vds, xmlRpcStruct);
}
/**
 * Computes an interface's usage percentage as the larger of its receive and
 * transmit rates, each capped at 100.
 */
private static double computeInterfaceUsage(VdsNetworkInterface iface) {
    double receive = truncatePercentage(iface.getStatistics().getReceiveRate());
    double transmit = truncatePercentage(iface.getStatistics().getTransmitRate());
    return Math.max(receive, transmit);
}
/** Caps a percentage value at 100 (values above 100 are reported as 100). */
private static double truncatePercentage(double value) {
    return Math.min(value, 100);
}
/**
 * Updates the host's NUMA node list and per-core CPU statistics from the
 * VDSM stats struct: aggregates per-CPU statistics into their NUMA node and
 * merges in per-node free-memory figures. Previously held data is replaced,
 * not merged.
 */
public static void updateNumaStatisticsData(VDS vds, Map<String, Object> xmlRpcStruct) {
    List<VdsNumaNode> vdsNumaNodes = new ArrayList<>();
    List<CpuStatistics> cpuStatsData = new ArrayList<>();
    if (xmlRpcStruct.containsKey(VdsProperties.CPU_STATS)) {
        Map<String, Map<String, Object>> cpuStats = (Map<String, Map<String, Object>>)
                xmlRpcStruct.get(VdsProperties.CPU_STATS);
        // group the per-CPU statistics by the NUMA node each CPU belongs to
        Map<Integer, List<CpuStatistics>> numaNodeCpuStats = new HashMap<>();
        for (Map.Entry<String, Map<String, Object>> item : cpuStats.entrySet()) {
            CpuStatistics data = buildVdsCpuStatistics(item);
            cpuStatsData.add(data);
            int numaNodeIndex = AssignIntValue(item.getValue(), VdsProperties.NUMA_NODE_INDEX);
            if (!numaNodeCpuStats.containsKey(numaNodeIndex)) {
                numaNodeCpuStats.put(numaNodeIndex, new ArrayList<CpuStatistics>());
            }
            numaNodeCpuStats.get(numaNodeIndex).add(data);
        }
        DecimalFormat percentageFormatter = new DecimalFormat("#.##");
        for (Map.Entry<Integer, List<CpuStatistics>> item : numaNodeCpuStats.entrySet()) {
            VdsNumaNode node = buildVdsNumaNodeStatistics(percentageFormatter, item);
            vdsNumaNodes.add(node);
        }
    }
    if (xmlRpcStruct.containsKey(VdsProperties.NUMA_NODE_FREE_MEM_STAT)) {
        Map<String, Map<String, Object>> memStats = (Map<String, Map<String, Object>>)
                xmlRpcStruct.get(VdsProperties.NUMA_NODE_FREE_MEM_STAT);
        for (Map.Entry<String, Map<String, Object>> item : memStats.entrySet()) {
            // the map key is the NUMA node index as a string
            VdsNumaNode node = NumaUtils.getVdsNumaNodeByIndex(vdsNumaNodes, Integer.valueOf(item.getKey()));
            if (node != null) {
                node.getNumaNodeStatistics().setMemFree(AssignLongValue(item.getValue(),
                        VdsProperties.NUMA_NODE_FREE_MEM));
                node.getNumaNodeStatistics().setMemUsagePercent(AssignIntValue(item.getValue(),
                        VdsProperties.NUMA_NODE_MEM_PERCENT));
            }
        }
    }
    vds.getNumaNodeList().clear();
    vds.getNumaNodeList().addAll(vdsNumaNodes);
    vds.getStatisticsData().getCpuCoreStatistics().clear();
    vds.getStatisticsData().getCpuCoreStatistics().addAll(cpuStatsData);
}
    /**
     * Aggregates the per-core CPU statistics of a single NUMA node into node-level
     * user/sys/idle averages (rounded via {@code percentageFormatter}) plus an overall
     * integer usage percentage (user + sys, truncated).
     *
     * @param percentageFormatter
     *            formatter used to round the averages (pattern "#.##")
     * @param item
     *            map entry of node index to that node's per-core statistics (non-empty)
     * @return the populated node with its statistics attached
     */
    private static VdsNumaNode buildVdsNumaNodeStatistics(DecimalFormat percentageFormatter,
            Map.Entry<Integer, List<CpuStatistics>> item) {
        VdsNumaNode node = new VdsNumaNode();
        NumaNodeStatistics nodeStat = new NumaNodeStatistics();
        double nodeCpuUser = 0.0;
        double nodeCpuSys = 0.0;
        double nodeCpuIdle = 0.0;
        // Sum over the node's cores; averaged below by dividing by the core count.
        for (CpuStatistics cpuStat : item.getValue()) {
            nodeCpuUser += cpuStat.getCpuUser();
            nodeCpuSys += cpuStat.getCpuSys();
            nodeCpuIdle += cpuStat.getCpuIdle();
        }
        // NOTE(review): Double.valueOf(DecimalFormat.format(...)) assumes the formatter uses
        // '.' as decimal separator; under a default locale such as de_DE (',' separator)
        // this throws NumberFormatException — confirm the formatter's locale is fixed.
        nodeStat.setCpuUser(Double.valueOf(percentageFormatter.format(nodeCpuUser / item.getValue().size())));
        nodeStat.setCpuSys(Double.valueOf(percentageFormatter.format(nodeCpuSys / item.getValue().size())));
        nodeStat.setCpuIdle(Double.valueOf(percentageFormatter.format(nodeCpuIdle / item.getValue().size())));
        nodeStat.setCpuUsagePercent((int) (nodeStat.getCpuSys() + nodeStat.getCpuUser()));
        node.setIndex(item.getKey());
        node.setNumaNodeStatistics(nodeStat);
        return node;
    }
private static CpuStatistics buildVdsCpuStatistics(Map.Entry<String, Map<String, Object>> item) {
CpuStatistics data = new CpuStatistics();
data.setCpuId(Integer.valueOf(item.getKey()));
data.setCpuUser(AssignDoubleValue(item.getValue(), VdsProperties.NUMA_CPU_USER));
data.setCpuSys(AssignDoubleValue(item.getValue(), VdsProperties.NUMA_CPU_SYS));
data.setCpuIdle(AssignDoubleValue(item.getValue(), VdsProperties.NUMA_CPU_IDLE));
data.setCpuUsagePercent((int) (data.getCpuSys() + data.getCpuUser()));
return data;
}
/**
* Update {@link VDS#setLocalDisksUsage(Map)} with map of paths usage extracted from the returned returned value. The
* usage is reported in MB.
*
* @param vds
* The VDS object to update.
* @param xmlRpcStruct
* The XML/RPC to extract the usage from.
*/
protected static void updateLocalDisksUsage(VDS vds, Map<String, Object> xmlRpcStruct) {
if (xmlRpcStruct.containsKey(VdsProperties.DISK_STATS)) {
Map<String, Object> diskStatsStruct = (Map<String, Object>) xmlRpcStruct.get(VdsProperties.DISK_STATS);
Map<String, Long> diskStats = new HashMap<String, Long>();
vds.setLocalDisksUsage(diskStats);
for (Entry<String, Object> entry : diskStatsStruct.entrySet()) {
Map<String, Object> pathStatsStruct = (Map<String, Object>) entry.getValue();
diskStats.put(entry.getKey(), AssignLongValue(pathStatsStruct, VdsProperties.DISK_STATS_FREE));
}
}
}
private static void updateVDSDomainData(VDS vds, Map<String, Object> xmlRpcStruct) {
if (xmlRpcStruct.containsKey(VdsProperties.domains)) {
Map<String, Object> domains = (Map<String, Object>)
xmlRpcStruct.get(VdsProperties.domains);
ArrayList<VDSDomainsData> domainsData = new ArrayList<VDSDomainsData>();
for (Map.Entry<String, ?> value : domains.entrySet()) {
try {
VDSDomainsData data = new VDSDomainsData();
data.setDomainId(new Guid(value.getKey().toString()));
Map<String, Object> internalValue = (Map<String, Object>) value.getValue();
double lastCheck = 0;
data.setCode((Integer) (internalValue).get(VdsProperties.code));
if (internalValue.containsKey(VdsProperties.lastCheck)) {
lastCheck = Double.parseDouble((String) internalValue.get(VdsProperties.lastCheck));
}
data.setLastCheck(lastCheck);
double delay = 0;
if (internalValue.containsKey(VdsProperties.delay)) {
delay = Double.parseDouble((String) internalValue.get(VdsProperties.delay));
}
data.setDelay(delay);
domainsData.add(data);
} catch (Exception e) {
log.error("failed building domains: {}", e.getMessage());
log.debug("Exception", e);
}
}
vds.setDomains(domainsData);
}
}
private static InterfaceStatus AssignInterfaceStatusValue(Map<String, Object> input, String name) {
InterfaceStatus ifaceStatus = InterfaceStatus.NONE;
if (input.containsKey(name)) {
String stringValue = (String) ((input.get(name) instanceof String) ? input.get(name) : null);
if (!StringUtils.isEmpty(stringValue)) {
if (stringValue.toLowerCase().trim().equals("up")) {
ifaceStatus = InterfaceStatus.UP;
} else {
ifaceStatus = InterfaceStatus.DOWN;
}
}
}
return ifaceStatus;
}
private static Double AssignDoubleValue(Map<String, Object> input, String name) {
Double returnValue = null;
if (input.containsKey(name)) {
String stringValue = (String) ((input.get(name) instanceof String) ? input.get(name) : null);
returnValue = (stringValue == null) ? null : Double.parseDouble(stringValue);
}
return returnValue;
}
/**
* Do the same logic as AssignDoubleValue does, but instead, in case of null we return 0.
* @param input - the Input xml
* @param name - The name of the field we want to cast it to double.
* @return - the double value.
*/
private static Double assignDoubleValueWithNullProtection(Map<String, Object> input, String name) {
Double doubleValue = AssignDoubleValue(input, name);
return (doubleValue == null ? Double.valueOf(0.0) : doubleValue);
}
private static Integer AssignIntValue(Map input, String name) {
if (input.containsKey(name)) {
if (input.get(name) instanceof Integer) {
return (Integer) input.get(name);
}
String stringValue = (String) input.get(name);
if (!StringUtils.isEmpty(stringValue)) { // in case the input
// is decimal and we
// need int.
stringValue = stringValue.split("[.]", -1)[0];
}
try {
int intValue = Integer.parseInt(stringValue);
return intValue;
} catch (NumberFormatException nfe) {
log.error("Failed to parse '{}' value '{}' to integer: {}", name, stringValue, nfe.getMessage());
}
}
return null;
}
private static Long AssignLongValue(Map<String, Object> input, String name) {
if (input.containsKey(name)) {
if (input.get(name) instanceof Long || input.get(name) instanceof Integer) {
return Long.parseLong(input.get(name).toString());
}
String stringValue = (String) ((input.get(name) instanceof String) ? input.get(name) : null);
if (!StringUtils.isEmpty(stringValue)) { // in case the input
// is decimal and we
// need int.
stringValue = stringValue.split("[.]", -1)[0];
}
try {
return Long.parseLong(stringValue);
} catch (NumberFormatException e) {
log.error("Failed to parse '{}' value '{}' to long: {}", name, stringValue, e.getMessage());
}
}
return null;
}
private static String AssignStringValue(Map<String, Object> input, String name) {
if (input.containsKey(name)) {
return (String) ((input.get(name) instanceof String) ? input.get(name) : null);
}
return null;
}
private static String AssignStringValueFromArray(Map<String, Object> input, String name) {
if (input.containsKey(name)) {
String[] arr = (String[]) ((input.get(name) instanceof String[]) ? input.get(name) : null);
if (arr == null) {
Object[] arr2 = (Object[]) ((input.get(name) instanceof Object[]) ? input.get(name) : null);
if (arr2 != null) {
arr = new String[arr2.length];
for (int i = 0; i < arr2.length; i++)
arr[i] = arr2[i].toString();
}
}
if (arr != null) {
return StringUtils.join(arr, ',');
}
}
return null;
}
    /**
     * Reads the value under {@code name} (a Double epoch timestamp) as a {@link Date}.
     * Returns null when the key is absent or the conversion fails (the failure is logged).
     * <p>
     * NOTE(review): the name suggests the value is *seconds* since the epoch, yet it is
     * fed to {@link Calendar#setTimeInMillis(long)} without a x1000 conversion — confirm
     * the unit VDSM actually reports for this field.
     */
    private static Date AssignDateTImeFromEpoch(Map<String, Object> input, String name) {
        Date retval = null;
        try {
            if (input.containsKey(name)) {
                Double secsSinceEpoch = (Double) input.get(name);
                Calendar calendar = Calendar.getInstance();
                calendar.setTimeInMillis(secsSinceEpoch.longValue());
                retval = calendar.getTime();
            }
        } catch (RuntimeException ex) {
            // e.g. ClassCastException when the value isn't a Double.
            log.warn("VdsBroker::AssignDateTImeFromEpoch - failed to convert field '{}' to dateTime: {}",
                    name, ex.getMessage());
            log.debug("Exception", ex);
            retval = null;
        }
        return retval;
    }
private static Date AssignDatetimeValue(Map<String, Object> input, String name) {
if (input.containsKey(name)) {
if (input.get(name) instanceof Date) {
return (Date) input.get(name);
}
DateFormat formatter = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss z");
try {
String dateStr = input.get(name).toString().replaceFirst("T", " ").trim();
return formatter.parse(dateStr);
} catch (ParseException e) {
e.printStackTrace();
}
}
return null;
}
private static Boolean AssignBoolValue(Map<String, Object> input, String name) {
if (input.containsKey(name)) {
if (input.get(name) instanceof Boolean) {
return (Boolean) input.get(name);
}
return Boolean.parseBoolean(input.get(name).toString());
}
return Boolean.FALSE;
}
    /**
     * Populates the VM's dynamic disk statistics from the reported vm_disks struct:
     * read/write rates, actual size and (when reported) read/write/flush latencies.
     * Disks that report no image group id are skipped.
     */
    private static void initDisks(Map<String, Object> vmStruct, VmDynamic vm) {
        Map<String, Object> disks = (Map<String, Object>) vmStruct.get(VdsProperties.vm_disks);
        ArrayList<DiskImageDynamic> disksData = new ArrayList<DiskImageDynamic>();
        for (Object diskAsObj : disks.values()) {
            Map<String, Object> disk = (Map<String, Object>) diskAsObj;
            DiskImageDynamic diskData = new DiskImageDynamic();
            String imageGroupIdString = AssignStringValue(disk, VdsProperties.image_group_id);
            if (!StringUtils.isEmpty(imageGroupIdString)) {
                Guid imageGroupIdGuid = new Guid(imageGroupIdString);
                diskData.setId(imageGroupIdGuid);
                diskData.setread_rate(AssignIntValue(disk, VdsProperties.vm_disk_read_rate));
                diskData.setwrite_rate(AssignIntValue(disk, VdsProperties.vm_disk_write_rate));
                // The "actual size" property is multiplied by 512 (apparently reported in
                // 512-byte units — TODO confirm), while "true size" is taken verbatim.
                if (disk.containsKey(VdsProperties.disk_actual_size)) {
                    Long size = AssignLongValue(disk, VdsProperties.disk_actual_size);
                    diskData.setactual_size(size != null ? size * 512 : 0);
                } else if (disk.containsKey(VdsProperties.disk_true_size)) {
                    Long size = AssignLongValue(disk, VdsProperties.disk_true_size);
                    diskData.setactual_size(size != null ? size : 0);
                }
                // Latencies are scaled down by NANO_SECONDS (presumably ns → s — confirm
                // against the constant's definition); missing values default to 0.
                if (disk.containsKey(VdsProperties.vm_disk_read_latency)) {
                    diskData.setReadLatency(assignDoubleValueWithNullProtection(disk,
                            VdsProperties.vm_disk_read_latency) / NANO_SECONDS);
                }
                if (disk.containsKey(VdsProperties.vm_disk_write_latency)) {
                    diskData.setWriteLatency(assignDoubleValueWithNullProtection(disk,
                            VdsProperties.vm_disk_write_latency) / NANO_SECONDS);
                }
                if (disk.containsKey(VdsProperties.vm_disk_flush_latency)) {
                    diskData.setFlushLatency(assignDoubleValueWithNullProtection(disk,
                            VdsProperties.vm_disk_flush_latency) / NANO_SECONDS);
                }
                disksData.add(diskData);
            }
        }
        vm.setDisks(disksData);
    }
private static void initAppsList(Map<String, Object> vmStruct, VmDynamic vm) {
if (vmStruct.containsKey(VdsProperties.app_list)) {
Object tempAppsList = vmStruct.get(VdsProperties.app_list);
if (tempAppsList instanceof Object[]) {
Object[] apps = (Object[]) tempAppsList;
StringBuilder builder = new StringBuilder();
boolean firstTime = true;
for (Object app : apps) {
String appString = (String) ((app instanceof String) ? app : null);
if (app == null) {
log.warn("Failed to convert app: [null] to string");
}
if (!firstTime) {
builder.append(",");
} else {
firstTime = false;
}
builder.append(appString);
}
vm.setAppList(builder.toString());
} else {
vm.setAppList("");
}
}
}
private static VMStatus convertToVmStatus(String statusName) {
VMStatus status = VMStatus.Unassigned;
// TODO: The following condition should deleted as soon as we drop compatibility with 3.3 since "Running" state
// will be replaced "Up" state and "Unknown" will exist no more. The "Up" state will be processed by
// EnumUtils as other states below.
if ("Running".equals(statusName) || "Unknown".equals(statusName)) {
status = VMStatus.Up;
}
else if ("Migration Source".equals(statusName)) {
status = VMStatus.MigratingFrom;
}
else if ("Migration Destination".equals(statusName)) {
status = VMStatus.MigratingTo;
} else {
try {
statusName = statusName.replace(" ", "");
status = EnumUtils.valueOf(VMStatus.class, statusName, true);
} catch (Exception e) {
log.error("Illegal Vm status: '{}'.", statusName);
}
}
return status;
}
    /**
     * Updates the host network data with the network data reported by the host.
     *
     * @param vds
     *            The host to update
     * @param xmlRpcStruct
     *            A nested map contains network interfaces data
     */
    public static void updateNetworkData(VDS vds, Map<String, Object> xmlRpcStruct) {
        vds.setActiveNic(AssignStringValue(xmlRpcStruct, VdsProperties.NETWORK_LAST_CLIENT_INTERFACE));
        // Snapshot the interfaces currently stored in the DB before rebuilding the list,
        // so user-configured bonding options can be carried over below.
        List<VdsNetworkInterface> oldInterfaces =
                DbFacade.getInstance().getInterfaceDao().getAllInterfacesForVds(vds.getId());
        vds.getInterfaces().clear();
        // Order matters: NICs first, then VLAN and bond devices layered on top, and finally
        // the networks, which look up the already-added interfaces by name.
        addHostNetworkInterfaces(vds, xmlRpcStruct);
        addHostVlanDevices(vds, xmlRpcStruct);
        addHostBondDevices(vds, xmlRpcStruct);
        addHostNetworksAndUpdateInterfaces(vds, xmlRpcStruct);
        // set bonding options
        setBondingOptions(vds, oldInterfaces);
        // This information was added in 3.1, so don't use it if it's not there.
        if (xmlRpcStruct.containsKey(VdsProperties.netConfigDirty)) {
            vds.setNetConfigDirty(AssignBoolValue(xmlRpcStruct, VdsProperties.netConfigDirty));
        }
    }
    /**
     * Rebuilds the host's network list from the reported struct, and updates every
     * interface attached to each network with that network's details (address, bridging,
     * gateway, QoS). Suspicious attachments are audit-logged.
     */
    private static void addHostNetworksAndUpdateInterfaces(VDS vds,
            Map<String, Object> xmlRpcStruct) {
        // Networks collection (name point to list of nics or bonds)
        Map<String, Object> networks = (Map<String, Object>) xmlRpcStruct.get(VdsProperties.NETWORKS);
        if (networks != null) {
            vds.getNetworks().clear();
            for (Entry<String, Object> entry : networks.entrySet()) {
                Map<String, Object> network = (Map<String, Object>) entry.getValue();
                if (network != null) {
                    Network net = createNetworkData(entry.getKey(), network);
                    List<VdsNetworkInterface> interfaces = findNetworkInterfaces(vds, xmlRpcStruct, network);
                    for (VdsNetworkInterface iface : interfaces) {
                        updateNetworkDetailsInInterface(iface,
                                network,
                                vds,
                                net);
                    }
                    vds.getNetworks().add(net);
                    // Audit bad attachments: no interface at all, or a bridge over several.
                    reportInvalidInterfacesForNetwork(interfaces, net, vds);
                }
            }
        }
    }
/**
* Reports a warning to the audit log if a bridge is connected to more than one interface which is considered bad
* configuration.
*
* @param interfaces
* The network's interfaces
* @param network
* The network to report for
* @param vds
* The host in which the network is defined
*/
private static void reportInvalidInterfacesForNetwork(List<VdsNetworkInterface> interfaces, Network network, VDS vds) {
if (interfaces.isEmpty()) {
AuditLogDirector.log(createHostNetworkAuditLog(network, vds), AuditLogType.NETWORK_WITHOUT_INTERFACES);
} else if (interfaces.size() > 1) {
AuditLogableBase logable = createHostNetworkAuditLog(network, vds);
logable.addCustomValue("Interfaces", StringUtils.join(Entities.objectNames(interfaces), ","));
AuditLogDirector.log(logable, AuditLogType.BRIDGED_NETWORK_OVER_MULTIPLE_INTERFACES);
}
}
protected static AuditLogableBase createHostNetworkAuditLog(Network network, VDS vds) {
AuditLogableBase logable = new AuditLogableBase(vds.getId());
logable.addCustomValue("NetworkName", network.getName());
return logable;
}
    /**
     * Resolves which of the host's interfaces carry the given network.
     * <p>
     * When vdsm reports bridges separately (feature-supported clusters), the network's
     * 'iface' property is looked up directly among the host's interfaces, falling back to
     * the named bridge's port list; otherwise the network struct itself is treated as a
     * bridge and its ports are matched.
     *
     * @return the matched interfaces (possibly empty, never null)
     */
    private static List<VdsNetworkInterface> findNetworkInterfaces(VDS vds,
            Map<String, Object> xmlRpcStruct,
            Map<String, Object> network) {
        Map<String, VdsNetworkInterface> vdsInterfaces = Entities.entitiesByName(vds.getInterfaces());
        List<VdsNetworkInterface> interfaces = new ArrayList<VdsNetworkInterface>();
        if (FeatureSupported.bridgesReportByVdsm(vds.getVdsGroupCompatibilityVersion())) {
            VdsNetworkInterface iface = null;
            String interfaceName = (String) network.get(VdsProperties.INTERFACE);
            if (interfaceName != null) {
                iface = vdsInterfaces.get(interfaceName);
                if (iface == null) {
                    // Not a plain NIC/VLAN/bond — the name may refer to a reported bridge.
                    Map<String, Object> bridges =
                            (Map<String, Object>) xmlRpcStruct.get(VdsProperties.NETWORK_BRIDGES);
                    if (bridges != null && bridges.containsKey(interfaceName)) {
                        interfaces.addAll(findBridgedNetworkInterfaces((Map<String, Object>) bridges.get(interfaceName),
                                vdsInterfaces));
                    }
                } else {
                    interfaces.add(iface);
                }
            }
        } else {
            interfaces.addAll(findBridgedNetworkInterfaces(network, vdsInterfaces));
        }
        return interfaces;
    }
private static Network createNetworkData(String networkName, Map<String, Object> network) {
Network net = new Network();
net.setName(networkName);
net.setAddr((String) network.get("addr"));
net.setSubnet((String) network.get("netmask"));
net.setGateway((String) network.get(VdsProperties.GLOBAL_GATEWAY));
if (StringUtils.isNotBlank((String) network.get(VdsProperties.MTU))) {
net.setMtu(Integer.parseInt((String) network.get(VdsProperties.MTU)));
}
return net;
}
private static List<VdsNetworkInterface> findBridgedNetworkInterfaces(Map<String, Object> bridge,
Map<String, VdsNetworkInterface> vdsInterfaces) {
List<VdsNetworkInterface> interfaces = new ArrayList<VdsNetworkInterface>();
Object[] ports = (Object[]) bridge.get("ports");
if (ports != null) {
for (Object port : ports) {
if (vdsInterfaces.containsKey(port.toString())) {
interfaces.add(vdsInterfaces.get(port.toString()));
}
}
}
return interfaces;
}
    /**
     * Builds bond interfaces from the reported bondings struct, wiring each bond's
     * slaves and extracting MAC/address/MTU/bonding-option configuration.
     * <p>
     * NOTE(review): the bond is only added to the host's interface list inside
     * addBondDeviceToHost, i.e. when a "slaves" entry is reported — bonds with a null
     * struct or no slaves are dropped; confirm this is intended.
     */
    private static void addHostBondDevices(VDS vds, Map<String, Object> xmlRpcStruct) {
        Map<String, Object> bonds = (Map<String, Object>) xmlRpcStruct.get(VdsProperties.NETWORK_BONDINGS);
        if (bonds != null) {
            for (Entry<String, Object> entry : bonds.entrySet()) {
                VdsNetworkInterface iface = new Bond();
                VdsNetworkStatistics iStats = new VdsNetworkStatistics();
                iface.setStatistics(iStats);
                // The interface and its statistics row share a freshly generated id.
                iStats.setId(Guid.newGuid());
                iStats.setVdsId(vds.getId());
                iface.setId(iStats.getId());
                iface.setName(entry.getKey());
                iface.setVdsId(vds.getId());
                iface.setBonded(true);
                Map<String, Object> bond = (Map<String, Object>) entry.getValue();
                if (bond != null) {
                    iface.setMacAddress((String) bond.get("hwaddr"));
                    iface.setAddress((String) bond.get("addr"));
                    iface.setSubnet((String) bond.get("netmask"));
                    if (bond.get("slaves") != null) {
                        // Adds the bond itself to the host and marks each slave with the bond name.
                        addBondDeviceToHost(vds, iface, (Object[]) bond.get("slaves"));
                    }
                    if (StringUtils.isNotBlank((String) bond.get(VdsProperties.MTU))) {
                        iface.setMtu(Integer.parseInt((String) bond.get(VdsProperties.MTU)));
                    }
                    Map<String, Object> config =
                            (Map<String, Object>) bond.get("cfg");
                    if (config != null && config.get("BONDING_OPTS") != null) {
                        iface.setBondOptions(config.get("BONDING_OPTS").toString());
                    }
                    addBootProtocol(config, vds, iface);
                }
            }
        }
    }
    /**
     * Updates the host interfaces list with vlan devices.
     *
     * @param vds
     *            The host to update
     * @param xmlRpcStruct
     *            a map containing pairs of vlan device name and vlan data
     */
    private static void addHostVlanDevices(VDS vds, Map<String, Object> xmlRpcStruct) {
        // vlans
        Map<String, Object> vlans = (Map<String, Object>) xmlRpcStruct.get(VdsProperties.NETWORK_VLANS);
        if (vlans != null) {
            for (Entry<String, Object> entry : vlans.entrySet()) {
                VdsNetworkInterface iface = new Vlan();
                VdsNetworkStatistics iStats = new VdsNetworkStatistics();
                iface.setStatistics(iStats);
                // The interface and its statistics row share a freshly generated id.
                iStats.setId(Guid.newGuid());
                iface.setId(iStats.getId());
                String vlanDeviceName = entry.getKey();
                iface.setName(vlanDeviceName);
                iface.setVdsId(vds.getId());
                Map<String, Object> vlan = (Map<String, Object>) entry.getValue();
                // Prefer the explicitly reported vlan id and base interface; otherwise fall
                // back to parsing the device name of the form "<base>.<vlanId>".
                if (vlan.get(VdsProperties.VLAN_ID) != null && vlan.get(VdsProperties.BASE_INTERFACE) != null) {
                    iface.setVlanId((Integer) vlan.get(VdsProperties.VLAN_ID));
                    iface.setBaseInterface((String) vlan.get(VdsProperties.BASE_INTERFACE));
                } else if (vlanDeviceName.contains(".")) {
                    String[] names = vlanDeviceName.split("[.]", -1);
                    String vlanId = names[1];
                    iface.setVlanId(Integer.parseInt(vlanId));
                    iface.setBaseInterface(names[0]);
                }
                iface.setAddress((String) vlan.get("addr"));
                iface.setSubnet((String) vlan.get("netmask"));
                if (StringUtils.isNotBlank((String) vlan.get(VdsProperties.MTU))) {
                    iface.setMtu(Integer.parseInt((String) vlan.get(VdsProperties.MTU)));
                }
                iStats.setVdsId(vds.getId());
                addBootProtocol((Map<String, Object>) vlan.get("cfg"), vds, iface);
                vds.getInterfaces().add(iface);
            }
        }
    }
/**
* Updates the host network interfaces with the collected data from the host
*
* @param vds
* The host to update its interfaces
* @param xmlRpcStruct
* A nested map contains network interfaces data
*/
private static void addHostNetworkInterfaces(VDS vds, Map<String, Object> xmlRpcStruct) {
Map<String, Object> nics = (Map<String, Object>) xmlRpcStruct.get(VdsProperties.NETWORK_NICS);
if (nics != null) {
for (Entry<String, Object> entry : nics.entrySet()) {
VdsNetworkInterface iface = new Nic();
VdsNetworkStatistics iStats = new VdsNetworkStatistics();
iface.setStatistics(iStats);
iStats.setId(Guid.newGuid());
iface.setId(iStats.getId());
iface.setName(entry.getKey());
iface.setVdsId(vds.getId());
updateNetworkInterfaceDataFromHost(iface, vds, (Map<String, Object>) entry.getValue());
iStats.setVdsId(vds.getId());
vds.getInterfaces().add(iface);
}
}
}
    /**
     * Updates a given interface by data as collected from the host: speed, addresses,
     * MAC, MTU and boot protocol.
     *
     * @param iface
     *            The interface to update
     * @param host
     *            The host the interface belongs to (passed on for gateway decisions)
     * @param nic
     *            A key-value map of the interface properties and their value
     */
    private static void updateNetworkInterfaceDataFromHost(
            VdsNetworkInterface iface, VDS host, Map<String, Object> nic) {
        if (nic != null) {
            if (nic.get("speed") != null) {
                Object speed = nic.get("speed");
                iface.setSpeed((Integer) speed);
            }
            iface.setAddress((String) nic.get("addr"));
            iface.setSubnet((String) nic.get("netmask"));
            iface.setMacAddress((String) nic.get("hwaddr"));
            // if we get "permhwaddr", we are a part of a bond and we use that as the mac address
            String mac = (String) nic.get("permhwaddr");
            if (mac != null) {
                //TODO remove when the minimal supported vdsm version is >=3.6
                // in older VDSM version, slave's Mac is in upper case
                iface.setMacAddress(mac.toLowerCase());
            }
            if (StringUtils.isNotBlank((String) nic.get(VdsProperties.MTU))) {
                iface.setMtu(Integer.parseInt((String) nic.get(VdsProperties.MTU)));
            }
            addBootProtocol((Map<String, Object>) nic.get("cfg"), host, iface);
        }
    }
    /**
     * Update the network details on a given interface.
     *
     * @param iface
     *            The interface to update.
     * @param network
     *            Network struct to get details from.
     * @param host
     *            The host the interface belongs to.
     * @param net
     *            Network to get details from.
     */
    private static void updateNetworkDetailsInInterface(VdsNetworkInterface iface,
            Map<String, Object> network,
            VDS host,
            Network net) {
        if (iface != null) {
            iface.setNetworkName(net.getName());
            // set the management ip
            if (StringUtils.equals(iface.getNetworkName(), NetworkUtils.getEngineNetwork())) {
                iface.setType(iface.getType() | VdsInterfaceType.MANAGEMENT.getValue());
            }
            iface.setAddress(net.getAddr());
            iface.setSubnet(net.getSubnet());
            boolean bridgedNetwork = isBridgedNetwork(network);
            iface.setBridged(bridgedNetwork);
            setGatewayIfNecessary(iface, host, net.getGateway());
            if (bridgedNetwork) {
                // Derive the boot protocol (and possibly a gateway) from the network's "cfg".
                Map<String, Object> networkConfig = (Map<String, Object>) network.get("cfg");
                addBootProtocol(networkConfig, host, iface);
            }
            HostNetworkQosMapper qosMapper = new HostNetworkQosMapper(network);
            iface.setQos(qosMapper.deserialize());
        }
    }
/**
* Returns true if vdsm doesn't report the 'bridged' attribute or if reported - its actual value.<br>
* The assumption is bridge-less network isn't supported if the 'bridged' attribute wasn't reported.<br>
* Bridge-less networks must report 'false' for this property.
*
* @param network
* The network to evaluate its bridge attribute
* @return true is no attribute is reported or its actual value
*/
private static boolean isBridgedNetwork(Map<String, Object> network) {
return network.get("bridged") == null || Boolean.parseBoolean(network.get("bridged").toString());
}
// we check for old bonding options,
// if we had value for the bonding options, i.e. the user set it by the UI
// and we have host that is not returning it's bonding options(host below 2.2.4) we override
// the "new" bonding options with the old one only if we have the new one as null and the old one is not
private static void setBondingOptions(VDS vds, List<VdsNetworkInterface> oldInterfaces) {
for (VdsNetworkInterface iface : oldInterfaces) {
if (iface.getBondOptions() != null) {
for (VdsNetworkInterface newIface : vds.getInterfaces()) {
if (iface.getName().equals(newIface.getName()) && newIface.getBondOptions() == null) {
newIface.setBondOptions(iface.getBondOptions());
break;
}
}
}
}
}
private static void addBootProtocol(Map<String, Object> cfg, VDS host, VdsNetworkInterface iface) {
NetworkBootProtocol bootproto = NetworkBootProtocol.NONE;
if (cfg != null) {
String bootProtocol = (String) cfg.get("BOOTPROTO");
if (bootProtocol != null) {
if (bootProtocol.toLowerCase().equals("dhcp")) {
bootproto = NetworkBootProtocol.DHCP;
} else if (bootProtocol.toLowerCase().equals("none") || bootProtocol.toLowerCase().equals("static")) {
if (StringUtils.isNotEmpty((String) cfg.get("IPADDR"))) {
bootproto = NetworkBootProtocol.STATIC_IP;
}
}
} else if (StringUtils.isNotEmpty((String) cfg.get("IPADDR"))) {
bootproto = NetworkBootProtocol.STATIC_IP;
}
if (bootproto == NetworkBootProtocol.STATIC_IP) {
String gateway = (String) cfg.get(VdsProperties.GATEWAY);
if (StringUtils.isNotEmpty(gateway)) {
setGatewayIfNecessary(iface, host, gateway.toString());
}
}
}
iface.setBootProtocol(bootproto);
}
private static void addBondDeviceToHost(VDS vds, VdsNetworkInterface iface, Object[] interfaces) {
vds.getInterfaces().add(iface);
if (interfaces != null) {
for (Object name : interfaces) {
for (VdsNetworkInterface tempInterface : vds.getInterfaces()) {
if (tempInterface.getName().equals(name.toString())) {
tempInterface.setBondName(iface.getName());
break;
}
}
}
}
}
    /**
     * Store the gateway for either of these cases:
     * 1. any host network, in a cluster that supports multiple gateways
     * 2. management network, no matter the cluster compatibility version
     * 3. the active interface (could happen when there is no management network yet)
     * If gateway was provided for non-management network when multiple gateways aren't supported, its value should be ignored.
     *
     * @param iface
     *            the host network interface
     * @param host
     *            the host whose interfaces are being edited
     * @param gateway
     *            the gateway value to be set
     */
    private static void setGatewayIfNecessary(VdsNetworkInterface iface, VDS host, String gateway) {
        // Short-circuit evaluation: cluster-wide multiple-gateway support first, then the
        // management network check, then the host's active NIC.
        if (FeatureSupported.multipleGatewaysSupported(host.getVdsGroupCompatibilityVersion())
                || NetworkUtils.getEngineNetwork().equals(iface.getNetworkName())
                || iface.getName().equals(host.getActiveNic())) {
            iface.setGateway(gateway);
        }
    }
/**
* Creates a list of {@link VmGuestAgentInterface} from the {@link VdsProperties.GuestNetworkInterfaces}
*
* @param vmId
* the Vm's ID which contains the interfaces
*
* @param xmlRpcStruct
* the xml structure that describes the VM as reported by VDSM
* @return a list of {@link VmGuestAgentInterface} or null if no guest vNics were reported
*/
public static List<VmGuestAgentInterface> buildVmGuestAgentInterfacesData(Guid vmId, Map<String, Object> xmlRpcStruct) {
if (!xmlRpcStruct.containsKey(VdsProperties.VM_NETWORK_INTERFACES)) {
return null;
}
List<VmGuestAgentInterface> interfaces = new ArrayList<VmGuestAgentInterface>();
for (Object ifaceStruct : (Object[]) xmlRpcStruct.get(VdsProperties.VM_NETWORK_INTERFACES)) {
VmGuestAgentInterface nic = new VmGuestAgentInterface();
Map ifaceMap = (Map) ifaceStruct;
nic.setInterfaceName(AssignStringValue(ifaceMap, VdsProperties.VM_INTERFACE_NAME));
nic.setMacAddress(getMacAddress(ifaceMap));
nic.setIpv4Addresses(extracStringtList(ifaceMap, VdsProperties.VM_IPV4_ADDRESSES));
nic.setIpv6Addresses(extracStringtList(ifaceMap, VdsProperties.VM_IPV6_ADDRESSES));
nic.setVmId(vmId);
interfaces.add(nic);
}
return interfaces;
}
private static String getMacAddress(Map<String, Object> ifaceMap) {
String macAddress = AssignStringValue(ifaceMap, VdsProperties.VM_INTERFACE_MAC_ADDRESS);
return macAddress != null ? macAddress.replace('-', ':') : null;
}
    /**
     * Builds the host's NUMA topology from the reported structs — node indices, per-node
     * CPU lists, total memory and inter-node distances — and stores the auto-NUMA
     * balancing status.
     *
     * @param vds the host to update
     * @param xmlRpcStruct the XML/RPC struct reported by the host
     */
    private static void updateNumaNodesData(VDS vds, Map<String, Object> xmlRpcStruct) {
        if (xmlRpcStruct.containsKey(VdsProperties.AUTO_NUMA)) {
            vds.getDynamicData().setAutoNumaBalancing(AutoNumaBalanceStatus.forValue(
                    AssignIntValue(xmlRpcStruct, VdsProperties.AUTO_NUMA)));
        }
        if (xmlRpcStruct.containsKey(VdsProperties.NUMA_NODES)) {
            Map<String, Map<String, Object>> numaNodeMap =
                    (Map<String, Map<String, Object>>) xmlRpcStruct.get(VdsProperties.NUMA_NODES);
            // NOTE(review): if NUMA_NODE_DISTANCE is absent this is null and the
            // extractIntegerList call below throws NPE — presumably vdsm always reports
            // both structs together; confirm.
            Map<String, Object> numaNodeDistanceMap =
                    (Map<String, Object>) xmlRpcStruct.get(VdsProperties.NUMA_NODE_DISTANCE);
            List<VdsNumaNode> newNumaNodeList = new ArrayList<>(numaNodeMap.size());
            for (Map.Entry<String, Map<String, Object>> item : numaNodeMap.entrySet()) {
                int index = Integer.valueOf(item.getKey());
                Map<String, Object> itemMap = item.getValue();
                List<Integer> cpuIds = extractIntegerList(itemMap, VdsProperties.NUMA_NODE_CPU_LIST);
                long memTotal = AssignLongValue(itemMap, VdsProperties.NUMA_NODE_TOTAL_MEM);
                VdsNumaNode numaNode = new VdsNumaNode();
                numaNode.setIndex(index);
                if (cpuIds != null) {
                    numaNode.setCpuIds(cpuIds);
                }
                numaNode.setMemTotal(memTotal);
                newNumaNodeList.add(numaNode);
            }
            // Sort by node index so each node's distance list (ordered by index) lines up
            // positionally with the node list when building the distance map below.
            Collections.sort(newNumaNodeList, numaNodeComparator);
            for (VdsNumaNode vdsNumaNode : newNumaNodeList) {
                int index = vdsNumaNode.getIndex();
                List<Integer> distances = extractIntegerList(numaNodeDistanceMap, String.valueOf(index));
                Map<Integer, Integer> distanceMap = new HashMap<>(distances.size());
                for (int i = 0; i < distances.size(); i++) {
                    distanceMap.put(newNumaNodeList.get(i).getIndex(), distances.get(i));
                }
                VdsNumaNode newNumaNode = NumaUtils.getVdsNumaNodeByIndex(newNumaNodeList, index);
                if (newNumaNode != null) {
                    newNumaNode.setNumaNodeDistances(distanceMap);
                }
            }
            vds.getDynamicData().setNumaNodeList(newNumaNodeList);
            // NUMA is considered supported only when more than one node is present.
            vds.setNumaSupport(newNumaNodeList.size() > 1);
        }
    }
    /**
     * Builds the VM's per-vNUMA-node runtime info: for each virtual node, the indices of
     * the physical nodes it currently runs on.
     * <p>
     * NOTE(review): assumes VM_NUMA_NODES_RUNTIME_INFO is present — when absent,
     * vNodesRunInfo is null and entrySet() throws NPE; confirm callers guard for that.
     *
     * @param vm the VM statistics object to update
     * @param xmlRpcStruct the XML/RPC struct reported by the host
     */
    private static void updateVmNumaNodesRuntimeInfo(VmStatistics vm, Map<String, Object> xmlRpcStruct) {
        Map<String, Object[]> vNodesRunInfo = (Map<String, Object[]>)xmlRpcStruct.get(
                VdsProperties.VM_NUMA_NODES_RUNTIME_INFO);
        for (Map.Entry<String, Object[]> item : vNodesRunInfo.entrySet()) {
            VmNumaNode vNode = new VmNumaNode();
            vNode.setIndex(Integer.valueOf(item.getKey()));
            for (Object pNodeIndex : item.getValue()) {
                // Guid.Empty / false are placeholders: only the physical node index is
                // known at this point — TODO confirm downstream consumers expect this.
                vNode.getVdsNumaNodeList().add(new Pair<>(
                        Guid.Empty, new Pair<>(false, (Integer)pNodeIndex)));
            }
            vm.getvNumaNodeStatisticsList().add(vNode);
        }
    }
private static List<String> extracStringtList(Map<String, Object> xmlRpcStruct, String propertyName) {
if (!xmlRpcStruct.containsKey(propertyName)){
return null;
}
Object[] items = (Object[]) xmlRpcStruct.get(propertyName);
if (items.length == 0) {
return null;
}
List<String> list = new ArrayList<String>();
for (Object item : items) {
list.add((String) item);
}
return list;
}
private static List<Integer> extractIntegerList(Map<String, Object> xmlRpcStruct, String propertyName) {
if (!xmlRpcStruct.containsKey(propertyName)){
return null;
}
Object[] items = (Object[]) xmlRpcStruct.get(propertyName);
if (items.length == 0) {
return null;
}
List<Integer> list = new ArrayList<Integer>();
for (Object item : items) {
list.add((Integer) item);
}
return list;
}
}
|
backend/manager/modules/vdsbroker/src/main/java/org/ovirt/engine/core/vdsbroker/vdsbroker/VdsBrokerObjectsBuilder.java
|
package org.ovirt.engine.core.vdsbroker.vdsbroker;
import java.nio.file.Paths;
import java.text.DateFormat;
import java.text.DecimalFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Calendar;
import java.util.Collections;
import java.util.Comparator;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.concurrent.TimeUnit;
import org.apache.commons.lang.StringUtils;
import org.ovirt.engine.core.common.AuditLogType;
import org.ovirt.engine.core.common.FeatureSupported;
import org.ovirt.engine.core.common.businessentities.AutoNumaBalanceStatus;
import org.ovirt.engine.core.common.businessentities.CpuStatistics;
import org.ovirt.engine.core.common.businessentities.DiskImageDynamic;
import org.ovirt.engine.core.common.businessentities.Entities;
import org.ovirt.engine.core.common.businessentities.GraphicsInfo;
import org.ovirt.engine.core.common.businessentities.GraphicsType;
import org.ovirt.engine.core.common.businessentities.KdumpStatus;
import org.ovirt.engine.core.common.businessentities.LUNs;
import org.ovirt.engine.core.common.businessentities.NumaNodeStatistics;
import org.ovirt.engine.core.common.businessentities.SessionState;
import org.ovirt.engine.core.common.businessentities.StoragePool;
import org.ovirt.engine.core.common.businessentities.StorageType;
import org.ovirt.engine.core.common.businessentities.VDS;
import org.ovirt.engine.core.common.businessentities.VDSDomainsData;
import org.ovirt.engine.core.common.businessentities.VMStatus;
import org.ovirt.engine.core.common.businessentities.VdsNumaNode;
import org.ovirt.engine.core.common.businessentities.VdsTransparentHugePagesState;
import org.ovirt.engine.core.common.businessentities.VmBalloonInfo;
import org.ovirt.engine.core.common.businessentities.VmBlockJob;
import org.ovirt.engine.core.common.businessentities.VmBlockJobType;
import org.ovirt.engine.core.common.businessentities.VmDynamic;
import org.ovirt.engine.core.common.businessentities.VmExitReason;
import org.ovirt.engine.core.common.businessentities.VmExitStatus;
import org.ovirt.engine.core.common.businessentities.VmGuestAgentInterface;
import org.ovirt.engine.core.common.businessentities.VmJob;
import org.ovirt.engine.core.common.businessentities.VmJobState;
import org.ovirt.engine.core.common.businessentities.VmJobType;
import org.ovirt.engine.core.common.businessentities.VmNumaNode;
import org.ovirt.engine.core.common.businessentities.VmPauseStatus;
import org.ovirt.engine.core.common.businessentities.VmRngDevice;
import org.ovirt.engine.core.common.businessentities.VmStatistics;
import org.ovirt.engine.core.common.businessentities.network.Bond;
import org.ovirt.engine.core.common.businessentities.network.InterfaceStatus;
import org.ovirt.engine.core.common.businessentities.network.Network;
import org.ovirt.engine.core.common.businessentities.network.NetworkBootProtocol;
import org.ovirt.engine.core.common.businessentities.network.Nic;
import org.ovirt.engine.core.common.businessentities.network.VdsInterfaceType;
import org.ovirt.engine.core.common.businessentities.network.VdsNetworkInterface;
import org.ovirt.engine.core.common.businessentities.network.VdsNetworkStatistics;
import org.ovirt.engine.core.common.businessentities.network.Vlan;
import org.ovirt.engine.core.common.businessentities.network.VmNetworkInterface;
import org.ovirt.engine.core.common.config.Config;
import org.ovirt.engine.core.common.config.ConfigValues;
import org.ovirt.engine.core.common.utils.EnumUtils;
import org.ovirt.engine.core.common.utils.Pair;
import org.ovirt.engine.core.common.utils.SizeConverter;
import org.ovirt.engine.core.compat.Guid;
import org.ovirt.engine.core.compat.RpmVersion;
import org.ovirt.engine.core.dal.dbbroker.DbFacade;
import org.ovirt.engine.core.dal.dbbroker.auditloghandling.AuditLogDirector;
import org.ovirt.engine.core.dal.dbbroker.auditloghandling.AuditLogableBase;
import org.ovirt.engine.core.utils.NetworkUtils;
import org.ovirt.engine.core.utils.NumaUtils;
import org.ovirt.engine.core.utils.SerializationFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * This class encapsulates the knowledge of how to create objects from the VDS RPC protocol response.
 * Its methods receive an XmlRpcStruct and construct the following classes: VmDynamic, VdsDynamic and VdsStatic.
 */
@SuppressWarnings({ "unchecked", "rawtypes" })
public class VdsBrokerObjectsBuilder {
private static final Logger log = LoggerFactory.getLogger(VdsBrokerObjectsBuilder.class);
// First port of the VNC range; legacy "display" numbers are offsets added to it.
private static final int VNC_START_PORT = 5900;
private static final double NANO_SECONDS = 1000000000;

/**
 * Orders host NUMA nodes by their index. Implemented with Integer.compare so that
 * equal indexes compare as 0: the previous "a < b ? -1 : 1" form returned 1 for
 * equal inputs, violating the Comparator contract (compare(x, x) must be 0 and
 * sgn(compare(x, y)) == -sgn(compare(y, x))), which can misbehave under sorting.
 */
private static final Comparator<VdsNumaNode> numaNodeComparator = new Comparator<VdsNumaNode>() {
    @Override
    public int compare(VdsNumaNode arg0, VdsNumaNode arg1) {
        return Integer.compare(arg0.getIndex(), arg1.getIndex());
    }
};
/**
 * Builds a minimal VmDynamic (id and status only) from one entry of a VM list response.
 */
public static VmDynamic buildVMDynamicDataFromList(Map<String, Object> xmlRpcStruct) {
    VmDynamic dynamic = new VmDynamic();
    if (xmlRpcStruct.containsKey(VdsProperties.vm_guid)) {
        String rawGuid = (String) xmlRpcStruct.get(VdsProperties.vm_guid);
        dynamic.setId(new Guid(rawGuid));
    }
    if (xmlRpcStruct.containsKey(VdsProperties.status)) {
        String rawStatus = (String) xmlRpcStruct.get(VdsProperties.status);
        dynamic.setStatus(convertToVmStatus(rawStatus));
    }
    return dynamic;
}
/**
 * Builds a fully populated VmDynamic from a single-VM VDSM response struct.
 */
public static VmDynamic buildVMDynamicData(Map<String, Object> xmlRpcStruct) {
    VmDynamic result = new VmDynamic();
    updateVMDynamicData(result, xmlRpcStruct);
    return result;
}
/**
 * Builds a StoragePool from a VDSM struct: locality flag (derived from the storage
 * type), name, and master domain version when present.
 */
public static StoragePool buildStoragePool(Map<String, Object> xmlRpcStruct) {
    StoragePool pool = new StoragePool();
    if (xmlRpcStruct.containsKey("type")) {
        StorageType storageType = StorageType.valueOf(xmlRpcStruct.get("type").toString());
        pool.setIsLocal(storageType.isLocal());
    }
    pool.setName(AssignStringValue(xmlRpcStruct, "name"));
    Integer masterVersion = AssignIntValue(xmlRpcStruct, "master_ver");
    if (masterVersion != null) {
        pool.setMasterDomainVersion(masterVersion);
    }
    return pool;
}
/**
 * Builds a fully populated VmStatistics from a single-VM VDSM statistics struct.
 */
public static VmStatistics buildVMStatisticsData(Map<String, Object> xmlRpcStruct) {
    VmStatistics result = new VmStatistics();
    updateVMStatisticsData(result, xmlRpcStruct);
    return result;
}
/**
 * Builds a map of LUN id to LUNs entity from the VM disks section of the struct.
 * Disks without a LUN GUID are skipped; when a true size is reported it is converted
 * from bytes to GB and stored as the device size. Returns an empty map when the
 * struct has no disks section.
 */
public static Map<String, LUNs> buildVmLunDisksData(Map<String, Object> xmlRpcStruct) {
    Map<String, Object> disks = (Map<String, Object>) xmlRpcStruct.get(VdsProperties.vm_disks);
    Map<String, LUNs> lunsByGuid = new HashMap<>();
    if (disks == null) {
        return lunsByGuid;
    }
    for (Object rawDisk : disks.values()) {
        Map<String, Object> disk = (Map<String, Object>) rawDisk;
        String lunGuid = AssignStringValue(disk, VdsProperties.lun_guid);
        if (StringUtils.isEmpty(lunGuid)) {
            continue;
        }
        LUNs lun = new LUNs();
        lun.setLUN_id(lunGuid);
        if (disk.containsKey(VdsProperties.disk_true_size)) {
            long sizeInBytes = AssignLongValue(disk, VdsProperties.disk_true_size);
            int sizeInGB = SizeConverter.convert(
                    sizeInBytes, SizeConverter.SizeUnit.BYTES, SizeConverter.SizeUnit.GB).intValue();
            lun.setDeviceSize(sizeInGB);
        }
        lunsByGuid.put(lunGuid, lun);
    }
    return lunsByGuid;
}
/**
 * Populates {@code vm} with the dynamic runtime data VDSM reported for a single VM:
 * id, session, feature flags, status, graphics, guest-agent data, exit info,
 * pause/watchdog state, current CD and guest CPU count. Every field is optional —
 * a value is only copied when its key is present in the struct.
 *
 * @param vm the VmDynamic instance to update in place
 * @param xmlRpcStruct the raw VM data map returned by VDSM
 */
public static void updateVMDynamicData(VmDynamic vm, Map<String, Object> xmlRpcStruct) {
    if (xmlRpcStruct.containsKey(VdsProperties.vm_guid)) {
        vm.setId(new Guid((String) xmlRpcStruct.get(VdsProperties.vm_guid)));
    }
    if (xmlRpcStruct.containsKey(VdsProperties.session)) {
        String session = (String) xmlRpcStruct.get(VdsProperties.session);
        try {
            vm.setSession(SessionState.valueOf(session));
        } catch (Exception e) {
            // An unknown session value is logged and skipped rather than failing the whole update.
            log.error("Illegal vm session '{}'.", session);
        }
    }
    if (xmlRpcStruct.containsKey(VdsProperties.kvmEnable)) {
        vm.setKvmEnable(Boolean.parseBoolean((String) xmlRpcStruct.get(VdsProperties.kvmEnable)));
    }
    if (xmlRpcStruct.containsKey(VdsProperties.acpiEnable)) {
        vm.setAcpiEnable(Boolean.parseBoolean((String) xmlRpcStruct.get(VdsProperties.acpiEnable)));
    }
    if (xmlRpcStruct.containsKey(VdsProperties.win2kHackEnable)) {
        vm.setWin2kHackEnable(Boolean.parseBoolean((String) xmlRpcStruct.get(VdsProperties.win2kHackEnable)));
    }
    if (xmlRpcStruct.containsKey(VdsProperties.status)) {
        vm.setStatus(convertToVmStatus((String) xmlRpcStruct.get(VdsProperties.status)));
    }
    // Prefer the "displayInfo" structure; fall back to legacy vm.conf display fields.
    boolean hasGraphicsInfo = updateGraphicsInfo(vm, xmlRpcStruct);
    if (!hasGraphicsInfo) {
        updateGraphicsInfoFromConf(vm, xmlRpcStruct);
    }
    if (xmlRpcStruct.containsKey((VdsProperties.utc_diff))) {
        String utc_diff = xmlRpcStruct.get(VdsProperties.utc_diff).toString();
        // Normalize away an explicit '+' sign before parsing.
        if (utc_diff.startsWith("+")) {
            utc_diff = utc_diff.substring(1);
        }
        try {
            vm.setUtcDiff(Integer.parseInt(utc_diff));
        } catch (NumberFormatException e) {
            log.error("Illegal vm offset (utc_diff) '{}'.", utc_diff);
        }
    }
    if (xmlRpcStruct.containsKey(VdsProperties.hash)) {
        String hash = (String) xmlRpcStruct.get(VdsProperties.hash);
        try {
            vm.setHash(hash);
        } catch (Exception e) {
            log.error("Illegal vm hash '{}'.", hash);
        }
    }
    /**
     * vm disks
     */
    if (xmlRpcStruct.containsKey(VdsProperties.vm_disks)) {
        initDisks(xmlRpcStruct, vm);
    }
    // ------------- vm internal agent data
    vm.setGuestLastLoginTime(AssignDateTImeFromEpoch(xmlRpcStruct, VdsProperties.guest_last_login_time));
    vm.setVmHost(AssignStringValue(xmlRpcStruct, VdsProperties.vm_host));
    String guestUserName = AssignStringValue(xmlRpcStruct, VdsProperties.guest_cur_user_name);
    vm.setGuestCurrentUserName(guestUserName);
    initAppsList(xmlRpcStruct, vm);
    vm.setGuestOs(AssignStringValue(xmlRpcStruct, VdsProperties.guest_os));
    if (xmlRpcStruct.containsKey(VdsProperties.VM_FQDN)) {
        vm.setVmFQDN(AssignStringValue(xmlRpcStruct, VdsProperties.VM_FQDN));
        String fqdn = vm.getVmFQDN().trim();
        // A localhost FQDN carries no information - treat it as unknown.
        if ("localhost".equalsIgnoreCase(fqdn) || "localhost.localdomain".equalsIgnoreCase(fqdn)) {
            vm.setVmFQDN(null);
        }
        else {
            vm.setVmFQDN(fqdn);
        }
    }
    vm.setVmIp(AssignStringValue(xmlRpcStruct, VdsProperties.VM_IP));
    if (vm.getVmIp() != null) {
        // Loopback addresses are discarded; other addresses are trimmed.
        if (vm.getVmIp().startsWith("127.0.")) {
            vm.setVmIp(null);
        } else {
            vm.setVmIp(vm.getVmIp().trim());
        }
    }
    if (xmlRpcStruct.containsKey(VdsProperties.exit_code)) {
        String exitCodeStr = xmlRpcStruct.get(VdsProperties.exit_code).toString();
        vm.setExitStatus(VmExitStatus.forValue(Integer.parseInt(exitCodeStr)));
    }
    if (xmlRpcStruct.containsKey(VdsProperties.exit_message)) {
        String exitMsg = (String) xmlRpcStruct.get(VdsProperties.exit_message);
        vm.setExitMessage(exitMsg);
    }
    if (xmlRpcStruct.containsKey(VdsProperties.exit_reason)) {
        String exitReasonStr = xmlRpcStruct.get(VdsProperties.exit_reason).toString();
        vm.setExitReason(VmExitReason.forValue(Integer.parseInt(exitReasonStr)));
    } else {
        vm.setExitReason(VmExitReason.Unknown);
    }
    // if monitorResponse returns negative it means its erroneous
    if (xmlRpcStruct.containsKey(VdsProperties.monitorResponse)) {
        int response = Integer.parseInt(xmlRpcStruct.get(VdsProperties.monitorResponse).toString());
        if (response < 0) {
            vm.setStatus(VMStatus.NotResponding);
        }
    }
    if (xmlRpcStruct.containsKey(VdsProperties.clientIp)) {
        vm.setClientIp(xmlRpcStruct.get(VdsProperties.clientIp).toString());
    }
    // Pause status defaults to NONE when absent or unparsable.
    VmPauseStatus pauseStatus = VmPauseStatus.NONE;
    if (xmlRpcStruct.containsKey(VdsProperties.pauseCode)) {
        String pauseCodeStr = (String) xmlRpcStruct.get(VdsProperties.pauseCode);
        try {
            pauseStatus = VmPauseStatus.valueOf(pauseCodeStr);
        } catch (IllegalArgumentException ex) {
            log.error("Error in parsing vm pause status. Setting value to NONE");
            pauseStatus = VmPauseStatus.NONE;
        }
    }
    vm.setPauseStatus(pauseStatus);
    if (xmlRpcStruct.containsKey(VdsProperties.watchdogEvent)) {
        Map<String, Object> watchdogStruct = (Map<String, Object>) xmlRpcStruct.get(VdsProperties.watchdogEvent);
        double time = Double.parseDouble(watchdogStruct.get(VdsProperties.time).toString());
        // vdsm may not send the action http://gerrit.ovirt.org/14134
        String action =
                watchdogStruct.containsKey(VdsProperties.action) ? watchdogStruct.get(VdsProperties.action)
                        .toString() : null;
        vm.setLastWatchdogEvent((long) time);
        vm.setLastWatchdogAction(action);
    }
    if (xmlRpcStruct.containsKey(VdsProperties.CDRom)) {
        // Only the file name of the mounted ISO path is kept.
        String isoName = Paths.get((String) xmlRpcStruct.get(VdsProperties.CDRom)).getFileName().toString();
        vm.setCurrentCd(isoName);
    }
    if (xmlRpcStruct.containsKey(VdsProperties.GUEST_CPU_COUNT)) {
        vm.setGuestCpuCount(AssignIntValue(xmlRpcStruct, VdsProperties.GUEST_CPU_COUNT));
    }
}
/**
 * Fills the VM's graphics infos from the "displayInfo" VDSM structure, if present.
 * A graphics device is registered only when at least one of its ports (plain or TLS)
 * was reported.
 *
 * @param vm VmDynamic to update
 * @param xmlRpcStruct data from VDSM
 * @return true when displayInfo existed in the struct, false otherwise
 */
private static boolean updateGraphicsInfo(VmDynamic vm, Map<String, Object> xmlRpcStruct) {
    Object rawDisplayInfo = xmlRpcStruct.get(VdsProperties.displayInfo);
    if (rawDisplayInfo == null) {
        return false;
    }
    for (Object rawEntry : (Object[]) rawDisplayInfo) {
        Map<String, String> entry = (Map<String, String>) rawEntry;
        GraphicsType type = GraphicsType.fromString(entry.get(VdsProperties.type));
        GraphicsInfo info = new GraphicsInfo();
        info.setIp(entry.get(VdsProperties.ipAddress))
                .setPort(parseIntegerOrNull(entry.get(VdsProperties.port)))
                .setTlsPort(parseIntegerOrNull(entry.get(VdsProperties.tlsPort)));
        boolean anyPortKnown = info.getPort() != null || info.getTlsPort() != null;
        if (anyPortKnown) {
            vm.getGraphicsInfos().put(type, info);
        }
    }
    return true;
}
/**
 * Updates graphics runtime information according to the vm.conf VDSM structure. Used with
 * legacy VDSMs that have no notion of a graphics device.
 *
 * @param vm VmDynamic to update
 * @param xmlRpcStruct data from VDSM
 */
private static void updateGraphicsInfoFromConf(VmDynamic vm, Map<String, Object> xmlRpcStruct) {
    GraphicsType vmGraphicsType = parseGraphicsType(xmlRpcStruct);
    if (vmGraphicsType == null) {
        log.warn("Can't set graphics data from XML.");
        return;
    }
    GraphicsInfo graphicsInfo = vm.getGraphicsInfos().get(vmGraphicsType);
    if (graphicsInfo == null) {
        return;
    }
    if (xmlRpcStruct.containsKey(VdsProperties.display_port)) {
        try {
            graphicsInfo.setPort(Integer.parseInt(xmlRpcStruct.get(VdsProperties.display_port).toString()));
        } catch (NumberFormatException e) {
            // SLF4J placeholder is "{}"; the previous "{0}" (MessageFormat style) was never substituted.
            log.error("vm display_port value illegal : {}", xmlRpcStruct.get(VdsProperties.display_port));
        }
    } else if (xmlRpcStruct.containsKey(VdsProperties.display)) {
        try {
            // Legacy "display" holds a VNC display number; convert it to an absolute port.
            graphicsInfo.setPort(
                    VNC_START_PORT + Integer.parseInt(xmlRpcStruct.get(VdsProperties.display).toString()));
        } catch (NumberFormatException e) {
            log.error("vm display value illegal : {}", xmlRpcStruct.get(VdsProperties.display));
        }
    }
    if (xmlRpcStruct.containsKey(VdsProperties.display_secure_port)) {
        try {
            graphicsInfo.setTlsPort(
                    Integer.parseInt(xmlRpcStruct.get(VdsProperties.display_secure_port).toString()));
        } catch (NumberFormatException e) {
            log.error("vm display_secure_port value illegal : {}",
                    xmlRpcStruct.get(VdsProperties.display_secure_port));
        }
    }
    if (xmlRpcStruct.containsKey(VdsProperties.displayIp)) {
        graphicsInfo.setIp((String) xmlRpcStruct.get(VdsProperties.displayIp));
    }
}
/**
 * Retrieves the graphics type from the legacy "displayType" struct entry.
 *
 * @param xmlRpcStruct data from VDSM
 * @return the graphics type derived from the struct, or {@code null} when the entry
 *         is missing or holds an unrecognized value
 */
private static GraphicsType parseGraphicsType(Map<String, Object> xmlRpcStruct) {
    Object displayType = xmlRpcStruct.get(VdsProperties.displayType);
    if (displayType == null) {
        // Entry absent - previously signalled through a swallowed NullPointerException;
        // an explicit check keeps the same null-on-error contract without using
        // exceptions for control flow.
        return null;
    }
    switch (displayType.toString()) {
    case VdsProperties.VNC:
        return GraphicsType.VNC;
    case VdsProperties.QXL:
        return GraphicsType.SPICE;
    default:
        return null;
    }
}
/**
 * Parses {@code s} as a decimal integer, returning {@code null} instead of throwing
 * when the input is null or not a valid number.
 */
private static Integer parseIntegerOrNull(String s) {
    Integer parsed;
    try {
        parsed = Integer.valueOf(s);
    } catch (NumberFormatException ignored) {
        // Integer.valueOf(null) also throws NumberFormatException, so this covers both cases.
        parsed = null;
    }
    return parsed;
}
/**
 * Populates {@code vm} with the statistics VDSM reported for a single VM: elapsed
 * time, per-NIC network counters, disk usage, CPU/memory usage, balloon state,
 * migration progress, jobs and NUMA runtime info.
 *
 * @param vm the VmStatistics instance to update in place
 * @param xmlRpcStruct the raw VM statistics map returned by VDSM
 */
public static void updateVMStatisticsData(VmStatistics vm, Map<String, Object> xmlRpcStruct) {
    if (xmlRpcStruct.containsKey(VdsProperties.vm_guid)) {
        vm.setId(new Guid((String) xmlRpcStruct.get(VdsProperties.vm_guid)));
    }
    vm.setelapsed_time(AssignDoubleValue(xmlRpcStruct, VdsProperties.elapsed_time));
    // ------------- vm network statistics -----------------------
    if (xmlRpcStruct.containsKey(VdsProperties.VM_NETWORK)) {
        Map networkStruct = (Map) xmlRpcStruct.get(VdsProperties.VM_NETWORK);
        vm.setInterfaceStatistics(new ArrayList<VmNetworkInterface>());
        for (Object tempNic : networkStruct.values()) {
            Map nic = (Map) tempNic;
            VmNetworkInterface stats = new VmNetworkInterface();
            vm.getInterfaceStatistics().add(stats);
            // Name and MAC are copied only when reported as strings; otherwise left null.
            if (nic.containsKey(VdsProperties.VM_INTERFACE_NAME)) {
                stats.setName((String) ((nic.get(VdsProperties.VM_INTERFACE_NAME) instanceof String) ? nic
                        .get(VdsProperties.VM_INTERFACE_NAME) : null));
            }
            Double rx_rate = AssignDoubleValue(nic, VdsProperties.rx_rate);
            Double rx_dropped = AssignDoubleValue(nic, VdsProperties.rx_dropped);
            Double tx_rate = AssignDoubleValue(nic, VdsProperties.tx_rate);
            Double tx_dropped = AssignDoubleValue(nic, VdsProperties.tx_dropped);
            // Missing counters default to 0 rather than null.
            stats.getStatistics().setReceiveRate(rx_rate != null ? rx_rate : 0);
            stats.getStatistics().setReceiveDropRate(rx_dropped != null ? rx_dropped : 0);
            stats.getStatistics().setTransmitRate(tx_rate != null ? tx_rate : 0);
            stats.getStatistics().setTransmitDropRate(tx_dropped != null ? tx_dropped : 0);
            stats.setMacAddress((String) ((nic.get(VdsProperties.MAC_ADDR) instanceof String) ? nic
                    .get(VdsProperties.MAC_ADDR) : null));
            stats.setSpeed(AssignIntValue(nic, VdsProperties.INTERFACE_SPEED));
        }
    }
    if (xmlRpcStruct.containsKey(VdsProperties.VM_DISKS_USAGE)) {
        initDisksUsage(xmlRpcStruct, vm);
    }
    // ------------- vm cpu statistics -----------------------
    vm.setcpu_sys(AssignDoubleValue(xmlRpcStruct, VdsProperties.cpu_sys));
    vm.setcpu_user(AssignDoubleValue(xmlRpcStruct, VdsProperties.cpu_user));
    // ------------- vm memory statistics -----------------------
    vm.setusage_mem_percent(AssignIntValue(xmlRpcStruct, VdsProperties.vm_usage_mem_percent));
    vm.setVmBalloonInfo(getBalloonInfo(xmlRpcStruct));
    // ------------- vm migration statistics -----------------------
    Integer migrationProgress = AssignIntValue(xmlRpcStruct, VdsProperties.vm_migration_progress_percent);
    vm.setMigrationProgressPercent(migrationProgress != null ? migrationProgress : 0);
    // ------------- vm jobs -------------
    vm.setVmJobs(getVmJobs(vm.getId(), xmlRpcStruct));
    // ------------- vm numa nodes runtime info -------------------------
    if (xmlRpcStruct.containsKey(VdsProperties.VM_NUMA_NODES_RUNTIME_INFO)) {
        updateVmNumaNodesRuntimeInfo(vm, xmlRpcStruct);
    }
}
/**
 * Builds VmBalloonInfo from the balloon section of the struct. The device is marked
 * enabled only when all four balloon properties were reported (available from 3.3);
 * with no balloon section at all it is explicitly marked disabled.
 */
private static VmBalloonInfo getBalloonInfo(Map<String, Object> xmlRpcStruct) {
    Map<String, Object> balloonInfo = (Map<String, Object>) xmlRpcStruct.get(VdsProperties.vm_balloonInfo);
    VmBalloonInfo result = new VmBalloonInfo();
    if (balloonInfo == null || balloonInfo.isEmpty()) {
        result.setBalloonDeviceEnabled(false);
        return result;
    }
    result.setCurrentMemory(AssignLongValue(balloonInfo, VdsProperties.vm_balloon_cur));
    result.setBalloonMaxMemory(AssignLongValue(balloonInfo, VdsProperties.vm_balloon_max));
    result.setBalloonTargetMemory(AssignLongValue(balloonInfo, VdsProperties.vm_balloon_target));
    result.setBalloonMinMemory(AssignLongValue(balloonInfo, VdsProperties.vm_balloon_min));
    // only if all 4 properties are found the balloon is considered enabled (available from 3.3)
    if (balloonInfo.size() >= 4) {
        result.setBalloonDeviceEnabled(true);
    }
    return result;
}
/**
 * Builds the list of VmJob entities from the struct's jobs section, or returns
 * {@code null} when no jobs were reported.
 */
private static List<VmJob> getVmJobs(Guid vmId, Map<String, Object> xmlRpcStruct) {
    if (!xmlRpcStruct.containsKey(VdsProperties.vmJobs)) {
        return null;
    }
    Map<String, Object> rawJobs = (Map<String, Object>) xmlRpcStruct.get(VdsProperties.vmJobs);
    List<VmJob> jobs = new ArrayList<VmJob>();
    for (Object rawJob : rawJobs.values()) {
        jobs.add(buildVmJobData(vmId, (Map<String, Object>) rawJob));
    }
    return jobs;
}
/**
 * Builds a single VmJob from its struct. Block jobs get the block-specific fields
 * (cursor, bandwidth, image group); any unrecognized job type falls back to UNKNOWN
 * and a plain VmJob.
 */
private static VmJob buildVmJobData(Guid vmId, Map<String, Object> xmlRpcStruct) {
    VmJobType jobType = VmJobType.getByName(AssignStringValue(xmlRpcStruct, VdsProperties.vmJobType));
    if (jobType == null) {
        jobType = VmJobType.UNKNOWN;
    }
    VmJob job;
    if (jobType == VmJobType.BLOCK) {
        VmBlockJob blockJob = new VmBlockJob();
        blockJob.setBlockJobType(
                VmBlockJobType.getByName(AssignStringValue(xmlRpcStruct, VdsProperties.vmBlockJobType)));
        blockJob.setCursorCur(AssignLongValue(xmlRpcStruct, VdsProperties.vmJobCursorCur));
        blockJob.setCursorEnd(AssignLongValue(xmlRpcStruct, VdsProperties.vmJobCursorEnd));
        blockJob.setBandwidth(AssignLongValue(xmlRpcStruct, VdsProperties.vmJobBandwidth));
        blockJob.setImageGroupId(new Guid(AssignStringValue(xmlRpcStruct, VdsProperties.vmJobImageUUID)));
        job = blockJob;
    } else {
        job = new VmJob();
    }
    job.setVmId(vmId);
    job.setId(new Guid(AssignStringValue(xmlRpcStruct, VdsProperties.vmJobId)));
    job.setJobState(VmJobState.NORMAL);
    job.setJobType(jobType);
    return job;
}
/**
 * Populates host (VDS) dynamic data - capabilities, CPU/memory topology, package
 * versions, HBA inventory and feature support flags - from the VDSM capabilities
 * response.
 *
 * @param vds the host to update in place
 * @param xmlRpcStruct the raw capabilities map returned by VDSM
 */
public static void updateVDSDynamicData(VDS vds, Map<String, Object> xmlRpcStruct) {
    vds.setSupportedClusterLevels(AssignStringValueFromArray(xmlRpcStruct, VdsProperties.supported_cluster_levels));
    updateNetworkData(vds, xmlRpcStruct);
    updateNumaNodesData(vds, xmlRpcStruct);
    // ---- CPU / memory topology ----
    vds.setCpuThreads(AssignIntValue(xmlRpcStruct, VdsProperties.cpuThreads));
    vds.setCpuCores(AssignIntValue(xmlRpcStruct, VdsProperties.cpu_cores));
    vds.setCpuSockets(AssignIntValue(xmlRpcStruct, VdsProperties.cpu_sockets));
    vds.setCpuModel(AssignStringValue(xmlRpcStruct, VdsProperties.cpu_model));
    vds.setOnlineCpus(AssignStringValue(xmlRpcStruct, VdsProperties.online_cpus));
    vds.setCpuSpeedMh(AssignDoubleValue(xmlRpcStruct, VdsProperties.cpu_speed_mh));
    vds.setPhysicalMemMb(AssignIntValue(xmlRpcStruct, VdsProperties.physical_mem_mb));
    vds.setKvmEnabled(AssignBoolValue(xmlRpcStruct, VdsProperties.kvm_enabled));
    vds.setReservedMem(AssignIntValue(xmlRpcStruct, VdsProperties.reservedMem));
    // Missing guest overhead defaults to 0.
    Integer guestOverhead = AssignIntValue(xmlRpcStruct, VdsProperties.guestOverhead);
    vds.setGuestOverhead(guestOverhead != null ? guestOverhead : 0);
    vds.setCpuFlags(AssignStringValue(xmlRpcStruct, VdsProperties.cpu_flags));
    UpdatePackagesVersions(vds, xmlRpcStruct);
    vds.setSupportedEngines(AssignStringValueFromArray(xmlRpcStruct, VdsProperties.supported_engines));
    vds.setIScsiInitiatorName(AssignStringValue(xmlRpcStruct, VdsProperties.iSCSIInitiatorName));
    vds.setSupportedEmulatedMachines(AssignStringValueFromArray(xmlRpcStruct, VdsProperties.emulatedMachines));
    setRngSupportedSourcesToVds(vds, xmlRpcStruct);
    String hooksStr = ""; // default value if hooks is not in the xml rpc struct
    if (xmlRpcStruct.containsKey(VdsProperties.hooks)) {
        hooksStr = xmlRpcStruct.get(VdsProperties.hooks).toString();
    }
    vds.setHooksStr(hooksStr);
    // parse out the HBAs available in this host
    // NOTE(review): the HBAInventory map is dereferenced without a null check, so this
    // NPEs if VDSM omits the key - confirm the key is always present in the response.
    Map<String, List<Map<String, String>>> hbas = new HashMap<>();
    for (Map.Entry<String, Object[]> el: ((Map<String, Object[]>)xmlRpcStruct.get(VdsProperties.HBAInventory)).entrySet()) {
        List<Map<String, String>> devicesList = new ArrayList<Map<String, String>>();
        for (Object device: el.getValue()) {
            devicesList.add((Map<String, String>)device);
        }
        hbas.put(el.getKey(), devicesList);
    }
    vds.setHBAs(hbas);
    vds.setBootTime(AssignLongValue(xmlRpcStruct, VdsProperties.bootTime));
    vds.setKdumpStatus(KdumpStatus.valueOfNumber(AssignIntValue(xmlRpcStruct, VdsProperties.KDUMP_STATUS)));
    Map<String, Object> selinux = (Map<String, Object>) xmlRpcStruct.get(VdsProperties.selinux);
    if (selinux != null) {
        vds.setSELinuxEnforceMode(AssignIntValue(selinux, VdsProperties.selinux_mode));
    } else {
        vds.setSELinuxEnforceMode(null);
    }
    if (xmlRpcStruct.containsKey(VdsProperties.liveSnapshotSupport)) {
        vds.setLiveSnapshotSupport(AssignBoolValue(xmlRpcStruct, VdsProperties.liveSnapshotSupport));
    } else {
        vds.setLiveSnapshotSupport(true); // for backward compatibility's sake
    }
    if (xmlRpcStruct.containsKey(VdsProperties.liveMergeSupport)) {
        vds.setLiveMergeSupport(AssignBoolValue(xmlRpcStruct, VdsProperties.liveMergeSupport));
    } else {
        vds.setLiveMergeSupport(false);
    }
}
/**
 * Replaces the host's supported RNG sources with those reported in the struct.
 * The previously stored set is always cleared, even when nothing was reported.
 */
private static void setRngSupportedSourcesToVds(VDS vds, Map<String, Object> xmlRpcStruct) {
    vds.getSupportedRngSources().clear();
    String csvSources = AssignStringValueFromArray(xmlRpcStruct, VdsProperties.rngSources);
    if (csvSources == null) {
        return;
    }
    vds.getSupportedRngSources().addAll(VmRngDevice.csvToSourcesSet(csvSources.toUpperCase()));
}
/**
 * Compares the host's reported wall-clock time with the engine's and raises a
 * VDS_TIME_DRIFT_ALERT audit event when the absolute difference exceeds the
 * configured threshold. Controlled by the EnableHostTimeDrift config value.
 */
public static void checkTimeDrift(VDS vds, Map<String, Object> xmlRpcStruct) {
    Boolean driftCheckEnabled = Config.getValue(ConfigValues.EnableHostTimeDrift);
    if (!driftCheckEnabled) {
        return;
    }
    Integer maxDriftSeconds = Config.getValue(ConfigValues.HostTimeDriftInSec);
    Date hostDate = AssignDatetimeValue(xmlRpcStruct, VdsProperties.hostDatetime);
    if (hostDate == null) {
        log.error("Time Drift validation: failed to get Host or Engine time.");
        return;
    }
    Long actualDriftSeconds =
            TimeUnit.MILLISECONDS.toSeconds(Math.abs(hostDate.getTime() - System.currentTimeMillis()));
    if (actualDriftSeconds > maxDriftSeconds) {
        AuditLogableBase logable = new AuditLogableBase(vds.getId());
        logable.addCustomValue("Actual", actualDriftSeconds.toString());
        logable.addCustomValue("Max", maxDriftSeconds.toString());
        AuditLogDirector.log(logable, AuditLogType.VDS_TIME_DRIFT_ALERT);
    }
}
/**
 * Serializes the reported per-disk usage array to unformatted JSON and stores it
 * on the VM statistics. A missing usage array leaves the statistics untouched.
 */
private static void initDisksUsage(Map<String, Object> vmStruct, VmStatistics vm) {
    Object[] rawDisksUsage = (Object[]) vmStruct.get(VdsProperties.VM_DISKS_USAGE);
    if (rawDisksUsage == null) {
        return;
    }
    ArrayList<Object> disksUsage = new ArrayList<Object>(Arrays.asList(rawDisksUsage));
    vm.setDisksUsage(SerializationFactory.getSerializer().serializeUnformattedJson(disksUsage));
}
/**
 * Fills the host's software-version fields from the VDSM response. Supports both the
 * legacy flat "packages" array and the newer "packages2" dictionary layout.
 *
 * @param vds the host to update in place
 * @param xmlRpcStruct the raw capabilities map returned by VDSM
 */
private static void UpdatePackagesVersions(VDS vds, Map<String, Object> xmlRpcStruct) {
    vds.setVersionName(AssignStringValue(xmlRpcStruct, VdsProperties.version_name));
    vds.setSoftwareVersion(AssignStringValue(xmlRpcStruct, VdsProperties.software_version));
    vds.setBuildName(AssignStringValue(xmlRpcStruct, VdsProperties.build_name));
    if (xmlRpcStruct.containsKey(VdsProperties.host_os)) {
        vds.setHostOs(GetPackageVersionFormated(
                (Map<String, Object>) xmlRpcStruct.get(VdsProperties.host_os), true));
    }
    if (xmlRpcStruct.containsKey(VdsProperties.packages)) {
        // packages is an array of xmlRpcStruct (that each is a name, ver,
        // release.. of a package)
        for (Object hostPackageMap : (Object[]) xmlRpcStruct.get(VdsProperties.packages)) {
            Map<String, Object> hostPackage = (Map<String, Object>) hostPackageMap;
            String packageName = AssignStringValue(hostPackage, VdsProperties.package_name);
            // Only a known subset of packages is reflected on the host entity.
            if (VdsProperties.kvmPackageName.equals(packageName)) {
                vds.setKvmVersion(GetPackageVersionFormated(hostPackage, false));
            } else if (VdsProperties.spicePackageName.equals(packageName)) {
                vds.setSpiceVersion(GetPackageVersionFormated(hostPackage, false));
            } else if (VdsProperties.kernelPackageName.equals(packageName)) {
                vds.setKernelVersion(GetPackageVersionFormated(hostPackage, false));
            }
        }
    } else if (xmlRpcStruct.containsKey(VdsProperties.packages2)) {
        // packages2 (VDSM >= 2.3): a dictionary keyed by package name.
        Map<String, Object> packages = (Map<String, Object>) xmlRpcStruct.get(VdsProperties.packages2);
        if (packages.containsKey(VdsProperties.vdsmPackageName)) {
            Map<String, Object> vdsm = (Map<String, Object>) packages.get(VdsProperties.vdsmPackageName);
            vds.setVersion(getPackageRpmVersion("vdsm", vdsm));
        }
        if (packages.containsKey(VdsProperties.qemuKvmPackageName)) {
            Map<String, Object> kvm = (Map<String, Object>) packages.get(VdsProperties.qemuKvmPackageName);
            vds.setKvmVersion(getPackageVersionFormated2(kvm));
        }
        if (packages.containsKey(VdsProperties.libvirtPackageName)) {
            Map<String, Object> libvirt = (Map<String, Object>) packages.get(VdsProperties.libvirtPackageName);
            vds.setLibvirtVersion(getPackageRpmVersion("libvirt", libvirt));
        }
        if (packages.containsKey(VdsProperties.spiceServerPackageName)) {
            Map<String, Object> spice = (Map<String, Object>) packages.get(VdsProperties.spiceServerPackageName);
            vds.setSpiceVersion(getPackageVersionFormated2(spice));
        }
        if (packages.containsKey(VdsProperties.kernelPackageName)) {
            Map<String, Object> kernel = (Map<String, Object>) packages.get(VdsProperties.kernelPackageName);
            vds.setKernelVersion(getPackageVersionFormated2(kernel));
        }
        if (packages.containsKey(VdsProperties.GLUSTER_PACKAGE_NAME)) {
            Map<String, Object> gluster = (Map<String, Object>) packages.get(VdsProperties.GLUSTER_PACKAGE_NAME);
            vds.setGlusterVersion(getPackageRpmVersion("glusterfs", gluster));
        }
    }
}
/**
 * Formats "version - release" from a packages2-style dictionary entry.
 * <p>
 * Since VDSM 2.3 package information arrives as a dictionary rather than a flat list,
 * and the package names (spice, kernel, qemu, libvirt) are the same for VDSM and the
 * engine. (VDSM used to report rpm package names, so the RHEL6 rename broke the
 * interface.)
 */
private static String getPackageVersionFormated2(Map<String, Object> hostPackage) {
    Object rawVersion = hostPackage.get(VdsProperties.package_version);
    Object rawRelease = hostPackage.get(VdsProperties.package_release);
    String version = rawVersion != null ? (String) rawVersion : null;
    String release = rawRelease != null ? (String) rawRelease : null;
    StringBuilder formatted = new StringBuilder();
    if (!StringUtils.isEmpty(version)) {
        formatted.append(version);
    }
    if (!StringUtils.isEmpty(release)) {
        if (formatted.length() > 0) {
            formatted.append(" - ").append(release);
        } else {
            formatted.append(release);
        }
    }
    return formatted.toString();
}
/**
 * Assembles an rpm-style "name-version-release" string from the package entry and
 * wraps it as an RpmVersion. Empty parts are omitted; the separator dash is only
 * written when at least one of version/release is present.
 * (The previous local names "hasPackageVersion"/"hasPackageRelease" actually meant
 * the opposite - they were true when the part was EMPTY - so they are renamed here.)
 */
private static RpmVersion getPackageRpmVersion(String packageName, Map<String, Object> hostPackage) {
    Object rawVersion = hostPackage.get(VdsProperties.package_version);
    Object rawRelease = hostPackage.get(VdsProperties.package_release);
    String version = rawVersion != null ? (String) rawVersion : null;
    String release = rawRelease != null ? (String) rawRelease : null;
    boolean versionMissing = StringUtils.isEmpty(version);
    boolean releaseMissing = StringUtils.isEmpty(release);
    StringBuilder rpmName = new StringBuilder();
    if (!StringUtils.isEmpty(packageName)) {
        rpmName.append(packageName);
    }
    if (!versionMissing || !releaseMissing) {
        rpmName.append("-");
    }
    if (!versionMissing) {
        rpmName.append(version);
    }
    if (!releaseMissing) {
        if (rpmName.length() > 0) {
            rpmName.append("-").append(release);
        } else {
            rpmName.append(release);
        }
    }
    return new RpmVersion(rpmName.toString());
}
/**
 * Copies the hardware identification fields (manufacturer, product, version, serial,
 * UUID, family) from the hwInfo struct onto the host entity.
 */
public static void updateHardwareSystemInformation(Map<String, Object> hwInfo, VDS vds){
    String manufacturer = AssignStringValue(hwInfo, VdsProperties.hwManufacturer);
    String productName = AssignStringValue(hwInfo, VdsProperties.hwProductName);
    String version = AssignStringValue(hwInfo, VdsProperties.hwVersion);
    String serialNumber = AssignStringValue(hwInfo, VdsProperties.hwSerialNumber);
    String uuid = AssignStringValue(hwInfo, VdsProperties.hwUUID);
    String family = AssignStringValue(hwInfo, VdsProperties.hwFamily);
    vds.setHardwareManufacturer(manufacturer);
    vds.setHardwareProductName(productName);
    vds.setHardwareVersion(version);
    vds.setHardwareSerialNumber(serialNumber);
    vds.setHardwareUUID(uuid);
    vds.setHardwareFamily(family);
}
/**
 * Formats "[name - ]version - release" from a legacy flat package entry. Empty parts
 * are skipped; the name is only included when {@code getName} is true.
 */
private static String GetPackageVersionFormated(Map<String, Object> hostPackage, boolean getName) {
    String name = AssignStringValue(hostPackage, VdsProperties.package_name);
    String version = AssignStringValue(hostPackage, VdsProperties.package_version);
    String release = AssignStringValue(hostPackage, VdsProperties.package_release);
    StringBuilder formatted = new StringBuilder();
    if (getName && !StringUtils.isEmpty(name)) {
        formatted.append(name);
    }
    appendPackagePart(formatted, version);
    appendPackagePart(formatted, release);
    return formatted.toString();
}

// Appends a package part, prefixing " - " when something was already written.
private static void appendPackagePart(StringBuilder sb, String part) {
    if (StringUtils.isEmpty(part)) {
        return;
    }
    if (sb.length() > 0) {
        sb.append(" - ");
    }
    sb.append(part);
}
/**
 * Populates the dynamic statistics of {@code vds} from a VDSM getVdsStats
 * response: memory and network usage, CPU load, swap, KSM, storage-domain
 * monitoring results, local disk usage, hosted-engine HA state and NUMA
 * statistics.
 *
 * @param vds the host whose statistics fields are updated in place
 * @param xmlRpcStruct the stats map returned by VDSM
 */
public static void updateVDSStatisticsData(VDS vds, Map<String, Object> xmlRpcStruct) {
    // ------------- vds memory usage ---------------------------
    vds.setUsageMemPercent(AssignIntValue(xmlRpcStruct, VdsProperties.mem_usage));
    // ------------- vds network statistics ---------------------
    Map<String, Object> interfaces = (Map<String, Object>) xmlRpcStruct.get(VdsProperties.NETWORK);
    if (interfaces != null) {
        int networkUsage = 0;
        Map<String, VdsNetworkInterface> nicsByName = Entities.entitiesByName(vds.getInterfaces());
        for (Entry<String, Object> entry : interfaces.entrySet()) {
            // Only interfaces the engine already knows by name are updated.
            if (nicsByName.containsKey(entry.getKey())) {
                VdsNetworkInterface iface = nicsByName.get(entry.getKey());
                iface.setVdsId(vds.getId());
                Map<String, Object> dict = (Map<String, Object>) entry.getValue();
                Double rx_rate = AssignDoubleValue(dict, VdsProperties.rx_rate);
                Double rx_dropped = AssignDoubleValue(dict, VdsProperties.rx_dropped);
                Double tx_rate = AssignDoubleValue(dict, VdsProperties.tx_rate);
                Double tx_dropped = AssignDoubleValue(dict, VdsProperties.tx_dropped);
                // Missing rates default to 0 rather than null.
                iface.getStatistics().setReceiveRate(rx_rate != null ? rx_rate : 0);
                iface.getStatistics().setReceiveDropRate(rx_dropped != null ? rx_dropped : 0);
                iface.getStatistics().setTransmitRate(tx_rate != null ? tx_rate : 0);
                iface.getStatistics().setTransmitDropRate(tx_dropped != null ? tx_dropped : 0);
                iface.setSpeed(AssignIntValue(dict, VdsProperties.INTERFACE_SPEED));
                iface.getStatistics().setStatus(AssignInterfaceStatusValue(dict, VdsProperties.iface_status));
                // Host-level network usage is the busiest interface; VLAN devices
                // and bond slaves do not contribute.
                if (!NetworkUtils.isVlan(iface) && !iface.isBondSlave()) {
                    networkUsage = (int) Math.max(networkUsage, computeInterfaceUsage(iface));
                }
            }
        }
        vds.setUsageNetworkPercent(networkUsage);
    }
    // ----------- vds cpu statistics info ---------------------
    vds.setCpuSys(AssignDoubleValue(xmlRpcStruct, VdsProperties.cpu_sys));
    vds.setCpuUser(AssignDoubleValue(xmlRpcStruct, VdsProperties.cpu_user));
    if (vds.getCpuSys() != null && vds.getCpuUser() != null) {
        vds.setUsageCpuPercent((int) (vds.getCpuSys() + vds.getCpuUser()));
    }
    // CPU load reported by VDSM is in uptime-style format, i.e. normalized
    // to unity, so that say an 8% load is reported as 0.08
    Double d = AssignDoubleValue(xmlRpcStruct, VdsProperties.cpu_load);
    d = (d != null) ? d : 0;
    vds.setCpuLoad(d.doubleValue() * 100.0);
    vds.setCpuIdle(AssignDoubleValue(xmlRpcStruct, VdsProperties.cpu_idle));
    // ------------- memory / swap / KSM ------------------------
    vds.setMemAvailable(AssignLongValue(xmlRpcStruct, VdsProperties.mem_available));
    vds.setMemFree(AssignLongValue(xmlRpcStruct, VdsProperties.memFree));
    vds.setMemShared(AssignLongValue(xmlRpcStruct, VdsProperties.mem_shared));
    vds.setSwapFree(AssignLongValue(xmlRpcStruct, VdsProperties.swap_free));
    vds.setSwapTotal(AssignLongValue(xmlRpcStruct, VdsProperties.swap_total));
    vds.setKsmCpuPercent(AssignIntValue(xmlRpcStruct, VdsProperties.ksm_cpu_percent));
    vds.setKsmPages(AssignLongValue(xmlRpcStruct, VdsProperties.ksm_pages));
    vds.setKsmState(AssignBoolValue(xmlRpcStruct, VdsProperties.ksm_state));
    // dynamic data got from GetVdsStats; optional fields are only applied when present
    if (xmlRpcStruct.containsKey(VdsProperties.transparent_huge_pages_state)) {
        vds.setTransparentHugePagesState(EnumUtils.valueOf(VdsTransparentHugePagesState.class, xmlRpcStruct
                .get(VdsProperties.transparent_huge_pages_state).toString(), true));
    }
    if (xmlRpcStruct.containsKey(VdsProperties.anonymous_transparent_huge_pages)) {
        vds.setAnonymousHugePages(AssignIntValue(xmlRpcStruct, VdsProperties.anonymous_transparent_huge_pages));
    }
    vds.setNetConfigDirty(AssignBoolValue(xmlRpcStruct, VdsProperties.netConfigDirty));
    vds.setImagesLastCheck(AssignDoubleValue(xmlRpcStruct, VdsProperties.images_last_check));
    vds.setImagesLastDelay(AssignDoubleValue(xmlRpcStruct, VdsProperties.images_last_delay));
    // VM counters; a missing total count defaults to 0.
    Integer vm_count = AssignIntValue(xmlRpcStruct, VdsProperties.vm_count);
    vds.setVmCount(vm_count == null ? 0 : vm_count);
    vds.setVmActive(AssignIntValue(xmlRpcStruct, VdsProperties.vm_active));
    vds.setVmMigrating(AssignIntValue(xmlRpcStruct, VdsProperties.vm_migrating));
    updateVDSDomainData(vds, xmlRpcStruct);
    updateLocalDisksUsage(vds, xmlRpcStruct);
    // hosted engine: newer hosts report a nested haStats map; older ones only a bare score
    Integer haScore = null;
    Boolean haIsConfigured = null;
    Boolean haIsActive = null;
    Boolean haGlobalMaint = null;
    Boolean haLocalMaint = null;
    if (xmlRpcStruct.containsKey(VdsProperties.ha_stats)) {
        Map<String, Object> haStats = (Map<String, Object>) xmlRpcStruct.get(VdsProperties.ha_stats);
        if (haStats != null) {
            haScore = AssignIntValue(haStats, VdsProperties.ha_stats_score);
            haIsConfigured = AssignBoolValue(haStats, VdsProperties.ha_stats_is_configured);
            haIsActive = AssignBoolValue(haStats, VdsProperties.ha_stats_is_active);
            haGlobalMaint = AssignBoolValue(haStats, VdsProperties.ha_stats_global_maintenance);
            haLocalMaint = AssignBoolValue(haStats, VdsProperties.ha_stats_local_maintenance);
        }
    } else {
        haScore = AssignIntValue(xmlRpcStruct, VdsProperties.ha_score);
        // prior to 3.4, haScore was returned if ha was installed; assume active if > 0
        if (haScore != null) {
            haIsConfigured = true;
            haIsActive = (haScore > 0);
        }
    }
    // Unreported HA values fall back to 0/false.
    vds.setHighlyAvailableScore(haScore != null ? haScore : 0);
    vds.setHighlyAvailableIsConfigured(haIsConfigured != null ? haIsConfigured : false);
    vds.setHighlyAvailableIsActive(haIsActive != null ? haIsActive : false);
    vds.setHighlyAvailableGlobalMaintenance(haGlobalMaint != null ? haGlobalMaint : false);
    vds.setHighlyAvailableLocalMaintenance(haLocalMaint != null ? haLocalMaint : false);
    vds.setBootTime(AssignLongValue(xmlRpcStruct, VdsProperties.bootTime));
    updateNumaStatisticsData(vds, xmlRpcStruct);
}
/**
 * Computes the usage of an interface as the larger of its receive and
 * transmit rates, each capped at 100 percent.
 */
private static double computeInterfaceUsage(VdsNetworkInterface iface) {
    double receiveUsage = truncatePercentage(iface.getStatistics().getReceiveRate());
    double transmitUsage = truncatePercentage(iface.getStatistics().getTransmitRate());
    return Math.max(receiveUsage, transmitUsage);
}
/**
 * Caps a percentage value at 100; values at or below 100 (including NaN,
 * which is propagated) are returned unchanged.
 */
private static double truncatePercentage(double value) {
    return value > 100 ? 100 : value;
}
/**
 * Updates the host's NUMA statistics: per-core CPU statistics are read from
 * the cpuStats section and grouped by their NUMA node index, per-node CPU
 * averages are derived from each group, and per-node free-memory figures
 * are merged in from the NUMA free-memory section. The host's previous NUMA
 * node list and core statistics are replaced, not merged.
 *
 * @param vds the host whose NUMA node list and core statistics are replaced
 * @param xmlRpcStruct the stats map returned by VDSM
 */
public static void updateNumaStatisticsData(VDS vds, Map<String, Object> xmlRpcStruct) {
    List<VdsNumaNode> vdsNumaNodes = new ArrayList<>();
    List<CpuStatistics> cpuStatsData = new ArrayList<>();
    if (xmlRpcStruct.containsKey(VdsProperties.CPU_STATS)) {
        Map<String, Map<String, Object>> cpuStats = (Map<String, Map<String, Object>>)
                xmlRpcStruct.get(VdsProperties.CPU_STATS);
        // Group the per-core statistics by the NUMA node each core belongs to.
        Map<Integer, List<CpuStatistics>> numaNodeCpuStats = new HashMap<>();
        for (Map.Entry<String, Map<String, Object>> item : cpuStats.entrySet()) {
            CpuStatistics data = buildVdsCpuStatistics(item);
            cpuStatsData.add(data);
            int numaNodeIndex = AssignIntValue(item.getValue(), VdsProperties.NUMA_NODE_INDEX);
            if (!numaNodeCpuStats.containsKey(numaNodeIndex)) {
                numaNodeCpuStats.put(numaNodeIndex, new ArrayList<CpuStatistics>());
            }
            numaNodeCpuStats.get(numaNodeIndex).add(data);
        }
        // Average each node's core statistics (formatter rounds to two decimals).
        DecimalFormat percentageFormatter = new DecimalFormat("#.##");
        for (Map.Entry<Integer, List<CpuStatistics>> item : numaNodeCpuStats.entrySet()) {
            VdsNumaNode node = buildVdsNumaNodeStatistics(percentageFormatter, item);
            vdsNumaNodes.add(node);
        }
    }
    if (xmlRpcStruct.containsKey(VdsProperties.NUMA_NODE_FREE_MEM_STAT)) {
        Map<String, Map<String, Object>> memStats = (Map<String, Map<String, Object>>)
                xmlRpcStruct.get(VdsProperties.NUMA_NODE_FREE_MEM_STAT);
        for (Map.Entry<String, Map<String, Object>> item : memStats.entrySet()) {
            // Memory figures are matched to nodes by index; indexes with no
            // matching node are skipped.
            VdsNumaNode node = NumaUtils.getVdsNumaNodeByIndex(vdsNumaNodes, Integer.valueOf(item.getKey()));
            if (node != null) {
                node.getNumaNodeStatistics().setMemFree(AssignLongValue(item.getValue(),
                        VdsProperties.NUMA_NODE_FREE_MEM));
                node.getNumaNodeStatistics().setMemUsagePercent(AssignIntValue(item.getValue(),
                        VdsProperties.NUMA_NODE_MEM_PERCENT));
            }
        }
    }
    vds.getNumaNodeList().clear();
    vds.getNumaNodeList().addAll(vdsNumaNodes);
    vds.getStatisticsData().getCpuCoreStatistics().clear();
    vds.getStatisticsData().getCpuCoreStatistics().addAll(cpuStatsData);
}
/**
 * Aggregates the per-core CPU statistics of one NUMA node (the map entry's
 * value) into a single {@link VdsNumaNode} whose user/sys/idle values are the
 * averages over the node's cores, rounded to two decimal places.
 * <p>
 * Bug fix: the original formatted the averages with the caller's
 * {@link DecimalFormat} and re-parsed them via {@code Double.valueOf}. In
 * locales whose decimal separator is ',' the formatted string is not parsable
 * by {@code Double.valueOf} and a NumberFormatException was thrown. Rounding
 * is now done numerically, locale-independently. (Rounding mode is HALF_UP
 * instead of DecimalFormat's default HALF_EVEN — differs only on exact
 * half-way values.)
 *
 * @param percentageFormatter retained for signature compatibility; no longer used
 * @param item NUMA node index mapped to the node's per-core statistics (non-empty)
 * @return the populated NUMA node entity
 */
private static VdsNumaNode buildVdsNumaNodeStatistics(DecimalFormat percentageFormatter,
        Map.Entry<Integer, List<CpuStatistics>> item) {
    VdsNumaNode node = new VdsNumaNode();
    NumaNodeStatistics nodeStat = new NumaNodeStatistics();
    double nodeCpuUser = 0.0;
    double nodeCpuSys = 0.0;
    double nodeCpuIdle = 0.0;
    List<CpuStatistics> coreStats = item.getValue();
    for (CpuStatistics cpuStat : coreStats) {
        nodeCpuUser += cpuStat.getCpuUser();
        nodeCpuSys += cpuStat.getCpuSys();
        nodeCpuIdle += cpuStat.getCpuIdle();
    }
    nodeStat.setCpuUser(roundTwoDecimalPlaces(nodeCpuUser / coreStats.size()));
    nodeStat.setCpuSys(roundTwoDecimalPlaces(nodeCpuSys / coreStats.size()));
    nodeStat.setCpuIdle(roundTwoDecimalPlaces(nodeCpuIdle / coreStats.size()));
    nodeStat.setCpuUsagePercent((int) (nodeStat.getCpuSys() + nodeStat.getCpuUser()));
    node.setIndex(item.getKey());
    node.setNumaNodeStatistics(nodeStat);
    return node;
}

/** Rounds a value to two decimal places without locale-dependent formatting. */
private static double roundTwoDecimalPlaces(double value) {
    return Math.round(value * 100.0) / 100.0;
}
/**
 * Builds a per-core {@link CpuStatistics} entry from one item of the VDSM
 * cpuStats map; the entry's key is the CPU id and its value holds the
 * user/sys/idle figures.
 */
private static CpuStatistics buildVdsCpuStatistics(Map.Entry<String, Map<String, Object>> item) {
    Map<String, Object> coreValues = item.getValue();
    CpuStatistics stats = new CpuStatistics();
    stats.setCpuId(Integer.valueOf(item.getKey()));
    stats.setCpuUser(AssignDoubleValue(coreValues, VdsProperties.NUMA_CPU_USER));
    stats.setCpuSys(AssignDoubleValue(coreValues, VdsProperties.NUMA_CPU_SYS));
    stats.setCpuIdle(AssignDoubleValue(coreValues, VdsProperties.NUMA_CPU_IDLE));
    stats.setCpuUsagePercent((int) (stats.getCpuSys() + stats.getCpuUser()));
    return stats;
}
/**
 * Update {@link VDS#setLocalDisksUsage(Map)} with a map of paths usage extracted from the returned
 * value. The usage is reported in MB.
 *
 * @param vds
 *            The VDS object to update.
 * @param xmlRpcStruct
 *            The XML/RPC to extract the usage from.
 */
protected static void updateLocalDisksUsage(VDS vds, Map<String, Object> xmlRpcStruct) {
    if (!xmlRpcStruct.containsKey(VdsProperties.DISK_STATS)) {
        return;
    }
    Map<String, Object> diskStatsStruct = (Map<String, Object>) xmlRpcStruct.get(VdsProperties.DISK_STATS);
    Map<String, Long> usageByPath = new HashMap<String, Long>();
    vds.setLocalDisksUsage(usageByPath);
    for (Entry<String, Object> pathEntry : diskStatsStruct.entrySet()) {
        Map<String, Object> pathStats = (Map<String, Object>) pathEntry.getValue();
        usageByPath.put(pathEntry.getKey(), AssignLongValue(pathStats, VdsProperties.DISK_STATS_FREE));
    }
}
/**
 * Parses the per-storage-domain monitoring results out of the stats struct
 * and sets them on the host. A malformed domain entry is logged and skipped;
 * the remaining entries are still reported.
 */
private static void updateVDSDomainData(VDS vds, Map<String, Object> xmlRpcStruct) {
    if (!xmlRpcStruct.containsKey(VdsProperties.domains)) {
        return;
    }
    Map<String, Object> domains = (Map<String, Object>)
            xmlRpcStruct.get(VdsProperties.domains);
    ArrayList<VDSDomainsData> domainsData = new ArrayList<VDSDomainsData>();
    for (Map.Entry<String, ?> domainEntry : domains.entrySet()) {
        try {
            Map<String, Object> attributes = (Map<String, Object>) domainEntry.getValue();
            VDSDomainsData data = new VDSDomainsData();
            data.setDomainId(new Guid(domainEntry.getKey().toString()));
            data.setCode((Integer) attributes.get(VdsProperties.code));
            // lastCheck / delay are optional and default to 0 when absent.
            double lastCheck = 0;
            if (attributes.containsKey(VdsProperties.lastCheck)) {
                lastCheck = Double.parseDouble((String) attributes.get(VdsProperties.lastCheck));
            }
            data.setLastCheck(lastCheck);
            double delay = 0;
            if (attributes.containsKey(VdsProperties.delay)) {
                delay = Double.parseDouble((String) attributes.get(VdsProperties.delay));
            }
            data.setDelay(delay);
            domainsData.add(data);
        } catch (Exception e) {
            log.error("failed building domains: {}", e.getMessage());
            log.debug("Exception", e);
        }
    }
    vds.setDomains(domainsData);
}
/**
 * Translates the textual interface status reported by VDSM into an
 * {@link InterfaceStatus}: "up" (case-insensitive, trimmed) maps to UP, any
 * other non-empty string to DOWN, and a missing/empty/non-string value to NONE.
 */
private static InterfaceStatus AssignInterfaceStatusValue(Map<String, Object> input, String name) {
    if (!input.containsKey(name)) {
        return InterfaceStatus.NONE;
    }
    Object rawValue = input.get(name);
    String statusText = rawValue instanceof String ? (String) rawValue : null;
    if (StringUtils.isEmpty(statusText)) {
        return InterfaceStatus.NONE;
    }
    return statusText.toLowerCase().trim().equals("up") ? InterfaceStatus.UP : InterfaceStatus.DOWN;
}
/**
 * Reads {@code name} from the map as a {@link Double}. VDSM reports doubles
 * as strings, so only String values are parsed; a missing key or a non-string
 * value yields null.
 */
private static Double AssignDoubleValue(Map<String, Object> input, String name) {
    if (!input.containsKey(name)) {
        return null;
    }
    Object rawValue = input.get(name);
    if (!(rawValue instanceof String)) {
        return null;
    }
    return Double.parseDouble((String) rawValue);
}
/**
 * Do the same logic as AssignDoubleValue does, but instead of null we return 0.
 * @param input - the input xml map
 * @param name - the name of the field to read as a double
 * @return the double value, never null
 */
private static Double assignDoubleValueWithNullProtection(Map<String, Object> input, String name) {
    Double value = AssignDoubleValue(input, name);
    if (value == null) {
        return Double.valueOf(0.0);
    }
    return value;
}
/**
 * Reads {@code name} from the map as an {@link Integer}. Accepts a native
 * Integer or a numeric string; a decimal string is truncated to its integer
 * part ("12.5" -&gt; 12). Returns null when the key is absent, the value has an
 * unexpected type, or parsing fails (logged).
 * <p>
 * Fixes: the parameter was a raw {@code Map}, and the unconditional
 * {@code (String)} cast threw an uncaught ClassCastException for values that
 * were neither Integer nor String — the cast is now guarded the same way as
 * in {@code AssignLongValue}, so such values yield null instead.
 */
private static Integer AssignIntValue(Map<String, Object> input, String name) {
    if (input.containsKey(name)) {
        if (input.get(name) instanceof Integer) {
            return (Integer) input.get(name);
        }
        String stringValue = (String) ((input.get(name) instanceof String) ? input.get(name) : null);
        if (!StringUtils.isEmpty(stringValue)) { // in case the input
                                                 // is decimal and we
                                                 // need int.
            stringValue = stringValue.split("[.]", -1)[0];
        }
        try {
            return Integer.parseInt(stringValue);
        } catch (NumberFormatException nfe) {
            log.error("Failed to parse '{}' value '{}' to integer: {}", name, stringValue, nfe.getMessage());
        }
    }
    return null;
}
/**
 * Reads {@code name} from the map as a {@link Long}. Accepts native
 * Long/Integer values or numeric strings; decimal strings are truncated to
 * their integer part ("7.9" -&gt; 7). Returns null when the key is absent, the
 * value has an unexpected type, or parsing fails (logged).
 */
private static Long AssignLongValue(Map<String, Object> input, String name) {
    if (!input.containsKey(name)) {
        return null;
    }
    Object rawValue = input.get(name);
    if (rawValue instanceof Long || rawValue instanceof Integer) {
        return Long.parseLong(rawValue.toString());
    }
    String stringValue = rawValue instanceof String ? (String) rawValue : null;
    if (!StringUtils.isEmpty(stringValue)) {
        // in case the input is decimal and we need a whole number
        stringValue = stringValue.split("[.]", -1)[0];
    }
    try {
        return Long.parseLong(stringValue);
    } catch (NumberFormatException e) {
        log.error("Failed to parse '{}' value '{}' to long: {}", name, stringValue, e.getMessage());
    }
    return null;
}
/**
 * Reads {@code name} from the map as a String; returns null when the key is
 * absent or the value is not a String.
 */
private static String AssignStringValue(Map<String, Object> input, String name) {
    Object value = input.get(name);
    return value instanceof String ? (String) value : null;
}
/**
 * Reads {@code name} from the map as an array and joins its elements with
 * commas. Accepts either a String[] or a generic Object[] (each element is
 * converted via toString()). Returns null when the key is absent or the value
 * holds neither array type.
 */
private static String AssignStringValueFromArray(Map<String, Object> input, String name) {
    if (!input.containsKey(name)) {
        return null;
    }
    Object value = input.get(name);
    // String[] first: a String[] is also an Object[], but needs no conversion.
    if (value instanceof String[]) {
        return StringUtils.join((String[]) value, ',');
    }
    if (value instanceof Object[]) {
        Object[] elements = (Object[]) value;
        String[] converted = new String[elements.length];
        for (int i = 0; i < elements.length; i++) {
            converted[i] = elements[i].toString();
        }
        return StringUtils.join(converted, ',');
    }
    return null;
}
/**
 * Converts an epoch timestamp field into a {@link Date}. Returns null when
 * the key is absent or the conversion fails (logged as a warning).
 * <p>
 * NOTE(review): the value is passed to {@link Calendar#setTimeInMillis(long)}
 * without multiplying by 1000, although the variable name suggests the field
 * holds seconds since the epoch — confirm the unit VDSM actually reports
 * before relying on the resulting Date.
 */
private static Date AssignDateTImeFromEpoch(Map<String, Object> input, String name) {
    Date retval = null;
    try {
        if (input.containsKey(name)) {
            // The value is expected to arrive as a Double; any other type fails
            // the cast and is handled by the catch block below.
            Double secsSinceEpoch = (Double) input.get(name);
            Calendar calendar = Calendar.getInstance();
            calendar.setTimeInMillis(secsSinceEpoch.longValue());
            retval = calendar.getTime();
        }
    } catch (RuntimeException ex) {
        log.warn("VdsBroker::AssignDateTImeFromEpoch - failed to convert field '{}' to dateTime: {}",
                name, ex.getMessage());
        log.debug("Exception", ex);
        retval = null;
    }
    return retval;
}
/**
 * Reads {@code name} from the map as a {@link Date}. A native Date value is
 * returned as-is; any other value's string form is parsed with the pattern
 * "yyyy-MM-dd HH:mm:ss z" after normalizing an ISO-style 'T' separator to a
 * space. Returns null when the key is absent or parsing fails.
 * <p>
 * NOTE(review): the ParseException is reported via printStackTrace() rather
 * than the class logger used by the sibling Assign* helpers — consider
 * switching to log.error/log.debug for consistency.
 */
private static Date AssignDatetimeValue(Map<String, Object> input, String name) {
    if (input.containsKey(name)) {
        if (input.get(name) instanceof Date) {
            return (Date) input.get(name);
        }
        // SimpleDateFormat is not thread-safe, hence a fresh instance per call.
        DateFormat formatter = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss z");
        try {
            String dateStr = input.get(name).toString().replaceFirst("T", " ").trim();
            return formatter.parse(dateStr);
        } catch (ParseException e) {
            e.printStackTrace();
        }
    }
    return null;
}
/**
 * Reads {@code name} from the map as a boolean. Native Booleans are returned
 * directly; any other value is parsed from its string form. A missing key
 * yields FALSE (never null).
 */
private static Boolean AssignBoolValue(Map<String, Object> input, String name) {
    if (!input.containsKey(name)) {
        return Boolean.FALSE;
    }
    Object value = input.get(name);
    if (value instanceof Boolean) {
        return (Boolean) value;
    }
    return Boolean.parseBoolean(value.toString());
}
/**
 * Populates {@code vm}'s per-disk dynamic statistics from the disks section
 * of the VM stats struct. Only disk entries that report an image group id are
 * collected. The "actual size" field, when present, is multiplied by 512
 * (presumably reported in 512-byte sectors — confirm against VDSM) and takes
 * precedence over "true size", which is used as-is. Latency values are
 * divided by NANO_SECONDS.
 * <p>
 * NOTE(review): assumes the struct always contains a disks map; a missing
 * entry would cause a NullPointerException on {@code disks.values()}.
 */
private static void initDisks(Map<String, Object> vmStruct, VmDynamic vm) {
    Map<String, Object> disks = (Map<String, Object>) vmStruct.get(VdsProperties.vm_disks);
    ArrayList<DiskImageDynamic> disksData = new ArrayList<DiskImageDynamic>();
    for (Object diskAsObj : disks.values()) {
        Map<String, Object> disk = (Map<String, Object>) diskAsObj;
        DiskImageDynamic diskData = new DiskImageDynamic();
        String imageGroupIdString = AssignStringValue(disk, VdsProperties.image_group_id);
        // Entries without an image group id are ignored entirely.
        if (!StringUtils.isEmpty(imageGroupIdString)) {
            Guid imageGroupIdGuid = new Guid(imageGroupIdString);
            diskData.setId(imageGroupIdGuid);
            diskData.setread_rate(AssignIntValue(disk, VdsProperties.vm_disk_read_rate));
            diskData.setwrite_rate(AssignIntValue(disk, VdsProperties.vm_disk_write_rate));
            // "actual size" (x512) is preferred over "true size" (as-is); a null
            // size falls back to 0.
            if (disk.containsKey(VdsProperties.disk_actual_size)) {
                Long size = AssignLongValue(disk, VdsProperties.disk_actual_size);
                diskData.setactual_size(size != null ? size * 512 : 0);
            } else if (disk.containsKey(VdsProperties.disk_true_size)) {
                Long size = AssignLongValue(disk, VdsProperties.disk_true_size);
                diskData.setactual_size(size != null ? size : 0);
            }
            // Latencies are optional; missing values are left unset rather than zeroed.
            if (disk.containsKey(VdsProperties.vm_disk_read_latency)) {
                diskData.setReadLatency(assignDoubleValueWithNullProtection(disk,
                        VdsProperties.vm_disk_read_latency) / NANO_SECONDS);
            }
            if (disk.containsKey(VdsProperties.vm_disk_write_latency)) {
                diskData.setWriteLatency(assignDoubleValueWithNullProtection(disk,
                        VdsProperties.vm_disk_write_latency) / NANO_SECONDS);
            }
            if (disk.containsKey(VdsProperties.vm_disk_flush_latency)) {
                diskData.setFlushLatency(assignDoubleValueWithNullProtection(disk,
                        VdsProperties.vm_disk_flush_latency) / NANO_SECONDS);
            }
            disksData.add(diskData);
        }
    }
    vm.setDisks(disksData);
}
/**
 * Builds the comma-separated installed-applications list for the VM from the
 * appsList field of the VM struct. A non-array value resets the list to an
 * empty string; a missing key leaves the current value untouched.
 * <p>
 * Bug fix: the conversion-failure warning used to test {@code app == null},
 * so a non-null entry that was not a String silently appended the text
 * "null" without any warning. The check now tests the converted value
 * ({@code appString}) instead, covering both cases. The appended text itself
 * is unchanged.
 */
private static void initAppsList(Map<String, Object> vmStruct, VmDynamic vm) {
    if (!vmStruct.containsKey(VdsProperties.app_list)) {
        return;
    }
    Object tempAppsList = vmStruct.get(VdsProperties.app_list);
    if (!(tempAppsList instanceof Object[])) {
        vm.setAppList("");
        return;
    }
    Object[] apps = (Object[]) tempAppsList;
    StringBuilder builder = new StringBuilder();
    boolean firstTime = true;
    for (Object app : apps) {
        String appString = (String) ((app instanceof String) ? app : null);
        if (appString == null) {
            log.warn("Failed to convert app: [null] to string");
        }
        if (!firstTime) {
            builder.append(",");
        } else {
            firstTime = false;
        }
        builder.append(appString);
    }
    vm.setAppList(builder.toString());
}
/**
 * Maps a VDSM status string onto a {@link VMStatus}. Unknown or unparsable
 * names yield Unassigned (logged as an error).
 */
private static VMStatus convertToVmStatus(String statusName) {
    // TODO: The following condition should be deleted as soon as we drop compatibility with 3.3 since "Running"
    // state will be replaced by the "Up" state and "Unknown" will exist no more. The "Up" state will be processed
    // by EnumUtils as other states below.
    if ("Running".equals(statusName) || "Unknown".equals(statusName)) {
        return VMStatus.Up;
    }
    if ("Migration Source".equals(statusName)) {
        return VMStatus.MigratingFrom;
    }
    if ("Migration Destination".equals(statusName)) {
        return VMStatus.MigratingTo;
    }
    try {
        // Enum constants carry no spaces, so strip them before the lookup.
        statusName = statusName.replace(" ", "");
        return EnumUtils.valueOf(VMStatus.class, statusName, true);
    } catch (Exception e) {
        log.error("Illegal Vm status: '{}'.", statusName);
        return VMStatus.Unassigned;
    }
}
/**
 * Updates the host network data with the network data reported by the host:
 * rebuilds the host's interface list (NICs, VLAN devices, bonds), attaches
 * the reported networks to their interfaces, and restores engine-persisted
 * bonding options for hosts that do not report them.
 *
 * @param vds
 *            The host to update
 * @param xmlRpcStruct
 *            A nested map contains network interfaces data
 */
public static void updateNetworkData(VDS vds, Map<String, Object> xmlRpcStruct) {
    vds.setActiveNic(AssignStringValue(xmlRpcStruct, VdsProperties.NETWORK_LAST_CLIENT_INTERFACE));
    // Snapshot the persisted interfaces before rebuilding, so previously
    // stored bonding options can be carried over below.
    List<VdsNetworkInterface> oldInterfaces =
            DbFacade.getInstance().getInterfaceDao().getAllInterfacesForVds(vds.getId());
    vds.getInterfaces().clear();
    // Rebuild the interface list: plain NICs first, then VLAN devices and bonds.
    addHostNetworkInterfaces(vds, xmlRpcStruct);
    addHostVlanDevices(vds, xmlRpcStruct);
    addHostBondDevices(vds, xmlRpcStruct);
    addHostNetworksAndUpdateInterfaces(vds, xmlRpcStruct);
    // set bonding options
    setBondingOptions(vds, oldInterfaces);
    // This information was added in 3.1, so don't use it if it's not there.
    if (xmlRpcStruct.containsKey(VdsProperties.netConfigDirty)) {
        vds.setNetConfigDirty(AssignBoolValue(xmlRpcStruct, VdsProperties.netConfigDirty));
    }
}
/**
 * Adds the networks reported by the host to {@code vds} and updates the
 * network-related details on the interfaces attached to each network.
 * Invalid interface configurations are reported to the audit log.
 */
private static void addHostNetworksAndUpdateInterfaces(VDS vds,
        Map<String, Object> xmlRpcStruct) {
    // Networks collection (name point to list of nics or bonds)
    Map<String, Object> networks = (Map<String, Object>) xmlRpcStruct.get(VdsProperties.NETWORKS);
    if (networks == null) {
        return;
    }
    vds.getNetworks().clear();
    for (Entry<String, Object> entry : networks.entrySet()) {
        Map<String, Object> networkStruct = (Map<String, Object>) entry.getValue();
        if (networkStruct == null) {
            continue;
        }
        Network net = createNetworkData(entry.getKey(), networkStruct);
        List<VdsNetworkInterface> interfaces = findNetworkInterfaces(vds, xmlRpcStruct, networkStruct);
        for (VdsNetworkInterface iface : interfaces) {
            updateNetworkDetailsInInterface(iface, networkStruct, vds, net);
        }
        vds.getNetworks().add(net);
        reportInvalidInterfacesForNetwork(interfaces, net, vds);
    }
}
/**
 * Reports a warning to the audit log when a network's interface configuration
 * is invalid: either no interface carries the network, or a bridge is
 * connected to more than one interface.
 *
 * @param interfaces
 *            The network's interfaces
 * @param network
 *            The network to report for
 * @param vds
 *            The host in which the network is defined
 */
private static void reportInvalidInterfacesForNetwork(List<VdsNetworkInterface> interfaces, Network network, VDS vds) {
    int interfaceCount = interfaces.size();
    if (interfaceCount == 0) {
        AuditLogDirector.log(createHostNetworkAuditLog(network, vds), AuditLogType.NETWORK_WITHOUT_INTERFACES);
    } else if (interfaceCount > 1) {
        AuditLogableBase logable = createHostNetworkAuditLog(network, vds);
        logable.addCustomValue("Interfaces", StringUtils.join(Entities.objectNames(interfaces), ","));
        AuditLogDirector.log(logable, AuditLogType.BRIDGED_NETWORK_OVER_MULTIPLE_INTERFACES);
    }
}
/**
 * Creates an audit-log entry pre-populated with the host id and the network
 * name, ready for the caller to log.
 */
protected static AuditLogableBase createHostNetworkAuditLog(Network network, VDS vds) {
    AuditLogableBase auditable = new AuditLogableBase(vds.getId());
    auditable.addCustomValue("NetworkName", network.getName());
    return auditable;
}
/**
 * Resolves which host interfaces carry the given network. For hosts whose
 * VDSM reports bridges separately, the network's declared interface is looked
 * up by name (falling back to the bridge's ports when the name matches a
 * reported bridge); otherwise the network struct itself is treated as a
 * bridge and its ports are resolved.
 */
private static List<VdsNetworkInterface> findNetworkInterfaces(VDS vds,
        Map<String, Object> xmlRpcStruct,
        Map<String, Object> network) {
    Map<String, VdsNetworkInterface> vdsInterfaces = Entities.entitiesByName(vds.getInterfaces());
    List<VdsNetworkInterface> interfaces = new ArrayList<VdsNetworkInterface>();
    if (!FeatureSupported.bridgesReportByVdsm(vds.getVdsGroupCompatibilityVersion())) {
        interfaces.addAll(findBridgedNetworkInterfaces(network, vdsInterfaces));
        return interfaces;
    }
    String interfaceName = (String) network.get(VdsProperties.INTERFACE);
    if (interfaceName == null) {
        return interfaces;
    }
    VdsNetworkInterface iface = vdsInterfaces.get(interfaceName);
    if (iface != null) {
        interfaces.add(iface);
    } else {
        // The network sits on a bridge device; collect the bridge's ports instead.
        Map<String, Object> bridges =
                (Map<String, Object>) xmlRpcStruct.get(VdsProperties.NETWORK_BRIDGES);
        if (bridges != null && bridges.containsKey(interfaceName)) {
            interfaces.addAll(findBridgedNetworkInterfaces((Map<String, Object>) bridges.get(interfaceName),
                    vdsInterfaces));
        }
    }
    return interfaces;
}
/**
 * Builds a {@link Network} entity from the host-reported network attributes:
 * address, netmask, gateway and, when present and non-blank, the MTU.
 */
private static Network createNetworkData(String networkName, Map<String, Object> network) {
    Network net = new Network();
    net.setName(networkName);
    net.setAddr((String) network.get("addr"));
    net.setSubnet((String) network.get("netmask"));
    net.setGateway((String) network.get(VdsProperties.GLOBAL_GATEWAY));
    String reportedMtu = (String) network.get(VdsProperties.MTU);
    if (StringUtils.isNotBlank(reportedMtu)) {
        net.setMtu(Integer.parseInt(reportedMtu));
    }
    return net;
}
/**
 * Resolves a bridge's "ports" list into the host interfaces known by name;
 * port names with no matching interface are silently skipped.
 */
private static List<VdsNetworkInterface> findBridgedNetworkInterfaces(Map<String, Object> bridge,
        Map<String, VdsNetworkInterface> vdsInterfaces) {
    List<VdsNetworkInterface> interfaces = new ArrayList<VdsNetworkInterface>();
    Object[] ports = (Object[]) bridge.get("ports");
    if (ports == null) {
        return interfaces;
    }
    for (Object port : ports) {
        String portName = port.toString();
        if (vdsInterfaces.containsKey(portName)) {
            interfaces.add(vdsInterfaces.get(portName));
        }
    }
    return interfaces;
}
/**
 * Adds the bond devices reported by the host to the host's interface list,
 * linking each named slave interface to its bond and reading the bond's
 * MAC/address/netmask, MTU, bonding options and boot protocol from the
 * reported data.
 * <p>
 * NOTE(review): the bond is added to the host's interface list only inside
 * {@code addBondDeviceToHost}, i.e. only when a "slaves" entry is reported —
 * confirm that a slave-less bond is intentionally omitted.
 *
 * @param vds the host to update
 * @param xmlRpcStruct a map containing pairs of bond device name and bond data
 */
private static void addHostBondDevices(VDS vds, Map<String, Object> xmlRpcStruct) {
    Map<String, Object> bonds = (Map<String, Object>) xmlRpcStruct.get(VdsProperties.NETWORK_BONDINGS);
    if (bonds != null) {
        for (Entry<String, Object> entry : bonds.entrySet()) {
            VdsNetworkInterface iface = new Bond();
            VdsNetworkStatistics iStats = new VdsNetworkStatistics();
            iface.setStatistics(iStats);
            // A freshly generated id is shared by the interface and its statistics row.
            iStats.setId(Guid.newGuid());
            iStats.setVdsId(vds.getId());
            iface.setId(iStats.getId());
            iface.setName(entry.getKey());
            iface.setVdsId(vds.getId());
            iface.setBonded(true);
            Map<String, Object> bond = (Map<String, Object>) entry.getValue();
            if (bond != null) {
                iface.setMacAddress((String) bond.get("hwaddr"));
                iface.setAddress((String) bond.get("addr"));
                iface.setSubnet((String) bond.get("netmask"));
                // Link the slave NICs (already present on the host) to this bond.
                if (bond.get("slaves") != null) {
                    addBondDeviceToHost(vds, iface, (Object[]) bond.get("slaves"));
                }
                if (StringUtils.isNotBlank((String) bond.get(VdsProperties.MTU))) {
                    iface.setMtu(Integer.parseInt((String) bond.get(VdsProperties.MTU)));
                }
                // Bonding options come from the bond's "cfg" section, when reported.
                Map<String, Object> config =
                        (Map<String, Object>) bond.get("cfg");
                if (config != null && config.get("BONDING_OPTS") != null) {
                    iface.setBondOptions(config.get("BONDING_OPTS").toString());
                }
                addBootProtocol(config, vds, iface);
            }
        }
    }
}
/**
 * Updates the host interfaces list with vlan devices
 *
 * @param vds
 *            The host to update
 * @param xmlRpcStruct
 *            a map contains pairs of vlan device name and vlan data
 */
private static void addHostVlanDevices(VDS vds, Map<String, Object> xmlRpcStruct) {
    // vlans
    Map<String, Object> vlans = (Map<String, Object>) xmlRpcStruct.get(VdsProperties.NETWORK_VLANS);
    if (vlans != null) {
        for (Entry<String, Object> entry : vlans.entrySet()) {
            VdsNetworkInterface iface = new Vlan();
            VdsNetworkStatistics iStats = new VdsNetworkStatistics();
            iface.setStatistics(iStats);
            // Interface and statistics rows share a freshly generated id.
            iStats.setId(Guid.newGuid());
            iface.setId(iStats.getId());
            String vlanDeviceName = entry.getKey();
            iface.setName(vlanDeviceName);
            iface.setVdsId(vds.getId());
            Map<String, Object> vlan = (Map<String, Object>) entry.getValue();
            // Prefer the explicitly reported vlan id and base device; fall back to
            // parsing the conventional "<base>.<vlanId>" device name.
            if (vlan.get(VdsProperties.VLAN_ID) != null && vlan.get(VdsProperties.BASE_INTERFACE) != null) {
                iface.setVlanId((Integer) vlan.get(VdsProperties.VLAN_ID));
                iface.setBaseInterface((String) vlan.get(VdsProperties.BASE_INTERFACE));
            } else if (vlanDeviceName.contains(".")) {
                String[] names = vlanDeviceName.split("[.]", -1);
                String vlanId = names[1];
                iface.setVlanId(Integer.parseInt(vlanId));
                iface.setBaseInterface(names[0]);
            }
            iface.setAddress((String) vlan.get("addr"));
            iface.setSubnet((String) vlan.get("netmask"));
            if (StringUtils.isNotBlank((String) vlan.get(VdsProperties.MTU))) {
                iface.setMtu(Integer.parseInt((String) vlan.get(VdsProperties.MTU)));
            }
            iStats.setVdsId(vds.getId());
            addBootProtocol((Map<String, Object>) vlan.get("cfg"), vds, iface);
            vds.getInterfaces().add(iface);
        }
    }
}
/**
 * Updates the host network interfaces with the collected data from the host.
 *
 * @param vds
 *            The host to update its interfaces
 * @param xmlRpcStruct
 *            A nested map contains network interfaces data
 */
private static void addHostNetworkInterfaces(VDS vds, Map<String, Object> xmlRpcStruct) {
    Map<String, Object> nics = (Map<String, Object>) xmlRpcStruct.get(VdsProperties.NETWORK_NICS);
    if (nics == null) {
        return;
    }
    for (Entry<String, Object> entry : nics.entrySet()) {
        VdsNetworkInterface nic = new Nic();
        VdsNetworkStatistics nicStats = new VdsNetworkStatistics();
        // NIC and statistics rows share a freshly generated id.
        nicStats.setId(Guid.newGuid());
        nicStats.setVdsId(vds.getId());
        nic.setStatistics(nicStats);
        nic.setId(nicStats.getId());
        nic.setName(entry.getKey());
        nic.setVdsId(vds.getId());
        updateNetworkInterfaceDataFromHost(nic, vds, (Map<String, Object>) entry.getValue());
        vds.getInterfaces().add(nic);
    }
}
/**
 * Updates a given interface by data as collected from the host.
 *
 * @param iface
 *            The interface to update
 * @param host
 *            The host the interface belongs to (used when resolving the gateway)
 * @param nic
 *            A key-value map of the interface properties and their value
 */
private static void updateNetworkInterfaceDataFromHost(
        VdsNetworkInterface iface, VDS host, Map<String, Object> nic) {
    if (nic != null) {
        if (nic.get("speed") != null) {
            Object speed = nic.get("speed");
            iface.setSpeed((Integer) speed);
        }
        iface.setAddress((String) nic.get("addr"));
        iface.setSubnet((String) nic.get("netmask"));
        iface.setMacAddress((String) nic.get("hwaddr"));
        // if we get "permhwaddr", we are a part of a bond and we use that as the mac address
        String mac = (String) nic.get("permhwaddr");
        if (mac != null) {
            //TODO remove when the minimal supported vdsm version is >=3.6
            // in older VDSM versions, a slave's MAC is reported in upper case
            iface.setMacAddress(mac.toLowerCase());
        }
        if (StringUtils.isNotBlank((String) nic.get(VdsProperties.MTU))) {
            iface.setMtu(Integer.parseInt((String) nic.get(VdsProperties.MTU)));
        }
        addBootProtocol((Map<String, Object>) nic.get("cfg"), host, iface);
    }
}
/**
 * Update the network details on a given interface.
 *
 * @param iface
 *            The interface to update.
 * @param network
 *            Network struct to get details from.
 * @param host
 *            The host the interface belongs to.
 * @param net
 *            Network to get details from.
 */
private static void updateNetworkDetailsInInterface(VdsNetworkInterface iface,
        Map<String, Object> network,
        VDS host,
        Network net) {
    if (iface != null) {
        iface.setNetworkName(net.getName());
        // set the management ip
        if (StringUtils.equals(iface.getNetworkName(), NetworkUtils.getEngineNetwork())) {
            iface.setType(iface.getType() | VdsInterfaceType.MANAGEMENT.getValue());
        }
        iface.setAddress(net.getAddr());
        iface.setSubnet(net.getSubnet());
        boolean bridgedNetwork = isBridgedNetwork(network);
        iface.setBridged(bridgedNetwork);
        setGatewayIfNecessary(iface, host, net.getGateway());
        // The boot protocol is read from the "cfg" section only for bridged networks.
        if (bridgedNetwork) {
            Map<String, Object> networkConfig = (Map<String, Object>) network.get("cfg");
            addBootProtocol(networkConfig, host, iface);
        }
        // QoS values reported in the network struct are deserialized onto the interface.
        HostNetworkQosMapper qosMapper = new HostNetworkQosMapper(network);
        iface.setQos(qosMapper.deserialize());
    }
}
/**
 * Returns true if vdsm doesn't report the 'bridged' attribute or if reported - its actual value.<br>
 * The assumption is bridge-less network isn't supported if the 'bridged' attribute wasn't reported.<br>
 * Bridge-less networks must report 'false' for this property.
 *
 * @param network
 *            The network to evaluate its bridge attribute
 * @return true if no attribute is reported, otherwise its actual value
 */
private static boolean isBridgedNetwork(Map<String, Object> network) {
    Object bridged = network.get("bridged");
    return bridged == null || Boolean.parseBoolean(bridged.toString());
}
// We check for old bonding options: if the engine persisted a value (e.g. set via
// the UI) and the host does not return its bonding options (hosts below 2.2.4),
// keep the old value — but only when the newly reported option is null and the
// old one is not.
private static void setBondingOptions(VDS vds, List<VdsNetworkInterface> oldInterfaces) {
    for (VdsNetworkInterface oldIface : oldInterfaces) {
        if (oldIface.getBondOptions() == null) {
            continue;
        }
        for (VdsNetworkInterface newIface : vds.getInterfaces()) {
            if (oldIface.getName().equals(newIface.getName()) && newIface.getBondOptions() == null) {
                newIface.setBondOptions(oldIface.getBondOptions());
                break;
            }
        }
    }
}
/**
 * Derives the boot protocol of an interface from its reported "cfg" section:
 * BOOTPROTO=dhcp maps to DHCP; BOOTPROTO=none/static (or a missing BOOTPROTO)
 * combined with a non-empty IPADDR maps to STATIC_IP; anything else is NONE.
 * For a static configuration the configured gateway is also applied when
 * relevant (see {@code setGatewayIfNecessary}).
 * <p>
 * Fixes: the comparisons used {@code toLowerCase().equals(...)}, which is
 * locale-sensitive (e.g. the Turkish dotless-i breaks matching "static") and
 * repeated the lower-casing per comparison; they now use equalsIgnoreCase.
 * The redundant {@code gateway.toString()} on a String was also dropped.
 *
 * @param cfg the interface configuration map; null means NONE
 * @param host the host being updated
 * @param iface the interface whose boot protocol is set
 */
private static void addBootProtocol(Map<String, Object> cfg, VDS host, VdsNetworkInterface iface) {
    NetworkBootProtocol bootproto = NetworkBootProtocol.NONE;
    if (cfg != null) {
        String bootProtocol = (String) cfg.get("BOOTPROTO");
        if (bootProtocol != null) {
            if (bootProtocol.equalsIgnoreCase("dhcp")) {
                bootproto = NetworkBootProtocol.DHCP;
            } else if (bootProtocol.equalsIgnoreCase("none") || bootProtocol.equalsIgnoreCase("static")) {
                if (StringUtils.isNotEmpty((String) cfg.get("IPADDR"))) {
                    bootproto = NetworkBootProtocol.STATIC_IP;
                }
            }
        } else if (StringUtils.isNotEmpty((String) cfg.get("IPADDR"))) {
            bootproto = NetworkBootProtocol.STATIC_IP;
        }
        if (bootproto == NetworkBootProtocol.STATIC_IP) {
            String gateway = (String) cfg.get(VdsProperties.GATEWAY);
            if (StringUtils.isNotEmpty(gateway)) {
                setGatewayIfNecessary(iface, host, gateway);
            }
        }
    }
    iface.setBootProtocol(bootproto);
}
/**
 * Adds a bond device to the host's interface list and marks each of the
 * named slave interfaces (matched by name) as belonging to the bond.
 */
private static void addBondDeviceToHost(VDS vds, VdsNetworkInterface iface, Object[] interfaces) {
    vds.getInterfaces().add(iface);
    if (interfaces == null) {
        return;
    }
    for (Object slaveName : interfaces) {
        for (VdsNetworkInterface candidate : vds.getInterfaces()) {
            if (candidate.getName().equals(slaveName.toString())) {
                candidate.setBondName(iface.getName());
                break;
            }
        }
    }
}
/**
 * Store the gateway for either of these cases:
 * 1. any host network, in a cluster that supports multiple gateways
 * 2. management network, no matter the cluster compatibility version
 * 3. the active interface (could happen when there is no management network yet)
 * If a gateway was provided for a non-management network when multiple gateways
 * aren't supported, its value should be ignored.
 *
 * @param iface
 *            the host network interface
 * @param host
 *            the host whose interfaces are being edited
 * @param gateway
 *            the gateway value to be set
 */
private static void setGatewayIfNecessary(VdsNetworkInterface iface, VDS host, String gateway) {
    // The checks are evaluated sequentially to preserve the original
    // short-circuit order of the combined condition.
    if (FeatureSupported.multipleGatewaysSupported(host.getVdsGroupCompatibilityVersion())) {
        iface.setGateway(gateway);
        return;
    }
    if (NetworkUtils.getEngineNetwork().equals(iface.getNetworkName())) {
        iface.setGateway(gateway);
        return;
    }
    if (iface.getName().equals(host.getActiveNic())) {
        iface.setGateway(gateway);
    }
}
/**
 * Creates a list of {@link VmGuestAgentInterface} from the guest-agent
 * reported network-interfaces section of the VM struct.
 * <p>
 * Fix: the per-interface struct was handled as a raw {@code Map}, producing
 * unchecked invocations of the Assign*Value helpers; it is now cast to the
 * {@code Map<String, Object>} those helpers declare.
 *
 * @param vmId
 *            the Vm's ID which contains the interfaces
 * @param xmlRpcStruct
 *            the xml structure that describes the VM as reported by VDSM
 * @return a list of {@link VmGuestAgentInterface} or null if no guest vNics were reported
 */
public static List<VmGuestAgentInterface> buildVmGuestAgentInterfacesData(Guid vmId, Map<String, Object> xmlRpcStruct) {
    if (!xmlRpcStruct.containsKey(VdsProperties.VM_NETWORK_INTERFACES)) {
        return null;
    }
    List<VmGuestAgentInterface> interfaces = new ArrayList<VmGuestAgentInterface>();
    for (Object ifaceStruct : (Object[]) xmlRpcStruct.get(VdsProperties.VM_NETWORK_INTERFACES)) {
        Map<String, Object> ifaceMap = (Map<String, Object>) ifaceStruct;
        VmGuestAgentInterface nic = new VmGuestAgentInterface();
        nic.setInterfaceName(AssignStringValue(ifaceMap, VdsProperties.VM_INTERFACE_NAME));
        nic.setMacAddress(getMacAddress(ifaceMap));
        nic.setIpv4Addresses(extracStringtList(ifaceMap, VdsProperties.VM_IPV4_ADDRESSES));
        nic.setIpv6Addresses(extracStringtList(ifaceMap, VdsProperties.VM_IPV6_ADDRESSES));
        nic.setVmId(vmId);
        interfaces.add(nic);
    }
    return interfaces;
}
/**
 * Reads the guest NIC's MAC address from the reported structure and
 * normalizes dash separators to colons.
 *
 * @param ifaceMap one guest-interface structure reported by VDSM
 * @return the normalized MAC address, or null when none was reported
 */
private static String getMacAddress(Map<String, Object> ifaceMap) {
    final String rawMac = AssignStringValue(ifaceMap, VdsProperties.VM_INTERFACE_MAC_ADDRESS);
    if (rawMac == null) {
        return null;
    }
    return rawMac.replace('-', ':');
}
/**
 * Parses the NUMA topology reported by VDSM into the host's dynamic data:
 * the auto-NUMA balancing status and the list of NUMA nodes with their cpu
 * ids, total memory and inter-node distances.
 *
 * @param vds the host being updated
 * @param xmlRpcStruct the structure reported by VDSM for this host
 */
private static void updateNumaNodesData(VDS vds, Map<String, Object> xmlRpcStruct) {
    if (xmlRpcStruct.containsKey(VdsProperties.AUTO_NUMA)) {
        vds.getDynamicData().setAutoNumaBalancing(AutoNumaBalanceStatus.forValue(
                AssignIntValue(xmlRpcStruct, VdsProperties.AUTO_NUMA)));
    }
    if (xmlRpcStruct.containsKey(VdsProperties.NUMA_NODES)) {
        // node index (as string) -> per-node properties (cpus, memory, ...)
        Map<String, Map<String, Object>> numaNodeMap =
                (Map<String, Map<String, Object>>) xmlRpcStruct.get(VdsProperties.NUMA_NODES);
        // node index (as string) -> vector of distances to the other nodes
        Map<String, Object> numaNodeDistanceMap =
                (Map<String, Object>) xmlRpcStruct.get(VdsProperties.NUMA_NODE_DISTANCE);
        List<VdsNumaNode> newNumaNodeList = new ArrayList<>(numaNodeMap.size());
        for (Map.Entry<String, Map<String, Object>> item : numaNodeMap.entrySet()) {
            int index = Integer.valueOf(item.getKey());
            Map<String, Object> itemMap = item.getValue();
            List<Integer> cpuIds = extractIntegerList(itemMap, VdsProperties.NUMA_NODE_CPU_LIST);
            long memTotal = AssignLongValue(itemMap, VdsProperties.NUMA_NODE_TOTAL_MEM);
            VdsNumaNode numaNode = new VdsNumaNode();
            numaNode.setIndex(index);
            if (cpuIds != null) {
                numaNode.setCpuIds(cpuIds);
            }
            numaNode.setMemTotal(memTotal);
            newNumaNodeList.add(numaNode);
        }
        // Sort with the node comparator (presumably by node index — TODO
        // confirm) so list positions line up with the distance vectors below.
        Collections.sort(newNumaNodeList, numaNodeComparator);
        for (VdsNumaNode vdsNumaNode : newNumaNodeList) {
            int index = vdsNumaNode.getIndex();
            List<Integer> distances = extractIntegerList(numaNodeDistanceMap, String.valueOf(index));
            Map<Integer, Integer> distanceMap = new HashMap<>(distances.size());
            // assumes distances[i] refers to the i-th node of the sorted list,
            // i.e. VDSM reports each vector in ascending node-index order —
            // TODO confirm against the VDSM API
            for (int i = 0; i < distances.size(); i++) {
                distanceMap.put(newNumaNodeList.get(i).getIndex(), distances.get(i));
            }
            VdsNumaNode newNumaNode = NumaUtils.getVdsNumaNodeByIndex(newNumaNodeList, index);
            if (newNumaNode != null) {
                newNumaNode.setNumaNodeDistances(distanceMap);
            }
        }
        vds.getDynamicData().setNumaNodeList(newNumaNodeList);
        // a single node means the host exposes no real NUMA topology
        vds.setNumaSupport(newNumaNodeList.size() > 1);
    }
}
/**
 * Parses the VM's per-vNUMA-node runtime information reported by VDSM into
 * the VM statistics object.
 *
 * @param vm the statistics object whose vNUMA node list is filled
 * @param xmlRpcStruct the VM runtime structure reported by VDSM
 */
private static void updateVmNumaNodesRuntimeInfo(VmStatistics vm, Map<String, Object> xmlRpcStruct) {
    // vNUMA node index (as string) -> array of host (pNUMA) node indices
    Map<String, Object[]> vNodesRunInfo = (Map<String, Object[]>)xmlRpcStruct.get(
            VdsProperties.VM_NUMA_NODES_RUNTIME_INFO);
    for (Map.Entry<String, Object[]> item : vNodesRunInfo.entrySet()) {
        VmNumaNode vNode = new VmNumaNode();
        vNode.setIndex(Integer.valueOf(item.getKey()));
        for (Object pNodeIndex : item.getValue()) {
            // Guid.Empty and 'false' appear to be placeholders for a host-node
            // id and a pinned flag not known at runtime-report time —
            // TODO confirm the Pair semantics against VmNumaNode's consumers
            vNode.getVdsNumaNodeList().add(new Pair<>(
                    Guid.Empty, new Pair<>(false, (Integer)pNodeIndex)));
        }
        vm.getvNumaNodeStatisticsList().add(vNode);
    }
}
/**
 * Extracts a string list from the given property of the reported structure.
 * NOTE: the name keeps its historical typo ("extracStringt") because callers
 * in this file reference it as-is.
 *
 * @param xmlRpcStruct the structure reported by VDSM
 * @param propertyName key under which the value array is stored
 * @return the strings, or null when the key is absent or the array is empty
 */
private static List<String> extracStringtList(Map<String, Object> xmlRpcStruct, String propertyName) {
    if (!xmlRpcStruct.containsKey(propertyName)) {
        return null;
    }
    final Object[] rawItems = (Object[]) xmlRpcStruct.get(propertyName);
    if (rawItems.length == 0) {
        return null;
    }
    final List<String> strings = new ArrayList<String>(rawItems.length);
    for (Object rawItem : rawItems) {
        strings.add((String) rawItem);
    }
    return strings;
}
/**
 * Extracts an integer list from the given property of the reported structure.
 *
 * @param xmlRpcStruct the structure reported by VDSM
 * @param propertyName key under which the value array is stored
 * @return the integers, or null when the key is absent or the array is empty
 */
private static List<Integer> extractIntegerList(Map<String, Object> xmlRpcStruct, String propertyName) {
    if (!xmlRpcStruct.containsKey(propertyName)) {
        return null;
    }
    final Object[] rawItems = (Object[]) xmlRpcStruct.get(propertyName);
    if (rawItems.length == 0) {
        return null;
    }
    final List<Integer> numbers = new ArrayList<Integer>(rawItems.length);
    for (Object rawItem : rawItems) {
        numbers.add((Integer) rawItem);
    }
    return numbers;
}
}
|
core: legacy VDSM fix - part 2
Adding forgotten part of legacy vdsm fix to
I48cd5f76ed870530208fcd0fc69c3d1a52b7249b which I forgot to fixup.
Change-Id: I3b0cd1d109dae745c5a91e4f7a928457c9a7a9d3
Signed-off-by: Frantisek Kobzik <0b328e9ba0d2f62ea1913cf63eeb61d1089dc497@redhat.com>
|
backend/manager/modules/vdsbroker/src/main/java/org/ovirt/engine/core/vdsbroker/vdsbroker/VdsBrokerObjectsBuilder.java
|
core: legacy VDSM fix - part 2
|
|
Java
|
bsd-3-clause
|
b3db5f58e02ca02e12d7d5b12f9e715b7b7e3ff0
| 0
|
vincentml/basex,JensErat/basex,ksclarke/basex,drmacro/basex,dimitarp/basex,BaseXdb/basex,BaseXdb/basex,vincentml/basex,vincentml/basex,drmacro/basex,dimitarp/basex,ksclarke/basex,dimitarp/basex,ksclarke/basex,JensErat/basex,deshmnnit04/basex,vincentml/basex,drmacro/basex,deshmnnit04/basex,BaseXdb/basex,BaseXdb/basex,drmacro/basex,vincentml/basex,ksclarke/basex,JensErat/basex,joansmith/basex,JensErat/basex,ksclarke/basex,dimitarp/basex,deshmnnit04/basex,vincentml/basex,ksclarke/basex,vincentml/basex,JensErat/basex,joansmith/basex,joansmith/basex,deshmnnit04/basex,ksclarke/basex,joansmith/basex,drmacro/basex,dimitarp/basex,joansmith/basex,joansmith/basex,joansmith/basex,deshmnnit04/basex,BaseXdb/basex,JensErat/basex,vincentml/basex,deshmnnit04/basex,BaseXdb/basex,ksclarke/basex,vincentml/basex,dimitarp/basex,drmacro/basex,joansmith/basex,BaseXdb/basex,joansmith/basex,dimitarp/basex,dimitarp/basex,dimitarp/basex,joansmith/basex,deshmnnit04/basex,BaseXdb/basex,deshmnnit04/basex,BaseXdb/basex,joansmith/basex,drmacro/basex,vincentml/basex,drmacro/basex,drmacro/basex,vincentml/basex,JensErat/basex,ksclarke/basex,ksclarke/basex,drmacro/basex,dimitarp/basex,JensErat/basex,BaseXdb/basex,dimitarp/basex,deshmnnit04/basex,joansmith/basex,BaseXdb/basex,drmacro/basex,deshmnnit04/basex,JensErat/basex,dimitarp/basex,JensErat/basex,ksclarke/basex,ksclarke/basex,vincentml/basex,BaseXdb/basex,drmacro/basex,deshmnnit04/basex,deshmnnit04/basex,JensErat/basex,JensErat/basex
|
package org.basex.query.xquery.func;
import org.basex.query.xquery.XQContext;
import org.basex.query.xquery.XQException;
import org.basex.query.xquery.expr.CmpV;
import org.basex.query.xquery.item.Bln;
import org.basex.query.xquery.item.Item;
import org.basex.query.xquery.item.Itr;
import org.basex.query.xquery.item.Node;
import org.basex.query.xquery.item.Type;
import org.basex.query.xquery.iter.Iter;
import org.basex.query.xquery.iter.NodeIter;
import org.basex.query.xquery.iter.RangeIter;
import org.basex.query.xquery.iter.SeqIter;
import org.basex.util.Token;
/**
* Sequence functions.
*
* @author Workgroup DBIS, University of Konstanz 2005-08, ISC License
* @author Christian Gruen
*/
final class FNSeq extends Fun {
  @Override
  public Iter iter(final XQContext ctx, final Iter[] arg) throws XQException {
    final Iter iter = arg[0];
    // scratch sequence for the cases which are evaluated eagerly
    SeqIter seq = new SeqIter();
    Item i;
    switch(func) {
      case INDEXOF:
        final Item it = arg[1].atomic(this, false);
        // the optional third argument is a collation; it is only validated
        if(arg.length == 3) checkColl(arg[2]);
        return indexOf(iter, it);
      case DISTINCT:
        if(arg.length == 2) checkColl(arg[1]);
        return distinctPipelined(iter);
      case INSBEF:
        final Iter sub = arg[2];
        // insert position is clamped to at least 1 (before the first item)
        long r = Math.max(1, checkItr(arg[1]));
        while((i = iter.next()) != null) {
          if(--r == 0) seq.add(sub);
          seq.add(i.iter());
        }
        // position lay behind the last item: append the inserted sequence
        if(r > 0) seq.add(sub);
        return seq;
      case REVERSE:
        // a range can be reversed in place without materializing its items
        if(iter instanceof RangeIter) {
          ((RangeIter) iter).reverse();
          return iter;
        }
        while((i = iter.next()) != null) seq.insert(i, 0);
        return seq;
      case REMOVE:
        final long pos = checkItr(arg[1]);
        return remove(iter, pos);
      case SUBSEQ:
        final long start = checkItr(arg[1]);
        final long end = arg.length > 2 ? start + checkItr(arg[2]) :
          Long.MAX_VALUE;
        return subseq(iter, start, end);
      case DEEPEQ:
        return Bln.get(deep(arg)).iter();
      default:
        throw new RuntimeException("Not defined: " + func);
    }
  }

  /**
   * Returns the 1-based positions at which the specified item occurs
   * in the input (pipelined; positions are computed on demand).
   * @param iter input iterator
   * @param it item to be searched for in the iterator
   * @return iterator over the matching positions
   */
  private Iter indexOf(final Iter iter, final Item it) {
    return new Iter() {
      Item i;
      int index = 0;
      @Override
      public Item next() throws XQException {
        while((i = iter.next()) != null) {
          index++;
          if(CmpV.valCheck(i, it) && CmpV.COMP.EQ.e(i, it)) {
            return Itr.get(index);
          }
        }
        return null;
      }
    };
  }

  /**
   * Returns the distinct values of the input (pipelined). Values equal to an
   * already returned one are skipped; NaN is returned at most once.
   * @param iter input iterator
   * @return iterator over the distinct values
   */
  private Iter distinctPipelined(final Iter iter) {
    return new Iter() {
      SeqIter sq = new SeqIter();
      Item i;
      @Override
      public Item next() throws XQException {
        loop1: while((i = iter.next()) != null) {
          // NaN is the only value for which x != x holds
          final boolean nan = i.n() && i.dbl() != i.dbl();
          for(int r = 0; r < sq.size; r++) {
            final Item c = sq.item[r];
            if(nan && c.dbl() != c.dbl()) continue loop1;
            if(CmpV.valCheck(i, c) && CmpV.COMP.EQ.e(i, c)) continue loop1;
          }
          sq.add(FNGen.atom(i));
          return i;
        }
        return null;
      }
    };
  }

  /**
   * Removes the item at the specified position from a sequence (pipelined).
   * @param iter input iterator
   * @param pos 1-based position of the item to be removed
   * @return iterator without the item
   */
  private Iter remove(final Iter iter, final long pos) {
    return new Iter() {
      long c = 0;
      @Override
      public Item next() throws XQException {
        Item i;
        while((i = iter.next()) != null) if(++c != pos) return i;
        return null;
      }
    };
  }

  /**
   * Creates a subsequence of a sequence, from position start (inclusive)
   * up to position end (exclusive), evaluated in a pipelined manner.
   * @param iter input iterator
   * @param start first 1-based position to be returned
   * @param end first position behind the subsequence
   * @return subsequence
   */
  private Iter subseq(final Iter iter, final long start, final long end) {
    return new Iter() {
      long c = 0;
      @Override
      public Item next() throws XQException {
        Item i;
        while((i = iter.next()) != null) {
          if(++c < start) continue;
          if(c >= end) break;
          return i;
        }
        return null;
      }
    };
  }

  /**
   * Checks two sequences for deep equality.
   * @param arg arguments (two sequences and an optional collation)
   * @return result of check
   * @throws XQException evaluation exception
   */
  private boolean deep(final Iter[] arg) throws XQException {
    if(arg.length == 3) checkColl(arg[2]);
    final Iter iter1 = arg[0];
    final Iter iter2 = arg[1];
    Item it1 = null;
    Item it2 = null;
    // non-short-circuit '&': both iterators must advance in lockstep
    while((it1 = iter1.next()) != null & (it2 = iter2.next()) != null) {
      // two NaN values are considered deep-equal
      if(it1.n() && it2.n() && it1.dbl() != it1.dbl() && it2.dbl() != it2.dbl())
        continue;
      if(!CmpV.valCheck(it1, it2) || CmpV.COMP.NE.e(it1, it2)) return false;
      if(!it1.node() && !it2.node()) continue;
      // comparing nodes
      if(!(it1.node() && it2.node())) return false;
      final NodeIter niter1 = ((Node) it1).descOrSelf();
      final NodeIter niter2 = ((Node) it2).descOrSelf();
      Node n1 = null, n2 = null;
      while((n1 = niter1.next()) != null & (n2 = niter2.next()) != null) {
        if(n1.type != n2.type) return false;
        if((n1.type == Type.ELM || n1.type == Type.PI) &&
          !n1.qname().eq(n2.qname())) return false;
        if(n1.type == Type.ATT) {
          if(!n1.qname().eq(n2.qname()) || !Token.eq(n1.str(), n2.str()))
            return false;
          continue;
        }
        // compare attribute counts first...
        NodeIter att1 = n1.attr();
        int s1 = 0;
        while(att1.next() != null) s1++;
        NodeIter att2 = n2.attr();
        int s2 = 0;
        while(att2.next() != null) s2++;
        if(s1 != s2) return false;
        // ...then look up every attribute of n1 in n2 (order-independent)
        Node a1 = null, a2 = null;
        att1 = n1.attr();
        while((a1 = att1.next()) != null) {
          att2 = n2.attr();
          boolean found = false;
          while((a2 = att2.next()) != null) {
            if(a1.qname().eq(a2.qname())) {
              found = Token.eq(a1.str(), a2.str());
              break;
            }
          }
          if(!found) return false;
        }
      }
      // both descendant traversals must be exhausted simultaneously
      if(n1 != n2) return false;
    }
    // both input sequences must end at the same time
    return it1 == it2;
  }
}
|
src/org/basex/query/xquery/func/FNSeq.java
|
package org.basex.query.xquery.func;
import org.basex.query.xquery.XQContext;
import org.basex.query.xquery.XQException;
import org.basex.query.xquery.expr.CmpV;
import org.basex.query.xquery.item.Bln;
import org.basex.query.xquery.item.Item;
import org.basex.query.xquery.item.Itr;
import org.basex.query.xquery.item.Node;
import org.basex.query.xquery.item.Type;
import org.basex.query.xquery.iter.Iter;
import org.basex.query.xquery.iter.NodeIter;
import org.basex.query.xquery.iter.RangeIter;
import org.basex.query.xquery.iter.SeqIter;
import org.basex.util.Token;
/**
* Sequence functions.
*
* @author Workgroup DBIS, University of Konstanz 2005-08, ISC License
* @author Christian Gruen
*/
final class FNSeq extends Fun {
  @Override
  public Iter iter(final XQContext ctx, final Iter[] arg) throws XQException {
    final Iter iter = arg[0];
    // scratch sequence for the cases which are evaluated eagerly
    SeqIter seq = new SeqIter();
    Item i;
    switch(func) {
      case INDEXOF:
        final Item it = arg[1].atomic(this, false);
        // the optional third argument is a collation; it is only validated
        if(arg.length == 3) checkColl(arg[2]);
        return indexOf(iter, it);
      case DISTINCT:
        if(arg.length == 2) checkColl(arg[1]);
        return distinctPipelined(iter);
      case INSBEF:
        final Iter sub = arg[2];
        // insert position is clamped to at least 1 (before the first item)
        long r = Math.max(1, checkItr(arg[1]));
        while((i = iter.next()) != null) {
          if(--r == 0) seq.add(sub);
          seq.add(i.iter());
        }
        // position lay behind the last item: append the inserted sequence
        if(r > 0) seq.add(sub);
        return seq;
      case REVERSE:
        // a range can be reversed in place without materializing its items
        if(iter instanceof RangeIter) {
          ((RangeIter) iter).reverse();
          return iter;
        }
        while((i = iter.next()) != null) seq.insert(i, 0);
        return seq;
      case REMOVE:
        final long pos = checkItr(arg[1]);
        return remove(iter, pos);
      case SUBSEQ:
        final long start = checkItr(arg[1]);
        final long end = arg.length > 2 ? start + checkItr(arg[2]) :
          Long.MAX_VALUE;
        return subseq(iter, start, end);
      case DEEPEQ:
        return Bln.get(deep(arg)).iter();
      default:
        throw new RuntimeException("Not defined: " + func);
    }
  }

  /**
   * Returns the 1-based positions at which the specified item occurs
   * in the input (pipelined; positions are computed on demand).
   * @param iter input iterator
   * @param it item to be searched for in the iterator
   * @return iterator over the matching positions
   */
  private Iter indexOf(final Iter iter, final Item it) {
    return new Iter() {
      Item i;
      int index = 0;
      @Override
      public Item next() throws XQException {
        while((i = iter.next()) != null) {
          index++;
          if(CmpV.valCheck(i, it) && CmpV.COMP.EQ.e(i, it)) {
            // wrap the position in a fresh one-item sequence instead of
            // accumulating every hit into a shared SeqIter, which grew
            // without bound for long inputs
            final SeqIter hit = new SeqIter();
            hit.add(Itr.iter(index));
            return hit.item[0];
          }
        }
        return null;
      }
    };
  }

  /**
   * Returns the distinct values of the input (pipelined). Values equal to an
   * already returned one are skipped; NaN is returned at most once.
   * @param iter input iterator
   * @return iterator over the distinct values
   */
  private Iter distinctPipelined(final Iter iter) {
    return new Iter() {
      SeqIter sq = new SeqIter();
      Item i;
      @Override
      public Item next() throws XQException {
        loop1: while((i = iter.next()) != null) {
          // NaN is the only value for which x != x holds
          final boolean nan = i.n() && i.dbl() != i.dbl();
          for(int r = 0; r < sq.size; r++) {
            final Item c = sq.item[r];
            if(nan && c.dbl() != c.dbl()) continue loop1;
            if(CmpV.valCheck(i, c) && CmpV.COMP.EQ.e(i, c)) continue loop1;
          }
          sq.add(FNGen.atom(i));
          return i;
        }
        return null;
      }
    };
  }

  /**
   * Removes the item at the specified position from a sequence (pipelined).
   * @param iter input iterator
   * @param pos 1-based position of the item to be removed
   * @return iterator without the item
   */
  private Iter remove(final Iter iter, final long pos) {
    return new Iter() {
      long c = 0;
      @Override
      public Item next() throws XQException {
        Item i;
        while((i = iter.next()) != null) if(++c != pos) return i;
        return null;
      }
    };
  }

  /**
   * Creates a subsequence of a sequence, from position start (inclusive)
   * up to position end (exclusive), evaluated in a pipelined manner.
   * @param iter input iterator
   * @param start first 1-based position to be returned
   * @param end first position behind the subsequence
   * @return subsequence
   */
  private Iter subseq(final Iter iter, final long start, final long end) {
    return new Iter() {
      long c = 0;
      @Override
      public Item next() throws XQException {
        Item i;
        while((i = iter.next()) != null) {
          if(++c < start) continue;
          if(c >= end) break;
          return i;
        }
        return null;
      }
    };
  }

  /**
   * Checks two sequences for deep equality.
   * @param arg arguments (two sequences and an optional collation)
   * @return result of check
   * @throws XQException evaluation exception
   */
  private boolean deep(final Iter[] arg) throws XQException {
    if(arg.length == 3) checkColl(arg[2]);
    final Iter iter1 = arg[0];
    final Iter iter2 = arg[1];
    Item it1 = null;
    Item it2 = null;
    // non-short-circuit '&': both iterators must advance in lockstep
    while((it1 = iter1.next()) != null & (it2 = iter2.next()) != null) {
      // two NaN values are considered deep-equal
      if(it1.n() && it2.n() && it1.dbl() != it1.dbl() && it2.dbl() != it2.dbl())
        continue;
      if(!CmpV.valCheck(it1, it2) || CmpV.COMP.NE.e(it1, it2)) return false;
      if(!it1.node() && !it2.node()) continue;
      // comparing nodes
      if(!(it1.node() && it2.node())) return false;
      final NodeIter niter1 = ((Node) it1).descOrSelf();
      final NodeIter niter2 = ((Node) it2).descOrSelf();
      Node n1 = null, n2 = null;
      while((n1 = niter1.next()) != null & (n2 = niter2.next()) != null) {
        if(n1.type != n2.type) return false;
        if((n1.type == Type.ELM || n1.type == Type.PI) &&
          !n1.qname().eq(n2.qname())) return false;
        if(n1.type == Type.ATT) {
          if(!n1.qname().eq(n2.qname()) || !Token.eq(n1.str(), n2.str()))
            return false;
          continue;
        }
        // compare attribute counts first...
        NodeIter att1 = n1.attr();
        int s1 = 0;
        while(att1.next() != null) s1++;
        NodeIter att2 = n2.attr();
        int s2 = 0;
        while(att2.next() != null) s2++;
        if(s1 != s2) return false;
        // ...then look up every attribute of n1 in n2 (order-independent)
        Node a1 = null, a2 = null;
        att1 = n1.attr();
        while((a1 = att1.next()) != null) {
          att2 = n2.attr();
          boolean found = false;
          while((a2 = att2.next()) != null) {
            if(a1.qname().eq(a2.qname())) {
              found = Token.eq(a1.str(), a2.str());
              break;
            }
          }
          if(!found) return false;
        }
      }
      // both descendant traversals must be exhausted simultaneously
      if(n1 != n2) return false;
    }
    // both input sequences must end at the same time
    return it1 == it2;
  }
}
|
optimized indexOf Method
|
src/org/basex/query/xquery/func/FNSeq.java
|
optimized indexOf Method
|
|
Java
|
bsd-3-clause
|
ccd06fc60b027162f8e13acd65fc4fc878699561
| 0
|
luxiaohan/openxal-csns-luxh,luxiaohan/openxal-csns-luxh,openxal/openxal,luxiaohan/openxal-csns-luxh,EuropeanSpallationSource/openxal,EuropeanSpallationSource/openxal,EuropeanSpallationSource/openxal,luxiaohan/openxal-csns-luxh,openxal/openxal,EuropeanSpallationSource/openxal,openxal/openxal,EuropeanSpallationSource/openxal,openxal/openxal,openxal/openxal
|
/*
* JcaChannelServerPV.java
*
* Created on October 21, 2013, 9:37 AM
*/
package xal.plugin.jca;
import xal.ca.ChannelServerPV;
import com.cosylab.epics.caj.cas.util.MemoryProcessVariable;
/**
* Concrete JcaChannelServerPV wraps MemoryProcessVariable
* @author tap
*/
public class JcaChannelServerPV extends ChannelServerPV {
    /** wrapped JCA memory process variable; every accessor below simply delegates to it */
    private final MemoryProcessVariable nativePV;

    /** Constructor wrapping the specified native process variable */
    protected JcaChannelServerPV(final MemoryProcessVariable nativeProcessVariable) {
        nativePV = nativeProcessVariable;
    }

    /** get the units */
    public String getUnits() {
        return nativePV.getUnits();
    }

    /** set the units */
    public void setUnits(final String units) {
        nativePV.setUnits(units);
    }

    /** get the precision */
    public short getPrecision() {
        return nativePV.getPrecision();
    }

    /** set the precision */
    public void setPrecision(final short precision) {
        nativePV.setPrecision(precision);
    }

    /** get the lower display limit */
    public Number getLowerDispLimit() {
        return nativePV.getLowerDispLimit();
    }

    /** set the lower display limit */
    public void setLowerDispLimit(final Number limit) {
        nativePV.setLowerDispLimit(limit);
    }

    /** get the upper display limit */
    public Number getUpperDispLimit() {
        return nativePV.getUpperDispLimit();
    }

    /** set the upper display limit */
    public void setUpperDispLimit(final Number limit) {
        nativePV.setUpperDispLimit(limit);
    }

    /** get the lower alarm limit */
    public Number getLowerAlarmLimit() {
        return nativePV.getLowerAlarmLimit();
    }

    /** set the lower alarm limit */
    public void setLowerAlarmLimit(final Number limit) {
        nativePV.setLowerAlarmLimit(limit);
    }

    /** get the upper alarm limit */
    public Number getUpperAlarmLimit() {
        return nativePV.getUpperAlarmLimit();
    }

    /** set the upper alarm limit */
    public void setUpperAlarmLimit(final Number limit) {
        nativePV.setUpperAlarmLimit(limit);
    }

    /** get the lower control limit */
    public Number getLowerCtrlLimit() {
        return nativePV.getLowerCtrlLimit();
    }

    /** set the lower control limit */
    public void setLowerCtrlLimit(final Number limit) {
        nativePV.setLowerCtrlLimit(limit);
    }

    /** get the upper control limit */
    public Number getUpperCtrlLimit() {
        return nativePV.getUpperCtrlLimit();
    }

    /** set the upper control limit */
    public void setUpperCtrlLimit(final Number limit) {
        nativePV.setUpperCtrlLimit(limit);
    }

    /** get the lower warning limit */
    public Number getLowerWarningLimit() {
        return nativePV.getLowerWarningLimit();
    }

    /** set the lower warning limit */
    public void setLowerWarningLimit(final Number limit) {
        nativePV.setLowerWarningLimit(limit);
    }

    /** get the upper warning limit */
    public Number getUpperWarningLimit() {
        return nativePV.getUpperWarningLimit();
    }

    /** set the upper warning limit */
    public void setUpperWarningLimit(final Number limit) {
        nativePV.setUpperWarningLimit(limit);
    }
}
|
plugins/jca/src/xal/plugin/jca/JcaChannelServerPV.java
|
/*
* JcaChannelServerPV.java
*
* Created on October 21, 2013, 9:37 AM
*/
package xal.plugin.jca;
import xal.ca.ChannelServerPV;
import com.cosylab.epics.caj.cas.util.MemoryProcessVariable;
/**
* Concrete JcaChannelServerPV wraps MemoryProcessVariable
* @author tap
*/
// NOTE(review): class is marked deprecated but carries no Javadoc @deprecated
// tag naming a replacement — presumably superseded by a newer channel-server
// mechanism; confirm the intended substitute before relying on the deprecation.
@Deprecated
public class JcaChannelServerPV extends ChannelServerPV {
    /** native process variable wrapped by this instance */
    final private MemoryProcessVariable NATIVE_PROCESS_VARIABLE;

    /** Constructor */
    protected JcaChannelServerPV( final MemoryProcessVariable nativeProcessVariable ) {
        NATIVE_PROCESS_VARIABLE = nativeProcessVariable;
    }

    // Every accessor below is a straight delegation to the wrapped
    // MemoryProcessVariable; this class holds no state of its own.

    /** get the units */
    public String getUnits() {
        return NATIVE_PROCESS_VARIABLE.getUnits();
    }

    /** set the units */
    public void setUnits( final String units ) {
        NATIVE_PROCESS_VARIABLE.setUnits( units );
    }

    /** get the precision */
    public short getPrecision() {
        return NATIVE_PROCESS_VARIABLE.getPrecision();
    }

    /** set the precision */
    public void setPrecision( final short precision ) {
        NATIVE_PROCESS_VARIABLE.setPrecision( precision );
    }

    /** get the lower display limit */
    public Number getLowerDispLimit() {
        return NATIVE_PROCESS_VARIABLE.getLowerDispLimit();
    }

    /** set the lower display limit */
    public void setLowerDispLimit( final Number lowerLimit ) {
        NATIVE_PROCESS_VARIABLE.setLowerDispLimit( lowerLimit );
    }

    /** get the upper display limit */
    public Number getUpperDispLimit() {
        return NATIVE_PROCESS_VARIABLE.getUpperDispLimit();
    }

    /** set the upper display limit */
    public void setUpperDispLimit( final Number upperLimit ) {
        NATIVE_PROCESS_VARIABLE.setUpperDispLimit( upperLimit );
    }

    /** get the lower alarm limit */
    public Number getLowerAlarmLimit() {
        return NATIVE_PROCESS_VARIABLE.getLowerAlarmLimit();
    }

    /** set the lower alarm limit */
    public void setLowerAlarmLimit( final Number lowerLimit ) {
        NATIVE_PROCESS_VARIABLE.setLowerAlarmLimit( lowerLimit );
    }

    /** get the upper alarm limit */
    public Number getUpperAlarmLimit() {
        return NATIVE_PROCESS_VARIABLE.getUpperAlarmLimit();
    }

    /** set the upper alarm limit */
    public void setUpperAlarmLimit( final Number upperLimit ) {
        NATIVE_PROCESS_VARIABLE.setUpperAlarmLimit( upperLimit );
    }

    /** get the lower control limit */
    public Number getLowerCtrlLimit() {
        return NATIVE_PROCESS_VARIABLE.getLowerCtrlLimit();
    }

    /** set the lower control limit */
    public void setLowerCtrlLimit( final Number lowerLimit ) {
        NATIVE_PROCESS_VARIABLE.setLowerCtrlLimit( lowerLimit );
    }

    /** get the upper control limit */
    public Number getUpperCtrlLimit() {
        return NATIVE_PROCESS_VARIABLE.getUpperCtrlLimit();
    }

    /** set the upper control limit */
    public void setUpperCtrlLimit( final Number upperLimit ) {
        NATIVE_PROCESS_VARIABLE.setUpperCtrlLimit( upperLimit );
    }

    /** get the lower warning limit */
    public Number getLowerWarningLimit() {
        return NATIVE_PROCESS_VARIABLE.getLowerWarningLimit();
    }

    /** set the lower warning limit */
    public void setLowerWarningLimit( final Number lowerLimit ) {
        NATIVE_PROCESS_VARIABLE.setLowerWarningLimit( lowerLimit );
    }

    /** get the upper warning limit */
    public Number getUpperWarningLimit() {
        return NATIVE_PROCESS_VARIABLE.getUpperWarningLimit();
    }

    /** set the upper warning limit */
    public void setUpperWarningLimit( final Number upperLimit ) {
        NATIVE_PROCESS_VARIABLE.setUpperWarningLimit( upperLimit );
    }
}
|
JcaChannelServer is used in scripts and in some cases is preferable to the new jca server mechanism, so it is no longer deprecated.
|
plugins/jca/src/xal/plugin/jca/JcaChannelServerPV.java
|
JcaChannelServer is used in scripts and in some cases is preferable to the new jca server mechanism, so it is no longer deprecated.
|
|
Java
|
mit
|
9e3d929cbc293720cbca8adcdcd6fd31ed1f0367
| 0
|
pennlabs/penn-mobile-android,pennlabs/penn-mobile-android,pennlabs/penn-mobile-android
|
package com.pennapps.labs.pennmobile;
import android.os.Bundle;
import android.support.v4.app.ListFragment;
import android.view.View;
import android.widget.ListView;
import android.widget.RelativeLayout;
import android.widget.TextView;
import com.pennapps.labs.pennmobile.api.Labs;
import java.util.concurrent.atomic.AtomicInteger;
import butterknife.Bind;
/**
 * Base class for the search/favorite list tabs. Holds the views and the
 * arguments (favorite flag and list type) shared by the concrete tabs.
 * Created by Jason on 1/26/2016.
 */
public abstract class SearchFavoriteTab extends ListFragment {
    protected boolean fav;
    protected String type;
    protected ListView mListView;
    protected MainActivity mActivity;
    protected Labs mLabs;
    /** source of unique, non-aapt view ids handed out by {@link #generateViewId()} */
    private static final AtomicInteger sNextGeneratedId = new AtomicInteger(1);
    protected @Bind(R.id.loadingPanel) RelativeLayout loadingPanel;
    protected @Bind(R.id.no_results) TextView no_results;
    protected @Bind(R.id.search_instructions) TextView search_instructions;

    public SearchFavoriteTab() {
        super();
    }

    @Override
    public void onCreate (Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        fav = getArguments().getBoolean(getString(R.string.search_favorite), false);
        type = getArguments().getString(getString(R.string.search_list), "");
        mActivity = (MainActivity) getActivity();
        mLabs = MainActivity.getLabsInstance();
    }

    /** Hides the search instructions and shows the loading panel once a non-empty query arrives. */
    public void processQuery(String query) {
        // Bound views can be null before ButterKnife binds the fragment's
        // views; guard like noResults()/notFavoriteInit() do to avoid an NPE.
        if (search_instructions == null) {
            return;
        }
        if (search_instructions.getVisibility() == View.VISIBLE && !query.isEmpty()) {
            search_instructions.setVisibility(View.GONE);
            if(loadingPanel != null) {
                loadingPanel.setVisibility(View.VISIBLE);
            }
        }
    }

    /** Shows the "no results" message and hides the remaining views. */
    protected void noResults() {
        if (loadingPanel != null) {
            loadingPanel.setVisibility(View.GONE);
            no_results.setVisibility(View.VISIBLE);
            mListView.setVisibility(View.GONE);
            search_instructions.setVisibility(View.GONE);
        }
    }

    /** Initial state for a non-favorite tab: only the search instructions are visible. */
    protected void notFavoriteInit() {
        if (search_instructions != null) {
            search_instructions.setVisibility(View.VISIBLE);
            no_results.setVisibility(View.GONE);
            loadingPanel.setVisibility(View.GONE);
            mListView.setVisibility(View.GONE);
        }
    }

    /** Populates the tab's list; implemented by each concrete tab. */
    public abstract void initList();

    /**
     * Generates a view id that cannot collide with aapt-generated resource
     * ids (same scheme as AOSP's View.generateViewId()).
     */
    public static int generateViewId() {
        for (;;) {
            final int result = sNextGeneratedId.get();
            // aapt-generated IDs have the high byte nonzero; clamp to the range under that.
            int newValue = result + 1;
            if (newValue > 0x00FFFFFF) newValue = 1; // Roll over to 1, not 0.
            if (sNextGeneratedId.compareAndSet(result, newValue)) {
                return result;
            }
        }
    }
}
|
PennMobile/src/main/java/com/pennapps/labs/pennmobile/SearchFavoriteTab.java
|
package com.pennapps.labs.pennmobile;
import android.os.Bundle;
import android.support.v4.app.ListFragment;
import android.view.View;
import android.widget.ListView;
import android.widget.RelativeLayout;
import android.widget.TextView;
import com.pennapps.labs.pennmobile.api.Labs;
import java.util.concurrent.atomic.AtomicInteger;
import butterknife.Bind;
/**
* Created by Jason on 1/26/2016.
*/
public abstract class SearchFavoriteTab extends ListFragment {
protected boolean fav;
protected String type;
protected ListView mListView;
protected MainActivity mActivity;
protected Labs mLabs;
private static final AtomicInteger sNextGeneratedId = new AtomicInteger(1);
protected @Bind(R.id.loadingPanel) RelativeLayout loadingPanel;
protected @Bind(R.id.no_results) TextView no_results;
protected @Bind(R.id.search_instructions) TextView search_instructions;
public SearchFavoriteTab() {
super();
}
@Override
public void onCreate (Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
fav = getArguments().getBoolean(getString(R.string.search_favorite), false);
type = getArguments().getString(getString(R.string.search_list), "");
mActivity = (MainActivity) getActivity();
mLabs = MainActivity.getLabsInstance();
}
public void processQuery(String query) {
if (search_instructions.getVisibility() == View.VISIBLE && !query.isEmpty()) {
search_instructions.setVisibility(View.GONE);
if(loadingPanel != null) {
loadingPanel.setVisibility(View.VISIBLE);
}
}
}
protected void noResults() {
if (loadingPanel != null) {
loadingPanel.setVisibility(View.GONE);
no_results.setVisibility(View.VISIBLE);
mListView.setVisibility(View.GONE);
search_instructions.setVisibility(View.GONE);
}
}
protected void notFavoriteInit() {
search_instructions.setVisibility(View.VISIBLE);
no_results.setVisibility(View.GONE);
loadingPanel.setVisibility(View.GONE);
mListView.setVisibility(View.GONE);
}
public abstract void initList();
public static int generateViewId() {
for (;;) {
final int result = sNextGeneratedId.get();
// aapt-generated IDs have the high byte nonzero; clamp to the range under that.
int newValue = result + 1;
if (newValue > 0x00FFFFFF) newValue = 1; // Roll over to 1, not 0.
if (sNextGeneratedId.compareAndSet(result, newValue)) {
return result;
}
}
}
}
|
fixed issue #413
|
PennMobile/src/main/java/com/pennapps/labs/pennmobile/SearchFavoriteTab.java
|
fixed issue #413
|
|
Java
|
mit
|
a48e38de7b075a2bec404d2cc4a49cfaf64aec33
| 0
|
JetBrains/ideavim,JetBrains/ideavim
|
/*
* IdeaVim - Vim emulator for IDEs based on the IntelliJ platform
* Copyright (C) 2003-2022 The IdeaVim authors
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
package com.maddyhome.idea.vim.helper;
import javax.swing.*;
import java.util.List;
import static com.maddyhome.idea.vim.api.VimInjectorKt.injector;
/**
* COMPATIBILITY-LAYER: Created a helper class
*/
public class StringHelper {
/**
 * Parses a Vim key-notation string (e.g. {@code "<C-O>gg"}) into the key
 * strokes it represents, delegating to the engine parser obtained via the
 * injector. Kept package-private: compat-layer callers live in this package.
 */
static List<KeyStroke> parseKeys(String string) {
return injector.getParser().parseKeys(string);
}
}
|
src/main/java/com/maddyhome/idea/vim/helper/StringHelper.java
|
/*
* IdeaVim - Vim emulator for IDEs based on the IntelliJ platform
* Copyright (C) 2003-2022 The IdeaVim authors
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
package com.maddyhome.idea.vim.helper;
import javax.swing.*;
import java.util.List;
import static com.maddyhome.idea.vim.api.VimInjectorKt.injector;
/**
* COMPATIBILITY-LAYER: Created a helper class
*/
public class StringHelper {
    /**
     * Parses a Vim key-notation string (e.g. {@code "<C-O>gg"}) into the key
     * strokes it represents, delegating to the engine parser via the injector.
     *
     * <p>Made {@code static}: the method touches no instance state, and existing
     * instance-qualified calls to a static method still compile in Java, so the
     * change is backward compatible.
     */
    static List<KeyStroke> parseKeys(String string) {
        return injector.getParser().parseKeys(string);
    }
}
|
Make function static
|
src/main/java/com/maddyhome/idea/vim/helper/StringHelper.java
|
Make function static
|
|
Java
|
mit
|
fe2dc8398908d1d7e7879f5f9c11c8592e471b5e
| 0
|
henu/gdxextras
|
package fi.henu.gdxextras.gui;
import com.badlogic.gdx.graphics.g2d.SpriteBatch;
import com.badlogic.gdx.graphics.glutils.ShapeRenderer;
import com.badlogic.gdx.math.Vector2;
public class Checkbox extends Widget
{
	// Current state: whether the box is ticked and whether it reacts to input.
	private boolean checked;
	private boolean enabled;

	// Shared style used by every Checkbox instance.
	private static CheckboxStyle default_style;

	// This is used to set default background and selected graphics for Button
	public static void setDefaultStyle(CheckboxStyle style)
	{
		default_style = style;
	}

	public Checkbox()
	{
		this(false);
	}

	public Checkbox(boolean checked)
	{
		this.checked = checked;
		enabled = true;
	}

	public void setChecked(boolean checked)
	{
		this.checked = checked;
	}

	public boolean getChecked()
	{
		return checked;
	}

	public void setEnabled(boolean enabled)
	{
		this.enabled = enabled;
	}

	public boolean getEnabled()
	{
		return enabled;
	}

	@Override
	public boolean pointerDown(int pointer_id, Vector2 pos)
	{
		// A disabled checkbox ignores input; otherwise toggle and notify.
		// Never consumes the pointer event.
		if (!enabled) {
			return false;
		}
		checked = !checked;
		fireEvent();
		return false;
	}

	@Override
	protected void doRendering(SpriteBatch batch, ShapeRenderer shapes)
	{
		// Draw whichever region matches the current state.
		CheckboxStyle style = getStyle();
		render(batch, checked ? style.checked : style.not_checked, getPositionX(), getPositionY(), style.scaling);
	}

	@Override
	protected float doGetMinWidth()
	{
		CheckboxStyle style = getStyle();
		return (checked ? style.checked : style.not_checked).originalWidth * style.scaling;
	}

	@Override
	protected float doGetMinHeight(float width)
	{
		CheckboxStyle style = getStyle();
		return (checked ? style.checked : style.not_checked).originalHeight * style.scaling;
	}

	private CheckboxStyle getStyle()
	{
		// Per-instance styles are not supported; always the shared default.
		return default_style;
	}
}
|
gui/Checkbox.java
|
package fi.henu.gdxextras.gui;
import com.badlogic.gdx.graphics.g2d.SpriteBatch;
import com.badlogic.gdx.graphics.glutils.ShapeRenderer;
import com.badlogic.gdx.math.Vector2;
public class Checkbox extends Widget
{
	// Whether the box is currently ticked.
	private boolean checked;

	// Shared style used by every Checkbox instance.
	private static CheckboxStyle default_style;

	// This is used to set default background and selected graphics for Button
	public static void setDefaultStyle(CheckboxStyle style)
	{
		default_style = style;
	}

	public Checkbox()
	{
	}

	public Checkbox(boolean checked)
	{
		this.checked = checked;
	}

	public boolean getChecked()
	{
		return checked;
	}

	@Override
	public boolean pointerDown(int pointer_id, Vector2 pos)
	{
		// Toggle on press and notify listeners; never consume the event.
		checked = !checked;
		fireEvent();
		return false;
	}

	@Override
	protected void doRendering(SpriteBatch batch, ShapeRenderer shapes)
	{
		// Draw whichever region matches the current state.
		CheckboxStyle style = getStyle();
		render(batch, checked ? style.checked : style.not_checked, getPositionX(), getPositionY(), style.scaling);
	}

	@Override
	protected float doGetMinWidth()
	{
		CheckboxStyle style = getStyle();
		return (checked ? style.checked : style.not_checked).originalWidth * style.scaling;
	}

	@Override
	protected float doGetMinHeight(float width)
	{
		CheckboxStyle style = getStyle();
		return (checked ? style.checked : style.not_checked).originalHeight * style.scaling;
	}

	private CheckboxStyle getStyle()
	{
		// Per-instance styles are not supported; always the shared default.
		return default_style;
	}
}
|
Checkbox: Option to enable/disable it.
|
gui/Checkbox.java
|
Checkbox: Option to enable/disable it.
|
|
Java
|
mit
|
cea793ac96f796353cfd6305745800fbc58c025a
| 0
|
McJty/RFToolsDimensions
|
package mcjty.rftoolsdim.dimensions.dimlets;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Lists;
import mcjty.lib.varia.EntityTools;
import mcjty.lib.varia.Logging;
import mcjty.rftoolsdim.RFToolsDim;
import mcjty.rftoolsdim.config.DimletRules;
import mcjty.rftoolsdim.config.Filter;
import mcjty.rftoolsdim.config.GeneralConfiguration;
import mcjty.rftoolsdim.config.Settings;
import mcjty.rftoolsdim.dimensions.DimensionInformation;
import mcjty.rftoolsdim.dimensions.description.SkyDescriptor;
import mcjty.rftoolsdim.dimensions.description.WeatherDescriptor;
import mcjty.rftoolsdim.dimensions.dimlets.types.DimletType;
import mcjty.rftoolsdim.dimensions.types.*;
import mcjty.rftoolsdim.dimensions.world.BiomeControllerMapping;
import mcjty.rftoolsdim.items.ModItems;
import net.minecraft.block.Block;
import net.minecraft.block.BlockFalling;
import net.minecraft.block.BlockLiquid;
import net.minecraft.block.properties.IProperty;
import net.minecraft.block.state.IBlockState;
import net.minecraft.entity.Entity;
import net.minecraft.entity.EntityLivingBase;
import net.minecraft.init.Blocks;
import net.minecraft.item.ItemStack;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.util.ResourceLocation;
import net.minecraft.world.biome.Biome;
import net.minecraftforge.common.IPlantable;
import net.minecraftforge.fluids.Fluid;
import net.minecraftforge.fluids.FluidRegistry;
import net.minecraftforge.fml.common.registry.EntityEntry;
import net.minecraftforge.fml.common.registry.ForgeRegistries;
import net.minecraftforge.oredict.OreDictionary;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.Level;
import java.lang.reflect.Modifier;
import java.util.*;
/**
 * Registry of every dimlet known at runtime. Built lazily from the vanilla/Forge
 * registries (biomes, entities, fluids, blocks) plus the fixed built-in dimlet
 * sets (digits, terrain, sky, weather, time, ...), with each candidate filtered
 * through DimletRules.getSettings() so blacklisted dimlets are excluded.
 */
public class KnownDimletConfiguration {
// Dimlets the player can craft; filled lazily by registerCraftables().
private static Set<DimletKey> craftableDimlets = new HashSet<>();
// All non-blacklisted dimlets with their settings; filled lazily by initDimlets().
private static SortedMap<DimletKey, Settings> knownDimlets = new TreeMap<>();
// Returns the settings for a key, or null if unknown/blacklisted (see isBlacklisted()).
public static Settings getSettings(DimletKey key) {
initDimlets();
return knownDimlets.get(key);
}
// NOTE(review): returns the internal mutable map directly — callers could mutate it.
public static SortedMap<DimletKey, Settings> getKnownDimlets() {
initDimlets();
return knownDimlets;
}
// Clears both caches so the next accessor call rebuilds them (e.g. after config reload).
public static void init() {
knownDimlets.clear();
craftableDimlets.clear();
}
// Populates knownDimlets once; subsequent calls are no-ops while the map is non-empty.
private static void initDimlets() {
if (!knownDimlets.isEmpty()) {
return;
}
// Digit dimlets 0-9.
for (int i = 0 ; i <= 9 ; i++) {
initDimlet(new DimletKey(DimletType.DIMLET_DIGIT, Integer.toString(i)), RFToolsDim.MODID);
}
// In void-only mode, only the void terrain dimlet is registered.
if (GeneralConfiguration.voidOnly) {
initDimlet(new DimletKey(DimletType.DIMLET_TERRAIN, TerrainType.TERRAIN_VOID.getId()), RFToolsDim.MODID);
} else {
Arrays.stream(TerrainType.values()).forEach(t -> initDimlet(new DimletKey(DimletType.DIMLET_TERRAIN, t.getId()), RFToolsDim.MODID));
}
// One dimlet per enum constant of each built-in dimlet category.
Arrays.stream(ControllerType.values()).forEach(t -> initDimlet(new DimletKey(DimletType.DIMLET_CONTROLLER, t.getId()), RFToolsDim.MODID));
Arrays.stream(FeatureType.values()).forEach(t -> initDimlet(new DimletKey(DimletType.DIMLET_FEATURE, t.getId()), RFToolsDim.MODID));
Arrays.stream(EffectType.values()).forEach(t -> initDimlet(new DimletKey(DimletType.DIMLET_EFFECT, t.getId()), RFToolsDim.MODID));
Arrays.stream(StructureType.values()).forEach(t -> initDimlet(new DimletKey(DimletType.DIMLET_STRUCTURE, t.getId()), RFToolsDim.MODID));
Arrays.stream(SpecialType.values()).forEach(t -> initDimlet(new DimletKey(DimletType.DIMLET_SPECIAL, t.getId()), RFToolsDim.MODID));
// Registry-driven dimlets: biomes, mobs, fluids and block materials.
Biome.REGISTRY.iterator().forEachRemaining(KnownDimletConfiguration::initBiomeDimlet);
Set<ResourceLocation> keys = ForgeRegistries.ENTITIES.getKeys();
keys.stream().map(ResourceLocation::toString).forEach(KnownDimletConfiguration::initMobDimlet);
initMobDimlet(DimletObjectMapping.DEFAULT_ID);
FluidRegistry.getRegisteredFluids().entrySet().stream().forEach(KnownDimletConfiguration::initFluidDimlet);
Block.REGISTRY.forEach(KnownDimletConfiguration::initMaterialDimlet);
// Ensure the baseline stone/water dimlets exist even if rules filtered them.
initDimlet(new DimletKey(DimletType.DIMLET_MATERIAL, Blocks.STONE.getRegistryName() + "@0"), "minecraft");
initDimlet(new DimletKey(DimletType.DIMLET_LIQUID, Blocks.WATER.getRegistryName() + "@0"), "minecraft");
// Time dimlets.
initDimlet(new DimletKey(DimletType.DIMLET_TIME, "Normal"), RFToolsDim.MODID);
initDimlet(new DimletKey(DimletType.DIMLET_TIME, "Noon"), RFToolsDim.MODID);
initDimlet(new DimletKey(DimletType.DIMLET_TIME, "Midnight"), RFToolsDim.MODID);
initDimlet(new DimletKey(DimletType.DIMLET_TIME, "Morning"), RFToolsDim.MODID);
initDimlet(new DimletKey(DimletType.DIMLET_TIME, "Evening"), RFToolsDim.MODID);
initDimlet(new DimletKey(DimletType.DIMLET_TIME, "Fast"), RFToolsDim.MODID);
initDimlet(new DimletKey(DimletType.DIMLET_TIME, "Slow"), RFToolsDim.MODID);
// addExtraInformation(keyTimeNormal, "With this normal dimlet you will get", "default day/night timing");
initSkyDimlets();
initWeatherDimlets();
BiomeControllerMapping.setupControllerBiomes();
}
// Registers all weather dimlets (default plus rain/thunder intensities).
private static void initWeatherDimlets() {
initWeatherDimlet("Default", new WeatherDescriptor.Builder().build());
initWeatherDimlet("no.rain", new WeatherDescriptor.Builder().weatherType(WeatherType.WEATHER_NORAIN).build());
initWeatherDimlet("light.rain", new WeatherDescriptor.Builder().weatherType(WeatherType.WEATHER_LIGHTRAIN).build());
initWeatherDimlet("hard.rain", new WeatherDescriptor.Builder().weatherType(WeatherType.WEATHER_HARDRAIN).build());
initWeatherDimlet("no.thunder", new WeatherDescriptor.Builder().weatherType(WeatherType.WEATHER_NOTHUNDER).build());
initWeatherDimlet("light.thunder", new WeatherDescriptor.Builder().weatherType(WeatherType.WEATHER_LIGHTTHUNDER).build());
initWeatherDimlet("hard.thunder", new WeatherDescriptor.Builder().weatherType(WeatherType.WEATHER_HARDTHUNDER).build());
}
// Registers one weather dimlet and binds its descriptor in the WeatherRegistry.
private static void initWeatherDimlet(String id, WeatherDescriptor weatherDescriptor) {
DimletKey key = new DimletKey(DimletType.DIMLET_WEATHER, id);
initDimlet(key, RFToolsDim.MODID);
WeatherRegistry.registerWeather(key, weatherDescriptor);
}
// Registers all sky dimlets: brightness, sky/fog/cloud colors, sky types and
// celestial bodies. The boolean passed through marks "celestial body" dimlets.
private static void initSkyDimlets() {
// Day brightness
initSkyDimlet("normal.day", new SkyDescriptor.Builder().sunBrightnessFactor(1.0f).build(), false);
initSkyDimlet("dark.day", new SkyDescriptor.Builder().sunBrightnessFactor(0.4f).skyColorFactor(0.6f, 0.6f, 0.6f).build(), false);
// Night brightness
initSkyDimlet("normal.night", new SkyDescriptor.Builder().starBrightnessFactor(1.0f).build(), false);
initSkyDimlet("bright.night", new SkyDescriptor.Builder().starBrightnessFactor(1.5f).build(), false);
initSkyDimlet("dark.night", new SkyDescriptor.Builder().starBrightnessFactor(0.4f).build(), false);
// Sky color
initSkyDimlet("red", new SkyDescriptor.Builder().skyColorFactor(1.0f, 0.2f, 0.2f).build(), false);
initSkyDimlet("dark.red", new SkyDescriptor.Builder().skyColorFactor(0.6f, 0.0f, 0.0f).build(), false);
initSkyDimlet("green", new SkyDescriptor.Builder().skyColorFactor(0.2f, 1.0f, 0.2f).build(), false);
initSkyDimlet("dark.green", new SkyDescriptor.Builder().skyColorFactor(0f, 0.6f, 0f).build(), false);
initSkyDimlet("blue", new SkyDescriptor.Builder().skyColorFactor(0.2f, 0.2f, 1.0f).build(), false);
initSkyDimlet("dark.blue", new SkyDescriptor.Builder().skyColorFactor(0.0f, 0.0f, 0.6f).build(), false);
initSkyDimlet("yellow", new SkyDescriptor.Builder().skyColorFactor(1.0f, 1.0f, 0.2f).build(), false);
initSkyDimlet("cyan", new SkyDescriptor.Builder().skyColorFactor(0.2f, 1.0f, 1.0f).build(), false);
initSkyDimlet("dark.cyan", new SkyDescriptor.Builder().skyColorFactor(0.0f, 0.6f, 0.6f).build(), false);
initSkyDimlet("purple", new SkyDescriptor.Builder().skyColorFactor(1.0f, 0.2f, 1.0f).build(), false);
initSkyDimlet("dark.purple", new SkyDescriptor.Builder().skyColorFactor(0.6f, 0, 0.6f).build(), false);
initSkyDimlet("black", new SkyDescriptor.Builder().skyColorFactor(0.0f, 0.0f, 0.0f).build(), false);
initSkyDimlet("gold", new SkyDescriptor.Builder().skyColorFactor(1.0f, 0.6f, 0.0f).build(), false);
// Fog color
initSkyDimlet("normal.fog", new SkyDescriptor.Builder().fogColorFactor(1.0f, 1.0f, 1.0f).build(), false);
initSkyDimlet("black.fog", new SkyDescriptor.Builder().fogColorFactor(0.0f, 0.0f, 0.0f).build(), false);
initSkyDimlet("red.fog", new SkyDescriptor.Builder().fogColorFactor(1.0f, 0.2f, 0.2f).build(), false);
initSkyDimlet("green.fog", new SkyDescriptor.Builder().fogColorFactor(0.2f, 1.0f, 0.2f).build(), false);
initSkyDimlet("blue.fog", new SkyDescriptor.Builder().fogColorFactor(0.2f, 0.2f, 1.0f).build(), false);
initSkyDimlet("yellow.fog", new SkyDescriptor.Builder().fogColorFactor(1.0f, 1.0f, 0.2f).build(), false);
initSkyDimlet("cyan.fog", new SkyDescriptor.Builder().fogColorFactor(0.2f, 1.0f, 1.0f).build(), false);
initSkyDimlet("purple.fog", new SkyDescriptor.Builder().fogColorFactor(1.0f, 0.2f, 1.0f).build(), false);
// Sky type
initSkyDimlet("normal", new SkyDescriptor.Builder().skyType(SkyType.SKY_NORMAL).build(), false);
initSkyDimlet("ender", new SkyDescriptor.Builder().skyType(SkyType.SKY_ENDER).build(), false);
initSkyDimlet("inferno", new SkyDescriptor.Builder().skyType(SkyType.SKY_INFERNO).build(), false);
initSkyDimlet("stars1", new SkyDescriptor.Builder().skyType(SkyType.SKY_STARS1).build(), false);
initSkyDimlet("stars2", new SkyDescriptor.Builder().skyType(SkyType.SKY_STARS2).build(), false);
// Celestial bodies
initSkyDimlet("body.none", new SkyDescriptor.Builder().addBody(CelestialBodyType.BODY_NONE).build(), false); // False because we don't want to select this randomly.
initSkyDimlet("body.sun", new SkyDescriptor.Builder().addBody(CelestialBodyType.BODY_SUN).build(), true);
initSkyDimlet("body.large.sun", new SkyDescriptor.Builder().addBody(CelestialBodyType.BODY_LARGESUN).build(), true);
initSkyDimlet("body.small.sun", new SkyDescriptor.Builder().addBody(CelestialBodyType.BODY_SMALLSUN).build(), true);
initSkyDimlet("body.red.sun", new SkyDescriptor.Builder().addBody(CelestialBodyType.BODY_REDSUN).build(), true);
initSkyDimlet("body.moon", new SkyDescriptor.Builder().addBody(CelestialBodyType.BODY_MOON).build(), true);
initSkyDimlet("body.large.moon", new SkyDescriptor.Builder().addBody(CelestialBodyType.BODY_LARGEMOON).build(), true);
initSkyDimlet("body.small.moon", new SkyDescriptor.Builder().addBody(CelestialBodyType.BODY_SMALLMOON).build(), true);
initSkyDimlet("body.red.moon", new SkyDescriptor.Builder().addBody(CelestialBodyType.BODY_REDMOON).build(), true);
initSkyDimlet("body.planet", new SkyDescriptor.Builder().addBody(CelestialBodyType.BODY_PLANET).build(), true);
initSkyDimlet("body.large.planet", new SkyDescriptor.Builder().addBody(CelestialBodyType.BODY_LARGEPLANET).build(), true);
// Cloud color
initSkyDimlet("normal.clouds", new SkyDescriptor.Builder().cloudColorFactor(1.0f, 1.0f, 1.0f).build(), false);
initSkyDimlet("black.clouds", new SkyDescriptor.Builder().cloudColorFactor(0.0f, 0.0f, 0.0f).build(), false);
initSkyDimlet("red.clouds", new SkyDescriptor.Builder().cloudColorFactor(1.0f, 0.2f, 0.2f).build(), false);
initSkyDimlet("green.clouds", new SkyDescriptor.Builder().cloudColorFactor(0.2f, 1.0f, 0.2f).build(), false);
initSkyDimlet("blue.clouds", new SkyDescriptor.Builder().cloudColorFactor(0.2f, 0.2f, 1.0f).build(), false);
initSkyDimlet("yellow.clouds", new SkyDescriptor.Builder().cloudColorFactor(1.0f, 1.0f, 0.2f).build(), false);
initSkyDimlet("cyan.clouds", new SkyDescriptor.Builder().cloudColorFactor(0.2f, 1.0f, 1.0f).build(), false);
initSkyDimlet("purple.clouds", new SkyDescriptor.Builder().cloudColorFactor(1.0f, 0.2f, 1.0f).build(), false);
}
// Registers one sky dimlet and binds its descriptor in the SkyRegistry.
// 'body' marks celestial-body dimlets (see SkyRegistry.registerSky).
private static void initSkyDimlet(String id, SkyDescriptor descriptor, boolean body) {
DimletKey key = new DimletKey(DimletType.DIMLET_SKY, id);
initDimlet(key, RFToolsDim.MODID);
SkyRegistry.registerSky(key, descriptor, body);
}
// Registers a biome dimlet, attributed to the mod that owns the biome.
private static void initBiomeDimlet(Biome biome) {
ResourceLocation registryName = biome.getRegistryName();
if (registryName != null) {
DimletKey key = new DimletKey(DimletType.DIMLET_BIOME, registryName.toString());
initDimlet(key, biome.getRegistryName().getResourceDomain());
}
}
// Registers a mob dimlet. The special DEFAULT_ID belongs to this mod;
// everything else must resolve to a registered entity.
private static void initMobDimlet(String id) {
if (DimletObjectMapping.DEFAULT_ID.equals(id)) {
DimletKey key = new DimletKey(DimletType.DIMLET_MOB, id);
initDimlet(key, RFToolsDim.MODID);
} else {
EntityEntry entry = ForgeRegistries.ENTITIES.getValue(new ResourceLocation(id));
if (entry != null) {
DimletKey key = new DimletKey(DimletType.DIMLET_MOB, id);
initDimlet(key, entry.getRegistryName().getResourceDomain());
}
}
}
// Registers a liquid dimlet for every placeable fluid with a valid block form.
private static void initFluidDimlet(Map.Entry<String, Fluid> me) {
if (me.getValue().canBePlacedInWorld()) {
String name = me.getKey();
if (name != null && !name.isEmpty()) {
Block block = me.getValue().getBlock();
if (block != null) {
ResourceLocation nameForObject = Block.REGISTRY.getNameForObject(block);
if (nameForObject != null) {
String mod = nameForObject.getResourceDomain();
DimletKey key = new DimletKey(DimletType.DIMLET_LIQUID, block.getRegistryName() + "@0");
initDimlet(key, mod);
}
}
}
}
}
// Looks up the rule settings for a key and records it unless blacklisted.
private static void initDimlet(DimletKey key, String mod) {
Settings settings = DimletRules.getSettings(key, mod);
if (!settings.isBlacklisted()) {
knownDimlets.put(key, settings);
}
}
// Registers one material dimlet per valid block state (keyed by metadata).
// Liquids are handled by initFluidDimlet; lit redstone ore is a transient
// state of redstone ore and is skipped.
private static void initMaterialDimlet(Block block) {
if (block instanceof BlockLiquid || block == Blocks.LIT_REDSTONE_ORE) {
return;
}
Set<Filter.Feature> features = getBlockFeatures(block);
ResourceLocation nameForObject = Block.REGISTRY.getNameForObject(block);
String mod = nameForObject.getResourceDomain();
for (IBlockState state : block.getBlockState().getValidStates()) {
int meta = state.getBlock().getMetaFromState(state);
ItemStack stack = new ItemStack(state.getBlock(), 1, state.getBlock().getMetaFromState(state));
if (stack.getItem() != null) { // Protection
// Collect the state's properties so rules can filter on them.
List<IProperty<?>> propertyNames = new ArrayList<>(state.getPropertyKeys());
propertyNames.sort((o1, o2) -> o1.getName().compareTo(o2.getName()));
ImmutableMap<IProperty<?>, Comparable<?>> properties = state.getProperties();
Map<String, String> props = new HashMap<>();
for (Map.Entry<IProperty<?>, Comparable<?>> entry : properties.entrySet()) {
props.put(entry.getKey().getName(), entry.getValue().toString());
}
DimletKey key = new DimletKey(DimletType.DIMLET_MATERIAL, block.getRegistryName() + "@" + meta);
Settings settings = DimletRules.getSettings(key, mod, features, props);
if (!settings.isBlacklisted()) {
knownDimlets.put(key, settings);
}
}
}
}
// Derives the feature flags (ore-dict, falling, tile entity, plantable,
// non-full-block) used by the rule engine to classify a block.
public static Set<Filter.Feature> getBlockFeatures(Block block) {
Set<Filter.Feature> features = EnumSet.noneOf(Filter.Feature.class);
ItemStack stack = null;
try {
stack = new ItemStack(block, 1, OreDictionary.WILDCARD_VALUE);
} catch (Exception e) {
// Broken third-party blocks can throw here; report and treat as featureless.
Logging.getLogger().log(Level.ERROR, "Failed to create a dimlet for block " + block.getRegistryName() +
"! Please report to the correct mod!", e);
return features;
}
int[] iDs = null;
if (!stack.isEmpty() && stack.getItem() != null) {
iDs = OreDictionary.getOreIDs(stack);
}
if (iDs != null && iDs.length > 0) {
features.add(Filter.Feature.OREDICT);
}
if (block instanceof BlockFalling) {
features.add(Filter.Feature.FALLING);
}
if (block.hasTileEntity(block.getDefaultState())) {
features.add(Filter.Feature.TILEENTITY);
}
if (block instanceof IPlantable) {
features.add(Filter.Feature.PLANTABLE);
}
if (!block.isFullBlock(block.getDefaultState())) {
features.add(Filter.Feature.NOFULLBLOCK);
}
return features;
}
// Debug helper: logs the settings for every registered entity.
public static void dumpMobs() {
Set<ResourceLocation> keys = ForgeRegistries.ENTITIES.getKeys();
keys.stream().map(ResourceLocation::toString).forEach(KnownDimletConfiguration::dumpMob);
}
private static void dumpMob(String id) {
EntityEntry entry = ForgeRegistries.ENTITIES.getValue(new ResourceLocation(id));
Class<? extends Entity> entityClass = entry == null ? null : entry.getEntityClass();
if (entry != null) {
DimletKey key = new DimletKey(DimletType.DIMLET_MOB, id);
String mod = entry.getRegistryName().getResourceDomain();
Settings settings = DimletRules.getSettings(key, mod);
String resourceName = EntityTools.findEntityIdByClass(entityClass);
String readableName = EntityTools.findEntityLocNameByClass(entityClass);
// NOTE(review): resourceName is logged twice; the first occurrence was
// presumably meant to be 'id' — confirm before changing the log format.
Logging.log(resourceName + " (" + resourceName + ", " + readableName + "): " + settings.toString());
}
}
// True for concrete living-entity classes (abstract and non-living are rejected).
// NOTE(review): no caller is visible in this chunk — possibly unused.
private static boolean isValidMobClass(Class<? extends Entity> entityClass) {
if (!EntityLivingBase.class.isAssignableFrom(entityClass)) {
return false;
}
if (Modifier.isAbstract(entityClass.getModifiers())) {
return false;
}
return true;
}
// Builds the item stack for a dimlet: damage encodes the type, NBT the id.
public static ItemStack getDimletStack(DimletKey key) {
ItemStack stack = new ItemStack(ModItems.knownDimletItem, 1, key.getType().ordinal());
NBTTagCompound compound = new NBTTagCompound();
compound.setString("dkey", key.getId());
stack.setTagCompound(compound);
return stack;
}
public static ItemStack getDimletStack(DimletType type, String id) {
return getDimletStack(new DimletKey(type, id));
}
// Inverse of getDimletStack(): decodes type from damage and id from NBT.
// A stack without the "dkey" tag yields a key with a null id.
public static DimletKey getDimletKey(ItemStack dimletStack) {
DimletType type = DimletType.values()[dimletStack.getItemDamage()];
NBTTagCompound tagCompound = dimletStack.getTagCompound();
if (tagCompound != null && tagCompound.hasKey("dkey")) {
return new DimletKey(type, tagCompound.getString("dkey"))
} else {
return new DimletKey(type, null);
}
}
// A key is considered blacklisted when it has no registered settings.
public static boolean isBlacklisted(DimletKey key) {
return KnownDimletConfiguration.getSettings(key) == null;
}
public static boolean isCraftable(DimletKey key) {
if (craftableDimlets.isEmpty()) {
registerCraftables();
}
return craftableDimlets.contains(key);
}
// Fills craftableDimlets with the fixed set of player-craftable dimlets.
private static void registerCraftables() {
craftableDimlets.add(new DimletKey(DimletType.DIMLET_EFFECT, "None"));
craftableDimlets.add(new DimletKey(DimletType.DIMLET_FEATURE, "None"));
craftableDimlets.add(new DimletKey(DimletType.DIMLET_STRUCTURE, "None"));
craftableDimlets.add(new DimletKey(DimletType.DIMLET_TERRAIN, "Void"));
if (!GeneralConfiguration.voidOnly) {
craftableDimlets.add(new DimletKey(DimletType.DIMLET_TERRAIN, "Flat"));
}
craftableDimlets.add(new DimletKey(DimletType.DIMLET_CONTROLLER, DimletObjectMapping.DEFAULT_ID));
craftableDimlets.add(new DimletKey(DimletType.DIMLET_CONTROLLER, "Single"));
craftableDimlets.add(new DimletKey(DimletType.DIMLET_MATERIAL, Blocks.STONE.getRegistryName() + "@0"));
craftableDimlets.add(new DimletKey(DimletType.DIMLET_LIQUID, Blocks.WATER.getRegistryName() + "@0"));
craftableDimlets.add(new DimletKey(DimletType.DIMLET_SKY, "normal"));
craftableDimlets.add(new DimletKey(DimletType.DIMLET_SKY, "normal.day"));
craftableDimlets.add(new DimletKey(DimletType.DIMLET_SKY, "normal.night"));
craftableDimlets.add(new DimletKey(DimletType.DIMLET_MOB, DimletObjectMapping.DEFAULT_ID));
craftableDimlets.add(new DimletKey(DimletType.DIMLET_TIME, "Normal"));
craftableDimlets.add(new DimletKey(DimletType.DIMLET_WEATHER, DimletObjectMapping.DEFAULT_ID));
craftableDimlets.add(new DimletKey(DimletType.DIMLET_DIGIT, "0"));
craftableDimlets.add(new DimletKey(DimletType.DIMLET_DIGIT, "1"));
craftableDimlets.add(new DimletKey(DimletType.DIMLET_DIGIT, "2"));
craftableDimlets.add(new DimletKey(DimletType.DIMLET_DIGIT, "3"));
craftableDimlets.add(new DimletKey(DimletType.DIMLET_DIGIT, "4"));
craftableDimlets.add(new DimletKey(DimletType.DIMLET_DIGIT, "5"));
craftableDimlets.add(new DimletKey(DimletType.DIMLET_DIGIT, "6"));
craftableDimlets.add(new DimletKey(DimletType.DIMLET_DIGIT, "7"));
craftableDimlets.add(new DimletKey(DimletType.DIMLET_DIGIT, "8"));
craftableDimlets.add(new DimletKey(DimletType.DIMLET_DIGIT, "9"));
}
// NOTE(review): exposes the internal mutable set directly.
public static Set<DimletKey> getCraftableDimlets() {
if (craftableDimlets.isEmpty()) {
registerCraftables();
}
return craftableDimlets;
}
public static boolean isSeedDimlet(DimletKey key) {
return key.getType() == DimletType.DIMLET_SPECIAL && "Seed".equals(key.getId());
}
// Human-readable name for a dimlet; most types just echo their id, while
// biomes/mobs/blocks/fluids resolve through the relevant registry.
public static String getDisplayName(DimletKey key) {
switch (key.getType()) {
case DIMLET_BIOME:
Biome biome = Biome.REGISTRY.getObject(new ResourceLocation(key.getId()));
return biome == null ? "<invalid>" : biome.biomeName;
case DIMLET_LIQUID:
return DimensionInformation.getDisplayName(DimletObjectMapping.getFluid(key));
case DIMLET_MATERIAL:
return DimensionInformation.getDisplayName(DimletObjectMapping.getBlock(key));
case DIMLET_MOB:
if (DimletObjectMapping.DEFAULT_ID.equals(key.getId())) {
return key.getId();
}
EntityEntry entry = ForgeRegistries.ENTITIES.getValue(new ResourceLocation(EntityTools.fixEntityId(key.getId())));
Class<? extends Entity> entityClass = entry == null ? null : entry.getEntityClass();
if (entityClass == null) {
return "<Unknown>";
}
return EntityTools.findEntityLocNameByClass(entityClass);
case DIMLET_SKY:
// "dark.red" -> "Dark red"
return StringUtils.capitalize(StringUtils.join(StringUtils.split(key.getId(), '.'), ' '));
case DIMLET_STRUCTURE:
return key.getId();
case DIMLET_TERRAIN:
return key.getId();
case DIMLET_FEATURE:
return key.getId();
case DIMLET_TIME:
return key.getId();
case DIMLET_DIGIT:
return key.getId();
case DIMLET_EFFECT:
return key.getId();
case DIMLET_SPECIAL:
return key.getId();
case DIMLET_CONTROLLER:
return key.getId();
case DIMLET_WEATHER:
return StringUtils.capitalize(StringUtils.join(StringUtils.split(key.getId(), '.'), ' '));
case DIMLET_PATREON:
return key.getId();
}
return "Unknown";
}
// Lazily-built tiers of dimlet parts used for random part drops; each inner
// list is one rarity tier.
private static List<List<ItemStack>> randomPartLists = null;
public static List<List<ItemStack>> getRandomPartLists() {
if (randomPartLists == null) {
randomPartLists = new ArrayList<>();
randomPartLists.add(Lists.newArrayList(new ItemStack(ModItems.dimletBaseItem), new ItemStack(ModItems.dimletControlCircuitItem, 1, 0), new ItemStack(ModItems.dimletEnergyModuleItem)));
randomPartLists.add(Lists.newArrayList(new ItemStack(ModItems.dimletControlCircuitItem, 1, 1), new ItemStack(ModItems.dimletEnergyModuleItem, 1, 0), new ItemStack(ModItems.dimletMemoryUnitItem, 1, 0)));
randomPartLists.add(Lists.newArrayList(new ItemStack(ModItems.dimletControlCircuitItem, 1, 2)));
// Tier 3 additionally contains one type controller per dimlet type.
ArrayList<ItemStack> list3 = Lists.newArrayList(new ItemStack(ModItems.dimletControlCircuitItem, 1, 3), new ItemStack(ModItems.dimletEnergyModuleItem, 1, 1), new ItemStack(ModItems.dimletMemoryUnitItem, 1, 1));
for (DimletType type : DimletType.values()) {
list3.add(new ItemStack(ModItems.dimletTypeControllerItem, 1, type.ordinal()));
}
randomPartLists.add(list3);
randomPartLists.add(Lists.newArrayList(new ItemStack(ModItems.dimletControlCircuitItem, 1, 4)));
randomPartLists.add(Lists.newArrayList(new ItemStack(ModItems.dimletControlCircuitItem, 1, 5), new ItemStack(ModItems.dimletEnergyModuleItem, 1, 2), new ItemStack(ModItems.dimletMemoryUnitItem, 1, 2)));
randomPartLists.add(Lists.newArrayList(new ItemStack(ModItems.dimletControlCircuitItem, 1, 6)));
}
return randomPartLists;
}
}
|
src/main/java/mcjty/rftoolsdim/dimensions/dimlets/KnownDimletConfiguration.java
|
package mcjty.rftoolsdim.dimensions.dimlets;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Lists;
import mcjty.lib.varia.EntityTools;
import mcjty.lib.varia.Logging;
import mcjty.rftoolsdim.RFToolsDim;
import mcjty.rftoolsdim.config.DimletRules;
import mcjty.rftoolsdim.config.Filter;
import mcjty.rftoolsdim.config.GeneralConfiguration;
import mcjty.rftoolsdim.config.Settings;
import mcjty.rftoolsdim.dimensions.DimensionInformation;
import mcjty.rftoolsdim.dimensions.description.SkyDescriptor;
import mcjty.rftoolsdim.dimensions.description.WeatherDescriptor;
import mcjty.rftoolsdim.dimensions.dimlets.types.DimletType;
import mcjty.rftoolsdim.dimensions.types.*;
import mcjty.rftoolsdim.dimensions.world.BiomeControllerMapping;
import mcjty.rftoolsdim.items.ModItems;
import net.minecraft.block.Block;
import net.minecraft.block.BlockFalling;
import net.minecraft.block.BlockLiquid;
import net.minecraft.block.properties.IProperty;
import net.minecraft.block.state.IBlockState;
import net.minecraft.entity.Entity;
import net.minecraft.entity.EntityLivingBase;
import net.minecraft.init.Blocks;
import net.minecraft.item.ItemStack;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.util.ResourceLocation;
import net.minecraft.world.biome.Biome;
import net.minecraftforge.common.IPlantable;
import net.minecraftforge.fluids.Fluid;
import net.minecraftforge.fluids.FluidRegistry;
import net.minecraftforge.fml.common.registry.EntityEntry;
import net.minecraftforge.fml.common.registry.ForgeRegistries;
import net.minecraftforge.oredict.OreDictionary;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.Level;
import java.lang.reflect.Modifier;
import java.util.*;
public class KnownDimletConfiguration {
private static Set<DimletKey> craftableDimlets = new HashSet<>();
private static SortedMap<DimletKey, Settings> knownDimlets = new TreeMap<>();
public static Settings getSettings(DimletKey key) {
initDimlets();
return knownDimlets.get(key);
}
public static SortedMap<DimletKey, Settings> getKnownDimlets() {
initDimlets();
return knownDimlets;
}
public static void init() {
knownDimlets.clear();
craftableDimlets.clear();
}
private static void initDimlets() {
if (!knownDimlets.isEmpty()) {
return;
}
for (int i = 0 ; i <= 9 ; i++) {
initDimlet(new DimletKey(DimletType.DIMLET_DIGIT, Integer.toString(i)), RFToolsDim.MODID);
}
if (GeneralConfiguration.voidOnly) {
initDimlet(new DimletKey(DimletType.DIMLET_TERRAIN, TerrainType.TERRAIN_VOID.getId()), RFToolsDim.MODID);
} else {
Arrays.stream(TerrainType.values()).forEach(t -> initDimlet(new DimletKey(DimletType.DIMLET_TERRAIN, t.getId()), RFToolsDim.MODID));
}
Arrays.stream(ControllerType.values()).forEach(t -> initDimlet(new DimletKey(DimletType.DIMLET_CONTROLLER, t.getId()), RFToolsDim.MODID));
Arrays.stream(FeatureType.values()).forEach(t -> initDimlet(new DimletKey(DimletType.DIMLET_FEATURE, t.getId()), RFToolsDim.MODID));
Arrays.stream(EffectType.values()).forEach(t -> initDimlet(new DimletKey(DimletType.DIMLET_EFFECT, t.getId()), RFToolsDim.MODID));
Arrays.stream(StructureType.values()).forEach(t -> initDimlet(new DimletKey(DimletType.DIMLET_STRUCTURE, t.getId()), RFToolsDim.MODID));
Arrays.stream(SpecialType.values()).forEach(t -> initDimlet(new DimletKey(DimletType.DIMLET_SPECIAL, t.getId()), RFToolsDim.MODID));
Biome.REGISTRY.iterator().forEachRemaining(KnownDimletConfiguration::initBiomeDimlet);
Set<ResourceLocation> keys = ForgeRegistries.ENTITIES.getKeys();
keys.stream().map(ResourceLocation::toString).forEach(KnownDimletConfiguration::initMobDimlet);
initMobDimlet(DimletObjectMapping.DEFAULT_ID);
FluidRegistry.getRegisteredFluids().entrySet().stream().forEach(KnownDimletConfiguration::initFluidDimlet);
Block.REGISTRY.forEach(KnownDimletConfiguration::initMaterialDimlet);
initDimlet(new DimletKey(DimletType.DIMLET_MATERIAL, Blocks.STONE.getRegistryName() + "@0"), "minecraft");
initDimlet(new DimletKey(DimletType.DIMLET_LIQUID, Blocks.WATER.getRegistryName() + "@0"), "minecraft");
initDimlet(new DimletKey(DimletType.DIMLET_TIME, "Normal"), RFToolsDim.MODID);
initDimlet(new DimletKey(DimletType.DIMLET_TIME, "Noon"), RFToolsDim.MODID);
initDimlet(new DimletKey(DimletType.DIMLET_TIME, "Midnight"), RFToolsDim.MODID);
initDimlet(new DimletKey(DimletType.DIMLET_TIME, "Morning"), RFToolsDim.MODID);
initDimlet(new DimletKey(DimletType.DIMLET_TIME, "Evening"), RFToolsDim.MODID);
initDimlet(new DimletKey(DimletType.DIMLET_TIME, "Fast"), RFToolsDim.MODID);
initDimlet(new DimletKey(DimletType.DIMLET_TIME, "Slow"), RFToolsDim.MODID);
// addExtraInformation(keyTimeNormal, "With this normal dimlet you will get", "default day/night timing");
initSkyDimlets();
initWeatherDimlets();
BiomeControllerMapping.setupControllerBiomes();
}
/**
 * Register the weather dimlets: the default (vanilla weather) dimlet plus one
 * dimlet for each rain/thunder intensity supported by {@link WeatherType}.
 */
private static void initWeatherDimlets() {
    initWeatherDimlet("Default", new WeatherDescriptor.Builder().build());
    // Ids and weather types are kept in matching order; registration order is preserved.
    String[] weatherIds = {
            "no.rain", "light.rain", "hard.rain",
            "no.thunder", "light.thunder", "hard.thunder"};
    WeatherType[] weatherTypes = {
            WeatherType.WEATHER_NORAIN, WeatherType.WEATHER_LIGHTRAIN, WeatherType.WEATHER_HARDRAIN,
            WeatherType.WEATHER_NOTHUNDER, WeatherType.WEATHER_LIGHTTHUNDER, WeatherType.WEATHER_HARDTHUNDER};
    for (int i = 0; i < weatherIds.length; i++) {
        initWeatherDimlet(weatherIds[i], new WeatherDescriptor.Builder().weatherType(weatherTypes[i]).build());
    }
}
/**
 * Register a single weather dimlet: create its key, add it to the known dimlets
 * (unless blacklisted) and attach its weather descriptor to the registry.
 */
private static void initWeatherDimlet(String id, WeatherDescriptor weatherDescriptor) {
    final DimletKey dimletKey = new DimletKey(DimletType.DIMLET_WEATHER, id);
    initDimlet(dimletKey, RFToolsDim.MODID);
    WeatherRegistry.registerWeather(dimletKey, weatherDescriptor);
}
/**
 * Register all sky dimlets. The boolean flag marks celestial-body dimlets that
 * are eligible for random selection (see the "body.none" note below).
 */
private static void initSkyDimlets() {
    // Base sky types and sun/star brightness variants.
    initSkyDimlet("normal", new SkyDescriptor.Builder().skyType(SkyType.SKY_NORMAL).build(), false);
    initSkyDimlet("normal.day", new SkyDescriptor.Builder().sunBrightnessFactor(1.0f).build(), false);
    initSkyDimlet("normal.night", new SkyDescriptor.Builder().starBrightnessFactor(1.0f).build(), false);
    initSkyDimlet("dark.day", new SkyDescriptor.Builder().sunBrightnessFactor(0.4f).skyColorFactor(0.6f, 0.6f, 0.6f).build(), false);
    initSkyDimlet("bright.night", new SkyDescriptor.Builder().starBrightnessFactor(1.5f).build(), false);
    initSkyDimlet("dark.night", new SkyDescriptor.Builder().starBrightnessFactor(0.4f).build(), false);
    // Sky color tints (RGB multipliers).
    initSkyDimlet("red", new SkyDescriptor.Builder().skyColorFactor(1.0f, 0.2f, 0.2f).build(), false);
    initSkyDimlet("dark.red", new SkyDescriptor.Builder().skyColorFactor(0.6f, 0.0f, 0.0f).build(), false);
    initSkyDimlet("green", new SkyDescriptor.Builder().skyColorFactor(0.2f, 1.0f, 0.2f).build(), false);
    initSkyDimlet("dark.green", new SkyDescriptor.Builder().skyColorFactor(0f, 0.6f, 0f).build(), false);
    initSkyDimlet("blue", new SkyDescriptor.Builder().skyColorFactor(0.2f, 0.2f, 1.0f).build(), false);
    initSkyDimlet("dark.blue", new SkyDescriptor.Builder().skyColorFactor(0.0f, 0.0f, 0.6f).build(), false);
    initSkyDimlet("yellow", new SkyDescriptor.Builder().skyColorFactor(1.0f, 1.0f, 0.2f).build(), false);
    initSkyDimlet("cyan", new SkyDescriptor.Builder().skyColorFactor(0.2f, 1.0f, 1.0f).build(), false);
    initSkyDimlet("dark.cyan", new SkyDescriptor.Builder().skyColorFactor(0.0f, 0.6f, 0.6f).build(), false);
    initSkyDimlet("purple", new SkyDescriptor.Builder().skyColorFactor(1.0f, 0.2f, 1.0f).build(), false);
    initSkyDimlet("dark.purple", new SkyDescriptor.Builder().skyColorFactor(0.6f, 0, 0.6f).build(), false);
    initSkyDimlet("black", new SkyDescriptor.Builder().skyColorFactor(0.0f, 0.0f, 0.0f).build(), false);
    initSkyDimlet("gold", new SkyDescriptor.Builder().skyColorFactor(1.0f, 0.6f, 0.0f).build(), false);
    // Fog color tints.
    initSkyDimlet("normal.fog", new SkyDescriptor.Builder().fogColorFactor(1.0f, 1.0f, 1.0f).build(), false);
    initSkyDimlet("black.fog", new SkyDescriptor.Builder().fogColorFactor(0.0f, 0.0f, 0.0f).build(), false);
    initSkyDimlet("red.fog", new SkyDescriptor.Builder().fogColorFactor(1.0f, 0.2f, 0.2f).build(), false);
    initSkyDimlet("green.fog", new SkyDescriptor.Builder().fogColorFactor(0.2f, 1.0f, 0.2f).build(), false);
    initSkyDimlet("blue.fog", new SkyDescriptor.Builder().fogColorFactor(0.2f, 0.2f, 1.0f).build(), false);
    initSkyDimlet("yellow.fog", new SkyDescriptor.Builder().fogColorFactor(1.0f, 1.0f, 0.2f).build(), false);
    initSkyDimlet("cyan.fog", new SkyDescriptor.Builder().fogColorFactor(0.2f, 1.0f, 1.0f).build(), false);
    initSkyDimlet("purple.fog", new SkyDescriptor.Builder().fogColorFactor(1.0f, 0.2f, 1.0f).build(), false);
    // Special sky renderers.
    initSkyDimlet("ender", new SkyDescriptor.Builder().skyType(SkyType.SKY_ENDER).build(), false);
    initSkyDimlet("inferno", new SkyDescriptor.Builder().skyType(SkyType.SKY_INFERNO).build(), false);
    initSkyDimlet("stars1", new SkyDescriptor.Builder().skyType(SkyType.SKY_STARS1).build(), false);
    initSkyDimlet("stars2", new SkyDescriptor.Builder().skyType(SkyType.SKY_STARS2).build(), false);
    // Celestial bodies: the "true" flag makes the body eligible for random selection.
    initSkyDimlet("body.none", new SkyDescriptor.Builder().addBody(CelestialBodyType.BODY_NONE).build(), false); // False because we don't want to select this randomly.
    initSkyDimlet("body.sun", new SkyDescriptor.Builder().addBody(CelestialBodyType.BODY_SUN).build(), true);
    initSkyDimlet("body.large.sun", new SkyDescriptor.Builder().addBody(CelestialBodyType.BODY_LARGESUN).build(), true);
    initSkyDimlet("body.small.sun", new SkyDescriptor.Builder().addBody(CelestialBodyType.BODY_SMALLSUN).build(), true);
    initSkyDimlet("body.red.sun", new SkyDescriptor.Builder().addBody(CelestialBodyType.BODY_REDSUN).build(), true);
    initSkyDimlet("body.moon", new SkyDescriptor.Builder().addBody(CelestialBodyType.BODY_MOON).build(), true);
    initSkyDimlet("body.large.moon", new SkyDescriptor.Builder().addBody(CelestialBodyType.BODY_LARGEMOON).build(), true);
    initSkyDimlet("body.small.moon", new SkyDescriptor.Builder().addBody(CelestialBodyType.BODY_SMALLMOON).build(), true);
    initSkyDimlet("body.red.moon", new SkyDescriptor.Builder().addBody(CelestialBodyType.BODY_REDMOON).build(), true);
    initSkyDimlet("body.planet", new SkyDescriptor.Builder().addBody(CelestialBodyType.BODY_PLANET).build(), true);
    initSkyDimlet("body.large.planet", new SkyDescriptor.Builder().addBody(CelestialBodyType.BODY_LARGEPLANET).build(), true);
    // Cloud color tints.
    initSkyDimlet("normal.clouds", new SkyDescriptor.Builder().cloudColorFactor(1.0f, 1.0f, 1.0f).build(), false);
    initSkyDimlet("black.clouds", new SkyDescriptor.Builder().cloudColorFactor(0.0f, 0.0f, 0.0f).build(), false);
    initSkyDimlet("red.clouds", new SkyDescriptor.Builder().cloudColorFactor(1.0f, 0.2f, 0.2f).build(), false);
    initSkyDimlet("green.clouds", new SkyDescriptor.Builder().cloudColorFactor(0.2f, 1.0f, 0.2f).build(), false);
    initSkyDimlet("blue.clouds", new SkyDescriptor.Builder().cloudColorFactor(0.2f, 0.2f, 1.0f).build(), false);
    initSkyDimlet("yellow.clouds", new SkyDescriptor.Builder().cloudColorFactor(1.0f, 1.0f, 0.2f).build(), false);
    initSkyDimlet("cyan.clouds", new SkyDescriptor.Builder().cloudColorFactor(0.2f, 1.0f, 1.0f).build(), false);
    initSkyDimlet("purple.clouds", new SkyDescriptor.Builder().cloudColorFactor(1.0f, 0.2f, 1.0f).build(), false);
}
/**
 * Register a single sky dimlet: create its key, add it to the known dimlets
 * (unless blacklisted) and attach its sky descriptor to the sky registry.
 * The {@code body} flag marks celestial-body dimlets.
 */
private static void initSkyDimlet(String id, SkyDescriptor descriptor, boolean body) {
    final DimletKey dimletKey = new DimletKey(DimletType.DIMLET_SKY, id);
    initDimlet(dimletKey, RFToolsDim.MODID);
    SkyRegistry.registerSky(dimletKey, descriptor, body);
}
/**
 * Register a biome dimlet for the given biome, attributed to the mod that
 * registered the biome. Biomes without a registry name are skipped.
 */
private static void initBiomeDimlet(Biome biome) {
    ResourceLocation registryName = biome.getRegistryName();
    if (registryName != null) {
        DimletKey key = new DimletKey(DimletType.DIMLET_BIOME, registryName.toString());
        // Reuse the already null-checked registryName instead of calling
        // biome.getRegistryName() a second time (the original re-fetched it).
        initDimlet(key, registryName.getResourceDomain());
    }
}
/**
 * Register a mob dimlet. The synthetic "default" id is attributed to RFTools Dim
 * itself; real entities are attributed to the mod that registered them. Unknown
 * entity ids are silently ignored.
 */
private static void initMobDimlet(String id) {
    if (DimletObjectMapping.DEFAULT_ID.equals(id)) {
        initDimlet(new DimletKey(DimletType.DIMLET_MOB, id), RFToolsDim.MODID);
        return;
    }
    EntityEntry entry = ForgeRegistries.ENTITIES.getValue(new ResourceLocation(id));
    if (entry != null) {
        initDimlet(new DimletKey(DimletType.DIMLET_MOB, id), entry.getRegistryName().getResourceDomain());
    }
}
/**
 * Register a liquid dimlet for a registered fluid. Only fluids that can be
 * placed in the world (i.e. have a block form) qualify; entries with missing
 * names, blocks or registry entries are skipped.
 */
private static void initFluidDimlet(Map.Entry<String, Fluid> me) {
    if (!me.getValue().canBePlacedInWorld()) {
        return;
    }
    String name = me.getKey();
    if (name == null || name.isEmpty()) {
        return;
    }
    Block block = me.getValue().getBlock();
    if (block == null) {
        return;
    }
    ResourceLocation nameForObject = Block.REGISTRY.getNameForObject(block);
    if (nameForObject == null) {
        return;
    }
    // "@0" selects metadata 0 of the fluid's block; attribute the dimlet to the
    // mod that registered that block.
    DimletKey key = new DimletKey(DimletType.DIMLET_LIQUID, block.getRegistryName() + "@0");
    initDimlet(key, nameForObject.getResourceDomain());
}
/**
 * Look up the configured settings for a dimlet key and, unless the rules
 * blacklist it, add it to the known dimlets map.
 */
private static void initDimlet(DimletKey key, String mod) {
    final Settings settings = DimletRules.getSettings(key, mod);
    if (settings.isBlacklisted()) {
        return;
    }
    knownDimlets.put(key, settings);
}
/**
 * Register material dimlets for every valid state (metadata value) of a block.
 * Liquids are excluded (they get DIMLET_LIQUID dimlets instead), as is lit
 * redstone ore which is just a transient state of redstone ore.
 */
private static void initMaterialDimlet(Block block) {
    if (block instanceof BlockLiquid || block == Blocks.LIT_REDSTONE_ORE) {
        return;
    }

    Set<Filter.Feature> features = getBlockFeatures(block);
    ResourceLocation nameForObject = Block.REGISTRY.getNameForObject(block);
    String mod = nameForObject.getResourceDomain();

    for (IBlockState state : block.getBlockState().getValidStates()) {
        int meta = state.getBlock().getMetaFromState(state);
        // Reuse the already computed meta (the original recomputed getMetaFromState here).
        ItemStack stack = new ItemStack(state.getBlock(), 1, meta);
        if (stack.getItem() != null) { // Protection against blocks with no item form
            // Collect the state's properties as strings so the dimlet rules can
            // filter on them. (A sorted property-name list was previously built
            // here but never used; it has been removed.)
            Map<String, String> props = new HashMap<>();
            for (Map.Entry<IProperty<?>, Comparable<?>> entry : state.getProperties().entrySet()) {
                props.put(entry.getKey().getName(), entry.getValue().toString());
            }
            DimletKey key = new DimletKey(DimletType.DIMLET_MATERIAL, block.getRegistryName() + "@" + meta);
            Settings settings = DimletRules.getSettings(key, mod, features, props);
            if (!settings.isBlacklisted()) {
                knownDimlets.put(key, settings);
            }
        }
    }
}
/**
 * Determine the filterable features of a block (ore dictionary membership,
 * falling behavior, tile entity, plantability, non-full block) used by the
 * dimlet rules. Returns an empty set if even creating an ItemStack for the
 * block fails (broken third-party blocks).
 */
public static Set<Filter.Feature> getBlockFeatures(Block block) {
    Set<Filter.Feature> features = EnumSet.noneOf(Filter.Feature.class);

    ItemStack stack;
    try {
        stack = new ItemStack(block, 1, OreDictionary.WILDCARD_VALUE);
    } catch (Exception e) {
        // Some broken mod blocks throw from the ItemStack constructor.
        Logging.getLogger().log(Level.ERROR, "Failed to create a dimlet for block " + block.getRegistryName() +
                "! Please report to the correct mod!", e);
        return features;
    }

    if (!stack.isEmpty() && stack.getItem() != null) {
        int[] oreIds = OreDictionary.getOreIDs(stack);
        if (oreIds.length > 0) {
            features.add(Filter.Feature.OREDICT);
        }
    }
    if (block instanceof BlockFalling) {
        features.add(Filter.Feature.FALLING);
    }
    if (block.hasTileEntity(block.getDefaultState())) {
        features.add(Filter.Feature.TILEENTITY);
    }
    if (block instanceof IPlantable) {
        features.add(Filter.Feature.PLANTABLE);
    }
    if (!block.isFullBlock(block.getDefaultState())) {
        features.add(Filter.Feature.NOFULLBLOCK);
    }
    return features;
}
/**
 * Debug helper: log the dimlet settings for every registered entity.
 */
public static void dumpMobs() {
    for (ResourceLocation entityId : ForgeRegistries.ENTITIES.getKeys()) {
        dumpMob(entityId.toString());
    }
}
/**
 * Debug helper: log the dimlet settings for a single entity id.
 * Unknown ids are silently skipped.
 */
private static void dumpMob(String id) {
    EntityEntry entry = ForgeRegistries.ENTITIES.getValue(new ResourceLocation(id));
    if (entry != null) {
        Class<? extends Entity> entityClass = entry.getEntityClass();
        DimletKey key = new DimletKey(DimletType.DIMLET_MOB, id);
        String mod = entry.getRegistryName().getResourceDomain();
        Settings settings = DimletRules.getSettings(key, mod);
        String resourceName = EntityTools.findEntityIdByClass(entityClass);
        String readableName = EntityTools.findEntityLocNameByClass(entityClass);
        // Fixed: the original logged resourceName twice; the first value should
        // be the registry id being dumped.
        Logging.log(id + " (" + resourceName + ", " + readableName + "): " + settings.toString());
    }
}
/**
 * A mob dimlet needs a concrete living entity: abstract classes and
 * non-living entities (arrows, minecarts, ...) are rejected.
 */
private static boolean isValidMobClass(Class<? extends Entity> entityClass) {
    return EntityLivingBase.class.isAssignableFrom(entityClass)
            && !Modifier.isAbstract(entityClass.getModifiers());
}
/**
 * Build the item stack for a dimlet key. The dimlet type is encoded in the
 * item damage value and the id is stored in NBT under "dkey"
 * (see {@code getDimletKey} for the reverse mapping).
 */
public static ItemStack getDimletStack(DimletKey key) {
    NBTTagCompound tag = new NBTTagCompound();
    tag.setString("dkey", key.getId());
    ItemStack result = new ItemStack(ModItems.knownDimletItem, 1, key.getType().ordinal());
    result.setTagCompound(tag);
    return result;
}
/**
 * Convenience overload: build the item stack for a dimlet given its type and id.
 */
public static ItemStack getDimletStack(DimletType type, String id) {
    return getDimletStack(new DimletKey(type, id));
}
/**
 * Decode a dimlet key from an item stack: the type comes from the item damage
 * value, the id from the "dkey" NBT entry (null if the stack has no valid tag).
 */
public static DimletKey getDimletKey(ItemStack dimletStack) {
    DimletType type = DimletType.values()[dimletStack.getItemDamage()];
    NBTTagCompound tagCompound = dimletStack.getTagCompound();
    boolean hasId = tagCompound != null && tagCompound.hasKey("dkey");
    String id = hasId ? tagCompound.getString("dkey") : null;
    return new DimletKey(type, id);
}
/**
 * A dimlet is blacklisted when it was never registered as a known dimlet
 * (initDimlet skips blacklisted keys), i.e. when no settings are found for it.
 */
public static boolean isBlacklisted(DimletKey key) {
    return KnownDimletConfiguration.getSettings(key) == null;
}
/**
 * Whether the given dimlet can be crafted by the player. The craftable set is
 * populated lazily on first use.
 */
public static boolean isCraftable(DimletKey key) {
    // getCraftableDimlets() performs the same lazy initialization as before.
    return getCraftableDimlets().contains(key);
}
/**
 * Populate the set of dimlets that the player can craft directly
 * (as opposed to having to find or extract them).
 */
private static void registerCraftables() {
    craftableDimlets.add(new DimletKey(DimletType.DIMLET_EFFECT, "None"));
    craftableDimlets.add(new DimletKey(DimletType.DIMLET_FEATURE, "None"));
    craftableDimlets.add(new DimletKey(DimletType.DIMLET_STRUCTURE, "None"));
    craftableDimlets.add(new DimletKey(DimletType.DIMLET_TERRAIN, "Void"));
    // In void-only mode the flat terrain dimlet is not craftable.
    if (!GeneralConfiguration.voidOnly) {
        craftableDimlets.add(new DimletKey(DimletType.DIMLET_TERRAIN, "Flat"));
    }
    craftableDimlets.add(new DimletKey(DimletType.DIMLET_CONTROLLER, DimletObjectMapping.DEFAULT_ID));
    craftableDimlets.add(new DimletKey(DimletType.DIMLET_CONTROLLER, "Single"));
    craftableDimlets.add(new DimletKey(DimletType.DIMLET_MATERIAL, Blocks.STONE.getRegistryName() + "@0"));
    craftableDimlets.add(new DimletKey(DimletType.DIMLET_LIQUID, Blocks.WATER.getRegistryName() + "@0"));
    craftableDimlets.add(new DimletKey(DimletType.DIMLET_SKY, "normal"));
    craftableDimlets.add(new DimletKey(DimletType.DIMLET_SKY, "normal.day"));
    craftableDimlets.add(new DimletKey(DimletType.DIMLET_SKY, "normal.night"));
    craftableDimlets.add(new DimletKey(DimletType.DIMLET_MOB, DimletObjectMapping.DEFAULT_ID));
    craftableDimlets.add(new DimletKey(DimletType.DIMLET_TIME, "Normal"));
    craftableDimlets.add(new DimletKey(DimletType.DIMLET_WEATHER, DimletObjectMapping.DEFAULT_ID));
    // All ten digit dimlets ("0".."9") are craftable (was ten copy-pasted lines).
    for (int digit = 0; digit <= 9; digit++) {
        craftableDimlets.add(new DimletKey(DimletType.DIMLET_DIGIT, Integer.toString(digit)));
    }
}
/**
 * Get the set of craftable dimlets, populating it lazily on first call.
 * Note: returns the internal mutable set.
 */
public static Set<DimletKey> getCraftableDimlets() {
    if (craftableDimlets.isEmpty()) {
        registerCraftables();
    }
    return craftableDimlets;
}
/**
 * Whether the key is the special "Seed" dimlet (DIMLET_SPECIAL type).
 */
public static boolean isSeedDimlet(DimletKey key) {
    return key.getType() == DimletType.DIMLET_SPECIAL && "Seed".equals(key.getId());
}
/**
 * Human-readable display name for a dimlet key. Biomes/mobs/materials/liquids
 * look up their registered object; sky and weather ids like "dark.red" become
 * "Dark red"; most other types simply use their id as the display name.
 * Returns "&lt;invalid&gt;" / "&lt;Unknown&gt;" for unresolvable biomes/mobs
 * and "Unknown" for an unrecognized type.
 */
public static String getDisplayName(DimletKey key) {
    switch (key.getType()) {
        case DIMLET_BIOME: {
            Biome biome = Biome.REGISTRY.getObject(new ResourceLocation(key.getId()));
            return biome == null ? "<invalid>" : biome.biomeName;
        }
        case DIMLET_LIQUID:
            return DimensionInformation.getDisplayName(DimletObjectMapping.getFluid(key));
        case DIMLET_MATERIAL:
            return DimensionInformation.getDisplayName(DimletObjectMapping.getBlock(key));
        case DIMLET_MOB: {
            if (DimletObjectMapping.DEFAULT_ID.equals(key.getId())) {
                return key.getId();
            }
            EntityEntry entry = ForgeRegistries.ENTITIES.getValue(new ResourceLocation(EntityTools.fixEntityId(key.getId())));
            Class<? extends Entity> entityClass = entry == null ? null : entry.getEntityClass();
            if (entityClass == null) {
                return "<Unknown>";
            }
            return EntityTools.findEntityLocNameByClass(entityClass);
        }
        case DIMLET_SKY:
        case DIMLET_WEATHER:
            // "dark.red" -> "Dark red"
            return StringUtils.capitalize(StringUtils.join(StringUtils.split(key.getId(), '.'), ' '));
        // All of these simply display their id (previously eleven separate cases).
        case DIMLET_STRUCTURE:
        case DIMLET_TERRAIN:
        case DIMLET_FEATURE:
        case DIMLET_TIME:
        case DIMLET_DIGIT:
        case DIMLET_EFFECT:
        case DIMLET_SPECIAL:
        case DIMLET_CONTROLLER:
        case DIMLET_PATREON:
            return key.getId();
    }
    return "Unknown";
}
// Lazily-built pools of dimlet-part item stacks, one pool per tier/level index.
private static List<List<ItemStack>> randomPartLists = null;

/**
 * Get the pools of dimlet parts used for random part selection; each inner
 * list is one tier (index 0 = lowest). Built once and cached.
 */
public static List<List<ItemStack>> getRandomPartLists() {
    if (randomPartLists == null) {
        randomPartLists = new ArrayList<>();
        // Tier 0: base plate, tier-0 circuit, energy module.
        randomPartLists.add(Lists.newArrayList(new ItemStack(ModItems.dimletBaseItem), new ItemStack(ModItems.dimletControlCircuitItem, 1, 0), new ItemStack(ModItems.dimletEnergyModuleItem)));
        randomPartLists.add(Lists.newArrayList(new ItemStack(ModItems.dimletControlCircuitItem, 1, 1), new ItemStack(ModItems.dimletEnergyModuleItem, 1, 0), new ItemStack(ModItems.dimletMemoryUnitItem, 1, 0)));
        randomPartLists.add(Lists.newArrayList(new ItemStack(ModItems.dimletControlCircuitItem, 1, 2)));
        // Tier 3 additionally contains one type controller per dimlet type.
        ArrayList<ItemStack> list3 = Lists.newArrayList(new ItemStack(ModItems.dimletControlCircuitItem, 1, 3), new ItemStack(ModItems.dimletEnergyModuleItem, 1, 1), new ItemStack(ModItems.dimletMemoryUnitItem, 1, 1));
        for (DimletType type : DimletType.values()) {
            list3.add(new ItemStack(ModItems.dimletTypeControllerItem, 1, type.ordinal()));
        }
        randomPartLists.add(list3);
        randomPartLists.add(Lists.newArrayList(new ItemStack(ModItems.dimletControlCircuitItem, 1, 4)));
        randomPartLists.add(Lists.newArrayList(new ItemStack(ModItems.dimletControlCircuitItem, 1, 5), new ItemStack(ModItems.dimletEnergyModuleItem, 1, 2), new ItemStack(ModItems.dimletMemoryUnitItem, 1, 2)));
        randomPartLists.add(Lists.newArrayList(new ItemStack(ModItems.dimletControlCircuitItem, 1, 6)));
    }
    return randomPartLists;
}
}
|
Reorganize sky dimlets
|
src/main/java/mcjty/rftoolsdim/dimensions/dimlets/KnownDimletConfiguration.java
|
Reorganize sky dimlets
|
|
Java
|
mit
|
1d345682159ffd4888b31f610d24452b2812dcb4
| 0
|
plrthink/react-native-zip-archive,plrthink/react-native-zip-archive,plrthink/react-native-zip-archive,plrthink/react-native-zip-archive,plrthink/react-native-zip-archive,plrthink/react-native-zip-archive
|
package com.rnziparchive;
import android.content.res.AssetFileDescriptor;
import android.os.Build;
import android.util.Log;
import com.facebook.react.bridge.Arguments;
import com.facebook.react.bridge.Promise;
import com.facebook.react.bridge.ReactApplicationContext;
import com.facebook.react.bridge.ReactContextBaseJavaModule;
import com.facebook.react.bridge.ReactMethod;
import com.facebook.react.bridge.WritableMap;
import com.facebook.react.bridge.ReadableArray;
import com.facebook.react.modules.core.DeviceEventManagerModule;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Enumeration;
import java.util.List;
import java.util.zip.ZipEntry;
import java.util.zip.ZipFile;
import java.util.zip.ZipInputStream;
import net.lingala.zip4j.exception.ZipException;
import net.lingala.zip4j.model.FileHeader;
import net.lingala.zip4j.model.ZipParameters;
import net.lingala.zip4j.model.enums.CompressionMethod;
import net.lingala.zip4j.model.enums.CompressionLevel;
import net.lingala.zip4j.model.enums.EncryptionMethod;
import net.lingala.zip4j.model.enums.AesKeyStrength;
import java.nio.charset.Charset;
public class RNZipArchiveModule extends ReactContextBaseJavaModule {
private static final String TAG = RNZipArchiveModule.class.getSimpleName();

// Name of the JS event emitted during (un)zip operations to report progress.
private static final String PROGRESS_EVENT_NAME = "zipArchiveProgressEvent";
// Keys of the progress event payload: the path being processed and a 0..1 fraction.
private static final String EVENT_KEY_FILENAME = "filePath";
private static final String EVENT_KEY_PROGRESS = "progress";
// Standard React Native module constructor; the context is used later for
// asset access and for emitting progress events to JS.
public RNZipArchiveModule(ReactApplicationContext reactContext) {
    super(reactContext);
}
/**
 * Module name under which this native module is exposed to JavaScript
 * ({@code NativeModules.RNZipArchive}).
 */
@Override
public String getName() {
    return "RNZipArchive";
}
/**
 * Resolve with {@code true} if the given zip file is encrypted, {@code false}
 * otherwise. Rejects if the archive cannot be opened/parsed.
 */
@ReactMethod
public void isPasswordProtected(final String zipFilePath, final Promise promise) {
    try {
        // zip4j can detect encryption from the headers without needing a password.
        net.lingala.zip4j.ZipFile archive = new net.lingala.zip4j.ZipFile(zipFilePath);
        promise.resolve(archive.isEncrypted());
    } catch (ZipException ex) {
        promise.reject(null, String.format("Unable to check for encryption due to: %s", getStackTrace(ex)));
    }
}
/**
 * Extract a password-protected zip to destDirectory on a background thread.
 * Resolves with the list of extracted (non-directory) entry names; rejects if
 * the archive is not encrypted or extraction fails. Emits progress events
 * (one per file, since per-byte progress is not available here).
 */
@ReactMethod
public void unzipWithPassword(final String zipFilePath, final String destDirectory,
                              final String password, final Promise promise) {
    new Thread(new Runnable() {
        @Override
        public void run() {
            try {
                net.lingala.zip4j.ZipFile zipFile = new net.lingala.zip4j.ZipFile(zipFilePath);
                if (zipFile.isEncrypted()) {
                    zipFile.setPassword(password.toCharArray());
                } else {
                    promise.reject(null, String.format("Zip file: %s is not password protected", zipFilePath));
                    // Fixed: previously execution fell through after the reject and
                    // extracted anyway, settling the promise a second time below.
                    return;
                }
                List<FileHeader> fileHeaderList = zipFile.getFileHeaders();
                List<String> extractedFileNames = new ArrayList<>();
                int totalFiles = fileHeaderList.size();

                updateProgress(0, 1, zipFilePath); // force 0%
                for (int i = 0; i < totalFiles; i++) {
                    FileHeader fileHeader = fileHeaderList.get(i);

                    // Zip Slip protection: refuse entries that resolve outside destDirectory.
                    File fout = new File(destDirectory, fileHeader.getFileName());
                    String canonicalPath = fout.getCanonicalPath();
                    String destDirCanonicalPath = (new File(destDirectory).getCanonicalPath()) + File.separator;
                    if (!canonicalPath.startsWith(destDirCanonicalPath)) {
                        throw new SecurityException(String.format("Found Zip Path Traversal Vulnerability with %s", canonicalPath));
                    }

                    zipFile.extractFile(fileHeader, destDirectory);
                    if (!fileHeader.isDirectory()) {
                        extractedFileNames.add(fileHeader.getFileName());
                    }
                    updateProgress(i + 1, totalFiles, zipFilePath);
                }
                promise.resolve(Arguments.fromList(extractedFileNames));
            } catch (Exception ex) {
                updateProgress(0, 1, zipFilePath); // force 0%
                promise.reject(null, String.format("Failed to unzip file, due to: %s", getStackTrace(ex)));
            }
        }
    }).start();
}
/**
 * Extract an (unencrypted) zip to destDirectory on a background thread, using
 * the given charset for entry names on Android N+. Emits byte-accurate progress
 * events (at most once per percent) and resolves with destDirectory.
 */
@ReactMethod
public void unzip(final String zipFilePath, final String destDirectory, final String charset, final Promise promise) {
    new Thread(new Runnable() {
        @Override
        public void run() {
            // Guard against a null path (new File(null) throws NPE).
            try {
                new File(zipFilePath);
            } catch (NullPointerException e) {
                promise.reject(null, "Couldn't open file " + zipFilePath + ". ");
                return;
            }

            try {
                // Find the total uncompressed size of every file in the zip, so we can
                // get an accurate progress measurement
                final long totalUncompressedBytes = getUncompressedSize(zipFilePath, charset);

                File destDir = new File(destDirectory);
                if (!destDir.exists()) {
                    //noinspection ResultOfMethodCallIgnored
                    destDir.mkdirs();
                }

                updateProgress(0, 1, zipFilePath); // force 0%

                // We use arrays here so we can update values from inside the callback
                final long[] extractedBytes = {0};
                final int[] lastPercentage = {0};

                ZipFile zipFile;
                if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.N) {
                    // Only N+ supports an explicit entry-name charset.
                    zipFile = new ZipFile(zipFilePath, Charset.forName(charset));
                } else {
                    zipFile = new ZipFile(zipFilePath);
                }
                final Enumeration<? extends ZipEntry> entries = zipFile.entries();
                Log.d(TAG, "Zip has " + zipFile.size() + " entries");

                while (entries.hasMoreElements()) {
                    final ZipEntry entry = entries.nextElement();
                    if (entry.isDirectory()) continue;

                    StreamUtil.ProgressCallback cb = new StreamUtil.ProgressCallback() {
                        @Override
                        public void onCopyProgress(long bytesRead) {
                            extractedBytes[0] += bytesRead;
                            int lastTime = lastPercentage[0];
                            int percentDone = (int) ((double) extractedBytes[0] * 100 / (double) totalUncompressedBytes);
                            // update at most once per percent.
                            if (percentDone > lastTime) {
                                lastPercentage[0] = percentDone;
                                updateProgress(extractedBytes[0], totalUncompressedBytes, zipFilePath);
                            }
                        }
                    };

                    // Zip Slip protection: refuse entries that resolve outside destDirectory.
                    File fout = new File(destDirectory, entry.getName());
                    String canonicalPath = fout.getCanonicalPath();
                    String destDirCanonicalPath = (new File(destDirectory).getCanonicalPath()) + File.separator;
                    if (!canonicalPath.startsWith(destDirCanonicalPath)) {
                        throw new SecurityException(String.format("Found Zip Path Traversal Vulnerability with %s", canonicalPath));
                    }

                    if (!fout.exists()) {
                        //noinspection ResultOfMethodCallIgnored
                        (new File(fout.getParent())).mkdirs();
                    }

                    // try-with-resources replaces the manual close-in-catch dance and
                    // can't leak streams. Like before, a failure on one entry does not
                    // abort the remaining entries — but it is now logged instead of
                    // being silently swallowed.
                    try (InputStream in = zipFile.getInputStream(entry);
                         BufferedOutputStream bout = new BufferedOutputStream(new FileOutputStream(fout))) {
                        StreamUtil.copy(in, bout, cb);
                    } catch (IOException ex) {
                        Log.e(TAG, "Failed to extract entry " + entry.getName(), ex);
                    }
                }
                zipFile.close();

                updateProgress(1, 1, zipFilePath); // force 100%
                promise.resolve(destDirectory);
            } catch (Exception ex) {
                updateProgress(0, 1, zipFilePath); // force 0%
                promise.reject(null, "Failed to extract file " + ex.getLocalizedMessage());
            }
        }
    }).start();
}
/**
 * Extract a zip held in the assets directory.
 * <p>
 * Note that the progress value isn't as accurate as when unzipping
 * from a file. When reading a zip from a stream, we can't
 * get accurate uncompressed sizes for files (ZipEntry#getCompressedSize() returns -1).
 * <p>
 * Instead, we compare the number of bytes extracted to the size of the compressed zip file.
 * In most cases this means the progress 'stays on' 100% for a little bit (compressedSize < uncompressed size)
 */
@ReactMethod
public void unzipAssets(final String assetsPath, final String destDirectory, final Promise promise) {
    new Thread(new Runnable() {
        @Override
        public void run() {
            InputStream assetsInputStream;
            // Compressed size of the whole asset; used as the progress denominator.
            final long size;

            try {
                assetsInputStream = getReactApplicationContext().getAssets().open(assetsPath);
                AssetFileDescriptor fileDescriptor = getReactApplicationContext().getAssets().openFd(assetsPath);
                size = fileDescriptor.getLength();
            } catch (IOException e) {
                promise.reject(null, String.format("Asset file `%s` could not be opened", assetsPath));
                return;
            }

            try {
                try {
                    File destDir = new File(destDirectory);
                    if (!destDir.exists()) {
                        //noinspection ResultOfMethodCallIgnored
                        destDir.mkdirs();
                    }

                    ZipInputStream zipIn = new ZipInputStream(assetsInputStream);
                    BufferedInputStream bin = new BufferedInputStream(zipIn);
                    ZipEntry entry;

                    // Arrays so the progress callback below can mutate them.
                    final long[] extractedBytes = {0};
                    final int[] lastPercentage = {0};

                    updateProgress(0, 1, assetsPath); // force 0%

                    File fout;
                    while ((entry = zipIn.getNextEntry()) != null) {
                        if (entry.isDirectory()) continue;
                        fout = new File(destDirectory, entry.getName());

                        // Zip Slip protection: refuse entries that resolve outside destDirectory.
                        String canonicalPath = fout.getCanonicalPath();
                        String destDirCanonicalPath = (new File(destDirectory).getCanonicalPath()) + File.separator;

                        if (!canonicalPath.startsWith(destDirCanonicalPath)) {
                            throw new SecurityException(String.format("Found Zip Path Traversal Vulnerability with %s", canonicalPath));
                        }

                        if (!fout.exists()) {
                            //noinspection ResultOfMethodCallIgnored
                            (new File(fout.getParent())).mkdirs();
                        }

                        final ZipEntry finalEntry = entry;
                        StreamUtil.ProgressCallback cb = new StreamUtil.ProgressCallback() {
                            @Override
                            public void onCopyProgress(long bytesRead) {
                                extractedBytes[0] += bytesRead;
                                int lastTime = lastPercentage[0];
                                int percentDone = (int) ((double) extractedBytes[0] * 100 / (double) size);

                                // update at most once per percent.
                                if (percentDone > lastTime) {
                                    lastPercentage[0] = percentDone;
                                    updateProgress(extractedBytes[0], size, finalEntry.getName());
                                }
                            }
                        };

                        // bin wraps zipIn, so this reads the current entry's bytes.
                        FileOutputStream out = new FileOutputStream(fout);
                        BufferedOutputStream Bout = new BufferedOutputStream(out);
                        StreamUtil.copy(bin, Bout, cb);
                        Bout.close();
                        out.close();
                    }

                    updateProgress(1, 1, assetsPath); // force 100%

                    bin.close();
                    zipIn.close();
                } catch (Exception ex) {
                    ex.printStackTrace();
                    updateProgress(0, 1, assetsPath); // force 0%
                    throw new Exception(String.format("Couldn't extract %s", assetsPath));
                }
            } catch (Exception ex) {
                promise.reject(null, ex.getMessage());
                return;
            }

            promise.resolve(destDirectory);
        }
    }).start();
}
/**
 * Zip the given list of file paths into destDirectory (the destination
 * archive path). Resolution/rejection is handled by {@code processZip}.
 */
@ReactMethod
public void zipFiles(final ReadableArray files, final String destDirectory, final Promise promise) {
    zip(files.toArrayList(), destDirectory, promise);
}
/**
 * Zip a single folder into destFile. The folder is wrapped in a one-element
 * list because {@code zip} operates on a list of paths.
 */
@ReactMethod
public void zipFolder(final String folder, final String destFile, final Promise promise) {
    ArrayList<Object> folderAsArrayList = new ArrayList<>();
    folderAsArrayList.add(folder);
    zip(folderAsArrayList, destFile, promise);
}
/**
 * Zip the given list of file paths into an encrypted archive at destFile.
 * {@code encryptionMethod} is e.g. "AES-128", "AES-256" or "STANDARD".
 */
@ReactMethod
public void zipFilesWithPassword(final ReadableArray files, final String destFile, final String password,
                                 String encryptionMethod, Promise promise) {
    zipWithPassword(files.toArrayList(), destFile, password, encryptionMethod, promise);
}
/**
 * Zip a single folder into an encrypted archive at destFile.
 * {@code encryptionMethod} is e.g. "AES-128", "AES-256" or "STANDARD".
 */
@ReactMethod
public void zipFolderWithPassword(final String folder, final String destFile, final String password,
                                  String encryptionMethod, Promise promise) {
    ArrayList<Object> folderAsArrayList = new ArrayList<>();
    folderAsArrayList.add(folder);
    zipWithPassword(folderAsArrayList, destFile, password, encryptionMethod, promise);
}
/**
 * Build encrypted zip parameters and delegate to {@code processZip}.
 * Supported encryption methods: "AES-128", "AES-256" (and bare "AES",
 * defaulting to 128 bit), "STANDARD"; anything else falls back to standard
 * zip encryption. Rejects immediately if the password is null/empty.
 */
private void zipWithPassword(final ArrayList<Object> filesOrDirectory, final String destFile, final String password,
                             String encryptionMethod, Promise promise) {
    try {
        ZipParameters parameters = new ZipParameters();
        parameters.setCompressionMethod(CompressionMethod.DEFLATE);
        parameters.setCompressionLevel(CompressionLevel.NORMAL);

        if (password == null || password.isEmpty()) {
            // Fixed: previously execution continued after this reject, which could
            // NPE on password.toCharArray() and settle the promise a second time.
            promise.reject(null, "Password is empty");
            return;
        }

        String[] encParts = encryptionMethod.split("-");
        parameters.setEncryptFiles(true);
        if (encParts[0].equals("AES")) {
            parameters.setEncryptionMethod(EncryptionMethod.AES);
            // Guard against a bare "AES" (no "-<bits>" suffix); unknown suffixes
            // and "AES-128" both use 128-bit keys, "AES-256" uses 256-bit keys.
            if (encParts.length > 1 && encParts[1].equals("256")) {
                parameters.setAesKeyStrength(AesKeyStrength.KEY_STRENGTH_256);
            } else {
                parameters.setAesKeyStrength(AesKeyStrength.KEY_STRENGTH_128);
            }
        } else if (encryptionMethod.equals("STANDARD")) {
            parameters.setEncryptionMethod(EncryptionMethod.ZIP_STANDARD_VARIANT_STRONG);
            Log.d(TAG, "Standard Encryption");
        } else {
            parameters.setEncryptionMethod(EncryptionMethod.ZIP_STANDARD);
            Log.d(TAG, "Encryption type not supported default to Standard Encryption");
        }

        processZip(filesOrDirectory, destFile, parameters, promise, password.toCharArray());
    } catch (Exception ex) {
        promise.reject(null, ex.getMessage());
    }
}
/**
 * Build plain (unencrypted) zip parameters — deflate at normal compression —
 * and delegate to {@code processZip}.
 */
private void zip(final ArrayList<Object> filesOrDirectory, final String destFile, final Promise promise) {
    try {
        final ZipParameters params = new ZipParameters();
        params.setCompressionMethod(CompressionMethod.DEFLATE);
        params.setCompressionLevel(CompressionLevel.NORMAL);
        processZip(filesOrDirectory, destFile, params, promise, null);
    } catch (Exception ex) {
        promise.reject(null, ex.getMessage());
    }
}
/**
 * Add the given files/directories to a zip4j archive at destFile on a
 * background thread (encrypted when {@code password} is non-null), emitting
 * per-file progress events. Resolves with destFile, rejects on the first
 * missing path or any zip4j error.
 */
private void processZip(final ArrayList<Object> entries, final String destFile, final ZipParameters parameters, final Promise promise, final char[] password) {
    new Thread(new Runnable() {
        @Override
        public void run() {
            try {
                net.lingala.zip4j.ZipFile zipFile;
                if (password != null) {
                    zipFile = new net.lingala.zip4j.ZipFile(destFile, password);
                } else {
                    zipFile = new net.lingala.zip4j.ZipFile(destFile);
                }

                updateProgress(0, 100, destFile);
                int totalFiles = 0;
                int fileCounter = 0;

                for (int i = 0; i < entries.size(); i++) {
                    File f = new File(entries.get(i).toString());
                    if (!f.exists()) {
                        // Fixed: previously this rejected and kept looping, which
                        // could settle the promise a second time via resolve below.
                        promise.reject(null, "File or folder does not exist");
                        return;
                    }

                    if (f.isDirectory()) {
                        // listFiles() returns null on I/O error; treat as empty.
                        File[] children = f.listFiles();
                        List<File> files = children == null ? new ArrayList<File>() : Arrays.asList(children);
                        totalFiles += files.size();

                        for (int j = 0; j < files.size(); j++) {
                            if (files.get(j).isDirectory()) {
                                zipFile.addFolder(files.get(j), parameters);
                            } else {
                                zipFile.addFile(files.get(j), parameters);
                            }
                            fileCounter += 1;
                            updateProgress(fileCounter, totalFiles, destFile);
                        }
                    } else {
                        totalFiles += 1;
                        zipFile.addFile(f, parameters);
                        fileCounter += 1;
                        updateProgress(fileCounter, totalFiles, destFile);
                    }
                }

                // Fixed: the forced 100% update used to run once per loop iteration;
                // it now fires once after all entries have been added.
                updateProgress(1, 1, destFile); // force 100%
                promise.resolve(destFile);
            } catch (Exception ex) {
                promise.reject(null, ex.getMessage());
            }
        }
    }).start();
}
/**
 * Emit a progress event to JS with the path being processed and a 0..1
 * completion fraction.
 */
protected void updateProgress(long extractedBytes, long totalSize, String zipFilePath) {
    // Clamp so rounding/estimation can never report more than 100%.
    final double progress = Math.min((double) extractedBytes / (double) totalSize, 1);
    Log.d(TAG, String.format("updateProgress: %.0f%%", progress * 100));

    final WritableMap payload = Arguments.createMap();
    payload.putString(EVENT_KEY_FILENAME, zipFilePath);
    payload.putDouble(EVENT_KEY_PROGRESS, progress);
    getReactApplicationContext()
            .getJSModule(DeviceEventManagerModule.RCTDeviceEventEmitter.class)
            .emit(PROGRESS_EVENT_NAME, payload);
}
/**
 * Return the uncompressed size of the ZipFile (only works for files on disk, not in assets)
 *
 * @param zipFilePath path of the archive on disk
 * @param charset     charset used for entry names (honoured on API >= N only)
 * @return total uncompressed size in bytes, or -1 on failure
 */
private long getUncompressedSize(String zipFilePath, String charset) {
    long totalSize = 0;
    // try-with-resources: the previous version leaked the ZipFile handle when an
    // IOException was thrown between opening and the explicit close().
    try (ZipFile zipFile = Build.VERSION.SDK_INT >= Build.VERSION_CODES.N
            ? new ZipFile(zipFilePath, Charset.forName(charset))
            : new ZipFile(zipFilePath)) {
        Enumeration<? extends ZipEntry> entries = zipFile.entries();
        while (entries.hasMoreElements()) {
            long size = entries.nextElement().getSize();
            if (size != -1) { // -1 means "size unknown" for this entry; skip it
                totalSize += size;
            }
        }
    } catch (IOException ignored) {
        return -1; // unreadable/corrupt archive: caller treats -1 as "unknown"
    }
    return totalSize;
}
/**
 * Returns the exception stack trace as a string
 */
private String getStackTrace(Exception e) {
    final StringWriter buffer = new StringWriter();
    e.printStackTrace(new PrintWriter(buffer, true));
    return buffer.toString();
}
}
|
android/src/main/java/com/rnziparchive/RNZipArchiveModule.java
|
package com.rnziparchive;
import android.content.res.AssetFileDescriptor;
import android.os.Build;
import android.util.Log;
import com.facebook.react.bridge.Arguments;
import com.facebook.react.bridge.Promise;
import com.facebook.react.bridge.ReactApplicationContext;
import com.facebook.react.bridge.ReactContextBaseJavaModule;
import com.facebook.react.bridge.ReactMethod;
import com.facebook.react.bridge.WritableMap;
import com.facebook.react.bridge.ReadableArray;
import com.facebook.react.modules.core.DeviceEventManagerModule;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Enumeration;
import java.util.List;
import java.util.zip.ZipEntry;
import java.util.zip.ZipFile;
import java.util.zip.ZipInputStream;
import net.lingala.zip4j.exception.ZipException;
import net.lingala.zip4j.model.FileHeader;
import net.lingala.zip4j.model.ZipParameters;
import net.lingala.zip4j.model.enums.CompressionMethod;
import net.lingala.zip4j.model.enums.CompressionLevel;
import net.lingala.zip4j.model.enums.EncryptionMethod;
import net.lingala.zip4j.model.enums.AesKeyStrength;
import java.nio.charset.Charset;
public class RNZipArchiveModule extends ReactContextBaseJavaModule {
private static final String TAG = RNZipArchiveModule.class.getSimpleName();
private static final String PROGRESS_EVENT_NAME = "zipArchiveProgressEvent";
private static final String EVENT_KEY_FILENAME = "filePath";
private static final String EVENT_KEY_PROGRESS = "progress";
// Standard React Native module plumbing: register this module with the RN context.
public RNZipArchiveModule(ReactApplicationContext reactContext) {
    super(reactContext);
}

// Name under which this module is exposed to JS (NativeModules.RNZipArchive).
@Override
public String getName() {
    return "RNZipArchive";
}
/**
 * Resolves true when the archive's headers flag any encrypted entry,
 * rejects when the archive cannot be read.
 */
@ReactMethod
public void isPasswordProtected(final String zipFilePath, final Promise promise) {
    try {
        promise.resolve(new net.lingala.zip4j.ZipFile(zipFilePath).isEncrypted());
    } catch (ZipException ex) {
        promise.reject(null, String.format("Unable to check for encryption due to: %s", getStackTrace(ex)));
    }
}
/**
 * Extracts a password-protected zip on a background thread, emitting progress
 * events per entry. Resolves with the list of extracted (non-directory) entry
 * names, rejects if the archive is not encrypted or extraction fails.
 */
@ReactMethod
public void unzipWithPassword(final String zipFilePath, final String destDirectory,
                              final String password, final Promise promise) {
    new Thread(new Runnable() {
        @Override
        public void run() {
            try {
                net.lingala.zip4j.ZipFile zipFile = new net.lingala.zip4j.ZipFile(zipFilePath);
                if (!zipFile.isEncrypted()) {
                    // Bug fix: extraction previously carried on after this reject, so the
                    // promise could be settled a second time (reject then resolve/reject).
                    promise.reject(null, String.format("Zip file: %s is not password protected", zipFilePath));
                    return;
                }
                zipFile.setPassword(password.toCharArray());
                List<FileHeader> fileHeaderList = zipFile.getFileHeaders();
                List<String> extractedFileNames = new ArrayList<>();
                int totalFiles = fileHeaderList.size();
                updateProgress(0, 1, zipFilePath); // force 0%
                for (int i = 0; i < totalFiles; i++) {
                    FileHeader fileHeader = fileHeaderList.get(i);
                    // Zip-slip guard: an entry named e.g. "../../x" must not be able to
                    // escape the destination directory.
                    File fout = new File(destDirectory, fileHeader.getFileName());
                    String canonicalPath = fout.getCanonicalPath();
                    String destDirCanonicalPath = (new File(destDirectory).getCanonicalPath()) + File.separator;
                    if (!canonicalPath.startsWith(destDirCanonicalPath)) {
                        throw new SecurityException(String.format("Found Zip Path Traversal Vulnerability with %s", canonicalPath));
                    }
                    zipFile.extractFile(fileHeader, destDirectory);
                    if (!fileHeader.isDirectory()) {
                        extractedFileNames.add(fileHeader.getFileName());
                    }
                    updateProgress(i + 1, totalFiles, zipFilePath);
                }
                promise.resolve(Arguments.fromList(extractedFileNames));
            } catch (Exception ex) {
                updateProgress(0, 1, zipFilePath); // force 0%
                promise.reject(null, String.format("Failed to unzip file, due to: %s", getStackTrace(ex)));
            }
        }
    }).start();
}
/**
 * Extracts an unencrypted zip on a background thread, emitting one progress
 * event per whole percent. Resolves with the destination directory.
 */
@ReactMethod
public void unzip(final String zipFilePath, final String destDirectory, final String charset, final Promise promise) {
    new Thread(new Runnable() {
        @Override
        public void run() {
            // Probe that the archive exists and is readable before doing any work.
            try {
                FileInputStream probe = new FileInputStream(zipFilePath);
                // Bug fix: the probe stream was never closed on the success path.
                probe.close();
            } catch (IOException | NullPointerException e) {
                // #221: surface the underlying reason (e.g. a permission error)
                // instead of only the bare "couldn't open" message.
                promise.reject(null, "Couldn't open file " + zipFilePath + ". " + e.getLocalizedMessage());
                return;
            }
            try {
                // Find the total uncompressed size of every file in the zip, so we can
                // get an accurate progress measurement
                final long totalUncompressedBytes = getUncompressedSize(zipFilePath, charset);
                File destDir = new File(destDirectory);
                if (!destDir.exists()) {
                    //noinspection ResultOfMethodCallIgnored
                    destDir.mkdirs();
                }
                updateProgress(0, 1, zipFilePath); // force 0%
                // Arrays so the values can be updated from inside the callback.
                final long[] extractedBytes = {0};
                final int[] lastPercentage = {0};
                ZipFile zipFile = Build.VERSION.SDK_INT >= Build.VERSION_CODES.N
                        ? new ZipFile(zipFilePath, Charset.forName(charset))
                        : new ZipFile(zipFilePath);
                try {
                    final Enumeration<? extends ZipEntry> entries = zipFile.entries();
                    Log.d(TAG, "Zip has " + zipFile.size() + " entries");
                    while (entries.hasMoreElements()) {
                        final ZipEntry entry = entries.nextElement();
                        if (entry.isDirectory()) continue;
                        StreamUtil.ProgressCallback cb = new StreamUtil.ProgressCallback() {
                            @Override
                            public void onCopyProgress(long bytesRead) {
                                extractedBytes[0] += bytesRead;
                                int lastTime = lastPercentage[0];
                                int percentDone = (int) ((double) extractedBytes[0] * 100 / (double) totalUncompressedBytes);
                                // update at most once per percent.
                                if (percentDone > lastTime) {
                                    lastPercentage[0] = percentDone;
                                    updateProgress(extractedBytes[0], totalUncompressedBytes, zipFilePath);
                                }
                            }
                        };
                        // Zip-slip guard: the canonical target must stay inside destDirectory.
                        File fout = new File(destDirectory, entry.getName());
                        String canonicalPath = fout.getCanonicalPath();
                        String destDirCanonicalPath = (new File(destDirectory).getCanonicalPath()) + File.separator;
                        if (!canonicalPath.startsWith(destDirCanonicalPath)) {
                            throw new SecurityException(String.format("Found Zip Path Traversal Vulnerability with %s", canonicalPath));
                        }
                        if (!fout.exists()) {
                            //noinspection ResultOfMethodCallIgnored
                            (new File(fout.getParent())).mkdirs();
                        }
                        InputStream in = null;
                        BufferedOutputStream out = null;
                        try {
                            in = zipFile.getInputStream(entry);
                            out = new BufferedOutputStream(new FileOutputStream(fout));
                            StreamUtil.copy(in, out, cb);
                        } catch (IOException ex) {
                            // NOTE(review): kept from the original — a failed entry is skipped
                            // and extraction continues; the promise still resolves. Now logged
                            // instead of being silently swallowed.
                            Log.e(TAG, "Failed to extract entry " + entry.getName(), ex);
                        } finally {
                            if (out != null) { try { out.close(); } catch (Exception ignored) {} }
                            if (in != null) { try { in.close(); } catch (Exception ignored) {} }
                        }
                    }
                } finally {
                    zipFile.close(); // bug fix: previously leaked when extraction threw
                }
                updateProgress(1, 1, zipFilePath); // force 100%
                promise.resolve(destDirectory);
            } catch (Exception ex) {
                updateProgress(0, 1, zipFilePath); // force 0%
                promise.reject(null, "Failed to extract file " + ex.getLocalizedMessage());
            }
        }
    }).start();
}
/**
 * Extract a zip held in the assets directory.
 * <p>
 * Note that the progress value isn't as accurate as when unzipping
 * from a file. When reading a zip from a stream, we can't
 * get accurate uncompressed sizes for files (ZipEntry#getCompressedSize() returns -1).
 * <p>
 * Instead, we compare the number of bytes extracted to the size of the compressed zip file.
 * In most cases this means the progress 'stays on' 100% for a little bit (compressedSize < uncompressed size)
 */
@ReactMethod
public void unzipAssets(final String assetsPath, final String destDirectory, final Promise promise) {
    new Thread(new Runnable() {
        @Override
        public void run() {
            InputStream assetsInputStream;
            final long size;
            try {
                assetsInputStream = getReactApplicationContext().getAssets().open(assetsPath);
                AssetFileDescriptor fileDescriptor = getReactApplicationContext().getAssets().openFd(assetsPath);
                size = fileDescriptor.getLength();
            } catch (IOException e) {
                promise.reject(null, String.format("Asset file `%s` could not be opened", assetsPath));
                return;
            }
            ZipInputStream zipIn = null;
            BufferedInputStream bin = null;
            try {
                File destDir = new File(destDirectory);
                if (!destDir.exists()) {
                    //noinspection ResultOfMethodCallIgnored
                    destDir.mkdirs();
                }
                zipIn = new ZipInputStream(assetsInputStream);
                bin = new BufferedInputStream(zipIn);
                ZipEntry entry;
                final long[] extractedBytes = {0};
                final int[] lastPercentage = {0};
                updateProgress(0, 1, assetsPath); // force 0%
                while ((entry = zipIn.getNextEntry()) != null) {
                    if (entry.isDirectory()) continue;
                    File fout = new File(destDirectory, entry.getName());
                    // Zip-slip guard: the canonical target must stay inside destDirectory.
                    String canonicalPath = fout.getCanonicalPath();
                    String destDirCanonicalPath = (new File(destDirectory).getCanonicalPath()) + File.separator;
                    if (!canonicalPath.startsWith(destDirCanonicalPath)) {
                        throw new SecurityException(String.format("Found Zip Path Traversal Vulnerability with %s", canonicalPath));
                    }
                    if (!fout.exists()) {
                        //noinspection ResultOfMethodCallIgnored
                        (new File(fout.getParent())).mkdirs();
                    }
                    final ZipEntry finalEntry = entry;
                    // Progress is measured against the *compressed* asset size (see javadoc).
                    StreamUtil.ProgressCallback cb = new StreamUtil.ProgressCallback() {
                        @Override
                        public void onCopyProgress(long bytesRead) {
                            extractedBytes[0] += bytesRead;
                            int lastTime = lastPercentage[0];
                            int percentDone = (int) ((double) extractedBytes[0] * 100 / (double) size);
                            // update at most once per percent.
                            if (percentDone > lastTime) {
                                lastPercentage[0] = percentDone;
                                updateProgress(extractedBytes[0], size, finalEntry.getName());
                            }
                        }
                    };
                    BufferedOutputStream Bout = new BufferedOutputStream(new FileOutputStream(fout));
                    try {
                        StreamUtil.copy(bin, Bout, cb);
                    } finally {
                        Bout.close(); // bug fix: previously leaked when copy() threw
                    }
                }
                updateProgress(1, 1, assetsPath); // force 100%
            } catch (Exception ex) {
                ex.printStackTrace();
                updateProgress(0, 1, assetsPath); // force 0%
                // Same message the original produced via the wrapped Exception.
                promise.reject(null, String.format("Couldn't extract %s", assetsPath));
                return;
            } finally {
                // Bug fix: the zip/asset streams used to leak on every error path.
                try {
                    if (bin != null) bin.close();
                    if (zipIn != null) zipIn.close();
                    assetsInputStream.close();
                } catch (IOException ignored) {
                }
            }
            promise.resolve(destDirectory);
        }
    }).start();
}
// Thin JS-facing wrappers: normalise the input (ReadableArray or single folder
// path) into an ArrayList and delegate to the private zip/zipWithPassword helpers.

@ReactMethod
public void zipFiles(final ReadableArray files, final String destDirectory, final Promise promise) {
    zip(files.toArrayList(), destDirectory, promise);
}

@ReactMethod
public void zipFolder(final String folder, final String destFile, final Promise promise) {
    ArrayList<Object> folderAsArrayList = new ArrayList<>();
    folderAsArrayList.add(folder);
    zip(folderAsArrayList, destFile, promise);
}

@ReactMethod
public void zipFilesWithPassword(final ReadableArray files, final String destFile, final String password,
    String encryptionMethod, Promise promise) {
    zipWithPassword(files.toArrayList(), destFile, password, encryptionMethod, promise);
}

@ReactMethod
public void zipFolderWithPassword(final String folder, final String destFile, final String password,
    String encryptionMethod, Promise promise) {
    ArrayList<Object> folderAsArrayList = new ArrayList<>();
    folderAsArrayList.add(folder);
    zipWithPassword(folderAsArrayList, destFile, password, encryptionMethod, promise);
}
/**
 * Configures encryption from the "AES-128"/"AES-256"/"STANDARD" method string
 * and delegates archive creation to processZip.
 *
 * @param encryptionMethod "AES-128", "AES-256", "STANDARD"; anything else
 *                         falls back to standard zip encryption
 */
private void zipWithPassword(final ArrayList<Object> filesOrDirectory, final String destFile, final String password,
                             String encryptionMethod, Promise promise) {
    try {
        ZipParameters parameters = new ZipParameters();
        parameters.setCompressionMethod(CompressionMethod.DEFLATE);
        parameters.setCompressionLevel(CompressionLevel.NORMAL);
        if (password == null || password.isEmpty()) {
            // Bug fix: execution previously fell through after this reject and
            // called password.toCharArray(), NPE-ing on a null password and
            // settling the promise twice on an empty one.
            promise.reject(null, "Password is empty");
            return;
        }
        parameters.setEncryptFiles(true);
        String[] encParts = encryptionMethod.split("-");
        if (encParts[0].equals("AES")) {
            parameters.setEncryptionMethod(EncryptionMethod.AES);
            // Default to 128-bit when the strength suffix is missing or unrecognised
            // (a bare "AES" previously threw ArrayIndexOutOfBoundsException).
            if (encParts.length > 1 && encParts[1].equals("256")) {
                parameters.setAesKeyStrength(AesKeyStrength.KEY_STRENGTH_256);
            } else {
                parameters.setAesKeyStrength(AesKeyStrength.KEY_STRENGTH_128);
            }
        } else if (encryptionMethod.equals("STANDARD")) {
            // Bug fix: ZIP_STANDARD_VARIANT_STRONG cannot be written by zip4j
            // (archive creation fails with "strong encryption is not supported");
            // use the writable ZIP_STANDARD method instead. TODO confirm against
            // the zip4j version pinned by this project.
            parameters.setEncryptionMethod(EncryptionMethod.ZIP_STANDARD);
            Log.d(TAG, "Standard Encryption");
        } else {
            parameters.setEncryptionMethod(EncryptionMethod.ZIP_STANDARD);
            Log.d(TAG, "Encryption type not supported default to Standard Encryption");
        }
        processZip(filesOrDirectory, destFile, parameters, promise, password.toCharArray());
    } catch (Exception ex) {
        promise.reject(null, ex.getMessage());
    }
}
private void zip(final ArrayList<Object> filesOrDirectory, final String destFile, final Promise promise) {
    try {
        // Plain (unencrypted) archive: DEFLATE at normal compression level.
        final ZipParameters parameters = new ZipParameters();
        parameters.setCompressionMethod(CompressionMethod.DEFLATE);
        parameters.setCompressionLevel(CompressionLevel.NORMAL);
        processZip(filesOrDirectory, destFile, parameters, promise, /* password */ null);
    } catch (Exception ex) {
        promise.reject(null, ex.getMessage());
    }
}
/**
 * Builds the archive on a background thread so the JS thread is not blocked.
 * Settles the promise exactly once: rejects on the first missing/unreadable
 * entry or on any zip error, resolves with the destination path on success.
 *
 * @param entries    paths of files or directories to add
 * @param destFile   path of the archive to create
 * @param parameters compression/encryption settings for every entry
 * @param promise    settled exactly once with the result
 * @param password   archive password, or null for an unencrypted archive
 */
private void processZip(final ArrayList<Object> entries, final String destFile, final ZipParameters parameters, final Promise promise, final char[] password) {
    new Thread(new Runnable() {
        @Override
        public void run() {
            try {
                net.lingala.zip4j.ZipFile zipFile;
                if (password != null) {
                    zipFile = new net.lingala.zip4j.ZipFile(destFile, password);
                } else {
                    zipFile = new net.lingala.zip4j.ZipFile(destFile);
                }
                updateProgress(0, 100, destFile); // force ~0%
                int totalFiles = 0;
                int fileCounter = 0;
                for (int i = 0; i < entries.size(); i++) {
                    File f = new File(entries.get(i).toString());
                    if (!f.exists()) {
                        // Bug fix: the loop previously kept running after this reject and
                        // the promise was later resolved as well; a promise must settle once.
                        promise.reject(null, "File or folder does not exist");
                        return;
                    }
                    if (f.isDirectory()) {
                        File[] children = f.listFiles();
                        // listFiles() returns null on an I/O error; previously this NPE'd.
                        if (children == null) {
                            promise.reject(null, "File or folder does not exist");
                            return;
                        }
                        List<File> files = Arrays.asList(children);
                        totalFiles += files.size();
                        for (int j = 0; j < files.size(); j++) {
                            if (files.get(j).isDirectory()) {
                                zipFile.addFolder(files.get(j), parameters);
                            } else {
                                zipFile.addFile(files.get(j), parameters);
                            }
                            fileCounter += 1;
                            updateProgress(fileCounter, totalFiles, destFile);
                        }
                    } else {
                        totalFiles += 1;
                        zipFile.addFile(f, parameters);
                        fileCounter += 1;
                        updateProgress(fileCounter, totalFiles, destFile);
                    }
                }
                // Bug fix: the forced-100% event used to fire once per entry inside
                // the loop; emit it a single time after all entries are archived.
                updateProgress(1, 1, destFile);
                promise.resolve(destFile);
            } catch (Exception ex) {
                promise.reject(null, ex.getMessage());
            }
        }
    }).start();
}
/**
 * Emits a zipArchiveProgressEvent to JavaScript carrying the file path and a
 * progress fraction in [0, 1]. The fraction is clamped so rounding can never
 * report more than 100%.
 */
protected void updateProgress(long extractedBytes, long totalSize, String zipFilePath) {
    final double fraction = Math.min((double) extractedBytes / (double) totalSize, 1);
    Log.d(TAG, String.format("updateProgress: %.0f%%", fraction * 100));
    final WritableMap payload = Arguments.createMap();
    payload.putString(EVENT_KEY_FILENAME, zipFilePath);
    payload.putDouble(EVENT_KEY_PROGRESS, fraction);
    getReactApplicationContext()
            .getJSModule(DeviceEventManagerModule.RCTDeviceEventEmitter.class)
            .emit(PROGRESS_EVENT_NAME, payload);
}
/**
 * Return the uncompressed size of the ZipFile (only works for files on disk, not in assets)
 *
 * @param zipFilePath path of the archive on disk
 * @param charset     charset used for entry names (honoured on API >= N only)
 * @return total uncompressed size in bytes, or -1 on failure
 */
private long getUncompressedSize(String zipFilePath, String charset) {
    long totalSize = 0;
    // try-with-resources: the previous version leaked the ZipFile handle when an
    // IOException was thrown between opening and the explicit close().
    try (ZipFile zipFile = Build.VERSION.SDK_INT >= Build.VERSION_CODES.N
            ? new ZipFile(zipFilePath, Charset.forName(charset))
            : new ZipFile(zipFilePath)) {
        Enumeration<? extends ZipEntry> entries = zipFile.entries();
        while (entries.hasMoreElements()) {
            long size = entries.nextElement().getSize();
            if (size != -1) { // -1 means "size unknown" for this entry; skip it
                totalSize += size;
            }
        }
    } catch (IOException ignored) {
        return -1; // unreadable/corrupt archive: caller treats -1 as "unknown"
    }
    return totalSize;
}
/**
 * Returns the exception stack trace as a string
 */
private String getStackTrace(Exception e) {
    final StringWriter buffer = new StringWriter();
    e.printStackTrace(new PrintWriter(buffer, true));
    return buffer.toString();
}
}
|
#221 raise the file permission error properly
|
android/src/main/java/com/rnziparchive/RNZipArchiveModule.java
|
#221 raise the file permission error properly
|
|
Java
|
mit
|
5411704a52140fb510e9d001e5776dfec0b75f68
| 0
|
juliesouchet/StreetCar,juliesouchet/StreetCar
|
package main.java.game;
import java.awt.Color;
import java.awt.Point;
import java.net.MalformedURLException;
import java.rmi.Naming;
import java.rmi.NotBoundException;
import java.rmi.RemoteException;
import java.rmi.server.ExportException;
import java.rmi.server.UnicastRemoteObject;
import main.java.data.Data;
import main.java.data.LoginInfo;
import main.java.data.Tile;
import main.java.player.PlayerInterface;
import main.java.util.Copier;
/**============================================================
* Remote Application
* URL: rmi://ip:port/gameName
* @author kassuskley
==============================================================*/
@SuppressWarnings("serial")
public class Game extends UnicastRemoteObject implements GameInterface, Runnable
{
// --------------------------------------------
// Attributes:
// --------------------------------------------
public static final String gameMessageHeader = "Street Car application: ";
public final static int applicationPort = 5000;
public final static String applicationProtocol = "rmi";
private Data data;
private LoginInfo[] loggedPlayerTable;
private Engine engine;
private Thread engineThread;
private Object engineLock;
// --------------------------------------------
// Builder:
// --------------------------------------------
/**=======================================================================
 * Builds the local game server: exports this object over RMI under
 * rmi://appIP:applicationPort/gameName, loads the board data and starts the
 * engine thread that consumes queued player actions.
 * @return Creates a local application that can be called as a local object
 * @throws RemoteException : network trouble (caught by the IHM)
 * @throws ExceptionUnknownBoardName: (caught by the IHM)
 * @throws RuntimeException :
 =========================================================================*/
public Game(String gameName, String appIP, String boardName, int nbrBuildingInLine) throws RemoteException, ExceptionUnknownBoardName, RuntimeException
{
    super();
    String url = null;
    try // Create the player's remote reference
    {
        url = applicationProtocol + "://" + appIP + ":" + applicationPort + "/" + gameName;
        try {
            java.rmi.registry.LocateRegistry.createRegistry(applicationPort);
        } catch(ExportException e) { // registry is already created
            // Deliberately swallowed: a registry already exported on this port
            // (e.g. by a previous Game in the same JVM) is reused by rebind below.
            // TODO: Close registry instead
        }
        Naming.rebind(url, this);
    }
    // A malformed URL is a programming error in the constants above; abort.
    catch (MalformedURLException e) {e.printStackTrace(); System.exit(0);}
    this.data = new Data(gameName, boardName, nbrBuildingInLine); // Init application
    this.loggedPlayerTable = LoginInfo.getInitialLoggedPlayerTable();
    this.engineLock = new Object();
    this.engine = new Engine(this.engineLock);
    this.engineThread = new Thread(this.engine);
    // Start consuming player actions; the engine waits on engineLock.
    this.engineThread .start();
    System.out.println("\n===========================================================");
    System.out.println(gameMessageHeader + "URL = " + url);
    System.out.println(gameMessageHeader + "ready");
    System.out.println(gameMessageHeader + "Start waiting for connexion request");
    System.out.println("===========================================================\n");
}
/**=======================================================================
 * @return Creates a remote application cloned to the real application at the given ip
 * @throws NotBoundException : The web host is not configured (throw RuntimeException)
 * @throws RemoteException : The web host is offline (caught by IHM)
 * @throws other java.rmi.Exception: NetworkError (caught by IHM)
 =========================================================================*/
public static GameInterface getRemoteGame(String appIP, String gameName) throws RemoteException, NotBoundException
{
    // Look up the stub the server bound under rmi://appIP:port/gameName.
    String url = applicationProtocol + "://" + appIP + ":" + applicationPort + "/" + gameName;
//// System.setSecurityManager(new RMISecurityManager());
    try
    {
        return (GameInterface) Naming.lookup(url);
    }
    // A malformed URL is a programming error in the constants above; abort.
    catch (MalformedURLException e) {e.printStackTrace(); System.exit(0);}
    return null; // unreachable: System.exit above, but required by the compiler
}
// --------------------------------------------
// Local methods:
// --------------------------------------------
// Runnable contract: intentionally empty — this object is exported over RMI
// and does no background work of its own.
public void run()
{
}
// --------------------------------------------
// Public methods: may be called by the remote object
// Must implement "throws RemoteException"
// Must be declared in the interface "RemoteApplicationInterface"
// --------------------------------------------
// Remote accessor: returns a clone keyed by playerName — presumably a
// player-scoped snapshot; confirm the exact filtering in Data.getClone.
public Data getData(String playerName) throws RemoteException
{
    return this.data.getClone(playerName);
}

// Remote accessor: returns a deep copy of the login table so remote callers
// cannot mutate the server's state.
public LoginInfo[]getLoginInfo(String playerName) throws RemoteException
{
    Copier<LoginInfo> cp = new Copier<LoginInfo>();
    return cp.copyTab(loggedPlayerTable);
}
/**
 * Host-only edit of one slot of the login table. Slots 0 and 1 (the host) are
 * immutable; the stored value is a defensive copy of the caller's object.
 *
 * @throws ExceptionForbiddenAction          caller is not the host, or the index is out of range
 * @throws ExceptionForbiddenHostModification attempt to edit a host slot
 */
public void setLoginInfo(String playerName, int playerToChangeIndex, LoginInfo newPlayerInfo) throws RemoteException, ExceptionForbiddenAction, ExceptionForbiddenHostModification
{
    if (!this.data.getHost().equals(playerName)) throw new ExceptionForbiddenAction();
    if (playerToChangeIndex <= 1) throw new ExceptionForbiddenHostModification();
    // Bug fix: an out-of-range index from a remote caller used to surface as an
    // unchecked ArrayIndexOutOfBoundsException; reject it as a forbidden action.
    if (playerToChangeIndex >= this.loggedPlayerTable.length) throw new ExceptionForbiddenAction();
    this.loggedPlayerTable[playerToChangeIndex] = newPlayerInfo.getClone();
    // TODO: notify the player whose entry was modified
}
// Admission control for a joining player: the party must have a free seat and
// the player's name and colour must both be unique; otherwise the matching
// checked exception is thrown back to the remote caller.
public void onJoinGame(PlayerInterface player, boolean isHost) throws RemoteException, ExceptionFullParty, ExceptionUsedPlayerName, ExceptionUsedPlayerColor
{
    if (this.data.getNbrPlayer() >= Data.maxNbrPlayer)
    {
        System.out.println("\n===========================================================");
        System.out.println(gameMessageHeader + "join request from player : \"" + player.getPlayerName() + "\"");
        System.out.println(gameMessageHeader + "Refusing player, party is currently full.");
        System.out.println("===========================================================\n");
        throw new ExceptionFullParty();
    }
    else if (this.data.containsPlayer(player.getPlayerName()))
    {
        System.out.println("\n===========================================================");
        System.out.println(gameMessageHeader + "join request from player : \"" + player.getPlayerName() + "\"");
        System.out.println(gameMessageHeader + "Refusing player, name already taken.");
        System.out.println("===========================================================\n");
        throw new ExceptionUsedPlayerName();
    }
    else if (this.usedColor(player.getColor()))
    {
        System.out.println("\n===========================================================");
        System.out.println(gameMessageHeader + "join request from player : \"" + player.getPlayerName() + "\"");
        System.out.println(gameMessageHeader + "Refusing player, color \"" + player.getColor() + "\" already taken.");
        System.out.println("===========================================================\n");
        throw new ExceptionUsedPlayerColor();
    }
    else
    {
        // All checks passed: register the player in the shared game data.
        this.data.addPlayer(player, player.getPlayerName(), player.getColor(), isHost);
        System.out.println("\n===========================================================");
        System.out.println(Game.gameMessageHeader + "join request from player : \"" + player.getPlayerName() + "\"");
        System.out.println(Game.gameMessageHeader + "accepted player");
        System.out.println(Game.gameMessageHeader + "NbrPlayer: " + this.data.getNbrPlayer());
        System.out.println("===========================================================\n");
    }
}
// Removes the named player from the game data.
// Returns true when the player was found and removed, false otherwise.
public boolean onQuitGame(String playerName) throws RemoteException
{
    String resS = null;  // human-readable outcome for the log below
    boolean res= false;
    for (String name: this.data.getPlayerNameList())
    {
        if (name.equals(playerName))
        {
            this.data.removePlayer(name);
            resS= "player logged out";
            res = true;
            break; // stop immediately: the list was obtained before the removal
        }
    }
    if (resS == null) {resS = "player not found in the local list"; res = false;}
    System.out.println("\n===========================================================");
    System.out.println(gameMessageHeader + "quitGame");
    System.out.println(gameMessageHeader + "logout result : " + resS);
    System.out.println(gameMessageHeader + "playerName : " + playerName);
    System.out.println("===========================================================\n");
    return res;
}
// Host-only: queues the "hostStartGame" action and wakes the engine thread
// that waits on engineLock to process it.
public void hostStartGame(String playerName) throws RemoteException, ExceptionForbiddenAction
{
    if (!this.data.getHost().equals(playerName)) throw new ExceptionForbiddenAction();
    this.engine.addAction(playerName, this.data, "hostStartGame", null, null);
    synchronized(this.engineLock)
    {
        try {this.engineLock.notify();}
        catch(Exception e) {e.printStackTrace(); System.exit(0);}
    }
}
// Simplified version used to test the AI.
// TODO: replace with public void placeTile(String playerName, int indexInHand, Point position, Direction rotation)
// Validates turn order and placement legality, then queues the action for the
// engine thread and wakes it.
public void placeTile(String playerName, Tile t, Point position)throws RemoteException, ExceptionGameHasNotStarted, ExceptionNotYourTurn, ExceptionForbiddenAction
{
    if (!this.data.isGameStarted()) throw new ExceptionGameHasNotStarted();
    if (!this.data.isPlayerTurn(playerName)) throw new ExceptionNotYourTurn();
    if (!this.data.isAcceptableTilePlacement(position.x, position.y, t)) throw new ExceptionForbiddenAction();
    this.engine.addAction(playerName, this.data, "placeTile", position, t);
    synchronized(this.engineLock)
    {
        try {this.engineLock.notify();}
        catch(Exception e) {e.printStackTrace(); System.exit(0);}
    }
}
// TODO: simplified version used to test the AI.
// Draws a card for the current player; nbrCards is currently unused — TODO confirm intent.
public Tile drawCard(String playerName, int nbrCards) throws RemoteException, ExceptionGameHasNotStarted, ExceptionNotYourTurn
{
    if (!this.data.isGameStarted()) throw new ExceptionGameHasNotStarted();
    if (!this.data.isPlayerTurn(playerName)) throw new ExceptionNotYourTurn();
    // TODO: add further exception checks here
    return this.data.drawCard();
}
// --------------------------------------------
// Private methods:
// --------------------------------------------
// A colour counts as "used" when any currently-registered player already has it.
private boolean usedColor(Color c) throws RemoteException
{
    for (String name : this.data.getPlayerNameList())
    {
        if (this.data.getPlayer(name).getColor().equals(c)) return true;
    }
    return false;
}
}
|
src/main/java/game/Game.java
|
package main.java.game;
import java.awt.Color;
import java.awt.Point;
import java.net.MalformedURLException;
import java.rmi.Naming;
import java.rmi.NotBoundException;
import java.rmi.RemoteException;
import java.rmi.server.ExportException;
import java.rmi.server.UnicastRemoteObject;
import main.java.data.Data;
import main.java.data.LoginInfo;
import main.java.data.Tile;
import main.java.player.PlayerInterface;
import main.java.util.Copier;
/**============================================================
* Remote Application
* URL: rmi://ip:port/gameName
* @author kassuskley
==============================================================*/
@SuppressWarnings("serial")
public class Game extends UnicastRemoteObject implements GameInterface, Runnable
{
// --------------------------------------------
// Attributes:
// --------------------------------------------
public static final String gameMessageHeader = "Street Car application: ";
public final static int applicationPort = 5000;
public final static String applicationProtocol = "rmi";
private Data data;
private LoginInfo[] loggedPlayerTable;
private Engine engine;
private Thread engineThread;
private Object engineLock;
// --------------------------------------------
// Builder:
// --------------------------------------------
/**=======================================================================
 * Builds the local game server: exports this object over RMI under
 * rmi://appIP:applicationPort/gameName, loads the board data and starts the
 * engine thread that consumes queued player actions.
 * @return Creates a local application that can be called as a local object
 * @throws RemoteException : network trouble (caught by the IHM)
 * @throws ExceptionUnknownBoardName: (caught by the IHM)
 * @throws RuntimeException :
 =========================================================================*/
public Game(String gameName, String appIP, String boardName, int nbrBuildingInLine) throws RemoteException, ExceptionUnknownBoardName, RuntimeException
{
    super();
    String url = null;
    try // Create the player's remote reference
    {
        url = applicationProtocol + "://" + appIP + ":" + applicationPort + "/" + gameName;
        try {
            // Bug fix: createRegistry throws ExportException when a registry is
            // already exported on this port (e.g. a second game in the same JVM),
            // which used to make the whole constructor fail. Reuse the existing
            // registry instead; rebind below overwrites any stale binding.
            java.rmi.registry.LocateRegistry.createRegistry(applicationPort);
        } catch (ExportException e) {
            // registry already exists — TODO: close and recreate instead
        }
        Naming.rebind(url, this);
    }
    // A malformed URL is a programming error in the constants above; abort.
    catch (MalformedURLException e) {e.printStackTrace(); System.exit(0);}
    this.data = new Data(gameName, boardName, nbrBuildingInLine); // Init application
    this.loggedPlayerTable = LoginInfo.getInitialLoggedPlayerTable();
    this.engineLock = new Object();
    this.engine = new Engine(this.engineLock);
    this.engineThread = new Thread(this.engine);
    this.engineThread.start();
    System.out.println("\n===========================================================");
    System.out.println(gameMessageHeader + "URL = " + url);
    System.out.println(gameMessageHeader + "ready");
    System.out.println(gameMessageHeader + "Start waiting for connexion request");
    System.out.println("===========================================================\n");
}
/**=======================================================================
 * @return Creates a remote application cloned to the real application at the given ip
 * @throws NotBoundException : The web host is not configured (throw RuntimeException)
 * @throws RemoteException : The web host is offline (caught by IHM)
 * @throws other java.rmi.Exception: NetworkError (caught by IHM)
 =========================================================================*/
public static GameInterface getRemoteGame(String appIP, String gameName) throws RemoteException, NotBoundException
{
    // Look up the stub the server bound under rmi://appIP:port/gameName.
    String url = applicationProtocol + "://" + appIP + ":" + applicationPort + "/" + gameName;
//// System.setSecurityManager(new RMISecurityManager());
    try
    {
        return (GameInterface) Naming.lookup(url);
    }
    // A malformed URL is a programming error in the constants above; abort.
    catch (MalformedURLException e) {e.printStackTrace(); System.exit(0);}
    return null; // unreachable: System.exit above, but required by the compiler
}
// --------------------------------------------
// Local methods:
// --------------------------------------------
// Runnable contract: intentionally empty — this object is exported over RMI
// and does no background work of its own.
public void run()
{
}
// --------------------------------------------
// Public methods: may be called by the remote object
// Must implement "throws RemoteException"
// Must be declared in the interface "RemoteApplicationInterface"
// --------------------------------------------
// Remote accessor: returns a clone keyed by playerName — presumably a
// player-scoped snapshot; confirm the exact filtering in Data.getClone.
public Data getData(String playerName) throws RemoteException
{
    return this.data.getClone(playerName);
}
// Remote accessor: returns a deep copy of the login table so remote callers
// cannot mutate the server's state.
public LoginInfo[]getLoginInfo(String playerName) throws RemoteException
{
    Copier<LoginInfo> cp = new Copier<LoginInfo>();
    return cp.copyTab(loggedPlayerTable);
}
/**
 * Host-only edit of one slot of the login table. Slots 0 and 1 (the host) are
 * immutable; the stored value is a defensive copy of the caller's object.
 *
 * @throws ExceptionForbiddenAction          caller is not the host, or the index is out of range
 * @throws ExceptionForbiddenHostModification attempt to edit a host slot
 */
public void setLoginInfo(String playerName, int playerToChangeIndex, LoginInfo newPlayerInfo) throws RemoteException, ExceptionForbiddenAction, ExceptionForbiddenHostModification
{
    if (!this.data.getHost().equals(playerName)) throw new ExceptionForbiddenAction();
    if (playerToChangeIndex <= 1) throw new ExceptionForbiddenHostModification();
    // Bug fix: an out-of-range index from a remote caller used to surface as an
    // unchecked ArrayIndexOutOfBoundsException; reject it as a forbidden action.
    if (playerToChangeIndex >= this.loggedPlayerTable.length) throw new ExceptionForbiddenAction();
    this.loggedPlayerTable[playerToChangeIndex] = newPlayerInfo.getClone();
    // TODO: notify the player whose entry was modified
}
/**
 * Handles a join request. The player is refused when the party is full,
 * the name is already taken, or the color is already taken; otherwise the
 * player is registered. Every decision is logged to the console.
 *
 * @param player the joining player's remote stub
 * @param isHost whether the joining player is the party host
 * @throws ExceptionFullParty       the party has reached Data.maxNbrPlayer
 * @throws ExceptionUsedPlayerName  another player already uses this name
 * @throws ExceptionUsedPlayerColor another player already uses this color
 */
public void onJoinGame(PlayerInterface player, boolean isHost) throws RemoteException, ExceptionFullParty, ExceptionUsedPlayerName, ExceptionUsedPlayerColor
{
    // Fetch once: each getter is a remote call on the player's stub.
    String playerName = player.getPlayerName();
    if (this.data.getNbrPlayer() >= Data.maxNbrPlayer)
    {
        logJoinDecision(playerName, "Refusing player, party is currently full.");
        throw new ExceptionFullParty();
    }
    if (this.data.containsPlayer(playerName))
    {
        logJoinDecision(playerName, "Refusing player, name already taken.");
        throw new ExceptionUsedPlayerName();
    }
    if (this.usedColor(player.getColor()))
    {
        logJoinDecision(playerName, "Refusing player, color \"" + player.getColor() + "\" already taken.");
        throw new ExceptionUsedPlayerColor();
    }
    this.data.addPlayer(player, playerName, player.getColor(), isHost);
    logJoinDecision(playerName, "accepted player", "NbrPlayer: " + this.data.getNbrPlayer());
}
/**
 * Prints the framed console banner used for every join decision; the
 * duplicated banner code previously appeared four times in onJoinGame.
 */
private void logJoinDecision(String playerName, String... details)
{
    System.out.println("\n===========================================================");
    System.out.println(gameMessageHeader + "join request from player : \"" + playerName + "\"");
    for (String detail : details) System.out.println(gameMessageHeader + detail);
    System.out.println("===========================================================\n");
}
/**
 * Removes the named player from the game and logs the outcome.
 *
 * @param playerName name of the player logging out
 * @return true if the player was found and removed, false otherwise
 * @throws RemoteException on RMI transport failure
 */
public boolean onQuitGame(String playerName) throws RemoteException
{
    boolean removed = false;
    for (String knownName : this.data.getPlayerNameList())
    {
        if (knownName.equals(playerName))
        {
            this.data.removePlayer(knownName);
            removed = true;
            break;
        }
    }
    String outcome = removed ? "player logged out" : "player not found in the local list";
    System.out.println("\n===========================================================");
    System.out.println(gameMessageHeader + "quitGame");
    System.out.println(gameMessageHeader + "logout result : " + outcome);
    System.out.println(gameMessageHeader + "playerName : " + playerName);
    System.out.println("===========================================================\n");
    return removed;
}
/**
 * Starts the game; only the host may call this. Queues a "hostStartGame"
 * action on the engine and wakes the engine thread.
 *
 * @throws ExceptionForbiddenAction if playerName is not the host
 * @throws RemoteException on RMI transport failure
 */
public void hostStartGame(String playerName) throws RemoteException, ExceptionForbiddenAction
{
if (!this.data.getHost().equals(playerName)) throw new ExceptionForbiddenAction();
this.engine.addAction(playerName, this.data, "hostStartGame", null, null);
synchronized(this.engineLock)
{
// Wake the engine thread blocked on engineLock so it processes the queued action.
try {this.engineLock.notify();}
catch(Exception e) {e.printStackTrace(); System.exit(0);}
}
}
// Simple version used to test the AI.
//TODO Replace with: public void placeTile(String playerName, int indexInHand, Point position, Direction rotation)
/**
 * Places tile t at the given board position on behalf of playerName.
 * Validates that the game has started, that it is the caller's turn and
 * that the placement is legal, then queues the action and wakes the engine.
 *
 * @throws ExceptionGameHasNotStarted the game has not started yet
 * @throws ExceptionNotYourTurn       it is not playerName's turn
 * @throws ExceptionForbiddenAction   the placement is not acceptable
 */
public void placeTile(String playerName, Tile t, Point position)throws RemoteException, ExceptionGameHasNotStarted, ExceptionNotYourTurn, ExceptionForbiddenAction
{
if (!this.data.isGameStarted()) throw new ExceptionGameHasNotStarted();
if (!this.data.isPlayerTurn(playerName)) throw new ExceptionNotYourTurn();
if (!this.data.isAcceptableTilePlacement(position.x, position.y, t)) throw new ExceptionForbiddenAction();
this.engine.addAction(playerName, this.data, "placeTile", position, t);
synchronized(this.engineLock)
{
// Wake the engine thread blocked on engineLock so it processes the queued action.
try {this.engineLock.notify();}
catch(Exception e) {e.printStackTrace(); System.exit(0);}
}
}
// TODO Simple version used to test the AI.
/**
 * Draws a card for playerName after checking game state and turn ownership.
 * NOTE(review): the nbrCards parameter is currently ignored — exactly one
 * card is drawn; confirm the intended semantics before relying on it.
 *
 * @throws ExceptionGameHasNotStarted the game has not started yet
 * @throws ExceptionNotYourTurn       it is not playerName's turn
 */
public Tile drawCard(String playerName, int nbrCards) throws RemoteException, ExceptionGameHasNotStarted, ExceptionNotYourTurn
{
if (!this.data.isGameStarted()) throw new ExceptionGameHasNotStarted();
if (!this.data.isPlayerTurn(playerName)) throw new ExceptionNotYourTurn();
// Add other exceptions here (original note: "Rajouter d'autres exceptions").
return this.data.drawCard();
}
// --------------------------------------------
// Private methods:
// --------------------------------------------
/**
 * @param c candidate color
 * @return true iff some already-registered player uses color c
 * @throws RemoteException on RMI transport failure while querying players
 */
private boolean usedColor(Color c) throws RemoteException
{
    for (String registeredName : this.data.getPlayerNameList())
    {
        if (this.data.getPlayer(registeredName).getColor().equals(c))
        {
            return true;
        }
    }
    return false;
}
}
|
Valentin changes
|
src/main/java/game/Game.java
|
Valentin changes
|
|
Java
|
mit
|
7376bdf626ef4026dbc8bf2f151f94458903fce2
| 0
|
Sirtrack/construct,Sirtrack/construct,xiaoliang2016/construct,xiaoliang2016/construct,ZiglioNZ/construct,ZiglioNZ/construct
|
package construct.lib;
import java.io.UnsupportedEncodingException;
import construct.exception.ValueError;
/**
 * Bit-level helpers: conversion between integers/strings and arrays of
 * 0/1 bytes (one byte per bit, most significant bit first).
 */
public class Binary {
  /**
   * Precomputed bit expansion: _char_to_bin[v] holds the 8-element 0/1 byte
   * array for the unsigned byte value v (MSB first).
   */
  static byte[][] _char_to_bin = new byte[256][8];
  static{
    for( int i = 0; i<256; i++)
    {
      // (an unused local "char ch = (char)i" was removed here)
      _char_to_bin[i] = int_to_bin(i, 8);
    }
  }
  /** Converts number to its 32-bit expansion (one 0/1 byte per bit, MSB first). */
  public static byte[] int_to_bin( int number ){
    return int_to_bin( number, 32 );
  }
  /**
   * Converts number to a width-bit expansion, MSB first. Negative numbers
   * are first mapped to their two's-complement value of the given width.
   */
  public static byte[] int_to_bin( int number, int width ){
    if( number < 0 ){
      number += (1 << width);
    }
    int i = width - 1;
    byte[] bits = new byte[width];
    while( i >= 0 ){
      bits[i] = (byte)(number & 1);
      number >>= 1;
      i -= 1;
    }
    return bits;
  }
  /** Interprets bits (MSB first) as an unsigned integer. */
  public static int bin_to_int( byte[] bits ){
    return bin_to_int( bits, false );
  }
  /**
   * Interprets bits (MSB first) as an integer; when signed is true the
   * leading bit is treated as the two's-complement sign bit.
   */
  public static int bin_to_int( byte[] bits, boolean signed ){
    int number = 0;
    int bias = 0;
    int i = 0;
    if( bits.length == 0 )
      return 0;
    if( signed && bits[0] == (byte)1 ){
      i++;
      bias = 1 << (bits.length - 1);
    }
    for( int j = i; j < bits.length; j++ )
    {
      number <<= 1;
      number |= bits[j];
    }
    return number - bias;
  }
  /** Reverses the element order of bits (see the bytesize note below). */
  public static byte[] swap_bytes( byte[] bits) {
    return swap_bytes( bits, 8 );
  }
  /**
   * Returns a copy of bits with the element order reversed.
   * NOTE(review): the bytesize parameter is currently ignored — the array is
   * reversed element-by-element rather than in bytesize-sized groups;
   * confirm the intended semantics before changing this.
   */
  public static byte[] swap_bytes( byte[] bits, int bytesize ) {
    int i = 0;
    int l = bits.length;
    byte[] output = new byte[l];
    int j = output.length - 1;
    while( i < l ){
      output[j] = bits[i];
      i++;
      j--;
    }
    return output;
  }
  /** Encoder that packs a string of 0/1 "bit" characters back into bytes. */
  public static Encoder BinaryEncoder(){
    return new Encoder(){
      public byte[] encode(String data) {
        return decode_bin(data.getBytes()).getBytes();
      }
    };
  }
  /** Decoder that expands raw bytes into a string of bit values. */
  public static Decoder BinaryDecoder(){
    return new Decoder(){
      public String decode(byte[] data) {
        byte[] out;
        try {
          // ISO-8859-1 is a 1:1 byte<->char mapping, so the intermediate
          // String is a lossless byte container regardless of the platform
          // default charset.
          out = encode_bin(new String(data, "ISO-8859-1"));
          return new String(out, "ISO-8859-1");
        } catch (UnsupportedEncodingException e) {
          // ISO-8859-1 is guaranteed by the JVM spec, so this cannot happen;
          // rethrow unchecked to keep the Decoder interface simple.
          throw new RuntimeException( "UnsupportedEncodingException: " + e.getMessage() );
        }
      }
    };
  }
  /** Expands each character of data into 8 bit-bytes (MSB first). */
  public static byte[] encode_bin( String data ) {
    byte[] out = new byte[8 * data.length() ];
    for( int i = 0; i < data.length(); i++ ){
      char ch = data.charAt(i);
      byte[] conv = _char_to_bin[ ch ];
      System.arraycopy(conv, 0, out, i*8, 8);
    }
    return out;
  }
  /**
   * Packs groups of 8 bit-bytes back into characters.
   * @throws ValueError if data.length is not a multiple of 8
   */
  public static String decode_bin( byte[] data ){
    if( (data.length & 7) != 0 )
      throw new ValueError("Data length must be a multiple of 8" );
    StringBuilder sb = new StringBuilder();
    for( int i = 0; i< data.length; i+=8 ){
      char ch = 0;
      for( int j = 0; j<8; j++){
        ch = (char)(ch<<1);
        ch |= data[i+j];
      }
      sb.append(ch);
    }
    return sb.toString();
  }
}
|
src/main/construct/lib/Binary.java
|
package construct.lib;
import construct.exception.ValueError;
/**
 * Bit-level helpers: conversion between integers/strings and arrays of
 * 0/1 bytes (one byte per bit, most significant bit first).
 */
public class Binary {
  /**
   * Precomputed bit expansion: _char_to_bin[v] holds the 8-element 0/1 byte
   * array for the unsigned byte value v (MSB first).
   */
  static byte[][] _char_to_bin = new byte[256][8];
  static{
    for( int i = 0; i<256; i++)
    {
      // (an unused local "char ch = (char)i" was removed here)
      _char_to_bin[i] = int_to_bin(i, 8);
    }
  }
  /** Converts number to its 32-bit expansion (one 0/1 byte per bit, MSB first). */
  public static byte[] int_to_bin( int number ){
    return int_to_bin( number, 32 );
  }
  /**
   * Converts number to a width-bit expansion, MSB first. Negative numbers
   * are first mapped to their two's-complement value of the given width.
   */
  public static byte[] int_to_bin( int number, int width ){
    if( number < 0 ){
      number += (1 << width);
    }
    int i = width - 1;
    byte[] bits = new byte[width];
    while( i >= 0 ){
      bits[i] = (byte)(number & 1);
      number >>= 1;
      i -= 1;
    }
    return bits;
  }
  /** Interprets bits (MSB first) as an unsigned integer. */
  public static int bin_to_int( byte[] bits ){
    return bin_to_int( bits, false );
  }
  /**
   * Interprets bits (MSB first) as an integer; when signed is true the
   * leading bit is treated as the two's-complement sign bit.
   */
  public static int bin_to_int( byte[] bits, boolean signed ){
    int number = 0;
    int bias = 0;
    int i = 0;
    if( bits.length == 0 )
      return 0;
    if( signed && bits[0] == (byte)1 ){
      i++;
      bias = 1 << (bits.length - 1);
    }
    for( int j = i; j < bits.length; j++ )
    {
      number <<= 1;
      number |= bits[j];
    }
    return number - bias;
  }
  /** Reverses the element order of bits (see the bytesize note below). */
  public static byte[] swap_bytes( byte[] bits) {
    return swap_bytes( bits, 8 );
  }
  /**
   * Returns a copy of bits with the element order reversed.
   * NOTE(review): the bytesize parameter is currently ignored — the array is
   * reversed element-by-element rather than in bytesize-sized groups;
   * confirm the intended semantics before changing this.
   */
  public static byte[] swap_bytes( byte[] bits, int bytesize ) {
    int i = 0;
    int l = bits.length;
    byte[] output = new byte[l];
    int j = output.length - 1;
    while( i < l ){
      output[j] = bits[i];
      i++;
      j--;
    }
    return output;
  }
  /** Encoder that packs a string of 0/1 "bit" characters back into bytes. */
  public static Encoder BinaryEncoder(){
    return new Encoder(){
      public byte[] encode(String data) {
        return decode_bin(data.getBytes()).getBytes();
      }
    };
  }
  /** Decoder that expands raw bytes into a string of bit values. */
  public static Decoder BinaryDecoder(){
    return new Decoder(){
      public String decode(byte[] data) {
        // BUG FIX: the previous "new String(data)" used the platform default
        // charset, which corrupts byte values >= 0x80 on multi-byte charsets
        // such as UTF-8. ISO-8859-1 is a guaranteed 1:1 byte<->char mapping,
        // so the intermediate String is a lossless byte container.
        byte[] out = encode_bin(new String(data, java.nio.charset.StandardCharsets.ISO_8859_1));
        return new String(out, java.nio.charset.StandardCharsets.ISO_8859_1);
      }
    };
  }
  /** Expands each character of data into 8 bit-bytes (MSB first). */
  public static byte[] encode_bin( String data ) {
    byte[] out = new byte[8 * data.length() ];
    for( int i = 0; i < data.length(); i++ ){
      char ch = data.charAt(i);
      byte[] conv = _char_to_bin[ ch ];
      System.arraycopy(conv, 0, out, i*8, 8);
    }
    return out;
  }
  /**
   * Packs groups of 8 bit-bytes back into characters.
   * @throws ValueError if data.length is not a multiple of 8
   */
  public static String decode_bin( byte[] data ){
    if( (data.length & 7) != 0 )
      throw new ValueError("Data length must be a multiple of 8" );
    StringBuilder sb = new StringBuilder();
    for( int i = 0; i< data.length; i+=8 ){
      char ch = 0;
      for( int j = 0; j<8; j++){
        ch = (char)(ch<<1);
        ch |= data[i+j];
      }
      sb.append(ch);
    }
    return sb.toString();
  }
}
|
Fixed character set bug
|
src/main/construct/lib/Binary.java
|
Fixed character set bug
|
|
Java
|
unknown
|
2af62fb003c523399c6cf6645cd8cd7d224899da
| 0
|
udevbe/westmalle,udevbe/westmalle,udevbe/westford,udevbe/westford,udevbe/westford,udevbe/westford,udevbe/westmalle,Zubnix/westmalle,udevbe/westmalle
|
//Copyright 2015 Erik De Rijcke
//
//Licensed under the Apache License,Version2.0(the"License");
//you may not use this file except in compliance with the License.
//You may obtain a copy of the License at
//
//http://www.apache.org/licenses/LICENSE-2.0
//
//Unless required by applicable law or agreed to in writing,software
//distributed under the License is distributed on an"AS IS"BASIS,
//WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,either express or implied.
//See the License for the specific language governing permissions and
//limitations under the License.
package org.westmalle.wayland.protocol;
import com.sun.jna.Pointer;
import org.freedesktop.wayland.server.Client;
import org.freedesktop.wayland.server.DestroyListener;
import org.freedesktop.wayland.server.Display;
import org.freedesktop.wayland.server.Resource;
import org.freedesktop.wayland.server.WlShellResource;
import org.freedesktop.wayland.server.WlShellSurfaceResource;
import org.freedesktop.wayland.server.WlSurfaceResource;
import org.freedesktop.wayland.server.jna.WaylandServerLibrary;
import org.freedesktop.wayland.server.jna.WaylandServerLibraryMapping;
import org.freedesktop.wayland.shared.WlShellError;
import org.freedesktop.wayland.util.InterfaceMeta;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.ArgumentCaptor;
import org.mockito.Mock;
import org.powermock.api.mockito.PowerMockito;
import org.powermock.core.classloader.annotations.PrepareForTest;
import org.powermock.modules.junit4.PowerMockRunner;
import org.westmalle.wayland.core.Role;
import org.westmalle.wayland.core.Surface;
import org.westmalle.wayland.wlshell.ShellSurface;
import org.westmalle.wayland.wlshell.ShellSurfaceFactory;
import java.util.Optional;
import static com.google.common.truth.Truth.assertThat;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyInt;
import static org.mockito.Matchers.anyString;
import static org.mockito.Mockito.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.verifyZeroInteractions;
import static org.mockito.Mockito.when;
/**
 * Unit tests for WlShell: shell-surface creation (role conflicts, destroy
 * listeners, re-creation after destroy) and client binding.
 * PowerMock is required because WaylandServerLibrary/InterfaceMeta expose
 * static methods and the factory classes are final.
 */
@RunWith(PowerMockRunner.class)
@PrepareForTest({
//following classes have static methods, so we have to powermock them:
WaylandServerLibrary.class,
InterfaceMeta.class,
//following classes are final, so we have to powermock them:
WlShellSurfaceFactory.class,
ShellSurfaceFactory.class
})
public class WlShellTest {
@Mock
private Display display;
@Mock
private WlShellSurfaceFactory wlShellSurfaceFactory;
@Mock
private WaylandServerLibraryMapping waylandServerLibraryMapping;
@Mock
private InterfaceMeta interfaceMeta;
@Mock
private Pointer globalPointer;
@Mock
private ShellSurfaceFactory shellSurfaceFactory;
@Mock
private WlCompositor wlCompositor;
// Object under test, built in setUp() from the mocks above.
private WlShell wlShell;
@Before
public void setUp() throws Exception {
// Stub the native wayland library and interface metadata so WlShell can
// register its global without touching real native code.
PowerMockito.mockStatic(WaylandServerLibrary.class,
InterfaceMeta.class);
when(InterfaceMeta.get((Class<?>) any())).thenReturn(this.interfaceMeta);
when(WaylandServerLibrary.INSTANCE()).thenReturn(this.waylandServerLibraryMapping);
when(this.waylandServerLibraryMapping.wl_global_create(any(),
any(),
anyInt(),
any(),
any())).thenReturn(this.globalPointer);
this.wlShell = new WlShell(this.display,
this.wlShellSurfaceFactory,
this.shellSurfaceFactory,
this.wlCompositor);
}
// A surface that already has a non-shell role must be rejected with a
// protocol error instead of receiving a shell surface.
@Test
public void testGetShellSurfacePreviousNonShellSurfaceRole() throws Exception {
//given
final WlShellResource wlShellResource = mock(WlShellResource.class);
final int id = 123;
final WlSurfaceResource wlSurfaceResource = mock(WlSurfaceResource.class);
final WlSurface wlSurface = mock(WlSurface.class);
final Surface surface = mock(Surface.class);
final Role role = mock(Role.class);
final Optional<Role> roleOptional = Optional.of(role);
final Resource displayResource = mock(Resource.class);
final Client client = mock(Client.class);
final int version = 3;
when(client.getObject(Display.OBJECT_ID)).thenReturn(displayResource);
when(wlShellResource.getClient()).thenReturn(client);
when(wlShellResource.getVersion()).thenReturn(version);
when(wlSurfaceResource.getImplementation()).thenReturn(wlSurface);
when(wlSurface.getSurface()).thenReturn(surface);
when(surface.getRole()).thenReturn(roleOptional);
//when
this.wlShell.getShellSurface(wlShellResource,
id,
wlSurfaceResource);
//then
verifyZeroInteractions(this.shellSurfaceFactory);
verifyZeroInteractions(this.wlShellSurfaceFactory);
verify(displayResource).postError(eq(WlShellError.ROLE.getValue()),
anyString());
}
// A role-less surface gets a shell surface; destroying the surface must
// destroy the shell-surface resource, and a second request must succeed.
@Test
public void testGetShellSurfaceNoPreviousRole() throws Exception {
//given
final WlShellResource wlShellResource = mock(WlShellResource.class);
final int id = 123;
final WlSurfaceResource wlSurfaceResource = mock(WlSurfaceResource.class);
final WlSurface wlSurface = mock(WlSurface.class);
final Surface surface = mock(Surface.class);
final Optional<Role> roleOptional = Optional.empty();
final Client client = mock(Client.class);
final int version = 3;
when(wlShellResource.getClient()).thenReturn(client);
when(wlShellResource.getVersion()).thenReturn(version);
when(wlSurfaceResource.getImplementation()).thenReturn(wlSurface);
when(wlSurface.getSurface()).thenReturn(surface);
when(surface.getRole()).thenReturn(roleOptional);
final WlShellSurface wlShellSurface = mock(WlShellSurface.class);
final ShellSurface shellSurface = mock(ShellSurface.class);
when(wlShellSurface.getShellSurface()).thenReturn(shellSurface);
when(this.wlShellSurfaceFactory.create(shellSurface,
wlSurfaceResource)).thenReturn(wlShellSurface);
final WlShellSurfaceResource wlShellSurfaceResource = mock(WlShellSurfaceResource.class);
when(wlShellSurface.add(any(),
anyInt(),
anyInt())).thenReturn(wlShellSurfaceResource);
when(this.shellSurfaceFactory.create(eq(this.wlCompositor),
anyInt())).thenReturn(shellSurface);
//when
this.wlShell.getShellSurface(wlShellResource,
id,
wlSurfaceResource);
//then
verify(wlShellSurface).add(client,
version,
id);
verify(surface).setRole(shellSurface);
final ArgumentCaptor<DestroyListener> surfaceResourceDestroyListenerCaptor = ArgumentCaptor.forClass(DestroyListener.class);
final ArgumentCaptor<DestroyListener> shellSurfaceResourceDestroyListenerCaptor = ArgumentCaptor.forClass(DestroyListener.class);
verify(wlSurfaceResource).register(surfaceResourceDestroyListenerCaptor.capture());
verify(wlShellSurfaceResource).register(shellSurfaceResourceDestroyListenerCaptor.capture());
//and when
final DestroyListener surfaceDestroyListener = surfaceResourceDestroyListenerCaptor.getValue();
surfaceDestroyListener.handle();
final DestroyListener shellSurfaceDestroyListener = shellSurfaceResourceDestroyListenerCaptor.getValue();
shellSurfaceDestroyListener.handle();
//then
verify(wlShellSurfaceResource).destroy();
//and when
this.wlShell.getShellSurface(wlShellResource,
id,
wlSurfaceResource);
//then
verify(wlShellSurface,
times(2)).add(client,
version,
id);
}
// Binding a client must produce a resource whose implementation is this shell.
@Test
public void testOnBindClient() throws Exception {
//given
final Pointer resourcePointer = mock(Pointer.class);
when(this.waylandServerLibraryMapping.wl_resource_create(any(),
any(),
anyInt(),
anyInt())).thenReturn(resourcePointer);
//when
final WlShellResource wlShellResource = this.wlShell.onBindClient(mock(Client.class),
1,
1);
//then
assertThat(wlShellResource).isNotNull();
assertThat(wlShellResource.getImplementation()).isSameAs(this.wlShell);
}
// create() must also hand back a resource backed by this shell.
@Test
public void testCreate() throws Exception {
//given
final Client client = mock(Client.class);
final int version = 2;
final int id = 7;
//when
final WlShellResource wlShellResource = this.wlShell.create(client,
version,
id);
//then
assertThat(wlShellResource).isNotNull();
assertThat(wlShellResource.getImplementation()).isSameAs(this.wlShell);
}
}
|
wayland/src/test/java/org/westmalle/wayland/protocol/WlShellTest.java
|
//Copyright 2015 Erik De Rijcke
//
//Licensed under the Apache License,Version2.0(the"License");
//you may not use this file except in compliance with the License.
//You may obtain a copy of the License at
//
//http://www.apache.org/licenses/LICENSE-2.0
//
//Unless required by applicable law or agreed to in writing,software
//distributed under the License is distributed on an"AS IS"BASIS,
//WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,either express or implied.
//See the License for the specific language governing permissions and
//limitations under the License.
package org.westmalle.wayland.protocol;
import com.sun.jna.Pointer;
import org.freedesktop.wayland.server.Client;
import org.freedesktop.wayland.server.DestroyListener;
import org.freedesktop.wayland.server.Display;
import org.freedesktop.wayland.server.Resource;
import org.freedesktop.wayland.server.WlShellResource;
import org.freedesktop.wayland.server.WlShellSurfaceResource;
import org.freedesktop.wayland.server.WlSurfaceResource;
import org.freedesktop.wayland.server.jna.WaylandServerLibrary;
import org.freedesktop.wayland.server.jna.WaylandServerLibraryMapping;
import org.freedesktop.wayland.shared.WlShellError;
import org.freedesktop.wayland.util.InterfaceMeta;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.ArgumentCaptor;
import org.mockito.Mock;
import org.powermock.api.mockito.PowerMockito;
import org.powermock.core.classloader.annotations.PrepareForTest;
import org.powermock.modules.junit4.PowerMockRunner;
import org.westmalle.wayland.core.Role;
import org.westmalle.wayland.core.Surface;
import org.westmalle.wayland.wlshell.ShellSurface;
import org.westmalle.wayland.wlshell.ShellSurfaceFactory;
import java.util.Optional;
import static com.google.common.truth.Truth.assertThat;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyInt;
import static org.mockito.Matchers.anyString;
import static org.mockito.Mockito.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.verifyZeroInteractions;
import static org.mockito.Mockito.when;
@RunWith(PowerMockRunner.class)
@PrepareForTest({
//following classes have static methods, so we have to powermock them:
WaylandServerLibrary.class,
InterfaceMeta.class,
//following classes are final, so we have to powermock them:
WlShellSurfaceFactory.class,
ShellSurfaceFactory.class
})
public class WlShellTest {
@Mock
private Display display;
@Mock
private WlShellSurfaceFactory wlShellSurfaceFactory;
@Mock
private WaylandServerLibraryMapping waylandServerLibraryMapping;
@Mock
private InterfaceMeta interfaceMeta;
@Mock
private Pointer globalPointer;
@Mock
private ShellSurfaceFactory shellSurfaceFactory;
@Mock
private WlCompositor wlCompositor;
private WlShell wlShell;
@Before
public void setUp() throws Exception {
PowerMockito.mockStatic(WaylandServerLibrary.class,
InterfaceMeta.class);
when(InterfaceMeta.get((Class<?>) any())).thenReturn(this.interfaceMeta);
when(WaylandServerLibrary.INSTANCE()).thenReturn(this.waylandServerLibraryMapping);
when(this.waylandServerLibraryMapping.wl_global_create(any(),
any(),
anyInt(),
any(),
any())).thenReturn(this.globalPointer);
this.wlShell = new WlShell(this.display,
this.wlShellSurfaceFactory,
this.shellSurfaceFactory,
this.wlCompositor);
}
@Test
public void testGetShellSurfacePreviousNonShellSurfaceRole() throws Exception {
//given
final WlShellResource wlShellResource = mock(WlShellResource.class);
final int id = 123;
final WlSurfaceResource wlSurfaceResource = mock(WlSurfaceResource.class);
final WlSurface wlSurface = mock(WlSurface.class);
final Surface surface = mock(Surface.class);
final Role role = mock(Role.class);
final Optional<Role> roleOptional = Optional.of(role);
final Resource displayResource = mock(Resource.class);
final Client client = mock(Client.class);
final int version = 3;
when(client.getObject(Display.OBJECT_ID)).thenReturn(displayResource);
when(wlShellResource.getClient()).thenReturn(client);
when(wlShellResource.getVersion()).thenReturn(version);
when(wlSurfaceResource.getImplementation()).thenReturn(wlSurface);
when(wlSurface.getSurface()).thenReturn(surface);
when(surface.getRole()).thenReturn(roleOptional);
//when
this.wlShell.getShellSurface(wlShellResource,
id,
wlSurfaceResource);
//then
verifyZeroInteractions(this.shellSurfaceFactory);
verifyZeroInteractions(this.wlShellSurfaceFactory);
verify(displayResource).postError(eq(WlShellError.ROLE.getValue()),
anyString());
}
@Test
public void testGetShellSurfaceNoPreviousRole() throws Exception {
//given
final WlShellResource wlShellResource = mock(WlShellResource.class);
final int id = 123;
final WlSurfaceResource wlSurfaceResource = mock(WlSurfaceResource.class);
final WlSurface wlSurface = mock(WlSurface.class);
final Surface surface = mock(Surface.class);
final Optional<Role> roleOptional = Optional.empty();
final Client client = mock(Client.class);
final int version = 3;
when(wlShellResource.getClient()).thenReturn(client);
when(wlShellResource.getVersion()).thenReturn(version);
when(wlSurfaceResource.getImplementation()).thenReturn(wlSurface);
when(wlSurface.getSurface()).thenReturn(surface);
when(surface.getRole()).thenReturn(roleOptional);
final WlShellSurface wlShellSurface = mock(WlShellSurface.class);
final ShellSurface shellSurface = mock(ShellSurface.class);
when(wlShellSurface.getShellSurface()).thenReturn(shellSurface);
when(this.wlShellSurfaceFactory.create(shellSurface,
wlSurfaceResource)).thenReturn(wlShellSurface);
final WlShellSurfaceResource wlShellSurfaceResource = mock(WlShellSurfaceResource.class);
when(wlShellSurface.add(any(),
anyInt(),
anyInt())).thenReturn(wlShellSurfaceResource);
when(this.shellSurfaceFactory.create(eq(this.wlCompositor),
anyInt())).thenReturn(shellSurface);
//when
this.wlShell.getShellSurface(wlShellResource,
id,
wlSurfaceResource);
//then
verify(wlShellSurface).add(client,
version,
id);
verify(surface).setRole(shellSurface);
final ArgumentCaptor<DestroyListener> surfaceResourceDestroyListenerCaptor = ArgumentCaptor.forClass(DestroyListener.class);
final ArgumentCaptor<DestroyListener> shellSurfaceResourceDestroyListenerCaptor = ArgumentCaptor.forClass(DestroyListener.class);
verify(wlSurfaceResource).register(surfaceResourceDestroyListenerCaptor.capture());
verify(wlShellSurfaceResource).register(shellSurfaceResourceDestroyListenerCaptor.capture());
//and when
final DestroyListener surfaceDestroyListener = surfaceResourceDestroyListenerCaptor.getValue();
surfaceDestroyListener.handle();
final DestroyListener shellSurfaceDestroyListener = shellSurfaceResourceDestroyListenerCaptor.getValue();
shellSurfaceDestroyListener.handle();
//then
verify(wlShellSurfaceResource).destroy();
//and when
this.wlShell.getShellSurface(wlShellResource,
id,
wlSurfaceResource);
//then
verify(wlShellSurface,
times(2)).add(client,
version,
id);
}
@Test
public void testOnBindClient() throws Exception {
//given
final Pointer resourcePointer = mock(Pointer.class);
when(this.waylandServerLibraryMapping.wl_resource_create(any(),
any(),
anyInt(),
anyInt())).thenReturn(resourcePointer);
//when
final WlShellResource wlShellResource = this.wlShell.onBindClient(mock(Client.class),
1,
1);
//then
assertThat(wlShellResource).isNotNull();
assertThat(wlShellResource.getImplementation()).isSameAs(this.wlShell);
}
@Test
public void testCreate() throws Exception {
//given
final Client client = mock(Client.class);
final int version = 2;
final int id = 7;
//when
final WlShellResource wlShellResource = this.wlShell.create(client,
version,
id);
//then
assertThat(wlShellResource).isNotNull();
assertThat(wlShellResource.getImplementation()).isSameAs(this.wlShell);
}
}
|
add missing import
|
wayland/src/test/java/org/westmalle/wayland/protocol/WlShellTest.java
|
add missing import
|
|
Java
|
agpl-3.0
|
1f85fcadf1942fc0681265437225ebb883b255b5
| 0
|
elki-project/elki,elki-project/elki,elki-project/elki
|
package de.lmu.ifi.dbs.elki.varianceanalysis;
import java.util.Collection;
import java.util.Iterator;
import de.lmu.ifi.dbs.elki.data.RealVector;
import de.lmu.ifi.dbs.elki.database.Database;
import de.lmu.ifi.dbs.elki.distance.DoubleDistance;
import de.lmu.ifi.dbs.elki.math.linearalgebra.Matrix;
import de.lmu.ifi.dbs.elki.properties.Properties;
import de.lmu.ifi.dbs.elki.utilities.QueryResult;
import de.lmu.ifi.dbs.elki.utilities.Util;
import de.lmu.ifi.dbs.elki.utilities.optionhandling.ClassParameter;
import de.lmu.ifi.dbs.elki.utilities.optionhandling.OptionID;
import de.lmu.ifi.dbs.elki.utilities.optionhandling.ParameterException;
import de.lmu.ifi.dbs.elki.varianceanalysis.weightfunctions.ConstantWeight;
import de.lmu.ifi.dbs.elki.varianceanalysis.weightfunctions.WeightFunction;
/**
 * CovarianceMatrixBuilder that down-weights points that are far from the
 * centroid (or query point) using a configurable {@link WeightFunction},
 * making the resulting PCA more robust against outliers.
 */
public class WeightedCovarianceMatrixBuilder<V extends RealVector<V, ?>> extends CovarianceMatrixBuilder<V> {
  /**
   * OptionID for {@link #WEIGHT_PARAM}
   */
  public static final OptionID WEIGHT_ID = OptionID.getOrCreateOptionID("pca.weight", "Classname of the weight function to use in PCA " + Properties.KDD_FRAMEWORK_PROPERTIES.restrictionString(WeightFunction.class) + ".");
  /**
   * Parameter to specify the weight function to use in weighted PCA, must
   * extend {@link de.lmu.ifi.dbs.elki.varianceanalysis.weightfunction}.
   * <p>
   * Key: {@code -pca.weight}
   * </p>
   */
  private final ClassParameter<WeightFunction> WEIGHT_PARAM = new ClassParameter<WeightFunction>(WEIGHT_ID, WeightFunction.class, ConstantWeight.class.getName());
  /**
   * Holds the weight function used to weight each point's contribution.
   */
  public WeightFunction<V> weightfunction;
  /**
   * Registers the weight-function parameter.
   */
  public WeightedCovarianceMatrixBuilder() {
    super();
    addOption(WEIGHT_PARAM);
  }
  /**
   * @see de.lmu.ifi.dbs.elki.utilities.optionhandling.Parameterizable#setParameters(String[])
   */
  @SuppressWarnings("unchecked")
  public String[] setParameters(String[] args) throws ParameterException {
    String[] remainingParameters = super.setParameters(args);
    weightfunction = WEIGHT_PARAM.instantiateClass();
    setParameters(args, remainingParameters);
    return remainingParameters;
  }
  /**
   * Weighted Covariance Matrix for a set of IDs. Since we are not supplied any
   * distance information, we'll need to compute it ourselves. Covariance is
   * tied to Euclidean distance, so it probably does not make much sense to add
   * support for other distance functions?
   *
   * @param ids      database IDs of the points to process
   * @param database database the points live in
   * @return weighted covariance matrix over the given points
   */
  public Matrix processIds(Collection<Integer> ids, Database<V> database) {
    int dim = database.dimensionality();
    // collecting the sums in each dimension
    double[] sums = new double[dim];
    // collecting the products of any two dimensions
    double[][] squares = new double[dim][dim];
    // for collecting weights
    double weightsum = 0.0;
    // get centroid
    V centroid = Util.centroid(database, ids);
    // First pass: find the maximum distance to the centroid and the standard
    // deviation of the distances, which parameterize the weight function.
    double maxdist = 0.0;
    double stddev = 0.0;
    {
      for(Iterator<Integer> it = ids.iterator(); it.hasNext();) {
        V obj = database.get(it.next());
        double distance = euclideanDistance(centroid, obj, dim);
        stddev += distance * distance;
        if(distance > maxdist)
          maxdist = distance;
      }
      if(maxdist == 0.0)
        maxdist = 1.0;
      // compute standard deviation.
      stddev = Math.sqrt(stddev / ids.size());
    }
    // Second pass: accumulate the weighted sums and cross products.
    // (A dead loop counter from the original code was removed here.)
    for(Iterator<Integer> it = ids.iterator(); it.hasNext();) {
      V obj = database.get(it.next());
      // TODO: hard coded distance... make parametrizable?
      double distance = euclideanDistance(centroid, obj, dim);
      double weight = weightfunction.getWeight(distance, maxdist, stddev);
      accumulate(squares, sums, obj, weight, dim);
      weightsum += weight;
    }
    // TODO: if weightsum == 0.0, the matrix will be empty,
    // do we need to manually default to identity matrix then?
    assert (weightsum > 0.0);
    return finalizeMatrix(squares, sums, weightsum, dim);
  }
  /**
   * Compute Covariance Matrix for a QueryResult Collection
   *
   * By default it will just collect the ids and run processIds
   *
   * @param results a collection of QueryResults
   * @param database the database used
   * @param k number of elements to process
   * @return Covariance Matrix
   */
  public Matrix processQueryResults(Collection<QueryResult<DoubleDistance>> results, Database<V> database, int k) {
    int dim = database.dimensionality();
    // collecting the sums in each dimension
    double[] sums = new double[dim];
    // collecting the products of any two dimensions
    double[][] squares = new double[dim][dim];
    // for collecting weights
    double weightsum = 0.0;
    // avoid bad parameters
    if (k > results.size()) k = results.size();
    // First pass over the k nearest results: distance statistics for the
    // weight function (distances are already provided by the query).
    double maxdist = 0.0;
    double stddev = 0.0;
    {
      int i = 0;
      for(Iterator<QueryResult<DoubleDistance>> it = results.iterator(); it.hasNext() && i < k; i++) {
        QueryResult<DoubleDistance> res = it.next();
        double dist = res.getDistance().getValue();
        stddev += dist * dist;
        if(dist > maxdist)
          maxdist = dist;
      }
      if(maxdist == 0.0)
        maxdist = 1.0;
      stddev = Math.sqrt(stddev / k);
    }
    // calculate weighted PCA
    int i = 0;
    for(Iterator<QueryResult<DoubleDistance>> it = results.iterator(); it.hasNext() && i < k; i++) {
      QueryResult<DoubleDistance> res = it.next();
      V obj = database.get(res.getID());
      double weight = weightfunction.getWeight(res.getDistance().getValue(), maxdist, stddev);
      accumulate(squares, sums, obj, weight, dim);
      weightsum += weight;
    }
    return finalizeMatrix(squares, sums, weightsum, dim);
  }
  /**
   * Euclidean distance between two vectors over the first dim dimensions.
   * NOTE(review): 1-based getValue indexing follows the original code.
   */
  private double euclideanDistance(V a, V b, int dim) {
    double distance = 0.0;
    for(int d = 0; d < dim; d++) {
      double delta = a.getValue(d + 1).doubleValue() - b.getValue(d + 1).doubleValue();
      distance += delta * delta;
    }
    return Math.sqrt(distance);
  }
  /**
   * Adds one weighted observation to the running sums and cross-product
   * accumulators; previously duplicated in both process methods.
   */
  private void accumulate(double[][] squares, double[] sums, V obj, double weight, int dim) {
    for(int d1 = 0; d1 < dim; d1++) {
      /* We're exploiting symmetry here, start with d2 == d1 */
      for(int d2 = d1; d2 < dim; d2++) {
        squares[d1][d2] += obj.getValue(d1 + 1).doubleValue() * obj.getValue(d2 + 1).doubleValue() * weight;
      }
      sums[d1] += obj.getValue(d1 + 1).doubleValue() * weight;
    }
  }
  /**
   * Converts the accumulated sums and cross products into the covariance
   * matrix (subtracting the weighted mean term and mirroring the upper
   * triangle); previously duplicated in both process methods.
   */
  private Matrix finalizeMatrix(double[][] squares, double[] sums, double weightsum, int dim) {
    for(int d1 = 0; d1 < dim; d1++) {
      for(int d2 = d1; d2 < dim; d2++) {
        squares[d1][d2] = squares[d1][d2] - sums[d1] * sums[d2] / weightsum;
        // use symmetry
        squares[d2][d1] = squares[d1][d2];
      }
    }
    return new Matrix(squares);
  }
}
|
src/de/lmu/ifi/dbs/elki/varianceanalysis/WeightedCovarianceMatrixBuilder.java
|
package de.lmu.ifi.dbs.elki.varianceanalysis;
import java.util.Collection;
import java.util.Iterator;
import de.lmu.ifi.dbs.elki.data.RealVector;
import de.lmu.ifi.dbs.elki.database.Database;
import de.lmu.ifi.dbs.elki.distance.DoubleDistance;
import de.lmu.ifi.dbs.elki.math.linearalgebra.Matrix;
import de.lmu.ifi.dbs.elki.properties.Properties;
import de.lmu.ifi.dbs.elki.utilities.QueryResult;
import de.lmu.ifi.dbs.elki.utilities.UnableToComplyException;
import de.lmu.ifi.dbs.elki.utilities.Util;
import de.lmu.ifi.dbs.elki.utilities.optionhandling.ClassParameter;
import de.lmu.ifi.dbs.elki.utilities.optionhandling.OptionID;
import de.lmu.ifi.dbs.elki.utilities.optionhandling.ParameterException;
import de.lmu.ifi.dbs.elki.varianceanalysis.weightfunctions.ConstantWeight;
import de.lmu.ifi.dbs.elki.varianceanalysis.weightfunctions.WeightFunction;
/**
 * {@link CovarianceMatrixBuilder} that down-weights objects by their distance
 * to the query point / centroid, using a configurable {@link WeightFunction}.
 *
 * The returned matrices are in "scatter" form: the products are weighted but
 * the final matrix is not divided by the weight sum (see the commented-out
 * normalization in the accumulation loops).
 */
public class WeightedCovarianceMatrixBuilder<V extends RealVector<V, ?>> extends CovarianceMatrixBuilder<V> {
  /**
   * OptionID for {@link #WEIGHT_PARAM}
   */
  public static final OptionID WEIGHT_ID = OptionID.getOrCreateOptionID("pca.weight", "Classname of the weight function to use in PCA " + Properties.KDD_FRAMEWORK_PROPERTIES.restrictionString(WeightFunction.class) + ".");

  /**
   * Parameter to specify the weight function to use in weighted PCA, must
   * extend {@link de.lmu.ifi.dbs.elki.varianceanalysis.weightfunction}.
   * <p>
   * Key: {@code -pca.weight}
   * </p>
   */
  private final ClassParameter<WeightFunction> WEIGHT_PARAM = new ClassParameter<WeightFunction>(WEIGHT_ID, WeightFunction.class, ConstantWeight.class.getName());

  /**
   * Holds the weight function instantiated from {@link #WEIGHT_PARAM}.
   */
  public WeightFunction<V> weightfunction;

  /**
   * Constructor: registers the weight function option.
   */
  public WeightedCovarianceMatrixBuilder() {
    super();
    addOption(WEIGHT_PARAM);
  }

  /**
   * Parse parameters and instantiate the configured weight function.
   *
   * @see de.lmu.ifi.dbs.elki.utilities.optionhandling.Parameterizable#setParameters(String[])
   */
  @SuppressWarnings("unchecked")
  public String[] setParameters(String[] args) throws ParameterException {
    String[] remainingParameters = super.setParameters(args);
    weightfunction = WEIGHT_PARAM.instantiateClass();
    setParameters(args, remainingParameters);
    return remainingParameters;
  }

  /**
   * Weighted Covariance Matrix for a set of IDs. Since we are not supplied any
   * distance information, we'll need to compute it ourselves. Covariance is
   * tied to Euklidean distance, so it probably does not make much sense to add
   * support for other distance functions?
   *
   * @param ids object IDs to process
   * @param database the database used to resolve IDs to objects
   * @return weighted covariance matrix (scatter form)
   */
  public Matrix processIds(Collection<Integer> ids, Database<V> database) {
    int dim = database.dimensionality();
    // collecting the sums in each dimension
    double[] sums = new double[dim];
    // collecting the products of any two dimensions
    double[][] squares = new double[dim][dim];
    // for collecting weights
    double weightsum = 0.0;
    // get centroid
    V centroid = Util.centroid((Database<V>) database, ids);
    // find maximum distance and standard deviation relative to the centroid
    double maxdist = 0.0;
    double stddev = 0.0;
    {
      for(Iterator<Integer> it = ids.iterator(); it.hasNext();) {
        V obj = database.get(it.next());
        double distance = 0.0;
        // TODO: this is a hardcoded Euklidean distance.
        for(int d = 0; d < dim; d++) {
          double delta = centroid.getValue(d + 1).doubleValue() - obj.getValue(d + 1).doubleValue();
          distance += delta * delta;
        }
        stddev += distance; // still squared distance!
        distance = java.lang.Math.sqrt(distance);
        if(distance > maxdist) {
          maxdist = distance;
        }
      }
      // guard against all objects coinciding with the centroid
      if(maxdist == 0.0) {
        maxdist = 1.0;
      }
      // compute standard deviation.
      stddev = Math.sqrt(stddev / ids.size());
    }
    for(Iterator<Integer> it = ids.iterator(); it.hasNext();) {
      V obj = database.get(it.next());
      // TODO: hard coded distance... make parametrizable?
      double distance = 0.0;
      for(int d = 0; d < dim; d++) {
        double delta = centroid.getValue(d + 1).doubleValue() - obj.getValue(d + 1).doubleValue();
        distance += delta * delta;
      }
      distance = java.lang.Math.sqrt(distance);
      double weight = weightfunction.getWeight(distance, maxdist, stddev);
      for(int d1 = 0; d1 < dim; d1++) {
        /* We're exploiting symmetry here, start with d2 == d1 */
        for(int d2 = d1; d2 < dim; d2++) {
          squares[d1][d2] += obj.getValue(d1 + 1).doubleValue() * obj.getValue(d2 + 1).doubleValue() * weight;
        }
        sums[d1] += obj.getValue(d1 + 1).doubleValue() * weight;
      }
      weightsum += weight;
    }
    // TODO: if weightsum == 0.0, the matrix will be empty,
    // do we need to manually default to identity matrix then?
    assert (weightsum > 0.0);
    for(int d1 = 0; d1 < dim; d1++) {
      for(int d2 = d1; d2 < dim; d2++) {
        // squares[d1][d2] = squares[d1][d2] / weightsum - (sums[d1] /
        // weightsum) * (sums[d2] / weightsum);
        squares[d1][d2] = squares[d1][d2] - sums[d1] * sums[d2] / weightsum;
        // use symmetry
        squares[d2][d1] = squares[d1][d2];
      }
    }
    return new Matrix(squares);
  }

  /**
   * Compute Covariance Matrix for a QueryResult Collection.
   *
   * Distances are taken directly from the query results; only the first
   * {@code k} results are processed.
   *
   * @param results a collection of QueryResults
   * @param database the database used
   * @param k number of elements to process (capped at {@code results.size()})
   * @return Covariance Matrix (scatter form)
   */
  public Matrix processQueryResults(Collection<QueryResult<DoubleDistance>> results, Database<V> database, int k) {
    int dim = database.dimensionality();
    // collecting the sums in each dimension
    double[] sums = new double[dim];
    // collecting the products of any two dimensions
    double[][] squares = new double[dim][dim];
    // for collecting weights
    double weightsum = 0.0;
    // avoid bad parameters
    if(k > results.size()) {
      k = results.size();
    }
    // find maximum distance and standard deviation over the first k results
    double maxdist = 0.0;
    double stddev = 0.0;
    {
      int i = 0;
      for(Iterator<QueryResult<DoubleDistance>> it = results.iterator(); it.hasNext() && i < k; i++) {
        QueryResult<DoubleDistance> res = it.next();
        double dist = res.getDistance().getValue();
        stddev += dist * dist;
        if(dist > maxdist) {
          maxdist = dist;
        }
      }
      if(maxdist == 0.0) {
        maxdist = 1.0;
      }
      stddev = Math.sqrt(stddev / k);
    }
    // calculate weighted PCA
    int i = 0;
    for(Iterator<QueryResult<DoubleDistance>> it = results.iterator(); it.hasNext() && i < k; i++) {
      QueryResult<DoubleDistance> res = it.next();
      V obj = database.get(res.getID());
      double weight = weightfunction.getWeight(res.getDistance().getValue(), maxdist, stddev);
      for(int d1 = 0; d1 < dim; d1++) {
        /* We're exploiting symmetry here, start with d2 == d1 */
        for(int d2 = d1; d2 < dim; d2++) {
          squares[d1][d2] += obj.getValue(d1 + 1).doubleValue() * obj.getValue(d2 + 1).doubleValue() * weight;
        }
        sums[d1] += obj.getValue(d1 + 1).doubleValue() * weight;
      }
      weightsum += weight;
    }
    // a zero weight sum would produce NaN entries below; keep consistent
    // with the assertion in processIds
    assert (weightsum > 0.0);
    for(int d1 = 0; d1 < dim; d1++) {
      for(int d2 = d1; d2 < dim; d2++) {
        // squares[d1][d2] = squares[d1][d2] / weightsum - (sums[d1] /
        // weightsum) * (sums[d2] / weightsum);
        squares[d1][d2] = squares[d1][d2] - sums[d1] * sums[d2] / weightsum;
        // use symmetry
        squares[d2][d1] = squares[d1][d2];
      }
    }
    return new Matrix(squares);
  }
}
|
remove warning: unused import
|
src/de/lmu/ifi/dbs/elki/varianceanalysis/WeightedCovarianceMatrixBuilder.java
|
remove warning: unused import
|
|
Java
|
agpl-3.0
|
def6eb57ea2437f4314bb370c28164632a5bee67
| 0
|
tdefilip/opennms,roskens/opennms-pre-github,rdkgit/opennms,roskens/opennms-pre-github,aihua/opennms,tdefilip/opennms,rdkgit/opennms,aihua/opennms,tdefilip/opennms,tdefilip/opennms,roskens/opennms-pre-github,roskens/opennms-pre-github,roskens/opennms-pre-github,roskens/opennms-pre-github,roskens/opennms-pre-github,rdkgit/opennms,aihua/opennms,tdefilip/opennms,tdefilip/opennms,aihua/opennms,rdkgit/opennms,rdkgit/opennms,roskens/opennms-pre-github,rdkgit/opennms,aihua/opennms,tdefilip/opennms,tdefilip/opennms,rdkgit/opennms,rdkgit/opennms,roskens/opennms-pre-github,roskens/opennms-pre-github,rdkgit/opennms,roskens/opennms-pre-github,tdefilip/opennms,rdkgit/opennms,aihua/opennms,aihua/opennms,aihua/opennms,aihua/opennms
|
//
// This file is part of the OpenNMS(R) Application.
//
// OpenNMS(R) is Copyright (C) 2002-2005 The OpenNMS Group, Inc. All rights reserved.
// OpenNMS(R) is a derivative work, containing both original code, included code and modified
// code that was published under the GNU General Public License. Copyrights for modified
// and included code are below.
//
// OpenNMS(R) is a registered trademark of The OpenNMS Group, Inc.
//
// Modifications:
//
// 2007 Jan 29: Indenting - dj@opennms.org
// Aug 23, 2004: Created this file.
//
// Original code base Copyright (C) 1999-2001 Oculan Corp. All rights reserved.
//
// This program is free software; you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation; either version 2 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
//
// For more information contact:
// OpenNMS Licensing <license@opennms.org>
// http://www.opennms.org/
// http://www.opennms.com/
//
// Tab Size = 8
//
package org.opennms.netmgt.rrd;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.Properties;
import org.apache.log4j.Category;
import org.opennms.core.utils.ThreadCategory;
import org.opennms.netmgt.ConfigFileConstants;
/**
 * Provides access to the rrd configuration data.
 *
 * Configuration is loaded lazily from the file named by
 * {@link ConfigFileConstants#RRD_CONFIG_FILE_NAME}, layered over the system
 * properties. All accessors are static; loading is synchronized.
 */
public class RrdConfig {
    // Lazily-initialized configuration; null until first successful load.
    private static Properties m_properties = null;

    /**
     * This loads the configuration file.
     *
     * @return a Properties object representing the configuration properties
     * @throws IOException if the configuration file cannot be found or read
     */
    private static synchronized Properties getProperties() throws IOException {
        if (m_properties == null) {
            File configFile = ConfigFileConstants.getFile(ConfigFileConstants.RRD_CONFIG_FILE_NAME);
            loadProperties(new FileInputStream(configFile));
        }
        return m_properties;
    }

    /**
     * Load configuration from the given stream, replacing any previously
     * loaded configuration. System properties are used as defaults.
     * The stream is closed after reading.
     *
     * @param in stream to read properties from
     * @throws IOException if reading the stream fails
     */
    public static synchronized void loadProperties(InputStream in) throws IOException {
        Properties properties = new Properties(System.getProperties());
        properties.load(in);
        in.close();
        m_properties = properties;
    }

    /**
     * Replace the current configuration (primarily useful for tests).
     *
     * @param properties the properties to install
     */
    public static synchronized void setProperties(Properties properties) {
        m_properties = properties;
    }

    /**
     * Get a string valued property, returning default value if it is not set.
     *
     * @param name
     *            the property name
     * @param defaultVal
     *            the default value to use if the property is not set
     * @return the value of the property
     */
    public static String getProperty(String name, String defaultVal) {
        Category log = ThreadCategory.getInstance(RrdConfig.class);
        try {
            return getProperties().getProperty(name, defaultVal);
        } catch (IOException e) {
            log.error("Unable to read property " + name + " returning defaultValue: " + defaultVal, e);
            return defaultVal;
        }
    }

    /**
     * Get a boolean valued property, returning default value if it is not set
     * or is set to an invalid value. Only the literal "true" (case-insensitive)
     * yields {@code true}.
     *
     * @param name
     *            the property name
     * @param defaultVal
     *            the default value to use if the property is not set
     * @return the value of the property
     */
    public static boolean getProperty(String name, boolean defaultVal) {
        return "true".equalsIgnoreCase(getProperty(name, (defaultVal ? "true" : "false")));
    }

    /**
     * Get a int valued property, returning default value if it is not set or is
     * set to an invalid value. Decoding accepts decimal, hex (0x...) and octal
     * forms, per {@link Integer#decode(String)}.
     *
     * @param name
     *            the property name
     * @param defaultVal
     *            the default value to use if the property is not set
     * @return the value of the property
     */
    public static int getProperty(String name, int defaultVal) {
        String val = getProperty(name, (String) null);
        if (val != null) {
            try {
                return Integer.decode(val).intValue();
            } catch (NumberFormatException ignored) {
                // malformed value: deliberately fall through to the default
            }
        }
        return defaultVal;
    }

    /**
     * Get a long valued property, returning default value if it is not set or
     * is set to an invalid value.
     *
     * @param name
     *            the property name
     * @param defaultVal
     *            the default value to use if the property is not set
     * @return the value of the property
     */
    public static long getProperty(String name, long defaultVal) {
        String val = getProperty(name, (String) null);
        if (val != null) {
            try {
                return Long.decode(val).longValue();
            } catch (NumberFormatException ignored) {
                // malformed value: deliberately fall through to the default
            }
        }
        return defaultVal;
    }
}
|
opennms-rrd/opennms-rrd-api/src/main/java/org/opennms/netmgt/rrd/RrdConfig.java
|
//
// This file is part of the OpenNMS(R) Application.
//
// OpenNMS(R) is Copyright (C) 2002-2005 The OpenNMS Group, Inc. All rights reserved.
// OpenNMS(R) is a derivative work, containing both original code, included code and modified
// code that was published under the GNU General Public License. Copyrights for modified
// and included code are below.
//
// OpenNMS(R) is a registered trademark of The OpenNMS Group, Inc.
//
// Modifications:
//
// Aug 23, 2004: Created this file.
//
// Original code base Copyright (C) 1999-2001 Oculan Corp. All rights reserved.
//
// This program is free software; you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation; either version 2 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
//
// For more information contact:
// OpenNMS Licensing <license@opennms.org>
// http://www.opennms.org/
// http://www.opennms.com/
//
// Tab Size = 8
//
package org.opennms.netmgt.rrd;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.Properties;
import org.apache.log4j.Category;
import org.opennms.core.utils.ThreadCategory;
import org.opennms.netmgt.ConfigFileConstants;
/**
 * Provides access to the rrd configuration data.
 *
 * The configuration file is read on first access and layered over the
 * system properties; typed accessors fall back to a supplied default when
 * a property is missing or malformed.
 */
public class RrdConfig {
    private static Properties m_properties = null;

    /**
     * This loads the configuration file.
     *
     * @return a Properties object representing the configuration properties
     * @throws IOException if the configuration file cannot be read
     */
    private static synchronized Properties getProperties() throws IOException {
        if (m_properties != null) {
            return m_properties;
        }
        File configFile = ConfigFileConstants.getFile(ConfigFileConstants.RRD_CONFIG_FILE_NAME);
        loadProperties(new FileInputStream(configFile));
        return m_properties;
    }

    /**
     * Read a fresh configuration from the given stream (defaulting to the
     * system properties) and install it. The stream is closed afterwards.
     *
     * @param in stream to read properties from
     * @throws IOException if reading fails
     */
    public static synchronized void loadProperties(InputStream in) throws IOException {
        Properties loaded = new Properties(System.getProperties());
        loaded.load(in);
        in.close();
        m_properties = loaded;
    }

    /**
     * Install a configuration object directly.
     *
     * @param properties the properties to use from now on
     */
    public static synchronized void setProperties(Properties properties) {
        m_properties = properties;
    }

    /**
     * Get a string valued property, returning default value if it is not set.
     *
     * @param name the property name
     * @param defaultVal the default value to use if the property is not set
     * @return the value of the property
     */
    public static String getProperty(String name, String defaultVal) {
        Category log = ThreadCategory.getInstance(RrdConfig.class);
        try {
            return getProperties().getProperty(name, defaultVal);
        } catch (IOException e) {
            log.error("Unable to read property " + name + " returning defaultValue: " + defaultVal, e);
            return defaultVal;
        }
    }

    /**
     * Get a boolean valued property, returning default value if it is not set
     * or is set to an invalid value.
     *
     * @param name the property name
     * @param defaultVal the default value to use if the property is not set
     * @return the value of the property
     */
    public static boolean getProperty(String name, boolean defaultVal) {
        String fallback = defaultVal ? "true" : "false";
        return "true".equalsIgnoreCase(getProperty(name, fallback));
    }

    /**
     * Get a int valued property, returning default value if it is not set or
     * is set to an invalid value.
     *
     * @param name the property name
     * @param defaultVal the default value to use if the property is not set
     * @return the value of the property
     */
    public static int getProperty(String name, int defaultVal) {
        String raw = getProperty(name, (String) null);
        if (raw == null) {
            return defaultVal;
        }
        try {
            return Integer.decode(raw).intValue();
        } catch (NumberFormatException e) {
            return defaultVal;
        }
    }

    /**
     * Get a long valued property, returning default value if it is not set or
     * is set to an invalid value.
     *
     * @param name the property name
     * @param defaultVal the default value to use if the property is not set
     * @return the value of the property
     */
    public static long getProperty(String name, long defaultVal) {
        String raw = getProperty(name, (String) null);
        if (raw == null) {
            return defaultVal;
        }
        try {
            return Long.decode(raw).longValue();
        } catch (NumberFormatException e) {
            return defaultVal;
        }
    }
}
|
Indenting
|
opennms-rrd/opennms-rrd-api/src/main/java/org/opennms/netmgt/rrd/RrdConfig.java
|
Indenting
|
|
Java
|
lgpl-2.1
|
c998157bbb8e674ad3a644dc0b270dddb7cb1ed7
| 0
|
vaibhav345/lenskit,vaibhav345/lenskit,kluver/lenskit,kluver/lenskit,vaibhav345/lenskit,kluver/lenskit,kluver/lenskit,kluver/lenskit,vaibhav345/lenskit
|
/*
* LensKit, an open source recommender systems toolkit.
* Copyright 2010-2014 LensKit Contributors. See CONTRIBUTORS.md.
* Work on LensKit has been funded by the National Science Foundation under
* grants IIS 05-34939, 08-08692, 08-12148, and 10-17697.
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
* FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
* details.
*
* You should have received a copy of the GNU General Public License along with
* this program; if not, write to the Free Software Foundation, Inc., 51
* Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*/
package org.lenskit.data.dao.file;
import com.google.common.base.Preconditions;
import org.apache.commons.lang3.text.StrTokenizer;
import org.lenskit.data.entities.*;
import javax.annotation.Nullable;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
/**
 * Delimited text column entity format.
 *
 * Columns may be configured positionally ({@link #addColumn(TypedName)}) or
 * by header label ({@link #addColumn(String, TypedName)}); the two styles are
 * mutually exclusive.
 */
public class DelimitedColumnEntityFormat implements EntityFormat {
    // Field delimiter used for both header and data lines.
    private String delimiter = "\t";
    private int headerLines;
    private boolean readHeader;
    private EntityType entityType = EntityType.forName("rating");
    private Class<? extends EntityBuilder> entityBuilder = BasicEntityBuilder.class;
    // Cached constructor for entityBuilder; re-resolved when the class changes.
    private Constructor<? extends EntityBuilder> entityBuilderCtor;
    // Positional columns (mutually exclusive with labeledColumns).
    private List<TypedName<?>> columns;
    // Header-label-to-attribute mapping (mutually exclusive with columns).
    private Map<String,TypedName<?>> labeledColumns;

    /**
     * Get the delimiter for the entity format.
     * @return The entity format delimiter.
     */
    public String getDelimiter() {
        return delimiter;
    }

    /**
     * Set the delimiter to use for this entity format.
     * @param delim The delimiter to use.
     */
    public void setDelimiter(String delim) {
        delimiter = delim;
    }

    /**
     * Set whether to read a header.
     * @param header `true` to read a header line from the file.
     */
    public void setHeader(boolean header) {
        readHeader = header;
        headerLines = 1;
    }

    /**
     * Get the number of header lines to read.
     * @return The number of header lines to read.
     */
    public int getHeaderLines() {
        return headerLines;
    }

    /**
     * Set the number of header lines to read. Setting this **disables** {@link #setHeader(boolean)}.
     * @param lines The number of header lines to read.
     */
    public void setHeaderLines(int lines) {
        headerLines = lines;
        readHeader = false;
    }

    /**
     * Query whether this format uses the header line(s).
     * @return `true` if the reader will parse a header line.
     */
    boolean usesHeader() {
        return readHeader;
    }

    /**
     * Set the entity type.
     * @param type The entity type.
     */
    public void setEntityType(EntityType type) {
        entityType = type;
    }

    /**
     * Get the entity type.
     * @return The entity type.
     */
    @Override
    public EntityType getEntityType() {
        return entityType;
    }

    /**
     * Set the entity builder class.
     * @param builder The entity builder class.
     */
    public void setEntityBuilder(Class<? extends EntityBuilder> builder) {
        entityBuilder = builder;
    }

    /**
     * Get the entity builder class.
     * @return The entity builder class.
     */
    public Class<? extends EntityBuilder> getEntityBuilder() {
        return entityBuilder;
    }

    /**
     * Instantiate a new entity builder.
     * @return A new entity builder.
     * @throws IllegalArgumentException if the builder class has no
     *         {@code (EntityType)} constructor.
     */
    public EntityBuilder newEntityBuilder() {
        // Re-resolve the constructor if the builder class changed since last call.
        if (entityBuilderCtor == null || !entityBuilderCtor.getDeclaringClass().equals(entityBuilder)) {
            try {
                entityBuilderCtor = entityBuilder.getConstructor(EntityType.class);
            } catch (NoSuchMethodException e) {
                throw new IllegalArgumentException("cannot find suitable constructor for " + entityBuilder);
            }
        }
        try {
            return entityBuilderCtor.newInstance(entityType);
        } catch (InstantiationException | IllegalAccessException | InvocationTargetException e) {
            throw new RuntimeException("could not instantiate entity builder", e);
        }
    }

    /**
     * Associate an attribute with the next column. The first time this is called it specifies the label for the
     * first column. The second time this is called it specifies the second column label and so forth.
     *
     * Once this method has been called, {@link #addColumn(String, TypedName)} cannot be called.
     *
     * @param attr The attribute to add as a column, or `null` to skip the next column.
     * @throws IllegalStateException if non-labeled columns have already been specified.
     */
    public void addColumn(@Nullable TypedName<?> attr) {
        if (columns == null) {
            Preconditions.checkState(labeledColumns == null, "mixed labeled and unlabeled columns");
            columns = new ArrayList<>();
        }
        columns.add(attr);
    }

    /**
     * Add columns to a format. This is exactly equivalent to calling {@link #addColumn(TypedName)} for each
     * column.
     *
     * @param columns The columns to add.
     */
    public void addColumns(TypedName<?>... columns) {
        for (TypedName<?> col: columns) {
            addColumn(col);
        }
    }

    /**
     * Add a column.
     * @param label The header label.
     * @param attr The attribute to add as a column, or `null` to skip the next column.
     * @throws IllegalStateException if non-labeled columns have already been specified.
     */
    public void addColumn(String label, @Nullable TypedName<?> attr) {
        if (labeledColumns == null) {
            Preconditions.checkState(columns == null, "mixed labeled and unlabeled columns");
            labeledColumns = new LinkedHashMap<>();
        }
        labeledColumns.put(label, attr);
    }

    @Override
    public LineEntityParser makeParser(List<String> header) {
        assert header.size() == getHeaderLines();
        if (usesHeader() && labeledColumns != null) {
            assert header.size() == 1;
            // Resolve column order from the header labels; unknown labels map
            // to null and are skipped during parsing.
            List<TypedName<?>> cols = new ArrayList<>();
            StrTokenizer tok = new StrTokenizer(header.get(0), delimiter);
            while (tok.hasNext()) {
                String label = tok.next();
                cols.add(labeledColumns.get(label));
            }
            return new OrderedParser(cols, tok);
        } else {
            return new OrderedParser(columns, new StrTokenizer("", delimiter));
        }
    }

    /**
     * Parse a string value and set the corresponding attribute on a builder.
     * Captures the column's type parameter so the attribute set is type-safe
     * (avoids a raw-type use of {@code TypedName}).
     */
    private static <T> void applyAttribute(EntityBuilder builder, TypedName<T> column, String value) {
        builder.setAttribute(column, column.parseString(value));
    }

    private class OrderedParser extends LineEntityParser {
        // Running line count; doubles as the synthetic entity ID.
        int lineNo = 0;
        StrTokenizer tokenizer;
        List<TypedName<?>> fileColumns;

        public OrderedParser(List<TypedName<?>> columns, StrTokenizer tok) {
            fileColumns = columns;
            tokenizer = tok;
        }

        @Override
        public Entity parse(String line) {
            tokenizer.reset(line);
            lineNo += 1;
            EntityBuilder builder = newEntityBuilder()
                    .setId(lineNo);
            // since ID is already set, a subsequent ID column will properly override
            for (TypedName<?> column: fileColumns) {
                String value = tokenizer.nextToken();
                if (value != null && column != null) {
                    applyAttribute(builder, column, value);
                }
            }
            return builder.build();
        }
    }
}
|
lenskit-core/src/main/java/org/lenskit/data/dao/file/DelimitedColumnEntityFormat.java
|
/*
* LensKit, an open source recommender systems toolkit.
* Copyright 2010-2014 LensKit Contributors. See CONTRIBUTORS.md.
* Work on LensKit has been funded by the National Science Foundation under
* grants IIS 05-34939, 08-08692, 08-12148, and 10-17697.
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
* FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
* details.
*
* You should have received a copy of the GNU General Public License along with
* this program; if not, write to the Free Software Foundation, Inc., 51
* Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*/
package org.lenskit.data.dao.file;
import com.google.common.base.Preconditions;
import org.apache.commons.lang3.text.StrTokenizer;
import org.lenskit.data.entities.*;
import javax.annotation.Nullable;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
/**
 * Delimited text column entity format.
 *
 * Columns may be configured positionally ({@link #addColumn(TypedName)}) or
 * by header label ({@link #addColumn(String, TypedName)}); the two styles
 * are mutually exclusive.
 */
public class DelimitedColumnEntityFormat implements EntityFormat {
    // Field delimiter used for both header and data lines.
    private String delimiter = "\t";
    private int headerLines;
    private boolean readHeader;
    private EntityType entityType = EntityType.forName("rating");
    private Class<? extends EntityBuilder> entityBuilder = BasicEntityBuilder.class;
    // Cached constructor for entityBuilder; re-resolved when the class changes.
    private Constructor<? extends EntityBuilder> entityBuilderCtor;
    // Positional columns (mutually exclusive with labeledColumns).
    private List<TypedName<?>> columns;
    // Header-label-to-attribute mapping (mutually exclusive with columns).
    private Map<String,TypedName<?>> labeledColumns;
    /**
     * Get the delimiter for the entity format.
     * @return The entity format delimiter.
     */
    public String getDelimiter() {
        return delimiter;
    }
    /**
     * Set the delimiter to use for this entity format.
     * @param delim The delimiter to use.
     */
    public void setDelimiter(String delim) {
        delimiter = delim;
    }
    /**
     * Set whether to read a header. Also resets the header line count to 1.
     * @param header `true` to read a header line from the file.
     */
    public void setHeader(boolean header) {
        readHeader = header;
        headerLines = 1;
    }
    /**
     * Get the number of header lines to read.
     * @return The number of header lines to read.
     */
    public int getHeaderLines() {
        return headerLines;
    }
    /**
     * Set the number of header lines to read. Setting this **disables** {@link #setHeader(boolean)}.
     * @param lines The number of header lines to read.
     */
    public void setHeaderLines(int lines) {
        headerLines = lines;
        readHeader = false;
    }
    /**
     * Query whether this format uses the header line(s).
     * @return `true` if the reader will parse a header line.
     */
    boolean usesHeader() {
        return readHeader;
    }
    /**
     * Set the entity type.
     * @param type The entity type.
     */
    public void setEntityType(EntityType type) {
        entityType = type;
    }
    /**
     * Get the entity type.
     * @return The entity type.
     */
    @Override
    public EntityType getEntityType() {
        return entityType;
    }
    /**
     * Set the entity builder class.
     * @param builder The entity builder class.
     */
    public void setEntityBuilder(Class<? extends EntityBuilder> builder) {
        entityBuilder = builder;
    }
    /**
     * Get the entity builder class.
     * @return The entity builder class.
     */
    public Class<? extends EntityBuilder> getEntityBuilder() {
        return entityBuilder;
    }
    /**
     * Instantiate a new entity builder for the configured entity type.
     * @return A new entity builder.
     * @throws IllegalArgumentException if the builder class has no
     *         {@code (EntityType)} constructor.
     */
    public EntityBuilder newEntityBuilder() {
        // Re-resolve the cached constructor if the builder class changed.
        if (entityBuilderCtor == null || !entityBuilderCtor.getDeclaringClass().equals(entityBuilder)) {
            try {
                entityBuilderCtor = entityBuilder.getConstructor(EntityType.class);
            } catch (NoSuchMethodException e) {
                throw new IllegalArgumentException("cannot find suitable constructor for " + entityBuilder);
            }
        }
        try {
            return entityBuilderCtor.newInstance(entityType);
        } catch (InstantiationException | IllegalAccessException | InvocationTargetException e) {
            throw new RuntimeException("could not instantiate entity builder", e);
        }
    }
    /**
     * Add a column. Each call appends one positional column, in call order.
     * @param attr The attribute to add as a column, or `null` to skip the next column.
     * @throws IllegalStateException if non-labeled columns have already been specified.
     */
    public void addColumn(@Nullable TypedName<?> attr) {
        if (columns == null) {
            Preconditions.checkState(labeledColumns == null, "mixed labeled and unlabeled columns");
            columns = new ArrayList<>();
        }
        columns.add(attr);
    }
    /**
     * Add columns to a format. Equivalent to calling {@link #addColumn(TypedName)}
     * once per column, in order.
     * @param columns The columns to add.
     */
    public void addColumns(TypedName<?>... columns) {
        for (TypedName<?> col: columns) {
            addColumn(col);
        }
    }
    /**
     * Add a column identified by its header label.
     * @param label The header label.
     * @param attr The attribute to add as a column, or `null` to skip the next column.
     * @throws IllegalStateException if non-labeled columns have already been specified.
     */
    public void addColumn(String label, @Nullable TypedName<?> attr) {
        if (labeledColumns == null) {
            Preconditions.checkState(columns == null, "mixed labeled and unlabeled columns");
            labeledColumns = new LinkedHashMap<>();
        }
        labeledColumns.put(label, attr);
    }
    @Override
    public LineEntityParser makeParser(List<String> header) {
        assert header.size() == getHeaderLines();
        if (usesHeader() && labeledColumns != null) {
            assert header.size() == 1;
            // Resolve column order from the header labels; labels missing
            // from labeledColumns map to null and are skipped when parsing.
            List<TypedName<?>> cols = new ArrayList<>();
            StrTokenizer tok = new StrTokenizer(header.get(0), delimiter);
            while (tok.hasNext()) {
                String label = tok.next();
                cols.add(labeledColumns.get(label));
            }
            return new OrderedParser(cols, tok);
        } else {
            return new OrderedParser(columns, new StrTokenizer("", delimiter));
        }
    }
    // Parses each line by position, reusing a single tokenizer instance.
    private class OrderedParser extends LineEntityParser {
        // Running line count; doubles as the synthetic entity ID.
        int lineNo = 0;
        StrTokenizer tokenizer;
        List<TypedName<?>> fileColumns;
        public OrderedParser(List<TypedName<?>> columns, StrTokenizer tok) {
            fileColumns = columns;
            tokenizer = tok;
        }
        @Override
        public Entity parse(String line) {
            tokenizer.reset(line);
            lineNo += 1;
            EntityBuilder builder = newEntityBuilder()
                    .setId(lineNo);
            // since ID is already set, a subsequent ID column will properly override
            for (TypedName column: fileColumns) {
                String value = tokenizer.nextToken();
                if (value != null && column != null) {
                    builder.setAttribute(column, column.parseString(value));
                }
            }
            return builder.build();
        }
    }
}
|
Improve delimited format addColumn docs
|
lenskit-core/src/main/java/org/lenskit/data/dao/file/DelimitedColumnEntityFormat.java
|
Improve delimited format addColumn docs
|
|
Java
|
lgpl-2.1
|
17ae996aa6c5ac207290c681e44de5190aeda6d5
| 0
|
julie-sullivan/phytomine,julie-sullivan/phytomine,julie-sullivan/phytomine,julie-sullivan/phytomine,julie-sullivan/phytomine,julie-sullivan/phytomine,julie-sullivan/phytomine
|
package org.intermine.objectstore.query;
/*
* Copyright (C) 2002-2012 FlyMine
*
* This code may be freely distributed and modified under the
* terms of the GNU Lesser General Public Licence. This should
* be distributed with the code. See the LICENSE file for more
* information or http://www.gnu.org/copyleft/lesser.html.
*
*/
import java.util.ArrayList;
import java.util.List;
/**
 * Operations used in building constraints.
 *
 * <p>Instances register themselves in the static {@code values} list in
 * declaration order, and {@link #getIndex()} / {@link #getOpForIndex(Integer)}
 * depend on that order — do not reorder the constants below.</p>
 *
 * TODO: These should be enums so that they are switchable.
 *
 * @author Mark Woodbridge
 * @author Matthew Wakeling
 */
public final class ConstraintOp
{
    /** Every op constructed so far, in declaration order (see getIndex()). */
    private static List<ConstraintOp> values = new ArrayList<ConstraintOp>();

    /** The display/SQL name of this op, e.g. "=" or "NOT IN". */
    private final String name;

    /** Require that the two arguments are equal, regardless of case for strings */
    public static final ConstraintOp EQUALS = new ConstraintOp("=");
    /** Require that the two arguments are exactly equal */
    public static final ConstraintOp EXACT_MATCH = new ConstraintOp("==");
    /** Require that the two arguments are not equal, ignoring case for strings */
    public static final ConstraintOp NOT_EQUALS = new ConstraintOp("!=");
    /** Require that the two arguments are not equal */
    public static final ConstraintOp STRICT_NOT_EQUALS = new ConstraintOp("!==");
    /** Require that the first argument is less than the second */
    public static final ConstraintOp LESS_THAN = new ConstraintOp("<");
    /** Require that the first argument is less than or equal to the second */
    public static final ConstraintOp LESS_THAN_EQUALS = new ConstraintOp("<=");
    /** Require that the first argument is greater than the second */
    public static final ConstraintOp GREATER_THAN = new ConstraintOp(">");
    /** Require that the first argument is greater than or equal to the second */
    public static final ConstraintOp GREATER_THAN_EQUALS = new ConstraintOp(">=");
    /** Require that the two arguments match */
    public static final ConstraintOp MATCHES = new ConstraintOp("LIKE");
    /** Require that the two arguments do not match */
    public static final ConstraintOp DOES_NOT_MATCH = new ConstraintOp("NOT LIKE");
    /** Require that the argument is null */
    public static final ConstraintOp IS_NULL = new ConstraintOp("IS NULL");
    /** Synonym for IS NULL **/
    public static final ConstraintOp IS_EMPTY = IS_NULL;
    /** Require that the argument is not null */
    public static final ConstraintOp IS_NOT_NULL = new ConstraintOp("IS NOT NULL");
    /** Synonym for IS NOT NULL **/
    public static final ConstraintOp IS_NOT_EMPTY = IS_NOT_NULL;
    /** Require that the first argument contains the second */
    public static final ConstraintOp CONTAINS = new ConstraintOp("CONTAINS");
    /** Require that the first argument does not contain the second */
    public static final ConstraintOp DOES_NOT_CONTAIN = new ConstraintOp("DOES NOT CONTAIN");
    /** Require that the first argument is IN the second */
    public static final ConstraintOp IN = new ConstraintOp("IN");
    /** Require that the first argument is NOT IN the second */
    public static final ConstraintOp NOT_IN = new ConstraintOp("NOT IN");
    /** Subquery exists */
    public static final ConstraintOp EXISTS = CONTAINS;
    /** Subquery does not exist */
    public static final ConstraintOp DOES_NOT_EXIST = DOES_NOT_CONTAIN;
    /** Combine constraints with the AND operation */
    public static final ConstraintOp AND = new ConstraintOp("AND");
    /** Combine constraints with the OR operation */
    public static final ConstraintOp OR = new ConstraintOp("OR");
    /** Combine constraints with the NAND operation */
    public static final ConstraintOp NAND = new ConstraintOp("NAND");
    /** Combine constraints with the NOR operation */
    public static final ConstraintOp NOR = new ConstraintOp("NOR");
    /** Special operation indicating a bag upload step should be used, for the webapp only. */
    public static final ConstraintOp LOOKUP = new ConstraintOp("LOOKUP");
    /** Require that a range overlaps another range */
    public static final ConstraintOp OVERLAPS = new ConstraintOp("OVERLAPS");
    /** Require that a range does not overlap another range */
    public static final ConstraintOp DOES_NOT_OVERLAP = new ConstraintOp("DOES NOT OVERLAP");
    /** Require that the first argument is one of a list of values */
    public static final ConstraintOp ONE_OF = new ConstraintOp("ONE OF");
    /** Require that the first argument is not one of a list of values */
    public static final ConstraintOp NONE_OF = new ConstraintOp("NONE OF");
    /** Require that the first argument lie entirely within the second. **/
    public static final ConstraintOp WITHIN = new ConstraintOp("WITHIN");
    /** Require that some part of the first argument lie outside the second. **/
    public static final ConstraintOp OUTSIDE = new ConstraintOp("OUTSIDE");
    /** Require that the first argument be of the type named by the right argument **/
    public static final ConstraintOp ISA = new ConstraintOp("ISA");
    /** Require that the first argument not be of the type named by the right argument **/
    public static final ConstraintOp ISNT = new ConstraintOp("ISNT");
    /** Require that the left argument has at least one of the right argument. **/
    public static final ConstraintOp HAS = new ConstraintOp("HAS");
    /** Require that the left argument does not have any of the right argument. **/
    public static final ConstraintOp DOES_NOT_HAVE = new ConstraintOp("DOES NOT HAVE");

    /**
     * Create an op and record it in the shared registry.
     * @param name the display/SQL name of the op
     */
    private ConstraintOp(String name) {
        this.name = name;
        values.add(this);
    }

    /**
     * Get the String representation of this ConstraintOp
     * @return a String
     */
    @Override
    public String toString() {
        return name;
    }

    /**
     * Get an index for this ConstraintOp
     * TODO: expunge this horror. Webapp code should NOT BE IN THE OBJECTSTORE!
     * (Only for use in webapp)
     * @return the index
     */
    public Integer getIndex() {
        // Integer.valueOf instead of the deprecated new Integer(int) constructor.
        return Integer.valueOf(values.indexOf(this));
    }

    /**
     * Convert an index to a ConstraintOp
     * (Only for use in webapp)
     * @param index the index
     * @return the ConstraintOp
     */
    public static ConstraintOp getOpForIndex(Integer index) {
        return values.get(index.intValue());
    }

    /**
     * Get the internal list of ConstraintOps
     * (Only for use in webapp)
     * @return the List of ConstraintOps
     */
    public static List<ConstraintOp> getValues() {
        return values;
    }

    /**
     * Get the negated op.  Ops without a logical negation (e.g. LOOKUP, HAS,
     * OVERLAPS) cause an IllegalArgumentException.
     *
     * @return the negated op
     */
    public ConstraintOp negate() {
        if (this == EQUALS) {
            return NOT_EQUALS;
        } else if (this == EXACT_MATCH) {
            return STRICT_NOT_EQUALS;
        } else if (this == NOT_EQUALS) {
            return EQUALS;
        } else if (this == STRICT_NOT_EQUALS) {
            return EXACT_MATCH;
        } else if (this == LESS_THAN) {
            return GREATER_THAN_EQUALS;
        } else if (this == GREATER_THAN_EQUALS) {
            return LESS_THAN;
        } else if (this == GREATER_THAN) {
            return LESS_THAN_EQUALS;
        } else if (this == LESS_THAN_EQUALS) {
            return GREATER_THAN;
        } else if (this == MATCHES) {
            return DOES_NOT_MATCH;
        } else if (this == DOES_NOT_MATCH) {
            return MATCHES;
        } else if (this == IS_NULL) {
            return IS_NOT_NULL;
        } else if (this == IS_NOT_NULL) {
            return IS_NULL;
        } else if (this == CONTAINS) {
            return DOES_NOT_CONTAIN;
        } else if (this == DOES_NOT_CONTAIN) {
            return CONTAINS;
        } else if (this == IN) {
            return NOT_IN;
        } else if (this == NOT_IN) {
            return IN;
        } else if (this == AND) {
            return NAND;
        } else if (this == NAND) {
            return AND;
        } else if (this == OR) {
            return NOR;
        } else if (this == NOR) {
            return OR;
        } else if (this == ONE_OF) {
            return NONE_OF;
        } else if (this == NONE_OF) {
            return ONE_OF;
        } else if (this == WITHIN) {
            return OUTSIDE;
        } else if (this == OUTSIDE) {
            return WITHIN;
        } else if (this == ISA) {
            return ISNT;
        } else if (this == ISNT) {
            return ISA;
        }
        throw new IllegalArgumentException("Unknown op");
    }

    /**
     * Get ConstraintOp for given operation code.
     * @param operationCode operation as string
     * @return ConstraintOp if operation code is valid else null
     */
    public static ConstraintOp getConstraintOp(String operationCode) {
        if (operationCode == null) {
            return null;
        }
        // equalsIgnoreCase below already ignores case, so the previous
        // toUpperCase() call was redundant; only whitespace needs trimming.
        String opCode = operationCode.trim();
        for (ConstraintOp op : values) {
            if (op.getName().equalsIgnoreCase(opCode)) {
                return op;
            }
        }
        return null;
    }

    private String getName() {
        return name;
    }
}
|
intermine/objectstore/main/src/org/intermine/objectstore/query/ConstraintOp.java
|
package org.intermine.objectstore.query;
/*
* Copyright (C) 2002-2012 FlyMine
*
* This code may be freely distributed and modified under the
* terms of the GNU Lesser General Public Licence. This should
* be distributed with the code. See the LICENSE file for more
* information or http://www.gnu.org/copyleft/lesser.html.
*
*/
import java.util.ArrayList;
import java.util.List;
/**
 * Operations used in building constraints.
 *
 * <p>Instances register themselves in the static {@code values} list in
 * declaration order, and {@link #getIndex()} / {@link #getOpForIndex(Integer)}
 * depend on that order — do not reorder the constants below.</p>
 *
 * @author Mark Woodbridge
 * @author Matthew Wakeling
 */
public final class ConstraintOp
{
    /** Every op constructed so far, in declaration order (see getIndex()). */
    private static List<ConstraintOp> values = new ArrayList<ConstraintOp>();

    /** The display/SQL name of this op, e.g. "=" or "NOT IN". */
    private final String name;

    /** Require that the two arguments are equal, regardless of case for strings */
    public static final ConstraintOp EQUALS = new ConstraintOp("=");
    /** Require that the two arguments are exactly equal */
    public static final ConstraintOp EXACT_MATCH = new ConstraintOp("==");
    /** Require that the two arguments are not equal, ignoring case for strings */
    public static final ConstraintOp NOT_EQUALS = new ConstraintOp("!=");
    /** Require that the two arguments are not equal */
    public static final ConstraintOp STRICT_NOT_EQUALS = new ConstraintOp("!==");
    /** Require that the first argument is less than the second */
    public static final ConstraintOp LESS_THAN = new ConstraintOp("<");
    /** Require that the first argument is less than or equal to the second */
    public static final ConstraintOp LESS_THAN_EQUALS = new ConstraintOp("<=");
    /** Require that the first argument is greater than the second */
    public static final ConstraintOp GREATER_THAN = new ConstraintOp(">");
    /** Require that the first argument is greater than or equal to the second */
    public static final ConstraintOp GREATER_THAN_EQUALS = new ConstraintOp(">=");
    /** Require that the two arguments match */
    public static final ConstraintOp MATCHES = new ConstraintOp("LIKE");
    /** Require that the two arguments do not match */
    public static final ConstraintOp DOES_NOT_MATCH = new ConstraintOp("NOT LIKE");
    /** Require that the argument is null */
    public static final ConstraintOp IS_NULL = new ConstraintOp("IS NULL");
    /** Synonym for IS NULL **/
    public static final ConstraintOp IS_EMPTY = IS_NULL;
    /** Require that the argument is not null */
    public static final ConstraintOp IS_NOT_NULL = new ConstraintOp("IS NOT NULL");
    /** Synonym for IS NOT NULL **/
    public static final ConstraintOp IS_NOT_EMPTY = IS_NOT_NULL;
    /** Require that the first argument contains the second */
    public static final ConstraintOp CONTAINS = new ConstraintOp("CONTAINS");
    /** Require that the first argument does not contain the second */
    public static final ConstraintOp DOES_NOT_CONTAIN = new ConstraintOp("DOES NOT CONTAIN");
    /** Require that the first argument is IN the second */
    public static final ConstraintOp IN = new ConstraintOp("IN");
    /** Require that the first argument is NOT IN the second */
    public static final ConstraintOp NOT_IN = new ConstraintOp("NOT IN");
    /** Subquery exists */
    public static final ConstraintOp EXISTS = CONTAINS;
    /** Subquery does not exist */
    public static final ConstraintOp DOES_NOT_EXIST = DOES_NOT_CONTAIN;
    /** Combine constraints with the AND operation */
    public static final ConstraintOp AND = new ConstraintOp("AND");
    /** Combine constraints with the OR operation */
    public static final ConstraintOp OR = new ConstraintOp("OR");
    /** Combine constraints with the NAND operation */
    public static final ConstraintOp NAND = new ConstraintOp("NAND");
    /** Combine constraints with the NOR operation */
    public static final ConstraintOp NOR = new ConstraintOp("NOR");
    /** Special operation indicating a bag upload step should be used, for the webapp only. */
    public static final ConstraintOp LOOKUP = new ConstraintOp("LOOKUP");
    /** Require that a range overlaps another range */
    public static final ConstraintOp OVERLAPS = new ConstraintOp("OVERLAPS");
    /** Require that a range does not overlap another range */
    public static final ConstraintOp DOES_NOT_OVERLAP = new ConstraintOp("DOES NOT OVERLAP");
    /** Require that the first argument is one of a list of values */
    public static final ConstraintOp ONE_OF = new ConstraintOp("ONE OF");
    /** Require that the first argument is not one of a list of values */
    public static final ConstraintOp NONE_OF = new ConstraintOp("NONE OF");
    /** Require that the first argument lie entirely within the second. **/
    public static final ConstraintOp WITHIN = new ConstraintOp("WITHIN");
    /** Require that some part of the first argument lie outside the second. **/
    public static final ConstraintOp OUTSIDE = new ConstraintOp("OUTSIDE");
    /** Require that the first argument be of the type named by the right argument **/
    public static final ConstraintOp ISA = new ConstraintOp("ISA");
    /** Require that the first argument not be of the type named by the right argument **/
    public static final ConstraintOp ISNT = new ConstraintOp("ISNT");
    /** Require that the left argument has at least one of the right argument. **/
    public static final ConstraintOp HAS = new ConstraintOp("HAS");
    /** Require that the left argument does not have any of the right argument. **/
    public static final ConstraintOp DOES_NOT_HAVE = new ConstraintOp("DOES NOT HAVE");

    /**
     * Create an op and record it in the shared registry.
     * @param name the display/SQL name of the op
     */
    private ConstraintOp(String name) {
        this.name = name;
        values.add(this);
    }

    /**
     * Get the String representation of this ConstraintOp
     * @return a String
     */
    @Override
    public String toString() {
        return name;
    }

    /**
     * Get an index for this ConstraintOp
     * (Only for use in webapp)
     * @return the index
     */
    public Integer getIndex() {
        // Integer.valueOf instead of the deprecated new Integer(int) constructor.
        return Integer.valueOf(values.indexOf(this));
    }

    /**
     * Convert an index to a ConstraintOp
     * (Only for use in webapp)
     * @param index the index
     * @return the ConstraintOp
     */
    public static ConstraintOp getOpForIndex(Integer index) {
        return values.get(index.intValue());
    }

    /**
     * Get the internal list of ConstraintOps
     * (Only for use in webapp)
     * @return the List of ConstraintOps
     */
    public static List<ConstraintOp> getValues() {
        return values;
    }

    /**
     * Get the negated op.  Ops without a logical negation (e.g. LOOKUP, HAS,
     * OVERLAPS) cause an IllegalArgumentException.
     *
     * @return the negated op
     */
    public ConstraintOp negate() {
        if (this == EQUALS) {
            return NOT_EQUALS;
        } else if (this == EXACT_MATCH) {
            return STRICT_NOT_EQUALS;
        } else if (this == NOT_EQUALS) {
            return EQUALS;
        } else if (this == STRICT_NOT_EQUALS) {
            return EXACT_MATCH;
        } else if (this == LESS_THAN) {
            return GREATER_THAN_EQUALS;
        } else if (this == GREATER_THAN_EQUALS) {
            return LESS_THAN;
        } else if (this == GREATER_THAN) {
            return LESS_THAN_EQUALS;
        } else if (this == LESS_THAN_EQUALS) {
            return GREATER_THAN;
        } else if (this == MATCHES) {
            return DOES_NOT_MATCH;
        } else if (this == DOES_NOT_MATCH) {
            return MATCHES;
        } else if (this == IS_NULL) {
            return IS_NOT_NULL;
        } else if (this == IS_NOT_NULL) {
            return IS_NULL;
        } else if (this == CONTAINS) {
            return DOES_NOT_CONTAIN;
        } else if (this == DOES_NOT_CONTAIN) {
            return CONTAINS;
        } else if (this == IN) {
            return NOT_IN;
        } else if (this == NOT_IN) {
            return IN;
        } else if (this == AND) {
            return NAND;
        } else if (this == NAND) {
            return AND;
        } else if (this == OR) {
            return NOR;
        } else if (this == NOR) {
            return OR;
        } else if (this == ONE_OF) {
            return NONE_OF;
        } else if (this == NONE_OF) {
            return ONE_OF;
        } else if (this == WITHIN) {
            return OUTSIDE;
        } else if (this == OUTSIDE) {
            return WITHIN;
        } else if (this == ISA) {
            return ISNT;
        } else if (this == ISNT) {
            return ISA;
        }
        throw new IllegalArgumentException("Unknown op");
    }

    /**
     * Get ConstraintOp for given operation code.
     * @param operationCode operation as string
     * @return ConstraintOp if operation code is valid else null
     */
    public static ConstraintOp getConstraintOp(String operationCode) {
        if (operationCode == null) {
            return null;
        }
        // equalsIgnoreCase below already ignores case, so the previous
        // toUpperCase() call was redundant; only whitespace needs trimming.
        String opCode = operationCode.trim();
        for (ConstraintOp op : values) {
            if (op.getName().equalsIgnoreCase(opCode)) {
                return op;
            }
        }
        return null;
    }

    private String getName() {
        return name;
    }
}
|
Added some notes to the future about changes needed in this class
Former-commit-id: fd10174838809cd1c6674042e43ed347220b969a
|
intermine/objectstore/main/src/org/intermine/objectstore/query/ConstraintOp.java
|
Added some notes to the future about changes needed in this class
|
|
Java
|
apache-2.0
|
8edd3ec573649443c2dc0747cf3cddc1608d43d6
| 0
|
apache/commons-codec,apache/commons-codec,apache/commons-codec
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.codec.net;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import java.nio.charset.Charset;
import java.util.Arrays;
import org.apache.commons.codec.DecoderException;
import org.apache.commons.codec.EncoderException;
import org.junit.Assert;
import org.junit.Ignore;
import org.junit.Test;
/**
 * Percent codec test cases.
 */
public class PercentCodecTest {

    /** ASCII letters are safe by default and must round-trip unchanged. */
    @Test
    public void testBasicEncodeDecode() throws Exception {
        PercentCodec percentCodec = new PercentCodec();
        final String input = "abcdABCD";
        byte[] encoded = percentCodec.encode(input.getBytes(Charset.forName("UTF-8")));
        final String encodedS = new String(encoded, "UTF-8");
        byte[] decoded = percentCodec.decode(encoded);
        final String decodedS = new String(decoded, "UTF-8");
        assertEquals("Basic PercentCodec encoding test", input, encodedS);
        assertEquals("Basic PercentCodec decoding test", input, decodedS);
    }

    // Disabled: space is currently passed through unescaped by the default codec,
    // so the %20 expectation does not hold yet.
    @Test
    @Ignore
    public void testBasicSpace() throws Exception {
        PercentCodec percentCodec = new PercentCodec();
        final String input = " ";
        byte[] encoded = percentCodec.encode(input.getBytes(Charset.forName("UTF-8")));
        Assert.assertArrayEquals("%20".getBytes(Charset.forName("UTF-8")), encoded);
    }

    /** Extra bytes supplied to the constructor ("abcdef") must also be escaped. */
    @Test
    public void testConfigurablePercentEncoder() throws Exception {
        final String input = "abc123_-.*\u03B1\u03B2";
        PercentCodec percentCodec = new PercentCodec("abcdef".getBytes("UTF-8"), false);
        byte[] encoded = percentCodec.encode(input.getBytes(Charset.forName("UTF-8")));
        final String encodedS = new String(encoded, "UTF-8");
        assertEquals("Configurable PercentCodec encoding test", "%61%62%63123_-.*%CE%B1%CE%B2", encodedS);
        final byte[] decoded = percentCodec.decode(encoded);
        assertEquals("Configurable PercentCodec decoding test", new String(decoded, "UTF-8"), input);
    }

    /** Decoding a truncated escape sequence must fail with a DecoderException. */
    @Test
    public void testDecodeInvalidEncodedResultDecoding() throws Exception {
        String inputS = "\u03B1\u03B2";
        PercentCodec percentCodec = new PercentCodec();
        byte[] encoded = percentCodec.encode(inputS.getBytes("UTF-8"));
        try {
            percentCodec.decode(Arrays.copyOf(encoded, encoded.length - 1)); //exclude one byte
            // Fix: without this fail() the test silently passed when no
            // exception was thrown at all.
            Assert.fail("Expected DecoderException for truncated percent-encoded input");
        } catch (Exception e) {
            assertTrue(DecoderException.class.isInstance(e) &&
                        ArrayIndexOutOfBoundsException.class.isInstance(e.getCause()));
        }
    }

    /** decode(Object) must pass a null input straight through as null. */
    @Test
    public void testDecodeNullObject() throws Exception {
        PercentCodec percentCodec = new PercentCodec();
        assertEquals(percentCodec.decode((Object) null), null);
    }

    /** decode(Object) must reject types other than byte[]. */
    @Test(expected = DecoderException.class)
    public void testDecodeUnsupportedObject() throws Exception {
        PercentCodec percentCodec = new PercentCodec();
        percentCodec.decode("test");
    }

    /** encode(Object) must pass a null input straight through as null. */
    @Test
    public void testEncodeNullObject() throws Exception {
        PercentCodec percentCodec = new PercentCodec();
        assertEquals(percentCodec.encode((Object) null), null);
    }

    /** encode(Object) must reject types other than byte[]. */
    @Test(expected = EncoderException.class)
    public void testEncodeUnsupportedObject() throws Exception {
        PercentCodec percentCodec = new PercentCodec();
        percentCodec.encode("test");
    }

    /** Null and empty inputs must be returned as-is by both directions. */
    @Test
    public void testPercentEncoderDecoderWithNullOrEmptyInput() throws Exception {
        PercentCodec percentCodec = new PercentCodec(null, true);
        assertEquals("Null input value encoding test", percentCodec.encode(null), null);
        assertEquals("Null input value decoding test", percentCodec.decode(null), null);
        byte[] emptyInput = "".getBytes("UTF-8");
        assertEquals("Empty input value encoding test", percentCodec.encode(emptyInput), emptyInput);
        assertTrue("Empty input value decoding test", Arrays.equals(percentCodec.decode(emptyInput), emptyInput));
    }

    /** With plusForSpace=true, spaces encode to '+' and '+' decodes back to space. */
    @Test
    public void testPercentEncoderDecoderWithPlusForSpace() throws Exception {
        final String input = "a b c d";
        PercentCodec percentCodec = new PercentCodec(null, true);
        byte[] encoded = percentCodec.encode(input.getBytes(Charset.forName("UTF-8")));
        final String encodedS = new String(encoded, "UTF-8");
        assertEquals("PercentCodec plus for space encoding test", "a+b+c+d", encodedS);
        byte[] decode = percentCodec.decode(encoded);
        assertEquals("PercentCodec plus for space decoding test", new String(decode, "UTF-8"), input);
    }

    /** Safe characters must survive the Object-typed encode/decode round trip. */
    @Test
    public void testSafeCharEncodeDecodeObject() throws Exception {
        PercentCodec percentCodec = new PercentCodec(null, true);
        final String input = "abc123_-.*";
        Object encoded = percentCodec.encode((Object) input.getBytes(Charset.forName("UTF-8")));
        final String encodedS = new String((byte[]) encoded, "UTF-8");
        Object decoded = percentCodec.decode(encoded);
        final String decodedS = new String((byte[]) decoded, "UTF-8");
        assertEquals("Basic PercentCodec safe char encoding test", input, encodedS);
        assertEquals("Basic PercentCodec safe char decoding test", input, decodedS);
    }

    /** Non-ASCII bytes must be escaped (%XX per UTF-8 byte) and round-trip. */
    @Test
    public void testUnsafeCharEncodeDecode() throws Exception {
        PercentCodec percentCodec = new PercentCodec();
        final String input = "\u03B1\u03B2\u03B3\u03B4\u03B5\u03B6% ";
        byte[] encoded = percentCodec.encode(input.getBytes(Charset.forName("UTF-8")));
        final String encodedS = new String(encoded, "UTF-8");
        byte[] decoded = percentCodec.decode(encoded);
        final String decodedS = new String(decoded, "UTF-8");
        assertEquals("Basic PercentCodec unsafe char encoding test", "%CE%B1%CE%B2%CE%B3%CE%B4%CE%B5%CE%B6%25 ", encodedS);
        assertEquals("Basic PercentCodec unsafe char decoding test", input, decodedS);
    }
}
|
src/test/java/org/apache/commons/codec/net/PercentCodecTest.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.codec.net;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import java.nio.charset.Charset;
import java.util.Arrays;
import org.apache.commons.codec.DecoderException;
import org.apache.commons.codec.EncoderException;
import org.junit.Assert;
import org.junit.Ignore;
import org.junit.Test;
/**
 * Percent codec test cases.
 */
public class PercentCodecTest {

    /** ASCII letters are safe by default and must round-trip unchanged. */
    @Test
    public void testBasicEncodeDecode() throws Exception {
        PercentCodec percentCodec = new PercentCodec();
        final String input = "abcdABCD";
        byte[] encoded = percentCodec.encode(input.getBytes(Charset.forName("UTF-8")));
        final String encodedS = new String(encoded, "UTF-8");
        byte[] decoded = percentCodec.decode(encoded);
        final String decodedS = new String(decoded, "UTF-8");
        assertEquals("Basic PercentCodec encoding test", input, encodedS);
        assertEquals("Basic PercentCodec decoding test", input, decodedS);
    }

    // Disabled: space is currently passed through unescaped by the default codec,
    // so the %20 expectation does not hold yet.
    @Test
    @Ignore
    public void testBasicSpace() throws Exception {
        PercentCodec percentCodec = new PercentCodec();
        final String input = " ";
        byte[] encoded = percentCodec.encode(input.getBytes(Charset.forName("UTF-8")));
        Assert.assertArrayEquals("%20".getBytes(Charset.forName("UTF-8")), encoded);
    }

    /** Safe characters must survive the Object-typed encode/decode round trip. */
    @Test
    public void testSafeCharEncodeDecodeObject() throws Exception {
        PercentCodec percentCodec = new PercentCodec(null, true);
        final String input = "abc123_-.*";
        Object encoded = percentCodec.encode((Object) input.getBytes(Charset.forName("UTF-8")));
        final String encodedS = new String((byte[]) encoded, "UTF-8");
        Object decoded = percentCodec.decode(encoded);
        final String decodedS = new String((byte[]) decoded, "UTF-8");
        assertEquals("Basic PercentCodec safe char encoding test", input, encodedS);
        assertEquals("Basic PercentCodec safe char decoding test", input, decodedS);
    }

    /** Non-ASCII bytes must be escaped (%XX per UTF-8 byte) and round-trip. */
    @Test
    public void testUnsafeCharEncodeDecode() throws Exception {
        PercentCodec percentCodec = new PercentCodec();
        final String input = "\u03B1\u03B2\u03B3\u03B4\u03B5\u03B6% ";
        byte[] encoded = percentCodec.encode(input.getBytes(Charset.forName("UTF-8")));
        final String encodedS = new String(encoded, "UTF-8");
        byte[] decoded = percentCodec.decode(encoded);
        final String decodedS = new String(decoded, "UTF-8");
        assertEquals("Basic PercentCodec unsafe char encoding test", "%CE%B1%CE%B2%CE%B3%CE%B4%CE%B5%CE%B6%25 ", encodedS);
        assertEquals("Basic PercentCodec unsafe char decoding test", input, decodedS);
    }

    /** Extra bytes supplied to the constructor ("abcdef") must also be escaped. */
    @Test
    public void testConfigurablePercentEncoder() throws Exception {
        final String input = "abc123_-.*\u03B1\u03B2";
        PercentCodec percentCodec = new PercentCodec("abcdef".getBytes("UTF-8"), false);
        byte[] encoded = percentCodec.encode(input.getBytes(Charset.forName("UTF-8")));
        final String encodedS = new String(encoded, "UTF-8");
        assertEquals("Configurable PercentCodec encoding test", "%61%62%63123_-.*%CE%B1%CE%B2", encodedS);
        final byte[] decoded = percentCodec.decode(encoded);
        assertEquals("Configurable PercentCodec decoding test", new String(decoded, "UTF-8"), input);
    }

    /** Null and empty inputs must be returned as-is by both directions. */
    @Test
    public void testPercentEncoderDecoderWithNullOrEmptyInput() throws Exception {
        PercentCodec percentCodec = new PercentCodec(null, true);
        assertEquals("Null input value encoding test", percentCodec.encode(null), null);
        assertEquals("Null input value decoding test", percentCodec.decode(null), null);
        byte[] emptyInput = "".getBytes("UTF-8");
        assertEquals("Empty input value encoding test", percentCodec.encode(emptyInput), emptyInput);
        assertTrue("Empty input value decoding test", Arrays.equals(percentCodec.decode(emptyInput), emptyInput));
    }

    /** With plusForSpace=true, spaces encode to '+' and '+' decodes back to space. */
    @Test
    public void testPercentEncoderDecoderWithPlusForSpace() throws Exception {
        final String input = "a b c d";
        PercentCodec percentCodec = new PercentCodec(null, true);
        byte[] encoded = percentCodec.encode(input.getBytes(Charset.forName("UTF-8")));
        final String encodedS = new String(encoded, "UTF-8");
        assertEquals("PercentCodec plus for space encoding test", "a+b+c+d", encodedS);
        byte[] decode = percentCodec.decode(encoded);
        assertEquals("PercentCodec plus for space decoding test", new String(decode, "UTF-8"), input);
    }

    /** encode(Object) must reject types other than byte[]. */
    @Test(expected = EncoderException.class)
    public void testEncodeUnsupportedObject() throws Exception {
        PercentCodec percentCodec = new PercentCodec();
        percentCodec.encode("test");
    }

    /** encode(Object) must pass a null input straight through as null. */
    @Test
    public void testEncodeNullObject() throws Exception {
        PercentCodec percentCodec = new PercentCodec();
        assertEquals(percentCodec.encode((Object) null), null);
    }

    /** decode(Object) must reject types other than byte[]. */
    @Test(expected = DecoderException.class)
    public void testDecodeUnsupportedObject() throws Exception {
        PercentCodec percentCodec = new PercentCodec();
        percentCodec.decode("test");
    }

    /** decode(Object) must pass a null input straight through as null. */
    @Test
    public void testDecodeNullObject() throws Exception {
        PercentCodec percentCodec = new PercentCodec();
        assertEquals(percentCodec.decode((Object) null), null);
    }

    /** Decoding a truncated escape sequence must fail with a DecoderException. */
    @Test
    public void testDecodeInvalidEncodedResultDecoding() throws Exception {
        String inputS = "\u03B1\u03B2";
        PercentCodec percentCodec = new PercentCodec();
        byte[] encoded = percentCodec.encode(inputS.getBytes("UTF-8"));
        try {
            percentCodec.decode(Arrays.copyOf(encoded, encoded.length - 1)); //exclude one byte
            // Fix: without this fail() the test silently passed when no
            // exception was thrown at all.
            Assert.fail("Expected DecoderException for truncated percent-encoded input");
        } catch (Exception e) {
            assertTrue(DecoderException.class.isInstance(e) &&
                        ArrayIndexOutOfBoundsException.class.isInstance(e.getCause()));
        }
    }
}
|
[CODEC-240] Add Percent-Encoding Codec (described in RFC3986 and RFC7578). Sort members.
git-svn-id: cde5a1597f50f50feab6f72941f6b219c34291a1@1814516 13f79535-47bb-0310-9956-ffa450edef68
|
src/test/java/org/apache/commons/codec/net/PercentCodecTest.java
|
[CODEC-240] Add Percent-Encoding Codec (described in RFC3986 and RFC7578). Sort members.
|
|
Java
|
apache-2.0
|
1111e897412520ed0a2834d7f5d77b6a04df6625
| 0
|
sanyaade-g2g-repos/orientdb,joansmith/orientdb,orientechnologies/orientdb,rprabhat/orientdb,mbhulin/orientdb,mbhulin/orientdb,alonsod86/orientdb,mmacfadden/orientdb,alonsod86/orientdb,wyzssw/orientdb,intfrr/orientdb,cstamas/orientdb,rprabhat/orientdb,cstamas/orientdb,wouterv/orientdb,mmacfadden/orientdb,cstamas/orientdb,joansmith/orientdb,sanyaade-g2g-repos/orientdb,allanmoso/orientdb,intfrr/orientdb,wyzssw/orientdb,rprabhat/orientdb,rprabhat/orientdb,tempbottle/orientdb,alonsod86/orientdb,allanmoso/orientdb,allanmoso/orientdb,tempbottle/orientdb,wouterv/orientdb,cstamas/orientdb,joansmith/orientdb,tempbottle/orientdb,giastfader/orientdb,sanyaade-g2g-repos/orientdb,orientechnologies/orientdb,giastfader/orientdb,giastfader/orientdb,giastfader/orientdb,joansmith/orientdb,wyzssw/orientdb,intfrr/orientdb,orientechnologies/orientdb,mbhulin/orientdb,tempbottle/orientdb,wouterv/orientdb,alonsod86/orientdb,mmacfadden/orientdb,wyzssw/orientdb,wouterv/orientdb,sanyaade-g2g-repos/orientdb,intfrr/orientdb,allanmoso/orientdb,orientechnologies/orientdb,mmacfadden/orientdb,mbhulin/orientdb
|
/*
*
* * Copyright 2014 Orient Technologies LTD (info(at)orientechnologies.com)
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
* *
* * For more information: http://www.orientechnologies.com
*
*/
package com.orientechnologies.common.comparator;
import java.util.Comparator;
/**
 * Comparator that calls {@link Comparable#compareTo(Object)} methods for getting results for all {@link Comparable} types.
 * Otherwise result of {@link Comparator} that returned from {@link OComparatorFactory} will be used.
 *
 * The special case is null values. Null is treated as smallest value against other values. If both arguments are null they are
 * treated as equal.
 *
 * @author Andrey Lomakin
 * @since 03.07.12
 */
public class ODefaultComparator implements Comparator<Object> {
  /** Shared stateless instance; the comparator holds no mutable state. */
  public static final ODefaultComparator INSTANCE = new ODefaultComparator();

  /**
   * Compare two objects, ordering nulls first, then delegating to
   * {@link Comparable#compareTo(Object)} or a factory-provided comparator.
   *
   * @throws IllegalStateException if neither strategy can compare the objects
   */
  @SuppressWarnings("unchecked")
  @Override
  public int compare(final Object objectOne, final Object objectTwo) {
    // Null is the smallest value; two nulls compare equal.
    if (objectOne == null) {
      if (objectTwo == null)
        return 0;
      else
        return -1;
    } else if (objectTwo == null)
      return 1;

    if (objectOne == objectTwo)
      // FAST COMPARISON: identical references are trivially equal.
      return 0;

    if (objectOne instanceof Comparable)
      return ((Comparable<Object>) objectOne).compareTo(objectTwo);

    final Comparator<?> comparator = OComparatorFactory.INSTANCE.getComparator(objectOne.getClass());
    if (comparator != null)
      return ((Comparator<Object>) comparator).compare(objectOne, objectTwo);

    // Fixed: the original message lacked a space after "class", producing
    // e.g. "Object of classFoo can not be compared".
    throw new IllegalStateException("Object of class " + objectOne.getClass().getName() + " can not be compared");
  }
}
|
core/src/main/java/com/orientechnologies/common/comparator/ODefaultComparator.java
|
/*
*
* * Copyright 2014 Orient Technologies LTD (info(at)orientechnologies.com)
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
* *
* * For more information: http://www.orientechnologies.com
*
*/
package com.orientechnologies.common.comparator;
import java.util.Comparator;
/**
* Comparator that calls {@link Comparable#compareTo(Object)} methods for getting results for all {@link Comparable} types.
* Otherwise result of {@link Comparator} that returned from {@link OComparatorFactory} will be used.
*
* The special case is null values. Null is treated as smallest value against other values. If both arguments are null they are
* treated as equal.
*
* @author Andrey Lomakin
* @since 03.07.12
*/
public class ODefaultComparator implements Comparator<Object> {
  public static final ODefaultComparator INSTANCE = new ODefaultComparator();

  /**
   * Compares two objects, treating {@code null} as smaller than any non-null value
   * (two nulls compare equal). Otherwise {@link Comparable#compareTo(Object)} is
   * used when available, falling back to a {@link Comparator} obtained from
   * {@link OComparatorFactory}.
   *
   * @param objectOne the first object, may be {@code null}
   * @param objectTwo the second object, may be {@code null}
   * @return a negative integer, zero, or a positive integer
   * @throws IllegalStateException if neither mechanism can compare the objects
   */
  @SuppressWarnings("unchecked")
  @Override
  public int compare(final Object objectOne, final Object objectTwo) {
    if (objectOne == null) {
      if (objectTwo == null)
        return 0;
      else
        return -1;
    } else if (objectTwo == null)
      return 1;

    // Improvement: identical references are trivially equal; avoids a
    // potentially expensive compareTo/comparator call.
    if (objectOne == objectTwo)
      return 0;

    if (objectOne instanceof Comparable)
      return ((Comparable<Object>) objectOne).compareTo(objectTwo);

    final Comparator<?> comparator = OComparatorFactory.INSTANCE.getComparator(objectOne.getClass());
    if (comparator != null)
      return ((Comparator<Object>) comparator).compare(objectOne, objectTwo);

    // Bug fix: the original message lacked a space after "class".
    throw new IllegalStateException("Object of class " + objectOne.getClass().getName() + " can not be compared");
  }
}
|
Minor: small optimization
|
core/src/main/java/com/orientechnologies/common/comparator/ODefaultComparator.java
|
Minor: small optimization
|
|
Java
|
apache-2.0
|
4edb2b36807aec2ee7b3fae6a01146ee0faaea3a
| 0
|
jacksonic/vjlofvhjfgm,jacksonic/vjlofvhjfgm,foam-framework/foam2,foam-framework/foam2,foam-framework/foam2,jacksonic/vjlofvhjfgm,foam-framework/foam2,foam-framework/foam2
|
/**
* @license
* Copyright 2017 The FOAM Authors. All Rights Reserved.
* http://www.apache.org/licenses/LICENSE-2.0
*/
package foam.dao.index;
import foam.core.FObject;
import foam.dao.Sink;
import foam.mlang.order.Comparator;
import foam.mlang.predicate.Predicate;
/**
 * An {@link Index} that forwards every operation to a wrapped delegate index.
 * Intended as a base class for index decorators: subclasses override only the
 * methods whose behaviour they want to change.
 */
public class ProxyIndex
  implements Index
{
  // The wrapped index every call is forwarded to.
  protected Index delegate_;

  public ProxyIndex(Index index) {
    setDelegate(index);
  }

  /** Returns the wrapped index. */
  public Index getDelegate() {
    return delegate_;
  }

  /** Replaces the wrapped index. */
  public void setDelegate(foam.dao.index.Index val) {
    delegate_ = val;
  }

  @Override
  public void onAdd(Sink sink) {
    getDelegate().onAdd(sink);
  }

  @Override
  public Object put(Object state, FObject value) {
    return getDelegate().put(state, value);
  }

  @Override
  public Object remove(Object state, FObject value) {
    return getDelegate().remove(state, value);
  }

  @Override
  public Object removeAll() {
    return getDelegate().removeAll();
  }

  @Override
  public FindPlan planFind(Object state, Object key) {
    return getDelegate().planFind(state, key);
  }

  @Override
  public SelectPlan planSelect(Object state, Sink sink, long skip, long limit, Comparator order, Predicate predicate) {
    return getDelegate().planSelect(state, sink, skip, limit, order, predicate);
  }

  @Override
  public long size(Object state) {
    return getDelegate().size(state);
  }
}
|
src/foam/dao/index/ProxyIndex.java
|
/**
* @license
* Copyright 2017 The FOAM Authors. All Rights Reserved.
* http://www.apache.org/licenses/LICENSE-2.0
*/
package foam.dao.index;
import foam.core.FObject;
import foam.dao.Sink;
import foam.mlang.order.Comparator;
import foam.mlang.predicate.Predicate;
/**
 * An {@link Index} that forwards every operation to a wrapped delegate index.
 * Intended as a base class for index decorators: subclasses override only the
 * methods whose behaviour they want to change.
 */
public class ProxyIndex
  implements Index
{
  // The wrapped index every call is forwarded to.
  // Bug fix / cleanup: the former private delegateIsSet_ flag was removed.
  // The constructor always calls setDelegate(), so the flag was invariantly
  // true after construction and its null-returning branch was dead code.
  protected Index delegate_;

  public ProxyIndex(Index index) {
    setDelegate(index);
  }

  /** Returns the wrapped index. */
  public Index getDelegate() {
    return delegate_;
  }

  /** Replaces the wrapped index. */
  public void setDelegate(foam.dao.index.Index val) {
    delegate_ = val;
  }

  @Override
  public void onAdd(Sink sink) {
    getDelegate().onAdd(sink);
  }

  @Override
  public Object put(Object state, FObject value) {
    return getDelegate().put(state, value);
  }

  @Override
  public Object remove(Object state, FObject value) {
    return getDelegate().remove(state, value);
  }

  @Override
  public Object removeAll() {
    return getDelegate().removeAll();
  }

  @Override
  public FindPlan planFind(Object state, Object key) {
    return getDelegate().planFind(state, key);
  }

  @Override
  public SelectPlan planSelect(Object state, Sink sink, long skip, long limit, Comparator order, Predicate predicate) {
    return getDelegate().planSelect(state, sink, skip, limit, order, predicate);
  }

  @Override
  public long size(Object state) {
    return getDelegate().size(state);
  }
}
|
Removed delegateIsSet
|
src/foam/dao/index/ProxyIndex.java
|
Removed delegateIsSet
|
|
Java
|
apache-2.0
|
f272dcadb98ff0f911d8b960c61c71468a9c8de9
| 0
|
mchaston/OakFunds,mchaston/OakFunds,mchaston/OakFunds,mchaston/OakFunds,mchaston/OakFunds,mchaston/OakFunds,mchaston/OakFunds
|
/*
* Copyright 2014 Miles Chaston
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.chaston.oakfunds.xsrf;
import com.google.appengine.api.utils.SystemProperty;
import com.google.inject.Inject;
import org.chaston.oakfunds.security.AuthenticationException;
import org.chaston.oakfunds.security.UserAuthenticator;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.io.PrintWriter;
import java.security.GeneralSecurityException;
import java.util.logging.Logger;
/**
* TODO(mchaston): write JavaDocs
*/
/**
 * Helper for issuing and verifying XSRF tokens.
 * <p>
 * A token is a signature over the authenticated user's identifier. It is
 * delivered to the browser as an {@code XSRF-TOKEN} cookie and echoed back by
 * the client in the {@code X-XSRF-TOKEN} request header.
 */
public class XsrfUtil {
  private static final Logger LOG = Logger.getLogger(XsrfUtil.class.getName());

  private final UserAuthenticator userAuthenticator;
  private final XsrfSigner xsrfSigner;

  @Inject
  XsrfUtil(UserAuthenticator userAuthenticator, XsrfSigner xsrfSigner) {
    this.userAuthenticator = userAuthenticator;
    this.xsrfSigner = xsrfSigner;
  }

  /**
   * Writes a JavaScript snippet installing the XSRF-TOKEN cookie for the
   * currently logged-in user; does nothing when no user is logged in.
   *
   * @param response the response to write the snippet to
   * @throws IOException if the response writer cannot be obtained
   * @throws ServletException if the token signature cannot be created
   */
  void addXsrfToken(HttpServletResponse response)
      throws IOException, ServletException {
    PrintWriter writer = response.getWriter();
    if (userAuthenticator.isUserLoggedIn()) {
      // When logged in, add the XSRF cookie
      StringBuilder cookie = new StringBuilder();
      try {
        cookie.append("XSRF-TOKEN=").append(xsrfSigner.sign(createMaterial()));
      } catch (GeneralSecurityException e) {
        // Bug fix: preserve the cause instead of silently dropping it.
        throw new ServletException("Failure to create XSRF signature.", e);
      }
      if (SystemProperty.environment.value() == SystemProperty.Environment.Value.Production) {
        // Only force secure on production as development is not on HTTPS.
        cookie.append(";secure");
      }
      writer.println("document.cookie=\"" + cookie + "\";");
    }
  }

  /**
   * Verifies the XSRF token supplied in the {@code X-XSRF-TOKEN} header.
   *
   * @return true when the token matches the current user's material
   */
  public boolean verifyXsrfToken(HttpServletRequest request) throws ServletException {
    return verifyXsrfToken(request.getHeader("X-XSRF-TOKEN"));
  }

  /**
   * Verifies a raw XSRF token string. Missing or empty tokens fail verification.
   *
   * @return true when the token matches the current user's material
   */
  public boolean verifyXsrfToken(String token) throws ServletException {
    if (token == null || token.isEmpty()) {
      return false;
    }
    try {
      return xsrfSigner.verify(createMaterial(), token);
    } catch (GeneralSecurityException e) {
      // Bug fix: include the exception so the failure can be diagnosed.
      LOG.log(java.util.logging.Level.WARNING, "Unable to verify XSRF token.", e);
      return false;
    }
  }

  // The signed material is the authenticated user's identifier, tying the
  // token to the current session's user.
  private String createMaterial() throws ServletException {
    try {
      return userAuthenticator.getAuthenticatedUser().getIdentifier();
    } catch (AuthenticationException e) {
      throw new ServletException("Failed to authenticate user", e);
    }
  }
}
|
src/org/chaston/oakfunds/xsrf/XsrfUtil.java
|
/*
* Copyright 2014 Miles Chaston
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.chaston.oakfunds.xsrf;
import com.google.appengine.api.utils.SystemProperty;
import com.google.inject.Inject;
import org.chaston.oakfunds.security.AuthenticationException;
import org.chaston.oakfunds.security.UserAuthenticator;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.io.PrintWriter;
import java.security.GeneralSecurityException;
import java.util.logging.Logger;
/**
* TODO(mchaston): write JavaDocs
*/
/**
 * Helper for issuing and verifying XSRF tokens.
 * <p>
 * A token is a signature over the authenticated user's identifier. It is
 * delivered to the browser as an {@code XSRF-TOKEN} cookie and echoed back by
 * the client in the {@code X-XSRF-TOKEN} request header.
 */
public class XsrfUtil {
  private static final Logger LOG = Logger.getLogger(XsrfUtil.class.getName());

  private final UserAuthenticator userAuthenticator;
  private final XsrfSigner xsrfSigner;

  @Inject
  XsrfUtil(UserAuthenticator userAuthenticator, XsrfSigner xsrfSigner) {
    this.userAuthenticator = userAuthenticator;
    this.xsrfSigner = xsrfSigner;
  }

  /**
   * Writes a JavaScript snippet installing the XSRF-TOKEN cookie for the
   * currently logged-in user; does nothing when no user is logged in.
   *
   * @param response the response to write the snippet to
   * @throws IOException if the response writer cannot be obtained
   * @throws ServletException if the token signature cannot be created
   */
  public void addXsrfToken(HttpServletResponse response)
      throws IOException, ServletException {
    PrintWriter writer = response.getWriter();
    if (userAuthenticator.isUserLoggedIn()) {
      // When logged in, add the XSRF cookie
      StringBuilder cookie = new StringBuilder();
      try {
        cookie.append("XSRF-TOKEN=").append(xsrfSigner.sign(createMaterial()));
      } catch (GeneralSecurityException e) {
        // Bug fix: preserve the cause instead of silently dropping it.
        throw new ServletException("Failure to create XSRF signature.", e);
      }
      if (SystemProperty.environment.value() == SystemProperty.Environment.Value.Production) {
        // Only force secure on production as development is not on HTTPS.
        cookie.append(";secure");
      }
      writer.println("document.cookie=\"" + cookie + "\";");
    }
  }

  /**
   * Verifies the XSRF token supplied in the {@code X-XSRF-TOKEN} header.
   *
   * @return true when the token matches the current user's material
   */
  public boolean verifyXsrfToken(HttpServletRequest request) throws ServletException {
    return verifyXsrfToken(request.getHeader("X-XSRF-TOKEN"));
  }

  /**
   * Verifies a raw XSRF token string. Missing or empty tokens fail verification.
   *
   * @return true when the token matches the current user's material
   */
  public boolean verifyXsrfToken(String token) throws ServletException {
    if (token == null || token.isEmpty()) {
      return false;
    }
    try {
      return xsrfSigner.verify(createMaterial(), token);
    } catch (GeneralSecurityException e) {
      // Bug fix: include the exception so the failure can be diagnosed.
      LOG.log(java.util.logging.Level.WARNING, "Unable to verify XSRF token.", e);
      return false;
    }
  }

  // The signed material is the authenticated user's identifier, tying the
  // token to the current session's user.
  private String createMaterial() throws ServletException {
    try {
      return userAuthenticator.getAuthenticatedUser().getIdentifier();
    } catch (AuthenticationException e) {
      throw new ServletException("Failed to authenticate user", e);
    }
  }
}
|
Tweaked the XSRF utils.
|
src/org/chaston/oakfunds/xsrf/XsrfUtil.java
|
Tweaked the XSRF utils.
|
|
Java
|
apache-2.0
|
299a0c4a1768efbca2c61abe45152e8b80f7b69d
| 0
|
matrix-org/matrix-android-sdk,matrix-org/matrix-android-sdk,matrix-org/matrix-android-sdk
|
/*
* Copyright 2014 OpenMarket Ltd
* Copyright 2017 Vector Creations Ltd
* Copyright 2018 New Vector Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.matrix.androidsdk;
import android.content.Context;
import android.content.IntentFilter;
import android.net.ConnectivityManager;
import android.net.Uri;
import android.os.AsyncTask;
import android.os.Handler;
import android.os.Looper;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.text.TextUtils;
import com.google.gson.JsonObject;
import org.matrix.androidsdk.call.MXCallsManager;
import org.matrix.androidsdk.crypto.MXCrypto;
import org.matrix.androidsdk.crypto.MXCryptoConfig;
import org.matrix.androidsdk.data.DataRetriever;
import org.matrix.androidsdk.data.MyUser;
import org.matrix.androidsdk.data.Room;
import org.matrix.androidsdk.data.RoomState;
import org.matrix.androidsdk.data.RoomSummary;
import org.matrix.androidsdk.data.RoomTag;
import org.matrix.androidsdk.data.comparator.RoomComparatorWithTag;
import org.matrix.androidsdk.data.cryptostore.IMXCryptoStore;
import org.matrix.androidsdk.data.cryptostore.MXFileCryptoStore;
import org.matrix.androidsdk.data.metrics.MetricsListener;
import org.matrix.androidsdk.data.store.IMXStore;
import org.matrix.androidsdk.data.store.MXStoreListener;
import org.matrix.androidsdk.db.MXLatestChatMessageCache;
import org.matrix.androidsdk.db.MXMediasCache;
import org.matrix.androidsdk.groups.GroupsManager;
import org.matrix.androidsdk.network.NetworkConnectivityReceiver;
import org.matrix.androidsdk.rest.callback.ApiCallback;
import org.matrix.androidsdk.rest.callback.ApiFailureCallback;
import org.matrix.androidsdk.rest.callback.SimpleApiCallback;
import org.matrix.androidsdk.rest.client.AccountDataRestClient;
import org.matrix.androidsdk.rest.client.CallRestClient;
import org.matrix.androidsdk.rest.client.CryptoRestClient;
import org.matrix.androidsdk.rest.client.EventsRestClient;
import org.matrix.androidsdk.rest.client.FilterRestClient;
import org.matrix.androidsdk.rest.client.GroupsRestClient;
import org.matrix.androidsdk.rest.client.LoginRestClient;
import org.matrix.androidsdk.rest.client.MediaScanRestClient;
import org.matrix.androidsdk.rest.client.PresenceRestClient;
import org.matrix.androidsdk.rest.client.ProfileRestClient;
import org.matrix.androidsdk.rest.client.PushRulesRestClient;
import org.matrix.androidsdk.rest.client.PushersRestClient;
import org.matrix.androidsdk.rest.client.RoomsRestClient;
import org.matrix.androidsdk.rest.client.ThirdPidRestClient;
import org.matrix.androidsdk.rest.model.CreateRoomParams;
import org.matrix.androidsdk.rest.model.CreateRoomResponse;
import org.matrix.androidsdk.rest.model.Event;
import org.matrix.androidsdk.rest.model.MatrixError;
import org.matrix.androidsdk.rest.model.ReceiptData;
import org.matrix.androidsdk.rest.model.RoomDirectoryVisibility;
import org.matrix.androidsdk.rest.model.RoomMember;
import org.matrix.androidsdk.rest.model.User;
import org.matrix.androidsdk.rest.model.Versions;
import org.matrix.androidsdk.rest.model.bingrules.BingRule;
import org.matrix.androidsdk.rest.model.filter.FilterBody;
import org.matrix.androidsdk.rest.model.filter.FilterResponse;
import org.matrix.androidsdk.rest.model.login.Credentials;
import org.matrix.androidsdk.rest.model.login.LoginFlow;
import org.matrix.androidsdk.rest.model.login.RegistrationFlowResponse;
import org.matrix.androidsdk.rest.model.message.MediaMessage;
import org.matrix.androidsdk.rest.model.message.Message;
import org.matrix.androidsdk.rest.model.pid.DeleteDeviceAuth;
import org.matrix.androidsdk.rest.model.pid.DeleteDeviceParams;
import org.matrix.androidsdk.rest.model.search.SearchResponse;
import org.matrix.androidsdk.rest.model.search.SearchUsersResponse;
import org.matrix.androidsdk.rest.model.sync.DevicesListResponse;
import org.matrix.androidsdk.rest.model.sync.RoomResponse;
import org.matrix.androidsdk.sync.DefaultEventsThreadListener;
import org.matrix.androidsdk.sync.EventsThread;
import org.matrix.androidsdk.sync.EventsThreadListener;
import org.matrix.androidsdk.util.BingRulesManager;
import org.matrix.androidsdk.util.ContentManager;
import org.matrix.androidsdk.util.ContentUtils;
import org.matrix.androidsdk.util.FilterUtil;
import org.matrix.androidsdk.util.JsonUtils;
import org.matrix.androidsdk.util.Log;
import org.matrix.androidsdk.util.UnsentEventsManager;
import org.matrix.androidsdk.util.VersionsUtil;
import org.matrix.olm.OlmManager;
import java.io.File;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.regex.Pattern;
/**
* Class that represents one user's session with a particular home server.
* There can potentially be multiple sessions for handling multiple accounts.
*/
public class MXSession {
    private static final String LOG_TAG = MXSession.class.getSimpleName();

    // Core session components.
    private DataRetriever mDataRetriever;
    private MXDataHandler mDataHandler;
    private EventsThread mEventsThread;
    private final Credentials mCredentials;

    // Api clients (one REST client per home-server API family)
    private EventsRestClient mEventsRestClient;
    private ProfileRestClient mProfileRestClient;
    private PresenceRestClient mPresenceRestClient;
    private RoomsRestClient mRoomsRestClient;
    private final PushRulesRestClient mPushRulesRestClient;
    private PushersRestClient mPushersRestClient;
    private final ThirdPidRestClient mThirdPidRestClient;
    private final CallRestClient mCallRestClient;
    private final AccountDataRestClient mAccountDataRestClient;
    private final CryptoRestClient mCryptoRestClient;
    private final LoginRestClient mLoginRestClient;
    private final GroupsRestClient mGroupsRestClient;
    private final MediaScanRestClient mMediaScanRestClient;
    private final FilterRestClient mFilterRestClient;

    private ApiFailureCallback mFailureCallback;
    private ContentManager mContentManager;
    public MXCallsManager mCallsManager;
    private MetricsListener mMetricsListener;

    // Application context and connectivity / retry plumbing.
    private Context mAppContent;
    private NetworkConnectivityReceiver mNetworkConnectivityReceiver;
    private UnsentEventsManager mUnsentEventsManager;

    // Local caches for latest messages and media.
    private MXLatestChatMessageCache mLatestChatMessageCache;
    private MXMediasCache mMediasCache;

    private BingRulesManager mBingRulesManager = null;

    // Set to false once clear() has been called on this session.
    private boolean mIsAliveSession = true;

    // online status
    private boolean mIsOnline = false;
    private int mSyncTimeout = 0;
    private int mSyncDelay = 0;

    private final HomeServerConnectionConfig mHsConfig;

    // True if file encryption is enabled
    private boolean mEnableFileEncryption;

    // the application is launched from a notification
    // so, mEventsThread.start might be not ready
    private boolean mIsBgCatchupPending = false;

    private FilterBody mCurrentFilter = new FilterBody();

    // tell if the data save mode is enabled
    private boolean mUseDataSaveMode;

    // the groups manager
    private GroupsManager mGroupsManager;

    // load the crypto libs.
    public static OlmManager mOlmManager = new OlmManager();
    /**
     * Create a basic session for direct API calls.
     * <p>
     * Only the REST clients are initialised here: no data handler, no caches,
     * no event stream. Use the other constructor for a full session.
     *
     * @param hsConfig the home server connection config
     */
    private MXSession(HomeServerConnectionConfig hsConfig) {
        mCredentials = hsConfig.getCredentials();
        mHsConfig = hsConfig;
        // One REST client per server-side API family, all built from the same
        // home-server configuration.
        mEventsRestClient = new EventsRestClient(hsConfig);
        mProfileRestClient = new ProfileRestClient(hsConfig);
        mPresenceRestClient = new PresenceRestClient(hsConfig);
        mRoomsRestClient = new RoomsRestClient(hsConfig);
        mPushRulesRestClient = new PushRulesRestClient(hsConfig);
        mPushersRestClient = new PushersRestClient(hsConfig);
        mThirdPidRestClient = new ThirdPidRestClient(hsConfig);
        mCallRestClient = new CallRestClient(hsConfig);
        mAccountDataRestClient = new AccountDataRestClient(hsConfig);
        mCryptoRestClient = new CryptoRestClient(hsConfig);
        mLoginRestClient = new LoginRestClient(hsConfig);
        mGroupsRestClient = new GroupsRestClient(hsConfig);
        mMediaScanRestClient = new MediaScanRestClient(hsConfig);
        mFilterRestClient = new FilterRestClient(hsConfig);
    }
    /**
     * Create a user session with a data handler.
     * Private, please use the MxSession.Builder now
     *
     * @param hsConfig    the home server connection config
     * @param dataHandler the data handler
     * @param appContext  the application context
     */
    private MXSession(HomeServerConnectionConfig hsConfig, MXDataHandler dataHandler, Context appContext) {
        this(hsConfig);
        mDataHandler = dataHandler;
        // React to store lifecycle events (readiness, corruption, post-load).
        mDataHandler.getStore().addMXStoreListener(new MXStoreListener() {
            @Override
            public void onStoreReady(String accountId) {
                Log.d(LOG_TAG, "## onStoreReady()");
                getDataHandler().onStoreReady();
            }
            @Override
            public void onStoreCorrupted(String accountId, String description) {
                Log.d(LOG_TAG, "## onStoreCorrupted() : token " + getDataHandler().getStore().getEventStreamToken());
                // nothing was saved
                if (null == getDataHandler().getStore().getEventStreamToken()) {
                    getDataHandler().onStoreReady();
                }
            }
            @Override
            public void postProcess(String accountId) {
                getDataHandler().checkPermanentStorageData();
                // test if the crypto instance has already been created
                // NOTE(review): mCrypto, mEnableCryptoWhenStartingMXSession and
                // checkCrypto() are declared elsewhere in this class (outside
                // this excerpt) — confirm their semantics there.
                if (null == mCrypto) {
                    MXFileCryptoStore store = new MXFileCryptoStore(mEnableFileEncryption);
                    store.initWithCredentials(mAppContent, mCredentials);
                    if (store.hasData() || mEnableCryptoWhenStartingMXSession) {
                        Log.d(LOG_TAG, "## postProcess() : create the crypto instance for session " + this);
                        checkCrypto();
                    } else {
                        Log.e(LOG_TAG, "## postProcess() : no crypto data");
                    }
                } else {
                    Log.e(LOG_TAG, "## postProcess() : mCrypto is already created");
                }
            }
            @Override
            public void onReadReceiptsLoaded(final String roomId) {
                // Forward the senders of the loaded receipts to the data handler.
                final List<ReceiptData> receipts = mDataHandler.getStore().getEventReceipts(roomId, null, false, false);
                final List<String> senders = new ArrayList<>();
                for (ReceiptData receipt : receipts) {
                    senders.add(receipt.userId);
                }
                mDataHandler.onReceiptEvent(roomId, senders);
            }
        });
        // Initialize a data retriever with rest clients
        mDataRetriever = new DataRetriever();
        mDataRetriever.setRoomsRestClient(mRoomsRestClient);
        mDataHandler.setDataRetriever(mDataRetriever);
        mDataHandler.setProfileRestClient(mProfileRestClient);
        mDataHandler.setPresenceRestClient(mPresenceRestClient);
        mDataHandler.setThirdPidRestClient(mThirdPidRestClient);
        mDataHandler.setRoomsRestClient(mRoomsRestClient);
        mDataHandler.setEventsRestClient(mEventsRestClient);
        mDataHandler.setAccountDataRestClient(mAccountDataRestClient);
        // application context
        mAppContent = appContext;
        // Track connectivity so pending requests can be retried when back online.
        mNetworkConnectivityReceiver = new NetworkConnectivityReceiver();
        mNetworkConnectivityReceiver.checkNetworkConnection(appContext);
        mDataHandler.setNetworkConnectivityReceiver(mNetworkConnectivityReceiver);
        mAppContent.registerReceiver(mNetworkConnectivityReceiver, new IntentFilter(ConnectivityManager.CONNECTIVITY_ACTION));
        mBingRulesManager = new BingRulesManager(this, mNetworkConnectivityReceiver);
        mDataHandler.setPushRulesManager(mBingRulesManager);
        mUnsentEventsManager = new UnsentEventsManager(mNetworkConnectivityReceiver, mDataHandler);
        mContentManager = new ContentManager(hsConfig, mUnsentEventsManager);
        //
        mCallsManager = new MXCallsManager(this, mAppContent);
        mDataHandler.setCallsManager(mCallsManager);
        // the rest client
        mEventsRestClient.setUnsentEventsManager(mUnsentEventsManager);
        mProfileRestClient.setUnsentEventsManager(mUnsentEventsManager);
        mPresenceRestClient.setUnsentEventsManager(mUnsentEventsManager);
        mRoomsRestClient.setUnsentEventsManager(mUnsentEventsManager);
        mPushRulesRestClient.setUnsentEventsManager(mUnsentEventsManager);
        mThirdPidRestClient.setUnsentEventsManager(mUnsentEventsManager);
        mCallRestClient.setUnsentEventsManager(mUnsentEventsManager);
        mAccountDataRestClient.setUnsentEventsManager(mUnsentEventsManager);
        mCryptoRestClient.setUnsentEventsManager(mUnsentEventsManager);
        mLoginRestClient.setUnsentEventsManager(mUnsentEventsManager);
        mGroupsRestClient.setUnsentEventsManager(mUnsentEventsManager);
        // return the default cache manager
        mLatestChatMessageCache = new MXLatestChatMessageCache(mCredentials.userId);
        mMediasCache = new MXMediasCache(mContentManager, mNetworkConnectivityReceiver, mCredentials.userId, appContext);
        mDataHandler.setMediasCache(mMediasCache);
        mMediaScanRestClient.setMxStore(mDataHandler.getStore());
        mMediasCache.setMediaScanRestClient(mMediaScanRestClient);
        mGroupsManager = new GroupsManager(mDataHandler, mGroupsRestClient);
        mDataHandler.setGroupsManager(mGroupsManager);
    }
    /**
     * Logs an error (with a stack trace) if this session has already been
     * released via {@link #clear(Context)}.
     * <p>
     * Intentionally does not throw: a released session is reported, not rejected.
     */
    private void checkIfAlive() {
        synchronized (this) {
            if (!mIsAliveSession) {
                // Create an Exception to log the stack trace
                Log.e(LOG_TAG, "Use of a released session", new Exception("Use of a released session"));
                //throw new AssertionError("Should not used a cleared mxsession ");
            }
        }
    }
    /**
     * Init the user-agent used by the REST requests.
     * <p>
     * Delegates to {@link RestClient#initUserAgent(Context)}.
     *
     * @param context the application context
     */
    public static void initUserAgent(Context context) {
        RestClient.initUserAgent(context);
    }
/**
* Provides the lib version.
*
* @param longFormat true to have a long format i.e with date and time.
* @return the SDK version.
*/
public String getVersion(boolean longFormat) {
checkIfAlive();
String versionName = BuildConfig.VERSION_NAME;
if (!TextUtils.isEmpty(versionName)) {
String gitVersion = mAppContent.getResources().getString(R.string.git_sdk_revision);
if (longFormat) {
String date = mAppContent.getResources().getString(R.string.git_sdk_revision_date);
versionName += " (" + gitVersion + "-" + date + ")";
} else {
versionName += " (" + gitVersion + ")";
}
}
return versionName;
}
/**
* Provides the crypto lib version.
*
* @param context the context
* @param longFormat true to have a long version (with date and time)
* @return the crypto lib version
*/
public String getCryptoVersion(Context context, boolean longFormat) {
String version = "";
if (null != mOlmManager) {
version = longFormat ? mOlmManager.getDetailedVersion(context) : mOlmManager.getVersion();
}
return version;
}
    /**
     * Get the data handler.
     *
     * @return the data handler.
     */
    public MXDataHandler getDataHandler() {
        checkIfAlive();
        return mDataHandler;
    }

    /**
     * Get the user credentials.
     *
     * @return the credentials
     */
    public Credentials getCredentials() {
        checkIfAlive();
        return mCredentials;
    }

    /**
     * Get the API client for requests to the events API.
     *
     * @return the events API client
     */
    public EventsRestClient getEventsApiClient() {
        checkIfAlive();
        return mEventsRestClient;
    }

    /**
     * Get the API client for requests to the profile API.
     *
     * @return the profile API client
     */
    public ProfileRestClient getProfileApiClient() {
        checkIfAlive();
        return mProfileRestClient;
    }

    /**
     * Get the API client for requests to the presence API.
     *
     * @return the presence API client
     */
    public PresenceRestClient getPresenceApiClient() {
        checkIfAlive();
        return mPresenceRestClient;
    }

    /**
     * Get the API client for requests to the filter API.
     *
     * @return the filter API client
     */
    public FilterRestClient getFilterRestClient() {
        checkIfAlive();
        return mFilterRestClient;
    }
    /**
     * Refresh the presence info of a dedicated user.
     * <p>
     * The fresh presence fields are merged into the locally stored user (or the
     * fetched user is stored as-is when unknown locally), then persisted.
     *
     * @param userId   the user userID.
     * @param callback the callback, invoked with {@code null} on success; may be null.
     */
    public void refreshUserPresence(final String userId, final ApiCallback<Void> callback) {
        mPresenceRestClient.getPresence(userId, new SimpleApiCallback<User>(callback) {
            @Override
            public void onSuccess(User user) {
                User currentUser = mDataHandler.getStore().getUser(userId);
                if (null != currentUser) {
                    // Merge only the presence-related fields into the known user.
                    currentUser.presence = user.presence;
                    currentUser.currently_active = user.currently_active;
                    currentUser.lastActiveAgo = user.lastActiveAgo;
                } else {
                    currentUser = user;
                }
                currentUser.setLatestPresenceTs(System.currentTimeMillis());
                mDataHandler.getStore().storeUser(currentUser);
                if (null != callback) {
                    callback.onSuccess(null);
                }
            }
        });
    }
    /**
     * Get the API client for requests to the bing rules API.
     *
     * @return the bing rules API client
     */
    public PushRulesRestClient getBingRulesApiClient() {
        checkIfAlive();
        return mPushRulesRestClient;
    }

    /** @return the third-party-identifier API client */
    public ThirdPidRestClient getThirdPidRestClient() {
        checkIfAlive();
        return mThirdPidRestClient;
    }

    /** @return the call API client */
    public CallRestClient getCallRestClient() {
        checkIfAlive();
        return mCallRestClient;
    }

    /** @return the pushers API client */
    public PushersRestClient getPushersRestClient() {
        checkIfAlive();
        return mPushersRestClient;
    }

    /** @return the crypto API client */
    public CryptoRestClient getCryptoRestClient() {
        checkIfAlive();
        return mCryptoRestClient;
    }

    /** @return the home server connection configuration */
    public HomeServerConnectionConfig getHomeServerConfig() {
        checkIfAlive();
        return mHsConfig;
    }

    /**
     * Get the API client for requests to the rooms API.
     *
     * @return the rooms API client
     */
    public RoomsRestClient getRoomsApiClient() {
        checkIfAlive();
        return mRoomsRestClient;
    }

    /** @return the media scan API client */
    public MediaScanRestClient getMediaScanRestClient() {
        checkIfAlive();
        return mMediaScanRestClient;
    }

    // The following setters allow tests / subclasses to replace REST clients.
    protected void setEventsApiClient(EventsRestClient eventsRestClient) {
        checkIfAlive();
        mEventsRestClient = eventsRestClient;
    }

    protected void setProfileApiClient(ProfileRestClient profileRestClient) {
        checkIfAlive();
        mProfileRestClient = profileRestClient;
    }

    protected void setPresenceApiClient(PresenceRestClient presenceRestClient) {
        checkIfAlive();
        mPresenceRestClient = presenceRestClient;
    }

    protected void setRoomsApiClient(RoomsRestClient roomsRestClient) {
        checkIfAlive();
        mRoomsRestClient = roomsRestClient;
    }

    /** @return the per-user cache of latest chat messages */
    public MXLatestChatMessageCache getLatestChatMessageCache() {
        checkIfAlive();
        return mLatestChatMessageCache;
    }

    /** @return the medias cache */
    public MXMediasCache getMediasCache() {
        checkIfAlive();
        return mMediasCache;
    }
    /**
     * Provides the application caches size.
     * <p>
     * The size is computed on a background thread (directory walk, depth 5,
     * from the app files dir's parent) and delivered via the callback. If the
     * task cannot be scheduled, {@code onUnexpectedError} is posted on the
     * main thread instead.
     *
     * @param context  the context
     * @param callback the asynchronous callback; may be null
     */
    public static void getApplicationSizeCaches(final Context context, final ApiCallback<Long> callback) {
        AsyncTask<Void, Void, Long> task = new AsyncTask<Void, Void, Long>() {
            @Override
            protected Long doInBackground(Void... params) {
                return ContentUtils.getDirectorySize(context, context.getApplicationContext().getFilesDir().getParentFile(), 5);
            }
            @Override
            protected void onPostExecute(Long result) {
                Log.d(LOG_TAG, "## getCacheSize() : " + result);
                if (null != callback) {
                    callback.onSuccess(result);
                }
            }
        };
        try {
            task.executeOnExecutor(AsyncTask.THREAD_POOL_EXECUTOR);
        } catch (final Exception e) {
            Log.e(LOG_TAG, "## getApplicationSizeCaches() : failed " + e.getMessage(), e);
            task.cancel(true);
            // Report the failure asynchronously, mirroring the success path.
            (new android.os.Handler(Looper.getMainLooper())).post(new Runnable() {
                @Override
                public void run() {
                    if (null != callback) {
                        callback.onUnexpectedError(e);
                    }
                }
            });
        }
    }
    /**
     * Clear the application cache: data handler, connectivity receiver,
     * unsent-events manager, message/media caches and, when present, the
     * crypto instance.
     * <p>
     * NOTE(review): mCrypto is declared elsewhere in this class (outside this
     * excerpt).
     */
    private void clearApplicationCaches(Context context) {
        mDataHandler.clear();
        // network event will not be listened anymore
        try {
            mAppContent.unregisterReceiver(mNetworkConnectivityReceiver);
        } catch (Exception e) {
            Log.e(LOG_TAG, "## clearApplicationCaches() : unregisterReceiver failed " + e.getMessage(), e);
        }
        mNetworkConnectivityReceiver.removeListeners();
        // auto resent messages will not be resent
        mUnsentEventsManager.clear();
        mLatestChatMessageCache.clearCache(context);
        mMediasCache.clear();
        if (null != mCrypto) {
            mCrypto.close();
        }
    }
    /**
     * Clear the session data synchronously.
     * <p>
     * Equivalent to {@code clear(context, null)}.
     *
     * @param context the context
     */
    public void clear(final Context context) {
        clear(context, null);
    }
    /**
     * Clear the session data.
     * if the callback is null, the clear is synchronous.
     * <p>
     * Marks the session as released (idempotent: a second call is a no-op),
     * stops the event stream, then clears the application caches either on the
     * calling thread (no callback) or on a background thread (with callback).
     *
     * @param context  the context
     * @param callback the asynchronous callback
     */
    public void clear(final Context context, final ApiCallback<Void> callback) {
        synchronized (this) {
            if (!mIsAliveSession) {
                Log.e(LOG_TAG, "## clear() was already called");
                return;
            }
            mIsAliveSession = false;
        }
        // stop events stream
        stopEventStream();
        if (null == callback) {
            clearApplicationCaches(context);
        } else {
            // clear the caches in a background thread to avoid blocking the UI thread
            AsyncTask<Void, Void, Void> task = new AsyncTask<Void, Void, Void>() {
                @Override
                protected Void doInBackground(Void... params) {
                    clearApplicationCaches(context);
                    return null;
                }
                @Override
                protected void onPostExecute(Void args) {
                    if (null != callback) {
                        callback.onSuccess(null);
                    }
                }
            };
            try {
                task.executeOnExecutor(AsyncTask.THREAD_POOL_EXECUTOR);
            } catch (final Exception e) {
                Log.e(LOG_TAG, "## clear() failed " + e.getMessage(), e);
                task.cancel(true);
                // Report the failure asynchronously, mirroring the success path.
                (new android.os.Handler(Looper.getMainLooper())).post(new Runnable() {
                    @Override
                    public void run() {
                        if (null != callback) {
                            callback.onUnexpectedError(e);
                        }
                    }
                });
            }
        }
    }
/**
 * Remove the medias older than the provided timestamp.
 * <p>
 * Media files whose upload has not completed (still referenced as local content) are
 * kept whatever their age so a retry can still access them. Old log files are removed
 * as well. The deletion itself runs on a background thread.
 *
 * @param context   the context
 * @param timestamp the timestamp (in seconds)
 */
public void removeMediasBefore(final Context context, final long timestamp) {
    // list the files to keep even if they are older than the provided timestamp
    // because their upload failed
    final Set<String> filesToKeep = new HashSet<>();
    IMXStore store = getDataHandler().getStore();
    Collection<Room> rooms = store.getRooms();
    for (Room room : rooms) {
        Collection<Event> events = store.getRoomMessages(room.getRoomId());
        if (null != events) {
            for (Event event : events) {
                try {
                    Message message = null;
                    if (TextUtils.equals(Event.EVENT_TYPE_MESSAGE, event.getType())) {
                        message = JsonUtils.toMessage(event.getContent());
                    } else if (TextUtils.equals(Event.EVENT_TYPE_STICKER, event.getType())) {
                        message = JsonUtils.toStickerMessage(event.getContent());
                    }
                    // instanceof already rejects null, the former explicit null check was redundant
                    if (message instanceof MediaMessage) {
                        MediaMessage mediaMessage = (MediaMessage) message;
                        if (mediaMessage.isThumbnailLocalContent()) {
                            filesToKeep.add(Uri.parse(mediaMessage.getThumbnailUrl()).getPath());
                        }
                        if (mediaMessage.isLocalContent()) {
                            filesToKeep.add(Uri.parse(mediaMessage.getUrl()).getPath());
                        }
                    }
                } catch (Exception e) {
                    // best effort: a malformed event must not abort the whole scan
                    Log.e(LOG_TAG, "## removeMediasBefore() : failed " + e.getMessage(), e);
                }
            }
        }
    }
    AsyncTask<Void, Void, Void> task = new AsyncTask<Void, Void, Void>() {
        @Override
        protected Void doInBackground(Void... params) {
            long length = getMediasCache().removeMediasBefore(timestamp, filesToKeep);
            // delete also the log files
            // they might be large
            File logsDir = Log.getLogDirectory();
            if (null != logsDir) {
                File[] logFiles = logsDir.listFiles();
                if (null != logFiles) {
                    for (File file : logFiles) {
                        if (ContentUtils.getLastAccessTime(file) < timestamp) {
                            length += file.length();
                            file.delete();
                        }
                    }
                }
            }
            if (0 != length) {
                Log.d(LOG_TAG, "## removeMediasBefore() : save " + android.text.format.Formatter.formatFileSize(context, length));
            } else {
                Log.d(LOG_TAG, "## removeMediasBefore() : useless");
            }
            return null;
        }
    };
    try {
        task.executeOnExecutor(AsyncTask.THREAD_POOL_EXECUTOR);
    } catch (Exception e) {
        Log.e(LOG_TAG, "## removeMediasBefore() : failed " + e.getMessage(), e);
        task.cancel(true);
    }
}
/**
 * Tells whether the session is still usable.
 *
 * @return true if the session is active i.e. has not been cleared after a logout.
 */
public boolean isAlive() {
    // read under the same lock used by clear() to publish the flag
    synchronized (this) {
        return mIsAliveSession;
    }
}
/**
 * Get the content manager (for uploading and downloading content) associated with the session.
 *
 * @return the content manager
 * @throws AssertionError if the session has been cleared (see checkIfAlive)
 */
public ContentManager getContentManager() {
    checkIfAlive();
    return mContentManager;
}
/**
 * Get the session's current user.
 * The MyUser object provides methods for updating user properties which are not possible for other users.
 *
 * @return the session's MyUser object
 */
public MyUser getMyUser() {
    checkIfAlive();
    return mDataHandler.getMyUser();
}
/**
 * Get the session's current user id.
 *
 * @return the session's MyUser id, or null if there is no current user
 */
public String getMyUserId() {
    checkIfAlive();
    final MyUser myUser = mDataHandler.getMyUser();
    return (myUser != null) ? myUser.user_id : null;
}
/**
 * Start the event stream (events thread that listens for events) with an event listener.
 *
 * @param anEventsListener            the event listener or null if using a DataHandler
 * @param networkConnectivityReceiver the network connectivity listener.
 * @param initialToken                the initial sync token (null to start from scratch)
 */
public void startEventStream(final EventsThreadListener anEventsListener,
                             final NetworkConnectivityReceiver networkConnectivityReceiver,
                             final String initialToken) {
    checkIfAlive();
    // reported by a rageshake issue
    // startEventStream might be called several times
    // when the service is killed and automatically restarted.
    // It might be restarted by itself and by android at the same time.
    synchronized (LOG_TAG) {
        if (mEventsThread != null) {
            if (!mEventsThread.isAlive()) {
                // the previous thread died: drop it and recreate one below
                mEventsThread = null;
                Log.e(LOG_TAG, "startEventStream() : create a new EventsThread");
            } else {
                // https://github.com/vector-im/riot-android/issues/1331
                // thread still running: abort any pending kill and keep using it
                mEventsThread.cancelKill();
                Log.e(LOG_TAG, "Ignoring startEventStream() : Thread already created.");
                return;
            }
        }
        if (mDataHandler == null) {
            Log.e(LOG_TAG, "Error starting the event stream: No data handler is defined");
            return;
        }
        Log.d(LOG_TAG, "startEventStream : create the event stream");
        final EventsThreadListener fEventsListener = (null == anEventsListener) ? new DefaultEventsThreadListener(mDataHandler) : anEventsListener;
        mEventsThread = new EventsThread(mAppContent, mEventsRestClient, fEventsListener, initialToken);
        // apply the current sync filter (data save mode / lazy loading) to the fresh thread
        setSyncFilter(mCurrentFilter);
        mEventsThread.setMetricsListener(mMetricsListener);
        mEventsThread.setNetworkConnectivityReceiver(networkConnectivityReceiver);
        mEventsThread.setIsOnline(mIsOnline);
        mEventsThread.setServerLongPollTimeout(mSyncTimeout);
        mEventsThread.setSyncDelay(mSyncDelay);
        if (mFailureCallback != null) {
            mEventsThread.setFailureCallback(mFailureCallback);
        }
        if (mCredentials.accessToken != null && !mEventsThread.isAlive()) {
            // GA issue
            try {
                mEventsThread.start();
            } catch (Exception e) {
                Log.e(LOG_TAG, "## startEventStream() : mEventsThread.start failed " + e.getMessage(), e);
            }
            if (mIsBgCatchupPending) {
                Log.d(LOG_TAG, "startEventStream : start a catchup");
                mIsBgCatchupPending = false;
                // catchup retrieve any available messages before stop the sync
                mEventsThread.catchup();
            }
        }
    }
}
/**
 * Refresh the access token.
 * <p>
 * NOTE(review): the refreshed credentials are only logged here, not persisted by this
 * method — presumably handled by the rest client layer; confirm.
 */
public void refreshToken() {
    checkIfAlive();
    mProfileRestClient.refreshTokens(new ApiCallback<Credentials>() {
        @Override
        public void onSuccess(Credentials info) {
            Log.d(LOG_TAG, "refreshToken : succeeds.");
        }

        @Override
        public void onNetworkError(Exception e) {
            Log.e(LOG_TAG, "refreshToken : onNetworkError " + e.getMessage(), e);
        }

        @Override
        public void onMatrixError(MatrixError e) {
            Log.e(LOG_TAG, "refreshToken : onMatrixError " + e.getMessage());
        }

        @Override
        public void onUnexpectedError(Exception e) {
            // fixed: this log line previously reported "onMatrixError" for unexpected errors
            Log.e(LOG_TAG, "refreshToken : onUnexpectedError " + e.getMessage(), e);
        }
    });
}
/**
 * Update the online status.
 *
 * @param isOnline true if the client must be seen as online
 */
public void setIsOnline(boolean isOnline) {
    // ignore no-op updates
    if (mIsOnline != isOnline) {
        mIsOnline = isOnline;
        // forward the new status to the running events thread, if any
        if (mEventsThread != null) {
            mEventsThread.setIsOnline(isOnline);
        }
    }
}
/**
 * Tells whether the client is currently seen as "online".
 *
 * @return true if the client is seen as "online"
 */
public boolean isOnline() {
    return mIsOnline;
}
/**
 * Update the heartbeat (long poll) request timeout.
 *
 * @param ms the delay in ms
 */
public void setSyncTimeout(int ms) {
    mSyncTimeout = ms;
    // propagate to the running events thread, if any
    if (mEventsThread != null) {
        mEventsThread.setServerLongPollTimeout(ms);
    }
}
/**
 * Get the heartbeat request timeout.
 *
 * @return the heartbeat request timeout in ms
 */
public int getSyncTimeout() {
    return mSyncTimeout;
}
/**
 * Set a delay between two sync requests.
 *
 * @param ms the delay in ms
 */
public void setSyncDelay(int ms) {
    mSyncDelay = ms;
    // propagate to the running events thread, if any
    if (mEventsThread != null) {
        mEventsThread.setSyncDelay(ms);
    }
}
/**
 * Get the delay between two sync requests.
 *
 * @return the delay between two sync requests in ms
 */
public int getSyncDelay() {
    return mSyncDelay;
}
/**
 * Update the data save mode.
 *
 * @param enabled true to enable the data save mode
 */
public void setUseDataSaveMode(boolean enabled) {
    mUseDataSaveMode = enabled;
    // re-apply the current filter so the new setting takes effect on the running stream
    if (null != mEventsThread) {
        setSyncFilter(mCurrentFilter);
    }
}
/**
 * Set the filter used by the EventsThread for sync requests.
 *
 * @param filter the content of the filter param on sync requests
 */
public synchronized void setSyncFilter(FilterBody filter) {
    Log.d(LOG_TAG, "setSyncFilter ## " + filter);
    mCurrentFilter = filter;
    // Enable Data save mode and/or LazyLoading on the stored filter
    FilterUtil.enableDataSaveMode(mCurrentFilter, mUseDataSaveMode);
    FilterUtil.enableLazyLoading(mCurrentFilter, mDataHandler.isLazyLoadingEnabled());
    // resolve (or upload) the matching server-side filter id
    convertFilterToFilterId();
}
/**
 * Convert a filter to a filterId.
 * Either it is already known to the server, or send the filter to the server to get a filterId.
 * <p>
 * NOTE(review): mEventsThread is dereferenced without a null check — presumably this is
 * only reached once the events thread exists; confirm for early external calls to setSyncFilter().
 */
private void convertFilterToFilterId() {
    // Ensure mCurrentFilter has not been updated in the same time
    final String wantedJsonFilter = mCurrentFilter.toJSONString();
    // Check if the current filter is known by the server, to directly use the filterId
    String filterId = getDataHandler().getStore().getFilters().get(wantedJsonFilter);
    if (TextUtils.isEmpty(filterId)) {
        // enable the filter in JSON representation so do not block sync until the filter response is there
        mEventsThread.setFilterOrFilterId(wantedJsonFilter);
        // Send the filter to the server
        mFilterRestClient.uploadFilter(getMyUserId(), mCurrentFilter, new SimpleApiCallback<FilterResponse>() {
            @Override
            public void onSuccess(FilterResponse filter) {
                // Store the couple filter/filterId
                getDataHandler().getStore().addFilter(wantedJsonFilter, filter.filterId);
                // Ensure the filter is still corresponding to the current filter
                if (TextUtils.equals(wantedJsonFilter, mCurrentFilter.toJSONString())) {
                    // Tell the event thread to use the id now
                    mEventsThread.setFilterOrFilterId(filter.filterId);
                }
            }
        });
    } else {
        // Tell the event thread to use the id now
        mEventsThread.setFilterOrFilterId(filterId);
    }
}
/**
 * Refresh the network connection information.
 * On android version older than 6.0, the doze mode might have killed the network connection.
 */
public void refreshNetworkConnection() {
    if (mNetworkConnectivityReceiver != null) {
        // mNetworkConnectivityReceiver is a broadcastReceiver
        // but some users reported that the network updates were not dispatched
        mNetworkConnectivityReceiver.checkNetworkConnection(mAppContent);
    }
}
/**
 * Shorthand for {@link #startEventStream(EventsThreadListener, NetworkConnectivityReceiver, String)} with no eventListener
 * using a DataHandler and no specific failure callback.
 *
 * @param initialToken the initial sync token (null to sync from scratch).
 */
public void startEventStream(String initialToken) {
    checkIfAlive();
    // use the session's own connectivity receiver and the default listener
    startEventStream(null, mNetworkConnectivityReceiver, initialToken);
}
/**
 * Gracefully stop the event stream.
 */
public void stopEventStream() {
    if (mCallsManager != null) {
        mCallsManager.stopTurnServerRefresh();
    }
    if (mEventsThread == null) {
        Log.e(LOG_TAG, "stopEventStream : mEventsThread is already null");
        return;
    }
    Log.d(LOG_TAG, "stopEventStream");
    mEventsThread.kill();
    mEventsThread = null;
}
/**
 * Pause the event stream.
 */
public void pauseEventStream() {
    checkIfAlive();
    if (mCallsManager != null) {
        mCallsManager.pauseTurnServerRefresh();
    }
    if (mEventsThread != null) {
        Log.d(LOG_TAG, "pauseEventStream");
        mEventsThread.pause();
    } else {
        Log.e(LOG_TAG, "pauseEventStream : mEventsThread is null");
    }
    // temporary decrypted files are no longer needed while paused
    if (getMediasCache() != null) {
        getMediasCache().clearTmpDecryptedMediaCache();
    }
    if (mGroupsManager != null) {
        mGroupsManager.onSessionPaused();
    }
}
/**
 * Get the current sync token.
 *
 * @return the current sync token, or null if the events thread is not running
 */
public String getCurrentSyncToken() {
    if (mEventsThread == null) {
        return null;
    }
    return mEventsThread.getCurrentSyncToken();
}
/**
 * Resume the event stream.
 */
public void resumeEventStream() {
    checkIfAlive();
    if (mNetworkConnectivityReceiver != null) {
        // mNetworkConnectivityReceiver is a broadcastReceiver
        // but some users reported that the network updates were not dispatched
        mNetworkConnectivityReceiver.checkNetworkConnection(mAppContent);
    }
    if (mCallsManager != null) {
        mCallsManager.unpauseTurnServerRefresh();
    }
    if (mEventsThread != null) {
        Log.d(LOG_TAG, "## resumeEventStream() : unpause");
        mEventsThread.unpause();
    } else {
        Log.e(LOG_TAG, "resumeEventStream : mEventsThread is null");
    }
    // a background catchup is useless once the stream is live again
    if (mIsBgCatchupPending) {
        mIsBgCatchupPending = false;
        Log.d(LOG_TAG, "## resumeEventStream() : cancel bg sync");
    }
    if (getMediasCache() != null) {
        getMediasCache().clearShareDecryptedMediaCache();
    }
    if (mGroupsManager != null) {
        mGroupsManager.onSessionResumed();
    }
}
/**
 * Trigger a catchup.
 */
public void catchupEventStream() {
    checkIfAlive();
    if (mEventsThread != null) {
        Log.d(LOG_TAG, "catchupEventStream");
        mEventsThread.catchup();
    } else {
        // remember the request so the catchup happens once the thread is created
        Log.e(LOG_TAG, "catchupEventStream : mEventsThread is null so catchup when the thread will be created");
        mIsBgCatchupPending = true;
    }
}
/**
 * Set a global failure callback implementation.
 *
 * @param failureCallback the failure callback
 */
public void setFailureCallback(ApiFailureCallback failureCallback) {
    checkIfAlive();
    mFailureCallback = failureCallback;
    // forward to the running events thread, if any
    if (null != mEventsThread) {
        mEventsThread.setFailureCallback(failureCallback);
    }
}
/**
 * Create a new room with default properties.
 *
 * @param callback the async callback once the room is ready
 */
public void createRoom(final ApiCallback<String> callback) {
    // no name, topic nor alias
    createRoom(null, null, null, callback);
}
/**
 * Create a new room with given properties. Needs the data handler.
 *
 * @param name     the room name
 * @param topic    the room topic
 * @param alias    the room alias
 * @param callback the async callback once the room is ready
 */
public void createRoom(String name, String topic, String alias, final ApiCallback<String> callback) {
    // private room, guests allowed to join, no encryption
    createRoom(name, topic, RoomDirectoryVisibility.DIRECTORY_VISIBILITY_PRIVATE, alias, RoomState.GUEST_ACCESS_CAN_JOIN, null, callback);
}
/**
 * Create a new room with given properties. Needs the data handler.
 *
 * @param name        the room name
 * @param topic       the room topic
 * @param visibility  the room visibility
 * @param alias       the room alias
 * @param guestAccess the guest access rule (see {@link RoomState#GUEST_ACCESS_CAN_JOIN} or {@link RoomState#GUEST_ACCESS_FORBIDDEN})
 * @param algorithm   the crypto algorithm (null to create an unencrypted room)
 * @param callback    the async callback once the room is ready
 */
public void createRoom(String name,
                       String topic,
                       String visibility,
                       String alias,
                       String guestAccess,
                       String algorithm,
                       final ApiCallback<String> callback) {
    checkIfAlive();
    // empty strings are normalised to null so they are omitted from the request
    CreateRoomParams params = new CreateRoomParams();
    params.name = TextUtils.isEmpty(name) ? null : name;
    params.topic = TextUtils.isEmpty(topic) ? null : topic;
    params.visibility = TextUtils.isEmpty(visibility) ? null : visibility;
    params.roomAliasName = TextUtils.isEmpty(alias) ? null : alias;
    params.guest_access = TextUtils.isEmpty(guestAccess) ? null : guestAccess;
    params.addCryptoAlgorithm(algorithm);
    createRoom(params, callback);
}
/**
 * Create an encrypted room.
 *
 * @param algorithm the encryption algorithm.
 * @param callback  the async callback once the room is ready
 */
public void createEncryptedRoom(String algorithm, final ApiCallback<String> callback) {
    final CreateRoomParams params = new CreateRoomParams();
    params.addCryptoAlgorithm(algorithm);
    createRoom(params, callback);
}
/**
 * Create a direct message room with one participant.<br>
 * The participant can be a user ID or mail address. Once the room is created, on success, the room
 * is set as a "direct message" with the participant.
 *
 * @param aParticipantUserId  user ID (or user mail) to be invited in the direct message room
 * @param aCreateRoomCallBack async call back response
 * @return true if the invite was performed, false otherwise
 */
public boolean createDirectMessageRoom(final String aParticipantUserId, final ApiCallback<String> aCreateRoomCallBack) {
    // unencrypted direct message room
    return createDirectMessageRoom(aParticipantUserId, null, aCreateRoomCallBack);
}
/**
 * Create a direct message room with one participant.<br>
 * The participant can be a user ID or mail address. Once the room is created, on success, the room
 * is set as a "direct message" with the participant.
 *
 * @param aParticipantUserId  user ID (or user mail) to be invited in the direct message room
 * @param algorithm           the crypto algorithm (null to create an unencrypted room)
 * @param aCreateRoomCallBack async call back response
 * @return true if the invite was performed, false otherwise
 */
public boolean createDirectMessageRoom(final String aParticipantUserId, final String algorithm, final ApiCallback<String> aCreateRoomCallBack) {
    // nothing to invite without a participant
    if (TextUtils.isEmpty(aParticipantUserId)) {
        return false;
    }
    CreateRoomParams params = new CreateRoomParams();
    params.addCryptoAlgorithm(algorithm);
    params.setDirectMessage();
    params.addParticipantIds(mHsConfig, Arrays.asList(aParticipantUserId));
    createRoom(params, aCreateRoomCallBack);
    return true;
}
/**
 * Finalise the created room as a direct chat one.
 *
 * @param roomId   the room id
 * @param userId   the user id
 * @param callback the asynchronous callback
 */
private void finalizeDMRoomCreation(final String roomId, String userId, final ApiCallback<String> callback) {
    toggleDirectChatRoom(roomId, userId, new SimpleApiCallback<Void>(callback) {
        @Override
        public void onSuccess(Void info) {
            // mark the new room as read before reporting success
            final Room room = getDataHandler().getRoom(roomId);
            if (room != null) {
                room.markAllAsRead(null);
            }
            if (callback != null) {
                callback.onSuccess(roomId);
            }
        }
    });
}
/**
 * Create a new room with given properties.
 *
 * @param params   the creation parameters.
 * @param callback the async callback once the room is ready
 */
public void createRoom(final CreateRoomParams params, final ApiCallback<String> callback) {
    mRoomsRestClient.createRoom(params, new SimpleApiCallback<CreateRoomResponse>(callback) {
        @Override
        public void onSuccess(CreateRoomResponse info) {
            final String roomId = info.roomId;
            final Room createdRoom = mDataHandler.getRoom(roomId);
            // the creation events are not be called during the creation
            if (!createdRoom.isJoined()) {
                // wait for the initial sync of the room before completing
                createdRoom.setOnInitialSyncCallback(new SimpleApiCallback<Void>(callback) {
                    @Override
                    public void onSuccess(Void info) {
                        onRoomCreationComplete(createdRoom, roomId, params, callback);
                    }
                });
            } else {
                onRoomCreationComplete(createdRoom, roomId, params, callback);
            }
        }
    });
}

/**
 * Common completion path of {@link #createRoom(CreateRoomParams, ApiCallback)}:
 * mark the new room as read, then either finalise it as a direct chat or report success.
 *
 * @param createdRoom the newly created room
 * @param roomId      the room id
 * @param params      the creation parameters
 * @param callback    the async callback
 */
private void onRoomCreationComplete(final Room createdRoom,
                                    final String roomId,
                                    final CreateRoomParams params,
                                    final ApiCallback<String> callback) {
    createdRoom.markAllAsRead(null);
    if (params.isDirect()) {
        finalizeDMRoomCreation(roomId, params.getFirstInvitedUserId(), callback);
    } else {
        callback.onSuccess(roomId);
    }
}
/**
 * Join a room by its id or alias.
 *
 * @param roomIdOrAlias the room id or room alias
 * @param callback      the async callback once the room is joined. The RoomId is provided.
 */
public void joinRoom(String roomIdOrAlias, final ApiCallback<String> callback) {
    checkIfAlive();
    // sanity check
    if ((null != mDataHandler) && (null != roomIdOrAlias)) {
        mDataRetriever.getRoomsRestClient().joinRoom(roomIdOrAlias, new SimpleApiCallback<RoomResponse>(callback) {
            @Override
            public void onSuccess(final RoomResponse roomResponse) {
                final String roomId = roomResponse.roomId;
                Room joinedRoom = mDataHandler.getRoom(roomId);
                // wait until the initial sync is done
                if (!joinedRoom.isJoined()) {
                    joinedRoom.setOnInitialSyncCallback(new SimpleApiCallback<Void>(callback) {
                        @Override
                        public void onSuccess(Void info) {
                            callback.onSuccess(roomId);
                        }
                    });
                } else {
                    // to initialise the notification counters
                    joinedRoom.markAllAsRead(null);
                    callback.onSuccess(roomId);
                }
            }
        });
    }
}
/**
 * Send the read receipts to the latest room messages.
 *
 * @param rooms    the rooms list
 * @param callback the asynchronous callback
 */
public void markRoomsAsRead(final Collection<Room> rooms, final ApiCallback<Void> callback) {
    // nothing to mark: report success on the UI thread and stop
    if ((rooms == null) || rooms.isEmpty()) {
        if (callback != null) {
            new Handler(Looper.getMainLooper()).post(new Runnable() {
                @Override
                public void run() {
                    callback.onSuccess(null);
                }
            });
        }
        return;
    }
    markRoomsAsRead(rooms.iterator(), callback);
}
/**
 * Send the read receipts to the latest room messages, one room at a time.
 * The recursion continues either in the network callback (when a request was sent)
 * or immediately (when no request was needed or the device is offline).
 *
 * @param roomsIterator the rooms list iterator
 * @param callback      the asynchronous callback, invoked on the UI thread once all rooms are processed
 */
private void markRoomsAsRead(final Iterator<Room> roomsIterator, final ApiCallback<Void> callback) {
    if (roomsIterator.hasNext()) {
        // generic iterator (was raw Iterator + cast)
        Room room = roomsIterator.next();
        boolean isRequestSent = false;
        if (mNetworkConnectivityReceiver.isConnected()) {
            isRequestSent = room.markAllAsRead(new SimpleApiCallback<Void>(callback) {
                @Override
                public void onSuccess(Void anything) {
                    markRoomsAsRead(roomsIterator, callback);
                }
            });
        } else {
            // offline: update the local data only
            room.sendReadReceipt();
        }
        if (!isRequestSent) {
            // no pending network request: move to the next room right away
            markRoomsAsRead(roomsIterator, callback);
        }
    } else {
        if (null != callback) {
            new Handler(Looper.getMainLooper()).post(new Runnable() {
                @Override
                public void run() {
                    callback.onSuccess(null);
                }
            });
        }
    }
}
/**
 * Retrieve user matrix id from a 3rd party id.
 *
 * @param address  the 3rd party id.
 * @param media    the media.
 * @param callback the 3rd party callback
 */
public void lookup3Pid(String address, String media, final ApiCallback<String> callback) {
    checkIfAlive();
    mThirdPidRestClient.lookup3Pid(address, media, callback);
}
/**
 * Retrieve user matrix ids from a list of 3rd party ids.
 *
 * @param addresses 3rd party ids
 * @param mediums   the medias.
 * @param callback  the 3rd parties callback
 */
public void lookup3Pids(List<String> addresses, List<String> mediums, ApiCallback<List<String>> callback) {
    checkIfAlive();
    mThirdPidRestClient.lookup3Pids(addresses, mediums, callback);
}
/**
 * Perform a remote text search.
 *
 * @param text        the text to search for.
 * @param rooms       a list of rooms to search in. null means all rooms the user is in.
 * @param beforeLimit the number of events to get before the matching results.
 * @param afterLimit  the number of events to get after the matching results.
 * @param nextBatch   the token to pass for doing pagination from a previous response.
 * @param callback    the request callback
 */
public void searchMessageText(String text,
                              List<String> rooms,
                              int beforeLimit,
                              int afterLimit,
                              String nextBatch,
                              final ApiCallback<SearchResponse> callback) {
    checkIfAlive();
    // without a callback the search result would be lost: skip the request
    if (callback != null) {
        mEventsRestClient.searchMessagesByText(text, rooms, beforeLimit, afterLimit, nextBatch, callback);
    }
}
/**
 * Perform a remote text search in a set of rooms.
 *
 * @param text      the text to search for.
 * @param rooms     a list of rooms to search in. null means all rooms the user is in.
 * @param nextBatch the token to pass for doing pagination from a previous response.
 * @param callback  the request callback
 */
public void searchMessagesByText(String text, List<String> rooms, String nextBatch, final ApiCallback<SearchResponse> callback) {
    checkIfAlive();
    if (callback != null) {
        // no surrounding context events requested (before/after limits are 0)
        mEventsRestClient.searchMessagesByText(text, rooms, 0, 0, nextBatch, callback);
    }
}
/**
 * Perform a remote text search in all the user's rooms.
 *
 * @param text      the text to search for.
 * @param nextBatch the token to pass for doing pagination from a previous response.
 * @param callback  the request callback
 */
public void searchMessagesByText(String text, String nextBatch, final ApiCallback<SearchResponse> callback) {
    checkIfAlive();
    if (callback != null) {
        // null rooms list means "all joined rooms"
        mEventsRestClient.searchMessagesByText(text, null, 0, 0, nextBatch, callback);
    }
}
/**
 * Cancel any pending message search request.
 */
public void cancelSearchMessagesByText() {
    checkIfAlive();
    mEventsRestClient.cancelSearchMessagesByText();
}
/**
 * Perform a remote text search for a dedicated media types list.
 *
 * @param name      the text to search for.
 * @param rooms     a list of rooms to search in. null means all rooms the user is in.
 * @param nextBatch the token to pass for doing pagination from a previous response.
 * @param callback  the request callback
 */
public void searchMediasByName(String name, List<String> rooms, String nextBatch, final ApiCallback<SearchResponse> callback) {
    checkIfAlive();
    if (callback != null) {
        mEventsRestClient.searchMediasByText(name, rooms, 0, 0, nextBatch, callback);
    }
}
/**
 * Cancel any pending media search request.
 */
public void cancelSearchMediasByText() {
    checkIfAlive();
    mEventsRestClient.cancelSearchMediasByText();
}
/**
 * Perform a remote users search by name / user id.
 *
 * @param name          the text to search for.
 * @param limit         the maximum number of items to retrieve (can be null)
 * @param userIdsFilter the user ids filter (can be null)
 * @param callback      the callback
 */
public void searchUsers(String name, Integer limit, Set<String> userIdsFilter, final ApiCallback<SearchUsersResponse> callback) {
    checkIfAlive();
    if (callback != null) {
        mEventsRestClient.searchUsers(name, limit, userIdsFilter, callback);
    }
}
/**
 * Cancel any pending user search.
 */
public void cancelUsersSearch() {
    checkIfAlive();
    mEventsRestClient.cancelUsersSearch();
}
/**
 * Return the fulfilled active BingRule for the event.
 *
 * @param event the event
 * @return the fulfilled bingRule
 */
public BingRule fulfillRule(Event event) {
    checkIfAlive();
    return mBingRulesManager.fulfilledBingRule(event);
}
/**
 * Tells whether VoIP calls can be performed.
 *
 * @return true if the calls are supported
 */
public boolean isVoipCallSupported() {
    // no calls manager means no call support
    return (mCallsManager != null) && mCallsManager.isSupported();
}
/**
 * Get the list of rooms that are tagged the specified tag.
 * The returned array is ordered according to the room tag order.
 *
 * @param tag RoomTag.ROOM_TAG_XXX values
 * @return the rooms list.
 */
public List<Room> roomsWithTag(final String tag) {
    final List<Room> taggedRooms = new ArrayList<>();
    // sanity check: no store, no rooms
    if (mDataHandler.getStore() == null) {
        return taggedRooms;
    }
    final Collection<Room> rooms = mDataHandler.getStore().getRooms();
    if (TextUtils.equals(tag, RoomTag.ROOM_TAG_NO_TAG)) {
        // special pseudo-tag: collect the rooms with no tag at all (unsorted)
        for (Room room : rooms) {
            if (!room.getAccountData().hasTags()) {
                taggedRooms.add(room);
            }
        }
    } else {
        for (Room room : rooms) {
            if (room.getAccountData().roomTag(tag) != null) {
                taggedRooms.add(room);
            }
        }
        // order by the tag's "order" metadata
        if (!taggedRooms.isEmpty()) {
            Collections.sort(taggedRooms, new RoomComparatorWithTag(tag));
        }
    }
    return taggedRooms;
}
/**
 * Get the list of roomIds that are tagged the specified tag.
 * The returned array is ordered according to the room tag order.
 *
 * @param tag RoomTag.ROOM_TAG_XXX values
 * @return the room IDs list.
 */
public List<String> roomIdsWithTag(final String tag) {
    final List<String> roomIds = new ArrayList<>();
    for (Room taggedRoom : roomsWithTag(tag)) {
        roomIds.add(taggedRoom.getRoomId());
    }
    return roomIds;
}
/**
 * Compute the tag order to use for a room tag so that the room will appear in the expected position
 * in the list of rooms stamped with this tag.
 *
 * @param index       the targeted index of the room in the list of rooms with the tag `tag`.
 * @param originIndex the origin index. Integer.MAX_VALUE if there is none.
 * @param tag         the tag
 * @return the tag order to apply to get the expected position.
 */
public Double tagOrderToBeAtIndex(int index, int originIndex, String tag) {
    // Algo (and the [0.0, 1.0] assumption) inspired from matrix-react-sdk:
    // We sort rooms by the lexicographic ordering of the 'order' metadata on their tags.
    // For convenience, we calculate this for now a floating point number between 0.0 and 1.0.
    Double orderA = 0.0; // by default we're next to the beginning of the list
    Double orderB = 1.0; // by default we're next to the end of the list too
    List<Room> roomsWithTag = roomsWithTag(tag);
    if (roomsWithTag.size() > 0) {
        // when an object is moved down, the index must be incremented
        // because the object will be removed from the list to be inserted after its destination
        if ((originIndex != Integer.MAX_VALUE) && (originIndex < index)) {
            index++;
        }
        if (index > 0) {
            // Bound max index to the array size
            int prevIndex = (index < roomsWithTag.size()) ? index : roomsWithTag.size();
            // orderA becomes the order of the room just before the target slot
            RoomTag prevTag = roomsWithTag.get(prevIndex - 1).getAccountData().roomTag(tag);
            if (null == prevTag.mOrder) {
                Log.e(LOG_TAG, "computeTagOrderForRoom: Previous room in sublist has no ordering metadata. This should never happen.");
            } else {
                orderA = prevTag.mOrder;
            }
        }
        if (index <= roomsWithTag.size() - 1) {
            // orderB becomes the order of the room just after the target slot
            RoomTag nextTag = roomsWithTag.get(index).getAccountData().roomTag(tag);
            if (null == nextTag.mOrder) {
                Log.e(LOG_TAG, "computeTagOrderForRoom: Next room in sublist has no ordering metadata. This should never happen.");
            } else {
                orderB = nextTag.mOrder;
            }
        }
    }
    // midpoint between the two neighbours' orders
    return (orderA + orderB) / 2.0;
}
/**
 * Toggles the direct chat status of a room.<br>
 * Create a new direct chat room in the account data section if the room does not exist,
 * otherwise the room is removed from the account data section.
 * Direct chat room user ID choice algorithm:<br>
 * 1- oldest joined room member
 * 2- oldest invited room member
 * 3- the user himself
 *
 * @param roomId             the room roomId
 * @param aParticipantUserId the participant user id
 * @param callback           the asynchronous callback
 */
public void toggleDirectChatRoom(final String roomId,
                                 @Nullable final String aParticipantUserId,
                                 final ApiCallback<Void> callback) {
    final Room room = getDataHandler().getStore().getRoom(roomId);
    if (room == null) {
        // unknown room: nothing to toggle
        if (callback != null) {
            callback.onUnexpectedError(new Exception("Unknown room"));
        }
        return;
    }
    if (getDataHandler().getDirectChatRoomIdsList().contains(roomId)) {
        // The room is already seen as direct chat
        removeDirectChatRoomFromAccountData(roomId, callback);
    } else if (aParticipantUserId != null) {
        // The room was not yet seen as direct chat and the participant is known
        addDirectChatRoomToAccountData(roomId, aParticipantUserId, callback);
    } else {
        // no participant provided: pick one using the algorithm described above
        searchOtherUserInRoomToCreateDirectChat(room, new SimpleApiCallback<String>(callback) {
            @Override
            public void onSuccess(String userId) {
                addDirectChatRoomToAccountData(roomId, userId, callback);
            }
        });
    }
}
/**
 * Search another user in the room to create a direct chat.
 * Selection order: oldest joined member, then oldest invited member, then the logged-in user.
 *
 * @param room     the room to search in
 * @param callback the callback to get the selected user id
 */
private void searchOtherUserInRoomToCreateDirectChat(@NonNull final Room room,
                                                     @NonNull final ApiCallback<String> callback) {
    room.getActiveMembersAsync(new SimpleApiCallback<List<RoomMember>>(callback) {
        @Override
        public void onSuccess(List<RoomMember> members) {
            // should never happen but it was reported by a GA issue
            if (members.isEmpty()) {
                callback.onUnexpectedError(new Exception("Error"));
                return;
            }
            RoomMember directChatMember = null;
            if (members.size() > 1) {
                // sort algo: oldest join first, then oldest invited
                Collections.sort(members, new Comparator<RoomMember>() {
                    @Override
                    public int compare(RoomMember r1, RoomMember r2) {
                        int res;
                        long diff;
                        if (RoomMember.MEMBERSHIP_JOIN.equals(r2.membership) && RoomMember.MEMBERSHIP_INVITE.equals(r1.membership)) {
                            // joined members come before invited members
                            res = 1;
                        } else if (r2.membership.equals(r1.membership)) {
                            // same membership: oldest (smallest origin server ts) first
                            diff = r1.getOriginServerTs() - r2.getOriginServerTs();
                            res = (0 == diff) ? 0 : ((diff > 0) ? 1 : -1);
                        } else {
                            res = -1;
                        }
                        return res;
                    }
                });
                int nextIndexSearch = 0;
                // take the oldest joined member that is not the logged-in user
                if (!TextUtils.equals(members.get(0).getUserId(), getMyUserId())) {
                    if (RoomMember.MEMBERSHIP_JOIN.equals(members.get(0).membership)) {
                        directChatMember = members.get(0);
                    }
                } else {
                    nextIndexSearch = 1;
                    if (RoomMember.MEMBERSHIP_JOIN.equals(members.get(1).membership)) {
                        directChatMember = members.get(1);
                    }
                }
                // no joined member found, fall back to the oldest invited member
                if (null == directChatMember) {
                    if (RoomMember.MEMBERSHIP_INVITE.equals(members.get(nextIndexSearch).membership)) {
                        directChatMember = members.get(nextIndexSearch);
                    }
                }
            }
            // last option: get the logged user
            if (null == directChatMember) {
                directChatMember = members.get(0);
            }
            callback.onSuccess(directChatMember.getUserId());
        }
    });
}
/**
 * Add the room to the direct chat room list in AccountData.
 *
 * @param roomId       the room roomId
 * @param chosenUserId userId of the direct chat room
 * @param callback     the asynchronous callback
 */
private void addDirectChatRoomToAccountData(String roomId,
                                            @NonNull String chosenUserId,
                                            ApiCallback<Void> callback) {
    final IMXStore store = getDataHandler().getStore();
    // work on a copy of the stored map (or start from an empty one)
    final Map<String, List<String>> directChatRoomsMap = (store.getDirectChatRoomsDict() != null)
            ? new HashMap<>(store.getDirectChatRoomsDict())
            : new HashMap<String, List<String>>();
    // reuse an existing entry for this user, if any
    final List<String> roomIds = directChatRoomsMap.containsKey(chosenUserId)
            ? new ArrayList<>(directChatRoomsMap.get(chosenUserId))
            : new ArrayList<String>();
    // update room list with the new room
    roomIds.add(roomId);
    directChatRoomsMap.put(chosenUserId, roomIds);
    // Store and upload the updated map
    getDataHandler().setDirectChatRoomsMap(directChatRoomsMap, callback);
}
/**
 * Remove the room from the direct chat room list in AccountData.
 *
 * @param roomId   the room roomId
 * @param callback the asynchronous callback
 */
private void removeDirectChatRoomFromAccountData(String roomId,
                                                 ApiCallback<Void> callback) {
    IMXStore store = getDataHandler().getStore();
    // guard clause (the dict was previously null-checked twice with duplicated logic)
    if (null == store.getDirectChatRoomsDict()) {
        // should not happen: if the room has to be removed, it means the room has been
        // previously detected as being part of the listOfList
        Log.e(LOG_TAG, "## removeDirectChatRoomFromAccountData(): failed to remove a direct chat room (not seen as direct chat room)");
        if (callback != null) {
            callback.onUnexpectedError(new Exception("Error"));
        }
        return;
    }
    // copy of the outer map only; NOTE(review): the inner lists are shared with the store,
    // so removing a room id also mutates the store's lists — confirm this is intended
    Map<String, List<String>> params = new HashMap<>(store.getDirectChatRoomsDict());
    // remove the current room from the direct chat list rooms
    List<String> keysList = new ArrayList<>(params.keySet());
    for (String key : keysList) {
        List<String> roomIdsList = params.get(key);
        if (roomIdsList.contains(roomId)) {
            roomIdsList.remove(roomId);
            if (roomIdsList.isEmpty()) {
                // Remove this entry: no more direct chat room for this user
                params.remove(key);
            }
        }
    }
    // Store and upload the updated map
    getDataHandler().setDirectChatRoomsMap(params, callback);
}
/**
 * Update the account password.
 * Thin delegation to the profile REST client.
 *
 * @param oldPassword the former account password
 * @param newPassword the new account password
 * @param callback    the callback
 */
public void updatePassword(String oldPassword, String newPassword, ApiCallback<Void> callback) {
    mProfileRestClient.updatePassword(getMyUserId(), oldPassword, newPassword, callback);
}
/**
 * Reset the password to a new one.
 * Thin delegation to the profile REST client.
 *
 * @param newPassword    the new password
 * @param threepid_creds the three-pid credentials used to prove ownership of the account
 * @param callback       the callback
 */
public void resetPassword(final String newPassword, final Map<String, String> threepid_creds, final ApiCallback<Void> callback) {
    mProfileRestClient.resetPassword(newPassword, threepid_creds, callback);
}
/**
 * Triggers a request to upload the new ignored users list to the account data.
 *
 * @param userIds  the user ids to ignore
 * @param callback the callback
 */
private void updateUsers(List<String> userIds, ApiCallback<Void> callback) {
    // the server expects a map of userId -> empty object
    Map<String, Object> ignoredUsersMap = new HashMap<>();
    for (String ignoredUserId : userIds) {
        ignoredUsersMap.put(ignoredUserId, new HashMap<>());
    }

    Map<String, Object> accountData = new HashMap<>();
    accountData.put(AccountDataRestClient.ACCOUNT_DATA_KEY_IGNORED_USERS, ignoredUsersMap);

    mAccountDataRestClient.setAccountData(getMyUserId(), AccountDataRestClient.ACCOUNT_DATA_TYPE_IGNORED_USER_LIST, accountData, callback);
}
/**
 * Tells if a user is in the ignored user ids list.
 *
 * @param userId the user id to test (null returns false)
 * @return true if the user is ignored
 */
public boolean isUserIgnored(String userId) {
    // contains() is the idiomatic (and equivalent) form of indexOf(..) >= 0
    return null != userId && getDataHandler().getIgnoredUserIds().contains(userId);
}
/**
 * Ignore a list of users.
 * The server request (and its callback) is only triggered when the ignored list
 * actually changes; when there is nothing to add, onSuccess is invoked directly
 * so the caller is never left waiting.
 *
 * @param userIds  the user ids list to ignore
 * @param callback the result callback (may be null)
 */
public void ignoreUsers(List<String> userIds, ApiCallback<Void> callback) {
    List<String> curUserIdsToIgnore = getDataHandler().getIgnoredUserIds();
    List<String> userIdsToIgnore = new ArrayList<>(curUserIdsToIgnore);

    // something to add
    if ((null != userIds) && !userIds.isEmpty()) {
        // add the new ones, skipping duplicates
        for (String userId : userIds) {
            if (!userIdsToIgnore.contains(userId)) {
                userIdsToIgnore.add(userId);
            }
        }
    }

    if (curUserIdsToIgnore.size() != userIdsToIgnore.size()) {
        // some items have been added: sync with the server
        updateUsers(userIdsToIgnore, callback);
    } else if (null != callback) {
        // nothing changed: report success immediately
        // (the original code silently dropped the callback in this case)
        callback.onSuccess(null);
    }
}
/**
 * Unignore a list of users.
 * The server request (and its callback) is only triggered when the ignored list
 * actually changes; when there is nothing to remove, onSuccess is invoked directly
 * so the caller is never left waiting.
 *
 * @param userIds  the user ids list to unignore
 * @param callback the result callback (may be null)
 */
public void unIgnoreUsers(List<String> userIds, ApiCallback<Void> callback) {
    List<String> curUserIdsToIgnore = getDataHandler().getIgnoredUserIds();
    List<String> userIdsToIgnore = new ArrayList<>(curUserIdsToIgnore);

    // something to remove
    if ((null != userIds) && !userIds.isEmpty()) {
        for (String userId : userIds) {
            userIdsToIgnore.remove(userId);
        }
    }

    if (curUserIdsToIgnore.size() != userIdsToIgnore.size()) {
        // some items have been removed: sync with the server
        updateUsers(userIdsToIgnore, callback);
    } else if (null != callback) {
        // nothing changed: report success immediately
        // (the original code silently dropped the callback in this case)
        callback.onSuccess(null);
    }
}
/**
 * @return the network connectivity receiver attached to this session.
 */
public NetworkConnectivityReceiver getNetworkConnectivityReceiver() {
    return mNetworkConnectivityReceiver;
}
/**
 * Ask the home server if the lazy loading of room members is supported.
 *
 * @param callback called with true if the server supports lazy loading of room members;
 *                 failures are presumably forwarded to it by the SimpleApiCallback wrapper
 */
public void canEnableLazyLoading(final ApiCallback<Boolean> callback) {
    // Check that the server support the lazy loading by fetching the supported versions
    mLoginRestClient.getVersions(new SimpleApiCallback<Versions>(callback) {
        @Override
        public void onSuccess(Versions info) {
            // Check if we can enable lazyLoading
            callback.onSuccess(VersionsUtil.supportLazyLoadMembers(info));
        }
    });
}
/**
 * Invalidate the access token, so that it can no longer be used for authorization,
 * then clear the local application data.
 *
 * @param context  the application context
 * @param callback the success and failure callback
 */
public void logout(final Context context, final ApiCallback<Void> callback) {
    // guard against a double logout: the session is marked dead before any network call
    synchronized (this) {
        if (!mIsAliveSession) {
            Log.e(LOG_TAG, "## logout() was already called");
            return;
        }
        mIsAliveSession = false;
    }

    // Clear crypto data
    // For security and because it will be no more useful as we will get a new device id
    // on the next log in
    enableCrypto(false, null);

    mLoginRestClient.logout(new ApiCallback<JsonObject>() {
        // clears the local data whether the server side logout succeeded or not
        private void clearData() {
            // required else the clear won't be done
            mIsAliveSession = true;

            clear(context, new SimpleApiCallback<Void>() {
                @Override
                public void onSuccess(Void info) {
                    if (null != callback) {
                        callback.onSuccess(null);
                    }
                }
            });
        }

        @Override
        public void onSuccess(JsonObject info) {
            Log.d(LOG_TAG, "## logout() : succeed -> clearing the application data ");
            clearData();
        }

        private void onError(String errorMessage) {
            Log.e(LOG_TAG, "## logout() : failed " + errorMessage);
            clearData();
        }

        @Override
        public void onNetworkError(Exception e) {
            onError(e.getMessage());
        }

        @Override
        public void onMatrixError(MatrixError e) {
            onError(e.getMessage());
        }

        @Override
        public void onUnexpectedError(Exception e) {
            onError(e.getMessage());
        }
    });
}
/**
 * Deactivate the account.
 * On success, the crypto data and the whole local application data are cleared.
 *
 * @param context       the application context
 * @param type          type of authentication
 * @param userPassword  current password
 * @param eraseUserData true to also erase all the user data
 * @param callback      the success and failure callback
 */
public void deactivateAccount(final Context context,
                              final String type,
                              final String userPassword,
                              final boolean eraseUserData,
                              final ApiCallback<Void> callback) {
    mProfileRestClient.deactivateAccount(type, getMyUserId(), userPassword, eraseUserData, new SimpleApiCallback<Void>(callback) {
        @Override
        public void onSuccess(Void info) {
            Log.d(LOG_TAG, "## deactivateAccount() : succeed -> clearing the application data ");

            // Clear crypto data
            // For security and because it will be no more useful as we will get a new device id
            // on the next log in
            enableCrypto(false, null);

            clear(context, new SimpleApiCallback<Void>(callback) {
                @Override
                public void onSuccess(Void info) {
                    if (null != callback) {
                        callback.onSuccess(null);
                    }
                }
            });
        }
    });
}
/**
 * Update the default URL preview status and mirror it into the local store.
 *
 * @param status   true to enable URL previews
 * @param callback the asynchronous callback (may be null)
 */
public void setURLPreviewStatus(final boolean status, final ApiCallback<Void> callback) {
    Map<String, Object> params = new HashMap<>();
    // the account data key stores the "disabled" flag, hence the negation
    params.put(AccountDataRestClient.ACCOUNT_DATA_KEY_URL_PREVIEW_DISABLE, !status);

    Log.d(LOG_TAG, "## setURLPreviewStatus() : status " + status);
    mAccountDataRestClient.setAccountData(getMyUserId(), AccountDataRestClient.ACCOUNT_DATA_TYPE_PREVIEW_URLS, params, new ApiCallback<Void>() {
        @Override
        public void onSuccess(Void info) {
            Log.d(LOG_TAG, "## setURLPreviewStatus() : succeeds");
            // keep the local store in sync with the uploaded account data
            getDataHandler().getStore().setURLPreviewEnabled(status);
            if (null != callback) {
                callback.onSuccess(null);
            }
        }

        @Override
        public void onNetworkError(Exception e) {
            Log.e(LOG_TAG, "## setURLPreviewStatus() : failed " + e.getMessage(), e);
            // callback is nullable: guard against NPE, as onSuccess already does
            if (null != callback) {
                callback.onNetworkError(e);
            }
        }

        @Override
        public void onMatrixError(MatrixError e) {
            Log.e(LOG_TAG, "## setURLPreviewStatus() : failed " + e.getMessage());
            if (null != callback) {
                callback.onMatrixError(e);
            }
        }

        @Override
        public void onUnexpectedError(Exception e) {
            Log.e(LOG_TAG, "## setURLPreviewStatus() : failed " + e.getMessage(), e);
            if (null != callback) {
                callback.onUnexpectedError(e);
            }
        }
    });
}
/**
 * Add user widget to the user Account Data and mirror it into the local store.
 *
 * @param params   the widgets content to upload
 * @param callback the asynchronous callback (may be null)
 */
public void addUserWidget(final Map<String, Object> params, final ApiCallback<Void> callback) {
    Log.d(LOG_TAG, "## addUserWidget()");

    mAccountDataRestClient.setAccountData(getMyUserId(), AccountDataRestClient.ACCOUNT_DATA_TYPE_WIDGETS, params, new ApiCallback<Void>() {
        @Override
        public void onSuccess(Void info) {
            Log.d(LOG_TAG, "## addUserWidget() : succeeds");
            // keep the local store in sync with the uploaded account data
            getDataHandler().getStore().setUserWidgets(params);
            if (null != callback) {
                callback.onSuccess(null);
            }
        }

        @Override
        public void onNetworkError(Exception e) {
            Log.e(LOG_TAG, "## addUserWidget() : failed " + e.getMessage(), e);
            // callback is nullable: guard against NPE, as onSuccess already does
            if (null != callback) {
                callback.onNetworkError(e);
            }
        }

        @Override
        public void onMatrixError(MatrixError e) {
            Log.e(LOG_TAG, "## addUserWidget() : failed " + e.getMessage());
            if (null != callback) {
                callback.onMatrixError(e);
            }
        }

        @Override
        public void onUnexpectedError(Exception e) {
            Log.e(LOG_TAG, "## addUserWidget() : failed " + e.getMessage(), e);
            if (null != callback) {
                callback.onUnexpectedError(e);
            }
        }
    });
}
/**
 * Tells if the global URL preview setting is enabled, as cached in the local store.
 *
 * @return true if it is enabled.
 */
public boolean isURLPreviewEnabled() {
    return getDataHandler().getStore().isURLPreviewEnabled();
}
/**
 * Get the user widgets from the local store copy of the user AccountData.
 *
 * @return the user widgets content
 */
public Map<String, Object> getUserWidgets() {
    return getDataHandler().getStore().getUserWidgets();
}
//==============================================================================================================
// Crypto
//==============================================================================================================
/**
 * The module that manages E2E encryption.
 * Null if the feature is not enabled.
 */
private MXCrypto mCrypto;
/**
 * @return the crypto instance, or null if encryption is not enabled
 */
public MXCrypto getCrypto() {
    return mCrypto;
}
/**
 * @return true if the crypto is enabled (i.e. a crypto instance exists)
 */
public boolean isCryptoEnabled() {
    return null != mCrypto;
}
/**
 * True to enable encryption by default when launching the session.
 * Read by checkCrypto() once the store is ready.
 */
private boolean mEnableCryptoWhenStartingMXSession = false;
/**
 * Enable the crypto when initializing a new session.
 * Only sets a flag; the actual start is performed by checkCrypto().
 */
public void enableCryptoWhenStarting() {
    mEnableCryptoWhenStartingMXSession = true;
}
/**
 * Optional set of parameters used to configure/customize the e2e encryption.
 * Static: shared by every session created in the process.
 */
@Nullable
private static MXCryptoConfig sCryptoConfig;
/**
 * Define the set of parameters used to configure/customize the e2e encryption.
 * This configuration must be set before instantiating the session
 * (it is read when the MXCrypto instance is created).
 *
 * @param cryptoConfig the configuration, or null to use defaults
 */
public static void setCryptoConfig(@Nullable MXCryptoConfig cryptoConfig) {
    sCryptoConfig = cryptoConfig;
}
/**
 * When the encryption is toggled, the room summaries must be updated
 * to display the right messages.
 */
private void decryptRoomSummaries() {
    IMXStore store = getDataHandler().getStore();
    if (null == store) {
        return;
    }

    // re-decrypt the latest event of each room summary
    for (RoomSummary summary : store.getSummaries()) {
        mDataHandler.decryptEvent(summary.getLatestReceivedEvent(), null);
    }
}
/**
 * Check if the crypto engine is properly initialized.
 * Launch it if it was not yet done.
 */
public void checkCrypto() {
    MXFileCryptoStore fileCryptoStore = new MXFileCryptoStore(mEnableFileEncryption);
    fileCryptoStore.initWithCredentials(mAppContent, mCredentials);

    // start the crypto engine only once, and only if there is existing crypto data
    // or if crypto was explicitly requested for new sessions
    if ((fileCryptoStore.hasData() || mEnableCryptoWhenStartingMXSession) && (null == mCrypto)) {
        boolean isStoreLoaded = false;
        try {
            // open the store
            fileCryptoStore.open();
            isStoreLoaded = true;
        } catch (UnsatisfiedLinkError e) {
            Log.e(LOG_TAG, "## checkCrypto() failed " + e.getMessage(), e);
        }

        if (!isStoreLoaded) {
            // load again the olm manager
            // reported by rageshake, it seems that the olm lib is unloaded.
            mOlmManager = new OlmManager();

            try {
                // open the store
                fileCryptoStore.open();
                isStoreLoaded = true;
            } catch (UnsatisfiedLinkError e) {
                Log.e(LOG_TAG, "## checkCrypto() failed 2 " + e.getMessage(), e);
            }
        }

        if (!isStoreLoaded) {
            // give up: crypto cannot work without the native olm library
            Log.e(LOG_TAG, "## checkCrypto() : cannot enable the crypto because of olm lib");
            return;
        }

        mCrypto = new MXCrypto(MXSession.this, fileCryptoStore, sCryptoConfig);
        mDataHandler.setCrypto(mCrypto);
        // the room summaries are not stored with decrypted content
        decryptRoomSummaries();

        Log.d(LOG_TAG, "## checkCrypto() : the crypto engine is ready");
    } else if (mDataHandler.getCrypto() != mCrypto) {
        // keep the data handler in sync with the session's crypto instance
        Log.e(LOG_TAG, "## checkCrypto() : the data handler crypto was not initialized");
        mDataHandler.setCrypto(mCrypto);
    }
}
/**
 * Enable / disable the crypto.
 * Enabling opens (or creates) the crypto store and starts the engine;
 * disabling closes the engine and deletes the crypto store.
 *
 * @param cryptoEnabled true to enable the crypto
 * @param callback      the asynchronous callback called when the action has been done
 */
public void enableCrypto(boolean cryptoEnabled, final ApiCallback<Void> callback) {
    // no-op when the requested state is already the current one
    if (cryptoEnabled != isCryptoEnabled()) {
        if (cryptoEnabled) {
            Log.d(LOG_TAG, "Crypto is enabled");
            MXFileCryptoStore fileCryptoStore = new MXFileCryptoStore(mEnableFileEncryption);
            fileCryptoStore.initWithCredentials(mAppContent, mCredentials);
            fileCryptoStore.open();
            mCrypto = new MXCrypto(this, fileCryptoStore, sCryptoConfig);

            mCrypto.start(true, new SimpleApiCallback<Void>(callback) {
                @Override
                public void onSuccess(Void info) {
                    // refresh the summaries now that events can be decrypted
                    decryptRoomSummaries();
                    if (null != callback) {
                        callback.onSuccess(null);
                    }
                }
            });
        } else if (null != mCrypto) {
            Log.d(LOG_TAG, "Crypto is disabled");
            IMXCryptoStore store = mCrypto.mCryptoStore;
            mCrypto.close();
            // the crypto data is wiped, a new device id will be used on next enable
            store.deleteStore();

            mCrypto = null;
            mDataHandler.setCrypto(null);

            decryptRoomSummaries();

            if (null != callback) {
                callback.onSuccess(null);
            }
        }

        // re-apply the (possibly new, possibly null) crypto instance to the data handler
        mDataHandler.setCrypto(mCrypto);
    } else {
        if (null != callback) {
            callback.onSuccess(null);
        }
    }
}
/**
 * Retrieves the devices list of the current account.
 * Thin delegation to the crypto REST client.
 *
 * @param callback the asynchronous callback
 */
public void getDevicesList(ApiCallback<DevicesListResponse> callback) {
    mCryptoRestClient.getDevices(callback);
}
/**
 * Set a device name.
 * Thin delegation to the crypto REST client.
 *
 * @param deviceId   the device id
 * @param deviceName the device name
 * @param callback   the asynchronous callback
 */
public void setDeviceName(final String deviceId, final String deviceName, final ApiCallback<Void> callback) {
    mCryptoRestClient.setDeviceName(deviceId, deviceName, callback);
}
/**
 * Delete a device.
 * The first request is sent without auth parameters: the server is expected to
 * answer 401 with the list of authentication flows to complete; the deletion is
 * then retried with the password against each supported auth stage.
 *
 * @param deviceId the device id
 * @param password the password
 * @param callback the asynchronous callback.
 */
public void deleteDevice(final String deviceId, final String password, final ApiCallback<Void> callback) {
    mCryptoRestClient.deleteDevice(deviceId, new DeleteDeviceParams(), new SimpleApiCallback<Void>(callback) {
        @Override
        public void onSuccess(Void info) {
            // should never happen
            if (null != callback) {
                callback.onSuccess(null);
            }
        }

        @Override
        public void onMatrixError(MatrixError matrixError) {
            Log.d(LOG_TAG, "## deleteDevice() : onMatrixError " + matrixError.getMessage());
            RegistrationFlowResponse registrationFlowResponse = null;

            // expected status code is 401 (interactive auth required)
            if ((null != matrixError.mStatus) && (matrixError.mStatus == 401)) {
                try {
                    registrationFlowResponse = JsonUtils.toRegistrationFlowResponse(matrixError.mErrorBodyAsString);
                } catch (Exception castExcept) {
                    Log.e(LOG_TAG, "## deleteDevice(): Received status 401 - Exception - JsonUtils.toRegistrationFlowResponse()", castExcept);
                }
            } else {
                Log.d(LOG_TAG, "## deleteDevice(): Received not expected status 401 =" + matrixError.mStatus);
            }

            // collect all the auth stages supported by the returned flows
            List<String> stages = new ArrayList<>();

            // check if the server response can be casted
            if ((null != registrationFlowResponse)
                    && (null != registrationFlowResponse.flows)
                    && !registrationFlowResponse.flows.isEmpty()) {
                for (LoginFlow flow : registrationFlowResponse.flows) {
                    if (null != flow.stages) {
                        stages.addAll(flow.stages);
                    }
                }
            }

            if (!stages.isEmpty()) {
                // a non-empty stages list implies registrationFlowResponse is non null
                DeleteDeviceParams params = new DeleteDeviceParams();
                params.auth = new DeleteDeviceAuth();
                params.auth.session = registrationFlowResponse.session;
                params.auth.user = mCredentials.userId;
                params.auth.password = password;

                Log.d(LOG_TAG, "## deleteDevice() : supported stages " + stages);

                // retry, trying each supported stage in turn
                deleteDevice(deviceId, params, stages, callback);
            } else {
                // no usable auth flow: forward the original error
                if (null != callback) {
                    callback.onMatrixError(matrixError);
                }
            }
        }
    });
}
/**
 * Delete a device, trying each supported auth stage in turn.
 * NOTE: this method consumes {@code stages}: the stage being tried is removed
 * from the list before the request, so the recursion terminates.
 *
 * @param deviceId the device id.
 * @param params   the delete device params
 * @param stages   the remaining auth stages to try (mutated)
 * @param callback the asynchronous callback
 */
private void deleteDevice(final String deviceId, final DeleteDeviceParams params, final List<String> stages, final ApiCallback<Void> callback) {
    // test the first one
    params.auth.type = stages.get(0);
    stages.remove(0);

    mCryptoRestClient.deleteDevice(deviceId, params, new SimpleApiCallback<Void>(callback) {
        @Override
        public void onSuccess(Void info) {
            if (null != callback) {
                callback.onSuccess(null);
            }
        }

        @Override
        public void onMatrixError(MatrixError matrixError) {
            boolean has401Error = (null != matrixError.mStatus) && (matrixError.mStatus == 401);

            // failed, try next flow type if any remains
            if ((has401Error || TextUtils.equals(matrixError.errcode, MatrixError.FORBIDDEN) || TextUtils.equals(matrixError.errcode, MatrixError.UNKNOWN))
                    && !stages.isEmpty()) {
                deleteDevice(deviceId, params, stages, callback);
            } else {
                if (null != callback) {
                    callback.onMatrixError(matrixError);
                }
            }
        }
    });
}
/**
 * Gets a bearer token from the homeserver that the user can
 * present to a third party in order to prove their ownership
 * of the Matrix account they are logged into.
 *
 * @param callback the asynchronous callback called when finished
 */
public void openIdToken(final ApiCallback<Map<Object, Object>> callback) {
    mAccountDataRestClient.openIdToken(getMyUserId(), callback);
}
/**
 * @return the groups manager of this session
 */
public GroupsManager getGroupsManager() {
    return mGroupsManager;
}
/* ==========================================================================================
 * Builder
 * ========================================================================================== */

/**
 * Fluent builder used to create and configure a MXSession.
 */
public static class Builder {
    private MXSession mxSession;

    public Builder(HomeServerConnectionConfig hsConfig, MXDataHandler dataHandler, Context context) {
        mxSession = new MXSession(hsConfig, dataHandler, context);
    }

    /**
     * Enable or disable the file encryption of the session stores.
     *
     * @param enableFileEncryption true to enable file encryption
     * @return this builder, to chain calls
     */
    public Builder withFileEncryption(boolean enableFileEncryption) {
        mxSession.mEnableFileEncryption = enableFileEncryption;
        return this;
    }

    /**
     * Create a pusher rest client, overriding the push server url if necessary
     *
     * @param pushServerUrl the push server url, or null or empty to use the default PushersRestClient
     * @return this builder, to chain calls
     */
    public Builder withPushServerUrl(@Nullable String pushServerUrl) {
        // If not empty, create a special PushersRestClient
        PushersRestClient pushersRestClient = null;

        if (!TextUtils.isEmpty(pushServerUrl)) {
            // pusher uses a custom server
            try {
                HomeServerConnectionConfig alteredHsConfig = new HomeServerConnectionConfig.Builder()
                        .withHomeServerUri(Uri.parse(pushServerUrl))
                        .withCredentials(mxSession.mHsConfig.getCredentials())
                        .build();
                pushersRestClient = new PushersRestClient(alteredHsConfig);
            } catch (Exception e) {
                // on a malformed url, fall back to the default pushers client
                Log.e(LOG_TAG, "## withPushServerUrl() failed " + e.getMessage(), e);
            }
        }

        if (null != pushersRestClient) {
            // Replace the existing client
            mxSession.mPushersRestClient = pushersRestClient;
        }
        return this;
    }

    /**
     * Set the metrics listener of this session
     *
     * @param metricsListener the metrics listener
     * @return this builder, to chain calls
     */
    public Builder withMetricsListener(@Nullable MetricsListener metricsListener) {
        mxSession.mMetricsListener = metricsListener;
        return this;
    }

    /**
     * Build the session
     *
     * @return the built session
     */
    public MXSession build() {
        return mxSession;
    }
}
}
|
matrix-sdk/src/main/java/org/matrix/androidsdk/MXSession.java
|
/*
* Copyright 2014 OpenMarket Ltd
* Copyright 2017 Vector Creations Ltd
* Copyright 2018 New Vector Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.matrix.androidsdk;
import android.content.Context;
import android.content.IntentFilter;
import android.net.ConnectivityManager;
import android.net.Uri;
import android.os.AsyncTask;
import android.os.Handler;
import android.os.Looper;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.text.TextUtils;
import com.google.gson.JsonObject;
import org.matrix.androidsdk.call.MXCallsManager;
import org.matrix.androidsdk.crypto.MXCrypto;
import org.matrix.androidsdk.crypto.MXCryptoConfig;
import org.matrix.androidsdk.data.DataRetriever;
import org.matrix.androidsdk.data.MyUser;
import org.matrix.androidsdk.data.Room;
import org.matrix.androidsdk.data.RoomState;
import org.matrix.androidsdk.data.RoomSummary;
import org.matrix.androidsdk.data.RoomTag;
import org.matrix.androidsdk.data.comparator.RoomComparatorWithTag;
import org.matrix.androidsdk.data.cryptostore.IMXCryptoStore;
import org.matrix.androidsdk.data.cryptostore.MXFileCryptoStore;
import org.matrix.androidsdk.data.metrics.MetricsListener;
import org.matrix.androidsdk.data.store.IMXStore;
import org.matrix.androidsdk.data.store.MXStoreListener;
import org.matrix.androidsdk.db.MXLatestChatMessageCache;
import org.matrix.androidsdk.db.MXMediasCache;
import org.matrix.androidsdk.groups.GroupsManager;
import org.matrix.androidsdk.network.NetworkConnectivityReceiver;
import org.matrix.androidsdk.rest.callback.ApiCallback;
import org.matrix.androidsdk.rest.callback.ApiFailureCallback;
import org.matrix.androidsdk.rest.callback.SimpleApiCallback;
import org.matrix.androidsdk.rest.client.AccountDataRestClient;
import org.matrix.androidsdk.rest.client.CallRestClient;
import org.matrix.androidsdk.rest.client.CryptoRestClient;
import org.matrix.androidsdk.rest.client.EventsRestClient;
import org.matrix.androidsdk.rest.client.FilterRestClient;
import org.matrix.androidsdk.rest.client.GroupsRestClient;
import org.matrix.androidsdk.rest.client.LoginRestClient;
import org.matrix.androidsdk.rest.client.MediaScanRestClient;
import org.matrix.androidsdk.rest.client.PresenceRestClient;
import org.matrix.androidsdk.rest.client.ProfileRestClient;
import org.matrix.androidsdk.rest.client.PushRulesRestClient;
import org.matrix.androidsdk.rest.client.PushersRestClient;
import org.matrix.androidsdk.rest.client.RoomsRestClient;
import org.matrix.androidsdk.rest.client.ThirdPidRestClient;
import org.matrix.androidsdk.rest.model.CreateRoomParams;
import org.matrix.androidsdk.rest.model.CreateRoomResponse;
import org.matrix.androidsdk.rest.model.Event;
import org.matrix.androidsdk.rest.model.MatrixError;
import org.matrix.androidsdk.rest.model.ReceiptData;
import org.matrix.androidsdk.rest.model.RoomDirectoryVisibility;
import org.matrix.androidsdk.rest.model.RoomMember;
import org.matrix.androidsdk.rest.model.User;
import org.matrix.androidsdk.rest.model.Versions;
import org.matrix.androidsdk.rest.model.bingrules.BingRule;
import org.matrix.androidsdk.rest.model.filter.FilterBody;
import org.matrix.androidsdk.rest.model.filter.FilterResponse;
import org.matrix.androidsdk.rest.model.login.Credentials;
import org.matrix.androidsdk.rest.model.login.LoginFlow;
import org.matrix.androidsdk.rest.model.login.RegistrationFlowResponse;
import org.matrix.androidsdk.rest.model.message.MediaMessage;
import org.matrix.androidsdk.rest.model.message.Message;
import org.matrix.androidsdk.rest.model.pid.DeleteDeviceAuth;
import org.matrix.androidsdk.rest.model.pid.DeleteDeviceParams;
import org.matrix.androidsdk.rest.model.search.SearchResponse;
import org.matrix.androidsdk.rest.model.search.SearchUsersResponse;
import org.matrix.androidsdk.rest.model.sync.DevicesListResponse;
import org.matrix.androidsdk.rest.model.sync.RoomResponse;
import org.matrix.androidsdk.sync.DefaultEventsThreadListener;
import org.matrix.androidsdk.sync.EventsThread;
import org.matrix.androidsdk.sync.EventsThreadListener;
import org.matrix.androidsdk.util.BingRulesManager;
import org.matrix.androidsdk.util.ContentManager;
import org.matrix.androidsdk.util.ContentUtils;
import org.matrix.androidsdk.util.FilterUtil;
import org.matrix.androidsdk.util.JsonUtils;
import org.matrix.androidsdk.util.Log;
import org.matrix.androidsdk.util.UnsentEventsManager;
import org.matrix.androidsdk.util.VersionsUtil;
import org.matrix.olm.OlmManager;
import java.io.File;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.regex.Pattern;
/**
* Class that represents one user's session with a particular home server.
* There can potentially be multiple sessions for handling multiple accounts.
*/
public class MXSession {
private static final String LOG_TAG = MXSession.class.getSimpleName();

// core components wired together in the private constructor
private DataRetriever mDataRetriever;
private MXDataHandler mDataHandler;
private EventsThread mEventsThread;
private final Credentials mCredentials;

// Api clients
private EventsRestClient mEventsRestClient;
private ProfileRestClient mProfileRestClient;
private PresenceRestClient mPresenceRestClient;
private RoomsRestClient mRoomsRestClient;
private final PushRulesRestClient mPushRulesRestClient;
private PushersRestClient mPushersRestClient;
private final ThirdPidRestClient mThirdPidRestClient;
private final CallRestClient mCallRestClient;
private final AccountDataRestClient mAccountDataRestClient;
private final CryptoRestClient mCryptoRestClient;
private final LoginRestClient mLoginRestClient;
private final GroupsRestClient mGroupsRestClient;
private final MediaScanRestClient mMediaScanRestClient;
private final FilterRestClient mFilterRestClient;

private ApiFailureCallback mFailureCallback;
private ContentManager mContentManager;
public MXCallsManager mCallsManager;
private MetricsListener mMetricsListener;

// application & network
private Context mAppContent;
private NetworkConnectivityReceiver mNetworkConnectivityReceiver;
private UnsentEventsManager mUnsentEventsManager;

// caches
private MXLatestChatMessageCache mLatestChatMessageCache;
private MXMediasCache mMediasCache;

private BingRulesManager mBingRulesManager = null;

// true until the session is released (see checkIfAlive())
private boolean mIsAliveSession = true;

// online status
private boolean mIsOnline = false;
private int mSyncTimeout = 0;
private int mSyncDelay = 0;

private final HomeServerConnectionConfig mHsConfig;

// True if file encryption is enabled
private boolean mEnableFileEncryption;

// the application is launched from a notification
// so, mEventsThread.start might be not ready
private boolean mIsBgCatchupPending = false;

private FilterBody mCurrentFilter = new FilterBody();

// tell if the data save mode is enabled
private boolean mUseDataSaveMode;

// the groups manager
private GroupsManager mGroupsManager;

// load the crypto libs.
public static OlmManager mOlmManager = new OlmManager();
/**
 * Create a basic session for direct API calls.
 * Only instantiates the REST clients; no data handler, store or event stream.
 *
 * @param hsConfig the home server connection config
 */
private MXSession(HomeServerConnectionConfig hsConfig) {
    mCredentials = hsConfig.getCredentials();
    mHsConfig = hsConfig;

    mEventsRestClient = new EventsRestClient(hsConfig);
    mProfileRestClient = new ProfileRestClient(hsConfig);
    mPresenceRestClient = new PresenceRestClient(hsConfig);
    mRoomsRestClient = new RoomsRestClient(hsConfig);
    mPushRulesRestClient = new PushRulesRestClient(hsConfig);
    mPushersRestClient = new PushersRestClient(hsConfig);
    mThirdPidRestClient = new ThirdPidRestClient(hsConfig);
    mCallRestClient = new CallRestClient(hsConfig);
    mAccountDataRestClient = new AccountDataRestClient(hsConfig);
    mCryptoRestClient = new CryptoRestClient(hsConfig);
    mLoginRestClient = new LoginRestClient(hsConfig);
    mGroupsRestClient = new GroupsRestClient(hsConfig);
    mMediaScanRestClient = new MediaScanRestClient(hsConfig);
    mFilterRestClient = new FilterRestClient(hsConfig);
}
/**
 * Create a user session with a data handler.
 * Private, please use the MxSession.Builder now.
 * Wires the data handler, store listener, network receiver, caches, calls
 * and groups managers together; the wiring order below matters.
 *
 * @param hsConfig    the home server connection config
 * @param dataHandler the data handler
 * @param appContext  the application context
 */
private MXSession(HomeServerConnectionConfig hsConfig, MXDataHandler dataHandler, Context appContext) {
    this(hsConfig);
    mDataHandler = dataHandler;

    mDataHandler.getStore().addMXStoreListener(new MXStoreListener() {
        @Override
        public void onStoreReady(String accountId) {
            Log.d(LOG_TAG, "## onStoreReady()");
            getDataHandler().onStoreReady();
        }

        @Override
        public void onStoreCorrupted(String accountId, String description) {
            Log.d(LOG_TAG, "## onStoreCorrupted() : token " + getDataHandler().getStore().getEventStreamToken());
            // nothing was saved
            if (null == getDataHandler().getStore().getEventStreamToken()) {
                getDataHandler().onStoreReady();
            }
        }

        @Override
        public void postProcess(String accountId) {
            getDataHandler().checkPermanentStorageData();

            // test if the crypto instance has already been created
            if (null == mCrypto) {
                MXFileCryptoStore store = new MXFileCryptoStore(mEnableFileEncryption);
                store.initWithCredentials(mAppContent, mCredentials);

                if (store.hasData() || mEnableCryptoWhenStartingMXSession) {
                    Log.d(LOG_TAG, "## postProcess() : create the crypto instance for session " + this);
                    checkCrypto();
                } else {
                    Log.e(LOG_TAG, "## postProcess() : no crypto data");
                }
            } else {
                Log.e(LOG_TAG, "## postProcess() : mCrypto is already created");
            }
        }

        @Override
        public void onReadReceiptsLoaded(final String roomId) {
            final List<ReceiptData> receipts = mDataHandler.getStore().getEventReceipts(roomId, null, false, false);
            final List<String> senders = new ArrayList<>();

            for (ReceiptData receipt : receipts) {
                senders.add(receipt.userId);
            }

            mDataHandler.onReceiptEvent(roomId, senders);
        }
    });

    // Initialize a data retriever with rest clients
    mDataRetriever = new DataRetriever();
    mDataRetriever.setRoomsRestClient(mRoomsRestClient);
    mDataHandler.setDataRetriever(mDataRetriever);
    mDataHandler.setProfileRestClient(mProfileRestClient);
    mDataHandler.setPresenceRestClient(mPresenceRestClient);
    mDataHandler.setThirdPidRestClient(mThirdPidRestClient);
    mDataHandler.setRoomsRestClient(mRoomsRestClient);
    mDataHandler.setEventsRestClient(mEventsRestClient);
    mDataHandler.setAccountDataRestClient(mAccountDataRestClient);

    // application context
    mAppContent = appContext;

    mNetworkConnectivityReceiver = new NetworkConnectivityReceiver();
    mNetworkConnectivityReceiver.checkNetworkConnection(appContext);
    mDataHandler.setNetworkConnectivityReceiver(mNetworkConnectivityReceiver);
    mAppContent.registerReceiver(mNetworkConnectivityReceiver, new IntentFilter(ConnectivityManager.CONNECTIVITY_ACTION));

    mBingRulesManager = new BingRulesManager(this, mNetworkConnectivityReceiver);
    mDataHandler.setPushRulesManager(mBingRulesManager);

    mUnsentEventsManager = new UnsentEventsManager(mNetworkConnectivityReceiver, mDataHandler);

    mContentManager = new ContentManager(hsConfig, mUnsentEventsManager);

    //
    mCallsManager = new MXCallsManager(this, mAppContent);
    mDataHandler.setCallsManager(mCallsManager);

    // the rest client
    mEventsRestClient.setUnsentEventsManager(mUnsentEventsManager);
    mProfileRestClient.setUnsentEventsManager(mUnsentEventsManager);
    mPresenceRestClient.setUnsentEventsManager(mUnsentEventsManager);
    mRoomsRestClient.setUnsentEventsManager(mUnsentEventsManager);
    mPushRulesRestClient.setUnsentEventsManager(mUnsentEventsManager);
    mThirdPidRestClient.setUnsentEventsManager(mUnsentEventsManager);
    mCallRestClient.setUnsentEventsManager(mUnsentEventsManager);
    mAccountDataRestClient.setUnsentEventsManager(mUnsentEventsManager);
    mCryptoRestClient.setUnsentEventsManager(mUnsentEventsManager);
    mLoginRestClient.setUnsentEventsManager(mUnsentEventsManager);
    mGroupsRestClient.setUnsentEventsManager(mUnsentEventsManager);

    // return the default cache manager
    mLatestChatMessageCache = new MXLatestChatMessageCache(mCredentials.userId);
    mMediasCache = new MXMediasCache(mContentManager, mNetworkConnectivityReceiver, mCredentials.userId, appContext);
    mDataHandler.setMediasCache(mMediasCache);

    mMediaScanRestClient.setMxStore(mDataHandler.getStore());
    mMediasCache.setMediaScanRestClient(mMediaScanRestClient);

    mGroupsManager = new GroupsManager(mDataHandler, mGroupsRestClient);
    mDataHandler.setGroupsManager(mGroupsManager);
}
/**
 * Record an error (with a stack trace) if this session is used after it has been released.
 * The hard assertion is intentionally left disabled: only a log entry is produced.
 */
private void checkIfAlive() {
    final boolean isAlive;
    synchronized (this) {
        isAlive = mIsAliveSession;
    }
    if (!isAlive) {
        // Create an Exception to log the stack trace
        Log.e(LOG_TAG, "Use of a released session", new Exception("Use of a released session"));
        //throw new AssertionError("Should not used a cleared mxsession ");
    }
}
/**
 * Init the user-agent used by the REST requests.
 *
 * @param context the application context
 */
public static void initUserAgent(Context context) {
// plain delegation: the user agent is computed and stored by RestClient
RestClient.initUserAgent(context);
}
/**
 * Provides the lib version.
 *
 * @param longFormat true to have a long format i.e with date and time.
 * @return the SDK version.
 */
public String getVersion(boolean longFormat) {
    checkIfAlive();
    String versionName = BuildConfig.VERSION_NAME;
    if (!TextUtils.isEmpty(versionName)) {
        final String gitVersion = mAppContent.getResources().getString(R.string.git_sdk_revision);
        final StringBuilder builder = new StringBuilder(versionName);
        builder.append(" (").append(gitVersion);
        if (longFormat) {
            // long format also embeds the git revision date
            builder.append("-").append(mAppContent.getResources().getString(R.string.git_sdk_revision_date));
        }
        builder.append(")");
        versionName = builder.toString();
    }
    return versionName;
}
/**
 * Provides the crypto lib version.
 *
 * @param context    the context
 * @param longFormat true to have a long version (with date and time)
 * @return the crypto lib version, or an empty string when the olm manager is not set
 */
public String getCryptoVersion(Context context, boolean longFormat) {
    if (null == mOlmManager) {
        return "";
    }
    if (longFormat) {
        return mOlmManager.getDetailedVersion(context);
    }
    return mOlmManager.getVersion();
}
/**
 * Get the data handler.
 *
 * @return the data handler.
 */
public MXDataHandler getDataHandler() {
// logs a stack trace if the session was already released
checkIfAlive();
return mDataHandler;
}
/**
 * Get the user credentials.
 *
 * @return the credentials
 */
public Credentials getCredentials() {
checkIfAlive();
return mCredentials;
}
/**
 * Get the API client for requests to the events API.
 *
 * @return the events API client
 */
public EventsRestClient getEventsApiClient() {
checkIfAlive();
return mEventsRestClient;
}
/**
 * Get the API client for requests to the profile API.
 *
 * @return the profile API client
 */
public ProfileRestClient getProfileApiClient() {
checkIfAlive();
return mProfileRestClient;
}
/**
 * Get the API client for requests to the presence API.
 *
 * @return the presence API client
 */
public PresenceRestClient getPresenceApiClient() {
checkIfAlive();
return mPresenceRestClient;
}
/**
 * Get the API client for requests to the filter API.
 *
 * @return the filter API client
 */
public FilterRestClient getFilterRestClient() {
checkIfAlive();
return mFilterRestClient;
}
/**
 * Refresh the presence info of a dedicated user.
 * Fetches the remote presence, merges it into the locally stored user (if any)
 * and persists the result before notifying the callback.
 *
 * @param userId   the user userID.
 * @param callback the callback, invoked with null on success; errors are forwarded by SimpleApiCallback.
 */
public void refreshUserPresence(final String userId, final ApiCallback<Void> callback) {
mPresenceRestClient.getPresence(userId, new SimpleApiCallback<User>(callback) {
@Override
public void onSuccess(User user) {
// merge the fresh presence fields into the cached user when one exists,
// otherwise store the server response as-is
User currentUser = mDataHandler.getStore().getUser(userId);
if (null != currentUser) {
currentUser.presence = user.presence;
currentUser.currently_active = user.currently_active;
currentUser.lastActiveAgo = user.lastActiveAgo;
} else {
currentUser = user;
}
// stamp the refresh time so stale presence can be detected later
currentUser.setLatestPresenceTs(System.currentTimeMillis());
mDataHandler.getStore().storeUser(currentUser);
if (null != callback) {
callback.onSuccess(null);
}
}
});
}
/**
 * Get the API client for requests to the bing rules API.
 *
 * @return the bing rules API client
 */
public PushRulesRestClient getBingRulesApiClient() {
checkIfAlive();
return mPushRulesRestClient;
}
/**
 * Get the API client for requests to the third party id API.
 *
 * @return the third party id API client
 */
public ThirdPidRestClient getThirdPidRestClient() {
checkIfAlive();
return mThirdPidRestClient;
}
/**
 * Get the API client for requests to the call API.
 *
 * @return the call API client
 */
public CallRestClient getCallRestClient() {
checkIfAlive();
return mCallRestClient;
}
/**
 * Get the API client for requests to the pushers API.
 *
 * @return the pushers API client
 */
public PushersRestClient getPushersRestClient() {
checkIfAlive();
return mPushersRestClient;
}
/**
 * Get the API client for requests to the crypto API.
 *
 * @return the crypto API client
 */
public CryptoRestClient getCryptoRestClient() {
checkIfAlive();
return mCryptoRestClient;
}
/**
 * Get the homeserver connection configuration of this session.
 *
 * @return the homeserver connection configuration
 */
public HomeServerConnectionConfig getHomeServerConfig() {
checkIfAlive();
return mHsConfig;
}
/**
 * Get the API client for requests to the rooms API.
 *
 * @return the rooms API client
 */
public RoomsRestClient getRoomsApiClient() {
checkIfAlive();
return mRoomsRestClient;
}
/**
 * Get the API client for requests to the media scan (antivirus) API.
 *
 * @return the media scan API client
 */
public MediaScanRestClient getMediaScanRestClient() {
checkIfAlive();
return mMediaScanRestClient;
}
/**
 * Replace the events REST client.
 * NOTE(review): protected visibility — presumably an override/test hook; confirm intended callers.
 *
 * @param eventsRestClient the new events REST client
 */
protected void setEventsApiClient(EventsRestClient eventsRestClient) {
checkIfAlive();
mEventsRestClient = eventsRestClient;
}
/**
 * Replace the profile REST client.
 * NOTE(review): protected visibility — presumably an override/test hook; confirm intended callers.
 *
 * @param profileRestClient the new profile REST client
 */
protected void setProfileApiClient(ProfileRestClient profileRestClient) {
checkIfAlive();
mProfileRestClient = profileRestClient;
}
/**
 * Replace the presence REST client.
 * NOTE(review): protected visibility — presumably an override/test hook; confirm intended callers.
 *
 * @param presenceRestClient the new presence REST client
 */
protected void setPresenceApiClient(PresenceRestClient presenceRestClient) {
checkIfAlive();
mPresenceRestClient = presenceRestClient;
}
/**
 * Replace the rooms REST client.
 * NOTE(review): protected visibility — presumably an override/test hook; confirm intended callers.
 *
 * @param roomsRestClient the new rooms REST client
 */
protected void setRoomsApiClient(RoomsRestClient roomsRestClient) {
checkIfAlive();
mRoomsRestClient = roomsRestClient;
}
/**
 * Get the cache of the latest chat messages.
 *
 * @return the latest chat message cache
 */
public MXLatestChatMessageCache getLatestChatMessageCache() {
checkIfAlive();
return mLatestChatMessageCache;
}
/**
 * Get the medias cache.
 *
 * @return the medias cache
 */
public MXMediasCache getMediasCache() {
checkIfAlive();
return mMediasCache;
}
/**
 * Provides the application caches size.
 * The size is computed on a background thread (directory walk, depth 5,
 * rooted at the parent of the app files dir) and delivered on the UI thread.
 *
 * @param context  the context
 * @param callback the asynchronous callback; receives the size in bytes
 */
public static void getApplicationSizeCaches(final Context context, final ApiCallback<Long> callback) {
AsyncTask<Void, Void, Long> task = new AsyncTask<Void, Void, Long>() {
@Override
protected Long doInBackground(Void... params) {
// depth limit 5 bounds the directory traversal
return ContentUtils.getDirectorySize(context, context.getApplicationContext().getFilesDir().getParentFile(), 5);
}
@Override
protected void onPostExecute(Long result) {
Log.d(LOG_TAG, "## getCacheSize() : " + result);
if (null != callback) {
callback.onSuccess(result);
}
}
};
try {
task.executeOnExecutor(AsyncTask.THREAD_POOL_EXECUTOR);
} catch (final Exception e) {
// executeOnExecutor may throw (e.g. RejectedExecutionException); report on the main thread
Log.e(LOG_TAG, "## getApplicationSizeCaches() : failed " + e.getMessage(), e);
task.cancel(true);
(new android.os.Handler(Looper.getMainLooper())).post(new Runnable() {
@Override
public void run() {
if (null != callback) {
callback.onUnexpectedError(e);
}
}
});
}
}
/**
 * Clear the application cache: data handler, network receiver, unsent events,
 * message/media caches and the crypto module, in that order.
 *
 * @param context the context used to clear the per-user message cache
 */
private void clearApplicationCaches(Context context) {
mDataHandler.clear();
// network event will not be listened anymore
try {
mAppContent.unregisterReceiver(mNetworkConnectivityReceiver);
} catch (Exception e) {
// unregisterReceiver throws if the receiver was never/already unregistered; best-effort
Log.e(LOG_TAG, "## clearApplicationCaches() : unregisterReceiver failed " + e.getMessage(), e);
}
mNetworkConnectivityReceiver.removeListeners();
// auto resent messages will not be resent
mUnsentEventsManager.clear();
mLatestChatMessageCache.clearCache(context);
mMediasCache.clear();
// release the crypto resources when crypto was enabled
if (null != mCrypto) {
mCrypto.close();
}
}
/**
 * Clear the session data synchronously.
 *
 * @param context the context
 */
public void clear(final Context context) {
// null callback makes the clear synchronous (see clear(Context, ApiCallback))
clear(context, null);
}
/**
 * Clear the session data.
 * if the callback is null, the clear is synchronous.
 * Marks the session as released (idempotent: subsequent calls are ignored),
 * stops the event stream, then wipes caches either inline or on a worker thread.
 *
 * @param context  the context
 * @param callback the asynchronous callback
 */
public void clear(final Context context, final ApiCallback<Void> callback) {
synchronized (this) {
// guard against double-clear
if (!mIsAliveSession) {
Log.e(LOG_TAG, "## clear() was already called");
return;
}
mIsAliveSession = false;
}
// stop events stream
stopEventStream();
if (null == callback) {
// synchronous path
clearApplicationCaches(context);
} else {
// clear the caches in a background thread to avoid blocking the UI thread
AsyncTask<Void, Void, Void> task = new AsyncTask<Void, Void, Void>() {
@Override
protected Void doInBackground(Void... params) {
clearApplicationCaches(context);
return null;
}
@Override
protected void onPostExecute(Void args) {
if (null != callback) {
callback.onSuccess(null);
}
}
};
try {
task.executeOnExecutor(AsyncTask.THREAD_POOL_EXECUTOR);
} catch (final Exception e) {
// executor rejection: cancel and report the error on the main thread
Log.e(LOG_TAG, "## clear() failed " + e.getMessage(), e);
task.cancel(true);
(new android.os.Handler(Looper.getMainLooper())).post(new Runnable() {
@Override
public void run() {
if (null != callback) {
callback.onUnexpectedError(e);
}
}
});
}
}
}
/**
 * Remove the medias older than the provided timestamp.
 * Local (not-yet-uploaded) media referenced by room messages/stickers are
 * preserved; old log files are deleted too. The deletion runs on a worker thread.
 *
 * @param context   the context
 * @param timestamp the timestamp (in seconds)
 */
public void removeMediasBefore(final Context context, final long timestamp) {
    // list the files to keep even if they are older than the provided timestamp
    // because their upload failed
    final Set<String> filesToKeep = new HashSet<>();
    IMXStore store = getDataHandler().getStore();
    Collection<Room> rooms = store.getRooms();
    for (Room room : rooms) {
        Collection<Event> events = store.getRoomMessages(room.getRoomId());
        if (null != events) {
            for (Event event : events) {
                try {
                    Message message = null;
                    if (TextUtils.equals(Event.EVENT_TYPE_MESSAGE, event.getType())) {
                        message = JsonUtils.toMessage(event.getContent());
                    } else if (TextUtils.equals(Event.EVENT_TYPE_STICKER, event.getType())) {
                        message = JsonUtils.toStickerMessage(event.getContent());
                    }
                    // instanceof already rejects null, so the former "null != message" test was redundant
                    if (message instanceof MediaMessage) {
                        MediaMessage mediaMessage = (MediaMessage) message;
                        if (mediaMessage.isThumbnailLocalContent()) {
                            filesToKeep.add(Uri.parse(mediaMessage.getThumbnailUrl()).getPath());
                        }
                        if (mediaMessage.isLocalContent()) {
                            filesToKeep.add(Uri.parse(mediaMessage.getUrl()).getPath());
                        }
                    }
                } catch (Exception e) {
                    // a malformed event must not abort the whole scan
                    Log.e(LOG_TAG, "## removeMediasBefore() : failed " + e.getMessage(), e);
                }
            }
        }
    }
    AsyncTask<Void, Void, Void> task = new AsyncTask<Void, Void, Void>() {
        @Override
        protected Void doInBackground(Void... params) {
            long length = getMediasCache().removeMediasBefore(timestamp, filesToKeep);
            // delete also the log files
            // they might be large
            File logsDir = Log.getLogDirectory();
            if (null != logsDir) {
                File[] logFiles = logsDir.listFiles();
                if (null != logFiles) {
                    for (File file : logFiles) {
                        if (ContentUtils.getLastAccessTime(file) < timestamp) {
                            length += file.length();
                            file.delete();
                        }
                    }
                }
            }
            if (0 != length) {
                Log.d(LOG_TAG, "## removeMediasBefore() : save " + android.text.format.Formatter.formatFileSize(context, length));
            } else {
                Log.d(LOG_TAG, "## removeMediasBefore() : useless");
            }
            return null;
        }
    };
    try {
        task.executeOnExecutor(AsyncTask.THREAD_POOL_EXECUTOR);
    } catch (Exception e) {
        Log.e(LOG_TAG, "## removeMediasBefore() : failed " + e.getMessage(), e);
        task.cancel(true);
    }
}
/**
 * @return true if the session is active i.e. has not been cleared after a logout.
 */
public boolean isAlive() {
    final boolean result;
    synchronized (this) {
        result = mIsAliveSession;
    }
    return result;
}
/**
 * Get the content manager (for uploading and downloading content) associated with the session.
 *
 * @return the content manager
 */
public ContentManager getContentManager() {
checkIfAlive();
return mContentManager;
}
/**
 * Get the session's current user. The MyUser object provides methods for updating user properties which are not possible for other users.
 *
 * @return the session's MyUser object
 */
public MyUser getMyUser() {
checkIfAlive();
return mDataHandler.getMyUser();
}
/**
 * Get the session's current userid.
 *
 * @return the session's MyUser id, or null when no MyUser is available
 */
public String getMyUserId() {
    checkIfAlive();
    final MyUser myUser = mDataHandler.getMyUser();
    return (null != myUser) ? myUser.user_id : null;
}
/**
 * Start the event stream (events thread that listens for events) with an event listener.
 * Idempotent: if a live EventsThread already exists, its pending kill is cancelled
 * and the call is ignored (the service may be restarted concurrently by Android and by itself).
 *
 * @param anEventsListener            the event listener or null if using a DataHandler
 * @param networkConnectivityReceiver the network connectivity listener.
 * @param initialToken                the initial sync token (null to start from scratch)
 */
public void startEventStream(final EventsThreadListener anEventsListener,
final NetworkConnectivityReceiver networkConnectivityReceiver,
final String initialToken) {
checkIfAlive();
// reported by a rageshake issue
// startEventStream might be called several times
// when the service is killed and automatically restarted.
// It might be restarted by itself and by android at the same time.
synchronized (LOG_TAG) {
if (mEventsThread != null) {
if (!mEventsThread.isAlive()) {
// thread already dead: drop the stale reference and recreate
mEventsThread = null;
Log.e(LOG_TAG, "startEventStream() : create a new EventsThread");
} else {
// https://github.com/vector-im/riot-android/issues/1331
mEventsThread.cancelKill();
Log.e(LOG_TAG, "Ignoring startEventStream() : Thread already created.");
return;
}
}
if (mDataHandler == null) {
Log.e(LOG_TAG, "Error starting the event stream: No data handler is defined");
return;
}
Log.d(LOG_TAG, "startEventStream : create the event stream");
final EventsThreadListener fEventsListener = (null == anEventsListener) ? new DefaultEventsThreadListener(mDataHandler) : anEventsListener;
mEventsThread = new EventsThread(mAppContent, mEventsRestClient, fEventsListener, initialToken);
// propagate the current sync filter (and data-save/lazy-loading flags) to the new thread
setSyncFilter(mCurrentFilter);
mEventsThread.setMetricsListener(mMetricsListener);
mEventsThread.setNetworkConnectivityReceiver(networkConnectivityReceiver);
mEventsThread.setIsOnline(mIsOnline);
mEventsThread.setServerLongPollTimeout(mSyncTimeout);
mEventsThread.setSyncDelay(mSyncDelay);
if (mFailureCallback != null) {
mEventsThread.setFailureCallback(mFailureCallback);
}
if (mCredentials.accessToken != null && !mEventsThread.isAlive()) {
// GA issue
try {
mEventsThread.start();
} catch (Exception e) {
Log.e(LOG_TAG, "## startEventStream() : mEventsThread.start failed " + e.getMessage(), e);
}
if (mIsBgCatchupPending) {
Log.d(LOG_TAG, "startEventStream : start a catchup");
mIsBgCatchupPending = false;
// catchup retrieve any available messages before stop the sync
mEventsThread.catchup();
}
}
}
}
/**
 * Refresh the access token.
 * Fire-and-forget: the outcome is only logged, no caller callback is invoked.
 */
public void refreshToken() {
    checkIfAlive();
    mProfileRestClient.refreshTokens(new ApiCallback<Credentials>() {
        @Override
        public void onSuccess(Credentials info) {
            Log.d(LOG_TAG, "refreshToken : succeeds.");
        }

        @Override
        public void onNetworkError(Exception e) {
            Log.e(LOG_TAG, "refreshToken : onNetworkError " + e.getMessage(), e);
        }

        @Override
        public void onMatrixError(MatrixError e) {
            Log.e(LOG_TAG, "refreshToken : onMatrixError " + e.getMessage());
        }

        @Override
        public void onUnexpectedError(Exception e) {
            // fixed copy-paste bug: this branch previously logged with the "onMatrixError" label
            Log.e(LOG_TAG, "refreshToken : onUnexpectedError " + e.getMessage(), e);
        }
    });
}
/**
 * Update the online status
 *
 * @param isOnline true if the client must be seen as online
 */
public void setIsOnline(boolean isOnline) {
    // no-op when the status does not change
    if (mIsOnline == isOnline) {
        return;
    }
    mIsOnline = isOnline;
    if (null != mEventsThread) {
        mEventsThread.setIsOnline(isOnline);
    }
}
/**
 * @return true if the client is seen as "online"
 */
public boolean isOnline() {
return mIsOnline;
}
/**
 * Update the heartbeat request timeout.
 * Forwarded to the events thread when it exists.
 *
 * @param ms the delay in ms
 */
public void setSyncTimeout(int ms) {
mSyncTimeout = ms;
if (null != mEventsThread) {
mEventsThread.setServerLongPollTimeout(ms);
}
}
/**
 * @return the heartbeat request timeout (in ms)
 */
public int getSyncTimeout() {
return mSyncTimeout;
}
/**
 * Set a delay between two sync requests.
 * Forwarded to the events thread when it exists.
 *
 * @param ms the delay in ms
 */
public void setSyncDelay(int ms) {
mSyncDelay = ms;
if (null != mEventsThread) {
mEventsThread.setSyncDelay(ms);
}
}
/**
 * @return the delay between two sync requests (in ms).
 */
public int getSyncDelay() {
return mSyncDelay;
}
/**
 * Update the data save mode.
 * The sync filter is re-applied immediately when the event stream is running.
 *
 * @param enabled true to enable the data save mode
 */
public void setUseDataSaveMode(boolean enabled) {
mUseDataSaveMode = enabled;
if (mEventsThread != null) {
setSyncFilter(mCurrentFilter);
}
}
/**
 * Allows setting the filter used by the EventsThread.
 * The filter is augmented with the data-save and lazy-loading flags before
 * being converted to a server-side filterId.
 *
 * @param filter the content of the filter param on sync requests
 */
public synchronized void setSyncFilter(FilterBody filter) {
Log.d(LOG_TAG, "setSyncFilter ## " + filter);
mCurrentFilter = filter;
// Enable Data save mode and/or LazyLoading
FilterUtil.enableDataSaveMode(mCurrentFilter, mUseDataSaveMode);
FilterUtil.enableLazyLoading(mCurrentFilter, mDataHandler.isLazyLoadingEnabled());
convertFilterToFilterId();
}
/**
 * Convert a filter to a filterId
 * Either it is already known to the server, or send the filter to the server to get a filterId.
 * While the upload is in flight, the events thread uses the JSON filter body so syncing is not blocked.
 */
private void convertFilterToFilterId() {
// Ensure mCurrentFilter has not been updated in the same time
final String wantedJsonFilter = mCurrentFilter.toJSONString();
// Check if the current filter is known by the server, to directly use the filterId
String filterId = getDataHandler().getStore().getFilters().get(wantedJsonFilter);
if (TextUtils.isEmpty(filterId)) {
// enable the filter in JSON representation so do not block sync until the filter response is there
mEventsThread.setFilterOrFilterId(wantedJsonFilter);
// Send the filter to the server
mFilterRestClient.uploadFilter(getMyUserId(), mCurrentFilter, new SimpleApiCallback<FilterResponse>() {
@Override
public void onSuccess(FilterResponse filter) {
// Store the couple filter/filterId
getDataHandler().getStore().addFilter(wantedJsonFilter, filter.filterId);
// Ensure the filter is still corresponding to the current filter
// (mCurrentFilter may have been replaced while the upload was in flight)
if (TextUtils.equals(wantedJsonFilter, mCurrentFilter.toJSONString())) {
// Tell the event thread to use the id now
mEventsThread.setFilterOrFilterId(filter.filterId);
}
}
});
} else {
// Tell the event thread to use the id now
mEventsThread.setFilterOrFilterId(filterId);
}
}
/**
 * Refresh the network connection information.
 * On android version older than 6.0, the doze mode might have killed the network connection.
 */
public void refreshNetworkConnection() {
if (null != mNetworkConnectivityReceiver) {
// mNetworkConnectivityReceiver is a broadcastReceiver
// but some users reported that the network updates were not dispatched
mNetworkConnectivityReceiver.checkNetworkConnection(mAppContent);
}
}
/**
 * Shorthand for {@link #startEventStream(EventsThreadListener, NetworkConnectivityReceiver, String)} with no eventListener
 * using a DataHandler and no specific failure callback.
 *
 * @param initialToken the initial sync token (null to sync from scratch).
 */
public void startEventStream(String initialToken) {
checkIfAlive();
// uses the session's own connectivity receiver
startEventStream(null, mNetworkConnectivityReceiver, initialToken);
}
/**
 * Gracefully stop the event stream: stop the TURN server refresh and kill the events thread.
 * Safe to call when the thread was never started (only logged).
 */
public void stopEventStream() {
    if (null != mCallsManager) {
        mCallsManager.stopTurnServerRefresh();
    }
    if (null == mEventsThread) {
        Log.e(LOG_TAG, "stopEventStream : mEventsThread is already null");
        return;
    }
    Log.d(LOG_TAG, "stopEventStream");
    mEventsThread.kill();
    mEventsThread = null;
}
/**
 * Pause the event stream.
 * Also pauses the TURN server refresh, clears the temporary decrypted media cache
 * and notifies the groups manager.
 */
public void pauseEventStream() {
checkIfAlive();
if (null != mCallsManager) {
mCallsManager.pauseTurnServerRefresh();
}
if (null != mEventsThread) {
Log.d(LOG_TAG, "pauseEventStream");
mEventsThread.pause();
} else {
Log.e(LOG_TAG, "pauseEventStream : mEventsThread is null");
}
// decrypted media must not survive a pause
if (null != getMediasCache()) {
getMediasCache().clearTmpDecryptedMediaCache();
}
if (null != mGroupsManager) {
mGroupsManager.onSessionPaused();
}
}
/**
 * @return the current sync token, or null when the event stream is not running
 */
public String getCurrentSyncToken() {
    if (null == mEventsThread) {
        return null;
    }
    return mEventsThread.getCurrentSyncToken();
}
/**
 * Resume the event stream.
 * Re-checks connectivity, unpauses the TURN refresh and the events thread,
 * cancels any pending background catchup, clears the share decrypted media
 * cache and notifies the groups manager.
 */
public void resumeEventStream() {
checkIfAlive();
if (null != mNetworkConnectivityReceiver) {
// mNetworkConnectivityReceiver is a broadcastReceiver
// but some users reported that the network updates were not dispatched
mNetworkConnectivityReceiver.checkNetworkConnection(mAppContent);
}
if (null != mCallsManager) {
mCallsManager.unpauseTurnServerRefresh();
}
if (null != mEventsThread) {
Log.d(LOG_TAG, "## resumeEventStream() : unpause");
mEventsThread.unpause();
} else {
Log.e(LOG_TAG, "resumeEventStream : mEventsThread is null");
}
// a foreground resume supersedes any pending background catchup
if (mIsBgCatchupPending) {
mIsBgCatchupPending = false;
Log.d(LOG_TAG, "## resumeEventStream() : cancel bg sync");
}
if (null != getMediasCache()) {
getMediasCache().clearShareDecryptedMediaCache();
}
if (null != mGroupsManager) {
mGroupsManager.onSessionResumed();
}
}
/**
 * Trigger a catchup.
 * When the events thread does not exist yet, the catchup is deferred until it is created.
 */
public void catchupEventStream() {
    checkIfAlive();
    if (null == mEventsThread) {
        Log.e(LOG_TAG, "catchupEventStream : mEventsThread is null so catchup when the thread will be created");
        mIsBgCatchupPending = true;
    } else {
        Log.d(LOG_TAG, "catchupEventStream");
        mEventsThread.catchup();
    }
}
/**
 * Set a global failure callback implementation.
 * Forwarded to the events thread when it exists.
 *
 * @param failureCallback the failure callback
 */
public void setFailureCallback(ApiFailureCallback failureCallback) {
checkIfAlive();
mFailureCallback = failureCallback;
if (mEventsThread != null) {
mEventsThread.setFailureCallback(failureCallback);
}
}
/**
 * Create a new room with default properties (no name, topic or alias).
 *
 * @param callback the async callback once the room is ready
 */
public void createRoom(final ApiCallback<String> callback) {
createRoom(null, null, null, callback);
}
/**
 * Create a new room with given properties. Needs the data handler.
 * The room is private and guests can join.
 *
 * @param name     the room name
 * @param topic    the room topic
 * @param alias    the room alias
 * @param callback the async callback once the room is ready
 */
public void createRoom(String name, String topic, String alias, final ApiCallback<String> callback) {
createRoom(name, topic, RoomDirectoryVisibility.DIRECTORY_VISIBILITY_PRIVATE, alias, RoomState.GUEST_ACCESS_CAN_JOIN, null, callback);
}
/**
 * Create a new room with given properties. Needs the data handler.
 * Empty strings are normalised to null so they are omitted from the request.
 *
 * @param name        the room name
 * @param topic       the room topic
 * @param visibility  the room visibility
 * @param alias       the room alias
 * @param guestAccess the guest access rule (see {@link RoomState#GUEST_ACCESS_CAN_JOIN} or {@link RoomState#GUEST_ACCESS_FORBIDDEN})
 * @param algorithm   the crypto algorithm (null to create an unencrypted room)
 * @param callback    the async callback once the room is ready
 */
public void createRoom(String name,
                       String topic,
                       String visibility,
                       String alias,
                       String guestAccess,
                       String algorithm,
                       final ApiCallback<String> callback) {
    checkIfAlive();
    final CreateRoomParams params = new CreateRoomParams();
    params.name = TextUtils.isEmpty(name) ? null : name;
    params.topic = TextUtils.isEmpty(topic) ? null : topic;
    params.visibility = TextUtils.isEmpty(visibility) ? null : visibility;
    params.roomAliasName = TextUtils.isEmpty(alias) ? null : alias;
    params.guest_access = TextUtils.isEmpty(guestAccess) ? null : guestAccess;
    params.addCryptoAlgorithm(algorithm);
    createRoom(params, callback);
}
/**
 * Create an encrypted room.
 *
 * @param algorithm the encryption algorithm.
 * @param callback  the async callback once the room is ready
 */
public void createEncryptedRoom(String algorithm, final ApiCallback<String> callback) {
CreateRoomParams params = new CreateRoomParams();
params.addCryptoAlgorithm(algorithm);
createRoom(params, callback);
}
/**
 * Create a direct message room with one participant.<br>
 * The participant can be a user ID or mail address. Once the room is created, on success, the room
 * is set as a "direct message" with the participant.
 *
 * @param aParticipantUserId  user ID (or user mail) to be invited in the direct message room
 * @param aCreateRoomCallBack async call back response
 * @return true if the invite was performed, false otherwise
 */
public boolean createDirectMessageRoom(final String aParticipantUserId, final ApiCallback<String> aCreateRoomCallBack) {
// unencrypted variant (null algorithm)
return createDirectMessageRoom(aParticipantUserId, null, aCreateRoomCallBack);
}
/**
 * Create a direct message room with one participant.<br>
 * The participant can be a user ID or mail address. Once the room is created, on success, the room
 * is set as a "direct message" with the participant.
 *
 * @param aParticipantUserId  user ID (or user mail) to be invited in the direct message room
 * @param algorithm           the crypto algorithm (null to create an unencrypted room)
 * @param aCreateRoomCallBack async call back response
 * @return true if the invite was performed, false otherwise
 */
public boolean createDirectMessageRoom(final String aParticipantUserId, final String algorithm, final ApiCallback<String> aCreateRoomCallBack) {
    // nothing to do without a participant
    if (TextUtils.isEmpty(aParticipantUserId)) {
        return false;
    }
    final CreateRoomParams params = new CreateRoomParams();
    params.addCryptoAlgorithm(algorithm);
    params.setDirectMessage();
    params.addParticipantIds(mHsConfig, Arrays.asList(aParticipantUserId));
    createRoom(params, aCreateRoomCallBack);
    return true;
}
/**
 * Finalise the created room as a direct chat one.
 * Tags the room as direct, marks it as read, then forwards the room id to the callback.
 *
 * @param roomId   the room id
 * @param userId   the user id
 * @param callback the asynchronous callback
 */
private void finalizeDMRoomCreation(final String roomId, String userId, final ApiCallback<String> callback) {
final String fRoomId = roomId;
toggleDirectChatRoom(roomId, userId, new SimpleApiCallback<Void>(callback) {
@Override
public void onSuccess(Void info) {
Room room = getDataHandler().getRoom(fRoomId);
if (null != room) {
// initialise the notification counters
room.markAllAsRead(null);
}
if (null != callback) {
callback.onSuccess(fRoomId);
}
}
});
}
/**
 * Create a new room with given properties.
 * Waits for the room's initial sync when needed, marks it as read, and for direct
 * chats finalises the "direct message" tagging before reporting the room id.
 *
 * @param params   the creation parameters.
 * @param callback the async callback once the room is ready
 */
public void createRoom(final CreateRoomParams params, final ApiCallback<String> callback) {
mRoomsRestClient.createRoom(params, new SimpleApiCallback<CreateRoomResponse>(callback) {
@Override
public void onSuccess(CreateRoomResponse info) {
final String roomId = info.roomId;
final Room createdRoom = mDataHandler.getRoom(roomId);
// the creation events are not be called during the creation
if (!createdRoom.isJoined()) {
// defer the completion until the room initial sync is done
createdRoom.setOnInitialSyncCallback(new SimpleApiCallback<Void>(callback) {
@Override
public void onSuccess(Void info) {
createdRoom.markAllAsRead(null);
if (params.isDirect()) {
finalizeDMRoomCreation(roomId, params.getFirstInvitedUserId(), callback);
} else {
callback.onSuccess(roomId);
}
}
});
} else {
createdRoom.markAllAsRead(null);
if (params.isDirect()) {
finalizeDMRoomCreation(roomId, params.getFirstInvitedUserId(), callback);
} else {
callback.onSuccess(roomId);
}
}
}
});
}
/**
 * Join a room by its roomAlias
 * Waits for the room's initial sync when necessary before reporting success.
 *
 * @param roomIdOrAlias the room alias
 * @param callback      the async callback once the room is joined. The RoomId is provided.
 */
public void joinRoom(String roomIdOrAlias, final ApiCallback<String> callback) {
checkIfAlive();
// sanity check
if ((null != mDataHandler) && (null != roomIdOrAlias)) {
mDataRetriever.getRoomsRestClient().joinRoom(roomIdOrAlias, new SimpleApiCallback<RoomResponse>(callback) {
@Override
public void onSuccess(final RoomResponse roomResponse) {
final String roomId = roomResponse.roomId;
Room joinedRoom = mDataHandler.getRoom(roomId);
// wait until the initial sync is done
if (!joinedRoom.isJoined()) {
joinedRoom.setOnInitialSyncCallback(new SimpleApiCallback<Void>(callback) {
@Override
public void onSuccess(Void info) {
callback.onSuccess(roomId);
}
});
} else {
// to initialise the notification counters
joinedRoom.markAllAsRead(null);
callback.onSuccess(roomId);
}
}
});
}
}
/**
 * Send the read receipts to the latest room messages.
 * An empty or null list completes immediately (success posted on the main thread).
 *
 * @param rooms    the rooms list
 * @param callback the asynchronous callback
 */
public void markRoomsAsRead(final Collection<Room> rooms, final ApiCallback<Void> callback) {
    if ((null != rooms) && !rooms.isEmpty()) {
        // process the rooms one by one
        markRoomsAsRead(rooms.iterator(), callback);
        return;
    }
    if (null != callback) {
        new Handler(Looper.getMainLooper()).post(new Runnable() {
            @Override
            public void run() {
                callback.onSuccess(null);
            }
        });
    }
}
/**
 * Send the read receipts to the latest room messages, one room at a time.
 * Recurses through the iterator; offline rooms only get a local read receipt.
 *
 * @param roomsIterator the rooms list iterator
 * @param callback      the asynchronous callback, invoked once the iterator is exhausted
 */
private void markRoomsAsRead(final Iterator<Room> roomsIterator, final ApiCallback<Void> callback) {
    // typed iterator (was a raw Iterator + cast); callers already supply Iterator<Room>
    if (roomsIterator.hasNext()) {
        Room room = roomsIterator.next();
        boolean isRequestSent = false;
        if (mNetworkConnectivityReceiver.isConnected()) {
            isRequestSent = room.markAllAsRead(new SimpleApiCallback<Void>(callback) {
                @Override
                public void onSuccess(Void anything) {
                    // continue with the next room once this one is acknowledged
                    markRoomsAsRead(roomsIterator, callback);
                }
            });
        } else {
            // update the local data
            room.sendReadReceipt();
        }
        if (!isRequestSent) {
            // nothing pending for this room: move on immediately
            markRoomsAsRead(roomsIterator, callback);
        }
    } else {
        if (null != callback) {
            new Handler(Looper.getMainLooper()).post(new Runnable() {
                @Override
                public void run() {
                    callback.onSuccess(null);
                }
            });
        }
    }
}
/**
 * Retrieve user matrix id from a 3rd party id.
 *
 * @param address  the user id.
 * @param media    the media.
 * @param callback the 3rd party callback
 */
public void lookup3Pid(String address, String media, final ApiCallback<String> callback) {
checkIfAlive();
mThirdPidRestClient.lookup3Pid(address, media, callback);
}
/**
 * Retrieve user matrix ids from a list of 3rd party ids.
 *
 * @param addresses 3rd party ids
 * @param mediums   the medias.
 * @param callback  the 3rd parties callback
 */
public void lookup3Pids(List<String> addresses, List<String> mediums, ApiCallback<List<String>> callback) {
checkIfAlive();
mThirdPidRestClient.lookup3Pids(addresses, mediums, callback);
}
/**
 * Perform a remote text search.
 * No-op when the callback is null.
 *
 * @param text        the text to search for.
 * @param rooms       a list of rooms to search in. nil means all rooms the user is in.
 * @param beforeLimit the number of events to get before the matching results.
 * @param afterLimit  the number of events to get after the matching results.
 * @param nextBatch   the token to pass for doing pagination from a previous response.
 * @param callback    the request callback
 */
public void searchMessageText(String text,
List<String> rooms,
int beforeLimit,
int afterLimit,
String nextBatch,
final ApiCallback<SearchResponse> callback) {
checkIfAlive();
if (null != callback) {
mEventsRestClient.searchMessagesByText(text, rooms, beforeLimit, afterLimit, nextBatch, callback);
}
}
/**
 * Perform a remote text search (no surrounding context events).
 *
 * @param text      the text to search for.
 * @param rooms     a list of rooms to search in. nil means all rooms the user is in.
 * @param nextBatch the token to pass for doing pagination from a previous response.
 * @param callback  the request callback
 */
public void searchMessagesByText(String text, List<String> rooms, String nextBatch, final ApiCallback<SearchResponse> callback) {
    checkIfAlive();
    if (null == callback) {
        // no receiver, nothing to do
        return;
    }
    mEventsRestClient.searchMessagesByText(text, rooms, 0, 0, nextBatch, callback);
}
/**
 * Perform a remote text search over all rooms (no surrounding context events).
 *
 * @param text      the text to search for.
 * @param nextBatch the token to pass for doing pagination from a previous response.
 * @param callback  the request callback
 */
public void searchMessagesByText(String text, String nextBatch, final ApiCallback<SearchResponse> callback) {
    checkIfAlive();
    if (null == callback) {
        // no receiver, nothing to do
        return;
    }
    mEventsRestClient.searchMessagesByText(text, null, 0, 0, nextBatch, callback);
}
/**
 * Cancel any pending search request
 */
public void cancelSearchMessagesByText() {
checkIfAlive();
mEventsRestClient.cancelSearchMessagesByText();
}
/**
 * Perform a remote text search for a dedicated media types list.
 *
 * @param name      the text to search for.
 * @param rooms     a list of rooms to search in. nil means all rooms the user is in.
 * @param nextBatch the token to pass for doing pagination from a previous response.
 * @param callback  the request callback
 */
public void searchMediasByName(String name, List<String> rooms, String nextBatch, final ApiCallback<SearchResponse> callback) {
    checkIfAlive();
    if (null == callback) {
        // no receiver, nothing to do
        return;
    }
    mEventsRestClient.searchMediasByText(name, rooms, 0, 0, nextBatch, callback);
}
/**
 * Cancel any pending file search request
 */
public void cancelSearchMediasByText() {
checkIfAlive();
mEventsRestClient.cancelSearchMediasByText();
}
/**
 * Perform a remote users search by name / user id.
 *
 * @param name          the text to search for.
 * @param limit         the maximum number of items to retrieve (can be null)
 * @param userIdsFilter the user ids filter (can be null)
 * @param callback      the callback
 */
public void searchUsers(String name, Integer limit, Set<String> userIdsFilter, final ApiCallback<SearchUsersResponse> callback) {
    checkIfAlive();
    if (null == callback) {
        // no receiver, nothing to do
        return;
    }
    mEventsRestClient.searchUsers(name, limit, userIdsFilter, callback);
}
/**
 * Cancel any pending user search
 */
public void cancelUsersSearch() {
checkIfAlive();
mEventsRestClient.cancelUsersSearch();
}
/**
 * Return the fulfilled active BingRule for the event.
 *
 * @param event the event
 * @return the fulfilled bingRule
 */
public BingRule fulfillRule(Event event) {
checkIfAlive();
return mBingRulesManager.fulfilledBingRule(event);
}
/**
 * @return true if the calls are supported (false when no calls manager is set)
 */
public boolean isVoipCallSupported() {
    return (null != mCallsManager) && mCallsManager.isSupported();
}
/**
 * Get the list of rooms that are tagged the specified tag.
 * The returned array is ordered according to the room tag order.
 * {@code RoomTag.ROOM_TAG_NO_TAG} selects the rooms having no tag at all (unsorted).
 *
 * @param tag RoomTag.ROOM_TAG_XXX values
 * @return the rooms list.
 */
public List<Room> roomsWithTag(final String tag) {
    final List<Room> taggedRooms = new ArrayList<>();
    // sanity check
    if (null == mDataHandler.getStore()) {
        return taggedRooms;
    }
    final Collection<Room> rooms = mDataHandler.getStore().getRooms();
    if (TextUtils.equals(tag, RoomTag.ROOM_TAG_NO_TAG)) {
        // collect the untagged rooms
        for (Room room : rooms) {
            if (!room.getAccountData().hasTags()) {
                taggedRooms.add(room);
            }
        }
    } else {
        for (Room room : rooms) {
            if (null != room.getAccountData().roomTag(tag)) {
                taggedRooms.add(room);
            }
        }
        if (!taggedRooms.isEmpty()) {
            Collections.sort(taggedRooms, new RoomComparatorWithTag(tag));
        }
    }
    return taggedRooms;
}
/**
 * Get the list of roomIds that are tagged with the specified tag.
 * The returned list is ordered according to the room tag order.
 *
 * @param tag RoomTag.ROOM_TAG_XXX values
 * @return the room IDs list.
 */
public List<String> roomIdsWithTag(final String tag) {
    final List<String> ids = new ArrayList<>();
    // map each tagged room to its identifier, preserving the tag order
    for (Room taggedRoom : roomsWithTag(tag)) {
        ids.add(taggedRoom.getRoomId());
    }
    return ids;
}
/**
 * Compute the tag order to use for a room tag so that the room will appear in the expected position
 * in the list of rooms stamped with this tag.
 * The result is the midpoint between the orders of the two rooms that will surround the target slot.
 *
 * @param index       the targeted index of the room in the list of rooms with the tag `tag`.
 * @param originIndex the origin index. Integer.MAX_VALUE if there is none.
 * @param tag         the tag
 * @return the tag order to apply to get the expected position.
 */
public Double tagOrderToBeAtIndex(int index, int originIndex, String tag) {
    // Algo (and the [0.0, 1.0] assumption) inspired from matrix-react-sdk:
    // We sort rooms by the lexicographic ordering of the 'order' metadata on their tags.
    // For convenience, we calculate this for now a floating point number between 0.0 and 1.0.
    Double orderA = 0.0; // by default we're next to the beginning of the list
    Double orderB = 1.0; // by default we're next to the end of the list too
    List<Room> roomsWithTag = roomsWithTag(tag);
    if (roomsWithTag.size() > 0) {
        // when an object is moved down, the index must be incremented
        // because the object will be removed from the list to be inserted after its destination
        if ((originIndex != Integer.MAX_VALUE) && (originIndex < index)) {
            index++;
        }
        if (index > 0) {
            // Bound max index to the array size
            int prevIndex = (index < roomsWithTag.size()) ? index : roomsWithTag.size();
            // order of the room that will precede the target slot
            RoomTag prevTag = roomsWithTag.get(prevIndex - 1).getAccountData().roomTag(tag);
            if (null == prevTag.mOrder) {
                Log.e(LOG_TAG, "computeTagOrderForRoom: Previous room in sublist has no ordering metadata. This should never happen.");
            } else {
                orderA = prevTag.mOrder;
            }
        }
        if (index <= roomsWithTag.size() - 1) {
            // order of the room that will follow the target slot
            RoomTag nextTag = roomsWithTag.get(index).getAccountData().roomTag(tag);
            if (null == nextTag.mOrder) {
                Log.e(LOG_TAG, "computeTagOrderForRoom: Next room in sublist has no ordering metadata. This should never happen.");
            } else {
                orderB = nextTag.mOrder;
            }
        }
    }
    // midpoint between the two neighbours' orders
    return (orderA + orderB) / 2.0;
}
/**
 * Toggles the direct chat status of a room.<br>
 * Create a new direct chat room in the account data section if the room does not exist,
 * otherwise the room is removed from the account data section.
 * Direct chat room user ID choice algorithm:<br>
 * 1- oldest joined room member
 * 2- oldest invited room member
 * 3- the user himself
 *
 * @param roomId             the room roomId
 * @param aParticipantUserId the participant user id; when null, a participant is picked with the algorithm above
 * @param callback           the asynchronous callback
 */
public void toggleDirectChatRoom(final String roomId, String aParticipantUserId, final ApiCallback<Void> callback) {
    IMXStore store = getDataHandler().getStore();
    Room room = store.getRoom(roomId);
    if (null != room) {
        // if the room was not yet seen as direct chat
        if (!getDataHandler().getDirectChatRoomIdsList().contains(roomId)) {
            if (null == aParticipantUserId) {
                // no participant given: pick one among the active members
                room.getActiveMembersAsync(new SimpleApiCallback<List<RoomMember>>(callback) {
                    @Override
                    public void onSuccess(List<RoomMember> members) {
                        // should never happen but it was reported by a GA issue
                        if (members.isEmpty()) {
                            return;
                        }
                        RoomMember directChatMember = null;
                        if (members.size() > 1) {
                            // sort algo: oldest join first, then oldest invited
                            // NOTE(review): assumes membership is non-null for every member (r2.membership.equals(...)) — TODO confirm
                            Collections.sort(members, new Comparator<RoomMember>() {
                                @Override
                                public int compare(RoomMember r1, RoomMember r2) {
                                    int res;
                                    long diff;
                                    if (RoomMember.MEMBERSHIP_JOIN.equals(r2.membership) && RoomMember.MEMBERSHIP_INVITE.equals(r1.membership)) {
                                        res = 1;
                                    } else if (r2.membership.equals(r1.membership)) {
                                        // same membership: the older origin_server_ts comes first
                                        diff = r1.getOriginServerTs() - r2.getOriginServerTs();
                                        res = (0 == diff) ? 0 : ((diff > 0) ? 1 : -1);
                                    } else {
                                        res = -1;
                                    }
                                    return res;
                                }
                            });
                            int nextIndexSearch = 0;
                            // take the oldest joined member which is not the logged-in user
                            if (!TextUtils.equals(members.get(0).getUserId(), getMyUserId())) {
                                if (RoomMember.MEMBERSHIP_JOIN.equals(members.get(0).membership)) {
                                    directChatMember = members.get(0);
                                }
                            } else {
                                nextIndexSearch = 1;
                                if (RoomMember.MEMBERSHIP_JOIN.equals(members.get(1).membership)) {
                                    directChatMember = members.get(1);
                                }
                            }
                            // no joined member found: fall back to the oldest invited member
                            if (null == directChatMember) {
                                if (RoomMember.MEMBERSHIP_INVITE.equals(members.get(nextIndexSearch).membership)) {
                                    directChatMember = members.get(nextIndexSearch);
                                }
                            }
                        }
                        // last option: get the logged user
                        if (null == directChatMember) {
                            directChatMember = members.get(0);
                        }
                        toggleDirectChatRoomStep2(roomId, directChatMember.getUserId(), callback);
                    }
                });
            } else {
                toggleDirectChatRoomStep2(roomId, aParticipantUserId, callback);
            }
        } else {
            // the room is currently flagged as direct chat: remove it from the map
            Map<String, List<String>> params;
            if (null != store.getDirectChatRoomsDict()) {
                params = new HashMap<>(store.getDirectChatRoomsDict());
            } else {
                params = new HashMap<>();
            }
            // remove the current room from the direct chat list rooms
            if (null != store.getDirectChatRoomsDict()) {
                List<String> keysList = new ArrayList<>(params.keySet());
                for (String key : keysList) {
                    List<String> roomIdsList = params.get(key);
                    if (roomIdsList.contains(roomId)) {
                        roomIdsList.remove(roomId);
                        if (roomIdsList.isEmpty()) {
                            // Remove this entry
                            params.remove(key);
                        }
                    }
                }
            } else {
                // should not happen: if the room has to be removed, it means the room has been
                // previously detected as being part of the listOfList
                Log.e(LOG_TAG, "## toggleDirectChatRoom(): failed to remove a direct chat room (not seen as direct chat room)");
                return;
            }
            // Store and upload the updated map
            getDataHandler().setDirectChatRoomsMap(params, callback);
        }
    }
}
/**
 * Second step of {@link #toggleDirectChatRoom}: flag the room as a direct chat
 * with the chosen user by updating the direct chat rooms map in account data.
 *
 * @param roomId       the room id to flag as a direct chat
 * @param chosenUserId the user id the direct chat is attached to
 * @param callback     the asynchronous callback
 */
private void toggleDirectChatRoomStep2(String roomId,
                                       @NonNull String chosenUserId,
                                       ApiCallback<Void> callback) {
    IMXStore store = getDataHandler().getStore();
    Map<String, List<String>> params;
    // start from a copy of the current map (or an empty one)
    if (null != store.getDirectChatRoomsDict()) {
        params = new HashMap<>(store.getDirectChatRoomsDict());
    } else {
        params = new HashMap<>();
    }
    List<String> roomIdsList = new ArrayList<>();
    // search if there is an entry with the same user
    if (params.containsKey(chosenUserId)) {
        roomIdsList = new ArrayList<>(params.get(chosenUserId));
    }
    roomIdsList.add(roomId); // update room list with the new room
    params.put(chosenUserId, roomIdsList);
    // Store and upload the updated map
    getDataHandler().setDirectChatRoomsMap(params, callback);
}
/**
 * Update the account password.
 * Delegates to the profile REST client for the logged-in user.
 *
 * @param oldPassword the former account password
 * @param newPassword the new account password
 * @param callback    the callback
 */
public void updatePassword(String oldPassword, String newPassword, ApiCallback<Void> callback) {
    mProfileRestClient.updatePassword(getMyUserId(), oldPassword, newPassword, callback);
}
/**
 * Reset the password to a new one, using third-pid credentials as proof of ownership.
 *
 * @param newPassword    the new password
 * @param threepid_creds the three pids credentials.
 * @param callback       the callback
 */
public void resetPassword(final String newPassword, final Map<String, String> threepid_creds, final ApiCallback<Void> callback) {
    mProfileRestClient.resetPassword(newPassword, threepid_creds, callback);
}
/**
 * Triggers a request to replace the ignored-users list in the account data.
 *
 * @param userIds  the userIds to ignore
 * @param callback the callback
 */
private void updateUsers(List<String> userIds, ApiCallback<Void> callback) {
    final Map<String, Object> ignoredUsersDict = new HashMap<>();
    // the account data format maps each ignored user id to a (here empty) content object
    for (String ignoredUserId : userIds) {
        ignoredUsersDict.put(ignoredUserId, new HashMap<>());
    }
    final Map<String, Object> accountDataParams = new HashMap<>();
    accountDataParams.put(AccountDataRestClient.ACCOUNT_DATA_KEY_IGNORED_USERS, ignoredUsersDict);
    mAccountDataRestClient.setAccountData(getMyUserId(), AccountDataRestClient.ACCOUNT_DATA_TYPE_IGNORED_USER_LIST, accountDataParams, callback);
}
/**
 * Tells if a user is in the ignored user ids list.
 *
 * @param userId the user id to test
 * @return true if the user is ignored
 */
public boolean isUserIgnored(String userId) {
    if (userId == null) {
        return false;
    }
    return getDataHandler().getIgnoredUserIds().contains(userId);
}
/**
 * Ignore a list of users.
 * NOTE(review): the callback is only invoked when the list actually changes;
 * when every given user was already ignored (or userIds is null/empty) nothing is
 * triggered and the callback never fires — confirm callers expect this.
 *
 * @param userIds  the user ids list to ignore
 * @param callback the result callback
 */
public void ignoreUsers(List<String> userIds, ApiCallback<Void> callback) {
    List<String> curUserIdsToIgnore = getDataHandler().getIgnoredUserIds();
    List<String> userIdsToIgnore = new ArrayList<>(getDataHandler().getIgnoredUserIds());
    // something to add
    if ((null != userIds) && (userIds.size() > 0)) {
        // add the new ones, skipping duplicates
        for (String userId : userIds) {
            if (userIdsToIgnore.indexOf(userId) < 0) {
                userIdsToIgnore.add(userId);
            }
        }
        // some items have been added: upload the updated list
        if (curUserIdsToIgnore.size() != userIdsToIgnore.size()) {
            updateUsers(userIdsToIgnore, callback);
        }
    }
}
/**
 * Unignore a list of users.
 * NOTE(review): like {@code ignoreUsers}, the callback only fires when the list
 * actually changes; unignoring users that were not ignored triggers nothing.
 *
 * @param userIds  the user ids list to unignore
 * @param callback the result callback
 */
public void unIgnoreUsers(List<String> userIds, ApiCallback<Void> callback) {
    List<String> curUserIdsToIgnore = getDataHandler().getIgnoredUserIds();
    List<String> userIdsToIgnore = new ArrayList<>(getDataHandler().getIgnoredUserIds());
    // something to remove
    if ((null != userIds) && (userIds.size() > 0)) {
        // remove the given users from the working copy
        for (String userId : userIds) {
            userIdsToIgnore.remove(userId);
        }
        // some items have been removed: upload the updated list
        if (curUserIdsToIgnore.size() != userIdsToIgnore.size()) {
            updateUsers(userIdsToIgnore, callback);
        }
    }
}
/**
 * @return the network connectivity receiver attached to this session.
 */
public NetworkConnectivityReceiver getNetworkConnectivityReceiver() {
    return mNetworkConnectivityReceiver;
}
/**
 * Ask the home server if the lazy loading of room members is supported.
 *
 * @param callback the callback, to be notified if the server actually supports the lazy loading. True if supported
 */
public void canEnableLazyLoading(final ApiCallback<Boolean> callback) {
    // Check that the server supports the lazy loading by querying its advertised versions
    mLoginRestClient.getVersions(new SimpleApiCallback<Versions>(callback) {
        @Override
        public void onSuccess(Versions info) {
            // Check if we can enable lazyLoading; errors are forwarded by SimpleApiCallback
            callback.onSuccess(VersionsUtil.supportLazyLoadMembers(info));
        }
    });
}
/**
 * Invalidate the access token, so that it can no longer be used for authorization.
 * The local data is cleared whether the server-side logout succeeds or fails.
 *
 * @param context  the application context
 * @param callback the callback success and failure callback
 */
public void logout(final Context context, final ApiCallback<Void> callback) {
    // make the logout idempotent: only the first call proceeds
    synchronized (this) {
        if (!mIsAliveSession) {
            Log.e(LOG_TAG, "## logout() was already called");
            return;
        }
        mIsAliveSession = false;
    }
    // Clear crypto data
    // For security and because it will be no more useful as we will get a new device id
    // on the next log in
    enableCrypto(false, null);
    mLoginRestClient.logout(new ApiCallback<JsonObject>() {
        private void clearData() {
            // required else the clear won't be done
            mIsAliveSession = true;
            clear(context, new SimpleApiCallback<Void>() {
                @Override
                public void onSuccess(Void info) {
                    if (null != callback) {
                        callback.onSuccess(null);
                    }
                }
            });
        }
        @Override
        public void onSuccess(JsonObject info) {
            Log.d(LOG_TAG, "## logout() : succeed -> clearing the application data ");
            clearData();
        }
        // common error path: the local data is cleared even when the server call fails
        private void onError(String errorMessage) {
            Log.e(LOG_TAG, "## logout() : failed " + errorMessage);
            clearData();
        }
        @Override
        public void onNetworkError(Exception e) {
            onError(e.getMessage());
        }
        @Override
        public void onMatrixError(MatrixError e) {
            onError(e.getMessage());
        }
        @Override
        public void onUnexpectedError(Exception e) {
            onError(e.getMessage());
        }
    });
}
/**
 * Deactivate the account.
 * On success, the crypto data and the application data are cleared locally.
 *
 * @param context       the application context
 * @param type          type of authentication
 * @param userPassword  current password
 * @param eraseUserData true to also erase all the user data
 * @param callback      the success and failure callback
 */
public void deactivateAccount(final Context context,
                              final String type,
                              final String userPassword,
                              final boolean eraseUserData,
                              final ApiCallback<Void> callback) {
    mProfileRestClient.deactivateAccount(type, getMyUserId(), userPassword, eraseUserData, new SimpleApiCallback<Void>(callback) {
        @Override
        public void onSuccess(Void info) {
            Log.d(LOG_TAG, "## deactivateAccount() : succeed -> clearing the application data ");
            // Clear crypto data
            // For security and because it will be no more useful as we will get a new device id
            // on the next log in
            enableCrypto(false, null);
            clear(context, new SimpleApiCallback<Void>(callback) {
                @Override
                public void onSuccess(Void info) {
                    if (null != callback) {
                        callback.onSuccess(null);
                    }
                }
            });
        }
    });
}
/**
 * Update the URL preview status by default.
 * On success the new status is also persisted in the local store.
 *
 * @param status   true to enable URL previews by default
 * @param callback the asynchronous callback (may be null)
 */
public void setURLPreviewStatus(final boolean status, final ApiCallback<Void> callback) {
    Map<String, Object> params = new HashMap<>();
    // the account data key stores the "disabled" flag, hence the negation
    params.put(AccountDataRestClient.ACCOUNT_DATA_KEY_URL_PREVIEW_DISABLE, !status);
    Log.d(LOG_TAG, "## setURLPreviewStatus() : status " + status);
    mAccountDataRestClient.setAccountData(getMyUserId(), AccountDataRestClient.ACCOUNT_DATA_TYPE_PREVIEW_URLS, params, new ApiCallback<Void>() {
        @Override
        public void onSuccess(Void info) {
            Log.d(LOG_TAG, "## setURLPreviewStatus() : succeeds");
            getDataHandler().getStore().setURLPreviewEnabled(status);
            if (null != callback) {
                callback.onSuccess(null);
            }
        }
        @Override
        public void onNetworkError(Exception e) {
            Log.e(LOG_TAG, "## setURLPreviewStatus() : failed " + e.getMessage(), e);
            // null-guard the callback, as the success path does (avoids a NPE for callers passing null)
            if (null != callback) {
                callback.onNetworkError(e);
            }
        }
        @Override
        public void onMatrixError(MatrixError e) {
            Log.e(LOG_TAG, "## setURLPreviewStatus() : failed " + e.getMessage());
            if (null != callback) {
                callback.onMatrixError(e);
            }
        }
        @Override
        public void onUnexpectedError(Exception e) {
            Log.e(LOG_TAG, "## setURLPreviewStatus() : failed " + e.getMessage(), e);
            if (null != callback) {
                callback.onUnexpectedError(e);
            }
        }
    });
}
/**
 * Add user widget to the user Account Data.
 * On success the widgets map is also persisted in the local store.
 *
 * @param params   the widgets content to store in the account data
 * @param callback the asynchronous callback (may be null)
 */
public void addUserWidget(final Map<String, Object> params, final ApiCallback<Void> callback) {
    Log.d(LOG_TAG, "## addUserWidget()");
    mAccountDataRestClient.setAccountData(getMyUserId(), AccountDataRestClient.ACCOUNT_DATA_TYPE_WIDGETS, params, new ApiCallback<Void>() {
        @Override
        public void onSuccess(Void info) {
            Log.d(LOG_TAG, "## addUserWidget() : succeeds");
            getDataHandler().getStore().setUserWidgets(params);
            if (null != callback) {
                callback.onSuccess(null);
            }
        }
        @Override
        public void onNetworkError(Exception e) {
            Log.e(LOG_TAG, "## addUserWidget() : failed " + e.getMessage(), e);
            // null-guard the callback, as the success path does (avoids a NPE for callers passing null)
            if (null != callback) {
                callback.onNetworkError(e);
            }
        }
        @Override
        public void onMatrixError(MatrixError e) {
            Log.e(LOG_TAG, "## addUserWidget() : failed " + e.getMessage());
            if (null != callback) {
                callback.onMatrixError(e);
            }
        }
        @Override
        public void onUnexpectedError(Exception e) {
            Log.e(LOG_TAG, "## addUserWidget() : failed " + e.getMessage(), e);
            if (null != callback) {
                callback.onUnexpectedError(e);
            }
        }
    });
}
/**
 * Tells if the global URL preview setting is enabled (read from the local store).
 *
 * @return true if it is enabled.
 */
public boolean isURLPreviewEnabled() {
    return getDataHandler().getStore().isURLPreviewEnabled();
}
/**
 * Get user widgets from the user AccountData (read from the local store).
 *
 * @return the user widgets map
 */
public Map<String, Object> getUserWidgets() {
    return getDataHandler().getStore().getUserWidgets();
}
//==============================================================================================================
// Crypto
//==============================================================================================================
/**
 * The module that manages E2E encryption.
 * Null if the feature is not enabled.
 */
private MXCrypto mCrypto;
/**
 * @return the crypto instance, null when crypto is disabled
 */
public MXCrypto getCrypto() {
    return mCrypto;
}
/**
 * @return true if the crypto is enabled (i.e. a crypto module exists)
 */
public boolean isCryptoEnabled() {
    return null != mCrypto;
}
/**
 * Enable encryption by default when launching the session.
 */
private boolean mEnableCryptoWhenStartingMXSession = false;
/**
 * Enable the crypto when initializing a new session.
 */
public void enableCryptoWhenStarting() {
    mEnableCryptoWhenStartingMXSession = true;
}
/**
 * Optional set of parameters used to configure/customize the e2e encryption.
 */
@Nullable
private static MXCryptoConfig sCryptoConfig;
/**
 * Define the set of parameters used to configure/customize the e2e encryption.
 * This configuration must be set before instantiating the session.
 *
 * @param cryptoConfig the crypto configuration, or null to reset it
 */
public static void setCryptoConfig(@Nullable MXCryptoConfig cryptoConfig) {
    sCryptoConfig = cryptoConfig;
}
/**
 * When the encryption is toggled, the room summaries must be updated
 * to display the right messages: re-decrypt the latest event of every summary.
 */
private void decryptRoomSummaries() {
    if (null != getDataHandler().getStore()) {
        Collection<RoomSummary> summaries = getDataHandler().getStore().getSummaries();
        for (RoomSummary summary : summaries) {
            mDataHandler.decryptEvent(summary.getLatestReceivedEvent(), null);
        }
    }
}
/**
 * Check if the crypto engine is properly initialized.
 * Launch it if it was not yet done.
 */
public void checkCrypto() {
    MXFileCryptoStore fileCryptoStore = new MXFileCryptoStore(mEnableFileEncryption);
    fileCryptoStore.initWithCredentials(mAppContent, mCredentials);
    // start the crypto when there is existing crypto data, or when it was requested at session start
    if ((fileCryptoStore.hasData() || mEnableCryptoWhenStartingMXSession) && (null == mCrypto)) {
        boolean isStoreLoaded = false;
        try {
            // open the store
            fileCryptoStore.open();
            isStoreLoaded = true;
        } catch (UnsatisfiedLinkError e) {
            Log.e(LOG_TAG, "## checkCrypto() failed " + e.getMessage(), e);
        }
        if (!isStoreLoaded) {
            // load again the olm manager
            // reported by rageshake, it seems that the olm lib is unloaded.
            mOlmManager = new OlmManager();
            try {
                // open the store (second attempt, after reloading the native lib)
                fileCryptoStore.open();
                isStoreLoaded = true;
            } catch (UnsatisfiedLinkError e) {
                Log.e(LOG_TAG, "## checkCrypto() failed 2 " + e.getMessage(), e);
            }
        }
        if (!isStoreLoaded) {
            Log.e(LOG_TAG, "## checkCrypto() : cannot enable the crypto because of olm lib");
            return;
        }
        mCrypto = new MXCrypto(MXSession.this, fileCryptoStore, sCryptoConfig);
        mDataHandler.setCrypto(mCrypto);
        // the room summaries are not stored with decrypted content
        decryptRoomSummaries();
        Log.d(LOG_TAG, "## checkCrypto() : the crypto engine is ready");
    } else if (mDataHandler.getCrypto() != mCrypto) {
        // repair an inconsistent data handler state
        Log.e(LOG_TAG, "## checkCrypto() : the data handler crypto was not initialized");
        mDataHandler.setCrypto(mCrypto);
    }
}
/**
 * Enable / disable the crypto.
 * Disabling deletes the crypto store content.
 *
 * @param cryptoEnabled true to enable the crypto
 * @param callback      the asynchronous callback called when the action has been done
 */
public void enableCrypto(boolean cryptoEnabled, final ApiCallback<Void> callback) {
    if (cryptoEnabled != isCryptoEnabled()) {
        if (cryptoEnabled) {
            Log.d(LOG_TAG, "Crypto is enabled");
            MXFileCryptoStore fileCryptoStore = new MXFileCryptoStore(mEnableFileEncryption);
            fileCryptoStore.initWithCredentials(mAppContent, mCredentials);
            fileCryptoStore.open();
            mCrypto = new MXCrypto(this, fileCryptoStore, sCryptoConfig);
            mCrypto.start(true, new SimpleApiCallback<Void>(callback) {
                @Override
                public void onSuccess(Void info) {
                    // refresh the room summaries once the engine is started
                    decryptRoomSummaries();
                    if (null != callback) {
                        callback.onSuccess(null);
                    }
                }
            });
        } else if (null != mCrypto) {
            Log.d(LOG_TAG, "Crypto is disabled");
            IMXCryptoStore store = mCrypto.mCryptoStore;
            mCrypto.close();
            // wipe the crypto data: a new device id will be used on the next login
            store.deleteStore();
            mCrypto = null;
            mDataHandler.setCrypto(null);
            decryptRoomSummaries();
            if (null != callback) {
                callback.onSuccess(null);
            }
        }
        // keep the data handler in sync with the current crypto module
        mDataHandler.setCrypto(mCrypto);
    } else {
        // nothing to do: already in the requested state
        if (null != callback) {
            callback.onSuccess(null);
        }
    }
}
/**
 * Retrieves the devices list from the homeserver.
 *
 * @param callback the asynchronous callback
 */
public void getDevicesList(ApiCallback<DevicesListResponse> callback) {
    mCryptoRestClient.getDevices(callback);
}
/**
 * Set a device name.
 *
 * @param deviceId   the device id
 * @param deviceName the device name
 * @param callback   the asynchronous callback
 */
public void setDeviceName(final String deviceId, final String deviceName, final ApiCallback<Void> callback) {
    mCryptoRestClient.setDeviceName(deviceId, deviceName, callback);
}
/**
 * Delete a device.
 * A first unauthenticated request is expected to fail with a 401 carrying the
 * interactive-authentication flows; the supported stages are then retried with
 * the user's password.
 *
 * @param deviceId the device id
 * @param password the password
 * @param callback the asynchronous callback.
 */
public void deleteDevice(final String deviceId, final String password, final ApiCallback<Void> callback) {
    mCryptoRestClient.deleteDevice(deviceId, new DeleteDeviceParams(), new SimpleApiCallback<Void>(callback) {
        @Override
        public void onSuccess(Void info) {
            // should never happen: the server is expected to request authentication first
            if (null != callback) {
                callback.onSuccess(null);
            }
        }
        @Override
        public void onMatrixError(MatrixError matrixError) {
            Log.d(LOG_TAG, "## deleteDevice() : onMatrixError " + matrixError.getMessage());
            RegistrationFlowResponse registrationFlowResponse = null;
            // expected status code is 401
            if ((null != matrixError.mStatus) && (matrixError.mStatus == 401)) {
                try {
                    registrationFlowResponse = JsonUtils.toRegistrationFlowResponse(matrixError.mErrorBodyAsString);
                } catch (Exception castExcept) {
                    Log.e(LOG_TAG, "## deleteDevice(): Received status 401 - Exception - JsonUtils.toRegistrationFlowResponse()", castExcept);
                }
            } else {
                Log.d(LOG_TAG, "## deleteDevice(): Received not expected status 401 =" + matrixError.mStatus);
            }
            List<String> stages = new ArrayList<>();
            // check if the server response can be casted
            if ((null != registrationFlowResponse)
                    && (null != registrationFlowResponse.flows)
                    && !registrationFlowResponse.flows.isEmpty()) {
                for (LoginFlow flow : registrationFlowResponse.flows) {
                    if (null != flow.stages) {
                        stages.addAll(flow.stages);
                    }
                }
            }
            if (!stages.isEmpty()) {
                // retry with password auth, trying each supported stage in turn
                DeleteDeviceParams params = new DeleteDeviceParams();
                params.auth = new DeleteDeviceAuth();
                params.auth.session = registrationFlowResponse.session;
                params.auth.user = mCredentials.userId;
                params.auth.password = password;
                Log.d(LOG_TAG, "## deleteDevice() : supported stages " + stages);
                deleteDevice(deviceId, params, stages, callback);
            } else {
                // no usable auth flow: forward the original error
                if (null != callback) {
                    callback.onMatrixError(matrixError);
                }
            }
        }
    });
}
/**
 * Delete a device, trying the remaining authentication stages in turn.
 * Consumes the first stage of {@code stages}; recurses with the next one on
 * a retriable failure (401 / FORBIDDEN / UNKNOWN).
 *
 * @param deviceId the device id.
 * @param params   the delete device params
 * @param stages   the supported stages (mutated: the tried stage is removed)
 * @param callback the asynchronous callback
 */
private void deleteDevice(final String deviceId, final DeleteDeviceParams params, final List<String> stages, final ApiCallback<Void> callback) {
    // test the first one
    params.auth.type = stages.get(0);
    stages.remove(0);
    mCryptoRestClient.deleteDevice(deviceId, params, new SimpleApiCallback<Void>(callback) {
        @Override
        public void onSuccess(Void info) {
            if (null != callback) {
                callback.onSuccess(null);
            }
        }
        @Override
        public void onMatrixError(MatrixError matrixError) {
            boolean has401Error = (null != matrixError.mStatus) && (matrixError.mStatus == 401);
            // failed, try next flow type
            if ((has401Error || TextUtils.equals(matrixError.errcode, MatrixError.FORBIDDEN) || TextUtils.equals(matrixError.errcode, MatrixError.UNKNOWN))
                    && !stages.isEmpty()) {
                deleteDevice(deviceId, params, stages, callback);
            } else {
                // no more stages to try: forward the error
                if (null != callback) {
                    callback.onMatrixError(matrixError);
                }
            }
        }
    });
}
/**
 * Gets a bearer token from the homeserver that the user can
 * present to a third party in order to prove their ownership
 * of the Matrix account they are logged into.
 *
 * @param callback the asynchronous callback called when finished
 */
public void openIdToken(final ApiCallback<Map<Object, Object>> callback) {
    mAccountDataRestClient.openIdToken(getMyUserId(), callback);
}
/**
 * @return the groups manager of this session
 */
public GroupsManager getGroupsManager() {
    return mGroupsManager;
}
/* ==========================================================================================
* Builder
* ========================================================================================== */
/**
 * Builder for {@link MXSession}, allowing optional features to be configured
 * before the session is returned.
 */
public static class Builder {
    // the session under construction
    private MXSession mxSession;
    /**
     * @param hsConfig    the homeserver connection configuration
     * @param dataHandler the data handler
     * @param context     the application context
     */
    public Builder(HomeServerConnectionConfig hsConfig, MXDataHandler dataHandler, Context context) {
        mxSession = new MXSession(hsConfig, dataHandler, context);
    }
    /**
     * Enable or disable the file encryption of the session stores.
     *
     * @param enableFileEncryption true to encrypt the files
     * @return this builder, to chain calls
     */
    public Builder withFileEncryption(boolean enableFileEncryption) {
        mxSession.mEnableFileEncryption = enableFileEncryption;
        return this;
    }
    /**
     * Create a pusher rest client, overriding the push server url if necessary
     *
     * @param pushServerUrl the push server url, or null or empty to use the default PushersRestClient
     * @return this builder, to chain calls
     */
    public Builder withPushServerUrl(@Nullable String pushServerUrl) {
        // If not empty, create a special PushersRestClient
        PushersRestClient pushersRestClient = null;
        if (!TextUtils.isEmpty(pushServerUrl)) {
            // pusher uses a custom server
            try {
                HomeServerConnectionConfig alteredHsConfig = new HomeServerConnectionConfig.Builder()
                        .withHomeServerUri(Uri.parse(pushServerUrl))
                        .withCredentials(mxSession.mHsConfig.getCredentials())
                        .build();
                pushersRestClient = new PushersRestClient(alteredHsConfig);
            } catch (Exception e) {
                // keep the default client on any failure (e.g. malformed url)
                Log.e(LOG_TAG, "## withPushServerUrl() failed " + e.getMessage(), e);
            }
        }
        if (null != pushersRestClient) {
            // Replace the existing client
            mxSession.mPushersRestClient = pushersRestClient;
        }
        return this;
    }
    /**
     * Set the metrics listener of this session
     *
     * @param metricsListener the metrics listener
     * @return this builder, to chain calls
     */
    public Builder withMetricsListener(@Nullable MetricsListener metricsListener) {
        mxSession.mMetricsListener = metricsListener;
        return this;
    }
    /**
     * Build the session
     *
     * @return the built session
     */
    public MXSession build() {
        return mxSession;
    }
}
}
|
Ganfra review: split long method
|
matrix-sdk/src/main/java/org/matrix/androidsdk/MXSession.java
|
Ganfra review: split long method
|
|
Java
|
apache-2.0
|
fc991aae722497eb0f0659d8d42659911fb22486
| 0
|
opensingular/singular-core,opensingular/singular-core,opensingular/singular-core,opensingular/singular-core
|
/*
* Copyright (C) 2016 Singular Studios (a.k.a Atom Tecnologia) - www.opensingular.com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.opensingular.lib.wicket.util.resource;
/**
 * Catalog of the icons used by the UI, each mapped to its CSS class.
 * Two prefixes appear: "fa fa-*" (presumably Font Awesome — TODO confirm) and
 * "icon-*" (presumably Simple Line Icons — TODO confirm).
 */
public enum Icone implements SingularIcon {
    //@formatter:off
    ARROW_DOWN("fa fa-arrow-down"),
    BAN("icon-ban"),
    BRIEFCASE("icon-briefcase"),
    BUG("fa fa-bug"),
    CALENDAR("icon-calendar"),
    CAMERA("icon-camera"),
    CARET_SQUARE("fa fa-caret-square-o-up"),
    CHAIN("fa fa-chain"),
    CHECK("fa fa-check"),
    CHECK_CIRCLE("fa-check-circle-o"),
    COGS("fa fa-cogs"),
    COMMENT("fa fa-comment"),
    CREDIT_CARD("icon-credit-card"),
    CUP("icon-cup"),
    DIRECTIONS("icon-directions"),
    EXTERNAL_LINK("fa fa-external-link"),
    EYE("icon-eye"),
    FILE_POWERPOINT("fa fa-file-powerpoint-o"),
    FILE_PDF("fa fa-file-pdf-o"),
    FILE_TEXT("fa fa-file-text"),
    GIFT("fa fa-gift"),
    GLOBE("fa fa-globe"),
    GRID("icon-grid"),
    HEART("fa fa-heart"),
    HISTORY("fa fa-history"),
    HOME("icon-home"),
    HOTEL("fa fa-h-square"),
    HOURGLASS("icon-hourglass"),
    INFO_CIRCLE("fa fa-info-circle"),
    LIST("fa fa-list"),
    LIST_ALT("fa fa-list-alt"),
    LOCK("fa fa-lock"),
    MAP_MARKER("fa fa-map-marker"),
    MINUS("fa fa-minus"),
    MONEY("fa fa-money"),
    PENCIL_SQUARE("fa fa-pencil-square-o"),
    PENCIL("fa fa-pencil"),
    PIN("icon-pin"),
    PIE("icon-pie-chart"),
    PLUS("fa fa-plus"),
    ROCKET("icon-rocket"),
    REDO("icon-action-redo"),
    REMOVE("fa fa-remove"),
    SHARE_ALT("fa fa-share-alt"),
    SHARE_SQUARE("fa fa-share-square-o"),
    STAR("icon-star"),
    SPEECH("icon-speech"),
    SPEEDOMETER("icon-speedometer"),
    TAG("icon-tag"),
    TAGS("fa fa-tags"),
    TIMES("fa fa-times"),
    TRASH("fa fa-trash-o "),
    UNDO("icon-action-undo"),
    USER("fa fa-user"),
    USERS("icon-users"),
    USERS3("fa fa-users"),
    VERTICAL_ELLIPSIS("fa fa-ellipsis-v"),
    WALLET("icon-wallet"),
    PUZZLE("icon-puzzle"),
    FOLDER("icon-folder"),
    WRENCH("icon-wrench"),
    MAP("icon-map"),
    NOTE("icon-note"),
    DOCS("icon-docs"),
    CLOCK("icon-clock"),
    LAYERS("icon-layers"),
    CODE("fa fa-code"),
    HAND_UP("fa fa-hand-o-up"),
    DASHBOARD("fa fa-dashboard"),
    EXCLAMATION_TRIANGLE("fa fa-exclamation-triangle"),
    MAGIC("fa fa-magic"),
    CLONE("fa fa-clone"),
    UPLOAD("fa fa-upload"),
    BARCODE("fa fa-barcode"),
    CALENDAR_PLUS_O("fa fa-calendar-plus-o"),
    RECYCLE("fa fa-recycle"),
    SEND_O("fa fa-send-o"),
    COUNTRY("icon-globe"),
    INBOX("fa fa-inbox"),
    NEWSPAPER("fa fa-newspaper-o"),
    WARNING("fa fa-warning"),
    TASKS("fa fa-tasks"),
    SEARCH("fa fa-search"),
    SITEMAP("fa fa-sitemap")
    ;
    // the CSS class(es) rendered for this icon
    private final String cssClass;
    Icone(String cssClass) {
        this.cssClass = cssClass;
    }
    @Override
    public String getCssClass() {
        return cssClass;
    }
    /**
     * @return the CSS class, so the enum can be used directly in markup building
     */
    @Override
    public String toString() {
        return getCssClass();
    }
}
|
lib/wicket-utils/src/main/java/org/opensingular/lib/wicket/util/resource/Icone.java
|
/*
* Copyright (C) 2016 Singular Studios (a.k.a Atom Tecnologia) - www.opensingular.com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.opensingular.lib.wicket.util.resource;
public enum Icone implements SingularIcon {
//@formatter:off
ARROW_DOWN("fa fa-arrow-down"),
BAN("icon-ban"),
BRIEFCASE("icon-briefcase"),
BUG("fa fa-bug"),
CALENDAR("icon-calendar"),
CAMERA("icon-camera"),
CARET_SQUARE("fa fa-caret-square-o-up"),
CHAIN("fa fa-chain"),
CHECK("fa fa-check"),
CHECK_CIRCLE("fa-check-circle-o"),
COGS("fa fa-cogs"),
COMMENT("fa fa-comment"),
CREDIT_CARD("icon-credit-card"),
CUP("icon-cup"),
DIRECTIONS("icon-directions"),
EXTERNAL_LINK("fa fa-external-link"),
EYE("icon-eye"),
FILE_POWERPOINT("fa fa-file-powerpoint-o"),
FILE_PDF("fa fa-file-pdf-o"),
FILE_TEXT("fa fa-file-text"),
GIFT("fa fa-gift"),
GLOBE("fa fa-globe"),
GRID("icon-grid"),
HEART("fa fa-heart"),
HISTORY("fa fa-history"),
HOME("icon-home"),
HOTEL("fa fa-h-square"),
HOURGLASS("icon-hourglass"),
INFO_CIRCLE("fa fa-info-circle"),
LIST("fa fa-list"),
LIST_ALT("fa fa-list-alt"),
LOCK("fa fa-lock"),
MAP_MARKER("fa fa-map-marker"),
MINUS("fa fa-minus"),
MONEY("fa fa-money"),
PENCIL_SQUARE("fa fa-pencil-square-o"),
PENCIL("fa fa-pencil"),
PIN("icon-pin"),
PIE("icon-pie-chart"),
PLUS("fa fa-plus"),
ROCKET("icon-rocket"),
REDO("icon-action-redo"),
REMOVE("fa fa-remove"),
SHARE_ALT("fa fa-share-alt"),
SHARE_SQUARE("fa fa-share-square-o"),
STAR("icon-star"),
SPEECH("icon-speech"),
SPEEDOMETER("icon-speedometer"),
TAG("icon-tag"),
TAGS("fa fa-tags"),
TIMES("fa fa-times"),
TRASH("fa fa-trash-o "),
UNDO("icon-action-undo"),
USER("fa fa-user"),
USERS("icon-users"),
USERS3("fa fa-users"),
VERTICAL_ELLIPSIS("fa fa-ellipsis-v"),
WALLET("icon-wallet"),
PUZZLE("icon-puzzle"),
FOLDER("icon-folder"),
WRENCH("icon-wrench"),
MAP("icon-map"),
NOTE("icon-note"),
DOCS("icon-docs"),
CLOCK("icon-clock"),
LAYERS("icon-layers"),
CODE("fa fa-code"),
HAND_UP("fa fa-hand-o-up"),
DASHBOARD("fa fa-dashboard"),
EXCLAMATION_TRIANGLE("fa fa-exclamation-triangle"),
MAGIC("fa fa-magic"),
CLONE("fa fa-clone"),
UPLOAD("fa fa-upload"),
BARCODE("fa fa-barcode"),
CALENDAR_PLUS_O("fa fa-calendar-plus-o"),
RECYCLE("fa fa-recycle"),
SEND_O("fa fa-send-o"),
COUNTRY("icon-globe"),
INBOX("fa fa-inbox"),
NEWSPAPER("fa fa-newspaper-o"),
WARNING("fa fa-warning"),
TASKS("fa fa-tasks"),
SEARCH("fa fa-search")
;
// CSS class (or space-separated classes) emitted for this icon in markup.
private final String cssClass;
/**
 * @param cssClassValue CSS class string (may contain several space-separated classes)
 *                      rendered for this icon.
 */
Icone(String cssClassValue) {
    this.cssClass = cssClassValue;
}
/** @return the CSS class string backing this icon. */
@Override
public String getCssClass() {
    return this.cssClass;
}
/** Renders as the CSS class so the enum can be dropped directly into markup attributes. */
@Override
public String toString() {
    return this.getCssClass();
}
}
|
Adicionando novos icones
|
lib/wicket-utils/src/main/java/org/opensingular/lib/wicket/util/resource/Icone.java
|
Adicionando novos icones
|
|
Java
|
apache-2.0
|
ad4902eca7cf3c3342ff33d03c042b6961e4a936
| 0
|
osmdroid/osmdroid,osmdroid/osmdroid,osmdroid/osmdroid,osmdroid/osmdroid
|
package org.osmdroid.tileprovider.modules;
import android.content.ContentValues;
import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;
import android.database.sqlite.SQLiteFullException;
import android.graphics.drawable.Drawable;
import android.util.Log;
import org.osmdroid.api.IMapView;
import org.osmdroid.config.Configuration;
import org.osmdroid.tileprovider.ExpirableBitmapDrawable;
import org.osmdroid.tileprovider.MapTile;
import org.osmdroid.tileprovider.constants.OpenStreetMapTileProviderConstants;
import org.osmdroid.tileprovider.tilesource.ITileSource;
import org.osmdroid.tileprovider.util.Counters;
import org.osmdroid.tileprovider.util.StreamUtils;
import java.io.BufferedInputStream;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import static org.osmdroid.tileprovider.modules.DatabaseFileArchive.COLUMN_KEY;
import static org.osmdroid.tileprovider.modules.DatabaseFileArchive.COLUMN_PROVIDER;
import static org.osmdroid.tileprovider.modules.DatabaseFileArchive.COLUMN_TILE;
import static org.osmdroid.tileprovider.modules.DatabaseFileArchive.TABLE;
/**
* An implementation of {@link IFilesystemCache} based on the original TileWriter. It writes tiles to a sqlite database cache.
* It supports expiration timestamps if provided by the server from which the tile was downloaded. Trimming
* of expired
* <p>
* If the database exceeds {@link Configuration#getInstance()#getTileFileSystemCacheTrimBytes()}
* cache exceeds 600 Mb then it will be trimmed to 500 Mb by deleting files that expire first.
* @see DatabaseFileArchive
* @see SqliteArchiveTileWriter
* @author Alex O'Ree
* @since 5.1
*/
public class SqlTileWriter implements IFilesystemCache {
// name of the sqlite database file created under the osmdroid tile cache directory
public static final String DATABASE_FILENAME = "cache.db";
// extra column (beyond DatabaseFileArchive's) holding the tile's expiration timestamp, epoch millis
public static final String COLUMN_EXPIRES ="expires";
/**
 * disables cache purge of expired tiles on start up
 * if this is set to false, the database will only purge tiles if manually called or if
 * the storage device runs out of space.
 *
 * expired tiles will continue to be overwritten as new versions are downloaded regardless
 *
 * @since 5.6
 */
public static boolean CLEANUP_ON_START=true;
// backing database file; set to null by onDetach()
protected File db_file;
// open database handle; null until the constructor succeeds, null again after onDetach()
protected SQLiteDatabase db;
// last time (epoch millis) saveFile() checked the cache size; throttles the 5-minute size check
protected long lastSizeCheck=0;
/**
 * mean tile size computed on first use.
 * Sizes are quite variable and a significant underestimate will result in too many tiles being purged.
 */
long tileSize=0l;
// ensures the startup cleanup thread runs at most once per process, across instances
static boolean hasInited=false;
/**
 * Opens (creating if necessary) the cache database under the configured osmdroid
 * tile cache directory, and — once per process, when {@link #CLEANUP_ON_START} is
 * true — kicks off a low-priority background thread to prune the cache.
 * On storage failure {@link #db} stays null and all operations become no-ops.
 */
public SqlTileWriter() {
Configuration.getInstance().getOsmdroidTileCache().mkdirs();
db_file = new File(Configuration.getInstance().getOsmdroidTileCache().getAbsolutePath() + File.separator + DATABASE_FILENAME);
try {
db = SQLiteDatabase.openOrCreateDatabase(db_file, null);
// composite primary key (key, provider): the same tile key may exist for several tile sources
db.execSQL("CREATE TABLE IF NOT EXISTS " + TABLE + " (" + DatabaseFileArchive.COLUMN_KEY + " INTEGER , " + DatabaseFileArchive.COLUMN_PROVIDER + " TEXT, " + DatabaseFileArchive.COLUMN_TILE + " BLOB, " + COLUMN_EXPIRES +" INTEGER, PRIMARY KEY (" + DatabaseFileArchive.COLUMN_KEY + ", " + DatabaseFileArchive.COLUMN_PROVIDER + "));");
} catch (Throwable ex) {
Log.e(IMapView.LOGTAG, "Unable to start the sqlite tile writer. Check external storage availability.", ex);
}
// static flag: only the first instance in the process triggers the cleanup pass
if (!hasInited) {
hasInited = true;
if (CLEANUP_ON_START) {
// do this in the background because it takes a long time
final Thread t = new Thread() {
@Override
public void run() {
runCleanupOperation();
}
};
t.setPriority(Thread.MIN_PRIORITY);
t.start();
}
}
}
/**
 * Prunes the tile database. When the database file has grown past
 * {@link Configuration#getInstance()}'s max cache size, rows are deleted down to
 * roughly the trim size, removing the tiles that expire first.
 * <p>
 * This could be a long running operation, don't run on the UI thread unless necessary.
 *
 * @since 5.6
 */
public void runCleanupOperation() {
    if (db == null) {
        if (Configuration.getInstance().isDebugMode()) {
            Log.d(IMapView.LOGTAG, "Finished init thread, aborted due to null database reference");
        }
        return;
    }
    try {
        if (db_file.length() > Configuration.getInstance().getTileFileSystemCacheMaxBytes()) {
            long now = System.currentTimeMillis();
            //note, i considered adding a looping mechanism here but sqlite can behave differently
            //i.e. there's no guarantee that the database file size shrinks immediately.
            Log.i(IMapView.LOGTAG, "Local cache is now " + db_file.length() + " max size is " + Configuration.getInstance().getTileFileSystemCacheMaxBytes());
            long diff = db_file.length() - Configuration.getInstance().getTileFileSystemCacheTrimBytes();
            // estimate the mean tile size once; it converts "bytes over the trim
            // limit" into an approximate number of rows to delete
            if (tileSize == 0l) {
                long count = getRowCount(null);
                tileSize = count > 0l ? db_file.length() / count : 4000;
                if (Configuration.getInstance().isDebugMode()) {
                    Log.d(IMapView.LOGTAG, "Number of cached tiles is " + count + ", mean size is " + tileSize);
                }
            }
            long tilesToKill = diff / tileSize;
            Log.d(IMapView.LOGTAG, "Local cache purging " + tilesToKill + " tiles.");
            if (tilesToKill > 0)
                try {
                    // BUGFIX: ORDER BY ... ASC (was DESC). The class contract is to delete
                    // the tiles that expire FIRST; DESC was purging the freshest tiles.
                    // NOTE(review): the subselect matches on key only, so a key shared by
                    // two providers removes both rows — confirm this is acceptable.
                    db.execSQL("DELETE FROM " + TABLE + " WHERE " + COLUMN_KEY + " in (SELECT " + COLUMN_KEY + " FROM " + TABLE + " ORDER BY " + COLUMN_EXPIRES + " ASC LIMIT " + tilesToKill + ")");
                } catch (Throwable t) {
                    Log.e(IMapView.LOGTAG, "error purging tiles from the tile cache", t);
                }
            Log.d(IMapView.LOGTAG, "purge completed in " + (System.currentTimeMillis() - now) + "ms, cache size is " + db_file.length() + " bytes");
        }
    } catch (Exception ex) {
        if (Configuration.getInstance().isDebugMode()) {
            Log.d(IMapView.LOGTAG, "SqliteTileWriter init thread crash, db is probably not available", ex);
        }
    }
    if (Configuration.getInstance().isDebugMode()) {
        Log.d(IMapView.LOGTAG, "Finished init thread");
    }
}
/**
 * Writes a tile to the cache database, replacing any previous row for the same
 * (key, provider) pair.
 * <p>
 * NOTE(review): this method has always returned false, even on success; that
 * historical behavior is retained for existing callers — confirm before changing.
 *
 * @param pTileSourceInfo source the tile was downloaded from
 * @param pTile           tile coordinates
 * @param pStream         raw tile bytes; this method does not close it
 * @return false (see note above)
 */
@Override
public boolean saveFile(final ITileSource pTileSourceInfo, final MapTile pTile, final InputStream pStream) {
    if (db == null || !db.isOpen()) {
        Log.d(IMapView.LOGTAG, "Unable to store cached tile from " + pTileSourceInfo.name() + " " + pTile.toString() + ", database not available.");
        Counters.fileCacheSaveErrors++;
        return false;
    }
    try {
        ContentValues cv = new ContentValues();
        final long index = getIndex(pTile);
        cv.put(DatabaseFileArchive.COLUMN_PROVIDER, pTileSourceInfo.name());
        // PERF: chunked copy instead of the old byte-at-a-time read into a
        // List<Byte>, which boxed every single byte of every tile.
        final ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        final byte[] chunk = new byte[8192];
        int read;
        while ((read = pStream.read(chunk)) != -1) {
            buffer.write(chunk, 0, read);
        }
        cv.put(DatabaseFileArchive.COLUMN_KEY, index);
        cv.put(DatabaseFileArchive.COLUMN_TILE, buffer.toByteArray());
        //this shouldn't happen, but just in case
        if (pTile.getExpires() != null)
            cv.put(COLUMN_EXPIRES, pTile.getExpires().getTime());
        // remove any existing row for this key/provider, then insert the new one
        db.delete(TABLE, primaryKey, getPrimaryKeyParameters(index, pTileSourceInfo));
        db.insert(TABLE, null, cv);
        if (Configuration.getInstance().isDebugMode())
            Log.d(IMapView.LOGTAG, "tile inserted " + pTileSourceInfo.name() + pTile.toString());
        // at most every 5 minutes, check whether the cache has outgrown its limit
        if (System.currentTimeMillis() > lastSizeCheck + 300000){
            lastSizeCheck = System.currentTimeMillis();
            if (db_file!=null && db_file.length() > Configuration.getInstance().getTileFileSystemCacheMaxBytes()) {
                runCleanupOperation();
            }
        }
    } catch (SQLiteFullException ex) {
        //the drive is full! trigger the clean up operation
        //may want to consider reducing the trim size automagically
        runCleanupOperation();
    } catch (Throwable ex) {
        //note, although we check for db null state at the beginning of this method, it's possible for the
        //db to be closed during the execution of this method
        Log.e(IMapView.LOGTAG, "Unable to store cached tile from " + pTileSourceInfo.name() + " " + pTile.toString() + " db is " + (db == null ? "null" : "not null"), ex);
        Counters.fileCacheSaveErrors++;
    }
    return false;
}
/**
 * Returns true if the given tile source and tile coordinates exist in the cache.
 *
 * @param pTileSource tile source name (the provider column value)
 * @param pTile       tile coordinates to look up
 * @return true when a row for (tile, source) is present; false on any failure
 * @since 5.6
 */
public boolean exists(String pTileSource, MapTile pTile) {
    if (db == null || !db.isOpen()) {
        Log.d(IMapView.LOGTAG, "Unable to test for tile exists cached tile from " + pTileSource + " " + pTile.toString() + ", database not available.");
        return false;
    }
    Cursor cur = null;
    try {
        final long index = getIndex(pTile);
        cur = getTileCursor(getPrimaryKeyParameters(index, pTileSource), expireQueryColumn);
        return cur.getCount() != 0;
    } catch (Throwable ex) {
        // BUGFIX(message): previously said "Unable to store cached tile" — copy/pasted from saveFile()
        Log.e(IMapView.LOGTAG, "Unable to test for cached tile from " + pTileSource + " " + pTile.toString(), ex);
        return false;
    } finally {
        // BUGFIX: close the cursor even when the query or getCount() throws
        if (cur != null) {
            cur.close();
        }
    }
}
/**
 * Convenience overload of {@link #exists(String, MapTile)} that accepts the tile
 * source object instead of its name.
 *
 * @param pTileSource source whose cache entries should be checked
 * @param pTile       tile coordinates to look up
 * @return true when the tile is present in the cache database
 * @since 5.6
 */
@Override
public boolean exists(ITileSource pTileSource, MapTile pTile) {
    final String sourceName = pTileSource.name();
    return exists(sourceName, pTile);
}
/**
 * Closes the database (when open) and clears the handle and file references.
 * After this call every cache operation becomes a no-op or fails fast.
 */
@Override
public void onDetach() {
    final SQLiteDatabase database = db;
    if (database != null && database.isOpen()) {
        try {
            database.close();
            Log.i(IMapView.LOGTAG, "Database detached");
        } catch (final Exception ex) {
            Log.e(IMapView.LOGTAG, "Database detach failed",ex);
        }
    }
    db = null;
    db_file = null;
}
/**
 * Purges and deletes every cached tile, for all tile sources, from the cache database.
 *
 * @return true when the delete statement ran without error; false when the
 *         database is unavailable or the delete failed
 * @since 5.6
 */
public boolean purgeCache() {
    if (db == null || !db.isOpen()) {
        return false;
    }
    try {
        db.delete(TABLE, null, null);
        return true;
    } catch (final Throwable e) {
        Log.w(IMapView.LOGTAG, "Error purging the db", e);
        return false;
    }
}
/**
 * Purges and deletes all tiles belonging to the given tile source name from the
 * cache database.
 *
 * @param mTileSourceName provider name whose rows should be removed
 * @return true when the delete statement ran without error
 * @since 5.6.1
 */
public boolean purgeCache(String mTileSourceName) {
    if (db == null || !db.isOpen()) {
        return false;
    }
    try {
        db.delete(TABLE, COLUMN_PROVIDER + " = ?", new String[]{mTileSourceName});
        return true;
    } catch (final Throwable e) {
        Log.w(IMapView.LOGTAG, "Error purging the db", e);
        return false;
    }
}
/**
 * Imports tiles from the legacy file-system cache (layout {@code <source>/<zoom>/<x>/<y>.<ext>})
 * into the sqlite tile cache and, optionally, deletes them and the traversed
 * directories from the file system afterwards.
 * <p>
 * This can take a long time, so consider running this off of the main thread.
 *
 * @param removeFromFileSystem when true, imported tile files and emptied directories
 *                             are deleted from disk
 * @return counters: {tiles inserted, insert failures, files deleted, delete failures}
 */
public int[] importFromFileCache(boolean removeFromFileSystem) {
    int[] ret = new int[]{0, 0, 0, 0};
    File tilePathBase = Configuration.getInstance().getOsmdroidTileCache();
    if (tilePathBase.exists()) {
        File[] tileSources = tilePathBase.listFiles();
        if (tileSources != null) {
            for (int i = 0; i < tileSources.length; i++) {
                if (tileSources[i].isDirectory() && !tileSources[i].isHidden()) {
                    File[] z = tileSources[i].listFiles();
                    if (z != null)
                        for (int zz = 0; zz < z.length; zz++) {
                            if (z[zz].isDirectory() && !z[zz].isHidden()) {
                                File[] x = z[zz].listFiles();
                                if (x != null)
                                    for (int xx = 0; xx < x.length; xx++) {
                                        if (x[xx].isDirectory() && !x[xx].isHidden()) {
                                            File[] y = x[xx].listFiles();
                                            // BUGFIX: guard was "if (x != null)" (copy/paste) — it must
                                            // check y, otherwise an unreadable x directory NPEs below.
                                            if (y != null)
                                                for (int yy = 0; yy < y.length; yy++) {
                                                    if (!y[yy].isHidden() && !y[yy].isDirectory()) {
                                                        try {
                                                            ContentValues cv = new ContentValues();
                                                            final long x1 = Long.parseLong(x[xx].getName());
                                                            final long y1 = Long.parseLong(y[yy].getName().substring(0, y[yy].getName().indexOf(".")));
                                                            final long z1 = Long.parseLong(z[zz].getName());
                                                            final long index = getIndex(x1, y1, z1);
                                                            cv.put(DatabaseFileArchive.COLUMN_PROVIDER, tileSources[i].getName());
                                                            if (!exists(tileSources[i].getName(), new MapTile((int) z1, (int) x1, (int) y1))) {
                                                                cv.put(DatabaseFileArchive.COLUMN_KEY, index);
                                                                cv.put(DatabaseFileArchive.COLUMN_TILE, readTileFile(y[yy]));
                                                                long insert = db.insert(TABLE, null, cv);
                                                                if (insert > 0) {
                                                                    if (Configuration.getInstance().isDebugMode())
                                                                        Log.d(IMapView.LOGTAG, "tile inserted " + tileSources[i].getName() + "/" + z1 + "/" + x1 + "/" + y1);
                                                                    ret[0]++;
                                                                    if (removeFromFileSystem) {
                                                                        try {
                                                                            y[yy].delete();
                                                                            ret[2]++;
                                                                        } catch (Exception ex) {
                                                                            ret[3]++;
                                                                        }
                                                                    }
                                                                } else {
                                                                    Log.w(IMapView.LOGTAG, "tile NOT inserted " + tileSources[i].getName() + "/" + z1 + "/" + x1 + "/" + y1);
                                                                }
                                                            }
                                                        } catch (Throwable ex) {
                                                            //note, although we check for db null state at the beginning of this method, it's possible for the
                                                            //db to be closed during the execution of this method
                                                            Log.e(IMapView.LOGTAG, "Unable to store cached tile from " + tileSources[i].getName() + " db is " + (db == null ? "null" : "not null"), ex);
                                                            ret[1]++;
                                                        }
                                                    }
                                                }
                                            if (removeFromFileSystem) {
                                                //clean up the directories
                                                try {
                                                    x[xx].delete();
                                                } catch (Exception ex) {
                                                    Log.e(IMapView.LOGTAG, "Unable to delete directory from " + x[xx].getAbsolutePath(), ex);
                                                    ret[3]++;
                                                }
                                            }
                                        }
                                    }
                                if (removeFromFileSystem) {
                                    //clean up the directories
                                    try {
                                        z[zz].delete();
                                    } catch (Exception ex) {
                                        Log.e(IMapView.LOGTAG, "Unable to delete directory from " + z[zz].getAbsolutePath(), ex);
                                        ret[3]++;
                                    }
                                }
                            }
                        }
                    if (removeFromFileSystem) {
                        //clean up the directories
                        try {
                            tileSources[i].delete();
                        } catch (Exception ex) {
                            Log.e(IMapView.LOGTAG, "Unable to delete directory from " + tileSources[i].getAbsolutePath(), ex);
                            ret[3]++;
                        }
                    }
                }
            }
        }
    }
    return ret;
}
/**
 * Reads a whole tile file into a byte array with a chunked copy (replaces the
 * previous byte-at-a-time List&lt;Byte&gt; read) and guarantees the stream is closed.
 */
private static byte[] readTileFile(final File pFile) throws IOException {
    final BufferedInputStream bis = new BufferedInputStream(new FileInputStream(pFile));
    try {
        final ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        final byte[] chunk = new byte[8192];
        int read;
        while ((read = bis.read(chunk)) != -1) {
            buffer.write(chunk, 0, read);
        }
        return buffer.toByteArray();
    } finally {
        // BUGFIX: the original never closed the FileInputStream, leaking a
        // file descriptor per imported tile
        bis.close();
    }
}
/**
 * Removes a specific tile (for the given source) from the cache database.
 *
 * @param pTileSourceInfo source whose row should be removed
 * @param pTile           tile coordinates
 * @return true when the delete statement ran without error
 * @since 5.6
 */
@Override
public boolean remove(final ITileSource pTileSourceInfo, final MapTile pTile) {
    // CONSISTENCY: also bail out on a closed database, matching saveFile()/exists()
    // (previously only db == null was checked here).
    if (db == null || !db.isOpen()) {
        Log.d(IMapView.LOGTAG, "Unable to delete cached tile from " + pTileSourceInfo.name() + " " + pTile.toString() + ", database not available.");
        Counters.fileCacheSaveErrors++;
        return false;
    }
    try {
        final long index = getIndex(pTile);
        db.delete(DatabaseFileArchive.TABLE, primaryKey, getPrimaryKeyParameters(index, pTileSourceInfo));
        return true;
    } catch (Throwable ex) {
        //note, although we check for db null state at the beginning of this method, it's possible for the
        //db to be closed during the execution of this method
        Log.e(IMapView.LOGTAG, "Unable to delete cached tile from " + pTileSourceInfo.name() + " " + pTile.toString() + " db is " + (db == null ? "null" : "not null"), ex);
        Counters.fileCacheSaveErrors++;
    }
    return false;
}
/**
 * Returns the number of tiles in the cache for the specified tile source name.
 *
 * @param tileSourceName provider name to count rows for, or null for all sources
 * @return the row count, or 0 on any failure
 * @since 5.6
 */
public long getRowCount(String tileSourceName) {
    Cursor mCount = null;
    try {
        if (tileSourceName == null)
            mCount = db.rawQuery("select count(*) from " + TABLE, null);
        else
            // BUGFIX: bind the source name instead of concatenating it into the SQL;
            // names containing a quote previously broke (or injected into) the query.
            mCount = db.rawQuery("select count(*) from " + TABLE + " where " + COLUMN_PROVIDER + "=?", new String[]{tileSourceName});
        mCount.moveToFirst();
        return mCount.getLong(0);
    } catch (Throwable ex) {
        Log.e(IMapView.LOGTAG, "Unable to query for row count " + tileSourceName, ex);
    } finally {
        // close the cursor even when the query throws
        if (mCount != null) {
            mCount.close();
        }
    }
    return 0;
}
/**
 * Returns the size of the database file in bytes.
 */
// NOTE(review): db_file is nulled by onDetach(); calling this afterwards would
// NPE — confirm callers only use it while the writer is attached.
public long getSize() {
return db_file.length();
}
/**
 * Returns the expiry timestamp (epoch millis, smallest value of the expires
 * column) of the tile that expires first, or 0 when the query fails or the
 * table is empty.
 */
public long getFirstExpiry() {
    Cursor cursor = null;
    try {
        cursor = db.rawQuery("select min(" + COLUMN_EXPIRES + ") from " + TABLE, null);
        cursor.moveToFirst();
        return cursor.getLong(0);
    } catch (Throwable ex) {
        Log.e(IMapView.LOGTAG, "Unable to query for oldest tile", ex);
    } finally {
        // BUGFIX: close the cursor even when moveToFirst()/getLong() throws
        if (cursor != null) {
            cursor.close();
        }
    }
    return 0;
}
/**
 * Computes the single-column cache key for a tile coordinate.
 * <p>
 * WARNING: the bit layout {@code ((z << z) + x << z) + y} is the historical
 * osmdroid key format — changing it would make existing cache databases unreadable.
 *
 * @since 5.6.5
 * @param pX tile x coordinate
 * @param pY tile y coordinate
 * @param pZ zoom level
 * @return the combined key value
 */
public static long getIndex(final long pX, final long pY, final long pZ) {
    final long zoomBase = pZ << pZ;
    return ((zoomBase + pX) << pZ) + pY;
}
/**
 * Gets the single-column index value for a map tile by delegating to
 * {@link #getIndex(long, long, long)}.
 *
 * @since 5.6.5
 * @param pTile tile whose key should be computed
 * @return the combined key value
 */
public static long getIndex(final MapTile pTile) {
    final long tileX = pTile.getX();
    final long tileY = pTile.getY();
    final long zoom = pTile.getZoomLevel();
    return getIndex(tileX, tileY, zoom);
}
/**
 * Looks up the stored expiration timestamp for a cached tile.
 *
 * @param pTileSource source the tile belongs to
 * @param pTile       tile coordinates
 * @return the expires column value (epoch millis), or null when the tile is
 *         absent or the lookup fails
 */
@Override
public Long getExpirationTimestamp(final ITileSource pTileSource, final MapTile pTile) {
    Cursor cursor = null;
    try {
        cursor = getTileCursor(getPrimaryKeyParameters(getIndex(pTile), pTileSource), expireQueryColumn);
        if (cursor.moveToFirst()) {
            return cursor.getLong(0);
        }
    } catch (final Throwable t) {
        Log.e(IMapView.LOGTAG, "error getting expiration date from the tile cache", t);
    } finally {
        if (cursor != null) {
            cursor.close();
        }
    }
    return null;
}
/**
 * WHERE clause matching a single row by (key, provider); bind the values from
 * {@link #getPrimaryKeyParameters(long, String)}.
 * @since 5.6.5
 */
private static final String primaryKey = DatabaseFileArchive.COLUMN_KEY + "=? and " + DatabaseFileArchive.COLUMN_PROVIDER + "=?";
// exposed for callers building their own queries against the cache table
public static String getPrimaryKey() {
return primaryKey;
}
/**
 * Builds the bound-parameter pair for {@link #getPrimaryKey()} from a tile key
 * and a tile source object.
 *
 * @since 5.6.5
 * @param pIndex          tile key, see {@link #getIndex(MapTile)}
 * @param pTileSourceInfo source whose name becomes the provider parameter
 * @return {key-as-string, provider-name}
 */
public static String[] getPrimaryKeyParameters(final long pIndex, final ITileSource pTileSourceInfo) {
    final String sourceName = pTileSourceInfo.name();
    return getPrimaryKeyParameters(pIndex, sourceName);
}
/**
 * Builds the bound-parameter pair for {@link #getPrimaryKey()} from a tile key
 * and a tile source name.
 *
 * @since 5.6.5
 * @param pIndex          tile key, see {@link #getIndex(MapTile)}
 * @param pTileSourceInfo provider name
 * @return {key-as-string, provider-name}
 */
public static String[] getPrimaryKeyParameters(final long pIndex, final String pTileSourceInfo) {
    final String indexText = String.valueOf(pIndex);
    return new String[]{indexText, pTileSourceInfo};
}
/**
 * Runs a primary-key lookup against the tile table.
 *
 * @since 5.6.5
 * @param pPrimaryKeyParameters bound key/provider pair, see
 *                              {@link #getPrimaryKeyParameters(long, String)}
 * @param pColumns              columns to fetch
 * @return an open cursor — the caller is responsible for closing it
 */
public Cursor getTileCursor(final String[] pPrimaryKeyParameters, final String[] pColumns) {
    final String whereClause = primaryKey;
    return db.query(DatabaseFileArchive.TABLE, pColumns, whereClause, pPrimaryKeyParameters, null, null, null);
}
/**
 * Column set for full tile loads (blob + expiry); cached once for optimization reasons.
 * @since 5.6.5
 */
private static final String[] queryColumns = {DatabaseFileArchive.COLUMN_TILE, SqlTileWriter.COLUMN_EXPIRES};
/**
 * Column set for expiry-only lookups; cached once for optimization reasons.
 * @since 5.6.5
 */
private static final String[] expireQueryColumn = {SqlTileWriter.COLUMN_EXPIRES};
/**
 * Loads a tile from the cache database and decodes it via the tile source.
 * When the stored expiration timestamp has passed, the drawable is flagged
 * {@link ExpirableBitmapDrawable#EXPIRED} so callers can refresh it.
 *
 * @param pTileSource source used to decode the blob into a Drawable
 * @param pTile       tile coordinates
 * @return the decoded drawable, or null when the tile is not cached
 * @throws Exception on database or decode failures
 */
@Override
public Drawable loadTile(final ITileSource pTileSource, final MapTile pTile) throws Exception{
    InputStream inputStream = null;
    try {
        final long index = getIndex(pTile);
        byte[] bits = null;
        long expirationTimestamp = 0;
        final Cursor cur = getTileCursor(getPrimaryKeyParameters(index, pTileSource), queryColumns);
        // BUGFIX: close the cursor even if reading the row throws (it previously
        // leaked when getBlob/getLong failed).
        try {
            if (cur.moveToFirst()) {
                bits = cur.getBlob(cur.getColumnIndex(DatabaseFileArchive.COLUMN_TILE));
                expirationTimestamp = cur.getLong(cur.getColumnIndex(SqlTileWriter.COLUMN_EXPIRES));
            }
        } finally {
            cur.close();
        }
        if (bits==null) {
            if (Configuration.getInstance().isDebugMode()) {
                Log.d(IMapView.LOGTAG,"SqlCache - Tile doesn't exist: " +pTileSource.name() + pTile);
            }
            return null;
        }
        inputStream = new ByteArrayInputStream(bits);
        final Drawable drawable = pTileSource.getDrawable(inputStream);
        // Check to see if file has expired
        final long now = System.currentTimeMillis();
        final boolean fileExpired = expirationTimestamp < now;
        if (fileExpired && drawable != null) {
            if (Configuration.getInstance().isDebugMode()) {
                Log.d(IMapView.LOGTAG,"Tile expired: " + pTileSource.name() +pTile);
            }
            ExpirableBitmapDrawable.setState(drawable, ExpirableBitmapDrawable.EXPIRED);
        }
        return drawable;
    } finally {
        if (inputStream != null) {
            StreamUtils.closeStream(inputStream);
        }
    }
}
}
|
osmdroid-android/src/main/java/org/osmdroid/tileprovider/modules/SqlTileWriter.java
|
package org.osmdroid.tileprovider.modules;
import android.content.ContentValues;
import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;
import android.database.sqlite.SQLiteFullException;
import android.graphics.drawable.Drawable;
import android.util.Log;
import org.osmdroid.api.IMapView;
import org.osmdroid.config.Configuration;
import org.osmdroid.tileprovider.ExpirableBitmapDrawable;
import org.osmdroid.tileprovider.MapTile;
import org.osmdroid.tileprovider.constants.OpenStreetMapTileProviderConstants;
import org.osmdroid.tileprovider.tilesource.ITileSource;
import org.osmdroid.tileprovider.util.Counters;
import org.osmdroid.tileprovider.util.StreamUtils;
import java.io.BufferedInputStream;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import static org.osmdroid.tileprovider.modules.DatabaseFileArchive.COLUMN_PROVIDER;
import static org.osmdroid.tileprovider.modules.DatabaseFileArchive.COLUMN_TILE;
import static org.osmdroid.tileprovider.modules.DatabaseFileArchive.COLUMN_KEY;
import static org.osmdroid.tileprovider.modules.DatabaseFileArchive.TABLE;
/**
* An implementation of {@link IFilesystemCache} based on the original TileWriter. It writes tiles to a sqlite database cache.
* It supports expiration timestamps if provided by the server from which the tile was downloaded. Trimming
* of expired
* <p>
* If the database exceeds {@link Configuration#getInstance()#getTileFileSystemCacheTrimBytes()}
* cache exceeds 600 Mb then it will be trimmed to 500 Mb by deleting files that expire first.
* @see DatabaseFileArchive
* @see SqliteArchiveTileWriter
* @author Alex O'Ree
* @since 5.1
*/
public class SqlTileWriter implements IFilesystemCache {
public static final String DATABASE_FILENAME = "cache.db";
public static final String COLUMN_EXPIRES ="expires";
/**
* disables cache purge of expired tiled on start up
* if this is set to false, the database will only purge tiles if manually called or if
* the storage device runs out of space.
*
* expired tiles will continue to be overwritten as new versions are downloaded regardless
*
* @since 5.6
*/
public static boolean CLEANUP_ON_START=true;
protected File db_file;
protected SQLiteDatabase db;
protected long lastSizeCheck=0;
/**
* mean tile size computed on first use.
* Sizes are quite variable and a significant underestimate will result in too many tiles being purged.
*/
long tileSize=0l;
static boolean hasInited=false;
public SqlTileWriter() {
Configuration.getInstance().getOsmdroidTileCache().mkdirs();
db_file = new File(Configuration.getInstance().getOsmdroidTileCache().getAbsolutePath() + File.separator + DATABASE_FILENAME);
try {
db = SQLiteDatabase.openOrCreateDatabase(db_file, null);
db.execSQL("CREATE TABLE IF NOT EXISTS " + TABLE + " (" + DatabaseFileArchive.COLUMN_KEY + " INTEGER , " + DatabaseFileArchive.COLUMN_PROVIDER + " TEXT, " + DatabaseFileArchive.COLUMN_TILE + " BLOB, " + COLUMN_EXPIRES +" INTEGER, PRIMARY KEY (" + DatabaseFileArchive.COLUMN_KEY + ", " + DatabaseFileArchive.COLUMN_PROVIDER + "));");
} catch (Throwable ex) {
Log.e(IMapView.LOGTAG, "Unable to start the sqlite tile writer. Check external storage availability.", ex);
}
if (!hasInited) {
hasInited = true;
if (CLEANUP_ON_START) {
// do this in the background because it takes a long time
final Thread t = new Thread() {
@Override
public void run() {
runCleanupOperation();
}
};
t.setPriority(Thread.MIN_PRIORITY);
t.start();
}
}
}
/**
* this could be a long running operation, don't run on the UI thread unless necessary.
* This function prunes the database for old or expired tiles.
*
* @since 5.6
*/
public void runCleanupOperation() {
if (db == null) {
if (Configuration.getInstance().isDebugMode()) {
Log.d(IMapView.LOGTAG, "Finished init thread, aborted due to null database reference");
}
return;
}
try {
if (db_file.length() > Configuration.getInstance().getTileFileSystemCacheMaxBytes()) {
//run the reaper (remove all old expired tiles)
//keep if now is < expiration date
//delete if now is > expiration date
long now = System.currentTimeMillis();
//this part will nuke all expired tiles, not super useful if you're offline
//int rows = db.delete(TABLE, "expires < ?", new String[]{System.currentTimeMillis() + ""});
//Log.d(IMapView.LOGTAG, "Local storage cache purged " + rows + " expired tiles in " + (System.currentTimeMillis() - now) + "ms, cache size is " + db_file.length() + "bytes");
//attempt to trim the database
//note, i considered adding a looping mechanism here but sqlite can behave differently
//i.e. there's no guarantee that the database file size shrinks immediately.
Log.i(IMapView.LOGTAG, "Local cache is now " + db_file.length() + " max size is " + Configuration.getInstance().getTileFileSystemCacheMaxBytes());
long diff = db_file.length() - Configuration.getInstance().getTileFileSystemCacheTrimBytes();
if (tileSize == 0l) {
long count = getRowCount(null);
tileSize = count > 0l ? db_file.length() / count : 4000;
if (Configuration.getInstance().isDebugMode()) {
Log.d(IMapView.LOGTAG, "Number of cached tiles is " + count + ", mean size is " + tileSize);
}
}
long tilesToKill = diff / tileSize;
Log.d(IMapView.LOGTAG, "Local cache purging " + tilesToKill + " tiles.");
if (tilesToKill > 0)
try {
db.execSQL("DELETE FROM " + TABLE + " WHERE " + COLUMN_KEY + " in (SELECT " + COLUMN_KEY + " FROM " + TABLE + " ORDER BY " + COLUMN_EXPIRES + " DESC LIMIT " + tilesToKill + ")");
} catch (Throwable t) {
Log.e(IMapView.LOGTAG, "error purging tiles from the tile cache", t);
}
Log.d(IMapView.LOGTAG, "purge completed in " + (System.currentTimeMillis() - now) + "ms, cache size is " + db_file.length() + " bytes");
}
} catch (Exception ex) {
if (Configuration.getInstance().isDebugMode()) {
Log.d(IMapView.LOGTAG, "SqliteTileWriter init thread crash, db is probably not available", ex);
}
}
if (Configuration.getInstance().isDebugMode()) {
Log.d(IMapView.LOGTAG, "Finished init thread");
}
}
@Override
public boolean saveFile(final ITileSource pTileSourceInfo, final MapTile pTile, final InputStream pStream) {
if (db == null || !db.isOpen()) {
Log.d(IMapView.LOGTAG, "Unable to store cached tile from " + pTileSourceInfo.name() + " " + pTile.toString() + ", database not available.");
Counters.fileCacheSaveErrors++;
return false;
}
try {
ContentValues cv = new ContentValues();
final long index = getIndex(pTile);
cv.put(DatabaseFileArchive.COLUMN_PROVIDER, pTileSourceInfo.name());
BufferedInputStream bis = new BufferedInputStream(pStream);
List<Byte> list = new ArrayList<Byte>();
//ByteArrayBuffer baf = new ByteArrayBuffer(500);
int current = 0;
while ((current = bis.read()) != -1) {
list.add((byte) current);
}
byte[] bits = new byte[list.size()];
for (int i = 0; i < list.size(); i++) {
bits[i] = list.get(i);
}
cv.put(DatabaseFileArchive.COLUMN_KEY, index);
cv.put(DatabaseFileArchive.COLUMN_TILE, bits);
//this shouldn't happen, but just in case
if (pTile.getExpires() != null)
cv.put(COLUMN_EXPIRES, pTile.getExpires().getTime());
db.delete(TABLE, primaryKey, getPrimaryKeyParameters(index, pTileSourceInfo));
db.insert(TABLE, null, cv);
if (Configuration.getInstance().isDebugMode())
Log.d(IMapView.LOGTAG, "tile inserted " + pTileSourceInfo.name() + pTile.toString());
if (System.currentTimeMillis() > lastSizeCheck + 300000){
lastSizeCheck = System.currentTimeMillis();
if (db_file!=null && db_file.length() > Configuration.getInstance().getTileFileSystemCacheMaxBytes()) {
runCleanupOperation();
}
}
} catch (SQLiteFullException ex) {
//the drive is full! trigger the clean up operation
//may want to consider reducing the trim size automagically
runCleanupOperation();
} catch (Throwable ex) {
//note, although we check for db null state at the beginning of this method, it's possible for the
//db to be closed during the execution of this method
Log.e(IMapView.LOGTAG, "Unable to store cached tile from " + pTileSourceInfo.name() + " " + pTile.toString() + " db is " + (db == null ? "null" : "not null"), ex);
Counters.fileCacheSaveErrors++;
}
return false;
}
/**
* Returns true if the given tile source and tile coordinates exist in the cache
*
* @param pTileSource
* @param pTile
* @return
* @since 5.6
*/
public boolean exists(String pTileSource, MapTile pTile) {
if (db == null || !db.isOpen()) {
Log.d(IMapView.LOGTAG, "Unable to test for tile exists cached tile from " + pTileSource + " " + pTile.toString() + ", database not available.");
return false;
}
try {
final long index = getIndex(pTile);
final Cursor cur = getTileCursor(getPrimaryKeyParameters(index, pTileSource), expireQueryColumn);
if (cur.getCount() != 0) {
cur.close();
return true;
}
cur.close();
} catch (Throwable ex) {
Log.e(IMapView.LOGTAG, "Unable to store cached tile from " + pTileSource + " " + pTile.toString(), ex);
}
return false;
}
/**
* Returns true if the given tile source and tile coordinates exist in the cache
*
* @param pTileSource
* @param pTile
* @return
* @since 5.6
*/
@Override
public boolean exists(ITileSource pTileSource, MapTile pTile) {
return exists(pTileSource.name(), pTile);
}
@Override
public void onDetach() {
if (db != null && db.isOpen()) {
try {
db.close();
Log.i(IMapView.LOGTAG, "Database detached");
} catch (Exception ex) {
Log.e(IMapView.LOGTAG, "Database detach failed",ex);
}
}
db = null;
db_file = null;
}
/**
* purges and deletes everything from the cache database
*
* @return
* @since 5.6
*/
public boolean purgeCache() {
if (db != null && db.isOpen()) {
try {
db.delete(TABLE, null, null);
return true;
} catch (final Throwable e) {
Log.w(IMapView.LOGTAG, "Error purging the db", e);
}
}
return false;
}
/**
* purges and deletes all tiles from the given tile source name from the cache database
*
* @return
* @since 5.6.1
*/
public boolean purgeCache(String mTileSourceName) {
if (db != null && db.isOpen()) {
try {
db.delete(TABLE, COLUMN_PROVIDER + " = ?", new String[]{mTileSourceName});
return true;
} catch (final Throwable e) {
Log.w(IMapView.LOGTAG, "Error purging the db", e);
}
}
return false;
}
/**
 * A helper method to import file system stored map tiles into the sql tile cache.
 * On successful import, the tiles are removed from the file system.
 * <p>
 * This can take a long time, so consider running this off of the main thread.
 * Expected on-disk layout: cache/&lt;source&gt;/&lt;z&gt;/&lt;x&gt;/&lt;y&gt;.&lt;ext&gt;
 *
 * @param removeFromFileSystem when true, imported tiles are deleted and the
 *                             directories are cleaned up afterwards
 * @return counters: [0]=inserts, [1]=insert failures, [2]=deletes, [3]=delete failures
 */
public int[] importFromFileCache(final boolean removeFromFileSystem) {
    // [0]=inserts, [1]=insert failures, [2]=deletes, [3]=delete failures
    final int[] ret = new int[]{0, 0, 0, 0};
    final File tilePathBase = Configuration.getInstance().getOsmdroidTileCache();
    if (!tilePathBase.exists()) {
        return ret;
    }
    final File[] tileSources = tilePathBase.listFiles();
    if (tileSources == null) {
        return ret;
    }
    for (final File tileSource : tileSources) {
        if (!tileSource.isDirectory() || tileSource.isHidden()) {
            // it's a plain file, nothing for us to do here
            continue;
        }
        final File[] zoomDirs = tileSource.listFiles();
        if (zoomDirs != null) {
            for (final File zoomDir : zoomDirs) {
                if (zoomDir.isDirectory() && !zoomDir.isHidden()) {
                    importZoomDir(tileSource, zoomDir, removeFromFileSystem, ret);
                }
                // the original attempted this delete for every entry, not only directories
                deleteQuietly(zoomDir, removeFromFileSystem, ret);
            }
        }
        deleteQuietly(tileSource, removeFromFileSystem, ret);
    }
    return ret;
}

/** Imports every x-column directory of one zoom-level directory. */
private void importZoomDir(final File tileSource, final File zoomDir,
                           final boolean removeFromFileSystem, final int[] ret) {
    final File[] xDirs = zoomDir.listFiles();
    if (xDirs == null) {
        return;
    }
    for (final File xDir : xDirs) {
        if (xDir.isDirectory() && !xDir.isHidden()) {
            final File[] yFiles = xDir.listFiles();
            // bug fix: the original checked the wrong array (x instead of y) for null
            // here, which could NPE when listing the directory failed
            if (yFiles != null) {
                for (final File yFile : yFiles) {
                    if (!yFile.isHidden() && !yFile.isDirectory()) {
                        importTile(tileSource, zoomDir, xDir, yFile, removeFromFileSystem, ret);
                    }
                }
            }
        }
        deleteQuietly(xDir, removeFromFileSystem, ret);
    }
}

/** Imports a single tile file into the database, optionally deleting it afterwards. */
private void importTile(final File tileSource, final File zoomDir, final File xDir,
                        final File yFile, final boolean removeFromFileSystem, final int[] ret) {
    try {
        final String yName = yFile.getName();
        final long x1 = Long.parseLong(xDir.getName());
        final long y1 = Long.parseLong(yName.substring(0, yName.indexOf(".")));
        final long z1 = Long.parseLong(zoomDir.getName());
        if (exists(tileSource.getName(), new MapTile((int) z1, (int) x1, (int) y1))) {
            return; // already cached, skip (matches original behavior: file is kept)
        }
        // bulk-read the tile bytes; the original read byte-by-byte into a List<Byte>
        // and never closed the stream, leaking a file handle per tile
        final byte[] bits = new byte[(int) yFile.length()];
        final BufferedInputStream bis = new BufferedInputStream(new FileInputStream(yFile));
        try {
            int offset = 0;
            int read;
            while (offset < bits.length
                    && (read = bis.read(bits, offset, bits.length - offset)) != -1) {
                offset += read;
            }
        } finally {
            bis.close();
        }
        final ContentValues cv = new ContentValues();
        cv.put(DatabaseFileArchive.COLUMN_PROVIDER, tileSource.getName());
        cv.put(DatabaseFileArchive.COLUMN_KEY, getIndex(x1, y1, z1));
        cv.put(DatabaseFileArchive.COLUMN_TILE, bits);
        final long insert = db.insert(TABLE, null, cv);
        if (insert > 0) {
            if (Configuration.getInstance().isDebugMode())
                Log.d(IMapView.LOGTAG, "tile inserted " + tileSource.getName() + "/" + z1 + "/" + x1 + "/" + y1);
            ret[0]++;
            if (removeFromFileSystem) {
                try {
                    yFile.delete();
                    ret[2]++;
                } catch (Exception ex) {
                    ret[3]++;
                }
            }
        } else {
            Log.w(IMapView.LOGTAG, "tile NOT inserted " + tileSource.getName() + "/" + z1 + "/" + x1 + "/" + y1);
        }
    } catch (Throwable ex) {
        //note, although we check for db null state at the beginning of this method, it's possible for the
        //db to be closed during the execution of this method
        Log.e(IMapView.LOGTAG, "Unable to store cached tile from " + tileSource.getName() + " db is " + (db == null ? "null" : "not null"), ex);
        ret[1]++;
    }
}

/**
 * Attempts to delete a file system entry when removeFromFileSystem is set.
 * Only exceptions (chiefly SecurityException) are counted in ret[3]; note that
 * File.delete() returning false (e.g. a non-empty directory) does not throw.
 */
private static void deleteQuietly(final File file, final boolean removeFromFileSystem, final int[] ret) {
    if (!removeFromFileSystem) {
        return;
    }
    try {
        file.delete();
    } catch (Exception ex) {
        Log.e(IMapView.LOGTAG, "Unable to delete directory from " + file.getAbsolutePath(), ex);
        ret[3]++;
    }
}
/**
 * Removes a specific tile from the cache.
 *
 * @param pTileSourceInfo tile source the tile belongs to
 * @param pTile           the tile to remove
 * @return true when the delete statement ran without error
 * @since 5.6
 */
@Override
public boolean remove(final ITileSource pTileSourceInfo, final MapTile pTile) {
    if (db == null) {
        Log.d(IMapView.LOGTAG, "Unable to delete cached tile from " + pTileSourceInfo.name() + " " + pTile.toString() + ", database not available.");
        Counters.fileCacheSaveErrors++;
        return false;
    }
    try {
        // address the row by its (index, provider) primary key
        db.delete(DatabaseFileArchive.TABLE, primaryKey, getPrimaryKeyParameters(getIndex(pTile), pTileSourceInfo));
        return true;
    } catch (final Throwable ex) {
        //note, although we check for db null state at the beginning of this method, it's possible for the
        //db to be closed during the execution of this method
        Log.e(IMapView.LOGTAG, "Unable to delete cached tile from " + pTileSourceInfo.name() + " " + pTile.toString() + " db is " + (db == null ? "null" : "not null"), ex);
        Counters.fileCacheSaveErrors++;
        return false;
    }
}
/**
 * Returns the number of tiles in the cache for the specified tile source name.
 *
 * @param tileSourceName tile source to count tiles for, or null to count all tiles
 * @return the row count, or 0 when the query failed
 * @since 5.6
 */
public long getRowCount(final String tileSourceName) {
    Cursor cursor = null;
    try {
        if (tileSourceName == null) {
            cursor = db.rawQuery("select count(*) from " + TABLE, null);
        } else {
            // bind the source name instead of concatenating it into the SQL text:
            // the original form broke on names containing a quote and was an
            // injection vector
            cursor = db.rawQuery("select count(*) from " + TABLE + " where " + COLUMN_PROVIDER + "=?",
                    new String[]{tileSourceName});
        }
        cursor.moveToFirst();
        return cursor.getLong(0);
    } catch (final Throwable ex) {
        Log.e(IMapView.LOGTAG, "Unable to query for row count " + tileSourceName, ex);
        return 0;
    } finally {
        // close in finally: the original leaked the cursor when moveToFirst/getLong threw
        if (cursor != null) {
            cursor.close();
        }
    }
}
/**
 * Returns the size of the database file in bytes.
 * Note: File.length() returns 0 when the file does not exist.
 */
public long getSize() {
    return db_file.length();
}
/**
 * Returns the expiry time of the tile that expires first.
 *
 * @return the smallest stored expiration timestamp, or 0 when the query failed
 */
public long getFirstExpiry() {
    Cursor cursor = null;
    try {
        cursor = db.rawQuery("select min(" + COLUMN_EXPIRES + ") from " + TABLE, null);
        cursor.moveToFirst();
        return cursor.getLong(0);
    } catch (final Throwable ex) {
        Log.e(IMapView.LOGTAG, "Unable to query for oldest tile", ex);
        return 0;
    } finally {
        // close in finally: the original leaked the cursor when moveToFirst/getLong threw
        if (cursor != null) {
            cursor.close();
        }
    }
}
/**
 * Computes the single-column database index for the given tile coordinates.
 * The packing formula ((z &lt;&lt; z) + x &lt;&lt; z) + y — including the unusual
 * double use of z — is preserved exactly so existing cache rows stay addressable.
 *
 * @since 5.6.5
 * @param pX tile x coordinate
 * @param pY tile y coordinate
 * @param pZ zoom level
 * @return the packed index value
 */
public static long getIndex(final long pX, final long pY, final long pZ) {
    final long zAndX = (pZ << pZ) + pX;
    return (zAndX << pZ) + pY;
}
/**
 * Gets the single column index value for a map tile, delegating to
 * getIndex(long, long, long) with the tile's x, y and zoom level.
 *
 * @since 5.6.5
 * @param pTile the tile to compute the index for
 * @return the packed index value
 */
public static long getIndex(final MapTile pTile) {
    return getIndex(pTile.getX(), pTile.getY(), pTile.getZoomLevel());
}
/**
 * Looks up the expiration timestamp stored for the given tile.
 *
 * @return the stored expiration value, or null when the tile is not cached
 *         or the lookup failed
 */
@Override
public Long getExpirationTimestamp(final ITileSource pTileSource, final MapTile pTile) {
    Cursor cursor = null;
    try {
        cursor = getTileCursor(getPrimaryKeyParameters(getIndex(pTile), pTileSource), expireQueryColumn);
        if (cursor.moveToFirst()) {
            return cursor.getLong(0);
        }
        return null;
    } catch (final Throwable t) {
        Log.e(IMapView.LOGTAG, "error getting expiration date from the tile cache", t);
        return null;
    } finally {
        if (cursor != null) {
            cursor.close();
        }
    }
}
/**
 * Parameterized WHERE clause matching a single tile row by (key, provider);
 * used together with getPrimaryKeyParameters(long, String).
 * @since 5.6.5
 */
private static final String primaryKey = DatabaseFileArchive.COLUMN_KEY + "=? and " + DatabaseFileArchive.COLUMN_PROVIDER + "=?";

/** Returns the parameterized WHERE clause used to address a single tile row. */
public static String getPrimaryKey() {
    return primaryKey;
}
/**
 * Builds the bind parameters for getPrimaryKey() from a tile index and source.
 *
 * @since 5.6.5
 * @param pIndex          packed tile index, see getIndex(long, long, long)
 * @param pTileSourceInfo tile source whose name() is used as the provider value
 * @return bind parameters {index, provider}
 */
public static String[] getPrimaryKeyParameters(final long pIndex, final ITileSource pTileSourceInfo) {
    return getPrimaryKeyParameters(pIndex, pTileSourceInfo.name());
}
/**
 * Builds the bind parameters for getPrimaryKey() from a tile index and a
 * provider/tile source name.
 *
 * @since 5.6.5
 * @param pIndex          packed tile index
 * @param pTileSourceInfo provider name bound to the provider column
 * @return bind parameters {index, provider}
 */
public static String[] getPrimaryKeyParameters(final long pIndex, final String pTileSourceInfo) {
    final String[] parameters = {Long.toString(pIndex), pTileSourceInfo};
    return parameters;
}
/**
 * Queries the tile table for a single tile row.
 *
 * @since 5.6.5
 * @param pPrimaryKeyParameters bind values for the primary-key WHERE clause,
 *                              see getPrimaryKeyParameters(long, String)
 * @param pColumns              the columns to fetch
 * @return a cursor over the matching row(s); the caller must close it
 */
public Cursor getTileCursor(final String[] pPrimaryKeyParameters, final String[] pColumns) {
    return db.query(DatabaseFileArchive.TABLE, pColumns, primaryKey, pPrimaryKeyParameters, null, null, null);
}
/**
 * Columns fetched by loadTile: the tile blob and its expiration timestamp.
 * Cached statically for optimization reasons.
 * @since 5.6.5
 */
private static final String[] queryColumns = {DatabaseFileArchive.COLUMN_TILE, SqlTileWriter.COLUMN_EXPIRES};
/**
 * Column fetched by getExpirationTimestamp: the expiration timestamp only.
 * Cached statically for optimization reasons.
 * @since 5.6.5
 */
private static final String[] expireQueryColumn = {SqlTileWriter.COLUMN_EXPIRES};
/**
 * Loads a tile from the sql cache as a Drawable. A tile whose stored expiration
 * timestamp is in the past is still returned, but marked EXPIRED so callers can
 * schedule a refresh.
 *
 * @return the tile drawable, or null when the tile is not in the cache
 */
@Override
public Drawable loadTile(final ITileSource pTileSource, final MapTile pTile) throws Exception {
    byte[] bits = null;
    long expirationTimestamp = 0;
    final Cursor cur = getTileCursor(getPrimaryKeyParameters(getIndex(pTile), pTileSource), queryColumns);
    try {
        if (cur.moveToFirst()) {
            bits = cur.getBlob(cur.getColumnIndex(DatabaseFileArchive.COLUMN_TILE));
            expirationTimestamp = cur.getLong(cur.getColumnIndex(SqlTileWriter.COLUMN_EXPIRES));
        }
    } finally {
        // close in finally: the original leaked the cursor when getBlob/getLong threw
        cur.close();
    }
    if (bits == null) {
        if (Configuration.getInstance().isDebugMode()) {
            Log.d(IMapView.LOGTAG, "SqlCache - Tile doesn't exist: " + pTileSource.name() + pTile);
        }
        return null;
    }
    InputStream inputStream = null;
    try {
        inputStream = new ByteArrayInputStream(bits);
        final Drawable drawable = pTileSource.getDrawable(inputStream);
        // Check to see if file has expired
        final long now = System.currentTimeMillis();
        final boolean fileExpired = expirationTimestamp < now;
        if (fileExpired && drawable != null) {
            if (Configuration.getInstance().isDebugMode()) {
                Log.d(IMapView.LOGTAG, "Tile expired: " + pTileSource.name() + pTile);
            }
            ExpirableBitmapDrawable.setState(drawable, ExpirableBitmapDrawable.EXPIRED);
        }
        return drawable;
    } finally {
        if (inputStream != null) {
            StreamUtils.closeStream(inputStream);
        }
    }
}
}
|
feature/#634
New:
* MapTileApproximater: a tile provider that computes approximation of tiles based on the tiles of the same region, but on lower zoom level tiles. Useful in offline mode. To be used as a nth provider.
Modifications:
* CacheAdapter: small bug fix
* IFilesystemCache: added method loadTile
* MapTileFilesystemProvider: added a TileWriter member, used the TileWriter.loadTile method in TileLoader.loadTile
* MapTileModuleProviderBase: added method tileLoadedScaled, fixed methods tileLoaded* with explicit calls to ExpirableBitmapDrawable.setState
* MapTileProviderBase: moved method getBitmap to MapTileApproximater, used new method MapTileApproximater.approximateTileFromLowerZoom in ZoomInLooper.handleTile
* MapTileProviderBasic: included MapTileApproximater to the provider array
* MapTileSqlCacheProvider: used the SqlTileWriter.loadTile method in TileLoader.loadTile
* SqlArchiveTileWriter: small bug fix, added tool methods for uniform db access, implemented new method loadTile
* SqlTileWriter: added tool methods for uniform db access, implemented new method loadTile
* TileWriter: added method getFile, implemented new method loadTile
|
osmdroid-android/src/main/java/org/osmdroid/tileprovider/modules/SqlTileWriter.java
|
feature/#634
|
|
Java
|
apache-2.0
|
ffce0f29016350f21cb95d1d20eb4350bbd33f30
| 0
|
matej116/cgeo,matej116/cgeo,cgeo/cgeo,rsudev/c-geo-opensource,cgeo/cgeo,cgeo/cgeo,tobiasge/cgeo,tobiasge/cgeo,matej116/cgeo,rsudev/c-geo-opensource,rsudev/c-geo-opensource,tobiasge/cgeo,cgeo/cgeo
|
package cgeo.geocaching.connector.lc;
import cgeo.geocaching.enumerations.CacheSize;
import cgeo.geocaching.enumerations.CacheType;
import cgeo.geocaching.enumerations.LoadFlags.SaveFlag;
import cgeo.geocaching.enumerations.WaypointType;
import cgeo.geocaching.location.Geopoint;
import cgeo.geocaching.location.Viewport;
import cgeo.geocaching.models.Geocache;
import cgeo.geocaching.models.Waypoint;
import cgeo.geocaching.network.Network;
import cgeo.geocaching.network.Parameters;
import cgeo.geocaching.settings.Settings;
import cgeo.geocaching.storage.DataStore;
import cgeo.geocaching.utils.JsonUtils;
import cgeo.geocaching.utils.Log;
import cgeo.geocaching.utils.SynchronizedDateFormat;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.EnumSet;
import java.util.List;
import java.util.Locale;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.ArrayNode;
import io.reactivex.rxjava3.core.Single;
import io.reactivex.rxjava3.functions.Function;
import okhttp3.Response;
import org.apache.commons.lang3.StringUtils;
/**
 * Client for the geocaching.com Adventure Lab ("Labs") REST API.
 * All search methods require a GC premium membership and return empty/null
 * results otherwise. Parsed caches are stored via DataStore as a side effect.
 */
final class LCApi {

    // when true, waypoint parsing is skipped entirely (passive / minimal mode);
    // primitive boolean instead of the boxed Boolean the original used
    private static final boolean minimalFunction = true;

    private static final SynchronizedDateFormat DATE_FORMAT = new SynchronizedDateFormat("yyyy-MM-dd", Locale.getDefault());

    @NonNull
    private static final String API_HOST = "https://labs-api.geocaching.com/Api/Adventures/";

    private LCApi() {
        // utility class with static methods
    }

    /**
     * Fetches a single Adventure Lab cache by geocode.
     *
     * @param geocode full connector geocode; the first two characters (the
     *                connector prefix) are stripped before the API call
     * @return the cache, or null when not a premium member or on any failure
     */
    @Nullable
    protected static Geocache searchByGeocode(final String geocode) {
        if (!Settings.isGCPremiumMember()) {
            return null;
        }
        try {
            final Response response = apiRequest(geocode.substring(2)).blockingGet();
            return importCacheFromJSON(response);
        } catch (final Exception ignored) {
            return null;
        }
    }

    /**
     * Searches for caches inside the given viewport. The API only supports
     * center/radius queries, so the viewport diagonal is converted into a search
     * radius around the viewport center.
     */
    @NonNull
    protected static Collection<Geocache> searchByBBox(final Viewport viewport) {
        if (!Settings.isGCPremiumMember() || viewport.getLatitudeSpan() == 0 || viewport.getLongitudeSpan() == 0) {
            return Collections.emptyList();
        }
        final double lat1 = viewport.getLatitudeMax();
        final double lat2 = viewport.getLatitudeMin();
        final double lon1 = viewport.getLongitudeMax();
        final double lon2 = viewport.getLongitudeMin();
        final double latcenter = (lat1 + lat2) / 2;
        final double loncenter = (lon1 + lon2) / 2;
        final Geopoint gp1 = new Geopoint(lat1, lon1);
        final Geopoint gp2 = new Geopoint(lat2, lon2);
        final double radius = gp1.distanceTo(gp2) * 500; // we get diameter in km, need radius in m
        Log.d("_LC Radius: " + String.valueOf((int) radius));
        final Parameters params = new Parameters("skip", "0");
        params.add("take", "500");
        params.add("radiusMeters", String.valueOf((int) radius));
        params.add("origin.latitude", String.valueOf(latcenter));
        params.add("origin.longitude", String.valueOf(loncenter));
        try {
            final Response response = apiRequest("SearchV3", params).blockingGet();
            return importCachesFromJSON(response);
        } catch (final Exception ignored) {
            return Collections.emptyList();
        }
    }

    /**
     * Searches for caches around the given center with a fixed 10 km radius
     * (at most 20 results).
     */
    @NonNull
    protected static Collection<Geocache> searchByCenter(final Geopoint center) {
        if (!Settings.isGCPremiumMember()) {
            return Collections.emptyList();
        }
        final Parameters params = new Parameters("skip", "0");
        params.add("take", "20");
        params.add("radiusMeters", "10000");
        params.add("origin.latitude", String.valueOf(center.getLatitude()));
        params.add("origin.longitude", String.valueOf(center.getLongitude()));
        try {
            final Response response = apiRequest("SearchV3", params).blockingGet();
            return importCachesFromJSON(response);
        } catch (final Exception ignored) {
            return Collections.emptyList();
        }
    }

    @NonNull
    private static Single<Response> apiRequest(final String uri) {
        return Network.getRequest(API_HOST + uri);
    }

    @NonNull
    private static Single<Response> apiRequest(final String uri, final Parameters params) {
        return apiRequest(uri, params, false);
    }

    /** Performs the API request, retrying exactly once on HTTP 403. */
    @NonNull
    private static Single<Response> apiRequest(final String uri, final Parameters params, final boolean isRetry) {
        final Single<Response> response = Network.getRequest(API_HOST + uri, params);
        // retry at most one time
        return response.flatMap((Function<Response, Single<Response>>) response1 -> {
            if (!isRetry && response1.code() == 403) {
                return apiRequest(uri, params, true);
            }
            return Single.just(response1);
        });
    }

    /**
     * Parses a single-cache ("detail") response body.
     * Annotated @Nullable (the original wrongly said @NonNull) because null is
     * returned on any parse failure.
     */
    @Nullable
    private static Geocache importCacheFromJSON(final Response response) {
        try {
            final JsonNode json = JsonUtils.reader.readTree(Network.getResponseData(response));
            return parseCacheDetail(json);
        } catch (final Exception e) {
            Log.w("_LC importCacheFromJSON", e);
            return null;
        }
    }

    /** Parses a search response body into a list of caches (empty on failure). */
    @NonNull
    private static List<Geocache> importCachesFromJSON(final Response response) {
        try {
            final JsonNode json = JsonUtils.reader.readTree(Network.getResponseData(response));
            Log.d("_LC importCachesFromJson: " + json.toPrettyString());
            final JsonNode items = json.at("/Items");
            if (!items.isArray()) {
                return Collections.emptyList();
            }
            final List<Geocache> caches = new ArrayList<>(items.size());
            for (final JsonNode node : items) {
                final Geocache cache = parseCache(node);
                if (cache != null) {
                    caches.add(cache);
                }
            }
            return caches;
        } catch (final Exception e) {
            Log.w("_LC importCachesFromJSON", e);
            return Collections.emptyList();
        }
    }

    /**
     * Parses one entry of a search result into a Geocache and saves it to the
     * in-memory cache.
     *
     * @return the cache, or null when a mandatory field was missing
     */
    @Nullable
    private static Geocache parseCache(final JsonNode response) {
        try {
            final Geocache cache = new Geocache();
            final JsonNode location = response.at("/Location");
            final String firebaseDynamicLink = response.get("FirebaseDynamicLink").asText();
            final String[] segments = firebaseDynamicLink.split("/");
            final String geocode = LCConnector.GEOCODE_PREFIX + response.get("Id").asText();
            cache.setReliableLatLon(true);
            cache.setGeocode(geocode);
            // the last segment of the dynamic link serves as the cache id
            cache.setCacheId(segments[segments.length - 1]);
            cache.setName(response.get("Title").asText());
            cache.setCoords(new Geopoint(location.get("Latitude").asText(), location.get("Longitude").asText()));
            cache.setType(CacheType.ADVLAB);
            cache.setSize(CacheSize.getById("virtual"));
            cache.setArchived(response.get("IsArchived").asBoolean()); // we get that even in passive mode!
            // cache.setFound(response.get("IsComplete").asBoolean()); as soon as we're using active mode
            DataStore.saveCache(cache, EnumSet.of(SaveFlag.CACHE));
            return cache;
        } catch (final NullPointerException e) {
            Log.e("_LC LCApi.parseCache", e);
            return null;
        }
    }

    // Having a separate parser for details is required because the API provider
    // decided to use different upper/lower case wordings for the same entities
    /**
     * Parses a single-cache detail response into a Geocache (including description,
     * owner, hidden date and waypoints) and saves it to the database.
     *
     * @return the cache, or null when a mandatory field was missing
     */
    @Nullable
    private static Geocache parseCacheDetail(final JsonNode response) {
        try {
            final Geocache cache = new Geocache();
            final JsonNode location = response.at("/Location");
            final String firebaseDynamicLink = response.get("FirebaseDynamicLink").asText();
            final String[] segments = firebaseDynamicLink.split("/");
            final String geocode = LCConnector.GEOCODE_PREFIX + response.get("Id").asText();
            final String ilink = response.get("KeyImageUrl").asText();
            final String desc = response.get("Description").asText();
            cache.setReliableLatLon(true);
            cache.setGeocode(geocode);
            cache.setCacheId(segments[segments.length - 1]);
            cache.setName(response.get("Title").asText());
            cache.setDescription((StringUtils.isNotBlank(ilink) ? "<img src=\"" + ilink + "\" </img><p><p>" : "") + desc);
            cache.setCoords(new Geopoint(location.get("Latitude").asText(), location.get("Longitude").asText()));
            cache.setType(CacheType.ADVLAB);
            cache.setSize(CacheSize.getById("virtual"));
            // cache.setArchived(response.get("IsArchived").asBoolean()); as soon as we're using active mode
            // cache.setFound(response.get("IsComplete").asBoolean()); as soon as we're using active mode
            cache.setDisabled(false);
            cache.setHidden(parseDate(response.get("PublishedUtc").asText()));
            cache.setOwnerDisplayName(response.get("OwnerUsername").asText());
            cache.setWaypoints(parseWaypoints((ArrayNode) response.path("GeocacheSummaries")), false);
            cache.setDetailedUpdatedNow();
            DataStore.saveCache(cache, EnumSet.of(SaveFlag.DB));
            return cache;
        } catch (final NullPointerException e) {
            Log.e("_LC LCApi.parseCache", e);
            return null;
        }
    }

    /**
     * Parses the stages ("GeocacheSummaries") of an adventure into waypoints.
     *
     * @return the waypoints, or null in minimal mode or when no stage could be parsed
     */
    @Nullable
    private static List<Waypoint> parseWaypoints(final ArrayNode wptsJson) {
        if (minimalFunction) {
            return null;
        }
        List<Waypoint> result = null;
        final Geopoint pointZero = new Geopoint(0, 0);
        int stageCounter = 0;
        for (final JsonNode wptResponse: wptsJson) {
            stageCounter++;
            try {
                final Waypoint wpt = new Waypoint(wptResponse.get("Title").asText(), WaypointType.PUZZLE, false);
                final JsonNode location = wptResponse.at("/Location");
                final String ilink = wptResponse.get("KeyImageUrl").asText();
                final String desc = wptResponse.get("Description").asText();
                // For ALCs, waypoints don't have a geocode, of course they have an id (a uuid) though.
                // We artificially create a geocode and a prefix as at least the prefix is used when
                // showing waypoints on the map. It seems that the geocode from the parent is used but
                // prefixed with what we set here. Not clear where the geocode of a waypoint comes into play
                // but we will eventually figure that out.
                wpt.setGeocode(String.valueOf(stageCounter));
                wpt.setPrefix(String.valueOf(stageCounter));
                wpt.setNote("<img style=\"width: 100%;\" src=\"" + ilink + "\"</img><p><p>" + desc + "<p><p>" + wptResponse.get("Question").asText());
                final Geopoint pt = new Geopoint(location.get("Latitude").asDouble(), location.get("Longitude").asDouble());
                // a (0,0) location is treated as "no coordinates"; the former
                // pt != null check was redundant (new never yields null)
                if (!pt.equals(pointZero)) {
                    wpt.setCoords(pt);
                } else {
                    wpt.setOriginalCoordsEmpty(true);
                }
                if (result == null) {
                    result = new ArrayList<>();
                }
                result.add(wpt);
            } catch (final NullPointerException e) {
                Log.e("_LC LCApi.parseWaypoints", e);
            }
        }
        return result;
    }

    /** Parses a yyyy-MM-dd date, falling back to epoch 0 on parse errors. */
    @Nullable
    private static Date parseDate(final String date) {
        try {
            return DATE_FORMAT.parse(date);
        } catch (final ParseException e) {
            return new Date(0);
        }
    }
}
|
main/src/cgeo/geocaching/connector/lc/LCApi.java
|
package cgeo.geocaching.connector.lc;
import cgeo.geocaching.enumerations.CacheSize;
import cgeo.geocaching.enumerations.CacheType;
import cgeo.geocaching.enumerations.LoadFlags.SaveFlag;
import cgeo.geocaching.enumerations.WaypointType;
import cgeo.geocaching.location.Geopoint;
import cgeo.geocaching.location.Viewport;
import cgeo.geocaching.models.Geocache;
import cgeo.geocaching.models.Waypoint;
import cgeo.geocaching.network.Network;
import cgeo.geocaching.network.Parameters;
import cgeo.geocaching.settings.Settings;
import cgeo.geocaching.storage.DataStore;
import cgeo.geocaching.utils.JsonUtils;
import cgeo.geocaching.utils.Log;
import cgeo.geocaching.utils.SynchronizedDateFormat;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.EnumSet;
import java.util.List;
import java.util.Locale;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.ArrayNode;
import io.reactivex.rxjava3.core.Single;
import io.reactivex.rxjava3.functions.Function;
import okhttp3.Response;
import org.apache.commons.lang3.StringUtils;
/**
 * Client for the geocaching.com Adventure Lab ("Labs") REST API.
 * All search methods require a GC premium membership and return empty/null
 * results otherwise. Parsed caches are stored via DataStore as a side effect.
 */
final class LCApi {

    // when true, waypoint parsing is skipped entirely (passive / minimal mode);
    // primitive boolean instead of the boxed Boolean the original used
    private static final boolean minimalFunction = true;

    private static final SynchronizedDateFormat DATE_FORMAT = new SynchronizedDateFormat("yyyy-MM-dd", Locale.getDefault());

    @NonNull
    private static final String API_HOST = "https://labs-api.geocaching.com/Api/Adventures/";

    private LCApi() {
        // utility class with static methods
    }

    /**
     * Fetches a single Adventure Lab cache by geocode.
     *
     * @param geocode full connector geocode; the first two characters (the
     *                connector prefix) are stripped before the API call
     * @return the cache, or null when not a premium member or on any failure
     */
    @Nullable
    protected static Geocache searchByGeocode(final String geocode) {
        if (!Settings.isGCPremiumMember()) {
            return null;
        }
        try {
            final Response response = apiRequest(geocode.substring(2)).blockingGet();
            return importCacheFromJSON(response);
        } catch (final Exception ignored) {
            return null;
        }
    }

    /**
     * Searches for caches inside the given viewport. The API only supports
     * center/radius queries, so the viewport diagonal is converted into a search
     * radius around the viewport center. The original implementation delegated to
     * searchByCenter with a fixed 10 km / 20-result query and therefore missed
     * caches in larger viewports.
     */
    @NonNull
    protected static Collection<Geocache> searchByBBox(final Viewport viewport) {
        if (!Settings.isGCPremiumMember() || viewport.getLatitudeSpan() == 0 || viewport.getLongitudeSpan() == 0) {
            return Collections.emptyList();
        }
        final double latcenter = (viewport.getLatitudeMax() + viewport.getLatitudeMin()) / 2;
        final double loncenter = (viewport.getLongitudeMax() + viewport.getLongitudeMin()) / 2;
        final Geopoint gp1 = new Geopoint(viewport.getLatitudeMax(), viewport.getLongitudeMax());
        final Geopoint gp2 = new Geopoint(viewport.getLatitudeMin(), viewport.getLongitudeMin());
        // distanceTo returns the diameter in km; the API wants a radius in m
        final double radius = gp1.distanceTo(gp2) * 500;
        final Parameters params = new Parameters("skip", "0");
        params.add("take", "500");
        params.add("radiusMeters", String.valueOf((int) radius));
        params.add("origin.latitude", String.valueOf(latcenter));
        params.add("origin.longitude", String.valueOf(loncenter));
        try {
            final Response response = apiRequest("SearchV3", params).blockingGet();
            return importCachesFromJSON(response);
        } catch (final Exception ignored) {
            return Collections.emptyList();
        }
    }

    /**
     * Searches for caches around the given center with a fixed 10 km radius
     * (at most 20 results).
     */
    @NonNull
    protected static Collection<Geocache> searchByCenter(final Geopoint center) {
        if (!Settings.isGCPremiumMember()) {
            return Collections.emptyList();
        }
        final Parameters params = new Parameters("skip", "0");
        params.add("take", "20");
        params.add("radiusMeters", "10000");
        params.add("origin.latitude", String.valueOf(center.getLatitude()));
        params.add("origin.longitude", String.valueOf(center.getLongitude()));
        try {
            final Response response = apiRequest("SearchV3", params).blockingGet();
            return importCachesFromJSON(response);
        } catch (final Exception ignored) {
            return Collections.emptyList();
        }
    }

    @NonNull
    private static Single<Response> apiRequest(final String uri) {
        return Network.getRequest(API_HOST + uri);
    }

    @NonNull
    private static Single<Response> apiRequest(final String uri, final Parameters params) {
        return apiRequest(uri, params, false);
    }

    /** Performs the API request, retrying exactly once on HTTP 403. */
    @NonNull
    private static Single<Response> apiRequest(final String uri, final Parameters params, final boolean isRetry) {
        final Single<Response> response = Network.getRequest(API_HOST + uri, params);
        // retry at most one time
        return response.flatMap((Function<Response, Single<Response>>) response1 -> {
            if (!isRetry && response1.code() == 403) {
                return apiRequest(uri, params, true);
            }
            return Single.just(response1);
        });
    }

    /**
     * Parses a single-cache ("detail") response body.
     * Annotated @Nullable (the original wrongly said @NonNull) because null is
     * returned on any parse failure.
     */
    @Nullable
    private static Geocache importCacheFromJSON(final Response response) {
        try {
            final JsonNode json = JsonUtils.reader.readTree(Network.getResponseData(response));
            return parseCacheDetail(json);
        } catch (final Exception e) {
            Log.w("_LC importCacheFromJSON", e);
            return null;
        }
    }

    /** Parses a search response body into a list of caches (empty on failure). */
    @NonNull
    private static List<Geocache> importCachesFromJSON(final Response response) {
        try {
            final JsonNode json = JsonUtils.reader.readTree(Network.getResponseData(response));
            final JsonNode items = json.at("/Items");
            if (!items.isArray()) {
                return Collections.emptyList();
            }
            final List<Geocache> caches = new ArrayList<>(items.size());
            for (final JsonNode node : items) {
                final Geocache cache = parseCache(node);
                if (cache != null) {
                    caches.add(cache);
                }
            }
            return caches;
        } catch (final Exception e) {
            Log.w("_LC importCachesFromJSON", e);
            return Collections.emptyList();
        }
    }

    /**
     * Parses one entry of a search result into a Geocache and saves it to the
     * in-memory cache.
     *
     * @return the cache, or null when a mandatory field was missing
     */
    @Nullable
    private static Geocache parseCache(final JsonNode response) {
        try {
            final Geocache cache = new Geocache();
            final JsonNode location = response.at("/Location");
            final String firebaseDynamicLink = response.get("FirebaseDynamicLink").asText();
            final String[] segments = firebaseDynamicLink.split("/");
            final String geocode = LCConnector.GEOCODE_PREFIX + response.get("Id").asText();
            cache.setReliableLatLon(true);
            cache.setGeocode(geocode);
            // the last segment of the dynamic link serves as the cache id
            cache.setCacheId(segments[segments.length - 1]);
            cache.setName(response.get("Title").asText());
            cache.setCoords(new Geopoint(location.get("Latitude").asText(), location.get("Longitude").asText()));
            cache.setType(CacheType.ADVLAB);
            cache.setSize(CacheSize.getById("virtual"));
            cache.setArchived(response.get("IsArchived").asBoolean()); // we get that even in passive mode!
            // cache.setFound(response.get("IsComplete").asBoolean()); as soon as we're using active mode
            DataStore.saveCache(cache, EnumSet.of(SaveFlag.CACHE));
            return cache;
        } catch (final NullPointerException e) {
            Log.e("_LC LCApi.parseCache", e);
            return null;
        }
    }

    // Having a separate parser for details is required because the API provider
    // decided to use different upper/lower case wordings for the same entities
    /**
     * Parses a single-cache detail response into a Geocache (including description,
     * owner, hidden date and waypoints) and saves it to the database.
     *
     * @return the cache, or null when a mandatory field was missing
     */
    @Nullable
    private static Geocache parseCacheDetail(final JsonNode response) {
        try {
            final Geocache cache = new Geocache();
            final JsonNode location = response.at("/Location");
            final String firebaseDynamicLink = response.get("FirebaseDynamicLink").asText();
            final String[] segments = firebaseDynamicLink.split("/");
            final String geocode = LCConnector.GEOCODE_PREFIX + response.get("Id").asText();
            final String ilink = response.get("KeyImageUrl").asText();
            final String desc = response.get("Description").asText();
            cache.setReliableLatLon(true);
            cache.setGeocode(geocode);
            cache.setCacheId(segments[segments.length - 1]);
            cache.setName(response.get("Title").asText());
            cache.setDescription((StringUtils.isNotBlank(ilink) ? "<img src=\"" + ilink + "\" </img><p><p>" : "") + desc);
            cache.setCoords(new Geopoint(location.get("Latitude").asText(), location.get("Longitude").asText()));
            cache.setType(CacheType.ADVLAB);
            cache.setSize(CacheSize.getById("virtual"));
            // cache.setArchived(response.get("IsArchived").asBoolean()); as soon as we're using active mode
            // cache.setFound(response.get("IsComplete").asBoolean()); as soon as we're using active mode
            cache.setDisabled(false);
            cache.setHidden(parseDate(response.get("PublishedUtc").asText()));
            cache.setOwnerDisplayName(response.get("OwnerUsername").asText());
            cache.setWaypoints(parseWaypoints((ArrayNode) response.path("GeocacheSummaries")), false);
            cache.setDetailedUpdatedNow();
            DataStore.saveCache(cache, EnumSet.of(SaveFlag.DB));
            return cache;
        } catch (final NullPointerException e) {
            Log.e("_LC LCApi.parseCache", e);
            return null;
        }
    }

    /**
     * Parses the stages ("GeocacheSummaries") of an adventure into waypoints.
     *
     * @return the waypoints, or null in minimal mode or when no stage could be parsed
     */
    @Nullable
    private static List<Waypoint> parseWaypoints(final ArrayNode wptsJson) {
        if (minimalFunction) {
            return null;
        }
        List<Waypoint> result = null;
        final Geopoint pointZero = new Geopoint(0, 0);
        int stageCounter = 0;
        for (final JsonNode wptResponse: wptsJson) {
            stageCounter++;
            try {
                final Waypoint wpt = new Waypoint(wptResponse.get("Title").asText(), WaypointType.PUZZLE, false);
                final JsonNode location = wptResponse.at("/Location");
                final String ilink = wptResponse.get("KeyImageUrl").asText();
                final String desc = wptResponse.get("Description").asText();
                // For ALCs, waypoints don't have a geocode, of course they have an id (a uuid) though.
                // We artificially create a geocode and a prefix as at least the prefix is used when
                // showing waypoints on the map. It seems that the geocode from the parent is used but
                // prefixed with what we set here. Not clear where the geocode of a waypoint comes into play
                // but we will eventually figure that out.
                wpt.setGeocode(String.valueOf(stageCounter));
                wpt.setPrefix(String.valueOf(stageCounter));
                wpt.setNote("<img style=\"width: 100%;\" src=\"" + ilink + "\"</img><p><p>" + desc + "<p><p>" + wptResponse.get("Question").asText());
                final Geopoint pt = new Geopoint(location.get("Latitude").asDouble(), location.get("Longitude").asDouble());
                // a (0,0) location is treated as "no coordinates"; the former
                // pt != null check was redundant (new never yields null)
                if (!pt.equals(pointZero)) {
                    wpt.setCoords(pt);
                } else {
                    wpt.setOriginalCoordsEmpty(true);
                }
                if (result == null) {
                    result = new ArrayList<>();
                }
                result.add(wpt);
            } catch (final NullPointerException e) {
                Log.e("_LC LCApi.parseWaypoints", e);
            }
        }
        return result;
    }

    /** Parses a yyyy-MM-dd date, falling back to epoch 0 on parse errors. */
    @Nullable
    private static Date parseDate(final String date) {
        try {
            return DATE_FORMAT.parse(date);
        } catch (final ParseException e) {
            return new Date(0);
        }
    }
}
|
Fixes #10569 ALC live map loading needs optimization (#10577)
* real searchByBBox() implemented
* removed haversine, use Geopoint.distanceTo()
* Geopoint.distanceTo() seems to return km, not m
* multiply before truncate to int
* syntax error .. sorry
|
main/src/cgeo/geocaching/connector/lc/LCApi.java
|
Fixes #10569 ALC live map loading needs optimization (#10577)
|
|
Java
|
apache-2.0
|
b03a866b7f635779cdb33f7a34edee2cd8b58a1a
| 0
|
bitbrain/beansjam-2017,bitbrain/beansjam-2017
|
package tv.rocketbeans.supermafiosi.ui;
import com.badlogic.gdx.graphics.Texture;
import com.badlogic.gdx.graphics.g2d.Batch;
import com.badlogic.gdx.graphics.g2d.NinePatch;
import com.badlogic.gdx.graphics.g2d.Sprite;
import com.badlogic.gdx.scenes.scene2d.Actor;
import com.badlogic.gdx.scenes.scene2d.ui.Label;
import com.badlogic.gdx.utils.Align;
import aurelienribon.tweenengine.BaseTween;
import aurelienribon.tweenengine.Tween;
import aurelienribon.tweenengine.TweenCallback;
import aurelienribon.tweenengine.TweenEquations;
import aurelienribon.tweenengine.TweenManager;
import de.bitbrain.braingdx.assets.SharedAssetManager;
import de.bitbrain.braingdx.graphics.GraphicsFactory;
import de.bitbrain.braingdx.tweens.ActorTween;
import de.bitbrain.braingdx.tweens.SharedTweenManager;
import de.bitbrain.braingdx.tweens.ValueTween;
import de.bitbrain.braingdx.util.ValueProvider;
import tv.rocketbeans.supermafiosi.Colors;
import tv.rocketbeans.supermafiosi.assets.Asset;
import tv.rocketbeans.supermafiosi.core.Dialog;
import tv.rocketbeans.supermafiosi.core.DialogManager;
public class DialogBox extends Actor {
// vertical inner padding between the dialog text and the box bounds
private static final float INNER_PADDING_Y = 40f;
// outer screen margin of the dialog box
private static final float MARGIN = 10f;
// offset applied to the avatar sprite relative to the box bounds
private static final float AVATAR_PADDING = -10f;
// padding of the title background patch around the title label
private static final float TITLE_PADDING = 20f;
// the dialog currently shown (null until the first dialog arrives)
private Dialog dialog;
private final DialogManager dialogManager;
private final TweenManager tweenManager = SharedTweenManager.getInstance();
// label rendering the dialog body text
private Label text;
// label rendering the dialog title
private Label title;
// animated y-offset used in getY() to slide the box in and out
private ValueProvider offsetProvider = new ValueProvider();
// animated offset used in draw() to bounce the avatar sprite
private ValueProvider avatarBouncing = new ValueProvider();
// true while the close animation runs; prevents act() from re-triggering it
private boolean currentlyClosing;
private final NinePatch dialogBackground;
private final NinePatch titleBackground;
static {
    // register the accessor so tweens can animate ValueProvider values
    Tween.registerAccessor(ValueProvider.class, new ValueTween());
}
/**
 * Creates the dialog box.
 *
 * @param dialogManager source of the current dialog; polled every frame in act()
 */
public DialogBox(DialogManager dialogManager) {
    this.dialogManager = dialogManager;
    // Create a nice background so font is readable
    Texture buttonNinePatchTexture = SharedAssetManager.getInstance().get(Asset.Textures.PANEL_9PATCH, Texture.class);
    Texture labelNinePatchTexture = SharedAssetManager.getInstance().get(Asset.Textures.LABEL_9PATCH, Texture.class);
    dialogBackground = GraphicsFactory.createNinePatch(buttonNinePatchTexture, 20, Colors.FOREGROUND);
    titleBackground = GraphicsFactory.createNinePatch(labelNinePatchTexture, 15, Colors.FOREGROUND);
}
/**
 * Polls the dialog manager each frame; when the current dialog changed, plays
 * the close animation for the old dialog and installs the new one once the
 * close animation completed.
 */
@Override
public void act(float delta) {
    final boolean dialogChanged = dialog == null || dialog != dialogManager.getCurrentDialog();
    if (!currentlyClosing && dialogChanged) {
        unsetDialog(dialog, new TweenCallback() {
            @Override
            public void onEvent(int type, BaseTween<?> source) {
                setDialog(dialogManager.getCurrentDialog());
            }
        });
    }
}
/** The box is horizontally fixed at the screen margin. */
@Override
public float getX() {
    return MARGIN;
}

/** Vertical position follows the animated slide offset. */
@Override
public float getY() {
    return MARGIN + offsetProvider.getValue();
}
/**
 * Renders title (with its background patch), dialog background, avatar sprite
 * and body text. Title position is re-synced from getTitleX()/getTitleY() each
 * frame so it follows the slide animation.
 */
@Override
public void draw(Batch batch, float parentAlpha) {
    // fade the whole box with this actor's own alpha
    parentAlpha *= getColor().a;
    if (title != null) {
        title.setX(getTitleX());
        title.setY(getTitleY());
        // keep the background patch fading in sync with the title label
        titleBackground.getColor().a = title.getColor().a;
        titleBackground.draw(batch, getTitleBackgroundX(), getTitleBackgroundY(), getTitleBackgroundWidth(), getTitleBackgroundHeight());
        title.draw(batch, 1f);
    }
    if (dialog != null) {
        dialogBackground.draw(batch, getX(), getY(), getWidth() - MARGIN * 2f, getHeight());
        Sprite avatar = dialog.getPicture();
        // avatarBouncing contributes the animated vertical bounce
        avatar.setPosition(getX() + AVATAR_PADDING + 20f, getY() + AVATAR_PADDING + avatarBouncing.getValue());
        avatar.setSize(getHeight() - AVATAR_PADDING * 2f, getHeight() - AVATAR_PADDING * 2f);
        dialog.getPicture().draw(batch, parentAlpha);
    }
    if (text != null) {
        // text sits right of the avatar, top-aligned inside the box
        text.setPosition(getX() + getHeight() + 50f, getY() + getHeight() - text.getHeight() + - INNER_PADDING_Y);
        text.draw(batch, parentAlpha);
    }
}
/**
 * Fades out the currently shown dialog (text, title, box alpha and a slide
 * down off-screen) and invokes {@code finishCallback} when the slide-out
 * tween completes. If no dialog is shown, the callback fires immediately.
 *
 * @param dialog         the dialog being closed (may be null)
 * @param finishCallback invoked once the close animation is done
 */
private void unsetDialog(Dialog dialog, TweenCallback finishCallback) {
  if (dialog != null) {
    currentlyClosing = true;
    Tween.to(text, ActorTween.ALPHA, 0.5f)
        .target(0f)
        .ease(TweenEquations.easeInCubic)
        .start(tweenManager);
    Tween.to(title, ActorTween.ALPHA, 0.5f)
        .target(0f)
        .ease(TweenEquations.easeInCubic)
        .start(tweenManager);
    Tween.to(this, ActorTween.ALPHA, 0.5f)
        .delay(0.3f)
        .target(0f)
        .ease(TweenEquations.easeInCubic)
        .start(tweenManager);
    // The slide-out carries the completion callback; it is the animation
    // whose end signals that the next dialog may be shown.
    Tween.to(offsetProvider, ValueTween.VALUE, 0.5f)
        .target(getFadeOutYPosition())
        .ease(TweenEquations.easeInCubic)
        .setCallbackTriggers(TweenCallback.COMPLETE)
        .setCallback(finishCallback)
        .start(tweenManager);
    // Stop the infinite avatar bounce so it does not run while closed.
    tweenManager.killTarget(avatarBouncing);
  } else {
    finishCallback.onEvent(0, null);
  }
}
/**
 * Installs a new dialog and starts the open animations: box/text/title fade
 * in, the box slides up from below the screen, and the avatar begins an
 * infinite yoyo bounce.
 *
 * @param dialog the dialog to show (ignored if null)
 */
private void setDialog(Dialog dialog) {
  currentlyClosing = false;
  if (dialog != null) {
    this.dialog = dialog;
    this.text = new Label(dialog.getText(), Styles.LABEL_DIALOG);
    this.title = new Label(dialog.getTitle(), Styles.LABEL_DIALOG_TITLE);
    text.setColor(dialog.getColor());
    text.setWrap(true);
    // Reserve space for the avatar (width == box height) plus margins.
    text.setWidth(getWidth() - getHeight() - MARGIN * 2f - 50f);
    text.setAlignment(Align.top | Align.left);
    text.setHeight(getHeight() - MARGIN);
    getColor().a = 0f;
    Tween.to(this, ActorTween.ALPHA, 0.8f)
        .delay(0.3f)
        .target(1f)
        .ease(TweenEquations.easeInCubic)
        .start(tweenManager);
    text.getColor().a = 0f;
    Tween.to(text, ActorTween.ALPHA, 0.4f)
        .delay(0.6f)
        .target(1f)
        .ease(TweenEquations.easeInCubic)
        .start(tweenManager);
    Tween.to(title, ActorTween.ALPHA, 0.6f)
        .target(1f)
        .ease(TweenEquations.easeInCubic)
        .start(tweenManager);
    // Start below the screen and slide up into place.
    offsetProvider.setValue(getFadeOutYPosition());
    Tween.to(offsetProvider, ValueTween.VALUE, 0.5f)
        .target(0f)
        .ease(TweenEquations.easeInCubic)
        .start(tweenManager);
    // Reset the bounce offset before restarting the yoyo tween, so the
    // avatar starts bouncing from the ground position (the bouncing fix).
    avatarBouncing.setValue(0f);
    Tween.to(avatarBouncing, ValueTween.VALUE, 0.5f)
        .target(15f)
        .ease(TweenEquations.easeInCubic)
        .repeatYoyo(Tween.INFINITY, 0f)
        .start(tweenManager);
  }
}
/** Left edge of the title backdrop (title x minus padding). */
private float getTitleBackgroundX() {
  return title.getX() - TITLE_PADDING;
}
/** Bottom edge of the title backdrop (title y minus padding). */
private float getTitleBackgroundY() {
  return title.getY() - TITLE_PADDING;
}
/** Backdrop width: preferred title width plus padding on both sides. */
private float getTitleBackgroundWidth() {
  return title.getPrefWidth() + TITLE_PADDING * 2f;
}
/** Backdrop height: preferred title height plus padding on both sides. */
private float getTitleBackgroundHeight() {
  return title.getPrefHeight() + TITLE_PADDING * 2f;
}
/** Title sits just above the dialog box (the -2f nudges it onto the panel edge). */
private float getTitleY() {
  return getY() + getHeight() + TITLE_PADDING - 2f;
}
/** Title is indented from the box's left edge by the title padding. */
private float getTitleX() {
  return getX() + TITLE_PADDING;
}
/** Slide offset that puts the whole box (including title) below the screen. */
private float getFadeOutYPosition() {
  return -getHeight() - MARGIN - TITLE_PADDING * 4f;
}
}
|
core/src/tv/rocketbeans/supermafiosi/ui/DialogBox.java
|
package tv.rocketbeans.supermafiosi.ui;
import com.badlogic.gdx.graphics.Texture;
import com.badlogic.gdx.graphics.g2d.Batch;
import com.badlogic.gdx.graphics.g2d.NinePatch;
import com.badlogic.gdx.graphics.g2d.Sprite;
import com.badlogic.gdx.scenes.scene2d.Actor;
import com.badlogic.gdx.scenes.scene2d.ui.Label;
import com.badlogic.gdx.utils.Align;
import aurelienribon.tweenengine.BaseTween;
import aurelienribon.tweenengine.Tween;
import aurelienribon.tweenengine.TweenCallback;
import aurelienribon.tweenengine.TweenEquations;
import aurelienribon.tweenengine.TweenManager;
import de.bitbrain.braingdx.assets.SharedAssetManager;
import de.bitbrain.braingdx.graphics.GraphicsFactory;
import de.bitbrain.braingdx.tweens.ActorTween;
import de.bitbrain.braingdx.tweens.SharedTweenManager;
import de.bitbrain.braingdx.tweens.ValueTween;
import de.bitbrain.braingdx.util.ValueProvider;
import tv.rocketbeans.supermafiosi.Colors;
import tv.rocketbeans.supermafiosi.assets.Asset;
import tv.rocketbeans.supermafiosi.core.Dialog;
import tv.rocketbeans.supermafiosi.core.DialogManager;
/**
 * Scene2d actor which renders the {@link Dialog} currently provided by a
 * {@link DialogManager}: a nine-patch panel, the speaker's avatar, a title
 * label and the dialog text, all animated in and out with tweens.
 */
public class DialogBox extends Actor {

  private static final float INNER_PADDING_Y = 40f;
  private static final float MARGIN = 10f;
  private static final float AVATAR_PADDING = -10f;
  private static final float TITLE_PADDING = 20f;

  // Dialog currently shown (null until the first dialog is installed).
  private Dialog dialog;
  private final DialogManager dialogManager;
  private final TweenManager tweenManager = SharedTweenManager.getInstance();
  private Label text;
  private Label title;
  // Vertical slide offset used for the open/close animation.
  private ValueProvider offsetProvider = new ValueProvider();
  // Vertical bounce offset applied to the avatar.
  private ValueProvider avatarBouncing = new ValueProvider();
  private boolean currentlyClosing;
  private final NinePatch dialogBackground;
  private final NinePatch titleBackground;

  static {
    // Register the accessor once so ValueProvider objects can be tweened.
    Tween.registerAccessor(ValueProvider.class, new ValueTween());
  }

  /**
   * Creates a dialog box bound to the given dialog manager and builds the
   * nine-patch backgrounds for panel and title.
   */
  public DialogBox(DialogManager dialogManager) {
    this.dialogManager = dialogManager;
    // Create a nice background so font is readable
    Texture buttonNinePatchTexture = SharedAssetManager.getInstance().get(Asset.Textures.PANEL_9PATCH, Texture.class);
    Texture labelNinePatchTexture = SharedAssetManager.getInstance().get(Asset.Textures.LABEL_9PATCH, Texture.class);
    dialogBackground = GraphicsFactory.createNinePatch(buttonNinePatchTexture, 20, Colors.FOREGROUND);
    titleBackground = GraphicsFactory.createNinePatch(labelNinePatchTexture, 15, Colors.FOREGROUND);
  }

  /**
   * Watches the dialog manager each frame: when the current dialog changed,
   * fades the old one out and installs the new one once closing finished.
   */
  @Override
  public void act(float delta) {
    if (!currentlyClosing && (dialog == null || dialog != dialogManager.getCurrentDialog())) {
      unsetDialog(dialog, new TweenCallback() {
        @Override
        public void onEvent(int arg0, BaseTween<?> arg1) {
          setDialog(dialogManager.getCurrentDialog());
        }
      });
    }
  }

  /** The box is pinned to the left screen edge with a fixed margin. */
  @Override
  public float getX() {
    return MARGIN;
  }

  /** Bottom margin plus the animated slide offset. */
  @Override
  public float getY() {
    return MARGIN + offsetProvider.getValue();
  }

  @Override
  public void draw(Batch batch, float parentAlpha) {
    parentAlpha *= getColor().a;
    if (title != null) {
      title.setX(getTitleX());
      title.setY(getTitleY());
      // Keep the title backdrop fading in lockstep with the title label.
      titleBackground.getColor().a = title.getColor().a;
      titleBackground.draw(batch, getTitleBackgroundX(), getTitleBackgroundY(), getTitleBackgroundWidth(), getTitleBackgroundHeight());
      title.draw(batch, 1f);
    }
    if (dialog != null) {
      dialogBackground.draw(batch, getX(), getY(), getWidth() - MARGIN * 2f, getHeight());
      Sprite avatar = dialog.getPicture();
      avatar.setPosition(getX() + AVATAR_PADDING + 20f, getY() + AVATAR_PADDING + avatarBouncing.getValue());
      avatar.setSize(getHeight() - AVATAR_PADDING * 2f, getHeight() - AVATAR_PADDING * 2f);
      avatar.draw(batch, parentAlpha);
    }
    if (text != null) {
      text.setPosition(getX() + getHeight() + 50f, getY() + getHeight() - text.getHeight() - INNER_PADDING_Y);
      text.draw(batch, parentAlpha);
    }
  }

  /**
   * Fades out the current dialog and invokes {@code finishCallback} when the
   * slide-out tween completes; fires the callback immediately if no dialog is
   * shown.
   */
  private void unsetDialog(Dialog dialog, TweenCallback finishCallback) {
    if (dialog != null) {
      currentlyClosing = true;
      Tween.to(text, ActorTween.ALPHA, 0.5f)
          .target(0f)
          .ease(TweenEquations.easeInCubic)
          .start(tweenManager);
      Tween.to(title, ActorTween.ALPHA, 0.5f)
          .target(0f)
          .ease(TweenEquations.easeInCubic)
          .start(tweenManager);
      Tween.to(this, ActorTween.ALPHA, 0.5f)
          .delay(0.3f)
          .target(0f)
          .ease(TweenEquations.easeInCubic)
          .start(tweenManager);
      Tween.to(offsetProvider, ValueTween.VALUE, 0.5f)
          .target(getFadeOutYPosition())
          .ease(TweenEquations.easeInCubic)
          .setCallbackTriggers(TweenCallback.COMPLETE)
          .setCallback(finishCallback)
          .start(tweenManager);
      // Stop the infinite avatar bounce while the box is closed.
      tweenManager.killTarget(avatarBouncing);
    } else {
      finishCallback.onEvent(0, null);
    }
  }

  /**
   * Installs a new dialog and starts the open animations: fade-in for box,
   * text and title, a slide-up from below the screen, and an infinite yoyo
   * bounce for the avatar.
   */
  private void setDialog(Dialog dialog) {
    currentlyClosing = false;
    if (dialog != null) {
      this.dialog = dialog;
      this.text = new Label(dialog.getText(), Styles.LABEL_DIALOG);
      this.title = new Label(dialog.getTitle(), Styles.LABEL_DIALOG_TITLE);
      text.setColor(dialog.getColor());
      text.setWrap(true);
      text.setWidth(getWidth() - getHeight() - MARGIN * 2f - 50f);
      text.setAlignment(Align.top | Align.left);
      text.setHeight(getHeight() - MARGIN);
      getColor().a = 0f;
      Tween.to(this, ActorTween.ALPHA, 0.8f)
          .delay(0.3f)
          .target(1f)
          .ease(TweenEquations.easeInCubic)
          .start(tweenManager);
      text.getColor().a = 0f;
      Tween.to(text, ActorTween.ALPHA, 0.4f)
          .delay(0.6f)
          .target(1f)
          .ease(TweenEquations.easeInCubic)
          .start(tweenManager);
      Tween.to(title, ActorTween.ALPHA, 0.6f)
          .target(1f)
          .ease(TweenEquations.easeInCubic)
          .start(tweenManager);
      offsetProvider.setValue(getFadeOutYPosition());
      Tween.to(offsetProvider, ValueTween.VALUE, 0.5f)
          .target(0f)
          .ease(TweenEquations.easeInCubic)
          .start(tweenManager);
      // FIX (bouncing bug): reset the bounce offset before restarting the
      // yoyo tween. The previous bounce tween was killed mid-flight in
      // unsetDialog(), so without this reset the avatar of a re-opened
      // dialog starts bouncing from a stale mid-air offset.
      avatarBouncing.setValue(0f);
      Tween.to(avatarBouncing, ValueTween.VALUE, 0.5f)
          .target(15f)
          .ease(TweenEquations.easeInCubic)
          .repeatYoyo(Tween.INFINITY, 0f)
          .start(tweenManager);
    }
  }

  /** Left edge of the title backdrop. */
  private float getTitleBackgroundX() {
    return title.getX() - TITLE_PADDING;
  }

  /** Bottom edge of the title backdrop. */
  private float getTitleBackgroundY() {
    return title.getY() - TITLE_PADDING;
  }

  /** Backdrop width: preferred title width plus padding on both sides. */
  private float getTitleBackgroundWidth() {
    return title.getPrefWidth() + TITLE_PADDING * 2f;
  }

  /** Backdrop height: preferred title height plus padding on both sides. */
  private float getTitleBackgroundHeight() {
    return title.getPrefHeight() + TITLE_PADDING * 2f;
  }

  /** Title sits just above the dialog box. */
  private float getTitleY() {
    return getY() + getHeight() + TITLE_PADDING - 2f;
  }

  /** Title is indented from the box's left edge. */
  private float getTitleX() {
    return getX() + TITLE_PADDING;
  }

  /** Slide offset that puts the whole box below the screen. */
  private float getFadeOutYPosition() {
    return -getHeight() - MARGIN - TITLE_PADDING * 4f;
  }
}
|
Fix bouncing bug
|
core/src/tv/rocketbeans/supermafiosi/ui/DialogBox.java
|
Fix bouncing bug
|
|
Java
|
apache-2.0
|
3af36e1984246f3c1bfc4b38e1bb859944d558b8
| 0
|
nabilzhang/enunciate,nabilzhang/enunciate,nabilzhang/enunciate,uniqueid001/enunciate,nabilzhang/enunciate,nabilzhang/enunciate,nabilzhang/enunciate,uniqueid001/enunciate,uniqueid001/enunciate,uniqueid001/enunciate,nabilzhang/enunciate,uniqueid001/enunciate,uniqueid001/enunciate,uniqueid001/enunciate,uniqueid001/enunciate
|
package com.webcohesion.enunciate.mojo;
/*
* Copyright 2001-2005 The Apache Software Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import com.webcohesion.enunciate.Enunciate;
import com.webcohesion.enunciate.EnunciateConfiguration;
import com.webcohesion.enunciate.EnunciateLogger;
import com.webcohesion.enunciate.module.EnunciateModule;
import com.webcohesion.enunciate.module.ProjectExtensionModule;
import org.apache.maven.artifact.factory.ArtifactFactory;
import org.apache.maven.artifact.repository.ArtifactRepository;
import org.apache.maven.artifact.resolver.ArtifactResolver;
import org.apache.maven.execution.MavenSession;
import org.apache.maven.model.Contributor;
import org.apache.maven.model.License;
import org.apache.maven.model.Plugin;
import org.apache.maven.model.Resource;
import org.apache.maven.plugin.AbstractMojo;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugins.annotations.*;
import org.apache.maven.project.MavenProject;
import org.apache.maven.project.MavenProjectHelper;
import org.apache.maven.shared.filtering.MavenFileFilter;
import org.apache.maven.shared.filtering.MavenFilteringException;
import org.codehaus.plexus.util.xml.Xpp3Dom;
import org.xml.sax.SAXException;
import java.io.File;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLClassLoader;
import java.util.*;
/**
* Goal which initializes an Enunciate build process.
*/
@SuppressWarnings ( "unchecked" )
@Mojo ( name = "config", defaultPhase = LifecyclePhase.VALIDATE, requiresDependencyResolution = ResolutionScope.COMPILE_PLUS_RUNTIME )
public class ConfigMojo extends AbstractMojo {
public static final String ENUNCIATE_PROPERTY = "com.webcohesion.enunciate.mojo.ConfigMojo#ENUNCIATE_PROPERTY";
@Component
protected MavenProjectHelper projectHelper;
@Component
protected MavenFileFilter configFilter;
@Component
protected ArtifactFactory artifactFactory;
@Component
protected ArtifactResolver artifactResolver;
@Parameter ( defaultValue = "${project}", required = true, readonly = true )
protected MavenProject project;
@Parameter ( defaultValue = "${plugin.artifacts}", required = true, readonly = true )
protected Collection<org.apache.maven.artifact.Artifact> pluginDependencies;
@Parameter ( defaultValue = "${session}", required = true, readonly = true )
protected MavenSession session;
@Parameter ( defaultValue = "${localRepository}", required = true, readonly = true )
protected ArtifactRepository localRepository;
@Parameter ( defaultValue = "${project.build.directory}", required = true )
protected File exportsDir = null;
/**
* The enunciate artifacts.
*/
@Parameter
protected Artifact[] artifacts;
/**
* The enunciate configuration file.
*/
@Parameter
protected File configFile = null;
/**
* The output directory for Enunciate.
*/
@Parameter ( defaultValue = "${project.build.directory}/enunciate", property = "enunciate.build.directory" )
protected File buildDir = null;
/**
* The Enunciate exports.
*/
@Parameter
protected Map<String, String> exports = new HashMap<String, String>();
/**
* The include patterns.
*/
@Parameter
protected String[] includes;
/**
* The exclude patterns.
*/
@Parameter
protected String[] excludes;
/**
* The modules to include as project extensions.
*/
@Parameter ( name = "project-extensions" )
protected String[] projectExtensions;
/**
* List of compiler arguments.
*/
@Parameter
protected String[] compilerArgs;
/**
* Compiler -source version parameter
*/
@Parameter ( property = "maven.compiler.source" )
private String source = null;
/**
* Compiler -target version parameter
*/
@Parameter ( property = "maven.compiler.target" )
private String target = null;
/**
* The -encoding argument for the Java compiler
*/
@Parameter ( property = "encoding", defaultValue = "${project.build.sourceEncoding}" )
private String encoding = null;
/**
* A flag used to disable enunciate. This is primarily intended for usage from the command line to occasionally adjust the build.
*/
@Parameter ( defaultValue = "false", property = "enunciate.skip" )
protected boolean skipEnunciate;
/**
* A flag used to disable the sourcepath. This may be the quickest and dirtiest way to bypass the infamous "Java compiler crashed" errors.
*/
@Parameter ( defaultValue = "false", property = "enunciate.disable.sourcepath" )
protected boolean disableSourcepath = false;
/**
* The list of dependencies on which Enunciate should attempt to lookup their sources for inclusion in the source path.
* By default, dependencies with the same groupId as the current project will be included.
*/
@Parameter ( name = "sourcepath-includes" )
protected DependencySourceSpec[] sourcepathIncludes;
/**
* The list of dependencies on which Enunciate should NOT attempt to lookup their sources for inclusion in the source path.
* By default, dependencies that do _not_ have the same groupId as the current project will be excluded.
*/
@Parameter ( name = "sourcepath-excludes" )
protected DependencySourceSpec[] sourcepathExcludes;
/**
 * Runs the Enunciate build for this Maven project: loads and filters the
 * enunciate config, derives defaults (slug, title, description, version,
 * contacts, license) from the POM, assembles classpath/sourcepath, discovers
 * modules via {@link ServiceLoader}, invokes Enunciate, and attaches any
 * configured project artifacts. The resulting Enunciate instance is stored
 * in the plugin context under {@link #ENUNCIATE_PROPERTY} for later mojos.
 *
 * @throws MojoExecutionException on configuration or execution failure
 */
public void execute() throws MojoExecutionException {
  if (skipEnunciate) {
    getLog().info("[ENUNCIATE] Skipping enunciate per configuration.");
    return;
  }
  Enunciate enunciate = new Enunciate();
  //set up the logger.
  enunciate.setLogger(new MavenEnunciateLogger());
  //set the build dir.
  enunciate.setBuildDir(this.buildDir);
  //load the config.
  EnunciateConfiguration config = enunciate.getConfiguration();
  File configFile = this.configFile;
  if (configFile == null) {
    // Default: enunciate.xml in the project base directory (optional).
    configFile = new File(project.getBasedir(), "enunciate.xml");
  }
  if (configFile.exists()) {
    getLog().info("[ENUNCIATE] Using enunciate configuration at " + configFile.getAbsolutePath());
    try {
      loadConfig(enunciate, configFile);
      config.setBase(configFile.getParentFile());
    }
    catch (Exception e) {
      throw new MojoExecutionException("Problem with enunciate config file " + configFile, e);
    }
  }
  //set the default configured label.
  config.setDefaultSlug(project.getArtifactId());
  if (project.getName() != null && !"".equals(project.getName().trim())) {
    StringBuilder description = new StringBuilder("<h1>").append(project.getName()).append("</h1>");
    config.setDefaultTitle(project.getName());
    if (project.getDescription() != null && !"".equals(project.getDescription().trim())) {
      description.append("<p>").append(project.getDescription()).append("</p>");
    }
    config.setDefaultDescription(description.toString());
  }
  if (project.getVersion() != null && !"".equals(project.getVersion().trim())) {
    config.setDefaultVersion(project.getVersion());
  }
  List contributors = project.getContributors();
  if (contributors != null && !contributors.isEmpty()) {
    List<EnunciateConfiguration.Contact> contacts = new ArrayList<EnunciateConfiguration.Contact>(contributors.size());
    for (Object c : contributors) {
      Contributor contributor = (Contributor) c;
      contacts.add(new EnunciateConfiguration.Contact(contributor.getName(), contributor.getUrl(), contributor.getEmail()));
    }
    config.setDefaultContacts(contacts);
  }
  List licenses = project.getLicenses();
  if (licenses != null && !licenses.isEmpty()) {
    // Only the first declared license is propagated to Enunciate.
    License license = (License) licenses.get(0);
    config.setDefaultApiLicense(new EnunciateConfiguration.License(license.getName(), license.getUrl(), null, null));
  }
  //set the class paths.
  setClasspathAndSourcepath(enunciate);
  //load any modules on the classpath.
  List<URL> pluginClasspath = buildPluginClasspath();
  ServiceLoader<EnunciateModule> moduleLoader = ServiceLoader.load(EnunciateModule.class, new URLClassLoader(pluginClasspath.toArray(new URL[pluginClasspath.size()]), Thread.currentThread().getContextClassLoader()));
  for (EnunciateModule module : moduleLoader) {
    enunciate.addModule(module);
  }
  //set the compiler arguments.
  // NOTE(review): the local 'compilerArgs' list shadows the configured
  // 'compilerArgs' field, which is never added here — confirm whether the
  // plugin parameter is intentionally unused in this mojo.
  List<String> compilerArgs = new ArrayList<String>();
  String sourceVersion = findSourceVersion();
  if (sourceVersion != null) {
    compilerArgs.add("-source");
    compilerArgs.add(sourceVersion);
  }
  String targetVersion = findTargetVersion();
  if (targetVersion != null) {
    compilerArgs.add("-target");
    compilerArgs.add(targetVersion);
  }
  String sourceEncoding = this.encoding;
  if (sourceEncoding != null) {
    compilerArgs.add("-encoding");
    compilerArgs.add(sourceEncoding);
  }
  enunciate.getCompilerArgs().addAll(compilerArgs);
  //includes.
  if (this.includes != null) {
    for (String include : this.includes) {
      enunciate.addInclude(include);
    }
  }
  //excludes.
  if (this.excludes != null) {
    for (String exclude : this.excludes) {
      enunciate.addExclude(exclude);
    }
  }
  //exports.
  if (this.exports != null) {
    for (String exportId : this.exports.keySet()) {
      String filename = this.exports.get(exportId);
      if (filename == null || "".equals(filename)) {
        throw new MojoExecutionException("Invalid (empty or null) filename for export " + exportId + ".");
      }
      File exportFile = new File(filename);
      if (!exportFile.isAbsolute()) {
        // Relative export paths are resolved against the exports directory.
        exportFile = new File(this.exportsDir, filename);
      }
      enunciate.addExport(exportId, exportFile);
    }
  }
  Set<String> enunciateAddedSourceDirs = new TreeSet<String>();
  List<EnunciateModule> modules = enunciate.getModules();
  if (modules != null) {
    Set<String> projectExtensions = new TreeSet<String>(this.projectExtensions == null ? Collections.<String>emptyList() : Arrays.asList(this.projectExtensions));
    for (EnunciateModule module : modules) {
      //configure the project with the module project extensions.
      if (projectExtensions.contains(module.getName()) && module instanceof ProjectExtensionModule) {
        ProjectExtensionModule extensions = (ProjectExtensionModule) module;
        for (File projectSource : extensions.getProjectSources()) {
          String sourceDir = projectSource.getAbsolutePath();
          enunciateAddedSourceDirs.add(sourceDir);
          if (!project.getCompileSourceRoots().contains(sourceDir)) {
            getLog().debug("[ENUNCIATE] Adding '" + sourceDir + "' to the compile source roots.");
            project.addCompileSourceRoot(sourceDir);
          }
        }
        for (File testSource : extensions.getProjectTestSources()) {
          project.addTestCompileSourceRoot(testSource.getAbsolutePath());
        }
        for (File resourceDir : extensions.getProjectResourceDirectories()) {
          Resource restResource = new Resource();
          restResource.setDirectory(resourceDir.getAbsolutePath());
          project.addResource(restResource);
        }
        for (File resourceDir : extensions.getProjectTestResourceDirectories()) {
          Resource resource = new Resource();
          resource.setDirectory(resourceDir.getAbsolutePath());
          project.addTestResource(resource);
        }
      }
      applyAdditionalConfiguration(module);
    }
  }
  //add any new source directories to the project.
  Set<File> sourceDirs = new HashSet<File>();
  Collection<String> sourcePaths = (Collection<String>) project.getCompileSourceRoots();
  for (String sourcePath : sourcePaths) {
    File sourceDir = new File(sourcePath);
    if (!enunciateAddedSourceDirs.contains(sourceDir.getAbsolutePath())) {
      sourceDirs.add(sourceDir);
    }
    else {
      getLog().info("[ENUNCIATE] " + sourceDir + " appears to be added to the source roots by Enunciate. Excluding from original source roots....");
    }
  }
  for (File sourceDir : sourceDirs) {
    enunciate.addSourceDir(sourceDir);
  }
  postProcessConfig(enunciate);
  try {
    enunciate.run();
  }
  catch (RuntimeException e) {
    throw e;
  }
  catch (Exception e) {
    throw new MojoExecutionException("Error invoking Enunciate.", e);
  }
  if (this.artifacts != null) {
    for (Artifact projectArtifact : artifacts) {
      if (projectArtifact.getEnunciateArtifactId() == null) {
        getLog().warn("[ENUNCIATE] No enunciate export id specified. Skipping project artifact...");
        continue;
      }
      com.webcohesion.enunciate.artifacts.Artifact artifact = null;
      for (com.webcohesion.enunciate.artifacts.Artifact enunciateArtifact : enunciate.getArtifacts()) {
        if (projectArtifact.getEnunciateArtifactId().equals(enunciateArtifact.getId())
          || enunciateArtifact.getAliases().contains(projectArtifact.getEnunciateArtifactId())) {
          artifact = enunciateArtifact;
          break;
        }
      }
      if (artifact != null) {
        try {
          File tempExportFile = enunciate.createTempFile(project.getArtifactId() + "-" + projectArtifact.getClassifier(), projectArtifact.getArtifactType());
          artifact.exportTo(tempExportFile, enunciate);
          projectHelper.attachArtifact(project, projectArtifact.getArtifactType(), projectArtifact.getClassifier(), tempExportFile);
        }
        catch (IOException e) {
          throw new MojoExecutionException("Error exporting Enunciate artifact.", e);
        }
      }
      else {
        getLog().warn("[ENUNCIATE] Enunciate artifact '" + projectArtifact.getEnunciateArtifactId() + "' not found in the project...");
      }
    }
  }
  postProcess(enunciate);
  // Share the configured Enunciate instance with subsequent mojos.
  getPluginContext().put(ConfigMojo.ENUNCIATE_PROPERTY, enunciate);
}
/**
 * Hook for subclasses to post-process the Enunciate mechanism after it has
 * run. No-op by default.
 *
 * @param enunciate the Enunciate instance that just ran
 */
protected void postProcess(Enunciate enunciate) {
}
/**
 * Hook for subclasses to apply extra configuration to each discovered module
 * before Enunciate runs. No-op by default.
 *
 * @param module the module being configured
 */
protected void applyAdditionalConfiguration(EnunciateModule module) {
}
/**
 * Determines the Java -source version: the explicitly configured plugin
 * parameter if set, otherwise the value configured on the project's
 * maven-compiler-plugin (if any).
 *
 * @return the source version, or null if none is configured
 */
protected String findSourceVersion() {
  String source = this.source;
  if (source == null) {
    source = findCompilerPluginSetting("source");
  }
  return source;
}

/**
 * Determines the Java -target version: the explicitly configured plugin
 * parameter if set, otherwise the value configured on the project's
 * maven-compiler-plugin (if any).
 *
 * @return the target version, or null if none is configured
 */
protected String findTargetVersion() {
  String target = this.target;
  if (target == null) {
    target = findCompilerPluginSetting("target");
  }
  return target;
}

/**
 * Scans the project's build plugins for the maven-compiler-plugin and reads
 * the named child of its XML configuration. If multiple matching plugin
 * entries exist, the last one wins (preserving the historical behavior of
 * the duplicated lookups this helper replaces).
 *
 * @param settingName the configuration child element name (e.g. "source")
 * @return the configured value, or null if not found
 */
private String findCompilerPluginSetting(String settingName) {
  String value = null;
  List plugins = this.project.getBuildPlugins();
  for (Object plugin : plugins) {
    if (plugin instanceof Plugin && "org.apache.maven.plugins".equals(((Plugin) plugin).getGroupId()) && "maven-compiler-plugin".equals(((Plugin) plugin).getArtifactId()) && ((Plugin) plugin).getConfiguration() instanceof Xpp3Dom) {
      Xpp3Dom configuration = (Xpp3Dom) ((Plugin) plugin).getConfiguration();
      Xpp3Dom child = configuration.getChild(settingName);
      if (child != null) {
        value = child.getValue();
      }
    }
  }
  return value;
}
/**
 * Builds the classpath of this plugin's own dependencies as URLs, used to
 * create the class loader from which Enunciate modules are service-loaded.
 *
 * @return the plugin dependency files as URLs
 * @throws MojoExecutionException if a dependency file cannot be converted to a URL
 */
protected List<URL> buildPluginClasspath() throws MojoExecutionException {
  List<URL> classpath = new ArrayList<URL>();
  for (org.apache.maven.artifact.Artifact next : this.pluginDependencies) {
    try {
      classpath.add(next.getFile().toURI().toURL());
    }
    catch (MalformedURLException e) {
      throw new MojoExecutionException("Unable to add artifact " + next + " to the classpath.", e);
    }
  }
  return classpath;
}
/**
 * Configures Enunciate's classpath and sourcepath from the project's
 * dependencies. The classpath keeps jar-type, non-test-scoped artifacts.
 * The sourcepath (unless disabled) resolves "-sources" artifacts for
 * dependencies sharing the project's groupId plus any explicitly included
 * ones, minus any explicitly excluded ones; lookup failures are logged and
 * ignored.
 *
 * @param enunciate the Enunciate instance to configure
 * @throws MojoExecutionException declared for subclass overrides; not thrown here
 */
protected void setClasspathAndSourcepath(Enunciate enunciate) throws MojoExecutionException {
  List<File> classpath = new ArrayList<File>();
  Set<org.apache.maven.artifact.Artifact> dependencies = new LinkedHashSet<org.apache.maven.artifact.Artifact>();
  dependencies.addAll(((Set<org.apache.maven.artifact.Artifact>) this.project.getArtifacts()));
  Iterator<org.apache.maven.artifact.Artifact> it = dependencies.iterator();
  while (it.hasNext()) {
    org.apache.maven.artifact.Artifact artifact = it.next();
    String artifactScope = artifact.getScope();
    // A missing type is treated as "jar" per Maven convention.
    String type = artifact.getType() == null ? "jar" : artifact.getType();
    if (!"jar".equals(type)) {
      //remove the non-jars from the classpath.
      it.remove();
    }
    else if (org.apache.maven.artifact.Artifact.SCOPE_TEST.equals(artifactScope)) {
      //remove just the test-scope artifacts from the classpath.
      it.remove();
    }
    else {
      classpath.add(artifact.getFile());
    }
  }
  enunciate.setClasspath(classpath);
  if (!this.disableSourcepath) {
    // Decide which dependencies are worth a source-artifact lookup.
    List<org.apache.maven.artifact.Artifact> sourcepathDependencies = new ArrayList<org.apache.maven.artifact.Artifact>();
    for (org.apache.maven.artifact.Artifact projectDependency : dependencies) {
      if (projectDependency.getGroupId().equals(this.project.getGroupId())) {
        if (getLog().isDebugEnabled()) {
          getLog().debug("[ENUNCIATE] Attempt will be made to lookup the sources for " + projectDependency + " because it has the same groupId as the current project.");
        }
        sourcepathDependencies.add(projectDependency);
      }
      else if (this.sourcepathIncludes != null) {
        for (DependencySourceSpec include : this.sourcepathIncludes) {
          if (include.specifies(projectDependency)) {
            if (getLog().isDebugEnabled()) {
              getLog().debug("[ENUNCIATE] Attempt will be made to lookup the sources for " + projectDependency + " because it was explicitly included in the plugin configuration.");
            }
            sourcepathDependencies.add(projectDependency);
            break;
          }
        }
      }
    }
    //now go through the excludes.
    if (this.sourcepathExcludes != null && sourcepathExcludes.length > 0) {
      Iterator<org.apache.maven.artifact.Artifact> sourcepathIt = sourcepathDependencies.iterator();
      while (sourcepathIt.hasNext()) {
        org.apache.maven.artifact.Artifact sourcepathDependency = sourcepathIt.next();
        for (DependencySourceSpec exclude : this.sourcepathExcludes) {
          if (exclude.specifies(sourcepathDependency)) {
            if (getLog().isDebugEnabled()) {
              getLog().debug("[ENUNCIATE] Attempt will NOT be made to lookup the sources for " + sourcepathDependency + " because it was explicitly excluded in the plugin configuration.");
            }
            sourcepathIt.remove();
          }
        }
      }
    }
    //now attempt the source path lookup for the needed dependencies
    List<File> sourcepath = new ArrayList<File>();
    for (org.apache.maven.artifact.Artifact sourcepathDependency : sourcepathDependencies) {
      try {
        org.apache.maven.artifact.Artifact sourceArtifact = this.artifactFactory.createArtifactWithClassifier(sourcepathDependency.getGroupId(), sourcepathDependency.getArtifactId(), sourcepathDependency.getVersion(), sourcepathDependency.getType(), "sources");
        this.artifactResolver.resolve(sourceArtifact, this.project.getRemoteArtifactRepositories(), this.localRepository);
        if (getLog().isDebugEnabled()) {
          getLog().debug("[ENUNCIATE] Source artifact found at " + sourceArtifact + ".");
        }
        sourcepath.add(sourceArtifact.getFile());
      }
      catch (Exception e) {
        // Best-effort: a missing sources jar only reduces documentation quality.
        if (getLog().isDebugEnabled()) {
          getLog().debug("[ENUNCIATE] Attempt to find source artifact for " + sourcepathDependency + " failed.");
        }
      }
    }
    enunciate.setSourcepath(sourcepath);
  }
  else {
    getLog().warn("[ENUNCIATE] Source path has been disabled. This may result is some missing documentation elements because the source code won't be available.");
  }
}
/**
 * Hook for subclasses to adjust the Enunciate configuration just before it
 * runs.
 *
 * @param enunciate the configured Enunciate instance
 */
protected void postProcessConfig(Enunciate enunciate) {
  //no-op in this implementation.
}
/**
 * Load the config, do filtering as needed. When a Maven file filter is
 * available, the config file is property-filtered into a temp file inside
 * the build directory before loading; otherwise it is loaded as-is.
 *
 * @param config The config to load into.
 * @param configFile The config file.
 * @throws IOException if the filtered temp file cannot be created
 * @throws SAXException if the configuration XML is invalid
 * @throws MavenFilteringException if property filtering fails
 */
protected void loadConfig(Enunciate config, File configFile) throws IOException, SAXException, MavenFilteringException {
  if (this.configFilter == null) {
    getLog().debug("[ENUNCIATE] No maven file filter was provided, so no filtering of the config file will be done.")
;
    config.loadConfiguration(configFile);
  }
  else {
    this.buildDir.mkdirs();
    File filteredConfig = File.createTempFile("enunciateConfig", ".xml", this.buildDir);
    getLog().debug("[ENUNCIATE] Filtering " + configFile + " to " + filteredConfig + "...");
    // NOTE(review): encoding is hard-coded to "utf-8" here rather than using
    // the configured 'encoding' parameter — confirm this is intentional.
    this.configFilter.copyFile(configFile, filteredConfig, true, this.project, new ArrayList(), true, "utf-8", this.session);
    config.loadConfiguration(filteredConfig);
  }
}
/**
 * Adapts the Enunciate logging interface onto the Maven plugin log, prefixing
 * every message with "[ENUNCIATE]" and honoring the log's level checks.
 */
protected class MavenEnunciateLogger implements EnunciateLogger {

  @Override
  public void debug(String message, Object... formatArgs) {
    if (!getLog().isDebugEnabled()) {
      return;
    }
    getLog().debug("[ENUNCIATE] " + String.format(message, formatArgs));
  }

  @Override
  public void info(String message, Object... formatArgs) {
    if (!getLog().isInfoEnabled()) {
      return;
    }
    getLog().info("[ENUNCIATE] " + String.format(message, formatArgs));
  }

  @Override
  public void warn(String message, Object... formatArgs) {
    if (!getLog().isWarnEnabled()) {
      return;
    }
    getLog().warn("[ENUNCIATE] " + String.format(message, formatArgs));
  }

  @Override
  public void error(String message, Object... formatArgs) {
    if (!getLog().isErrorEnabled()) {
      return;
    }
    getLog().error("[ENUNCIATE] " + String.format(message, formatArgs));
  }
}
}
|
slim-maven-plugin/src/main/java/com/webcohesion/enunciate/mojo/ConfigMojo.java
|
package com.webcohesion.enunciate.mojo;
/*
* Copyright 2001-2005 The Apache Software Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import com.webcohesion.enunciate.Enunciate;
import com.webcohesion.enunciate.EnunciateConfiguration;
import com.webcohesion.enunciate.EnunciateLogger;
import com.webcohesion.enunciate.module.EnunciateModule;
import com.webcohesion.enunciate.module.ProjectExtensionModule;
import org.apache.maven.artifact.factory.ArtifactFactory;
import org.apache.maven.artifact.repository.ArtifactRepository;
import org.apache.maven.artifact.resolver.ArtifactResolver;
import org.apache.maven.execution.MavenSession;
import org.apache.maven.model.Contributor;
import org.apache.maven.model.License;
import org.apache.maven.model.Plugin;
import org.apache.maven.model.Resource;
import org.apache.maven.plugin.AbstractMojo;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugins.annotations.*;
import org.apache.maven.project.MavenProject;
import org.apache.maven.project.MavenProjectHelper;
import org.apache.maven.shared.filtering.MavenFileFilter;
import org.apache.maven.shared.filtering.MavenFilteringException;
import org.codehaus.plexus.util.xml.Xpp3Dom;
import org.xml.sax.SAXException;
import java.io.File;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLClassLoader;
import java.util.*;
/**
 * Goal which initializes an Enunciate build process: it assembles the Enunciate
 * configuration from the POM (and the optional enunciate.xml file), discovers
 * the Enunciate modules on the plugin classpath, runs the Enunciate engine, and
 * attaches any requested Enunciate artifacts to the Maven project.
 */
@SuppressWarnings ( "unchecked" )
@Mojo ( name = "config", defaultPhase = LifecyclePhase.VALIDATE, requiresDependencyResolution = ResolutionScope.COMPILE_PLUS_RUNTIME )
public class ConfigMojo extends AbstractMojo {

  /**
   * Plugin-context key under which the configured {@link Enunciate} instance is
   * shared with subsequent mojos of this plugin.
   */
  public static final String ENUNCIATE_PROPERTY = "com.webcohesion.enunciate.mojo.ConfigMojo#ENUNCIATE_PROPERTY";

  @Component
  protected MavenProjectHelper projectHelper;

  @Component
  protected MavenFileFilter configFilter;

  @Component
  protected ArtifactFactory artifactFactory;

  @Component
  protected ArtifactResolver artifactResolver;

  @Parameter ( defaultValue = "${project}", required = true, readonly = true )
  protected MavenProject project;

  @Parameter ( defaultValue = "${plugin.artifacts}", required = true, readonly = true )
  protected Collection<org.apache.maven.artifact.Artifact> pluginDependencies;

  @Parameter ( defaultValue = "${session}", required = true, readonly = true )
  protected MavenSession session;

  @Parameter ( defaultValue = "${localRepository}", required = true, readonly = true )
  protected ArtifactRepository localRepository;

  /**
   * Directory against which relative export file names are resolved.
   */
  @Parameter ( defaultValue = "${project.build.directory}", required = true )
  protected File exportsDir = null;

  /**
   * The enunciate artifacts.
   */
  @Parameter
  protected Artifact[] artifacts;

  /**
   * The enunciate configuration file.
   */
  @Parameter
  protected File configFile = null;

  /**
   * The output directory for Enunciate.
   */
  @Parameter ( defaultValue = "${project.build.directory}/enunciate", property = "enunciate.build.directory" )
  protected File buildDir = null;

  /**
   * The Enunciate exports: a map from Enunciate export id to target file name.
   */
  @Parameter
  protected Map<String, String> exports = new HashMap<String, String>();

  /**
   * The include patterns.
   */
  @Parameter
  protected String[] includes;

  /**
   * The exclude patterns.
   */
  @Parameter
  protected String[] excludes;

  /**
   * The modules to include as project extensions.
   */
  @Parameter ( name = "project-extensions" )
  protected String[] projectExtensions;

  /**
   * List of compiler arguments.
   */
  @Parameter
  protected String[] compilerArgs;

  /**
   * Compiler -source version parameter.
   */
  @Parameter ( property = "maven.compiler.source" )
  private String source = null;

  /**
   * Compiler -target version parameter.
   */
  @Parameter ( property = "maven.compiler.target" )
  private String target = null;

  /**
   * The -encoding argument for the Java compiler.
   */
  @Parameter ( property = "encoding", defaultValue = "${project.build.sourceEncoding}" )
  private String encoding = null;

  /**
   * A flag used to disable enunciate. This is primarily intended for usage from the command line to occasionally adjust the build.
   */
  @Parameter ( defaultValue = "false", property = "enunciate.skip" )
  protected boolean skipEnunciate;

  /**
   * The list of dependencies on which Enunciate should attempt to lookup their sources for inclusion in the source path.
   * By default, dependencies with the same groupId as the current project will be included.
   */
  @Parameter ( name = "sourcepath-includes" )
  protected DependencySourceSpec[] sourcepathIncludes;

  /**
   * The list of dependencies on which Enunciate should NOT attempt to lookup their sources for inclusion in the source path.
   * By default, dependencies that do _not_ have the same groupId as the current project will be excluded.
   */
  @Parameter ( name = "sourcepath-excludes" )
  protected DependencySourceSpec[] sourcepathExcludes;

  /**
   * Builds and runs the Enunciate engine for this project, then stores the
   * engine in the plugin context under {@link #ENUNCIATE_PROPERTY} so that
   * downstream mojos can pick it up.
   *
   * @throws MojoExecutionException on configuration, execution, or artifact-export errors.
   */
  public void execute() throws MojoExecutionException {
    if (skipEnunciate) {
      getLog().info("[ENUNCIATE] Skipping enunciate per configuration.");
      return;
    }

    Enunciate enunciate = new Enunciate();

    //set up the logger.
    enunciate.setLogger(new MavenEnunciateLogger());

    //set the build dir.
    enunciate.setBuildDir(this.buildDir);

    //load the config, defaulting to enunciate.xml in the project base directory.
    EnunciateConfiguration config = enunciate.getConfiguration();
    File configFile = this.configFile;
    if (configFile == null) {
      configFile = new File(project.getBasedir(), "enunciate.xml");
    }
    if (configFile.exists()) {
      getLog().info("[ENUNCIATE] Using enunciate configuration at " + configFile.getAbsolutePath());
      try {
        loadConfig(enunciate, configFile);
        config.setBase(configFile.getParentFile());
      }
      catch (Exception e) {
        throw new MojoExecutionException("Problem with enunciate config file " + configFile, e);
      }
    }

    //seed the configuration defaults from the POM.
    config.setDefaultSlug(project.getArtifactId());
    if (project.getName() != null && !"".equals(project.getName().trim())) {
      StringBuilder description = new StringBuilder("<h1>").append(project.getName()).append("</h1>");
      config.setDefaultTitle(project.getName());
      if (project.getDescription() != null && !"".equals(project.getDescription().trim())) {
        description.append("<p>").append(project.getDescription()).append("</p>");
      }
      config.setDefaultDescription(description.toString());
    }
    if (project.getVersion() != null && !"".equals(project.getVersion().trim())) {
      config.setDefaultVersion(project.getVersion());
    }

    List<Contributor> contributors = project.getContributors();
    if (contributors != null && !contributors.isEmpty()) {
      List<EnunciateConfiguration.Contact> contacts = new ArrayList<EnunciateConfiguration.Contact>(contributors.size());
      for (Contributor contributor : contributors) {
        contacts.add(new EnunciateConfiguration.Contact(contributor.getName(), contributor.getUrl(), contributor.getEmail()));
      }
      config.setDefaultContacts(contacts);
    }

    List<License> licenses = project.getLicenses();
    if (licenses != null && !licenses.isEmpty()) {
      //only the first POM license is exposed through the Enunciate config.
      License license = licenses.get(0);
      config.setDefaultApiLicense(new EnunciateConfiguration.License(license.getName(), license.getUrl(), null, null));
    }

    //set the class paths.
    setClasspathAndSourcepath(enunciate);

    //load any modules on the plugin classpath via the ServiceLoader mechanism.
    List<URL> pluginClasspath = buildPluginClasspath();
    ServiceLoader<EnunciateModule> moduleLoader = ServiceLoader.load(EnunciateModule.class, new URLClassLoader(pluginClasspath.toArray(new URL[pluginClasspath.size()]), Thread.currentThread().getContextClassLoader()));
    for (EnunciateModule module : moduleLoader) {
      enunciate.addModule(module);
    }

    //set the compiler arguments.
    List<String> compilerArgs = new ArrayList<String>();
    String sourceVersion = findSourceVersion();
    if (sourceVersion != null) {
      compilerArgs.add("-source");
      compilerArgs.add(sourceVersion);
    }
    String targetVersion = findTargetVersion();
    if (targetVersion != null) {
      compilerArgs.add("-target");
      compilerArgs.add(targetVersion);
    }
    String sourceEncoding = this.encoding;
    if (sourceEncoding != null) {
      compilerArgs.add("-encoding");
      compilerArgs.add(sourceEncoding);
    }
    enunciate.getCompilerArgs().addAll(compilerArgs);

    //includes.
    if (this.includes != null) {
      for (String include : this.includes) {
        enunciate.addInclude(include);
      }
    }

    //excludes.
    if (this.excludes != null) {
      for (String exclude : this.excludes) {
        enunciate.addExclude(exclude);
      }
    }

    //exports; relative file names are resolved against the exports directory.
    if (this.exports != null) {
      for (Map.Entry<String, String> export : this.exports.entrySet()) {
        String exportId = export.getKey();
        String filename = export.getValue();
        if (filename == null || "".equals(filename)) {
          throw new MojoExecutionException("Invalid (empty or null) filename for export " + exportId + ".");
        }
        File exportFile = new File(filename);
        if (!exportFile.isAbsolute()) {
          exportFile = new File(this.exportsDir, filename);
        }
        enunciate.addExport(exportId, exportFile);
      }
    }

    //let configured project-extension modules contribute source roots and resources to the project.
    Set<String> enunciateAddedSourceDirs = new TreeSet<String>();
    List<EnunciateModule> modules = enunciate.getModules();
    if (modules != null) {
      Set<String> extensionModuleNames = new TreeSet<String>(this.projectExtensions == null ? Collections.<String>emptyList() : Arrays.asList(this.projectExtensions));
      for (EnunciateModule module : modules) {
        //configure the project with the module project extensions.
        if (extensionModuleNames.contains(module.getName()) && module instanceof ProjectExtensionModule) {
          ProjectExtensionModule extensions = (ProjectExtensionModule) module;
          for (File projectSource : extensions.getProjectSources()) {
            String sourceDir = projectSource.getAbsolutePath();
            enunciateAddedSourceDirs.add(sourceDir);
            if (!project.getCompileSourceRoots().contains(sourceDir)) {
              getLog().debug("[ENUNCIATE] Adding '" + sourceDir + "' to the compile source roots.");
              project.addCompileSourceRoot(sourceDir);
            }
          }
          for (File testSource : extensions.getProjectTestSources()) {
            project.addTestCompileSourceRoot(testSource.getAbsolutePath());
          }
          for (File resourceDir : extensions.getProjectResourceDirectories()) {
            Resource restResource = new Resource();
            restResource.setDirectory(resourceDir.getAbsolutePath());
            project.addResource(restResource);
          }
          for (File resourceDir : extensions.getProjectTestResourceDirectories()) {
            Resource resource = new Resource();
            resource.setDirectory(resourceDir.getAbsolutePath());
            project.addTestResource(resource);
          }
        }
        applyAdditionalConfiguration(module);
      }
    }

    //add the project compile source roots as Enunciate source dirs, excluding
    //the roots that were just contributed by Enunciate modules themselves.
    Set<File> sourceDirs = new HashSet<File>();
    Collection<String> sourcePaths = project.getCompileSourceRoots();
    for (String sourcePath : sourcePaths) {
      File sourceDir = new File(sourcePath);
      if (!enunciateAddedSourceDirs.contains(sourceDir.getAbsolutePath())) {
        sourceDirs.add(sourceDir);
      }
      else {
        getLog().info("[ENUNCIATE] " + sourceDir + " appears to be added to the source roots by Enunciate. Excluding from original source roots....");
      }
    }
    for (File sourceDir : sourceDirs) {
      enunciate.addSourceDir(sourceDir);
    }

    postProcessConfig(enunciate);

    try {
      enunciate.run();
    }
    catch (RuntimeException e) {
      throw e;
    }
    catch (Exception e) {
      throw new MojoExecutionException("Error invoking Enunciate.", e);
    }

    //attach the requested Enunciate artifacts to the Maven project.
    if (this.artifacts != null) {
      for (Artifact projectArtifact : artifacts) {
        if (projectArtifact.getEnunciateArtifactId() == null) {
          getLog().warn("[ENUNCIATE] No enunciate export id specified. Skipping project artifact...");
          continue;
        }
        com.webcohesion.enunciate.artifacts.Artifact artifact = null;
        for (com.webcohesion.enunciate.artifacts.Artifact enunciateArtifact : enunciate.getArtifacts()) {
          //match either the artifact id or one of its aliases.
          if (projectArtifact.getEnunciateArtifactId().equals(enunciateArtifact.getId())
            || enunciateArtifact.getAliases().contains(projectArtifact.getEnunciateArtifactId())) {
            artifact = enunciateArtifact;
            break;
          }
        }
        if (artifact != null) {
          try {
            File tempExportFile = enunciate.createTempFile(project.getArtifactId() + "-" + projectArtifact.getClassifier(), projectArtifact.getArtifactType());
            artifact.exportTo(tempExportFile, enunciate);
            projectHelper.attachArtifact(project, projectArtifact.getArtifactType(), projectArtifact.getClassifier(), tempExportFile);
          }
          catch (IOException e) {
            throw new MojoExecutionException("Error exporting Enunciate artifact.", e);
          }
        }
        else {
          getLog().warn("[ENUNCIATE] Enunciate artifact '" + projectArtifact.getEnunciateArtifactId() + "' not found in the project...");
        }
      }
    }

    postProcess(enunciate);

    //share the configured engine with downstream mojos of this plugin.
    getPluginContext().put(ConfigMojo.ENUNCIATE_PROPERTY, enunciate);
  }

  /**
   * Hook for subclasses to post-process the engine after it has run. No-op by default.
   */
  protected void postProcess(Enunciate enunciate) {
  }

  /**
   * Hook for subclasses to apply additional per-module configuration. No-op by default.
   */
  protected void applyAdditionalConfiguration(EnunciateModule module) {
  }

  /**
   * Determines the Java "-source" version: the explicit plugin parameter if set,
   * otherwise the maven-compiler-plugin configuration of the project.
   *
   * @return the source version, or null if none is configured.
   */
  protected String findSourceVersion() {
    return this.source != null ? this.source : findCompilerPluginSetting("source");
  }

  /**
   * Determines the Java "-target" version: the explicit plugin parameter if set,
   * otherwise the maven-compiler-plugin configuration of the project.
   *
   * @return the target version, or null if none is configured.
   */
  protected String findTargetVersion() {
    return this.target != null ? this.target : findCompilerPluginSetting("target");
  }

  /**
   * Looks up a configuration value of the maven-compiler-plugin in this project's build plugins.
   * If the plugin is declared more than once, the last declaration wins (matching the
   * behavior of the original per-setting lookups).
   *
   * @param settingName the name of the configuration child element (e.g. "source" or "target").
   * @return the configured value, or null if not found.
   */
  private String findCompilerPluginSetting(String settingName) {
    String value = null;
    List<Plugin> plugins = this.project.getBuildPlugins();
    for (Plugin plugin : plugins) {
      if ("org.apache.maven.plugins".equals(plugin.getGroupId()) && "maven-compiler-plugin".equals(plugin.getArtifactId()) && plugin.getConfiguration() instanceof Xpp3Dom) {
        Xpp3Dom settingConfig = ((Xpp3Dom) plugin.getConfiguration()).getChild(settingName);
        if (settingConfig != null) {
          value = settingConfig.getValue();
        }
      }
    }
    return value;
  }

  /**
   * Builds the classpath of this plugin (its declared dependency artifacts) as a list of URLs,
   * used to service-load Enunciate modules.
   *
   * @throws MojoExecutionException if an artifact file cannot be converted to a URL.
   */
  protected List<URL> buildPluginClasspath() throws MojoExecutionException {
    List<URL> classpath = new ArrayList<URL>();
    for (org.apache.maven.artifact.Artifact next : this.pluginDependencies) {
      try {
        classpath.add(next.getFile().toURI().toURL());
      }
      catch (MalformedURLException e) {
        throw new MojoExecutionException("Unable to add artifact " + next + " to the classpath.", e);
      }
    }
    return classpath;
  }

  /**
   * Computes and applies the classpath and sourcepath for the Enunciate engine.
   * The classpath is the project's "jar"-typed, non-test-scoped dependencies; the
   * sourcepath is the resolved "sources" artifacts of the dependencies selected by
   * the groupId default and the sourcepath include/exclude specs.
   *
   * @throws MojoExecutionException declared for subclass overrides; not thrown here.
   */
  protected void setClasspathAndSourcepath(Enunciate enunciate) throws MojoExecutionException {
    List<File> classpath = new ArrayList<File>();
    List<File> sourcepath = new ArrayList<File>();
    Set<org.apache.maven.artifact.Artifact> dependencies = new LinkedHashSet<org.apache.maven.artifact.Artifact>();
    dependencies.addAll(this.project.getArtifacts());
    Iterator<org.apache.maven.artifact.Artifact> it = dependencies.iterator();
    while (it.hasNext()) {
      org.apache.maven.artifact.Artifact artifact = it.next();
      String artifactScope = artifact.getScope();
      String type = artifact.getType() == null ? "jar" : artifact.getType();
      if (!"jar".equals(type)) {
        //remove the non-jars from the classpath.
        it.remove();
      }
      else if (org.apache.maven.artifact.Artifact.SCOPE_TEST.equals(artifactScope)) {
        //remove just the test-scope artifacts from the classpath.
        it.remove();
      }
      else {
        classpath.add(artifact.getFile());
      }
    }

    //select the dependencies whose sources should be looked up.
    List<org.apache.maven.artifact.Artifact> sourcepathDependencies = new ArrayList<org.apache.maven.artifact.Artifact>();
    for (org.apache.maven.artifact.Artifact projectDependency : dependencies) {
      if (projectDependency.getGroupId().equals(this.project.getGroupId())) {
        if (getLog().isDebugEnabled()) {
          getLog().debug("[ENUNCIATE] Attempt will be made to lookup the sources for " + projectDependency + " because it has the same groupId as the current project.");
        }
        sourcepathDependencies.add(projectDependency);
      }
      else if (this.sourcepathIncludes != null) {
        for (DependencySourceSpec include : this.sourcepathIncludes) {
          if (include.specifies(projectDependency)) {
            if (getLog().isDebugEnabled()) {
              getLog().debug("[ENUNCIATE] Attempt will be made to lookup the sources for " + projectDependency + " because it was explicitly included in the plugin configuration.");
            }
            sourcepathDependencies.add(projectDependency);
            break;
          }
        }
      }
    }

    //now go through the excludes.
    if (this.sourcepathExcludes != null && sourcepathExcludes.length > 0) {
      Iterator<org.apache.maven.artifact.Artifact> sourcepathIt = sourcepathDependencies.iterator();
      while (sourcepathIt.hasNext()) {
        org.apache.maven.artifact.Artifact sourcepathDependency = sourcepathIt.next();
        for (DependencySourceSpec exclude : this.sourcepathExcludes) {
          if (exclude.specifies(sourcepathDependency)) {
            if (getLog().isDebugEnabled()) {
              getLog().debug("[ENUNCIATE] Attempt will NOT be made to lookup the sources for " + sourcepathDependency + " because it was explicitly excluded in the plugin configuration.");
            }
            sourcepathIt.remove();
          }
        }
      }
    }

    //now attempt the source path lookup for the needed dependencies; failures are
    //tolerated (a dependency may simply not publish a "sources" artifact).
    for (org.apache.maven.artifact.Artifact sourcepathDependency : sourcepathDependencies) {
      try {
        org.apache.maven.artifact.Artifact sourceArtifact = this.artifactFactory.createArtifactWithClassifier(sourcepathDependency.getGroupId(), sourcepathDependency.getArtifactId(), sourcepathDependency.getVersion(), sourcepathDependency.getType(), "sources");
        this.artifactResolver.resolve(sourceArtifact, this.project.getRemoteArtifactRepositories(), this.localRepository);
        if (getLog().isDebugEnabled()) {
          getLog().debug("[ENUNCIATE] Source artifact found at " + sourceArtifact + ".");
        }
        sourcepath.add(sourceArtifact.getFile());
      }
      catch (Exception e) {
        if (getLog().isDebugEnabled()) {
          getLog().debug("[ENUNCIATE] Attempt to find source artifact for " + sourcepathDependency + " failed.");
        }
      }
    }

    enunciate.setClasspath(classpath);
    enunciate.setSourcepath(sourcepath);
  }

  /**
   * Hook for subclasses to post-process the configuration before the engine runs.
   */
  protected void postProcessConfig(Enunciate enunciate) {
    //no-op in this implementation.
  }

  /**
   * Load the config, do filtering as needed.
   *
   * @param config The config to load into.
   * @param configFile The config file.
   */
  protected void loadConfig(Enunciate config, File configFile) throws IOException, SAXException, MavenFilteringException {
    if (this.configFilter == null) {
      getLog().debug("[ENUNCIATE] No maven file filter was provided, so no filtering of the config file will be done.");
      config.loadConfiguration(configFile);
    }
    else {
      this.buildDir.mkdirs();
      File filteredConfig = File.createTempFile("enunciateConfig", ".xml", this.buildDir);
      getLog().debug("[ENUNCIATE] Filtering " + configFile + " to " + filteredConfig + "...");
      this.configFilter.copyFile(configFile, filteredConfig, true, this.project, new ArrayList<String>(), true, "utf-8", this.session);
      config.loadConfiguration(filteredConfig);
    }
  }

  /**
   * Adapts the Maven plugin log to the Enunciate logger interface, prefixing
   * all messages with "[ENUNCIATE]".
   */
  protected class MavenEnunciateLogger implements EnunciateLogger {
    @Override
    public void debug(String message, Object... formatArgs) {
      if (getLog().isDebugEnabled()) {
        getLog().debug("[ENUNCIATE] " + String.format(message, formatArgs));
      }
    }

    @Override
    public void info(String message, Object... formatArgs) {
      if (getLog().isInfoEnabled()) {
        getLog().info("[ENUNCIATE] " + String.format(message, formatArgs));
      }
    }

    @Override
    public void warn(String message, Object... formatArgs) {
      if (getLog().isWarnEnabled()) {
        getLog().warn("[ENUNCIATE] " + String.format(message, formatArgs));
      }
    }

    @Override
    public void error(String message, Object... formatArgs) {
      if (getLog().isErrorEnabled()) {
        getLog().error("[ENUNCIATE] " + String.format(message, formatArgs));
      }
    }
  }
}
|
option to completely disable the source path.
|
slim-maven-plugin/src/main/java/com/webcohesion/enunciate/mojo/ConfigMojo.java
|
option to completely disable the source path.
|
|
Java
|
apache-2.0
|
f3bc9d7b2d4be0a9690574a7d02bb3f40b26a880
| 0
|
manolo/components,manolo/components,shahrzadmn/vaadin-grid,shahrzadmn/vaadin-grid,manolo/components,shahrzadmn/vaadin-grid,shahrzadmn/vaadin-grid
|
package com.vaadin.components.grid.table;
import java.util.Arrays;
import java.util.List;
import com.google.gwt.core.client.JavaScriptObject;
import com.google.gwt.dom.client.Element;
import com.google.gwt.dom.client.Style.Overflow;
import com.google.gwt.dom.client.Style.TextOverflow;
import com.google.gwt.query.client.js.JsUtils;
import com.google.gwt.user.client.DOM;
import com.google.gwt.user.client.ui.HTML;
import com.vaadin.client.widgets.Grid.Column;
import com.vaadin.components.common.js.JS;
import com.vaadin.components.common.js.JS.Getter;
import com.vaadin.components.common.js.JS.Setter;
import com.vaadin.components.common.js.JSArray;
import com.vaadin.components.grid.GridComponent;
import com.vaadin.components.grid.config.JSCell;
import com.vaadin.components.grid.config.JSColumn;
import com.vaadin.components.grid.config.JSStaticCell;
import com.vaadin.components.grid.data.GridDataSource;
import com.vaadin.components.grid.data.GridDomTableDataSource;
import com.vaadin.shared.ui.grid.GridConstants;
/**
 * A grid column backed by a JS configuration object ({@link JSColumn}).
 * Mirrors the JS-side column properties onto the GWT column via defined
 * property accessors, and extracts cell values from primitive, array, or
 * (possibly nested) object data items.
 */
public final class GridColumn extends Column<Object, Object> {

    /** The JS-side configuration object this column mirrors. */
    private final JSColumn jsColumn;
    /** The grid component that owns this column. */
    private final GridComponent gridComponent;

    /**
     * Creates a column for the given JS configuration, appends it to the grid
     * after the currently visible columns, and wires up the JS property
     * accessors.
     */
    public static GridColumn addColumn(JSColumn jsColumn,
            GridComponent gridComponent) {
        GridColumn column = new GridColumn(jsColumn, gridComponent);
        column.gridComponent.getGrid().addColumn(column,
                gridComponent.getGrid().getVisibleColumns().size());
        column.bindProperties();
        return column;
    }

    private GridColumn(JSColumn jsColumn, GridComponent gridComponent) {
        this.jsColumn = jsColumn;
        this.gridComponent = gridComponent;
        // Default renderer: when the data source is a DOM table and the value
        // contains markup, render it as HTML; otherwise render plain text
        // inside an ellipsis-truncating wrapper span.
        setRenderer((cell, data) -> {
            String text = JS.isUndefinedOrNull(data) ? "" : data.toString();
            Element target = cell.getElement();
            if (gridComponent.getDataSource() instanceof GridDomTableDataSource
                    && new HTML(text).getElement().getFirstChildElement() != null) {
                target.setInnerHTML(text);
                return;
            }
            Element span = target.getFirstChildElement();
            boolean reusable = span != null && span.getPropertyBoolean("iswrapper");
            if (!reusable) {
                // No reusable wrapper yet: create one and install it.
                span = DOM.createSpan();
                span.getStyle().setOverflow(Overflow.HIDDEN);
                span.getStyle().setTextOverflow(TextOverflow.ELLIPSIS);
                span.setPropertyBoolean("iswrapper", true);
                target.removeAllChildren();
                target.appendChild(span);
            }
            span.setInnerText(text);
        });
    }

    /** Returns the cell of this column in the grid's default header row. */
    private JSStaticCell getDefaultHeaderCellReference() {
        GridStaticSection section = gridComponent.getStaticSection();
        return section.getHeaderCellByColumn(section.getDefaultHeader(), this);
    }

    /** Defines the JS property accessors that mirror column settings. */
    private void bindProperties() {
        JS.definePropertyAccessors(jsColumn, "headerContent", value -> {
            getDefaultHeaderCellReference().setContent(value);
            gridComponent.updateWidth();
        }, () -> getDefaultHeaderCellReference().getContent());
        JS.definePropertyAccessors(jsColumn, "hidden", value -> {
            setHidden((Boolean) value);
            gridComponent.updateWidth();
        }, this::isHidden);
        bind("headerText", value -> setHeaderCaption(value == null ? "" : value.toString()));
        bind("hidingToggleText", value -> setHidingToggleCaption(value == null ? null : value.toString()));
        bind("flex", value -> setExpandRatio(((Double) value).intValue()));
        bind("sortable", value -> setSortable((Boolean) value));
        bind("hidable", value -> setHidable((Boolean) value));
        bind("readOnly", value -> setEditable(!(boolean) value));
        bind("renderer", value -> setRenderer((cell, data) -> {
            JSCell jsCell = JSCell.create(cell, gridComponent.getContainer());
            JS.exec(value, jsCell);
        }));
        bind("minWidth",
                value -> setMinimumWidth(JS.isUndefinedOrNull(value) ? GridConstants.DEFAULT_MIN_WIDTH
                        : (double) value));
        bind("maxWidth",
                value -> setMaximumWidth(JS.isUndefinedOrNull(value) ? GridConstants.DEFAULT_MAX_WIDTH
                        : (double) value));
        bind("width",
                value -> setWidth(JS.isUndefinedOrNull(value) ? GridConstants.DEFAULT_COLUMN_WIDTH_PX
                        : (double) value));
    }

    /**
     * Defines a write-only JS property accessor that applies the setter and
     * then refreshes the grid width.
     */
    private void bind(String propertyName, final Setter setter) {
        JS.definePropertyAccessors(jsColumn, propertyName, value -> {
            setter.setValue(value);
            gridComponent.updateWidth();
        }, null);
    }

    /** Returns the JS configuration object this column is bound to. */
    public JSColumn getJsColumn() {
        return jsColumn;
    }

    /**
     * Extracts this column's value from a data item: primitives map to the
     * first column only, arrays are indexed by column position, and objects
     * are navigated by the (dot-separated) column name.
     */
    @Override
    public Object getValue(Object dataItem) {
        Object item = GridDataSource.extractDataItem(dataItem);
        if (JS.isPrimitiveType(item)) {
            return getColumnIndex() == 0 ? item : null;
        }
        if (JsUtils.isArray((JavaScriptObject) item)) {
            return ((JSArray<Object>) item).get(getColumnIndex());
        }
        return getNestedProperty(item,
                Arrays.asList(jsColumn.getName().split("\\.")));
    }

    /**
     * Walks a chain of property names into a JS object; returns null as soon
     * as a non-object is reached before the chain is exhausted.
     */
    private Object getNestedProperty(Object o, List<String> props) {
        Object current = o;
        for (String prop : props) {
            if (!JS.isObject(current)) {
                return null;
            }
            current = JsUtils.prop((JavaScriptObject) current, prop);
        }
        return current;
    }

    /** Position of this column among the component's configured columns. */
    private int getColumnIndex() {
        return gridComponent.getColumns().indexOf(jsColumn);
    }
}
|
vaadin-components-gwt/src/main/java/com/vaadin/components/grid/table/GridColumn.java
|
package com.vaadin.components.grid.table;
import java.util.Arrays;
import java.util.List;
import com.google.gwt.core.client.JavaScriptObject;
import com.google.gwt.dom.client.Element;
import com.google.gwt.dom.client.Style.Overflow;
import com.google.gwt.dom.client.Style.TextOverflow;
import com.google.gwt.query.client.js.JsUtils;
import com.google.gwt.user.client.DOM;
import com.google.gwt.user.client.ui.HTML;
import com.vaadin.client.widgets.Grid.Column;
import com.vaadin.components.common.js.JS;
import com.vaadin.components.common.js.JS.Getter;
import com.vaadin.components.common.js.JS.Setter;
import com.vaadin.components.common.js.JSArray;
import com.vaadin.components.grid.GridComponent;
import com.vaadin.components.grid.config.JSCell;
import com.vaadin.components.grid.config.JSColumn;
import com.vaadin.components.grid.config.JSStaticCell;
import com.vaadin.components.grid.data.GridDataSource;
import com.vaadin.components.grid.data.GridDomTableDataSource;
import com.vaadin.shared.ui.grid.GridConstants;
public final class GridColumn extends Column<Object, Object> {
private final JSColumn jsColumn;
private final GridComponent gridComponent;
public static GridColumn addColumn(JSColumn jsColumn,
GridComponent gridComponent) {
GridColumn result = new GridColumn(jsColumn, gridComponent);
gridComponent.getGrid().addColumn(result, gridComponent.getGrid().getVisibleColumns().size());
result.bindProperties();
return result;
}
private GridColumn(JSColumn jsColumn, GridComponent gridComponent) {
this.jsColumn = jsColumn;
this.gridComponent = gridComponent;
// Default renderer
setRenderer((cell, data) -> {
Element element = cell.getElement();
String content = JS.isUndefinedOrNull(data) ? "" : data.toString();
if (gridComponent.getDataSource() instanceof GridDomTableDataSource
&& new HTML(content).getElement().getFirstChildElement() != null) {
element.setInnerHTML(content);
} else {
Element wrapper = element.getFirstChildElement();
if (wrapper == null || !wrapper.getPropertyBoolean("iswrapper")) {
// Need to create a new wrapper
wrapper = DOM.createSpan();
wrapper.getStyle().setOverflow(Overflow.HIDDEN);
wrapper.getStyle().setTextOverflow(TextOverflow.ELLIPSIS);
wrapper.setPropertyBoolean("iswrapper", true);
element.removeAllChildren();
element.appendChild(wrapper);
}
wrapper.setInnerText(content);
}
});
}
private JSStaticCell getDefaultHeaderCellReference() {
GridStaticSection staticSection = gridComponent.getStaticSection();
return staticSection.getHeaderCellByColumn(
staticSection.getDefaultHeader(), this);
}
private void bindProperties() {
JS.definePropertyAccessors(jsColumn, "headerContent", v -> {
getDefaultHeaderCellReference().setContent(v);
gridComponent.updateWidth();
}, () -> getDefaultHeaderCellReference().getContent());
JS.definePropertyAccessors(jsColumn, "hidden", v -> {
setHidden((Boolean) v);
gridComponent.updateWidth();
}, this::isHidden);
bind("headerText", v -> setHeaderCaption(v == null ? "" : v.toString()));
bind("hidingToggleText", v -> setHidingToggleCaption(v == null ? null : v.toString()));
bind("flex", v -> setExpandRatio(((Double) v).intValue()));
bind("sortable", v -> setSortable((Boolean) v));
bind("hidable", v -> setHidable((Boolean) v));
bind("readOnly", v -> setEditable(!(boolean) v));
bind("renderer", v -> setRenderer((cell, data) -> {
JSCell jsCell = JSCell.create(cell, gridComponent.getContainer());
JS.exec(v, jsCell);
}));
bind("minWidth",
v -> setMinimumWidth(JS.isUndefinedOrNull(v) ? GridConstants.DEFAULT_MIN_WIDTH
: (double) v));
bind("maxWidth",
v -> setMaximumWidth(JS.isUndefinedOrNull(v) ? GridConstants.DEFAULT_MAX_WIDTH
: (double) v));
bind("width",
v -> setWidth(JS.isUndefinedOrNull(v) ? GridConstants.DEFAULT_COLUMN_WIDTH_PX
: (double) v));
}
private void bind(String propertyName, final Setter setter) {
JS.definePropertyAccessors(jsColumn, propertyName, v -> {
setter.setValue(v);
gridComponent.updateWidth();
}, null);
}
private void bind(String propertyName, final Setter setter, final Getter getter) {
JS.definePropertyAccessors(jsColumn, propertyName, v -> {
setter.setValue(v);
gridComponent.updateWidth();
}, getter::getValue);
}
public JSColumn getJsColumn() {
return jsColumn;
}
@Override
public Object getValue(Object dataItem) {
dataItem = GridDataSource.extractDataItem(dataItem);
Object result = null;
if (JS.isPrimitiveType(dataItem)) {
if (getColumnIndex() == 0) {
result = dataItem;
}
} else {
if (JsUtils.isArray((JavaScriptObject) dataItem)) {
result = ((JSArray<Object>) dataItem).get(getColumnIndex());
} else {
result = getNestedProperty(dataItem,
Arrays.asList(jsColumn.getName().split("\\.")));
}
}
return result;
}
private Object getNestedProperty(Object o, List<String> props) {
Object result = null;
if (props.isEmpty()) {
result = o;
} else if (JS.isObject(o)) {
result = getNestedProperty(
JsUtils.prop((JavaScriptObject) o, props.get(0)),
props.subList(1, props.size()));
}
return result;
}
private int getColumnIndex() {
return gridComponent.getColumns().indexOf(jsColumn);
}
}
|
Removed reused method
|
vaadin-components-gwt/src/main/java/com/vaadin/components/grid/table/GridColumn.java
|
Removed reused method
|
|
Java
|
apache-2.0
|
cceb69ba70a1f21a1cdac26493845733a67dfbf7
| 0
|
dkmfbk/utils,fbk/utils
|
package eu.fbk.utils.corenlp.outputters;
import com.google.gson.*;
import edu.stanford.nlp.coref.CorefCoreAnnotations;
import edu.stanford.nlp.coref.data.CorefChain;
import edu.stanford.nlp.ie.machinereading.structure.Span;
import edu.stanford.nlp.ie.util.RelationTriple;
import edu.stanford.nlp.ling.CoreAnnotations;
import edu.stanford.nlp.ling.CoreLabel;
import edu.stanford.nlp.ling.IndexedWord;
import edu.stanford.nlp.naturalli.NaturalLogicAnnotations;
import edu.stanford.nlp.neural.rnn.RNNCoreAnnotations;
import edu.stanford.nlp.pipeline.Annotation;
import edu.stanford.nlp.pipeline.StanfordCoreNLP;
import edu.stanford.nlp.semgraph.SemanticGraph;
import edu.stanford.nlp.semgraph.SemanticGraphCoreAnnotations;
import edu.stanford.nlp.semgraph.SemanticGraphEdge;
import edu.stanford.nlp.sentiment.SentimentCoreAnnotations;
import edu.stanford.nlp.time.TimeAnnotations;
import edu.stanford.nlp.time.Timex;
import edu.stanford.nlp.trees.Tree;
import edu.stanford.nlp.trees.TreeCoreAnnotations;
import edu.stanford.nlp.trees.TreePrint;
import edu.stanford.nlp.util.CoreMap;
import edu.stanford.nlp.util.TypesafeMap;
import eu.fbk.utils.gson.AnnotationExclusionStrategy;
import eu.fbk.utils.gson.JSONLabel;
import java.io.*;
import java.lang.reflect.Type;
import java.util.List;
/**
* Output an Annotation to JSON.
*
* @author Alessio Palmero Aprosio
*/
@SuppressWarnings("unused")
public class JSONOutputter extends AnnotationOutputter {
private final ThreadLocal<Annotation> annotationThreadLocal = new ThreadLocal<>();
GsonBuilder gsonBuilder = new GsonBuilder();
/**
 * Copies annotation values from a CoreNLP {@link TypesafeMap} into the given
 * JSON object. Only annotation classes marked with {@link JSONLabel} are
 * serialized; the JSON property name is taken from the label's value.
 * Serialization is best-effort: values Gson cannot handle are silently skipped.
 *
 * @param gson the (pre-configured) Gson instance used to serialize values
 * @param jsonObject the JSON object to add properties to
 * @param annotation the CoreNLP annotation map to read from
 */
static private void add(Gson gson, JsonObject jsonObject, TypesafeMap annotation) {
for (Class<?> myClass : annotation.keySet()) {
Object o = annotation.get((Class) myClass);
if (o != null) {
if (myClass.isAnnotationPresent(JSONLabel.class)) {
JSONLabel JsonAnnotation = myClass.getAnnotation(JSONLabel.class);
String name = JsonAnnotation.value();
if (name != null && name.length() > 0) {
try {
jsonObject.add(name, gson.toJsonTree(o));
} catch (Exception e) {
// ignored: best-effort serialization, skip values Gson cannot serialize
}
}
Class<?>[] serializerClasses = JsonAnnotation.serializer();
for (Class<?> serializerClass : serializerClasses) {
if (JsonSerializer.class.isAssignableFrom(serializerClass)) {
// do stuff -- TODO(review): custom serializers declared via @JSONLabel are
// recognized here but never registered; confirm intended behavior
}
}
}
}
}
}
/**
 * Serializes a CoreNLP token ({@link CoreLabel}) as just its numeric index
 * within the sentence.
 */
class CoreLabelSerializer implements JsonSerializer<CoreLabel> {
    @Override
    public JsonElement serialize(CoreLabel coreLabel, Type type, JsonSerializationContext jsonSerializationContext) {
        int tokenIndex = coreLabel.index();
        return new JsonPrimitive(tokenIndex);
    }
}
/**
 * Serializes a token {@link Span} as a two-element JSON array holding its
 * start and end offsets.
 */
class SpanSerializer implements JsonSerializer<Span> {
    @Override
    public JsonElement serialize(Span span, Type type,
            JsonSerializationContext jsonSerializationContext) {
        JsonArray bounds = new JsonArray();
        bounds.add(span.start());
        bounds.add(span.end());
        return bounds;
    }
}
/**
 * Serializes a dependency graph ({@link SemanticGraph}) as a flat JSON array
 * of edge objects. Roots are emitted first as artificial "ROOT" edges with
 * governor index 0, followed by the sorted real edges.
 */
class SemanticGraphSerializer implements JsonSerializer<SemanticGraph> {
    @Override
    public JsonElement serialize(SemanticGraph semanticGraph, Type type,
            JsonSerializationContext jsonSerializationContext) {
        JsonArray edges = new JsonArray();
        for (IndexedWord root : semanticGraph.getRoots()) {
            edges.add(edgeObject("ROOT", 0, "ROOT", root.index(), root.word()));
        }
        for (SemanticGraphEdge edge : semanticGraph.edgeListSorted()) {
            edges.add(edgeObject(edge.getRelation().toString(),
                    edge.getGovernor().index(), edge.getGovernor().word(),
                    edge.getDependent().index(), edge.getDependent().word()));
        }
        return edges;
    }

    /** Builds one dependency-edge JSON object in the fixed property order. */
    private JsonObject edgeObject(String relation, int governorIndex,
            String governorGloss, int dependentIndex, String dependentGloss) {
        JsonObject object = new JsonObject();
        object.addProperty("dep", relation);
        object.addProperty("governor", governorIndex);
        object.addProperty("governorGloss", governorGloss);
        object.addProperty("dependent", dependentIndex);
        object.addProperty("dependentGloss", dependentGloss);
        return object;
    }
}
/**
 * Serializes an OpenIE {@link RelationTriple} as an object carrying the
 * subject/relation/object glosses together with their token spans.
 */
class RelationTripleSerializer implements JsonSerializer<RelationTriple> {
    @Override
    public JsonElement serialize(RelationTriple triple, Type type,
            JsonSerializationContext context) {
        JsonObject result = new JsonObject();
        result.addProperty("subject", triple.subjectGloss());
        result.add("subjectSpan", context.serialize(Span.fromPair(triple.subjectTokenSpan())));
        result.addProperty("relation", triple.relationGloss());
        result.add("relationSpan", context.serialize(Span.fromPair(triple.relationTokenSpan())));
        result.addProperty("object", triple.objectGloss());
        result.add("objectSpan", context.serialize(Span.fromPair(triple.objectTokenSpan())));
        return result;
    }
}
/**
 * Serializes a {@link Timex} temporal annotation as an object with its id,
 * TIMEX type, value, and alternate value.
 */
class TimexSerializer implements JsonSerializer<Timex> {
    @Override
    public JsonElement serialize(Timex time, Type type,
            JsonSerializationContext context) {
        JsonObject result = new JsonObject();
        result.addProperty("tid", time.tid());
        result.addProperty("type", time.timexType());
        result.addProperty("value", time.value());
        result.addProperty("altValue", time.altVal());
        return result;
    }
}
class DoubleSerializer implements JsonSerializer<Double> {

    /**
     * Serializes a Double, mapping NaN and infinite values to JSON null.
     * <p>
     * Fix: the previous implementation set the value to {@code null} and then
     * called {@code new JsonPrimitive((Number) null)}, which throws a
     * NullPointerException; JSON null must be represented by {@link JsonNull}.
     */
    @Override
    public JsonElement serialize(Double aDouble, Type type, JsonSerializationContext jsonSerializationContext) {
        if (aDouble == null || aDouble.isNaN() || aDouble.isInfinite()) {
            return JsonNull.INSTANCE;
        }
        return new JsonPrimitive(aDouble);
    }
}
class CorefChainSerializer implements JsonSerializer<CorefChain> {
// Serializes a coreference chain as a JSON array of its mentions, in textual order.
@Override
public JsonElement serialize(CorefChain chain, Type type,
JsonSerializationContext jsonSerializationContext) {
// Compared by identity below to flag the representative mention in the output.
CorefChain.CorefMention representative = chain.getRepresentativeMention();
JsonArray chainArray = new JsonArray();
for (CorefChain.CorefMention mention : chain.getMentionsInTextualOrder()) {
JsonObject mentionObj = new JsonObject();
mentionObj.addProperty("id", mention.mentionID);
mentionObj.add("text", jsonSerializationContext.serialize(mention.mentionSpan));
mentionObj.add("type", jsonSerializationContext.serialize(mention.mentionType));
mentionObj.add("number", jsonSerializationContext.serialize(mention.number));
mentionObj.add("gender", jsonSerializationContext.serialize(mention.gender));
mentionObj.add("animacy", jsonSerializationContext.serialize(mention.animacy));
mentionObj.addProperty("startIndex", mention.startIndex);
mentionObj.addProperty("endIndex", mention.endIndex);
mentionObj.addProperty("sentNum", mention.sentNum);
mentionObj.add("position", jsonSerializationContext.serialize(mention.position.elems()));
mentionObj.addProperty("isRepresentativeMention", mention == representative);
chainArray.add(mentionObj);
}
return chainArray;
}
}
/**
 * Creates an outputter that reuses the given builder; {@code print} will register
 * the CoreNLP type adapters on it before serializing.
 */
public JSONOutputter(GsonBuilder gsonBuilder) {
this.gsonBuilder = gsonBuilder;
}
/** Creates an outputter with a fresh, default-configured {@link GsonBuilder}. */
public JSONOutputter() {
this.gsonBuilder = new GsonBuilder();
}
/**
 * Serializes the document annotation as a JSON object and writes it to {@code target}.
 * The stream is flushed but intentionally not closed (the caller owns it).
 *
 * @param doc the annotated document to serialize
 * @param target stream receiving the UTF-8 encoded JSON
 * @param options controls pretty-printing and whether the full text is included
 * @throws IOException if writing to the stream fails
 */
@Override
public void print(Annotation doc, OutputStream target, Options options) throws IOException {
    if (options.pretty) {
        gsonBuilder.setPrettyPrinting();
    }
    // Register serializers for CoreNLP types; re-registering on a reused builder
    // overrides the previous registration.
    gsonBuilder.registerTypeAdapter(SemanticGraph.class, new SemanticGraphSerializer());
    gsonBuilder.registerTypeAdapter(Span.class, new SpanSerializer());
    gsonBuilder.registerTypeAdapter(RelationTriple.class, new RelationTripleSerializer());
    gsonBuilder.registerTypeAdapter(Timex.class, new TimexSerializer());
    gsonBuilder.registerTypeAdapter(CorefChain.class, new CorefChainSerializer());
    gsonBuilder.registerTypeAdapter(CoreLabel.class, new CoreLabelSerializer());
    gsonBuilder.registerTypeAdapter(Double.class, new DoubleSerializer());
    gsonBuilder.serializeSpecialFloatingPointValues();
    gsonBuilder.setExclusionStrategies(new AnnotationExclusionStrategy());
    Gson gson = gsonBuilder.create();
    String text = doc.get(CoreAnnotations.TextAnnotation.class);
    JsonObject jsonObject = new JsonObject();
    // Document-level metadata; Gson simply omits properties with null values.
    jsonObject.addProperty("docId", doc.get(CoreAnnotations.DocIDAnnotation.class));
    jsonObject.addProperty("docDate", doc.get(CoreAnnotations.DocDateAnnotation.class));
    jsonObject.addProperty("docSourceType", doc.get(CoreAnnotations.DocSourceTypeAnnotation.class));
    jsonObject.addProperty("docType", doc.get(CoreAnnotations.DocTypeAnnotation.class));
    jsonObject.addProperty("author", doc.get(CoreAnnotations.AuthorAnnotation.class));
    jsonObject.addProperty("location", doc.get(CoreAnnotations.LocationAnnotation.class));
    if (options.includeText) {
        jsonObject.addProperty("text", text);
    }
    // Quotations, each with up to 100 characters of surrounding context.
    List<CoreMap> quotes = doc.get(CoreAnnotations.QuotationsAnnotation.class);
    if (quotes != null && !quotes.isEmpty()) {
        JsonArray jsonQuotesArray = new JsonArray();
        for (CoreMap quote : quotes) {
            List<CoreLabel> tokens = quote.get(CoreAnnotations.TokensAnnotation.class);
            if (tokens == null || tokens.isEmpty()) {
                continue; // robustness: skip quotes without token offsets instead of crashing
            }
            JsonObject quoteObj = new JsonObject();
            int begin = tokens.get(0).beginPosition();
            int end = tokens.get(tokens.size() - 1).endPosition();
            int beginContext = Math.max(0, begin - 100);
            int endContext = Math.min(end + 100, text.length());
            quoteObj.addProperty("text", quote.get(CoreAnnotations.TextAnnotation.class));
            quoteObj.addProperty("context", text.substring(beginContext, endContext));
            quoteObj.addProperty("characterOffsetBegin", begin);
            quoteObj.addProperty("characterOffsetEnd", end);
            jsonQuotesArray.add(quoteObj);
        }
        jsonObject.add("quotes", jsonQuotesArray);
    }
    // Extra annotations whose key classes are tagged with @JSONLabel.
    add(gson, jsonObject, doc);
    // Sentences
    if (doc.get(CoreAnnotations.SentencesAnnotation.class) != null) {
        addSentences(gson, jsonObject, doc.get(CoreAnnotations.SentencesAnnotation.class), options);
    }
    // Add coref values
    annotationThreadLocal.set(doc);
    jsonObject.add("corefs", gson.toJsonTree(doc.get(CorefCoreAnnotations.CorefChainAnnotation.class)));
    // Fix: write with an explicit UTF-8 charset so the emitted bytes match the
    // UTF-8 decoding performed by jsonPrint(...); previously the platform
    // default charset was used, corrupting non-ASCII output on some platforms.
    Writer w = new OutputStreamWriter(target, "UTF-8");
    w.write(gson.toJson(jsonObject));
    w.flush();
}
// Serializes each sentence (identity, character span, dependencies, constituency
// parse, sentiment, OpenIE triples, tokens, and @JSONLabel extras) and attaches
// the resulting array to jsonObject under "sentences".
private static void addSentences(Gson gson, JsonObject jsonObject, List<CoreMap> sentences,
Options options) {
JsonArray jsonSentenceArray = new JsonArray();
for (CoreMap sentence : sentences) {
JsonObject sentenceObj = new JsonObject();
List<CoreLabel> tokens = sentence.get(CoreAnnotations.TokensAnnotation.class);
// Identity and character span; Gson omits null-valued properties.
sentenceObj.addProperty("id", sentence.get(CoreAnnotations.SentenceIDAnnotation.class));
sentenceObj.addProperty("index", sentence.get(CoreAnnotations.SentenceIndexAnnotation.class));
sentenceObj.addProperty("line", sentence.get(CoreAnnotations.LineNumberAnnotation.class));
sentenceObj.addProperty("characterOffsetBegin", tokens.get(0).beginPosition());
sentenceObj.addProperty("characterOffsetEnd", tokens.get(tokens.size() - 1).endPosition());
sentenceObj.addProperty("text", sentence.get(CoreAnnotations.TextAnnotation.class));
// Dependencies (serialized via SemanticGraphSerializer registered on the Gson instance)
sentenceObj.add("basic-dependencies",
gson.toJsonTree(sentence.get(SemanticGraphCoreAnnotations.BasicDependenciesAnnotation.class)));
sentenceObj.add("collapsed-dependencies", gson.toJsonTree(
sentence.get(SemanticGraphCoreAnnotations.CollapsedDependenciesAnnotation.class)));
sentenceObj.add("collapsed-ccprocessed-dependencies", gson.toJsonTree(
sentence.get(SemanticGraphCoreAnnotations.CollapsedCCProcessedDependenciesAnnotation.class)));
// Constituents: print the parse tree to a string, one line, trimmed.
Tree tree = sentence.get(TreeCoreAnnotations.TreeAnnotation.class);
StringWriter treeStrWriter = new StringWriter();
TreePrint treePrinter = options.constituentTreePrinter;
if (treePrinter == AnnotationOutputter.DEFAULT_CONSTITUENT_TREE_PRINTER) {
// note the '==' -- we're overwriting the default, but only if it was not explicitly set otherwise
treePrinter = new TreePrint("oneline");
}
treePrinter.printTree(tree,
new PrintWriter(treeStrWriter, true));
sentenceObj.addProperty("parse", treeStrWriter.toString().trim());
// Sentiment (only when the sentiment annotator ran on this sentence)
Tree sentimentTree = sentence.get(SentimentCoreAnnotations.SentimentAnnotatedTree.class);
if (sentimentTree != null) {
int sentiment = RNNCoreAnnotations.getPredictedClass(sentimentTree);
String sentimentClass = sentence.get(SentimentCoreAnnotations.SentimentClass.class);
sentenceObj.addProperty("sentimentValue", Integer.toString(sentiment));
// Collapse e.g. "Very positive" to "Verypositive" for a compact label.
sentenceObj.addProperty("sentiment", sentimentClass.replaceAll("\\s+", ""));
}
// OpenIE relation triples
sentenceObj.add("openie", gson.toJsonTree(sentence
.get(NaturalLogicAnnotations.RelationTriplesAnnotation.class)));
// Tokens
if (sentence.get(CoreAnnotations.TokensAnnotation.class) != null) {
addTokens(gson, sentenceObj, sentence.get(CoreAnnotations.TokensAnnotation.class));
}
add(gson, sentenceObj, sentence);
jsonSentenceArray.add(sentenceObj);
}
jsonObject.add("sentences", jsonSentenceArray);
}
// Serializes each token (index, surface forms, lemma, character offsets, POS,
// features text, NER, case, surrounding whitespace, timex, @JSONLabel extras)
// and attaches the resulting array to sentenceObj under "tokens".
private static void addTokens(Gson gson, JsonObject sentenceObj, List<CoreLabel> tokens) {
JsonArray jsonTokenArray = new JsonArray();
for (CoreLabel token : tokens) {
JsonObject tokenObj = new JsonObject();
tokenObj.addProperty("index", token.index());
tokenObj.addProperty("word", token.word());
tokenObj.addProperty("originalText", token.originalText());
tokenObj.addProperty("lemma", token.lemma());
tokenObj.addProperty("characterOffsetBegin", token.beginPosition());
tokenObj.addProperty("characterOffsetEnd", token.endPosition());
tokenObj.addProperty("pos", token.tag());
tokenObj.addProperty("featuresText", token.get(CoreAnnotations.FeaturesAnnotation.class));
tokenObj.addProperty("ner", token.ner());
tokenObj.addProperty("normalizedNER",
token.get(CoreAnnotations.NormalizedNamedEntityTagAnnotation.class));
tokenObj.addProperty("speaker", token.get(CoreAnnotations.SpeakerAnnotation.class));
tokenObj.addProperty("truecase", token.get(CoreAnnotations.TrueCaseAnnotation.class));
tokenObj.addProperty("truecaseText", token.get(CoreAnnotations.TrueCaseTextAnnotation.class));
// Whitespace/context immediately before and after the token.
tokenObj.addProperty("before", token.get(CoreAnnotations.BeforeAnnotation.class));
tokenObj.addProperty("after", token.get(CoreAnnotations.AfterAnnotation.class));
// Timex
tokenObj.add("timex", gson.toJsonTree(token.get(TimeAnnotations.TimexAnnotation.class)))
;
add(gson, tokenObj, token);
jsonTokenArray.add(tokenObj);
}
sentenceObj.add("tokens", jsonTokenArray);
}
/** Prints the annotation using the given builder and returns the JSON as a UTF-8 string. */
public static String jsonPrint(GsonBuilder gsonBuilder, Annotation annotation) throws IOException {
ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
new JSONOutputter(gsonBuilder).print(annotation, outputStream);
return new String(outputStream.toByteArray(), "UTF-8");
}
/** Prints the annotation to the given stream using the given builder. */
public static void jsonPrint(GsonBuilder gsonBuilder, Annotation annotation, OutputStream os) throws IOException {
new JSONOutputter(gsonBuilder).print(annotation, os);
}
/** Prints the annotation to the given stream, taking output options from the pipeline. */
public static void jsonPrint(GsonBuilder gsonBuilder, Annotation annotation, OutputStream os,
StanfordCoreNLP pipeline) throws IOException {
new JSONOutputter(gsonBuilder).print(annotation, os, pipeline);
}
/** Prints the annotation to the given stream with explicit options. */
public static void jsonPrint(GsonBuilder gsonBuilder, Annotation annotation, OutputStream os, Options options)
throws IOException {
new JSONOutputter(gsonBuilder).print(annotation, os, options);
}
/** Prints the annotation with default options and returns the JSON as a UTF-8 string. */
public static String jsonPrint(Annotation annotation) throws IOException {
return jsonPrint(annotation, new Options());
}
/** Prints the annotation with the given options and returns the JSON as a UTF-8 string. */
public static String jsonPrint(Annotation annotation, Options options) throws IOException {
ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
new JSONOutputter().print(annotation, outputStream, options);
return new String(outputStream.toByteArray(), "UTF-8");
}
/** Prints the annotation to the given stream with a default-configured outputter. */
public static void jsonPrint(Annotation annotation, OutputStream os) throws IOException {
new JSONOutputter().print(annotation, os);
}
/** Prints the annotation to the given stream, taking output options from the pipeline. */
public static void jsonPrint(Annotation annotation, OutputStream os,
StanfordCoreNLP pipeline) throws IOException {
new JSONOutputter().print(annotation, os, pipeline);
}
/** Prints the annotation to the given stream with explicit options. */
public static void jsonPrint(Annotation annotation, OutputStream os, Options options)
throws IOException {
new JSONOutputter().print(annotation, os, options);
}
}
|
utils-corenlp/src/main/java/eu/fbk/utils/corenlp/outputters/JSONOutputter.java
|
package eu.fbk.utils.corenlp.outputters;
import com.google.gson.*;
import edu.stanford.nlp.coref.CorefCoreAnnotations;
import edu.stanford.nlp.coref.data.CorefChain;
import edu.stanford.nlp.ie.machinereading.structure.Span;
import edu.stanford.nlp.ie.util.RelationTriple;
import edu.stanford.nlp.ling.CoreAnnotations;
import edu.stanford.nlp.ling.CoreLabel;
import edu.stanford.nlp.ling.IndexedWord;
import edu.stanford.nlp.naturalli.NaturalLogicAnnotations;
import edu.stanford.nlp.neural.rnn.RNNCoreAnnotations;
import edu.stanford.nlp.pipeline.Annotation;
import edu.stanford.nlp.pipeline.StanfordCoreNLP;
import edu.stanford.nlp.semgraph.SemanticGraph;
import edu.stanford.nlp.semgraph.SemanticGraphCoreAnnotations;
import edu.stanford.nlp.semgraph.SemanticGraphEdge;
import edu.stanford.nlp.sentiment.SentimentCoreAnnotations;
import edu.stanford.nlp.time.TimeAnnotations;
import edu.stanford.nlp.time.Timex;
import edu.stanford.nlp.trees.Tree;
import edu.stanford.nlp.trees.TreeCoreAnnotations;
import edu.stanford.nlp.trees.TreePrint;
import edu.stanford.nlp.util.CoreMap;
import edu.stanford.nlp.util.TypesafeMap;
import eu.fbk.utils.gson.AnnotationExclusionStrategy;
import eu.fbk.utils.gson.JSONLabel;
import java.io.*;
import java.lang.reflect.Type;
import java.util.List;
/**
* Output an Annotation to JSON.
*
* @author Alessio Palmero Aprosio
*/
@SuppressWarnings("unused")
public class JSONOutputter extends AnnotationOutputter {
private final ThreadLocal<Annotation> annotationThreadLocal = new ThreadLocal<>();
GsonBuilder gsonBuilder = new GsonBuilder();
// Copies annotation entries whose key class is tagged with @JSONLabel into
// jsonObject, under the property name given by the label's value().
static private void add(Gson gson, JsonObject jsonObject, TypesafeMap annotation) {
for (Class<?> myClass : annotation.keySet()) {
Object o = annotation.get((Class) myClass);
if (o != null) {
if (myClass.isAnnotationPresent(JSONLabel.class)) {
JSONLabel JsonAnnotation = myClass.getAnnotation(JSONLabel.class);
String name = JsonAnnotation.value();
if (name != null && name.length() > 0) {
try {
jsonObject.add(name, gson.toJsonTree(o));
} catch (Exception e) {
// ignored: serializing an optional extra annotation is best-effort
}
}
// NOTE(review): declared serializer classes are detected but never applied —
// the loop body below is a placeholder; confirm the intended behavior.
Class<?>[] serializerClasses = JsonAnnotation.serializer();
for (Class<?> serializerClass : serializerClasses) {
if (JsonSerializer.class.isAssignableFrom(serializerClass)) {
// do stuff
}
}
}
}
}
}
class CoreLabelSerializer implements JsonSerializer<CoreLabel> {

    /** A CoreLabel is reduced to its token index when serialized. */
    @Override
    public JsonElement serialize(CoreLabel coreLabel, Type type, JsonSerializationContext jsonSerializationContext) {
        int tokenIndex = coreLabel.index();
        return new JsonPrimitive(tokenIndex);
    }
}
class SpanSerializer implements JsonSerializer<Span> {

    /** Serializes a token Span as a two-element JSON array: [start, end]. */
    @Override
    public JsonElement serialize(Span span, Type type,
            JsonSerializationContext jsonSerializationContext) {
        JsonArray pair = new JsonArray();
        pair.add(span.start());
        pair.add(span.end());
        return pair;
    }
}
class SemanticGraphSerializer implements JsonSerializer<SemanticGraph> {

    /**
     * Converts a dependency graph to a flat JSON array of edge records.
     * Roots are encoded as edges from a virtual governor with index 0 and gloss "ROOT".
     */
    @Override
    public JsonElement serialize(SemanticGraph semanticGraph, Type type,
            JsonSerializationContext jsonSerializationContext) {
        JsonArray dependencies = new JsonArray();
        for (IndexedWord root : semanticGraph.getRoots()) {
            dependencies.add(edgeRecord("ROOT", 0, "ROOT", root.index(), root.word()));
        }
        for (SemanticGraphEdge edge : semanticGraph.edgeListSorted()) {
            dependencies.add(edgeRecord(edge.getRelation().toString(),
                    edge.getGovernor().index(), edge.getGovernor().word(),
                    edge.getDependent().index(), edge.getDependent().word()));
        }
        return dependencies;
    }

    /** Builds one JSON object describing a single dependency edge. */
    private static JsonObject edgeRecord(String dep, int governor, String governorGloss,
            int dependent, String dependentGloss) {
        JsonObject object = new JsonObject();
        object.addProperty("dep", dep);
        object.addProperty("governor", governor);
        object.addProperty("governorGloss", governorGloss);
        object.addProperty("dependent", dependent);
        object.addProperty("dependentGloss", dependentGloss);
        return object;
    }
}
class RelationTripleSerializer implements JsonSerializer<RelationTriple> {

    /**
     * Serializes an OpenIE relation triple as an object carrying the subject,
     * relation and object glosses plus their token spans.
     */
    @Override
    public JsonElement serialize(RelationTriple triple, Type type,
            JsonSerializationContext jsonSerializationContext) {
        JsonObject json = new JsonObject();
        json.addProperty("subject", triple.subjectGloss());
        json.add("subjectSpan", jsonSerializationContext.serialize(Span.fromPair(triple.subjectTokenSpan())));
        json.addProperty("relation", triple.relationGloss());
        json.add("relationSpan", jsonSerializationContext.serialize(Span.fromPair(triple.relationTokenSpan())));
        json.addProperty("object", triple.objectGloss());
        json.add("objectSpan", jsonSerializationContext.serialize(Span.fromPair(triple.objectTokenSpan())));
        return json;
    }
}
class TimexSerializer implements JsonSerializer<Timex> {

    /** Renders a Timex expression as {tid, type, value, altValue}. */
    @Override
    public JsonElement serialize(Timex time, Type type,
            JsonSerializationContext jsonSerializationContext) {
        JsonObject json = new JsonObject();
        json.addProperty("tid", time.tid());
        json.addProperty("type", time.timexType());
        json.addProperty("value", time.value());
        json.addProperty("altValue", time.altVal());
        return json;
    }
}
class DoubleSerializer implements JsonSerializer<Double> {

    /**
     * Serializes a Double, mapping NaN and infinite values to JSON null.
     * <p>
     * Fix: the previous implementation set the value to {@code null} and then
     * called {@code new JsonPrimitive((Number) null)}, which throws a
     * NullPointerException; JSON null must be represented by {@link JsonNull}.
     */
    @Override
    public JsonElement serialize(Double aDouble, Type type, JsonSerializationContext jsonSerializationContext) {
        if (aDouble == null || aDouble.isNaN() || aDouble.isInfinite()) {
            return JsonNull.INSTANCE;
        }
        return new JsonPrimitive(aDouble);
    }
}
class CorefChainSerializer implements JsonSerializer<CorefChain> {
// Serializes a coreference chain as a JSON array of its mentions, in textual order.
@Override
public JsonElement serialize(CorefChain chain, Type type,
JsonSerializationContext jsonSerializationContext) {
// Compared by identity below to flag the representative mention in the output.
CorefChain.CorefMention representative = chain.getRepresentativeMention();
JsonArray chainArray = new JsonArray();
for (CorefChain.CorefMention mention : chain.getMentionsInTextualOrder()) {
JsonObject mentionObj = new JsonObject();
mentionObj.addProperty("id", mention.mentionID);
mentionObj.add("text", jsonSerializationContext.serialize(mention.mentionSpan));
mentionObj.add("type", jsonSerializationContext.serialize(mention.mentionType));
mentionObj.add("number", jsonSerializationContext.serialize(mention.number));
mentionObj.add("gender", jsonSerializationContext.serialize(mention.gender));
mentionObj.add("animacy", jsonSerializationContext.serialize(mention.animacy));
mentionObj.addProperty("startIndex", mention.startIndex);
mentionObj.addProperty("endIndex", mention.endIndex);
mentionObj.addProperty("sentNum", mention.sentNum);
mentionObj.add("position", jsonSerializationContext.serialize(mention.position.elems()));
mentionObj.addProperty("isRepresentativeMention", mention == representative);
chainArray.add(mentionObj);
}
return chainArray;
}
}
/**
 * Creates an outputter that reuses the given builder; {@code print} will register
 * the CoreNLP type adapters on it before serializing.
 */
public JSONOutputter(GsonBuilder gsonBuilder) {
this.gsonBuilder = gsonBuilder;
}
/** Creates an outputter with a fresh, default-configured {@link GsonBuilder}. */
public JSONOutputter() {
this.gsonBuilder = new GsonBuilder();
}
/**
 * Serializes the document annotation as a JSON object and writes it to {@code target}.
 * The stream is flushed but intentionally not closed (the caller owns it).
 *
 * @param doc the annotated document to serialize
 * @param target stream receiving the UTF-8 encoded JSON
 * @param options controls pretty-printing and whether the full text is included
 * @throws IOException if writing to the stream fails
 */
@Override
public void print(Annotation doc, OutputStream target, Options options) throws IOException {
    if (options.pretty) {
        gsonBuilder.setPrettyPrinting();
    }
    // Register serializers for CoreNLP types; re-registering on a reused builder
    // overrides the previous registration.
    gsonBuilder.registerTypeAdapter(SemanticGraph.class, new SemanticGraphSerializer());
    gsonBuilder.registerTypeAdapter(Span.class, new SpanSerializer());
    gsonBuilder.registerTypeAdapter(RelationTriple.class, new RelationTripleSerializer());
    gsonBuilder.registerTypeAdapter(Timex.class, new TimexSerializer());
    gsonBuilder.registerTypeAdapter(CorefChain.class, new CorefChainSerializer());
    gsonBuilder.registerTypeAdapter(CoreLabel.class, new CoreLabelSerializer());
    gsonBuilder.registerTypeAdapter(Double.class, new DoubleSerializer());
    gsonBuilder.serializeSpecialFloatingPointValues();
    gsonBuilder.setExclusionStrategies(new AnnotationExclusionStrategy());
    Gson gson = gsonBuilder.create();
    String text = doc.get(CoreAnnotations.TextAnnotation.class);
    JsonObject jsonObject = new JsonObject();
    // Document-level metadata; Gson simply omits properties with null values.
    jsonObject.addProperty("docId", doc.get(CoreAnnotations.DocIDAnnotation.class));
    jsonObject.addProperty("docDate", doc.get(CoreAnnotations.DocDateAnnotation.class));
    jsonObject.addProperty("docSourceType", doc.get(CoreAnnotations.DocSourceTypeAnnotation.class));
    jsonObject.addProperty("docType", doc.get(CoreAnnotations.DocTypeAnnotation.class));
    jsonObject.addProperty("author", doc.get(CoreAnnotations.AuthorAnnotation.class));
    jsonObject.addProperty("location", doc.get(CoreAnnotations.LocationAnnotation.class));
    if (options.includeText) {
        jsonObject.addProperty("text", text);
    }
    // Quotations, each with up to 100 characters of surrounding context.
    List<CoreMap> quotes = doc.get(CoreAnnotations.QuotationsAnnotation.class);
    if (quotes != null && !quotes.isEmpty()) {
        JsonArray jsonQuotesArray = new JsonArray();
        for (CoreMap quote : quotes) {
            List<CoreLabel> tokens = quote.get(CoreAnnotations.TokensAnnotation.class);
            if (tokens == null || tokens.isEmpty()) {
                continue; // robustness: skip quotes without token offsets instead of crashing
            }
            JsonObject quoteObj = new JsonObject();
            int begin = tokens.get(0).beginPosition();
            int end = tokens.get(tokens.size() - 1).endPosition();
            int beginContext = Math.max(0, begin - 100);
            int endContext = Math.min(end + 100, text.length());
            quoteObj.addProperty("text", quote.get(CoreAnnotations.TextAnnotation.class));
            quoteObj.addProperty("context", text.substring(beginContext, endContext));
            quoteObj.addProperty("characterOffsetBegin", begin);
            quoteObj.addProperty("characterOffsetEnd", end);
            jsonQuotesArray.add(quoteObj);
        }
        jsonObject.add("quotes", jsonQuotesArray);
    }
    // Extra annotations whose key classes are tagged with @JSONLabel.
    add(gson, jsonObject, doc);
    // Sentences
    if (doc.get(CoreAnnotations.SentencesAnnotation.class) != null) {
        addSentences(gson, jsonObject, doc.get(CoreAnnotations.SentencesAnnotation.class), options);
    }
    // Add coref values
    annotationThreadLocal.set(doc);
    jsonObject.add("corefs", gson.toJsonTree(doc.get(CorefCoreAnnotations.CorefChainAnnotation.class)));
    // Fix: write with an explicit UTF-8 charset so the emitted bytes match the
    // UTF-8 decoding performed by jsonPrint(...); previously the platform
    // default charset was used, corrupting non-ASCII output on some platforms.
    Writer w = new OutputStreamWriter(target, "UTF-8");
    w.write(gson.toJson(jsonObject));
    w.flush();
}
// Serializes each sentence (identity, character span, dependencies, constituency
// parse, sentiment, OpenIE triples, tokens, and @JSONLabel extras) and attaches
// the resulting array to jsonObject under "sentences".
private static void addSentences(Gson gson, JsonObject jsonObject, List<CoreMap> sentences,
Options options) {
JsonArray jsonSentenceArray = new JsonArray();
for (CoreMap sentence : sentences) {
JsonObject sentenceObj = new JsonObject();
List<CoreLabel> tokens = sentence.get(CoreAnnotations.TokensAnnotation.class);
// Identity and character span; Gson omits null-valued properties.
sentenceObj.addProperty("id", sentence.get(CoreAnnotations.SentenceIDAnnotation.class));
sentenceObj.addProperty("index", sentence.get(CoreAnnotations.SentenceIndexAnnotation.class));
sentenceObj.addProperty("line", sentence.get(CoreAnnotations.LineNumberAnnotation.class));
sentenceObj.addProperty("characterOffsetBegin", tokens.get(0).beginPosition());
sentenceObj.addProperty("characterOffsetEnd", tokens.get(tokens.size() - 1).endPosition());
sentenceObj.addProperty("text", sentence.get(CoreAnnotations.TextAnnotation.class));
// Dependencies (serialized via SemanticGraphSerializer registered on the Gson instance)
sentenceObj.add("basic-dependencies",
gson.toJsonTree(sentence.get(SemanticGraphCoreAnnotations.BasicDependenciesAnnotation.class)));
sentenceObj.add("collapsed-dependencies", gson.toJsonTree(
sentence.get(SemanticGraphCoreAnnotations.CollapsedDependenciesAnnotation.class)));
sentenceObj.add("collapsed-ccprocessed-dependencies", gson.toJsonTree(
sentence.get(SemanticGraphCoreAnnotations.CollapsedCCProcessedDependenciesAnnotation.class)));
// Constituents: print the parse tree to a string, one line, trimmed.
Tree tree = sentence.get(TreeCoreAnnotations.TreeAnnotation.class);
StringWriter treeStrWriter = new StringWriter();
TreePrint treePrinter = options.constituentTreePrinter;
if (treePrinter == AnnotationOutputter.DEFAULT_CONSTITUENT_TREE_PRINTER) {
// note the '==' -- we're overwriting the default, but only if it was not explicitly set otherwise
treePrinter = new TreePrint("oneline");
}
treePrinter.printTree(tree,
new PrintWriter(treeStrWriter, true));
sentenceObj.addProperty("parse", treeStrWriter.toString().trim());
// Sentiment (only when the sentiment annotator ran on this sentence)
Tree sentimentTree = sentence.get(SentimentCoreAnnotations.SentimentAnnotatedTree.class);
if (sentimentTree != null) {
int sentiment = RNNCoreAnnotations.getPredictedClass(sentimentTree);
String sentimentClass = sentence.get(SentimentCoreAnnotations.SentimentClass.class);
sentenceObj.addProperty("sentimentValue", Integer.toString(sentiment));
// Collapse e.g. "Very positive" to "Verypositive" for a compact label.
sentenceObj.addProperty("sentiment", sentimentClass.replaceAll("\\s+", ""));
}
// OpenIE relation triples
sentenceObj.add("openie", gson.toJsonTree(sentence
.get(NaturalLogicAnnotations.RelationTriplesAnnotation.class)));
// Tokens
if (sentence.get(CoreAnnotations.TokensAnnotation.class) != null) {
addTokens(gson, sentenceObj, sentence.get(CoreAnnotations.TokensAnnotation.class));
}
add(gson, sentenceObj, sentence);
jsonSentenceArray.add(sentenceObj);
}
jsonObject.add("sentences", jsonSentenceArray);
}
/**
 * Serializes each token (index, surface forms, lemma, character offsets, POS,
 * features text, NER, case, surrounding whitespace, timex, @JSONLabel extras)
 * and attaches the resulting array to sentenceObj under "tokens".
 */
private static void addTokens(Gson gson, JsonObject sentenceObj, List<CoreLabel> tokens) {
    JsonArray jsonTokenArray = new JsonArray();
    for (CoreLabel token : tokens) {
        JsonObject tokenObj = new JsonObject();
        tokenObj.addProperty("index", token.index());
        tokenObj.addProperty("word", token.word());
        tokenObj.addProperty("originalText", token.originalText());
        tokenObj.addProperty("lemma", token.lemma());
        tokenObj.addProperty("characterOffsetBegin", token.beginPosition());
        tokenObj.addProperty("characterOffsetEnd", token.endPosition());
        tokenObj.addProperty("pos", token.tag());
        // Consistency: also emit the morphological features text, as the newer
        // variant of this method elsewhere in the file does (omitted by Gson when null).
        tokenObj.addProperty("featuresText", token.get(CoreAnnotations.FeaturesAnnotation.class));
        tokenObj.addProperty("ner", token.ner());
        tokenObj.addProperty("normalizedNER",
                token.get(CoreAnnotations.NormalizedNamedEntityTagAnnotation.class));
        tokenObj.addProperty("speaker", token.get(CoreAnnotations.SpeakerAnnotation.class));
        tokenObj.addProperty("truecase", token.get(CoreAnnotations.TrueCaseAnnotation.class));
        tokenObj.addProperty("truecaseText", token.get(CoreAnnotations.TrueCaseTextAnnotation.class));
        // Whitespace/context immediately before and after the token.
        tokenObj.addProperty("before", token.get(CoreAnnotations.BeforeAnnotation.class));
        tokenObj.addProperty("after", token.get(CoreAnnotations.AfterAnnotation.class));
        // Timex
        tokenObj.add("timex", gson.toJsonTree(token.get(TimeAnnotations.TimexAnnotation.class)));
        add(gson, tokenObj, token);
        jsonTokenArray.add(tokenObj);
    }
    sentenceObj.add("tokens", jsonTokenArray);
}
/** Prints the annotation using the given builder and returns the JSON as a UTF-8 string. */
public static String jsonPrint(GsonBuilder gsonBuilder, Annotation annotation) throws IOException {
ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
new JSONOutputter(gsonBuilder).print(annotation, outputStream);
return new String(outputStream.toByteArray(), "UTF-8");
}
/** Prints the annotation to the given stream using the given builder. */
public static void jsonPrint(GsonBuilder gsonBuilder, Annotation annotation, OutputStream os) throws IOException {
new JSONOutputter(gsonBuilder).print(annotation, os);
}
/** Prints the annotation to the given stream, taking output options from the pipeline. */
public static void jsonPrint(GsonBuilder gsonBuilder, Annotation annotation, OutputStream os,
StanfordCoreNLP pipeline) throws IOException {
new JSONOutputter(gsonBuilder).print(annotation, os, pipeline);
}
/** Prints the annotation to the given stream with explicit options. */
public static void jsonPrint(GsonBuilder gsonBuilder, Annotation annotation, OutputStream os, Options options)
throws IOException {
new JSONOutputter(gsonBuilder).print(annotation, os, options);
}
/** Prints the annotation with default options and returns the JSON as a UTF-8 string. */
public static String jsonPrint(Annotation annotation) throws IOException {
return jsonPrint(annotation, new Options());
}
/** Prints the annotation with the given options and returns the JSON as a UTF-8 string. */
public static String jsonPrint(Annotation annotation, Options options) throws IOException {
ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
new JSONOutputter().print(annotation, outputStream, options);
return new String(outputStream.toByteArray(), "UTF-8");
}
/** Prints the annotation to the given stream with a default-configured outputter. */
public static void jsonPrint(Annotation annotation, OutputStream os) throws IOException {
new JSONOutputter().print(annotation, os);
}
/** Prints the annotation to the given stream, taking output options from the pipeline. */
public static void jsonPrint(Annotation annotation, OutputStream os,
StanfordCoreNLP pipeline) throws IOException {
new JSONOutputter().print(annotation, os, pipeline);
}
/** Prints the annotation to the given stream with explicit options. */
public static void jsonPrint(Annotation annotation, OutputStream os, Options options)
throws IOException {
new JSONOutputter().print(annotation, os, options);
}
}
|
Features text
|
utils-corenlp/src/main/java/eu/fbk/utils/corenlp/outputters/JSONOutputter.java
|
Features text
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.