lang
stringclasses 1
value | license
stringclasses 13
values | stderr
stringlengths 0
350
| commit
stringlengths 40
40
| returncode
int64 0
128
| repos
stringlengths 7
45.1k
| new_contents
stringlengths 0
1.87M
| new_file
stringlengths 6
292
| old_contents
stringlengths 0
1.87M
| message
stringlengths 6
9.26k
| old_file
stringlengths 6
292
| subject
stringlengths 0
4.45k
|
|---|---|---|---|---|---|---|---|---|---|---|---|
Java
|
apache-2.0
|
d54651e016513794d31e8e3f80f3fd6cf467fa12
| 0
|
MatthewTamlin/Spyglass
|
package com.matthewtamlin.spyglass.library.default_annotations;
import com.matthewtamlin.java_utilities.testing.Tested;
import com.matthewtamlin.spyglass.library.default_adapters.DefaultToBooleanResourceAdapter;
import com.matthewtamlin.spyglass.library.meta_annotations.Default;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
 * Defines a default for the annotated method, so that the Spyglass framework can invoke the
 * method if its handler annotation is not satisfied. The default value is supplied by the
 * {@link DefaultToBooleanResourceAdapter} declared in the {@code @Default} meta-annotation below.
 * This annotation should only be applied to methods which satisfy all of the following criteria:
 * <ul>
 * <li>The method has a handler annotation.</li>
 * <li>The method has no other default annotations.</li>
 * <li>The method has at least one boolean parameter.</li>
 * <li>Except for one boolean parameter, every parameter has a use annotation.</li>
 * </ul>
 */
@Tested(testMethod = "automated")
@Default(adapterClass = DefaultToBooleanResourceAdapter.class)
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.METHOD)
public @interface DefaultToBooleanResource {
  /**
   * @return the resource ID of the default value; must resolve to a boolean resource
   */
  int value();
}
|
library/src/main/java/com/matthewtamlin/spyglass/library/default_annotations/DefaultToBooleanResource.java
|
package com.matthewtamlin.spyglass.library.default_annotations;
import com.matthewtamlin.java_utilities.testing.Tested;
import com.matthewtamlin.spyglass.library.default_adapters.DefaultToBooleanResourceAdapter;
import com.matthewtamlin.spyglass.library.meta_annotations.Default;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
 * Defines a default for the annotated method, so that the Spyglass framework can invoke the
 * method if its handler annotation is not satisfied. The default value is supplied by the
 * {@link DefaultToBooleanResourceAdapter} declared in the {@code @Default} meta-annotation below.
 * This annotation should only be applied to methods which satisfy all of the following criteria:
 * <ul>
 * <li>The method has a handler annotation.</li>
 * <li>The method has no other default annotations.</li>
 * <li>The method has at least one boolean parameter.</li>
 * <li>One boolean parameter has no use annotation.</li>
 * <li>Every other parameter has a use annotation.</li>
 * </ul>
 */
@Tested(testMethod = "automated")
@Default(adapterClass = DefaultToBooleanResourceAdapter.class)
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.METHOD)
public @interface DefaultToBooleanResource {
  /**
   * @return the resource ID of the default value; must resolve to a boolean resource
   */
  int value();
}
|
Changed Javadoc
|
library/src/main/java/com/matthewtamlin/spyglass/library/default_annotations/DefaultToBooleanResource.java
|
Changed Javadoc
|
|
Java
|
apache-2.0
|
41a9ff81ac3790cfc67dee2f2b8d25b0c9f27dd5
| 0
|
facebook/buck,romanoid/buck,romanoid/buck,JoelMarcey/buck,kageiit/buck,romanoid/buck,Addepar/buck,Addepar/buck,JoelMarcey/buck,Addepar/buck,Addepar/buck,facebook/buck,SeleniumHQ/buck,romanoid/buck,zpao/buck,rmaz/buck,JoelMarcey/buck,zpao/buck,Addepar/buck,JoelMarcey/buck,romanoid/buck,rmaz/buck,rmaz/buck,SeleniumHQ/buck,nguyentruongtho/buck,facebook/buck,SeleniumHQ/buck,romanoid/buck,Addepar/buck,zpao/buck,romanoid/buck,romanoid/buck,romanoid/buck,Addepar/buck,nguyentruongtho/buck,SeleniumHQ/buck,Addepar/buck,nguyentruongtho/buck,facebook/buck,Addepar/buck,facebook/buck,SeleniumHQ/buck,SeleniumHQ/buck,zpao/buck,nguyentruongtho/buck,facebook/buck,JoelMarcey/buck,rmaz/buck,nguyentruongtho/buck,zpao/buck,kageiit/buck,nguyentruongtho/buck,JoelMarcey/buck,kageiit/buck,SeleniumHQ/buck,rmaz/buck,rmaz/buck,JoelMarcey/buck,rmaz/buck,zpao/buck,Addepar/buck,JoelMarcey/buck,rmaz/buck,SeleniumHQ/buck,kageiit/buck,romanoid/buck,Addepar/buck,SeleniumHQ/buck,kageiit/buck,romanoid/buck,romanoid/buck,rmaz/buck,JoelMarcey/buck,zpao/buck,SeleniumHQ/buck,Addepar/buck,SeleniumHQ/buck,JoelMarcey/buck,nguyentruongtho/buck,Addepar/buck,JoelMarcey/buck,rmaz/buck,SeleniumHQ/buck,rmaz/buck,JoelMarcey/buck,SeleniumHQ/buck,romanoid/buck,facebook/buck,kageiit/buck,rmaz/buck,kageiit/buck,rmaz/buck,JoelMarcey/buck
|
/*
* Copyright 2018-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.parser;
import com.facebook.buck.core.cell.Cell;
import com.facebook.buck.core.model.BuildTarget;
import com.facebook.buck.core.model.EmptyTargetConfiguration;
import com.facebook.buck.core.model.HasBuildTarget;
import com.facebook.buck.event.BuckEventBus;
import com.facebook.buck.event.PerfEventId;
import com.facebook.buck.event.SimplePerfEvent;
import com.facebook.buck.io.filesystem.PathMatcher;
import com.facebook.buck.io.filesystem.ProjectFilesystem;
import com.facebook.buck.io.filesystem.RecursiveFileMatcher;
import com.facebook.buck.io.watchman.Watchman;
import com.facebook.buck.parser.exceptions.BuildFileParseException;
import com.facebook.buck.parser.exceptions.BuildTargetException;
import com.facebook.buck.parser.exceptions.MissingBuildFileException;
import com.facebook.buck.util.MoreThrowables;
import com.google.common.base.Preconditions;
import com.google.common.base.Throwables;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
import com.google.common.collect.LinkedHashMultimap;
import com.google.common.collect.Multimap;
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.MoreExecutors;
import java.io.IOException;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.AbstractMap;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.concurrent.ExecutionException;
/** Responsible for discovering all the build targets that match a set of {@link TargetNodeSpec}. */
public class TargetSpecResolver {

  // Used to publish the "FindBuildFiles" SimplePerfEvent scope during build-file discovery.
  private final BuckEventBus eventBus;

  // Handed to TargetNodeSpec.findBuildFiles so discovery can use watchman queries.
  private final Watchman watchman;

  public TargetSpecResolver(BuckEventBus eventBus, Watchman watchman) {
    this.eventBus = eventBus;
    this.watchman = watchman;
  }

  /**
   * Resolves each spec into the set of build targets it matches.
   *
   * @param rootCell cell used to resolve each spec's cell path and file system
   * @param specs the target node specs to resolve; result order follows this iteration order
   * @param flavorEnhancer applied to each matched target, allowing its flavors to be adjusted
   * @param targetNodeProvider supplies parse jobs for single targets or whole build files
   * @param targetNodeFilter selects, from a build file's nodes, those matching a given spec
   * @return a list of sets of build targets where each set contains all build targets that match a
   *     corresponding {@link TargetNodeSpec}.
   */
  public <T extends HasBuildTarget> ImmutableList<ImmutableSet<BuildTarget>> resolveTargetSpecs(
      Cell rootCell,
      Iterable<? extends TargetNodeSpec> specs,
      FlavorEnhancer<T> flavorEnhancer,
      TargetNodeProviderForSpecResolver<T> targetNodeProvider,
      TargetNodeFilterForSpecResolver<T> targetNodeFilter)
      throws BuildFileParseException, InterruptedException, IOException {
    // Convert the input spec iterable into a list so we have a fixed ordering, which we'll rely on
    // when returning results.
    ImmutableList<TargetNodeSpec> orderedSpecs = ImmutableList.copyOf(specs);
    Multimap<Path, Integer> perBuildFileSpecs = groupSpecsByBuildFile(rootCell, orderedSpecs);
    // Kick off parse futures for each build file.
    ArrayList<ListenableFuture<Map.Entry<Integer, ImmutableSet<BuildTarget>>>> targetFutures =
        new ArrayList<>();
    for (Path buildFile : perBuildFileSpecs.keySet()) {
      Collection<Integer> buildFileSpecs = perBuildFileSpecs.get(buildFile);
      TargetNodeSpec firstSpec = orderedSpecs.get(Iterables.get(buildFileSpecs, 0));
      Cell cell = rootCell.getCell(firstSpec.getBuildFileSpec().getCellPath());
      // Format a proper error message for non-existent build files.
      if (!cell.getFilesystem().isFile(buildFile)) {
        throw new MissingBuildFileException(
            firstSpec.toString(), cell.getFilesystem().getRootPath().relativize(buildFile));
      }
      for (int index : buildFileSpecs) {
        TargetNodeSpec spec = orderedSpecs.get(index);
        handleTargetNodeSpec(
            flavorEnhancer,
            targetNodeProvider,
            targetNodeFilter,
            targetFutures,
            cell,
            buildFile,
            index,
            spec);
      }
    }
    return collectTargets(orderedSpecs.size(), targetFutures);
  }

  // Resolve all the build files from all the target specs. We store these into a multi-map which
  // maps the path to the build file to the index of its spec in the ordered spec list.
  private Multimap<Path, Integer> groupSpecsByBuildFile(
      Cell rootCell, ImmutableList<TargetNodeSpec> orderedSpecs)
      throws IOException, InterruptedException {
    ParserConfig parserConfig = rootCell.getBuckConfig().getView(ParserConfig.class);
    ParserConfig.BuildFileSearchMethod buildFileSearchMethod =
        parserConfig.getBuildFileSearchMethod();
    Multimap<Path, Integer> perBuildFileSpecs = LinkedHashMultimap.create();
    for (int index = 0; index < orderedSpecs.size(); index++) {
      TargetNodeSpec spec = orderedSpecs.get(index);
      Cell cell = rootCell.getCell(spec.getBuildFileSpec().getCellPath());
      ImmutableSet<Path> buildFiles;
      try (SimplePerfEvent.Scope perfEventScope =
          SimplePerfEvent.scope(
              eventBus, PerfEventId.of("FindBuildFiles"), "targetNodeSpec", spec)) {
        // Iterate over the build files the given target node spec returns.
        ProjectFilesystem filesystem = cell.getFilesystem();
        // Skip blacklisted paths, buck-out, and the roots of any sub-cells so recursive specs
        // do not cross cell boundaries.
        ImmutableSet.Builder<PathMatcher> parsingIgnores =
            ImmutableSet.builderWithExpectedSize(filesystem.getBlacklistedPaths().size() + 1);
        parsingIgnores.addAll(filesystem.getBlacklistedPaths());
        parsingIgnores.add(RecursiveFileMatcher.of(filesystem.getBuckPaths().getBuckOut()));
        for (Path subCellRoots : cell.getKnownRoots()) {
          if (!subCellRoots.equals(cell.getRoot())) {
            parsingIgnores.add(RecursiveFileMatcher.of(filesystem.relativize(subCellRoots)));
          }
        }
        buildFiles =
            spec.getBuildFileSpec()
                .findBuildFiles(
                    cell.getBuildFileName(),
                    filesystem.asView().withView(Paths.get(""), ImmutableSet.of()),
                    watchman,
                    buildFileSearchMethod,
                    parsingIgnores.build());
      }
      for (Path buildFile : buildFiles) {
        perBuildFileSpecs.put(buildFile, index);
      }
    }
    return perBuildFileSpecs;
  }

  // Kicks off one parse future for a (build file, spec) pair and appends it to targetFutures;
  // each future resolves to an entry of (spec index -> matched build targets).
  private <T extends HasBuildTarget> void handleTargetNodeSpec(
      FlavorEnhancer<T> flavorEnhancer,
      TargetNodeProviderForSpecResolver<T> targetNodeProvider,
      TargetNodeFilterForSpecResolver<T> targetNodeFilter,
      List<ListenableFuture<Map.Entry<Integer, ImmutableSet<BuildTarget>>>> targetFutures,
      Cell cell,
      Path buildFile,
      int index,
      TargetNodeSpec spec) {
    if (spec instanceof BuildTargetSpec) {
      // The spec names a single build target, so only that node needs to be parsed.
      BuildTargetSpec buildTargetSpec = (BuildTargetSpec) spec;
      targetFutures.add(
          Futures.transform(
              targetNodeProvider.getTargetNodeJob(
                  buildTargetSpec
                      .getUnconfiguredBuildTarget()
                      .configure(EmptyTargetConfiguration.INSTANCE)),
              node -> {
                ImmutableSet<BuildTarget> buildTargets =
                    applySpecFilter(spec, ImmutableList.of(node), flavorEnhancer, targetNodeFilter);
                // A single-target spec must match exactly the node that was parsed for it.
                Preconditions.checkState(
                    buildTargets.size() == 1,
                    "BuildTargetSpec %s filter discarded target %s, but was not supposed to.",
                    spec,
                    node.getBuildTarget());
                return new AbstractMap.SimpleEntry<>(index, buildTargets);
              },
              MoreExecutors.directExecutor()));
    } else {
      // Build up a list of all target nodes from the build file.
      targetFutures.add(
          Futures.transform(
              targetNodeProvider.getAllTargetNodesJob(cell, buildFile),
              nodes ->
                  new AbstractMap.SimpleEntry<>(
                      index, applySpecFilter(spec, nodes, flavorEnhancer, targetNodeFilter)),
              MoreExecutors.directExecutor()));
    }
  }

  // Blocks on all parse futures and regroups their results back into input spec order.
  private ImmutableList<ImmutableSet<BuildTarget>> collectTargets(
      int specsCount,
      List<ListenableFuture<Entry<Integer, ImmutableSet<BuildTarget>>>> targetFutures)
      throws InterruptedException {
    // Walk through and resolve all the futures, and place their results in a multimap that
    // is indexed by the integer representing the input target spec order.
    LinkedHashMultimap<Integer, BuildTarget> targetsMap = LinkedHashMultimap.create();
    try {
      for (ListenableFuture<Map.Entry<Integer, ImmutableSet<BuildTarget>>> targetFuture :
          targetFutures) {
        Map.Entry<Integer, ImmutableSet<BuildTarget>> result = targetFuture.get();
        targetsMap.putAll(result.getKey(), result.getValue());
      }
    } catch (ExecutionException e) {
      // Re-surface interruption as InterruptedException; otherwise rethrow the underlying cause
      // directly when unchecked, or wrapped in a RuntimeException when checked.
      MoreThrowables.throwIfAnyCauseInstanceOf(e, InterruptedException.class);
      Throwables.throwIfUnchecked(e.getCause());
      throw new RuntimeException(e);
    }
    // Finally, pull out the final build target results in input target spec order, and place them
    // into a list of sets that exactly matches the input order.
    ImmutableList.Builder<ImmutableSet<BuildTarget>> targets = ImmutableList.builder();
    for (int index = 0; index < specsCount; index++) {
      targets.add(ImmutableSet.copyOf(targetsMap.get(index)));
    }
    return targets.build();
  }

  // Applies the spec's filter to the candidate nodes, then lets the flavor enhancer adjust each
  // surviving target before it is added to the result set.
  private <T extends HasBuildTarget> ImmutableSet<BuildTarget> applySpecFilter(
      TargetNodeSpec spec,
      ImmutableList<T> targetNodes,
      FlavorEnhancer<T> flavorEnhancer,
      TargetNodeFilterForSpecResolver<T> targetNodeFilter) {
    ImmutableSet.Builder<BuildTarget> targets = ImmutableSet.builder();
    ImmutableMap<BuildTarget, T> partialTargets = targetNodeFilter.filter(spec, targetNodes);
    for (Map.Entry<BuildTarget, T> partialTarget : partialTargets.entrySet()) {
      BuildTarget target =
          flavorEnhancer.enhanceFlavors(
              partialTarget.getKey(), partialTarget.getValue(), spec.getTargetType());
      targets.add(target);
    }
    return targets.build();
  }

  /** Allows to change flavors of some targets while performing the resolution. */
  public interface FlavorEnhancer<T extends HasBuildTarget> {
    BuildTarget enhanceFlavors(
        BuildTarget target, T targetNode, TargetNodeSpec.TargetType targetType);
  }

  /** Provides target nodes of a given type. */
  public interface TargetNodeProviderForSpecResolver<T extends HasBuildTarget> {
    ListenableFuture<T> getTargetNodeJob(BuildTarget target) throws BuildTargetException;

    ListenableFuture<ImmutableList<T>> getAllTargetNodesJob(Cell cell, Path buildFile)
        throws BuildTargetException;
  }

  /** Performs filtering of target nodes using a given {@link TargetNodeSpec}. */
  public interface TargetNodeFilterForSpecResolver<T extends HasBuildTarget> {
    ImmutableMap<BuildTarget, T> filter(TargetNodeSpec spec, Iterable<T> nodes);
  }
}
|
src/com/facebook/buck/parser/TargetSpecResolver.java
|
/*
* Copyright 2018-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.parser;
import com.facebook.buck.core.cell.Cell;
import com.facebook.buck.core.exceptions.HumanReadableException;
import com.facebook.buck.core.model.BuildTarget;
import com.facebook.buck.core.model.EmptyTargetConfiguration;
import com.facebook.buck.core.model.HasBuildTarget;
import com.facebook.buck.event.BuckEventBus;
import com.facebook.buck.event.PerfEventId;
import com.facebook.buck.event.SimplePerfEvent;
import com.facebook.buck.io.filesystem.PathMatcher;
import com.facebook.buck.io.filesystem.ProjectFilesystem;
import com.facebook.buck.io.filesystem.RecursiveFileMatcher;
import com.facebook.buck.io.watchman.Watchman;
import com.facebook.buck.parser.exceptions.BuildFileParseException;
import com.facebook.buck.parser.exceptions.BuildTargetException;
import com.facebook.buck.parser.exceptions.MissingBuildFileException;
import com.facebook.buck.util.MoreThrowables;
import com.google.common.base.Preconditions;
import com.google.common.base.Throwables;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
import com.google.common.collect.LinkedHashMultimap;
import com.google.common.collect.Multimap;
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.MoreExecutors;
import java.io.IOException;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.AbstractMap;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.concurrent.ExecutionException;
/** Responsible for discovering all the build targets that match a set of {@link TargetNodeSpec}. */
public class TargetSpecResolver {

  // Used to publish the "FindBuildFiles" SimplePerfEvent scope during build-file discovery.
  private final BuckEventBus eventBus;

  // Handed to TargetNodeSpec.findBuildFiles so discovery can use watchman queries.
  private final Watchman watchman;

  public TargetSpecResolver(BuckEventBus eventBus, Watchman watchman) {
    this.eventBus = eventBus;
    this.watchman = watchman;
  }

  /**
   * Resolves each spec into the set of build targets it matches.
   *
   * @param rootCell cell used to resolve each spec's cell path and file system
   * @param specs the target node specs to resolve; result order follows this iteration order
   * @param flavorEnhancer applied to each matched target, allowing its flavors to be adjusted
   * @param targetNodeProvider supplies parse jobs for single targets or whole build files
   * @param targetNodeFilter selects, from a build file's nodes, those matching a given spec
   * @return a list of sets of build targets where each set contains all build targets that match a
   *     corresponding {@link TargetNodeSpec}.
   */
  public <T extends HasBuildTarget> ImmutableList<ImmutableSet<BuildTarget>> resolveTargetSpecs(
      Cell rootCell,
      Iterable<? extends TargetNodeSpec> specs,
      FlavorEnhancer<T> flavorEnhancer,
      TargetNodeProviderForSpecResolver<T> targetNodeProvider,
      TargetNodeFilterForSpecResolver<T> targetNodeFilter)
      throws BuildFileParseException, InterruptedException, IOException {
    // Convert the input spec iterable into a list so we have a fixed ordering, which we'll rely on
    // when returning results.
    ImmutableList<TargetNodeSpec> orderedSpecs = ImmutableList.copyOf(specs);
    Multimap<Path, Integer> perBuildFileSpecs = groupSpecsByBuildFile(rootCell, orderedSpecs);
    // Kick off parse futures for each build file.
    ArrayList<ListenableFuture<Map.Entry<Integer, ImmutableSet<BuildTarget>>>> targetFutures =
        new ArrayList<>();
    for (Path buildFile : perBuildFileSpecs.keySet()) {
      Collection<Integer> buildFileSpecs = perBuildFileSpecs.get(buildFile);
      TargetNodeSpec firstSpec = orderedSpecs.get(Iterables.get(buildFileSpecs, 0));
      Cell cell = rootCell.getCell(firstSpec.getBuildFileSpec().getCellPath());
      // Format a proper error message for non-existent build files.
      if (!cell.getFilesystem().isFile(buildFile)) {
        throw new MissingBuildFileException(
            firstSpec.toString(), cell.getFilesystem().getRootPath().relativize(buildFile));
      }
      for (int index : buildFileSpecs) {
        TargetNodeSpec spec = orderedSpecs.get(index);
        handleTargetNodeSpec(
            flavorEnhancer,
            targetNodeProvider,
            targetNodeFilter,
            targetFutures,
            cell,
            buildFile,
            index,
            spec);
      }
    }
    return collectTargets(orderedSpecs.size(), targetFutures);
  }

  // Resolve all the build files from all the target specs. We store these into a multi-map which
  // maps the path to the build file to the index of its spec in the ordered spec list.
  private Multimap<Path, Integer> groupSpecsByBuildFile(
      Cell rootCell, ImmutableList<TargetNodeSpec> orderedSpecs)
      throws IOException, InterruptedException {
    ParserConfig parserConfig = rootCell.getBuckConfig().getView(ParserConfig.class);
    ParserConfig.BuildFileSearchMethod buildFileSearchMethod =
        parserConfig.getBuildFileSearchMethod();
    Multimap<Path, Integer> perBuildFileSpecs = LinkedHashMultimap.create();
    for (int index = 0; index < orderedSpecs.size(); index++) {
      TargetNodeSpec spec = orderedSpecs.get(index);
      Cell cell = rootCell.getCell(spec.getBuildFileSpec().getCellPath());
      ImmutableSet<Path> buildFiles;
      try (SimplePerfEvent.Scope perfEventScope =
          SimplePerfEvent.scope(
              eventBus, PerfEventId.of("FindBuildFiles"), "targetNodeSpec", spec)) {
        // Iterate over the build files the given target node spec returns.
        ProjectFilesystem filesystem = cell.getFilesystem();
        // Skip blacklisted paths, buck-out, and the roots of any sub-cells so recursive specs
        // do not cross cell boundaries.
        ImmutableSet.Builder<PathMatcher> parsingIgnores =
            ImmutableSet.builderWithExpectedSize(filesystem.getBlacklistedPaths().size() + 1);
        parsingIgnores.addAll(filesystem.getBlacklistedPaths());
        parsingIgnores.add(RecursiveFileMatcher.of(filesystem.getBuckPaths().getBuckOut()));
        for (Path subCellRoots : cell.getKnownRoots()) {
          if (!subCellRoots.equals(cell.getRoot())) {
            parsingIgnores.add(RecursiveFileMatcher.of(filesystem.relativize(subCellRoots)));
          }
        }
        buildFiles =
            spec.getBuildFileSpec()
                .findBuildFiles(
                    cell.getBuildFileName(),
                    filesystem.asView().withView(Paths.get(""), ImmutableSet.of()),
                    watchman,
                    buildFileSearchMethod,
                    parsingIgnores.build());
      }
      for (Path buildFile : buildFiles) {
        perBuildFileSpecs.put(buildFile, index);
      }
    }
    return perBuildFileSpecs;
  }

  // Kicks off one parse future for a (build file, spec) pair and appends it to targetFutures;
  // each future resolves to an entry of (spec index -> matched build targets).
  private <T extends HasBuildTarget> void handleTargetNodeSpec(
      FlavorEnhancer<T> flavorEnhancer,
      TargetNodeProviderForSpecResolver<T> targetNodeProvider,
      TargetNodeFilterForSpecResolver<T> targetNodeFilter,
      List<ListenableFuture<Map.Entry<Integer, ImmutableSet<BuildTarget>>>> targetFutures,
      Cell cell,
      Path buildFile,
      int index,
      TargetNodeSpec spec) {
    if (spec instanceof BuildTargetSpec) {
      // The spec names a single build target, so only that node needs to be parsed.
      BuildTargetSpec buildTargetSpec = (BuildTargetSpec) spec;
      targetFutures.add(
          Futures.transform(
              targetNodeProvider.getTargetNodeJob(
                  buildTargetSpec
                      .getUnconfiguredBuildTarget()
                      .configure(EmptyTargetConfiguration.INSTANCE)),
              node -> {
                ImmutableSet<BuildTarget> buildTargets =
                    applySpecFilter(spec, ImmutableList.of(node), flavorEnhancer, targetNodeFilter);
                // A single-target spec must match exactly the node that was parsed for it.
                Preconditions.checkState(
                    buildTargets.size() == 1,
                    "BuildTargetSpec %s filter discarded target %s, but was not supposed to.",
                    spec,
                    node.getBuildTarget());
                return new AbstractMap.SimpleEntry<>(index, buildTargets);
              },
              MoreExecutors.directExecutor()));
    } else {
      // Build up a list of all target nodes from the build file.
      targetFutures.add(
          Futures.transform(
              targetNodeProvider.getAllTargetNodesJob(cell, buildFile),
              nodes ->
                  new AbstractMap.SimpleEntry<>(
                      index, applySpecFilter(spec, nodes, flavorEnhancer, targetNodeFilter)),
              MoreExecutors.directExecutor()));
    }
  }

  // Blocks on all parse futures and regroups their results back into input spec order.
  private ImmutableList<ImmutableSet<BuildTarget>> collectTargets(
      int specsCount,
      List<ListenableFuture<Entry<Integer, ImmutableSet<BuildTarget>>>> targetFutures)
      throws InterruptedException {
    // Walk through and resolve all the futures, and place their results in a multimap that
    // is indexed by the integer representing the input target spec order.
    LinkedHashMultimap<Integer, BuildTarget> targetsMap = LinkedHashMultimap.create();
    try {
      for (ListenableFuture<Map.Entry<Integer, ImmutableSet<BuildTarget>>> targetFuture :
          targetFutures) {
        Map.Entry<Integer, ImmutableSet<BuildTarget>> result = targetFuture.get();
        targetsMap.putAll(result.getKey(), result.getValue());
      }
    } catch (ExecutionException e) {
      // Unwrap well-known causes so callers see the original parse/target/user-facing exception
      // (or interruption) instead of the ExecutionException wrapper; any other unchecked cause is
      // rethrown directly, and remaining checked causes are wrapped in a RuntimeException.
      MoreThrowables.throwIfAnyCauseInstanceOf(e, BuildFileParseException.class);
      MoreThrowables.throwIfAnyCauseInstanceOf(e, BuildTargetException.class);
      MoreThrowables.throwIfAnyCauseInstanceOf(e, HumanReadableException.class);
      MoreThrowables.throwIfAnyCauseInstanceOf(e, InterruptedException.class);
      Throwables.throwIfUnchecked(e.getCause());
      throw new RuntimeException(e);
    }
    // Finally, pull out the final build target results in input target spec order, and place them
    // into a list of sets that exactly matches the input order.
    ImmutableList.Builder<ImmutableSet<BuildTarget>> targets = ImmutableList.builder();
    for (int index = 0; index < specsCount; index++) {
      targets.add(ImmutableSet.copyOf(targetsMap.get(index)));
    }
    return targets.build();
  }

  // Applies the spec's filter to the candidate nodes, then lets the flavor enhancer adjust each
  // surviving target before it is added to the result set.
  private <T extends HasBuildTarget> ImmutableSet<BuildTarget> applySpecFilter(
      TargetNodeSpec spec,
      ImmutableList<T> targetNodes,
      FlavorEnhancer<T> flavorEnhancer,
      TargetNodeFilterForSpecResolver<T> targetNodeFilter) {
    ImmutableSet.Builder<BuildTarget> targets = ImmutableSet.builder();
    ImmutableMap<BuildTarget, T> partialTargets = targetNodeFilter.filter(spec, targetNodes);
    for (Map.Entry<BuildTarget, T> partialTarget : partialTargets.entrySet()) {
      BuildTarget target =
          flavorEnhancer.enhanceFlavors(
              partialTarget.getKey(), partialTarget.getValue(), spec.getTargetType());
      targets.add(target);
    }
    return targets.build();
  }

  /** Allows to change flavors of some targets while performing the resolution. */
  public interface FlavorEnhancer<T extends HasBuildTarget> {
    BuildTarget enhanceFlavors(
        BuildTarget target, T targetNode, TargetNodeSpec.TargetType targetType);
  }

  /** Provides target nodes of a given type. */
  public interface TargetNodeProviderForSpecResolver<T extends HasBuildTarget> {
    ListenableFuture<T> getTargetNodeJob(BuildTarget target) throws BuildTargetException;

    ListenableFuture<ImmutableList<T>> getAllTargetNodesJob(Cell cell, Path buildFile)
        throws BuildTargetException;
  }

  /** Performs filtering of target nodes using a given {@link TargetNodeSpec}. */
  public interface TargetNodeFilterForSpecResolver<T extends HasBuildTarget> {
    ImmutableMap<BuildTarget, T> filter(TargetNodeSpec spec, Iterable<T> nodes);
  }
}
|
Don't unwrap exceptions in TargetSpecResolver
Summary:
Buck's exception handling unwraps exceptions much better than this.
Unwrapping them here (especially with throwIfAnyInstanceOf) is bound to
lose useful information.
Reviewed By: sbalabanov
fbshipit-source-id: f84b3ecc59
|
src/com/facebook/buck/parser/TargetSpecResolver.java
|
Don't unwrap exceptions in TargetSpecResolver
|
|
Java
|
apache-2.0
|
45da72e44e388152a52948cf04faeef2ba985b82
| 0
|
splunk/splunk-shuttl,splunk/splunk-shuttl,splunk/splunk-shuttl,splunk/splunk-shuttl,splunk/splunk-shuttl,splunk/splunk-shuttl
|
// Copyright (C) 2011 Splunk Inc.
//
// Splunk Inc. licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.splunk.shep.server;
import java.io.File;
import org.apache.log4j.Logger;
import org.eclipse.jetty.server.Connector;
import org.eclipse.jetty.server.Server;
import org.eclipse.jetty.webapp.WebAppContext;
import org.eclipse.jetty.xml.XmlConfiguration;
import com.splunk.shep.server.mbeans.ShepServer;
/**
 * Main class that starts up Shep with the integrated Jetty server.
 *
 * <p>Configures a Jetty server from an XML descriptor, mounts the Shep web app, binds the HTTP
 * connector to the host/port exposed by the {@link ShepServer} MBean, and starts the server. Any
 * startup failure is logged and the process exits with status 1.
 *
 * @author kpakkirisamy
 */
public class ShepJettyServer {

  public static void main(String args[]) {
    org.apache.log4j.Logger logger = Logger.getLogger("ShepServer");
    try {
      Server jetty = new Server();
      // TODO: Replace paths relative to /bin/
      File jettyDescriptor = new File("../jetty/shep.xml");
      new XmlConfiguration(jettyDescriptor.toURI().toURL()).configure(jetty);
      // TODO: Replace paths relative to /bin/
      jetty.setHandler(new WebAppContext("../webapps/shep", "/shep"));
      ShepServer serverBean = new ShepServer();
      for (Connector connector : jetty.getConnectors()) {
        logger.debug("Connector Name: " + connector.getName());
        logger.debug(" host: " + connector.getHost());
        logger.debug(" port: " + connector.getPort());
        if (connector.getName().equals("Splunk.Shep.Http")) {
          // Bind the HTTP connector to the host/port configured on the Shep server MBean.
          connector.setHost(serverBean.getHttpHost());
          connector.setPort(serverBean.getHttpPort());
        }
      }
      jetty.start();
    } catch (Exception e) {
      logger.error("Error during startup", e);
      System.exit(1);
    }
  }
}
|
src/java/com/splunk/shep/server/ShepJettyServer.java
|
// Copyright (C) 2011 Splunk Inc.
//
// Splunk Inc. licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.splunk.shep.server;
import java.io.File;
import org.apache.log4j.Logger;
import org.eclipse.jetty.server.Connector;
import org.eclipse.jetty.server.Server;
import org.eclipse.jetty.webapp.WebAppContext;
import org.eclipse.jetty.xml.XmlConfiguration;
import com.splunk.shep.server.mbeans.ShepServer;
/**
 * Main class that starts up Shep with the integrated Jetty server.
 *
 * <p>Configures a Jetty server from an XML descriptor, mounts the Shep web app, binds the HTTP
 * connector to the host/port exposed by the {@link ShepServer} MBean, and starts the server. Any
 * startup failure is logged and the process exits with status 1.
 *
 * @author kpakkirisamy
 */
public class ShepJettyServer {

  public static void main(String args[]) {
    org.apache.log4j.Logger logger = Logger.getLogger("ShepServer");
    try {
      Server server = new Server();
      // TODO: Replace paths relative to /bin/
      // Use toURI().toURL() instead of the deprecated File.toURL(), which does not escape
      // characters that are illegal in URLs.
      XmlConfiguration configuration = new XmlConfiguration(new File(
          "../jetty/shep.xml").toURI().toURL());
      configuration.configure(server);
      // TODO: Replace paths relative to /bin/
      server.setHandler(new WebAppContext("../webapps/shep", "/shep"));
      ShepServer servermbean = new ShepServer();
      Connector connectors[] = server.getConnectors();
      for (Connector c : connectors) {
        logger.debug("Connector Name: " + c.getName());
        logger.debug(" host: " + c.getHost());
        logger.debug(" port: " + c.getPort());
        if (c.getName().equals("Splunk.Shep.Http")) {
          // Bind the HTTP connector to the host/port configured on the Shep server MBean.
          c.setHost(servermbean.getHttpHost());
          c.setPort(servermbean.getHttpPort());
        }
      }
      server.start();
    } catch (Exception e) {
      logger.error("Error during startup", e);
      System.exit(1);
    }
  }
}
|
added TODO and removed use of deprecated method
|
src/java/com/splunk/shep/server/ShepJettyServer.java
|
added TODO and removed use of deprecated method
|
|
Java
|
apache-2.0
|
e319820b1b14304f00e40f3dba2b9c6e4ecda6bf
| 0
|
darionyaphet/flink,apache/flink,twalthr/flink,StephanEwen/incubator-flink,twalthr/flink,greghogan/flink,apache/flink,twalthr/flink,greghogan/flink,kl0u/flink,godfreyhe/flink,StephanEwen/incubator-flink,rmetzger/flink,StephanEwen/incubator-flink,godfreyhe/flink,twalthr/flink,kl0u/flink,zjureel/flink,xccui/flink,darionyaphet/flink,zentol/flink,twalthr/flink,godfreyhe/flink,tony810430/flink,tony810430/flink,tony810430/flink,zjureel/flink,wwjiang007/flink,wwjiang007/flink,clarkyzl/flink,rmetzger/flink,wwjiang007/flink,godfreyhe/flink,rmetzger/flink,gyfora/flink,xccui/flink,tony810430/flink,aljoscha/flink,gyfora/flink,zjureel/flink,zjureel/flink,rmetzger/flink,tillrohrmann/flink,kl0u/flink,aljoscha/flink,tillrohrmann/flink,rmetzger/flink,darionyaphet/flink,zentol/flink,zentol/flink,aljoscha/flink,wwjiang007/flink,apache/flink,zentol/flink,kl0u/flink,zjureel/flink,twalthr/flink,zjureel/flink,lincoln-lil/flink,StephanEwen/incubator-flink,clarkyzl/flink,aljoscha/flink,apache/flink,godfreyhe/flink,tillrohrmann/flink,tony810430/flink,tillrohrmann/flink,lincoln-lil/flink,lincoln-lil/flink,zentol/flink,tony810430/flink,StephanEwen/incubator-flink,zjureel/flink,gyfora/flink,rmetzger/flink,lincoln-lil/flink,zentol/flink,tillrohrmann/flink,greghogan/flink,kl0u/flink,zentol/flink,tillrohrmann/flink,apache/flink,StephanEwen/incubator-flink,xccui/flink,gyfora/flink,wwjiang007/flink,clarkyzl/flink,xccui/flink,greghogan/flink,wwjiang007/flink,gyfora/flink,apache/flink,lincoln-lil/flink,godfreyhe/flink,xccui/flink,darionyaphet/flink,clarkyzl/flink,xccui/flink,gyfora/flink,greghogan/flink,aljoscha/flink,twalthr/flink,lincoln-lil/flink,tony810430/flink,rmetzger/flink,clarkyzl/flink,apache/flink,lincoln-lil/flink,greghogan/flink,xccui/flink,godfreyhe/flink,aljoscha/flink,tillrohrmann/flink,wwjiang007/flink,gyfora/flink,kl0u/flink,darionyaphet/flink
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.connectors.hive;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.HiveVersionTestUtil;
import org.apache.flink.table.api.SqlDialect;
import org.apache.flink.table.api.StatementSet;
import org.apache.flink.table.api.TableEnvironment;
import org.apache.flink.table.api.TableSchema;
import org.apache.flink.table.api.constraints.UniqueConstraint;
import org.apache.flink.table.api.internal.TableImpl;
import org.apache.flink.table.catalog.CatalogBaseTable;
import org.apache.flink.table.catalog.ObjectPath;
import org.apache.flink.table.catalog.hive.HiveCatalog;
import org.apache.flink.table.catalog.hive.HiveTestUtils;
import org.apache.flink.table.catalog.hive.client.HiveMetastoreClientFactory;
import org.apache.flink.table.catalog.hive.client.HiveMetastoreClientWrapper;
import org.apache.flink.table.catalog.hive.client.HiveShimLoader;
import org.apache.flink.types.Row;
import org.apache.flink.util.ArrayUtils;
import org.apache.flink.util.CollectionUtil;
import com.klarna.hiverunner.HiveShell;
import com.klarna.hiverunner.annotations.HiveSQL;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.api.Table;
import org.junit.Assume;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.runner.RunWith;
import java.sql.Date;
import java.sql.Timestamp;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
/**
* Test hive connector with table API.
*/
@RunWith(FlinkStandaloneHiveRunner.class)
public class TableEnvHiveConnectorITCase {

    // Embedded Hive instance injected by the HiveRunner framework before the tests run.
    @HiveSQL(files = {})
    private static HiveShell hiveShell;

    // Shared catalog/metastore handles, initialized once in setup() and reused by every test.
    private static HiveCatalog hiveCatalog;
    private static HiveMetastoreClientWrapper hmsClient;

    /** Opens the HiveCatalog and metastore client backed by the embedded Hive instance. */
    @BeforeClass
    public static void setup() {
        HiveConf hiveConf = hiveShell.getHiveConf();
        hiveCatalog = HiveTestUtils.createHiveCatalog(hiveConf);
        hiveCatalog.open();
        hmsClient = HiveMetastoreClientFactory.create(hiveConf, HiveShimLoader.getHiveVersion());
    }

    /**
     * Verifies that inserting a null partition value produces a partition folder named with
     * Hive's configured default partition name, and that Flink reads the null value back.
     */
    @Test
    public void testDefaultPartitionName() throws Exception {
        TableEnvironment tableEnv = getTableEnvWithHiveCatalog();
        tableEnv.executeSql("create database db1");
        tableEnv.executeSql("create table db1.src (x int, y int)");
        tableEnv.executeSql("create table db1.part (x int) partitioned by (y int)");
        HiveTestUtils.createTextTableInserter(hiveShell, "db1", "src").addRow(new Object[]{1, 1}).addRow(new Object[]{2, null}).commit();
        // test generating partitions with default name
        tableEnv.executeSql("insert into db1.part select * from db1.src").await();
        HiveConf hiveConf = hiveShell.getHiveConf();
        String defaultPartName = hiveConf.getVar(HiveConf.ConfVars.DEFAULTPARTITIONNAME);
        Table hiveTable = hmsClient.getTable("db1", "part");
        // The null partition value must have been written under "y=<default partition name>".
        Path defaultPartPath = new Path(hiveTable.getSd().getLocation(), "y=" + defaultPartName);
        FileSystem fs = defaultPartPath.getFileSystem(hiveConf);
        assertTrue(fs.exists(defaultPartPath));
        TableImpl flinkTable = (TableImpl) tableEnv.sqlQuery("select y, x from db1.part order by x");
        List<Row> rows = CollectionUtil.iteratorToList(flinkTable.execute().collect());
        assertEquals(Arrays.toString(new String[]{"1,1", "null,2"}), rows.toString());
        tableEnv.executeSql("drop database db1 cascade");
    }

    /**
     * Runs an aggregate query to make sure resolving built-in functions does not fail when a
     * function of the same name does not exist in the catalog.
     */
    @Test
    public void testGetNonExistingFunction() throws Exception {
        TableEnvironment tableEnv = getTableEnvWithHiveCatalog();
        tableEnv.executeSql("create database db1");
        tableEnv.executeSql("create table db1.src (d double, s string)");
        tableEnv.executeSql("create table db1.dest (x bigint)");
        // just make sure the query runs through, no need to verify result
        tableEnv.executeSql("insert into db1.dest select count(d) from db1.src").await();
        tableEnv.executeSql("drop database db1 cascade");
    }

    /** Round-trips data through tables in each supported storage format. */
    @Test
    public void testDifferentFormats() throws Exception {
        String[] formats = new String[]{"orc", "parquet", "sequencefile", "csv", "avro"};
        for (String format : formats) {
            if (format.equals("avro") && !HiveVersionTestUtil.HIVE_110_OR_LATER) {
                // timestamp is not supported for avro tables before 1.1.0
                continue;
            }
            readWriteFormat(format);
        }
    }

    /**
     * Creates partitioned src/dest tables in the given format, writes via Hive, reads with
     * Flink, writes back with Flink and verifies the result on the Hive side.
     */
    private void readWriteFormat(String format) throws Exception {
        TableEnvironment tableEnv = getTableEnvWithHiveCatalog();
        tableEnv.executeSql("create database db1");
        // create source and dest tables
        String suffix;
        if (format.equals("csv")) {
            suffix = "row format serde 'org.apache.hadoop.hive.serde2.OpenCSVSerde'";
        } else {
            suffix = "stored as " + format;
        }
        String tableSchema;
        // use 2018-08-20 00:00:00.1 to avoid multi-version print difference.
        List<Object> row1 = new ArrayList<>(Arrays.asList(1, "a", "2018-08-20 00:00:00.1"));
        List<Object> row2 = new ArrayList<>(Arrays.asList(2, "b", "2019-08-26 00:00:00.1"));
        // some data types are not supported for parquet tables in early versions -- https://issues.apache.org/jira/browse/HIVE-6384
        if (HiveVersionTestUtil.HIVE_120_OR_LATER || !format.equals("parquet")) {
            tableSchema = "(i int,s string,ts timestamp,dt date)";
            row1.add("2018-08-20");
            row2.add("2019-08-26");
        } else {
            tableSchema = "(i int,s string,ts timestamp)";
        }
        tableEnv.executeSql(String.format(
                "create table db1.src %s partitioned by (p1 string, p2 timestamp) %s", tableSchema, suffix));
        tableEnv.executeSql(String.format(
                "create table db1.dest %s partitioned by (p1 string, p2 timestamp) %s", tableSchema, suffix));
        // prepare source data with Hive
        // TABLE keyword in INSERT INTO is mandatory prior to 1.1.0
        hiveShell.execute(String.format(
                "insert into table db1.src partition(p1='first',p2='2018-08-20 00:00:00.1') values (%s)",
                toRowValue(row1)));
        hiveShell.execute(String.format(
                "insert into table db1.src partition(p1='second',p2='2018-08-26 00:00:00.1') values (%s)",
                toRowValue(row2)));
        // Expected rows: data columns followed by the partition columns, tab-separated.
        List<String> expected = Arrays.asList(
                String.join("\t", ArrayUtils.concat(
                        row1.stream().map(Object::toString).toArray(String[]::new),
                        new String[]{"first", "2018-08-20 00:00:00.1"})),
                String.join("\t", ArrayUtils.concat(
                        row2.stream().map(Object::toString).toArray(String[]::new),
                        new String[]{"second", "2018-08-26 00:00:00.1"})));
        verifyFlinkQueryResult(tableEnv.sqlQuery("select * from db1.src"), expected);
        // Ignore orc write test for Hive version 2.0.x for now due to FLINK-13998
        if (!format.equals("orc") || !HiveShimLoader.getHiveVersion().startsWith("2.0")) {
            // populate dest table with source table
            tableEnv.executeSql("insert into db1.dest select * from db1.src").await();
            // verify data on hive side
            verifyHiveQueryResult("select * from db1.dest", expected);
        }
        tableEnv.executeSql("drop database db1 cascade");
    }

    /** Renders a row as a comma-separated SQL values list, quoting string elements. */
    private String toRowValue(List<Object> row) {
        return row.stream().map(o -> {
            String res = o.toString();
            if (o instanceof String) {
                res = "'" + res + "'";
            }
            return res;
        }).collect(Collectors.joining(","));
    }

    /**
     * Verifies reading and writing decimal columns, including values that exceed the declared
     * scale (Hive and Flink must agree on the rounded results).
     */
    @Test
    public void testDecimal() throws Exception {
        TableEnvironment tableEnv = getTableEnvWithHiveCatalog();
        tableEnv.executeSql("create database db1");
        try {
            tableEnv.executeSql("create table db1.src1 (x decimal(10,2))");
            tableEnv.executeSql("create table db1.src2 (x decimal(10,2))");
            tableEnv.executeSql("create table db1.dest (x decimal(10,2))");
            // populate src1 from Hive
            // TABLE keyword in INSERT INTO is mandatory prior to 1.1.0
            hiveShell.execute("insert into table db1.src1 values (1.0),(2.12),(5.123),(5.456),(123456789.12)");
            // populate src2 with same data from Flink
            tableEnv.executeSql("insert into db1.src2 values (cast(1.0 as decimal(10,2))), (cast(2.12 as decimal(10,2))), " +
                    "(cast(5.123 as decimal(10,2))), (cast(5.456 as decimal(10,2))), (cast(123456789.12 as decimal(10,2)))")
                    .await();
            // verify src1 and src2 contain same data
            verifyHiveQueryResult("select * from db1.src2", hiveShell.executeQuery("select * from db1.src1"));
            // populate dest with src1 from Flink -- to test reading decimal type from Hive
            tableEnv.executeSql("insert into db1.dest select * from db1.src1").await();
            verifyHiveQueryResult("select * from db1.dest", hiveShell.executeQuery("select * from db1.src1"));
        } finally {
            tableEnv.executeSql("drop database db1 cascade");
        }
    }

    /**
     * Verifies INSERT OVERWRITE semantics for non-partitioned tables, static partitions, and
     * dynamic partitions (where only the dynamically matched partitions are overwritten).
     */
    @Test
    public void testInsertOverwrite() throws Exception {
        TableEnvironment tableEnv = getTableEnvWithHiveCatalog();
        tableEnv.executeSql("create database db1");
        try {
            // non-partitioned
            tableEnv.executeSql("create table db1.dest (x int, y string)");
            HiveTestUtils.createTextTableInserter(hiveShell, "db1", "dest").addRow(new Object[]{1, "a"}).addRow(new Object[]{2, "b"}).commit();
            verifyHiveQueryResult("select * from db1.dest", Arrays.asList("1\ta", "2\tb"));
            tableEnv.executeSql("insert overwrite db1.dest values (3, 'c')").await();
            verifyHiveQueryResult("select * from db1.dest", Collections.singletonList("3\tc"));
            // static partition
            tableEnv.executeSql("create table db1.part(x int) partitioned by (y int)");
            HiveTestUtils.createTextTableInserter(hiveShell, "db1", "part").addRow(new Object[]{1}).commit("y=1");
            HiveTestUtils.createTextTableInserter(hiveShell, "db1", "part").addRow(new Object[]{2}).commit("y=2");
            tableEnv = getTableEnvWithHiveCatalog();
            tableEnv.executeSql("insert overwrite db1.part partition (y=1) select 100").await();
            verifyHiveQueryResult("select * from db1.part", Arrays.asList("100\t1", "2\t2"));
            // dynamic partition
            tableEnv = getTableEnvWithHiveCatalog();
            tableEnv.executeSql("insert overwrite db1.part values (200,2),(3,3)").await();
            // only overwrite dynamically matched partitions, other existing partitions remain intact
            verifyHiveQueryResult("select * from db1.part", Arrays.asList("100\t1", "200\t2", "3\t3"));
        } finally {
            tableEnv.executeSql("drop database db1 cascade");
        }
    }

    /**
     * Inserts into a fully static partition spec, including a quoted partition value with an
     * embedded single quote ({@code 1'1}).
     */
    @Test
    public void testStaticPartition() throws Exception {
        TableEnvironment tableEnv = getTableEnvWithHiveCatalog();
        tableEnv.executeSql("create database db1");
        try {
            tableEnv.executeSql("create table db1.src (x int)");
            HiveTestUtils.createTextTableInserter(hiveShell, "db1", "src").addRow(new Object[]{1}).addRow(new Object[]{2}).commit();
            tableEnv.executeSql("create table db1.dest (x int) partitioned by (p1 string, p2 double)");
            tableEnv.executeSql("insert into db1.dest partition (p1='1''1', p2=1.1) select x from db1.src").await();
            assertEquals(1, hiveCatalog.listPartitions(new ObjectPath("db1", "dest")).size());
            verifyHiveQueryResult("select * from db1.dest", Arrays.asList("1\t1'1\t1.1", "2\t1'1\t1.1"));
        } finally {
            tableEnv.executeSql("drop database db1 cascade");
        }
    }

    /** Inserts with all partition columns resolved dynamically from the query result. */
    @Test
    public void testDynamicPartition() throws Exception {
        TableEnvironment tableEnv = getTableEnvWithHiveCatalog();
        tableEnv.executeSql("create database db1");
        try {
            tableEnv.executeSql("create table db1.src (x int, y string, z double)");
            HiveTestUtils.createTextTableInserter(hiveShell, "db1", "src")
                    .addRow(new Object[]{1, "a", 1.1})
                    .addRow(new Object[]{2, "a", 2.2})
                    .addRow(new Object[]{3, "b", 3.3})
                    .commit();
            tableEnv.executeSql("create table db1.dest (x int) partitioned by (p1 string, p2 double)");
            tableEnv.executeSql("insert into db1.dest select * from db1.src").await();
            assertEquals(3, hiveCatalog.listPartitions(new ObjectPath("db1", "dest")).size());
            verifyHiveQueryResult("select * from db1.dest", Arrays.asList("1\ta\t1.1", "2\ta\t2.2", "3\tb\t3.3"));
        } finally {
            tableEnv.executeSql("drop database db1 cascade");
        }
    }

    /** Mixes one static partition column with one dynamically resolved partition column. */
    @Test
    public void testPartialDynamicPartition() throws Exception {
        TableEnvironment tableEnv = getTableEnvWithHiveCatalog();
        tableEnv.executeSql("create database db1");
        try {
            tableEnv.executeSql("create table db1.src (x int, y string)");
            HiveTestUtils.createTextTableInserter(hiveShell, "db1", "src").addRow(new Object[]{1, "a"}).addRow(new Object[]{2, "b"}).commit();
            tableEnv.executeSql("create table db1.dest (x int) partitioned by (p1 double, p2 string)");
            tableEnv.executeSql("insert into db1.dest partition (p1=1.1) select x,y from db1.src").await();
            assertEquals(2, hiveCatalog.listPartitions(new ObjectPath("db1", "dest")).size());
            verifyHiveQueryResult("select * from db1.dest", Arrays.asList("1\t1.1\ta", "2\t1.1\tb"));
        } finally {
            tableEnv.executeSql("drop database db1 cascade");
        }
    }

    /**
     * Uses DATE and TIMESTAMP typed partition columns: reads, partition pruning by date,
     * inserting via casts, and aggregating a date partition column.
     */
    @Test
    public void testDateTimestampPartitionColumns() throws Exception {
        TableEnvironment tableEnv = getTableEnvWithHiveCatalog();
        tableEnv.executeSql("create database db1");
        try {
            tableEnv.executeSql("create table db1.part(x int) partitioned by (dt date,ts timestamp)");
            HiveTestUtils.createTextTableInserter(hiveShell, "db1", "part")
                    .addRow(new Object[]{1})
                    .addRow(new Object[]{2})
                    .commit("dt='2019-12-23',ts='2019-12-23 00:00:00'");
            HiveTestUtils.createTextTableInserter(hiveShell, "db1", "part")
                    .addRow(new Object[]{3})
                    .commit("dt='2019-12-25',ts='2019-12-25 16:23:43.012'");
            List<Row> results = CollectionUtil.iteratorToList(tableEnv.sqlQuery("select * from db1.part order by x").execute().collect());
            assertEquals("[1,2019-12-23,2019-12-23T00:00, 2,2019-12-23,2019-12-23T00:00, 3,2019-12-25,2019-12-25T16:23:43.012]", results.toString());
            results = CollectionUtil.iteratorToList(tableEnv.sqlQuery("select x from db1.part where dt=cast('2019-12-25' as date)").execute().collect());
            assertEquals("[3]", results.toString());
            tableEnv.executeSql("insert into db1.part select 4,cast('2019-12-31' as date),cast('2019-12-31 12:00:00.0' as timestamp)")
                    .await();
            results = CollectionUtil.iteratorToList(tableEnv.sqlQuery("select max(dt) from db1.part").execute().collect());
            assertEquals("[2019-12-31]", results.toString());
        } finally {
            tableEnv.executeSql("drop database db1 cascade");
        }
    }

    /**
     * Calls a Hive UDTF (explode) via a lateral table join on array, nested map, and
     * timestamp-array columns. Skipped on old Hive versions (see comment below).
     */
    @Test
    public void testUDTF() {
        // W/o https://issues.apache.org/jira/browse/HIVE-11878 Hive registers the App classloader as the classloader
        // for the UDTF and closes the App classloader when we tear down the session. This causes problems for JUnit code
        // and shutdown hooks that have to run after the test finishes, because App classloader can no longer load new
        // classes. And will crash the forked JVM, thus failing the test phase.
        // Therefore disable such tests for older Hive versions.
        String hiveVersion = HiveShimLoader.getHiveVersion();
        Assume.assumeTrue(hiveVersion.compareTo("2.0.0") >= 0 || hiveVersion.compareTo("1.3.0") >= 0);
        TableEnvironment tableEnv = getTableEnvWithHiveCatalog();
        tableEnv.executeSql("create database db1");
        try {
            tableEnv.executeSql("create table db1.simple (i int,a array<int>)");
            tableEnv.executeSql("create table db1.nested (a array<map<int, string>>)");
            tableEnv.executeSql("create function hiveudtf as 'org.apache.hadoop.hive.ql.udf.generic.GenericUDTFExplode'");
            hiveShell.insertInto("db1", "simple").addRow(3, Arrays.asList(1, 2, 3)).commit();
            Map<Integer, String> map1 = new HashMap<>();
            map1.put(1, "a");
            map1.put(2, "b");
            Map<Integer, String> map2 = new HashMap<>();
            map2.put(3, "c");
            hiveShell.insertInto("db1", "nested").addRow(Arrays.asList(map1, map2)).commit();
            List<Row> results = CollectionUtil.iteratorToList(
                    tableEnv.sqlQuery("select x from db1.simple, lateral table(hiveudtf(a)) as T(x)").execute().collect());
            assertEquals("[1, 2, 3]", results.toString());
            results = CollectionUtil.iteratorToList(
                    tableEnv.sqlQuery("select x from db1.nested, lateral table(hiveudtf(a)) as T(x)").execute().collect());
            assertEquals("[{1=a, 2=b}, {3=c}]", results.toString());
            tableEnv.executeSql("create table db1.ts (a array<timestamp>)");
            HiveTestUtils.createTextTableInserter(hiveShell, "db1", "ts").addRow(new Object[]{
                    new Object[]{Timestamp.valueOf("2015-04-28 15:23:00"), Timestamp.valueOf("2016-06-03 17:05:52")}})
                    .commit();
            results = CollectionUtil.iteratorToList(
                    tableEnv.sqlQuery("select x from db1.ts, lateral table(hiveudtf(a)) as T(x)").execute().collect());
            assertEquals("[2015-04-28T15:23, 2016-06-03T17:05:52]", results.toString());
        } finally {
            tableEnv.executeSql("drop database db1 cascade");
            tableEnv.executeSql("drop function hiveudtf");
        }
    }

    /**
     * Checks that NOT NULL ... RELY constraints are reflected in the Flink table schema while
     * NORELY constraints are treated as nullable. Requires Hive 3.1+.
     */
    @Test
    public void testNotNullConstraints() throws Exception {
        Assume.assumeTrue(HiveVersionTestUtil.HIVE_310_OR_LATER);
        TableEnvironment tableEnv = getTableEnvWithHiveCatalog();
        tableEnv.executeSql("create database db1");
        try {
            tableEnv.executeSql("create table db1.tbl (x int,y bigint not null enable rely,z string not null enable norely)");
            CatalogBaseTable catalogTable = hiveCatalog.getTable(new ObjectPath("db1", "tbl"));
            TableSchema tableSchema = catalogTable.getSchema();
            assertTrue("By default columns should be nullable",
                    tableSchema.getFieldDataTypes()[0].getLogicalType().isNullable());
            assertFalse("NOT NULL columns should be reflected in table schema",
                    tableSchema.getFieldDataTypes()[1].getLogicalType().isNullable());
            assertTrue("NOT NULL NORELY columns should be considered nullable",
                    tableSchema.getFieldDataTypes()[2].getLogicalType().isNullable());
        } finally {
            tableEnv.executeSql("drop database db1 cascade");
        }
    }

    /**
     * Checks that RELY primary-key constraints are exposed in the Flink table schema and that
     * NORELY or absent PKs are not.
     */
    @Test
    public void testPKConstraint() throws Exception {
        // While PK constraints are supported since Hive 2.1.0, the constraints cannot be RELY in 2.x versions.
        // So let's only test for 3.x.
        Assume.assumeTrue(HiveVersionTestUtil.HIVE_310_OR_LATER);
        TableEnvironment tableEnv = getTableEnvWithHiveCatalog();
        tableEnv.executeSql("create database db1");
        try {
            // test rely PK constraints
            tableEnv.executeSql("create table db1.tbl1 (x tinyint,y smallint,z int, primary key (x,z) disable novalidate rely)");
            CatalogBaseTable catalogTable = hiveCatalog.getTable(new ObjectPath("db1", "tbl1"));
            TableSchema tableSchema = catalogTable.getSchema();
            assertTrue(tableSchema.getPrimaryKey().isPresent());
            UniqueConstraint pk = tableSchema.getPrimaryKey().get();
            assertEquals(2, pk.getColumns().size());
            assertTrue(pk.getColumns().containsAll(Arrays.asList("x", "z")));
            // test norely PK constraints
            tableEnv.executeSql("create table db1.tbl2 (x tinyint,y smallint, primary key (x) disable norely)");
            catalogTable = hiveCatalog.getTable(new ObjectPath("db1", "tbl2"));
            tableSchema = catalogTable.getSchema();
            assertFalse(tableSchema.getPrimaryKey().isPresent());
            // test table w/o PK
            tableEnv.executeSql("create table db1.tbl3 (x tinyint)");
            catalogTable = hiveCatalog.getTable(new ObjectPath("db1", "tbl3"));
            tableSchema = catalogTable.getSchema();
            assertFalse(tableSchema.getPrimaryKey().isPresent());
        } finally {
            tableEnv.executeSql("drop database db1 cascade");
        }
    }

    /** Round-trips TIMESTAMP values (including nanosecond precision) between Hive and Flink. */
    @Test
    public void testTimestamp() throws Exception {
        TableEnvironment tableEnv = getTableEnvWithHiveCatalog();
        tableEnv.executeSql("create database db1");
        try {
            tableEnv.executeSql("create table db1.src (ts timestamp)");
            tableEnv.executeSql("create table db1.dest (ts timestamp)");
            HiveTestUtils.createTextTableInserter(hiveShell, "db1", "src")
                    .addRow(new Object[]{Timestamp.valueOf("2019-11-11 00:00:00")})
                    .addRow(new Object[]{Timestamp.valueOf("2019-12-03 15:43:32.123456789")})
                    .commit();
            // test read timestamp from hive
            List<Row> results = CollectionUtil.iteratorToList(tableEnv.sqlQuery("select * from db1.src").execute().collect());
            assertEquals(2, results.size());
            assertEquals(LocalDateTime.of(2019, 11, 11, 0, 0), results.get(0).getField(0));
            assertEquals(LocalDateTime.of(2019, 12, 3, 15, 43, 32, 123456789), results.get(1).getField(0));
            // test write timestamp to hive
            tableEnv.executeSql("insert into db1.dest select max(ts) from db1.src").await();
            verifyHiveQueryResult("select * from db1.dest", Collections.singletonList("2019-12-03 15:43:32.123456789"));
        } finally {
            tableEnv.executeSql("drop database db1 cascade");
        }
    }

    /** Round-trips DATE values between Hive and Flink. */
    @Test
    public void testDate() throws Exception {
        TableEnvironment tableEnv = getTableEnvWithHiveCatalog();
        tableEnv.executeSql("create database db1");
        try {
            tableEnv.executeSql("create table db1.src (dt date)");
            tableEnv.executeSql("create table db1.dest (dt date)");
            HiveTestUtils.createTextTableInserter(hiveShell, "db1", "src")
                    .addRow(new Object[]{Date.valueOf("2019-12-09")})
                    .addRow(new Object[]{Date.valueOf("2019-12-12")})
                    .commit();
            // test read date from hive
            List<Row> results = CollectionUtil.iteratorToList(tableEnv.sqlQuery("select * from db1.src").execute().collect());
            assertEquals(2, results.size());
            assertEquals(LocalDate.of(2019, 12, 9), results.get(0).getField(0));
            assertEquals(LocalDate.of(2019, 12, 12), results.get(1).getField(0));
            // test write date to hive
            tableEnv.executeSql("insert into db1.dest select max(dt) from db1.src").await();
            verifyHiveQueryResult("select * from db1.dest", Collections.singletonList("2019-12-12"));
        } finally {
            tableEnv.executeSql("drop database db1 cascade");
        }
    }

    /**
     * Queries Hive views created with LIMIT, GROUP BY/HAVING/ORDER BY, and a JOIN, verifying
     * Flink can read them like tables.
     */
    @Test
    public void testViews() {
        TableEnvironment tableEnv = getTableEnvWithHiveCatalog();
        tableEnv.executeSql("create database db1");
        try {
            tableEnv.executeSql("create table db1.src (key int,val string)");
            HiveTestUtils.createTextTableInserter(hiveShell, "db1", "src")
                    .addRow(new Object[]{1, "a"})
                    .addRow(new Object[]{1, "aa"})
                    .addRow(new Object[]{1, "aaa"})
                    .addRow(new Object[]{2, "b"})
                    .addRow(new Object[]{3, "c"})
                    .addRow(new Object[]{3, "ccc"})
                    .commit();
            tableEnv.executeSql("create table db1.keys (key int,name string)");
            HiveTestUtils.createTextTableInserter(hiveShell, "db1", "keys")
                    .addRow(new Object[]{1, "key1"})
                    .addRow(new Object[]{2, "key2"})
                    .addRow(new Object[]{3, "key3"})
                    .addRow(new Object[]{4, "key4"})
                    .commit();
            hiveShell.execute("create view db1.v1 as select key as k,val as v from db1.src limit 2");
            hiveShell.execute("create view db1.v2 as select key,count(*) from db1.src group by key having count(*)>1 order by key");
            hiveShell.execute("create view db1.v3 as select k.key,k.name,count(*) from db1.src s join db1.keys k on s.key=k.key group by k.key,k.name order by k.key");
            List<Row> results = CollectionUtil.iteratorToList(tableEnv.sqlQuery("select count(v) from db1.v1").execute().collect());
            assertEquals("[2]", results.toString());
            results = CollectionUtil.iteratorToList(tableEnv.sqlQuery("select * from db1.v2").execute().collect());
            assertEquals("[1,3, 3,2]", results.toString());
            results = CollectionUtil.iteratorToList(tableEnv.sqlQuery("select * from db1.v3").execute().collect());
            assertEquals("[1,key1,3, 2,key2,1, 3,key3,2]", results.toString());
        } finally {
            tableEnv.executeSql("drop database db1 cascade");
        }
    }

    /**
     * Writes partition values containing whitespace and verifies Hive escapes them in the
     * partition name (tab becomes %09).
     */
    @Test
    public void testWhitespacePartValue() throws Exception {
        TableEnvironment tableEnv = getTableEnvWithHiveCatalog();
        tableEnv.executeSql("create database db1");
        try {
            tableEnv.executeSql("create table db1.dest (x int) partitioned by (p string)");
            StatementSet stmtSet = tableEnv.createStatementSet();
            stmtSet.addInsertSql("insert into db1.dest select 1,' '");
            stmtSet.addInsertSql("insert into db1.dest select 2,'a \t'");
            stmtSet.execute().await();
            assertEquals("[p= , p=a %09]", hiveShell.executeQuery("show partitions db1.dest").toString());
        } finally {
            tableEnv.executeSql("drop database db1 cascade");
        }
    }

    /**
     * Writes to a text table with Hive result compression enabled and verifies both Hive and
     * Flink read the compressed data back, in batch or streaming mode.
     */
    private void testCompressTextTable(boolean batch) throws Exception {
        TableEnvironment tableEnv = batch ?
                getTableEnvWithHiveCatalog() :
                getStreamTableEnvWithHiveCatalog();
        tableEnv.executeSql("create database db1");
        try {
            tableEnv.executeSql("create table db1.src (x string,y string)");
            hiveShell.execute("create table db1.dest like db1.src");
            HiveTestUtils.createTextTableInserter(hiveShell, "db1", "src")
                    .addRow(new Object[]{"a", "b"})
                    .addRow(new Object[]{"c", "d"})
                    .commit();
            // Enable output compression for the write below.
            hiveCatalog.getHiveConf().setBoolVar(HiveConf.ConfVars.COMPRESSRESULT, true);
            tableEnv.executeSql("insert into db1.dest select * from db1.src").await();
            List<String> expected = Arrays.asList("a\tb", "c\td");
            verifyHiveQueryResult("select * from db1.dest", expected);
            verifyFlinkQueryResult(tableEnv.sqlQuery("select * from db1.dest"), expected);
        } finally {
            tableEnv.executeSql("drop database db1 cascade");
        }
    }

    @Test
    public void testBatchCompressTextTable() throws Exception {
        testCompressTextTable(true);
    }

    @Test
    public void testStreamCompressTextTable() throws Exception {
        testCompressTextTable(false);
    }

    /**
     * Verifies that reading from or writing to a Hive ACID (transactional) table fails with a
     * FlinkHiveException carrying the expected message, in batch or streaming mode.
     */
    private void testTransactionalTable(boolean batch) {
        TableEnvironment tableEnv = batch ?
                getTableEnvWithHiveCatalog() :
                getStreamTableEnvWithHiveCatalog();
        tableEnv.executeSql("create database db1");
        try {
            tableEnv.executeSql("create table db1.src (x string,y string)");
            hiveShell.execute("create table db1.dest (x string,y string) clustered by (x) into 3 buckets stored as orc tblproperties ('transactional'='true')");
            List<Exception> exceptions = new ArrayList<>();
            // Both directions (read from and write to the ACID table) must fail.
            try {
                tableEnv.executeSql("insert into db1.src select * from db1.dest").await();
            } catch (Exception e) {
                exceptions.add(e);
            }
            try {
                tableEnv.executeSql("insert into db1.dest select * from db1.src").await();
            } catch (Exception e) {
                exceptions.add(e);
            }
            assertEquals(2, exceptions.size());
            exceptions.forEach(e -> {
                assertTrue(e instanceof FlinkHiveException);
                assertEquals("Reading or writing ACID table db1.dest is not supported.", e.getMessage());
            });
        } finally {
            tableEnv.executeSql("drop database db1 cascade");
        }
    }

    @Test
    public void testBatchTransactionalTable() {
        testTransactionalTable(true);
    }

    @Test
    public void testStreamTransactionalTable() {
        testTransactionalTable(false);
    }

    /** Reads a table backed by RegexSerDe whose input regex uses a \u0001 field separator. */
    @Test
    public void testRegexSerDe() throws Exception {
        TableEnvironment tableEnv = getTableEnvWithHiveCatalog();
        tableEnv.executeSql("create database db1");
        try {
            tableEnv.executeSql("create table db1.src (x int,y string) " +
                    "row format serde 'org.apache.hadoop.hive.serde2.RegexSerDe' " +
                    "with serdeproperties ('input.regex'='([\\\\d]+)\\u0001([\\\\S]+)')");
            HiveTestUtils.createTextTableInserter(hiveShell, "db1", "src")
                    .addRow(new Object[]{1, "a"})
                    .addRow(new Object[]{2, "ab"})
                    .commit();
            assertEquals("[1,a, 2,ab]", CollectionUtil.iteratorToList(tableEnv.sqlQuery("select * from db1.src order by x").execute().collect()).toString());
        } finally {
            tableEnv.executeSql("drop database db1 cascade");
        }
    }

    /**
     * Changes a partitioned table's file format between writes so the same partition holds
     * files in different formats, and verifies the overwrite still reads back correctly.
     */
    @Test
    public void testUpdatePartitionSD() throws Exception {
        TableEnvironment tableEnv = getTableEnvWithHiveCatalog();
        tableEnv.executeSql("create database db1");
        try {
            tableEnv.executeSql("create table db1.dest (x int) partitioned by (p string) stored as rcfile");
            tableEnv.executeSql("insert overwrite db1.dest partition (p='1') select 1").await();
            tableEnv.executeSql("alter table db1.dest set fileformat sequencefile");
            tableEnv.executeSql("insert overwrite db1.dest partition (p='1') select 1").await();
            assertEquals("[1,1]", CollectionUtil.iteratorToList(tableEnv.sqlQuery("select * from db1.dest").execute().collect()).toString());
        } finally {
            tableEnv.executeSql("drop database db1 cascade");
        }
    }

    /**
     * Reads the same parquet files through two tables whose column order differs, checking
     * that columns are resolved by name (with the fallback mapred reader enabled).
     */
    @Test
    public void testParquetNameMapping() throws Exception {
        TableEnvironment tableEnv = getTableEnvWithHiveCatalog();
        tableEnv.executeSql("create database db1");
        try {
            tableEnv.executeSql("create table db1.t1 (x int,y int) stored as parquet");
            tableEnv.executeSql("insert into table db1.t1 values (1,10),(2,20)").await();
            Table hiveTable = hiveCatalog.getHiveTable(new ObjectPath("db1", "t1"));
            String location = hiveTable.getSd().getLocation();
            // t2 points at t1's files but declares the columns in reversed order.
            tableEnv.executeSql(String.format("create table db1.t2 (y int,x int) stored as parquet location '%s'", location));
            tableEnv.getConfig().getConfiguration().setBoolean(HiveOptions.TABLE_EXEC_HIVE_FALLBACK_MAPRED_READER, true);
            assertEquals("[1, 2]", CollectionUtil.iteratorToList(tableEnv.sqlQuery("select x from db1.t1").execute().collect()).toString());
            assertEquals("[1, 2]", CollectionUtil.iteratorToList(tableEnv.sqlQuery("select x from db1.t2").execute().collect()).toString());
        } finally {
            tableEnv.executeSql("drop database db1 cascade");
        }
    }

    /**
     * Verifies ORC schema evolution: data written with the old column types must still be
     * readable after ALTER TABLE ... CHANGE widens the columns. Requires Hive 2.1+.
     */
    @Test
    public void testOrcSchemaEvol() throws Exception {
        // not supported until 2.1.0 -- https://issues.apache.org/jira/browse/HIVE-11981,
        // https://issues.apache.org/jira/browse/HIVE-13178
        Assume.assumeTrue(HiveVersionTestUtil.HIVE_210_OR_LATER);
        TableEnvironment tableEnv = getTableEnvWithHiveCatalog();
        tableEnv.executeSql("create database db1");
        try {
            tableEnv.executeSql("create table db1.src (x smallint,y int) stored as orc");
            hiveShell.execute("insert into table db1.src values (1,100),(2,200)");
            tableEnv.getConfig().getConfiguration().setBoolean(HiveOptions.TABLE_EXEC_HIVE_FALLBACK_MAPRED_READER, true);
            tableEnv.executeSql("alter table db1.src change x x int");
            assertEquals("[1,100, 2,200]", CollectionUtil.iteratorToList(tableEnv.sqlQuery("select * from db1.src").execute().collect()).toString());
            tableEnv.executeSql("alter table db1.src change y y string");
            assertEquals("[1,100, 2,200]", CollectionUtil.iteratorToList(tableEnv.sqlQuery("select * from db1.src").execute().collect()).toString());
        } finally {
            tableEnv.executeSql("drop database db1 cascade");
        }
    }

    /**
     * Deletes a partition's folder on the file system (while keeping its metastore entry) and
     * verifies the scan skips the missing folder instead of failing.
     */
    @Test
    public void testNonExistingPartitionFolder() throws Exception {
        TableEnvironment tableEnv = getTableEnvWithHiveCatalog();
        tableEnv.executeSql("create database db1");
        try {
            tableEnv.executeSql("create table db1.part (x int) partitioned by (p int)");
            HiveTestUtils.createTextTableInserter(hiveShell, "db1", "part").addRow(new Object[]{1}).commit("p=1");
            HiveTestUtils.createTextTableInserter(hiveShell, "db1", "part").addRow(new Object[]{2}).commit("p=2");
            tableEnv.executeSql("alter table db1.part add partition (p=3)");
            // remove one partition
            Path toRemove = new Path(hiveCatalog.getHiveTable(new ObjectPath("db1", "part")).getSd().getLocation(), "p=2");
            FileSystem fs = toRemove.getFileSystem(hiveShell.getHiveConf());
            fs.delete(toRemove, true);
            List<Row> results = CollectionUtil.iteratorToList(tableEnv.sqlQuery("select * from db1.part").execute().collect());
            assertEquals("[1,1]", results.toString());
        } finally {
            tableEnv.executeSql("drop database db1 cascade");
        }
    }

    /** Inserts into a partially static partition using a SELECT * source. */
    @Test
    public void testInsertPartitionWithStarSource() throws Exception {
        TableEnvironment tableEnv = getTableEnvWithHiveCatalog();
        tableEnv.executeSql("create table src (x int,y string)");
        HiveTestUtils.createTextTableInserter(
                hiveShell,
                "default",
                "src")
                .addRow(new Object[]{1, "a"})
                .commit();
        tableEnv.executeSql("create table dest (x int) partitioned by (p1 int,p2 string)");
        tableEnv.executeSql("insert into dest partition (p1=1) select * from src").await();
        List<Row> results = CollectionUtil.iteratorToList(tableEnv.sqlQuery("select * from dest").execute().collect());
        assertEquals("[1,1,a]", results.toString());
        tableEnv.executeSql("drop table if exists src");
        tableEnv.executeSql("drop table if exists dest");
    }

    /** Inserts into a partially static partition using a VALUES source. */
    @Test
    public void testInsertPartitionWithValuesSource() throws Exception {
        TableEnvironment tableEnv = getTableEnvWithHiveCatalog();
        tableEnv.executeSql("create table dest (x int) partitioned by (p1 int,p2 string)");
        tableEnv.executeSql("insert into dest partition (p1=1) values(1, 'a')").await();
        List<Row> results = CollectionUtil.iteratorToList(tableEnv.sqlQuery("select * from dest").execute().collect());
        assertEquals("[1,1,a]", results.toString());
        tableEnv.executeSql("drop table if exists dest");
    }

    /** Builds a batch-mode TableEnvironment in HIVE dialect with the shared catalog registered. */
    private TableEnvironment getTableEnvWithHiveCatalog() {
        TableEnvironment tableEnv = HiveTestUtils.createTableEnvWithBlinkPlannerBatchMode(SqlDialect.HIVE);
        tableEnv.registerCatalog(hiveCatalog.getName(), hiveCatalog);
        tableEnv.useCatalog(hiveCatalog.getName());
        return tableEnv;
    }

    /** Builds a streaming-mode TableEnvironment in HIVE dialect with the shared catalog registered. */
    private TableEnvironment getStreamTableEnvWithHiveCatalog() {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        TableEnvironment tableEnv = HiveTestUtils.createTableEnvWithBlinkPlannerStreamMode(env, SqlDialect.HIVE);
        tableEnv.registerCatalog(hiveCatalog.getName(), hiveCatalog);
        tableEnv.useCatalog(hiveCatalog.getName());
        return tableEnv;
    }

    /** Runs the query with Hive and asserts the result set equals {@code expected}, ignoring order. */
    private void verifyHiveQueryResult(String query, List<String> expected) {
        List<String> results = hiveShell.executeQuery(query);
        assertEquals(expected.size(), results.size());
        assertEquals(new HashSet<>(expected), new HashSet<>(results));
    }

    /**
     * Collects the Flink table, formats each row as tab-separated strings (converting
     * LocalDateTime back to java.sql.Timestamp for Hive-compatible printing), and asserts the
     * result set equals {@code expected}, ignoring order.
     */
    private void verifyFlinkQueryResult(org.apache.flink.table.api.Table table, List<String> expected) throws Exception {
        List<Row> rows = CollectionUtil.iteratorToList(table.execute().collect());
        List<String> results = rows.stream().map(row ->
                IntStream.range(0, row.getArity())
                        .mapToObj(row::getField)
                        .map(o -> o instanceof LocalDateTime ?
                                Timestamp.valueOf((LocalDateTime) o) : o)
                        .map(Object::toString)
                        .collect(Collectors.joining("\t"))).collect(Collectors.toList());
        assertEquals(expected.size(), results.size());
        assertEquals(new HashSet<>(expected), new HashSet<>(results));
    }
}
|
flink-connectors/flink-connector-hive/src/test/java/org/apache/flink/connectors/hive/TableEnvHiveConnectorITCase.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.connectors.hive;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.HiveVersionTestUtil;
import org.apache.flink.table.api.SqlDialect;
import org.apache.flink.table.api.StatementSet;
import org.apache.flink.table.api.TableEnvironment;
import org.apache.flink.table.api.TableSchema;
import org.apache.flink.table.api.constraints.UniqueConstraint;
import org.apache.flink.table.api.internal.TableImpl;
import org.apache.flink.table.catalog.CatalogBaseTable;
import org.apache.flink.table.catalog.ObjectPath;
import org.apache.flink.table.catalog.hive.HiveCatalog;
import org.apache.flink.table.catalog.hive.HiveTestUtils;
import org.apache.flink.table.catalog.hive.client.HiveMetastoreClientFactory;
import org.apache.flink.table.catalog.hive.client.HiveMetastoreClientWrapper;
import org.apache.flink.table.catalog.hive.client.HiveShimLoader;
import org.apache.flink.types.Row;
import org.apache.flink.util.ArrayUtils;
import org.apache.flink.util.CollectionUtil;
import com.klarna.hiverunner.HiveShell;
import com.klarna.hiverunner.annotations.HiveSQL;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.api.Table;
import org.junit.Assume;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.runner.RunWith;
import java.sql.Date;
import java.sql.Timestamp;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
/**
* Test hive connector with table API.
*/
@RunWith(FlinkStandaloneHiveRunner.class)
public class TableEnvHiveConnectorITCase {
@HiveSQL(files = {})
private static HiveShell hiveShell;
private static HiveCatalog hiveCatalog;
private static HiveMetastoreClientWrapper hmsClient;
@BeforeClass
public static void setup() {
    // Build the Flink HiveCatalog and a raw metastore client on top of the
    // HiveConf provided by the HiveRunner-managed embedded Hive instance.
    HiveConf hiveConf = hiveShell.getHiveConf();
    hiveCatalog = HiveTestUtils.createHiveCatalog(hiveConf);
    hiveCatalog.open();
    hmsClient = HiveMetastoreClientFactory.create(hiveConf, HiveShimLoader.getHiveVersion());
}
@Test
public void testDefaultPartitionName() throws Exception {
    // A null partition value must land in Hive's configured default partition
    // (hive.exec.default.partition.name) and read back as null in Flink.
    TableEnvironment tableEnv = getTableEnvWithHiveCatalog();
    tableEnv.executeSql("create database db1");
    tableEnv.executeSql("create table db1.src (x int, y int)");
    tableEnv.executeSql("create table db1.part (x int) partitioned by (y int)");
    HiveTestUtils.createTextTableInserter(hiveShell, "db1", "src").addRow(new Object[]{1, 1}).addRow(new Object[]{2, null}).commit();
    // test generating partitions with default name
    tableEnv.executeSql("insert into db1.part select * from db1.src").await();
    HiveConf hiveConf = hiveShell.getHiveConf();
    String defaultPartName = hiveConf.getVar(HiveConf.ConfVars.DEFAULTPARTITIONNAME);
    Table hiveTable = hmsClient.getTable("db1", "part");
    // Verify the default-partition directory was physically created.
    Path defaultPartPath = new Path(hiveTable.getSd().getLocation(), "y=" + defaultPartName);
    FileSystem fs = defaultPartPath.getFileSystem(hiveConf);
    assertTrue(fs.exists(defaultPartPath));
    TableImpl flinkTable = (TableImpl) tableEnv.sqlQuery("select y, x from db1.part order by x");
    List<Row> rows = CollectionUtil.iteratorToList(flinkTable.execute().collect());
    assertEquals(Arrays.toString(new String[]{"1,1", "null,2"}), rows.toString());
    tableEnv.executeSql("drop database db1 cascade");
}
@Test
public void testGetNonExistingFunction() throws Exception {
    // Regression check: function lookup against the Hive catalog must not fail
    // the query when built-ins like count are involved.
    TableEnvironment tableEnv = getTableEnvWithHiveCatalog();
    tableEnv.executeSql("create database db1");
    tableEnv.executeSql("create table db1.src (d double, s string)");
    tableEnv.executeSql("create table db1.dest (x bigint)");
    // just make sure the query runs through, no need to verify result
    tableEnv.executeSql("insert into db1.dest select count(d) from db1.src").await();
    tableEnv.executeSql("drop database db1 cascade");
}
@Test
public void testDifferentFormats() throws Exception {
    // Round-trip read/write for each storage format supported by the connector.
    String[] formats = new String[]{"orc", "parquet", "sequencefile", "csv", "avro"};
    for (String format : formats) {
        if (format.equals("avro") && !HiveVersionTestUtil.HIVE_110_OR_LATER) {
            // timestamp is not supported for avro tables before 1.1.0
            continue;
        }
        readWriteFormat(format);
    }
}
/**
 * Round-trips partitioned tables stored in the given format: Hive writes the
 * source data, Flink reads it back, then Flink writes it into a dest table and
 * Hive verifies the result. Schema is adjusted per Hive version limitations.
 */
private void readWriteFormat(String format) throws Exception {
    TableEnvironment tableEnv = getTableEnvWithHiveCatalog();
    tableEnv.executeSql("create database db1");
    // create source and dest tables
    String suffix;
    if (format.equals("csv")) {
        // "csv" here means the OpenCSV serde rather than a STORED AS clause.
        suffix = "row format serde 'org.apache.hadoop.hive.serde2.OpenCSVSerde'";
    } else {
        suffix = "stored as " + format;
    }
    String tableSchema;
    // use 2018-08-20 00:00:00.1 to avoid multi-version print difference.
    List<Object> row1 = new ArrayList<>(Arrays.asList(1, "a", "2018-08-20 00:00:00.1"));
    List<Object> row2 = new ArrayList<>(Arrays.asList(2, "b", "2019-08-26 00:00:00.1"));
    // some data types are not supported for parquet tables in early versions -- https://issues.apache.org/jira/browse/HIVE-6384
    if (HiveVersionTestUtil.HIVE_120_OR_LATER || !format.equals("parquet")) {
        tableSchema = "(i int,s string,ts timestamp,dt date)";
        row1.add("2018-08-20");
        row2.add("2019-08-26");
    } else {
        // drop the date column for old-Hive parquet
        tableSchema = "(i int,s string,ts timestamp)";
    }
    tableEnv.executeSql(String.format(
            "create table db1.src %s partitioned by (p1 string, p2 timestamp) %s", tableSchema, suffix));
    tableEnv.executeSql(String.format(
            "create table db1.dest %s partitioned by (p1 string, p2 timestamp) %s", tableSchema, suffix));
    // prepare source data with Hive
    // TABLE keyword in INSERT INTO is mandatory prior to 1.1.0
    hiveShell.execute(String.format(
            "insert into table db1.src partition(p1='first',p2='2018-08-20 00:00:00.1') values (%s)",
            toRowValue(row1)));
    hiveShell.execute(String.format(
            "insert into table db1.src partition(p1='second',p2='2018-08-26 00:00:00.1') values (%s)",
            toRowValue(row2)));
    // Expected rows are data columns followed by the partition columns, tab-joined.
    List<String> expected = Arrays.asList(
            String.join("\t", ArrayUtils.concat(
                    row1.stream().map(Object::toString).toArray(String[]::new),
                    new String[]{"first", "2018-08-20 00:00:00.1"})),
            String.join("\t", ArrayUtils.concat(
                    row2.stream().map(Object::toString).toArray(String[]::new),
                    new String[]{"second", "2018-08-26 00:00:00.1"})));
    verifyFlinkQueryResult(tableEnv.sqlQuery("select * from db1.src"), expected);
    // Ignore orc write test for Hive version 2.0.x for now due to FLINK-13998
    if (!format.equals("orc") || !HiveShimLoader.getHiveVersion().startsWith("2.0")) {
        // populate dest table with source table
        tableEnv.executeSql("insert into db1.dest select * from db1.src").await();
        // verify data on hive side
        verifyHiveQueryResult("select * from db1.dest", expected);
    }
    tableEnv.executeSql("drop database db1 cascade");
}
/**
 * Renders a row as a comma-separated list of SQL literals for a VALUES clause.
 * String values are wrapped in single quotes; every other value uses its
 * {@code toString()} form unchanged.
 */
private String toRowValue(List<Object> row) {
    StringBuilder builder = new StringBuilder();
    for (int i = 0; i < row.size(); i++) {
        if (i > 0) {
            builder.append(',');
        }
        Object value = row.get(i);
        String literal = value.toString();
        if (value instanceof String) {
            literal = "'" + literal + "'";
        }
        builder.append(literal);
    }
    return builder.toString();
}
@Test
public void testDecimal() throws Exception {
    // Verifies decimal(10,2) round-trips identically whether written by Hive
    // (src1) or by Flink (src2), including rounding of extra fractional digits.
    TableEnvironment tableEnv = getTableEnvWithHiveCatalog();
    tableEnv.executeSql("create database db1");
    try {
        tableEnv.executeSql("create table db1.src1 (x decimal(10,2))");
        tableEnv.executeSql("create table db1.src2 (x decimal(10,2))");
        tableEnv.executeSql("create table db1.dest (x decimal(10,2))");
        // populate src1 from Hive
        // TABLE keyword in INSERT INTO is mandatory prior to 1.1.0
        hiveShell.execute("insert into table db1.src1 values (1.0),(2.12),(5.123),(5.456),(123456789.12)");
        // populate src2 with same data from Flink
        tableEnv.executeSql("insert into db1.src2 values (cast(1.0 as decimal(10,2))), (cast(2.12 as decimal(10,2))), " +
                "(cast(5.123 as decimal(10,2))), (cast(5.456 as decimal(10,2))), (cast(123456789.12 as decimal(10,2)))")
                .await();
        // verify src1 and src2 contain same data
        verifyHiveQueryResult("select * from db1.src2", hiveShell.executeQuery("select * from db1.src1"));
        // populate dest with src1 from Flink -- to test reading decimal type from Hive
        tableEnv.executeSql("insert into db1.dest select * from db1.src1").await();
        verifyHiveQueryResult("select * from db1.dest", hiveShell.executeQuery("select * from db1.src1"));
    } finally {
        tableEnv.executeSql("drop database db1 cascade");
    }
}
@Test
public void testInsertOverwrite() throws Exception {
    // Exercises INSERT OVERWRITE on a non-partitioned table, a static
    // partition, and dynamic partitions.
    TableEnvironment tableEnv = getTableEnvWithHiveCatalog();
    tableEnv.executeSql("create database db1");
    try {
        // non-partitioned: overwrite replaces all existing rows
        tableEnv.executeSql("create table db1.dest (x int, y string)");
        HiveTestUtils.createTextTableInserter(hiveShell, "db1", "dest").addRow(new Object[]{1, "a"}).addRow(new Object[]{2, "b"}).commit();
        verifyHiveQueryResult("select * from db1.dest", Arrays.asList("1\ta", "2\tb"));
        tableEnv.executeSql("insert overwrite db1.dest values (3, 'c')").await();
        verifyHiveQueryResult("select * from db1.dest", Collections.singletonList("3\tc"));
        // static partition: only the named partition (y=1) is replaced
        tableEnv.executeSql("create table db1.part(x int) partitioned by (y int)");
        HiveTestUtils.createTextTableInserter(hiveShell, "db1", "part").addRow(new Object[]{1}).commit("y=1");
        HiveTestUtils.createTextTableInserter(hiveShell, "db1", "part").addRow(new Object[]{2}).commit("y=2");
        tableEnv = getTableEnvWithHiveCatalog();
        tableEnv.executeSql("insert overwrite db1.part partition (y=1) select 100").await();
        verifyHiveQueryResult("select * from db1.part", Arrays.asList("100\t1", "2\t2"));
        // dynamic partition
        tableEnv = getTableEnvWithHiveCatalog();
        tableEnv.executeSql("insert overwrite db1.part values (200,2),(3,3)").await();
        // only overwrite dynamically matched partitions, other existing partitions remain intact
        verifyHiveQueryResult("select * from db1.part", Arrays.asList("100\t1", "200\t2", "3\t3"));
    } finally {
        tableEnv.executeSql("drop database db1 cascade");
    }
}
@Test
public void testStaticPartition() throws Exception {
    // All partition columns are supplied statically; note p1's value contains
    // an escaped single quote (1'1) to exercise partition-value escaping.
    TableEnvironment tableEnv = getTableEnvWithHiveCatalog();
    tableEnv.executeSql("create database db1");
    try {
        tableEnv.executeSql("create table db1.src (x int)");
        HiveTestUtils.createTextTableInserter(hiveShell, "db1", "src").addRow(new Object[]{1}).addRow(new Object[]{2}).commit();
        tableEnv.executeSql("create table db1.dest (x int) partitioned by (p1 string, p2 double)");
        tableEnv.executeSql("insert into db1.dest partition (p1='1''1', p2=1.1) select x from db1.src").await();
        // Both rows go into the single fully-specified partition.
        assertEquals(1, hiveCatalog.listPartitions(new ObjectPath("db1", "dest")).size());
        verifyHiveQueryResult("select * from db1.dest", Arrays.asList("1\t1'1\t1.1", "2\t1'1\t1.1"));
    } finally {
        tableEnv.executeSql("drop database db1 cascade");
    }
}
@Test
public void testDynamicPartition() throws Exception {
    // Both partition columns are derived from the query result, so three input
    // rows with distinct (y, z) pairs create three partitions.
    TableEnvironment tableEnv = getTableEnvWithHiveCatalog();
    tableEnv.executeSql("create database db1");
    try {
        tableEnv.executeSql("create table db1.src (x int, y string, z double)");
        HiveTestUtils.createTextTableInserter(hiveShell, "db1", "src")
                .addRow(new Object[]{1, "a", 1.1})
                .addRow(new Object[]{2, "a", 2.2})
                .addRow(new Object[]{3, "b", 3.3})
                .commit();
        tableEnv.executeSql("create table db1.dest (x int) partitioned by (p1 string, p2 double)");
        tableEnv.executeSql("insert into db1.dest select * from db1.src").await();
        assertEquals(3, hiveCatalog.listPartitions(new ObjectPath("db1", "dest")).size());
        verifyHiveQueryResult("select * from db1.dest", Arrays.asList("1\ta\t1.1", "2\ta\t2.2", "3\tb\t3.3"));
    } finally {
        tableEnv.executeSql("drop database db1 cascade");
    }
}
@Test
public void testPartialDynamicPartition() throws Exception {
    // Mixed spec: p1 is static (1.1), p2 comes dynamically from column y.
    TableEnvironment tableEnv = getTableEnvWithHiveCatalog();
    tableEnv.executeSql("create database db1");
    try {
        tableEnv.executeSql("create table db1.src (x int, y string)");
        HiveTestUtils.createTextTableInserter(hiveShell, "db1", "src").addRow(new Object[]{1, "a"}).addRow(new Object[]{2, "b"}).commit();
        tableEnv.executeSql("create table db1.dest (x int) partitioned by (p1 double, p2 string)");
        tableEnv.executeSql("insert into db1.dest partition (p1=1.1) select x,y from db1.src").await();
        // Two distinct y values under the fixed p1 -> two partitions.
        assertEquals(2, hiveCatalog.listPartitions(new ObjectPath("db1", "dest")).size());
        verifyHiveQueryResult("select * from db1.dest", Arrays.asList("1\t1.1\ta", "2\t1.1\tb"));
    } finally {
        tableEnv.executeSql("drop database db1 cascade");
    }
}
@Test
public void testDateTimestampPartitionColumns() throws Exception {
    // Partition columns of DATE and TIMESTAMP type: read, partition-pruned
    // filter, write, and aggregation over the partition column.
    TableEnvironment tableEnv = getTableEnvWithHiveCatalog();
    tableEnv.executeSql("create database db1");
    try {
        tableEnv.executeSql("create table db1.part(x int) partitioned by (dt date,ts timestamp)");
        HiveTestUtils.createTextTableInserter(hiveShell, "db1", "part")
                .addRow(new Object[]{1})
                .addRow(new Object[]{2})
                .commit("dt='2019-12-23',ts='2019-12-23 00:00:00'");
        HiveTestUtils.createTextTableInserter(hiveShell, "db1", "part")
                .addRow(new Object[]{3})
                .commit("dt='2019-12-25',ts='2019-12-25 16:23:43.012'");
        // Flink renders these as LocalDate / LocalDateTime, hence the ISO-style strings.
        List<Row> results = CollectionUtil.iteratorToList(tableEnv.sqlQuery("select * from db1.part order by x").execute().collect());
        assertEquals("[1,2019-12-23,2019-12-23T00:00, 2,2019-12-23,2019-12-23T00:00, 3,2019-12-25,2019-12-25T16:23:43.012]", results.toString());
        results = CollectionUtil.iteratorToList(tableEnv.sqlQuery("select x from db1.part where dt=cast('2019-12-25' as date)").execute().collect());
        assertEquals("[3]", results.toString());
        tableEnv.executeSql("insert into db1.part select 4,cast('2019-12-31' as date),cast('2019-12-31 12:00:00.0' as timestamp)")
                .await();
        results = CollectionUtil.iteratorToList(tableEnv.sqlQuery("select max(dt) from db1.part").execute().collect());
        assertEquals("[2019-12-31]", results.toString());
    } finally {
        tableEnv.executeSql("drop database db1 cascade");
    }
}
@Test
public void testUDTF() {
    // W/o https://issues.apache.org/jira/browse/HIVE-11878 Hive registers the App classloader as the classloader
    // for the UDTF and closes the App classloader when we tear down the session. This causes problems for JUnit code
    // and shutdown hooks that have to run after the test finishes, because App classloader can no longer load new
    // classes. And will crash the forked JVM, thus failing the test phase.
    // Therefore disable such tests for older Hive versions.
    String hiveVersion = HiveShimLoader.getHiveVersion();
    // NOTE(review): the first disjunct is implied by the second (any version >= "2.0.0"
    // also compares >= "1.3.0"), so this effectively assumes hiveVersion >= 1.3.0.
    Assume.assumeTrue(hiveVersion.compareTo("2.0.0") >= 0 || hiveVersion.compareTo("1.3.0") >= 0);
    TableEnvironment tableEnv = getTableEnvWithHiveCatalog();
    tableEnv.executeSql("create database db1");
    try {
        // Exercise Hive's explode UDTF via lateral table over array<int>,
        // array<map<int,string>> and array<timestamp> columns.
        tableEnv.executeSql("create table db1.simple (i int,a array<int>)");
        tableEnv.executeSql("create table db1.nested (a array<map<int, string>>)");
        tableEnv.executeSql("create function hiveudtf as 'org.apache.hadoop.hive.ql.udf.generic.GenericUDTFExplode'");
        hiveShell.insertInto("db1", "simple").addRow(3, Arrays.asList(1, 2, 3)).commit();
        Map<Integer, String> map1 = new HashMap<>();
        map1.put(1, "a");
        map1.put(2, "b");
        Map<Integer, String> map2 = new HashMap<>();
        map2.put(3, "c");
        hiveShell.insertInto("db1", "nested").addRow(Arrays.asList(map1, map2)).commit();
        List<Row> results = CollectionUtil.iteratorToList(
                tableEnv.sqlQuery("select x from db1.simple, lateral table(hiveudtf(a)) as T(x)").execute().collect());
        assertEquals("[1, 2, 3]", results.toString());
        results = CollectionUtil.iteratorToList(
                tableEnv.sqlQuery("select x from db1.nested, lateral table(hiveudtf(a)) as T(x)").execute().collect());
        assertEquals("[{1=a, 2=b}, {3=c}]", results.toString());
        tableEnv.executeSql("create table db1.ts (a array<timestamp>)");
        HiveTestUtils.createTextTableInserter(hiveShell, "db1", "ts").addRow(new Object[]{
                new Object[]{Timestamp.valueOf("2015-04-28 15:23:00"), Timestamp.valueOf("2016-06-03 17:05:52")}})
                .commit();
        results = CollectionUtil.iteratorToList(
                tableEnv.sqlQuery("select x from db1.ts, lateral table(hiveudtf(a)) as T(x)").execute().collect());
        assertEquals("[2015-04-28T15:23, 2016-06-03T17:05:52]", results.toString());
    } finally {
        // The function was created in the catalog (not scoped to db1), so it is
        // dropped separately after the database.
        tableEnv.executeSql("drop database db1 cascade");
        tableEnv.executeSql("drop function hiveudtf");
    }
}
@Test
public void testNotNullConstraints() throws Exception {
    // NOT NULL constraints are only mapped for Hive 3.1.0+.
    Assume.assumeTrue(HiveVersionTestUtil.HIVE_310_OR_LATER);
    TableEnvironment tableEnv = getTableEnvWithHiveCatalog();
    tableEnv.executeSql("create database db1");
    try {
        // y: NOT NULL + RELY -> non-nullable in Flink; z: NOT NULL + NORELY -> still nullable.
        tableEnv.executeSql("create table db1.tbl (x int,y bigint not null enable rely,z string not null enable norely)");
        CatalogBaseTable catalogTable = hiveCatalog.getTable(new ObjectPath("db1", "tbl"));
        TableSchema tableSchema = catalogTable.getSchema();
        assertTrue("By default columns should be nullable",
                tableSchema.getFieldDataTypes()[0].getLogicalType().isNullable());
        assertFalse("NOT NULL columns should be reflected in table schema",
                tableSchema.getFieldDataTypes()[1].getLogicalType().isNullable());
        assertTrue("NOT NULL NORELY columns should be considered nullable",
                tableSchema.getFieldDataTypes()[2].getLogicalType().isNullable());
    } finally {
        tableEnv.executeSql("drop database db1 cascade");
    }
}
@Test
public void testPKConstraint() throws Exception {
    // While PK constraints are supported since Hive 2.1.0, the constraints cannot be RELY in 2.x versions.
    // So let's only test for 3.x.
    Assume.assumeTrue(HiveVersionTestUtil.HIVE_310_OR_LATER);
    TableEnvironment tableEnv = getTableEnvWithHiveCatalog();
    tableEnv.executeSql("create database db1");
    try {
        // test rely PK constraints: exposed as the table's primary key in Flink
        tableEnv.executeSql("create table db1.tbl1 (x tinyint,y smallint,z int, primary key (x,z) disable novalidate rely)");
        CatalogBaseTable catalogTable = hiveCatalog.getTable(new ObjectPath("db1", "tbl1"));
        TableSchema tableSchema = catalogTable.getSchema();
        assertTrue(tableSchema.getPrimaryKey().isPresent());
        UniqueConstraint pk = tableSchema.getPrimaryKey().get();
        assertEquals(2, pk.getColumns().size());
        assertTrue(pk.getColumns().containsAll(Arrays.asList("x", "z")));
        // test norely PK constraints: ignored by Flink
        tableEnv.executeSql("create table db1.tbl2 (x tinyint,y smallint, primary key (x) disable norely)");
        catalogTable = hiveCatalog.getTable(new ObjectPath("db1", "tbl2"));
        tableSchema = catalogTable.getSchema();
        assertFalse(tableSchema.getPrimaryKey().isPresent());
        // test table w/o PK
        tableEnv.executeSql("create table db1.tbl3 (x tinyint)");
        catalogTable = hiveCatalog.getTable(new ObjectPath("db1", "tbl3"));
        tableSchema = catalogTable.getSchema();
        assertFalse(tableSchema.getPrimaryKey().isPresent());
    } finally {
        tableEnv.executeSql("drop database db1 cascade");
    }
}
@Test
public void testTimestamp() throws Exception {
    // Timestamp round-trip: Hive-written values (including nanosecond
    // precision) read back as LocalDateTime, then written back by Flink.
    TableEnvironment tableEnv = getTableEnvWithHiveCatalog();
    tableEnv.executeSql("create database db1");
    try {
        tableEnv.executeSql("create table db1.src (ts timestamp)");
        tableEnv.executeSql("create table db1.dest (ts timestamp)");
        HiveTestUtils.createTextTableInserter(hiveShell, "db1", "src")
                .addRow(new Object[]{Timestamp.valueOf("2019-11-11 00:00:00")})
                .addRow(new Object[]{Timestamp.valueOf("2019-12-03 15:43:32.123456789")})
                .commit();
        // test read timestamp from hive
        List<Row> results = CollectionUtil.iteratorToList(tableEnv.sqlQuery("select * from db1.src").execute().collect());
        assertEquals(2, results.size());
        assertEquals(LocalDateTime.of(2019, 11, 11, 0, 0), results.get(0).getField(0));
        assertEquals(LocalDateTime.of(2019, 12, 3, 15, 43, 32, 123456789), results.get(1).getField(0));
        // test write timestamp to hive
        tableEnv.executeSql("insert into db1.dest select max(ts) from db1.src").await();
        verifyHiveQueryResult("select * from db1.dest", Collections.singletonList("2019-12-03 15:43:32.123456789"));
    } finally {
        tableEnv.executeSql("drop database db1 cascade");
    }
}
@Test
public void testDate() throws Exception {
    // Date round-trip: Hive-written java.sql.Date values read back as
    // LocalDate, then written back by Flink and verified on the Hive side.
    TableEnvironment tableEnv = getTableEnvWithHiveCatalog();
    tableEnv.executeSql("create database db1");
    try {
        tableEnv.executeSql("create table db1.src (dt date)");
        tableEnv.executeSql("create table db1.dest (dt date)");
        HiveTestUtils.createTextTableInserter(hiveShell, "db1", "src")
                .addRow(new Object[]{Date.valueOf("2019-12-09")})
                .addRow(new Object[]{Date.valueOf("2019-12-12")})
                .commit();
        // test read date from hive
        List<Row> results = CollectionUtil.iteratorToList(tableEnv.sqlQuery("select * from db1.src").execute().collect());
        assertEquals(2, results.size());
        assertEquals(LocalDate.of(2019, 12, 9), results.get(0).getField(0));
        assertEquals(LocalDate.of(2019, 12, 12), results.get(1).getField(0));
        // test write date to hive
        tableEnv.executeSql("insert into db1.dest select max(dt) from db1.src").await();
        verifyHiveQueryResult("select * from db1.dest", Collections.singletonList("2019-12-12"));
    } finally {
        tableEnv.executeSql("drop database db1 cascade");
    }
}
@Test
public void testViews() {
    // Flink querying Hive-defined views: LIMIT, GROUP BY/HAVING/ORDER BY, and a join view.
    TableEnvironment tableEnv = getTableEnvWithHiveCatalog();
    tableEnv.executeSql("create database db1");
    try {
        tableEnv.executeSql("create table db1.src (key int,val string)");
        HiveTestUtils.createTextTableInserter(hiveShell, "db1", "src")
                .addRow(new Object[]{1, "a"})
                .addRow(new Object[]{1, "aa"})
                .addRow(new Object[]{1, "aaa"})
                .addRow(new Object[]{2, "b"})
                .addRow(new Object[]{3, "c"})
                .addRow(new Object[]{3, "ccc"})
                .commit();
        tableEnv.executeSql("create table db1.keys (key int,name string)");
        HiveTestUtils.createTextTableInserter(hiveShell, "db1", "keys")
                .addRow(new Object[]{1, "key1"})
                .addRow(new Object[]{2, "key2"})
                .addRow(new Object[]{3, "key3"})
                .addRow(new Object[]{4, "key4"})
                .commit();
        // Views are created via Hive itself; Flink only reads them.
        hiveShell.execute("create view db1.v1 as select key as k,val as v from db1.src limit 2");
        hiveShell.execute("create view db1.v2 as select key,count(*) from db1.src group by key having count(*)>1 order by key");
        hiveShell.execute("create view db1.v3 as select k.key,k.name,count(*) from db1.src s join db1.keys k on s.key=k.key group by k.key,k.name order by k.key");
        List<Row> results = CollectionUtil.iteratorToList(tableEnv.sqlQuery("select count(v) from db1.v1").execute().collect());
        assertEquals("[2]", results.toString());
        results = CollectionUtil.iteratorToList(tableEnv.sqlQuery("select * from db1.v2").execute().collect());
        assertEquals("[1,3, 3,2]", results.toString());
        results = CollectionUtil.iteratorToList(tableEnv.sqlQuery("select * from db1.v3").execute().collect());
        assertEquals("[1,key1,3, 2,key2,1, 3,key3,2]", results.toString());
    } finally {
        tableEnv.executeSql("drop database db1 cascade");
    }
}
@Test
public void testWhitespacePartValue() throws Exception {
    // Partition values containing whitespace: trailing tab must be URL-escaped
    // (%09) in the partition directory name.
    TableEnvironment tableEnv = getTableEnvWithHiveCatalog();
    tableEnv.executeSql("create database db1");
    try {
        tableEnv.executeSql("create table db1.dest (x int) partitioned by (p string)");
        StatementSet stmtSet = tableEnv.createStatementSet();
        stmtSet.addInsertSql("insert into db1.dest select 1,' '");
        stmtSet.addInsertSql("insert into db1.dest select 2,'a \t'");
        stmtSet.execute().await();
        assertEquals("[p= , p=a %09]", hiveShell.executeQuery("show partitions db1.dest").toString());
    } finally {
        tableEnv.executeSql("drop database db1 cascade");
    }
}
/**
 * Writes to a text table with hive.exec.compress.output enabled and verifies
 * the compressed output is readable from both Hive and Flink.
 *
 * @param batch true to use the batch table environment, false for streaming
 */
private void testCompressTextTable(boolean batch) throws Exception {
    TableEnvironment tableEnv = batch ?
            getTableEnvWithHiveCatalog() :
            getStreamTableEnvWithHiveCatalog();
    tableEnv.executeSql("create database db1");
    try {
        tableEnv.executeSql("create table db1.src (x string,y string)");
        hiveShell.execute("create table db1.dest like db1.src");
        HiveTestUtils.createTextTableInserter(hiveShell, "db1", "src")
                .addRow(new Object[]{"a", "b"})
                .addRow(new Object[]{"c", "d"})
                .commit();
        // Enable output compression before the Flink write.
        hiveCatalog.getHiveConf().setBoolVar(HiveConf.ConfVars.COMPRESSRESULT, true);
        tableEnv.executeSql("insert into db1.dest select * from db1.src").await();
        List<String> expected = Arrays.asList("a\tb", "c\td");
        verifyHiveQueryResult("select * from db1.dest", expected);
        verifyFlinkQueryResult(tableEnv.sqlQuery("select * from db1.dest"), expected);
    } finally {
        tableEnv.executeSql("drop database db1 cascade");
    }
}
// Batch-mode variant of the compressed text table test.
@Test
public void testBatchCompressTextTable() throws Exception {
    testCompressTextTable(true);
}
// Streaming-mode variant of the compressed text table test.
@Test
public void testStreamCompressTextTable() throws Exception {
    testCompressTextTable(false);
}
/**
 * Verifies that both reading from and writing to a Hive ACID (transactional)
 * table fail with a {@link FlinkHiveException} carrying the expected message.
 *
 * <p>Fix: the previous version called {@code TableEnvUtil.execInsertSqlAndWaitResult},
 * which is not imported anywhere in this file and therefore does not compile.
 * Use {@code executeSql(...).await()} like every other test in this class; the
 * checked exceptions thrown by {@code await()} are handled by the surrounding
 * {@code catch (Exception e)} blocks.
 *
 * @param batch true to use the batch table environment, false for streaming
 */
private void testTransactionalTable(boolean batch) {
    TableEnvironment tableEnv = batch ?
            getTableEnvWithHiveCatalog() :
            getStreamTableEnvWithHiveCatalog();
    tableEnv.executeSql("create database db1");
    try {
        tableEnv.executeSql("create table db1.src (x string,y string)");
        hiveShell.execute("create table db1.dest (x string,y string) clustered by (x) into 3 buckets stored as orc tblproperties ('transactional'='true')");
        List<Exception> exceptions = new ArrayList<>();
        try {
            // reading from the ACID table should fail
            tableEnv.executeSql("insert into db1.src select * from db1.dest").await();
        } catch (Exception e) {
            exceptions.add(e);
        }
        try {
            // writing to the ACID table should fail
            tableEnv.executeSql("insert into db1.dest select * from db1.src").await();
        } catch (Exception e) {
            exceptions.add(e);
        }
        assertEquals(2, exceptions.size());
        exceptions.forEach(e -> {
            assertTrue(e instanceof FlinkHiveException);
            assertEquals("Reading or writing ACID table db1.dest is not supported.", e.getMessage());
        });
    } finally {
        tableEnv.executeSql("drop database db1 cascade");
    }
}
// Batch-mode variant of the ACID-table rejection test.
@Test
public void testBatchTransactionalTable() {
    testTransactionalTable(true);
}
// Streaming-mode variant of the ACID-table rejection test.
@Test
public void testStreamTransactionalTable() {
    testTransactionalTable(false);
}
@Test
public void testRegexSerDe() throws Exception {
    // Reads a table backed by RegexSerDe; the regex splits each line into an
    // int group and a string group separated by \u0001 (the text-table delimiter).
    TableEnvironment tableEnv = getTableEnvWithHiveCatalog();
    tableEnv.executeSql("create database db1");
    try {
        tableEnv.executeSql("create table db1.src (x int,y string) " +
                "row format serde 'org.apache.hadoop.hive.serde2.RegexSerDe' " +
                "with serdeproperties ('input.regex'='([\\\\d]+)\\u0001([\\\\S]+)')");
        HiveTestUtils.createTextTableInserter(hiveShell, "db1", "src")
                .addRow(new Object[]{1, "a"})
                .addRow(new Object[]{2, "ab"})
                .commit();
        assertEquals("[1,a, 2,ab]", CollectionUtil.iteratorToList(tableEnv.sqlQuery("select * from db1.src order by x").execute().collect()).toString());
    } finally {
        tableEnv.executeSql("drop database db1 cascade");
    }
}
@Test
public void testUpdatePartitionSD() throws Exception {
    // Changing the table's file format after a partition was written: the
    // overwrite must pick up the new storage descriptor (sequencefile), and
    // the rewritten partition must still be readable.
    TableEnvironment tableEnv = getTableEnvWithHiveCatalog();
    tableEnv.executeSql("create database db1");
    try {
        tableEnv.executeSql("create table db1.dest (x int) partitioned by (p string) stored as rcfile");
        tableEnv.executeSql("insert overwrite db1.dest partition (p='1') select 1").await();
        tableEnv.executeSql("alter table db1.dest set fileformat sequencefile");
        tableEnv.executeSql("insert overwrite db1.dest partition (p='1') select 1").await();
        assertEquals("[1,1]", CollectionUtil.iteratorToList(tableEnv.sqlQuery("select * from db1.dest").execute().collect()).toString());
    } finally {
        tableEnv.executeSql("drop database db1 cascade");
    }
}
@Test
public void testParquetNameMapping() throws Exception {
    // Two tables over the same parquet files with columns declared in a
    // different order: column resolution must be by name, not position.
    TableEnvironment tableEnv = getTableEnvWithHiveCatalog();
    tableEnv.executeSql("create database db1");
    try {
        tableEnv.executeSql("create table db1.t1 (x int,y int) stored as parquet");
        tableEnv.executeSql("insert into table db1.t1 values (1,10),(2,20)").await();
        Table hiveTable = hiveCatalog.getHiveTable(new ObjectPath("db1", "t1"));
        String location = hiveTable.getSd().getLocation();
        // t2 points at t1's files but swaps the declared column order.
        tableEnv.executeSql(String.format("create table db1.t2 (y int,x int) stored as parquet location '%s'", location));
        // Force the mapred reader, which is the code path under test.
        tableEnv.getConfig().getConfiguration().setBoolean(HiveOptions.TABLE_EXEC_HIVE_FALLBACK_MAPRED_READER, true);
        assertEquals("[1, 2]", CollectionUtil.iteratorToList(tableEnv.sqlQuery("select x from db1.t1").execute().collect()).toString());
        assertEquals("[1, 2]", CollectionUtil.iteratorToList(tableEnv.sqlQuery("select x from db1.t2").execute().collect()).toString());
    } finally {
        tableEnv.executeSql("drop database db1 cascade");
    }
}
@Test
public void testOrcSchemaEvol() throws Exception {
    // not supported until 2.1.0 -- https://issues.apache.org/jira/browse/HIVE-11981,
    // https://issues.apache.org/jira/browse/HIVE-13178
    Assume.assumeTrue(HiveVersionTestUtil.HIVE_210_OR_LATER);
    TableEnvironment tableEnv = getTableEnvWithHiveCatalog();
    tableEnv.executeSql("create database db1");
    try {
        tableEnv.executeSql("create table db1.src (x smallint,y int) stored as orc");
        hiveShell.execute("insert into table db1.src values (1,100),(2,200)");
        // Force the mapred reader, which is the code path under test.
        tableEnv.getConfig().getConfiguration().setBoolean(HiveOptions.TABLE_EXEC_HIVE_FALLBACK_MAPRED_READER, true);
        // Widen smallint -> int, then int -> string; old data must stay readable.
        tableEnv.executeSql("alter table db1.src change x x int");
        assertEquals("[1,100, 2,200]", CollectionUtil.iteratorToList(tableEnv.sqlQuery("select * from db1.src").execute().collect()).toString());
        tableEnv.executeSql("alter table db1.src change y y string");
        assertEquals("[1,100, 2,200]", CollectionUtil.iteratorToList(tableEnv.sqlQuery("select * from db1.src").execute().collect()).toString());
    } finally {
        tableEnv.executeSql("drop database db1 cascade");
    }
}
@Test
public void testNonExistingPartitionFolder() throws Exception {
    // Partitions registered in the metastore whose directory is missing on the
    // filesystem (deleted p=2, never-written p=3) must be skipped, not fail the scan.
    TableEnvironment tableEnv = getTableEnvWithHiveCatalog();
    tableEnv.executeSql("create database db1");
    try {
        tableEnv.executeSql("create table db1.part (x int) partitioned by (p int)");
        HiveTestUtils.createTextTableInserter(hiveShell, "db1", "part").addRow(new Object[]{1}).commit("p=1");
        HiveTestUtils.createTextTableInserter(hiveShell, "db1", "part").addRow(new Object[]{2}).commit("p=2");
        tableEnv.executeSql("alter table db1.part add partition (p=3)");
        // remove one partition
        Path toRemove = new Path(hiveCatalog.getHiveTable(new ObjectPath("db1", "part")).getSd().getLocation(), "p=2");
        FileSystem fs = toRemove.getFileSystem(hiveShell.getHiveConf());
        fs.delete(toRemove, true);
        List<Row> results = CollectionUtil.iteratorToList(tableEnv.sqlQuery("select * from db1.part").execute().collect());
        assertEquals("[1,1]", results.toString());
    } finally {
        tableEnv.executeSql("drop database db1 cascade");
    }
}
@Test
public void testInsertPartitionWithStarSource() throws Exception {
    // SELECT * source feeding an insert with a static partition spec (p1=1):
    // src's columns map to x and the dynamic partition column p2.
    TableEnvironment tableEnv = getTableEnvWithHiveCatalog();
    tableEnv.executeSql("create table src (x int,y string)");
    HiveTestUtils.createTextTableInserter(
            hiveShell,
            "default",
            "src")
            .addRow(new Object[]{1, "a"})
            .commit();
    tableEnv.executeSql("create table dest (x int) partitioned by (p1 int,p2 string)");
    tableEnv.executeSql("insert into dest partition (p1=1) select * from src").await();
    List<Row> results = CollectionUtil.iteratorToList(tableEnv.sqlQuery("select * from dest").execute().collect());
    assertEquals("[1,1,a]", results.toString());
    // Clean up: this test uses the default database, so drop tables explicitly.
    tableEnv.executeSql("drop table if exists src");
    tableEnv.executeSql("drop table if exists dest");
}
@Test
public void testInsertPartitionWithValuesSource() throws Exception {
    // Static partition spec (p1=1) combined with a VALUES source supplying the
    // remaining column x and the dynamic partition column p2.
    TableEnvironment tableEnv = getTableEnvWithHiveCatalog();
    tableEnv.executeSql("create table dest (x int) partitioned by (p1 int,p2 string)");
    tableEnv.executeSql("insert into dest partition (p1=1) values(1, 'a')").await();
    List<Row> results = CollectionUtil.iteratorToList(tableEnv.sqlQuery("select * from dest").execute().collect());
    assertEquals("[1,1,a]", results.toString());
    // Clean up: this test uses the default database, so drop the table explicitly.
    tableEnv.executeSql("drop table if exists dest");
}
private TableEnvironment getTableEnvWithHiveCatalog() {
    // Batch-mode blink-planner environment with the shared Hive catalog
    // registered and made the current catalog.
    final TableEnvironment env = HiveTestUtils.createTableEnvWithBlinkPlannerBatchMode(SqlDialect.HIVE);
    env.registerCatalog(hiveCatalog.getName(), hiveCatalog);
    env.useCatalog(hiveCatalog.getName());
    return env;
}
private TableEnvironment getStreamTableEnvWithHiveCatalog() {
    // Streaming-mode blink-planner environment with the shared Hive catalog
    // registered and made the current catalog.
    final StreamExecutionEnvironment execEnv = StreamExecutionEnvironment.getExecutionEnvironment();
    final TableEnvironment env = HiveTestUtils.createTableEnvWithBlinkPlannerStreamMode(execEnv, SqlDialect.HIVE);
    env.registerCatalog(hiveCatalog.getName(), hiveCatalog);
    env.useCatalog(hiveCatalog.getName());
    return env;
}
private void verifyHiveQueryResult(String query, List<String> expected) {
    // Order-insensitive comparison: the row counts must match and the row
    // sets must be equal.
    final List<String> actual = hiveShell.executeQuery(query);
    assertEquals(expected.size(), actual.size());
    assertEquals(new HashSet<>(expected), new HashSet<>(actual));
}
private void verifyFlinkQueryResult(org.apache.flink.table.api.Table table, List<String> expected) throws Exception {
    // Materialize the table and render each row as tab-separated fields,
    // converting LocalDateTime values to Timestamp so the text form matches
    // Hive's output. Comparison is order-insensitive.
    final List<Row> rows = CollectionUtil.iteratorToList(table.execute().collect());
    final List<String> results = new java.util.ArrayList<>();
    for (Row row : rows) {
        final StringBuilder line = new StringBuilder();
        for (int i = 0; i < row.getArity(); i++) {
            if (i > 0) {
                line.append("\t");
            }
            final Object field = row.getField(i);
            final Object rendered = field instanceof LocalDateTime
                    ? Timestamp.valueOf((LocalDateTime) field)
                    : field;
            line.append(rendered.toString());
        }
        results.add(line.toString());
    }
    assertEquals(expected.size(), results.size());
    assertEquals(new HashSet<>(expected), new HashSet<>(results));
}
}
|
[hotfix][FLINK-19183][hive] Fix compile error by TableEnvHiveConnectorITCase.testTransactionalTable
|
flink-connectors/flink-connector-hive/src/test/java/org/apache/flink/connectors/hive/TableEnvHiveConnectorITCase.java
|
[hotfix][FLINK-19183][hive] Fix compile error by TableEnvHiveConnectorITCase.testTransactionalTable
|
|
Java
|
apache-2.0
|
cf357dc5be777bfebdc9d7a9a48ab6c7dca900ec
| 0
|
apache/wicket,dashorst/wicket,aldaris/wicket,freiheit-com/wicket,selckin/wicket,selckin/wicket,AlienQueen/wicket,klopfdreh/wicket,dashorst/wicket,mafulafunk/wicket,martin-g/wicket-osgi,mosoft521/wicket,aldaris/wicket,Servoy/wicket,selckin/wicket,mosoft521/wicket,topicusonderwijs/wicket,zwsong/wicket,topicusonderwijs/wicket,topicusonderwijs/wicket,selckin/wicket,bitstorm/wicket,Servoy/wicket,aldaris/wicket,aldaris/wicket,mafulafunk/wicket,AlienQueen/wicket,klopfdreh/wicket,freiheit-com/wicket,dashorst/wicket,zwsong/wicket,mosoft521/wicket,klopfdreh/wicket,bitstorm/wicket,astrapi69/wicket,AlienQueen/wicket,selckin/wicket,dashorst/wicket,AlienQueen/wicket,bitstorm/wicket,freiheit-com/wicket,Servoy/wicket,mafulafunk/wicket,mosoft521/wicket,freiheit-com/wicket,bitstorm/wicket,astrapi69/wicket,zwsong/wicket,Servoy/wicket,aldaris/wicket,topicusonderwijs/wicket,dashorst/wicket,freiheit-com/wicket,astrapi69/wicket,topicusonderwijs/wicket,bitstorm/wicket,zwsong/wicket,apache/wicket,apache/wicket,klopfdreh/wicket,astrapi69/wicket,klopfdreh/wicket,martin-g/wicket-osgi,Servoy/wicket,AlienQueen/wicket,apache/wicket,mosoft521/wicket,apache/wicket,martin-g/wicket-osgi
|
/*
* $Id$ $Revision$ $Date:
* 2006-05-26 07:46:36 +0200 (vr, 26 mei 2006) $
*
* ==============================================================================
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package wicket.markup.html.form;
import java.io.Serializable;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Locale;
import java.util.Map;
import java.util.StringTokenizer;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import wicket.Component;
import wicket.IRequestTarget;
import wicket.MarkupContainer;
import wicket.Page;
import wicket.PageParameters;
import wicket.Request;
import wicket.RequestCycle;
import wicket.WicketRuntimeException;
import wicket.markup.ComponentTag;
import wicket.markup.MarkupStream;
import wicket.markup.html.WebMarkupContainer;
import wicket.markup.html.border.Border;
import wicket.markup.html.form.persistence.CookieValuePersister;
import wicket.markup.html.form.persistence.IValuePersister;
import wicket.markup.html.form.validation.IFormValidator;
import wicket.model.IModel;
import wicket.model.Model;
import wicket.protocol.http.WebRequest;
import wicket.protocol.http.WebRequestCycle;
import wicket.protocol.http.request.WebClientInfo;
import wicket.protocol.http.request.WebRequestCodingStrategy;
import wicket.request.IRequestCycleProcessor;
import wicket.request.RequestParameters;
import wicket.request.target.component.BookmarkableListenerInterfaceRequestTarget;
import wicket.request.target.component.listener.ListenerInterfaceRequestTarget;
import wicket.util.lang.Bytes;
import wicket.util.string.AppendingStringBuffer;
import wicket.util.string.Strings;
import wicket.util.string.interpolator.MapVariableInterpolator;
import wicket.util.upload.FileUploadException;
import wicket.util.upload.FileUploadBase.SizeLimitExceededException;
/**
* Base class for forms. To implement a form, subclass this class, add
* FormComponents (such as CheckBoxes, ListChoices or TextFields) to the form.
* You can nest multiple buttons if you want to vary submit behavior. However,
* it is not necessary to use Wicket's button class, just putting e.g. <input
* type="submit" value="go"> suffices.
* <p>
* By default, the processing of a form works like this:
* <li> The submitting button is looked up. A submitting button is a button that
* is nested in this form (is a child component) and that was clicked by the
 * user. If a submitting button was found, and it has the defaultFormProcessing
 * field set to false (default is true), its onSubmit method will be called
 * right away, thus no validation is done, and things like updating form
 * component models that would normally be done are skipped. In that respect,
* nesting a button with the defaultFormProcessing field set to false has the
* same effect as nesting a normal link. If you want you can call validate() to
* execute form validation, hasError() to find out whether validate() resulted
* in validation errors, and updateFormComponentModels() to update the models of
* nested form components. </li>
* <li> When no submitting button with defaultFormProcessing set to false was
* found, this form is processed (method process()). Now, two possible paths
* exist:
* <ul>
* <li> Form validation failed. All nested form components will be marked
* invalid, and onError() is called to allow clients to provide custom error
* handling code. </li>
* <li> Form validation succeeded. The nested components will be asked to update
* their models and persist their data is applicable. After that, method
* delegateSubmit with optionally the submitting button is called. The default
* when there is a submitting button is to first call onSubmit on that button,
* and after that call onSubmit on this form. Clients may override
* delegateSubmit if they want different behavior. </li>
* </ul>
* </li>
* </li>
* </p>
*
 * Form for handling (file) uploads with multipart requests is supported by
 * calling setMultiPart(true) (although Wicket will try to automatically detect
 * this for you). Use this with
 * {@link wicket.markup.html.form.upload.FileUploadField} components. You can
 * attach multiple FileUploadField components for multiple file uploads.
* <p>
* In case of an upload error two resource keys are available to specify error
* messages: uploadTooLarge and uploadFailed
*
* ie in [page].properties
*
* [form-id].uploadTooLarge=You have uploaded a file that is over the allowed
* limit of 2Mb
*
* <p>
* If you want to have multiple buttons which submit the same form, simply put
* two or more button components somewhere in the hierarchy of components that
* are children of the form.
* </p>
* <p>
* To get form components to persist their values for users via cookies, simply
* call setPersistent(true) on the form component.
* </p>
*
* @param <T>
* Type of model object this component holds
*
* @author Jonathan Locke
* @author Juergen Donnerstag
* @author Eelco Hillenius
* @author Cameron Braid
* @author Johan Compagner
* @author Igor Vaynberg (ivaynberg)
*/
public class Form<T> extends WebMarkupContainer<T> implements IFormSubmitListener
{
/**
 * Name suffix of the hidden "fake submit" field; see
 * {@link #getHiddenFieldId(String)} for how the full field id is built.
 */
public static final String HIDDEN_FIELD_FAKE_SUBMIT = "fs";

/**
 * Name suffix of the hidden "wicket state" field; see
 * {@link #getHiddenFieldId(String)} for how the full field id is built.
 */
public static final String HIDDEN_FIELD_WICKET_STATE = "ws";
/**
 * Visitor used for validation. Only components that are visible in the
 * hierarchy, currently valid, enabled and allowed to be enabled are passed
 * to {@link #validate(FormComponent)}; all others are skipped.
 *
 * @author Igor Vaynberg (ivaynberg)
 */
private static abstract class ValidationVisitor implements FormComponent.IVisitor
{
	/**
	 * @see wicket.markup.html.form.FormComponent.IVisitor#formComponent(wicket.markup.html.form.FormComponent)
	 */
	public void formComponent(FormComponent formComponent)
	{
		// Skip hidden, already-invalid or disabled components.
		if (formComponent.isVisibleInHierarchy() && formComponent.isValid()
			&& formComponent.isEnabled() && formComponent.isEnableAllowed())
		{
			validate(formComponent);
		}
	}

	/**
	 * Callback that should be used to validate the given form component.
	 *
	 * @param formComponent
	 *            the component to validate
	 */
	public abstract void validate(FormComponent formComponent);
}
/** Resource key (looked up as "[form-id].uploadTooLarge") for the size-limit error message. */
private static final String UPLOAD_TOO_LARGE_RESOURCE_KEY = "uploadTooLarge";

/** Resource key (looked up as "[form-id].uploadFailed") for the generic upload error message. */
private static final String UPLOAD_FAILED_RESOURCE_KEY = "uploadFailed";

/** Flag that indicates this form has been submitted during this request */
private static final short FLAG_SUBMITTED = FLAG_RESERVED1;

private static final long serialVersionUID = 1L;

/** Log. */
private static final Log log = LogFactory.getLog(Form.class);

/** Maximum size of an upload in bytes; larger uploads are rejected with an error. */
private Bytes maxSize = Bytes.MAX;

/** True if the form has enctype of multipart/form-data */
private boolean multiPart = false;

// Opaque storage for form-level validators, accessed via the
// formValidators_size()/formValidators_get(int) helpers (defined elsewhere
// in this class — presumably null / single validator / array; confirm there).
private Object formValidators = null;

/**
 * Any default button. If set, a hidden submit button will be rendered right
 * after the form tag, so that when users press enter in a textfield, this
 * button's action will be selected. If no default button is set, nothing
 * additional is rendered.
 * <p>
 * WARNING: note that this is a best effort only. Unfortunately having a
 * 'default' button in a form is ill defined in the standards, and of course
 * IE has its own way of doing things.
 * </p>
 */
private Button defaultButton;

// When true, onComponentTag()/isStateless() attempt to render a bookmarkable
// action url for stateless pages that expose a default or PageParameters
// constructor.
private boolean bookmarkableHint;
/**
 * Constructs a form with no validation and no model.
 *
 * @param parent
 *            The parent of this component
 * @param id
 *            See Component
 */
public Form(MarkupContainer parent, final String id)
{
	super(parent, id);
}
/**
 * Constructs a form that uses the given model.
 *
 * @param parent
 *            The parent of this component
 * @param id
 *            See Component
 * @param model
 *            See Component
 * @see wicket.Component#Component(MarkupContainer,String, IModel)
 */
public Form(MarkupContainer parent, final String id, IModel<T> model)
{
	super(parent, id, model);
}
/**
 * Returns the button currently acting as this form's default button, or
 * {@code null} if none has been set. When set, a hidden submit button is
 * rendered right after the form tag so that pressing enter in a textfield
 * selects that button's action.
 * <p>
 * WARNING: this is a best effort only; default buttons are ill defined in
 * the HTML standards and IE behaves differently from other browsers.
 * </p>
 *
 * @return the default button, or {@code null} when no default button is set
 */
public final Button getDefaultButton()
{
	return this.defaultButton;
}
/**
 * Returns the maximum allowed size of an upload.
 *
 * @return the maxSize of uploaded files
 */
public Bytes getMaxSize()
{
	return maxSize;
}
/**
 * THIS METHOD IS NOT PART OF THE WICKET PUBLIC API. DO NOT CALL IT.
 * <p>
 * Retrieves FormComponent values related to the page using the persister
 * and assigns the values to the FormComponents, thus initializing them.
 */
public final void loadPersistentFormComponentValues()
{
	visitFormComponents(new FormComponent.IVisitor()
	{
		public void formComponent(final FormComponent formComponent)
		{
			// Component must implement persister interface and
			// persistence for that component must be enabled.
			// Else ignore the persisted value. It'll be deleted
			// once the user submits the Form containing that FormComponent.
			// Note: if that is true, values may remain persisted longer
			// than really necessary
			if (formComponent.isVisibleInHierarchy() && formComponent.isPersistent())
			{
				// The persister (cookie-based by default; see getValuePersister(),
				// deliberately re-resolved per component so overrides take effect)
				final IValuePersister persister = getValuePersister();

				// Retrieve persisted value
				persister.load(formComponent);
			}
		}
	});
}
/**
 * THIS METHOD IS NOT PART OF THE WICKET API. DO NOT ATTEMPT TO OVERRIDE OR
 * CALL IT.
 *
 * Handles form submissions: marks the form submitted, handles multipart
 * parsing, dispatches "fake submits" coming from anchors, honors buttons
 * with defaultFormProcessing == false, and otherwise runs the full
 * process()/delegateSubmit() cycle.
 *
 * @see Form#validate()
 */
public final void onFormSubmitted()
{
	setFlag(FLAG_SUBMITTED, true);

	// Only continue when this is not a failed multipart upload
	if (handleMultiPart())
	{
		// Tells FormComponents that a new user input has come
		inputChanged();

		// A non-empty fake-submit parameter means an anchor submitted the
		// form; dispatch to the recorded url instead of normal processing.
		String url = getRequest().getParameter(getHiddenFieldId(HIDDEN_FIELD_FAKE_SUBMIT));
		if (!Strings.isEmpty(url))
		{
			dispatchEvent(getPage(), url);
		}
		else
		{
			// First, see if the processing was triggered by a Wicket button
			final Button submittingButton = findSubmittingButton();

			// When processing was triggered by a Wicket button and that
			// button indicates it wants to be called immediately
			// (without processing), call Button.onSubmit() right away.
			if (submittingButton != null && !submittingButton.getDefaultFormProcessing())
			{
				submittingButton.onSubmit();
			}
			else
			{
				// process the form for this request
				if (process())
				{
					// let clients handle further processing
					delegateSubmit(submittingButton);
				}
			}
		}
	}
}
/**
 * Checks if this form has been submitted during the current request. The
 * flag is set in onFormSubmitted() and cleared again in internalOnDetach().
 *
 * @return true if the form has been submitted during this request, false
 *         otherwise
 */
public final boolean isSubmitted()
{
	return getFlag(FLAG_SUBMITTED);
}
/**
 * Clears the "submitted" flag at the end of the request.
 *
 * @see wicket.Component#internalOnDetach()
 */
@Override
protected void internalOnDetach()
{
	super.internalOnDetach();
	setFlag(FLAG_SUBMITTED, false);
}
/**
 * Removes already persisted data for all FormComponent children and
 * optionally disables persistence for the same components.
 *
 * @see Page#removePersistedFormData(Class, boolean)
 *
 * @param disablePersistence
 *            if true, disable persistence for all FormComponents on that
 *            page. If false, it will remain unchanged.
 */
public void removePersistentFormComponentValues(final boolean disablePersistence)
{
	// The persistence manager responsible to persist and retrieve
	// FormComponent data
	final IValuePersister persister = getValuePersister();

	// Search for FormComponents like TextField etc.
	visitFormComponents(new FormComponent.IVisitor()
	{
		public void formComponent(final FormComponent formComponent)
		{
			if (formComponent.isVisibleInHierarchy())
			{
				// remove the FormComponent's persisted data
				persister.clear(formComponent);

				// Disable persistence if requested. Leave unchanged
				// otherwise.
				if (formComponent.isPersistent() && disablePersistence)
				{
					formComponent.setPersistent(false);
				}
			}
		}
	});
}
/**
 * Sets the default button. If set (not null), a hidden submit button will
 * be rendered right after the form tag, so that when users press enter in a
 * textfield, this button's action will be selected. If no default button is
 * set (so unset by calling this method with null), nothing additional is
 * rendered.
 * <p>
 * WARNING: note that this is a best effort only. Unfortunately having a
 * 'default' button in a form is ill defined in the standards, and of course
 * IE has its own way of doing things.
 * </p>
 *
 * @param button
 *            The button to set as the default button, or null when you want
 *            to 'unset' any previously set default button
 */
public final void setDefaultButton(Button button)
{
	this.defaultButton = button;
}
/**
 * Sets the maximum allowed size of an upload; see handleMultiPart() for the
 * error reporting when the limit is exceeded.
 *
 * @param maxSize
 *            The maxSize for uploaded files
 */
public void setMaxSize(final Bytes maxSize)
{
	this.maxSize = maxSize;
}
/**
 * Set to true to use enctype='multipart/form-data' and to process file
 * uploads. By default multiPart = false; note that onRender() also switches
 * this on automatically when a nested form component is multipart.
 *
 * @param multiPart
 *            whether this form should behave as a multipart form
 */
public void setMultiPart(boolean multiPart)
{
	this.multiPart = multiPart;
}
/**
 * Sets versioning on this form and propagates the same setting to every
 * nested form component.
 *
 * @see wicket.Component#setVersioned(boolean)
 */
@Override
public final Component setVersioned(final boolean isVersioned)
{
	super.setVersioned(isVersioned);

	// Search for FormComponents like TextField etc.
	visitFormComponents(new FormComponent.IVisitor()
	{
		public void formComponent(final FormComponent formComponent)
		{
			formComponent.setVersioned(isVersioned);
		}
	});
	return this;
}
/**
 * Method made final because we want to ensure users call setVersioned
 * (which also propagates to the nested form components).
 *
 * @see wicket.Component#isVersioned()
 */
@Override
public boolean isVersioned()
{
	return super.isVersioned();
}
/**
 * Convenient and typesafe way to visit all the form components on a form.
 * Also visits FormComponents that are siblings inside an enclosing Border
 * (see the TODO below).
 *
 * @param visitor
 *            The visitor interface to call
 */
public final void visitFormComponents(final FormComponent.IVisitor visitor)
{
	visitChildren(FormComponent.class, new IVisitor()
	{
		public Object component(final Component component)
		{
			visitor.formComponent((FormComponent)component);
			return CONTINUE_TRAVERSAL;
		}
	});

	/**
	 * TODO Post 1.2 General: Maybe we should re-think how Borders are
	 * implemented, because there are just too many exceptions in the code
	 * base because of borders. This time it is to solve the problem tested
	 * in BoxBorderTestPage_3 where the Form is defined in the box border
	 * and the FormComponents are in the "body". Thus, the formComponents
	 * are not childs of the form. They are rather childs of the border, as
	 * the Form itself.
	 */
	if (getParent() instanceof Border)
	{
		MarkupContainer border = getParent();
		Iterator iter = border.iterator();
		while (iter.hasNext())
		{
			// NOTE: only direct children of the border are considered here,
			// not nested containers.
			Component child = (Component)iter.next();
			if (child instanceof FormComponent)
			{
				visitor.formComponent((FormComponent)child);
			}
		}
	}
}
/**
 * If a default button was set on this form, this method will be called to
 * render an extra submit button with an invisible style so that pressing
 * enter in one of the textfields will do a form submit using this button.
 * This method is overridable as what we do is best effort only, and may not
 * be what you want in specific situations. So if you have specific
 * usability concerns, or want to follow another strategy, you may override
 * this method.
 *
 * @param markupStream
 *            The markup stream
 * @param openTag
 *            The open tag for the body
 */
protected void appendDefaultButtonField(final MarkupStream markupStream,
		final ComponentTag openTag)
{
	AppendingStringBuffer buffer = new AppendingStringBuffer();

	// get the value, first seeing whether the value attribute is set
	// by a model
	String value = defaultButton.getModelObjectAsString();
	if (value == null || "".equals(value))
	{
		// nope it isn't; try to read from the attributes
		// note that we're only trying lower case here
		value = defaultButton.getMarkupAttributes().getString("value");
	}

	// append the button
	String userAgent = ((WebClientInfo)getSession().getClientInfo()).getUserAgent();
	buffer.append("<input type=\"submit\" value=\"").append(value).append("\" name=\"").append(
			defaultButton.getInputName()).append("\"");
	if (userAgent != null && userAgent.indexOf("MSIE") != -1)
	{
		// Collapse the width for IE instead of display:none — presumably
		// because hidden submit buttons misbehave there (TODO confirm).
		// Fixed: added the leading space; previously the attribute fused
		// with the preceding name="..." attribute.
		buffer.append(" style=\"width: 0px\"");
	}
	else
	{
		buffer.append(" style=\"display: none\"");
	}
	// Fixed: previously appended "\" />", emitting a stray double quote
	// after the style attribute and producing invalid markup.
	buffer.append(" />");
	getResponse().write(buffer);
}
/**
 * Template method to allow clients to do any processing (like recording the
 * current model so that, in case onSubmit does further validation, the
 * model can be rolled back) before the actual updating of form component
 * models is done. Does nothing by default.
 */
protected void beforeUpdateFormComponentModels()
{
}
/**
 * Called (by the default implementation of 'process') when all fields
 * validated, the form was updated and its data was allowed to be persisted.
 * It is meant for delegating further processing to clients.
 * <p>
 * This implementation first finds out whether the form processing was
 * triggered by a nested button of this form. If that is the case, that
 * button's onSubmit is called first.
 * </p>
 * <p>
 * Regardless of whether a submitting button was found, the form's onSubmit
 * method is called next.
 * </p>
 *
 * @param submittingButton
 *            the button that triggered this form processing, or null if the
 *            processing was triggered by something else (like a non-Wicket
 *            submit button or a javascript execution)
 */
protected void delegateSubmit(Button submittingButton)
{
	// when the given button is not null, it means that it was the
	// submitting button
	if (submittingButton != null)
	{
		submittingButton.onSubmit();
	}

	// Model was successfully updated with valid data
	onSubmit();
}
/**
 * Gets the button which submitted this form.
 *
 * @return The button which submitted this form or null if the processing
 *         was not triggered by a registered button component
 */
protected final Button findSubmittingButton()
{
	// First look for a nested Button whose request parameter (or its ".x"
	// variant for image buttons) is present.
	Button button = (Button)visitChildren(Button.class, new IVisitor()
	{
		public Object component(final Component component)
		{
			// Get button
			final Button button = (Button)component;

			// Check for button-name or button-name.x request string
			if (getRequest().getParameter(button.getInputName()) != null
				|| getRequest().getParameter(button.getInputName() + ".x") != null)
			{
				if (!button.isVisible())
				{
					throw new WicketRuntimeException("Submit Button " + button.getInputName()
						+ " (path=" + button.getPageRelativePath() + ") is not visible");
				}
				return button;
			}
			return CONTINUE_TRAVERSAL;
		}
	});

	// No nested button matched: fall back to SubmitLinks anywhere on the
	// page that target this form.
	if (button == null)
	{
		button = (Button)getPage().visitChildren(SubmitLink.class, new IVisitor()
		{
			public Object component(final Component component)
			{
				// Get button
				final SubmitLink button = (SubmitLink)component;

				// Check for button-name or button-name.x request string
				if (button.getForm() == Form.this
					&& (getRequest().getParameter(button.getInputName()) != null || getRequest()
						.getParameter(button.getInputName() + ".x") != null))
				{
					if (!button.isVisible())
					{
						throw new WicketRuntimeException("Submit Button is not visible");
					}
					return button;
				}
				return CONTINUE_TRAVERSAL;
			}
		});
	}
	return button;
}
/**
 * Gets the form component persistence manager; it is lazily created on each
 * call. Override to plug in a different persistence strategy.
 *
 * @return The form component value persister (cookie-based by default)
 */
protected IValuePersister getValuePersister()
{
	return new CookieValuePersister();
}
/**
 * Gets whether the current form has any error registered, either on the
 * form itself or on any of its nested form components.
 *
 * @return True if this form has at least one error.
 */
protected final boolean hasError()
{
	// Check the form's own error message first; only scan the nested
	// components when the form itself is clean (same short-circuit order
	// as the original two-step check).
	return hasErrorMessage() || anyFormComponentError();
}
/**
 * Propagates a model change on the form to every nested form component that
 * shares the form's root model.
 *
 * @see wicket.Component#internalOnModelChanged()
 */
@Override
protected void internalOnModelChanged()
{
	// Visit all the form components and validate each
	visitFormComponents(new FormComponent.IVisitor()
	{
		public void formComponent(final FormComponent formComponent)
		{
			// If form component is using form model
			if (formComponent.sameRootModel(Form.this))
			{
				formComponent.modelChanged();
			}
		}
	});
}
/**
 * Mark each visible form component on this form invalid.
 */
protected final void markFormComponentsInvalid()
{
	// call invalid methods of all nested form components
	visitFormComponents(new FormComponent.IVisitor()
	{
		public void formComponent(final FormComponent formComponent)
		{
			if (formComponent.isVisibleInHierarchy())
			{
				formComponent.invalid();
			}
		}
	});
}
/**
 * Mark each visible form component on this form valid.
 */
protected final void markFormComponentsValid()
{
	// call valid methods of all nested form components
	// (the original comment said "invalidate", a copy-paste slip)
	visitFormComponents(new FormComponent.IVisitor()
	{
		public void formComponent(final FormComponent formComponent)
		{
			if (formComponent.isVisibleInHierarchy())
			{
				formComponent.valid();
			}
		}
	});
}
/**
 * Returns the HiddenFieldId which will be used as the name and id property
 * of the hiddenfield that is generated by this Form. The only 2 fields that
 * are supported now are 'fs' (fake submits, HIDDEN_FIELD_FAKE_SUBMIT) and
 * 'ws' (wicket state, HIDDEN_FIELD_WICKET_STATE).
 *
 * @param hiddenFieldName
 *            The name of the hiddenfield
 *
 * @return The name and id of the hidden field, namespaced under this
 *         form's markup id.
 */
public final String getHiddenFieldId(String hiddenFieldName)
{
	return getMarkupId() + ":hf:" + hiddenFieldName;
}
/**
 * Appends additional hidden input tags (the fake-submit field and the
 * wicket-state field) to support anchor tags that can submit a form, and
 * renders the invisible default-button field when a default button is set.
 *
 * @param markupStream
 *            The markup stream
 * @param openTag
 *            The open tag for the body
 */
@Override
protected void onComponentTagBody(final MarkupStream markupStream, final ComponentTag openTag)
{
	// Hidden field that anchors fill in to fake a form submit; read back in
	// onFormSubmitted().
	String fs = getHiddenFieldId(HIDDEN_FIELD_FAKE_SUBMIT);
	getResponse().write(
		new AppendingStringBuffer(
			"\n<div style=\"display:none\"><input type=\"hidden\" name=\"").append(fs)
			.append("\" id=\"").append(fs).append("\"/>"));

	// NOTE(review): this field is written with the fixed request name
	// "wicketState" but an id derived from HIDDEN_FIELD_WICKET_STATE ("ws")
	// — confirm that server-side readers use the fixed name, not the id.
	String ws = getHiddenFieldId(HIDDEN_FIELD_WICKET_STATE);
	getResponse().write(
		new AppendingStringBuffer("\n<input type=\"hidden\" name=\"wicketState\" id=\"")
			.append(ws).append("\"/></div>"));

	// if a default button was set, handle the rendering of that
	if (defaultButton != null && defaultButton.isVisibleInHierarchy()
		&& defaultButton.isEnabled())
	{
		appendDefaultButtonField(markupStream, openTag);
	}

	// do the rest of the processing
	super.onComponentTagBody(markupStream, openTag);
}
/**
 * Writes this form's markup attributes: forces method="post", renders a
 * bookmarkable or listener-interface action url, and switches on multipart
 * encoding when required by either the Java side or the markup.
 *
 * @see wicket.Component#onComponentTag(ComponentTag)
 */
@Override
protected void onComponentTag(final ComponentTag tag)
{
	checkComponentTag(tag, "form");
	setOutputMarkupId(true);
	super.onComponentTag(tag);
	// If the javascript id is already generated then use that one, even if
	// it was generated before the first render, because there could be a
	// component which already uses it to submit the form. This should be
	// fixed when we pre-parse the markup so that we know the id up front.
	tag.put("method", "post");
	Page page = getPage();
	boolean addAction = true;
	if (bookmarkableHint && page.isStateless())
	{
		try
		{
			// Only pages with a default or PageParameters constructor can
			// be re-instantiated from a bookmarkable url.
			if (page.getClass().getConstructor((Class[])null) != null
				|| page.getClass().getConstructor(new Class[] { PageParameters.class }) != null)
			{
				BookmarkableListenerInterfaceRequestTarget target = new BookmarkableListenerInterfaceRequestTarget(
					page.getPageMap().getName(), page.getClass(), new PageParameters(), this,
					IFormSubmitListener.INTERFACE);
				tag.put("action", urlFor(target));
				addAction = false;
			}
		}
		catch (Exception e)
		{
			// ignore: no suitable constructor, fall through to the plain
			// listener-interface action url
		}
	}
	if (addAction)
	{
		// Escape ampersands so the action url is valid markup.
		// Fixed: the previous code replaced "&" with "&" — a no-op that
		// left raw ampersands in the attribute value.
		tag.put("action", Strings.replaceAll(urlFor(IFormSubmitListener.INTERFACE), "&",
			"&amp;"));
	}

	if (multiPart)
	{
		tag.put("enctype", "multipart/form-data");
	}
	else
	{
		// sanity check: the markup may declare multipart even when the
		// Java side didn't request it explicitly
		String enctype = (String)tag.getAttributes().get("enctype");
		if ("multipart/form-data".equalsIgnoreCase(enctype))
		{
			// though not set explicitly in Java, this is a multipart form
			setMultiPart(true);
		}
	}
}
/**
 * A form can only be stateless when the bookmarkable hint is set and its
 * page can be re-created from a bookmarkable url (i.e. it has a default or
 * PageParameters constructor); otherwise the form is stateful.
 *
 * NOTE(review): Class.getConstructor never returns null — it throws
 * NoSuchMethodException — so it is the catch block, not the "!= null"
 * checks, that routes "no suitable constructor" to the stateful path.
 *
 * @see wicket.MarkupContainer#isStateless()
 */
@Override
protected boolean isStateless()
{
	if (bookmarkableHint)
	{
		try
		{
			Page page = getPage();
			if (page.getClass().getConstructor((Class[])null) != null
				|| page.getClass().getConstructor(new Class[] { PageParameters.class }) != null)
			{
				return super.isStateless();
			}
		}
		catch (Exception e)
		{
			// ignore: no suitable constructor found
		}
	}
	return false;
}
/**
 * Method to override if you want to do something special when an error
 * occurs (other than simply displaying validation errors). Called from
 * process() after validation failed. Does nothing by default.
 */
protected void onError()
{
}
/**
 * Forces multipart encoding on when any visible child form component is
 * itself multipart (e.g. a file upload field), then renders normally.
 *
 * @see wicket.Component#onRender(MarkupStream)
 */
@Override
protected void onRender(final MarkupStream markupStream)
{
	// Force multi-part on if any child form component is multi-part
	visitFormComponents(new FormComponent.IVisitor()
	{
		public void formComponent(FormComponent formComponent)
		{
			if (formComponent.isVisible() && formComponent.isMultiPart())
			{
				setMultiPart(true);
			}
		}
	});

	super.onRender(markupStream);
}
/**
 * Implemented by subclasses to deal with form submits. Called from
 * delegateSubmit() after successful validation and model update. Does
 * nothing by default.
 */
protected void onSubmit()
{
}
/**
 * Process the form. Though you can override this method to provide your
 * whole own algorithm, it is not recommended to do so.
 * <p>
 * See the class documentation for further details on the form processing.
 * </p>
 *
 * @return False if the form had an error
 */
public boolean process()
{
	// Run every validation step (required, conversion, component
	// validators, form-level validators).
	validate();

	if (hasError())
	{
		// Validation failed: flag all nested components and let the
		// subclass react before reporting failure.
		markFormComponentsInvalid();
		onError();
		return false;
	}

	// Validation passed: mark components valid, let clients intercept,
	// push the submitted values into the models and persist if requested.
	markFormComponentsValid();
	beforeUpdateFormComponentModels();
	updateFormComponentModels();
	persistFormComponentData();
	return true;
}
/**
 * Update the model of all form components using the fields that were sent
 * with the current request. Only visible, enabled, valid components whose
 * enabled state is allowed are updated.
 *
 * @see wicket.markup.html.form.FormComponent#updateModel()
 */
protected final void updateFormComponentModels()
{
	visitFormComponents(new FormComponent.IVisitor()
	{
		public void formComponent(final FormComponent formComponent)
		{
			// Only update the component when it is visible and valid
			if (formComponent.isVisibleInHierarchy() && formComponent.isEnabled()
				&& formComponent.isValid() && formComponent.isEnableAllowed())
			{
				// Potentially update the model
				formComponent.updateModel();
			}
		}
	});
}
/**
 * Clears the input from the form's nested children of type
 * {@link FormComponent}. This method is typically called when a form needs
 * to be reset.
 */
public final void clearInput()
{
	// Visit all the (visible) form components and clear the input on each.
	visitFormComponents(new FormComponent.IVisitor()
	{
		public void formComponent(final FormComponent formComponent)
		{
			if (formComponent.isVisibleInHierarchy())
			{
				// Clear input from form component
				formComponent.clearInput();
			}
		}
	});
}
/**
 * Validates the form. This method is typically called before updating any
 * models. The steps run in a fixed order: required checks, type
 * conversion, per-component validators, then form-level validators.
 */
protected void validate()
{
	validateRequired();
	validateConversion();
	validateValidators();
	validateFormValidators();
}
/**
 * Triggers input-required validation on all form components (first step of
 * validate()).
 */
private void validateRequired()
{
	visitFormComponents(new ValidationVisitor()
	{
		@Override
		public void validate(final FormComponent formComponent)
		{
			formComponent.validateRequired();
		}
	});
}
/**
 * Triggers type conversion on form components (second step of validate()).
 */
private void validateConversion()
{
	visitFormComponents(new ValidationVisitor()
	{
		@Override
		public void validate(final FormComponent formComponent)
		{
			formComponent.convert();
		}
	});
}
/**
 * Triggers all IValidator validators added to the form components (third
 * step of validate()).
 */
private void validateValidators()
{
	visitFormComponents(new ValidationVisitor()
	{
		@Override
		public void validate(final FormComponent formComponent)
		{
			formComponent.validateValidators();
		}
	});
}
/**
 * Triggers any added {@link IFormValidator}s. A form validator is only run
 * when every form component it declares as a dependency is currently valid.
 * Access to the validators goes through the formValidators_size()/
 * formValidators_get(int) helpers defined elsewhere in this class.
 */
private void validateFormValidators()
{
	final int multiCount = formValidators_size();
	for (int i = 0; i < multiCount; i++)
	{
		final IFormValidator validator = formValidators_get(i);
		final FormComponent[] dependents = validator.getDependentFormComponents();

		// Skip this validator when any of its dependent components is
		// already invalid.
		boolean validate = true;

		if (dependents != null)
		{
			for (final FormComponent dependent : dependents)
			{
				if (!dependent.isValid())
				{
					validate = false;
					break;
				}
			}
		}

		if (validate)
		{
			validator.validate(this);
		}
	}
}
/**
 * Find out whether there is any registered error for a form component.
 *
 * @return whether there is any registered error for a form component
 */
private boolean anyFormComponentError()
{
	// Walk every child and stop the traversal as soon as one component
	// reports an error message.
	final Object outcome = visitChildren(new IVisitor()
	{
		public Object component(final Component component)
		{
			return component.hasErrorMessage() ? STOP_TRAVERSAL : CONTINUE_TRAVERSAL;
		}
	});
	return outcome == IVisitor.STOP_TRAVERSAL;
}
	/**
	 * Handles multipart processing of the current request: wraps the request
	 * in a multipart-aware one so uploaded files and parameters are parsed.
	 * Upload failures are reported as feedback error messages rather than
	 * propagated.
	 *
	 * @return False if form is multipart and upload failed
	 */
	private final boolean handleMultiPart()
	{
		if (multiPart)
		{
			// Change the request to a multipart web request so parameters are
			// parsed out correctly
			try
			{
				final WebRequest multipartWebRequest = ((WebRequest)getRequest())
						.newMultipartWebRequest(this.maxSize);
				getRequestCycle().setRequest(multipartWebRequest);
			}
			catch (WicketRuntimeException wre)
			{
				// Only upload problems are handled here; anything else is
				// rethrown unchanged.
				if (wre.getCause() == null || !(wre.getCause() instanceof FileUploadException))
				{
					throw wre;
				}
				FileUploadException e = (FileUploadException)wre.getCause();
				// Create model with exception and maximum size values
				final Map<String, Serializable> model = new HashMap<String, Serializable>();
				model.put("exception", e);
				model.put("maxSize", maxSize);
				if (e instanceof SizeLimitExceededException)
				{
					// Resource key should be <form-id>.uploadTooLarge to
					// override default message
					final String defaultValue = "Upload must be less than " + maxSize;
					String msg = getString(getId() + "." + UPLOAD_TOO_LARGE_RESOURCE_KEY, Model
							.valueOf(model), defaultValue);
					error(msg);
					// NOTE(review): the stack trace is only logged when debug
					// is enabled -- presumably to keep production logs short;
					// confirm this inversion is intentional.
					if (log.isDebugEnabled())
					{
						log.error(msg, e);
					}
					else
					{
						log.error(msg);
					}
				}
				else
				{
					// Resource key should be <form-id>.uploadFailed to override
					// default message
					final String defaultValue = "Upload failed: " + e.getLocalizedMessage();
					String msg = getString(getId() + "." + UPLOAD_FAILED_RESOURCE_KEY, Model
							.valueOf(model), defaultValue);
					error(msg);
					log.error(msg, e);
				}
				// don't process the form if there is a FileUploadException
				return false;
			}
		}
		return true;
	}
/**
* Persist (e.g. Cookie) FormComponent data to be reloaded and re-assigned
* to the FormComponent automatically when the page is visited by the user
* next time.
*
* @see wicket.markup.html.form.FormComponent#updateModel()
*/
private void persistFormComponentData()
{
// Cannot add cookies to request cycle unless it accepts them
// We could conceivably be HTML over some other protocol!
if (getRequestCycle() instanceof WebRequestCycle)
{
// The persistence manager responsible to persist and retrieve
// FormComponent data
final IValuePersister persister = getValuePersister();
// Search for FormComponent children. Ignore all other
visitFormComponents(new FormComponent.IVisitor()
{
public void formComponent(final FormComponent formComponent)
{
if (formComponent.isVisibleInHierarchy())
{
// If peristence is switched on for that FormComponent
// ...
if (formComponent.isPersistent())
{
// Save component's data (e.g. in a cookie)
persister.save(formComponent);
}
else
{
// Remove component's data (e.g. cookie)
persister.clear(formComponent);
}
}
}
});
}
}
	/**
	 * Method for dispatching/calling an interface on a page from the given
	 * url. Used by {@link wicket.markup.html.form.Form#onFormSubmitted()} for
	 * dispatching events.
	 *
	 * @param page
	 *            The page where the event should be called on.
	 * @param url
	 *            The url which describes the component path and the interface
	 *            to be called.
	 */
	private void dispatchEvent(final Page page, final String url)
	{
		RequestCycle rc = RequestCycle.get();
		IRequestCycleProcessor processor = rc.getProcessor();
		// Wrap the url in a FormDispatchRequest so the coding strategy can
		// decode the component path / interface from its query parameters.
		final RequestParameters requestParameters = processor.getRequestCodingStrategy().decode(
				new FormDispatchRequest(rc.getRequest(), url));
		IRequestTarget rt = processor.resolve(rc, requestParameters);
		// Only listener-interface targets can be dispatched this way.
		if (rt instanceof ListenerInterfaceRequestTarget)
		{
			ListenerInterfaceRequestTarget interfaceTarget = ((ListenerInterfaceRequestTarget)rt);
			interfaceTarget.getRequestListenerInterface().invoke(page, interfaceTarget.getTarget());
		}
		else
		{
			throw new WicketRuntimeException(
					"Attempt to access unknown request listener interface "
							+ requestParameters.getInterfaceName());
		}
	}
/**
* Visits the form's children FormComponents and inform them that a new user
* input is available in the Request
*/
private void inputChanged()
{
visitFormComponents(new FormComponent.IVisitor()
{
public void formComponent(final FormComponent formComponent)
{
if (formComponent.isVisibleInHierarchy())
{
formComponent.inputChanged();
}
}
});
}
/**
* This generates a piece of javascript code that sets the url in the
* special hidden field and submits the form.
*
* Warning: This code should only be called in the rendering phase for form
* components inside the form because it uses the css/javascript id of the
* form which can be stored in the markup.
*
* @param url
* The interface url that has to be stored in the hidden field
* and submitted
* @return The javascript code that submits the form.
*/
public final CharSequence getJsForInterfaceUrl(CharSequence url)
{
return new AppendingStringBuffer("document.getElementById('").append(
getHiddenFieldId(HIDDEN_FIELD_FAKE_SUBMIT)).append("').value='").append(url)
.append("';document.getElementById('").append(getMarkupId()).append(
"').submit();");
}
	/**
	 * A lightweight {@link Request} wrapper used by
	 * {@code dispatchEvent(Page, String)}: it re-targets the real request to a
	 * dispatch url whose query string is parsed eagerly in the constructor.
	 */
	class FormDispatchRequest extends Request
	{
		// The request this dispatch request delegates to for locale/path
		private final Request realRequest;
		// The decoded dispatch url
		private final String url;
		// Query parameters parsed from the dispatch url
		private final Map<String, Object> params = new HashMap<String, Object>(4);
		/**
		 * Construct.
		 *
		 * @param realRequest
		 *            the request being wrapped
		 * @param url
		 *            the (still encoded) dispatch url; its query string is
		 *            parsed into {@code params}
		 */
		public FormDispatchRequest(final Request realRequest, final String url)
		{
			this.realRequest = realRequest;
			this.url = realRequest.decodeURL(url);
			// NOTE(review): if the url contains no "?", indexOf returns -1 and
			// the whole url is treated as the query part -- confirm callers
			// always pass a url with a query string.
			String queryPart = this.url.substring(this.url.indexOf("?") + 1);
			StringTokenizer paramsSt = new StringTokenizer(queryPart, "&");
			while (paramsSt.hasMoreTokens())
			{
				String param = paramsSt.nextToken();
				int equalsSign = param.indexOf("=");
				if (equalsSign >= 0)
				{
					String paramName = param.substring(0, equalsSign);
					String value = param.substring(equalsSign + 1);
					params.put(paramName, value);
				}
				else
				{
					// Valueless parameter, e.g. "?flag"
					params.put(param, "");
				}
			}
		}
		/**
		 * @see wicket.Request#getLocale()
		 */
		@Override
		public Locale getLocale()
		{
			return realRequest.getLocale();
		}
		/**
		 * @see wicket.Request#getParameter(java.lang.String)
		 */
		@Override
		public String getParameter(String key)
		{
			return (String)params.get(key);
		}
		/**
		 * @see wicket.Request#getParameterMap()
		 */
		@Override
		public Map<String, Object> getParameterMap()
		{
			return params;
		}
		/**
		 * @see wicket.Request#getParameters(java.lang.String)
		 */
		@Override
		public String[] getParameters(String key)
		{
			String param = (String)params.get(key);
			if (param != null)
			{
				return new String[] { param };
			}
			return new String[0];
		}
		/**
		 * @see wicket.Request#getPath()
		 */
		@Override
		public String getPath()
		{
			return realRequest.getPath();
		}
		/**
		 * @see wicket.Request#getRelativeURL()
		 */
		@Override
		public String getRelativeURL()
		{
			// NOTE(review): assumes the url contains a "/" after position 0;
			// indexOf returning -1 would make substring throw -- verify.
			return url.substring(url.indexOf("/", 1));
		}
		/**
		 * @see wicket.Request#getURL()
		 */
		@Override
		public String getURL()
		{
			return url;
		}
	}
	/**
	 * Returns the prefix used when building validator keys. This allows a form
	 * to use a separate "set" of keys. For example if prefix "short" is
	 * returned, validator key short.RequiredValidator will be tried instead of
	 * RequiredValidator key.
	 * <p>
	 * This can be useful when different designs are used for a form. In a form
	 * where error messages are displayed next to their respective form
	 * components as opposed to at the top of the form, the ${label} attribute
	 * is of little use and only causes redundant information to appear in the
	 * message. Forms like these can return the "short" (or any other string)
	 * validator prefix and declare key: short.RequiredValidator=required to
	 * override the longer message which is usually declared like this:
	 * RequiredValidator=${label} is a required field
	 * <p>
	 * Returned prefix will be used for all form components. The prefix can also
	 * be overridden on form component level by overriding
	 * {@link FormComponent#getValidatorKeyPrefix()}
	 *
	 * @return prefix prepended to validator keys, or null for no prefix
	 */
	public String getValidatorKeyPrefix()
	{
		// No prefix by default; subclasses may override.
		return null;
	}
/**
* Adds a form validator to the form.
*
* @see IFormValidator
* @param validator
* validator
*/
public void add(IFormValidator validator)
{
if (validator == null)
{
throw new IllegalArgumentException("validator argument cannot be null");
}
formValidators_add(validator);
}
/**
* @param validator
* The form validator to add to the formValidators Object (which
* may be an array of IFormValidators or a single instance, for
* efficiency)
*/
private void formValidators_add(final IFormValidator validator)
{
if (this.formValidators == null)
{
this.formValidators = validator;
}
else
{
// Get current list size
final int size = formValidators_size();
// Create array that holds size + 1 elements
final IFormValidator[] validators = new IFormValidator[size + 1];
// Loop through existing validators copying them
for (int i = 0; i < size; i++)
{
validators[i] = formValidators_get(i);
}
// Add new validator to the end
validators[size] = validator;
// Save new validator list
this.formValidators = validators;
}
}
/**
* Gets form validator from formValidators Object (which may be an array of
* IFormValidators or a single instance, for efficiency) at the given index
*
* @param index
* The index of the validator to get
* @return The form validator
*/
private IFormValidator formValidators_get(int index)
{
if (this.formValidators == null)
{
throw new IndexOutOfBoundsException();
}
if (this.formValidators instanceof IFormValidator[])
{
return ((IFormValidator[])formValidators)[index];
}
return (IFormValidator)formValidators;
}
/**
* @return The number of form validators in the formValidators Object (which
* may be an array of IFormValidators or a single instance, for
* efficiency)
*/
private int formValidators_size()
{
if (this.formValidators == null)
{
return 0;
}
if (this.formValidators instanceof IFormValidator[])
{
return ((IFormValidator[])formValidators).length;
}
return 1;
}
	/**
	 * Registers an error feedback message for this component.
	 *
	 * @param error
	 *            error message
	 * @param args
	 *            argument replacement map for ${key} variables
	 */
	public final void error(String error, Map args)
	{
		// Interpolate ${key} placeholders from the args map, then delegate to
		// the standard single-argument error(String).
		error(new MapVariableInterpolator(error, args).toString());
	}
	/**
	 * Gets whether this form will try to generate a bookmarkable (stateless)
	 * link to itself as its action url.
	 *
	 * @return true if a bookmarkable action url is attempted
	 */
	public final boolean isBookmarkableHint()
	{
		return bookmarkableHint;
	}
	/**
	 * Sets the hint boolean that a form can generate a bookmarkable link as
	 * its action url. It only does this if the page is stateless by itself,
	 * because it doesn't make sense to use a bookmarkable/stateless form
	 * action url when the page is not stateless anyway. And it only generates
	 * the bookmarkable url if the containing page has a default or a
	 * PageParameters constructor (so it is really a bookmarkable page).
	 *
	 * If you set this boolean to true and the page is bookmarkable and
	 * stateless, then be aware that in onSubmit you no longer have the
	 * previous state; you only have the state of a newly created form/page and
	 * the submit parameters.
	 *
	 * @param bookmarkableHint
	 *            whether to attempt a bookmarkable action url
	 * @return This
	 */
	public final Form setBookmarkableHint(boolean bookmarkableHint)
	{
		this.bookmarkableHint = bookmarkableHint;
		return this;
	}
}
|
wicket/src/java/wicket/markup/html/form/Form.java
|
/*
* $Id$ $Revision$ $Date:
* 2006-05-26 07:46:36 +0200 (vr, 26 mei 2006) $
*
* ==============================================================================
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package wicket.markup.html.form;
import java.io.Serializable;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Locale;
import java.util.Map;
import java.util.StringTokenizer;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import wicket.Component;
import wicket.IRequestTarget;
import wicket.MarkupContainer;
import wicket.Page;
import wicket.PageParameters;
import wicket.Request;
import wicket.RequestCycle;
import wicket.WicketRuntimeException;
import wicket.markup.ComponentTag;
import wicket.markup.MarkupStream;
import wicket.markup.html.WebMarkupContainer;
import wicket.markup.html.border.Border;
import wicket.markup.html.form.persistence.CookieValuePersister;
import wicket.markup.html.form.persistence.IValuePersister;
import wicket.markup.html.form.validation.IFormValidator;
import wicket.model.IModel;
import wicket.model.Model;
import wicket.protocol.http.WebRequest;
import wicket.protocol.http.WebRequestCycle;
import wicket.protocol.http.request.WebClientInfo;
import wicket.protocol.http.request.WebRequestCodingStrategy;
import wicket.request.IRequestCycleProcessor;
import wicket.request.RequestParameters;
import wicket.request.target.component.BookmarkableListenerInterfaceRequestTarget;
import wicket.request.target.component.listener.ListenerInterfaceRequestTarget;
import wicket.util.lang.Bytes;
import wicket.util.string.AppendingStringBuffer;
import wicket.util.string.Strings;
import wicket.util.string.interpolator.MapVariableInterpolator;
import wicket.util.upload.FileUploadException;
import wicket.util.upload.FileUploadBase.SizeLimitExceededException;
/**
* Base class for forms. To implement a form, subclass this class, add
* FormComponents (such as CheckBoxes, ListChoices or TextFields) to the form.
* You can nest multiple buttons if you want to vary submit behavior. However,
* it is not necessary to use Wicket's button class, just putting e.g. <input
* type="submit" value="go"> suffices.
* <p>
* By default, the processing of a form works like this:
* <li> The submitting button is looked up. A submitting button is a button that
* is nested in this form (is a child component) and that was clicked by the
* user. If a submitting button was found, and it has the defaultFormProcessing
* field set to false (default is true), it's onSubmit method will be called
 * right away, thus no validation is done, and things like updating form
* component models that would normally be done are skipped. In that respect,
* nesting a button with the defaultFormProcessing field set to false has the
* same effect as nesting a normal link. If you want you can call validate() to
* execute form validation, hasError() to find out whether validate() resulted
* in validation errors, and updateFormComponentModels() to update the models of
* nested form components. </li>
* <li> When no submitting button with defaultFormProcessing set to false was
* found, this form is processed (method process()). Now, two possible paths
* exist:
* <ul>
* <li> Form validation failed. All nested form components will be marked
* invalid, and onError() is called to allow clients to provide custom error
* handling code. </li>
* <li> Form validation succeeded. The nested components will be asked to update
 * their models and persist their data if applicable. After that, method
* delegateSubmit with optionally the submitting button is called. The default
* when there is a submitting button is to first call onSubmit on that button,
* and after that call onSubmit on this form. Clients may override
* delegateSubmit if they want different behavior. </li>
* </ul>
 * </li>
* </p>
*
 * Form for handling (file) uploads with multipart requests is supported by
 * calling setMultiPart(true) ( although wicket will try to automatically detect
 * this for you ). Use this with
 * {@link wicket.markup.html.form.upload.FileUploadField} components. You can
 * attach multiple FileUploadField components for multiple file uploads.
* <p>
* In case of an upload error two resource keys are available to specify error
* messages: uploadTooLarge and uploadFailed
*
* ie in [page].properties
*
* [form-id].uploadTooLarge=You have uploaded a file that is over the allowed
* limit of 2Mb
*
* <p>
* If you want to have multiple buttons which submit the same form, simply put
* two or more button components somewhere in the hierarchy of components that
* are children of the form.
* </p>
* <p>
* To get form components to persist their values for users via cookies, simply
* call setPersistent(true) on the form component.
* </p>
*
* @param <T>
* Type of model object this component holds
*
* @author Jonathan Locke
* @author Juergen Donnerstag
* @author Eelco Hillenius
* @author Cameron Braid
* @author Johan Compagner
* @author Igor Vaynberg (ivaynberg)
*/
public class Form<T> extends WebMarkupContainer<T> implements IFormSubmitListener
{
	/**
	 * The FAKE_SUBMIT hidden field name constant; the hidden field carrying
	 * the dispatch url for anchor-based submits (see getHiddenFieldId).
	 */
	public static final String HIDDEN_FIELD_FAKE_SUBMIT = "fs";
	/**
	 * The WICKET_STATE hidden field name constant;
	 */
	public static final String HIDDEN_FIELD_WICKET_STATE = "ws";
	/**
	 * Visitor used for validation. Gates the {@link #validate(FormComponent)}
	 * callback so only components that are visible, still valid, enabled and
	 * allowed to be enabled are validated.
	 *
	 * @author Igor Vaynberg (ivaynberg)
	 */
	private static abstract class ValidationVisitor implements FormComponent.IVisitor
	{
		/**
		 * @see wicket.markup.html.form.FormComponent.IVisitor#formComponent(wicket.markup.html.form.FormComponent)
		 */
		public void formComponent(FormComponent formComponent)
		{
			// Skip components already invalidated by an earlier validation
			// stage, as well as invisible or disabled ones.
			if (formComponent.isVisibleInHierarchy() && formComponent.isValid()
					&& formComponent.isEnabled() && formComponent.isEnableAllowed())
			{
				validate(formComponent);
			}
		}
		/**
		 * Callback that should be used to validate a form component.
		 *
		 * @param formComponent
		 *            the component to validate
		 */
		public abstract void validate(FormComponent formComponent);
	}
	/** Resource key suffix for the "upload exceeds maxSize" error message */
	private static final String UPLOAD_TOO_LARGE_RESOURCE_KEY = "uploadTooLarge";
	/** Resource key suffix for the generic upload failure error message */
	private static final String UPLOAD_FAILED_RESOURCE_KEY = "uploadFailed";
	/** Flag that indicates this form has been submitted during this request */
	private static final short FLAG_SUBMITTED = FLAG_RESERVED1;
	private static final long serialVersionUID = 1L;
	/** Log. */
	private static final Log log = LogFactory.getLog(Form.class);
	/** Maximum size of an upload in bytes */
	private Bytes maxSize = Bytes.MAX;
	/** True if the form has enctype of multipart/form-data */
	private boolean multiPart = false;
	/**
	 * multi-validators assigned to this form; either a single IFormValidator
	 * or an IFormValidator[], for efficiency
	 */
	private Object formValidators = null;
	/**
	 * Any default button. If set, a hidden submit button will be rendered right
	 * after the form tag, so that when users press enter in a textfield, this
	 * button's action will be selected. If no default button is set, nothing
	 * additional is rendered.
	 * <p>
	 * WARNING: note that this is a best effort only. Unfortunately having a
	 * 'default' button in a form is ill defined in the standards, and of course
	 * IE has it's own way of doing things.
	 * </p>
	 */
	private Button defaultButton;
	// Hint that a bookmarkable (stateless) action url should be generated;
	// see setBookmarkableHint(boolean)
	private boolean bookmarkableHint;
	/**
	 * Constructs a form with no validation.
	 *
	 * @param parent
	 *            The parent of this component
	 *
	 * @param id
	 *            See Component
	 */
	public Form(MarkupContainer parent, final String id)
	{
		super(parent, id);
	}
	/**
	 * @param parent
	 *            The parent of this component
	 * @param id
	 *            See Component
	 * @param model
	 *            See Component
	 * @see wicket.Component#Component(MarkupContainer,String, IModel)
	 */
	public Form(MarkupContainer parent, final String id, IModel<T> model)
	{
		super(parent, id, model);
	}
	/**
	 * Gets the default button. If set (not null), a hidden submit button will
	 * be rendered right after the form tag, so that when users press enter in a
	 * textfield, this button's action will be selected. If no default button is
	 * set (it is null), nothing additional is rendered.
	 * <p>
	 * WARNING: note that this is a best effort only. Unfortunately having a
	 * 'default' button in a form is ill defined in the standards, and of course
	 * IE has it's own way of doing things.
	 * </p>
	 *
	 * @return The currently set default button, or null when none is set
	 */
	public final Button getDefaultButton()
	{
		return defaultButton;
	}
	/**
	 * @return the maximum allowed size of uploaded files for this form
	 */
	public Bytes getMaxSize()
	{
		return this.maxSize;
	}
/**
* THIS METHOD IS NOT PART OF THE WICKET PUBLIC API. DO NOT CALL IT.
* <p>
* Retrieves FormComponent values related to the page using the persister
* and assign the values to the FormComponent. Thus initializing them.
*/
public final void loadPersistentFormComponentValues()
{
visitFormComponents(new FormComponent.IVisitor()
{
public void formComponent(final FormComponent formComponent)
{
// Component must implement persister interface and
// persistence for that component must be enabled.
// Else ignore the persisted value. It'll be deleted
// once the user submits the Form containing that FormComponent.
// Note: if that is true, values may remain persisted longer
// than really necessary
if (formComponent.isVisibleInHierarchy() && formComponent.isPersistent())
{
// The persister
final IValuePersister persister = getValuePersister();
// Retrieve persisted value
persister.load(formComponent);
}
}
});
}
	/**
	 * THIS METHOD IS NOT PART OF THE WICKET API. DO NOT ATTEMPT TO OVERRIDE OR
	 * CALL IT.
	 *
	 * Handles form submissions. By default, this method simply calls validate()
	 * to validate the form and update the model if there is only one button. If
	 * there is more than one button, it calls the onClick() method for the
	 * button which submitted the form.
	 *
	 * @see Form#validate()
	 */
	public final void onFormSubmitted()
	{
		// Mark this form as submitted for the duration of the request;
		// reset again in internalOnDetach()
		setFlag(FLAG_SUBMITTED, true);
		// handleMultiPart() returns false when a file upload failed; in that
		// case no further processing happens.
		if (handleMultiPart())
		{
			// Tells FormComponents that a new user input has come
			inputChanged();
			// A non-empty fake-submit field means an anchor/javascript submit:
			// dispatch the embedded listener-interface url instead of normal
			// form processing.
			String url = getRequest().getParameter(getHiddenFieldId(HIDDEN_FIELD_FAKE_SUBMIT));
			if (!Strings.isEmpty(url))
			{
				dispatchEvent(getPage(), url);
			}
			else
			{
				// First, see if the processing was triggered by a Wicket button
				final Button submittingButton = findSubmittingButton();
				// When processing was triggered by a Wicket button and that
				// button indicates it wants to be called immediately
				// (without processing), call Button.onSubmit() right away.
				if (submittingButton != null && !submittingButton.getDefaultFormProcessing())
				{
					submittingButton.onSubmit();
				}
				else
				{
					// process the form for this request
					if (process())
					{
						// let clients handle further processing
						delegateSubmit(submittingButton);
					}
				}
			}
		}
	}
	/**
	 * Checks if this form has been submitted during the current request.
	 *
	 * @return true if the form has been submitted during this request, false
	 *         otherwise
	 */
	public final boolean isSubmitted()
	{
		// Set in onFormSubmitted(), cleared in internalOnDetach()
		return getFlag(FLAG_SUBMITTED);
	}
	/**
	 * @see wicket.Component#internalOnDetach()
	 */
	@Override
	protected void internalOnDetach()
	{
		super.internalOnDetach();
		// Reset the submitted flag so it only spans a single request
		setFlag(FLAG_SUBMITTED, false);
	}
/**
* Removes already persisted data for all FormComponent childs and disable
* persistence for the same components.
*
* @see Page#removePersistedFormData(Class, boolean)
*
* @param disablePersistence
* if true, disable persistence for all FormComponents on that
* page. If false, it will remain unchanged.
*/
public void removePersistentFormComponentValues(final boolean disablePersistence)
{
// The persistence manager responsible to persist and retrieve
// FormComponent data
final IValuePersister persister = getValuePersister();
// Search for FormComponents like TextField etc.
visitFormComponents(new FormComponent.IVisitor()
{
public void formComponent(final FormComponent formComponent)
{
if (formComponent.isVisibleInHierarchy())
{
// remove the FormComponent's persisted data
persister.clear(formComponent);
// Disable persistence if requested. Leave unchanged
// otherwise.
if (formComponent.isPersistent() && disablePersistence)
{
formComponent.setPersistent(false);
}
}
}
});
}
	/**
	 * Sets the default button. If set (not null), a hidden submit button will
	 * be rendered right after the form tag, so that when users press enter in a
	 * textfield, this button's action will be selected. If no default button is
	 * set (so unset by calling this method with null), nothing additional is
	 * rendered.
	 * <p>
	 * WARNING: note that this is a best effort only. Unfortunately having a
	 * 'default' button in a form is ill defined in the standards, and of course
	 * IE has it's own way of doing things.
	 * </p>
	 *
	 * @param button
	 *            The button to set as the default button, or null when you want
	 *            to 'unset' any previously set default button
	 */
	public final void setDefaultButton(Button button)
	{
		this.defaultButton = button;
	}
	/**
	 * @param maxSize
	 *            The maximum allowed size for uploaded files
	 */
	public void setMaxSize(final Bytes maxSize)
	{
		this.maxSize = maxSize;
	}
	/**
	 * Set to true to use enctype='multipart/form-data', and to process file
	 * uploads. By default multiPart = false.
	 *
	 * @param multiPart
	 *            whether this form should behave as a multipart form
	 */
	public void setMultiPart(boolean multiPart)
	{
		this.multiPart = multiPart;
	}
	/**
	 * Sets versioning on this form and propagates the setting to every nested
	 * form component.
	 *
	 * @see wicket.Component#setVersioned(boolean)
	 */
	@Override
	public final Component setVersioned(final boolean isVersioned)
	{
		super.setVersioned(isVersioned);
		// Search for FormComponents like TextField etc. and propagate the
		// versioning setting to each of them.
		visitFormComponents(new FormComponent.IVisitor()
		{
			public void formComponent(final FormComponent formComponent)
			{
				formComponent.setVersioned(isVersioned);
			}
		});
		return this;
	}
	/**
	 * Method not made final only because we want to ensure users call
	 * setVersioned; it simply delegates to the superclass.
	 *
	 * @see wicket.Component#isVersioned()
	 */
	@Override
	public boolean isVersioned()
	{
		return super.isVersioned();
	}
	/**
	 * Convenient and typesafe way to visit all the form components on a form.
	 *
	 * @param visitor
	 *            The visitor interface to call
	 */
	public final void visitFormComponents(final FormComponent.IVisitor visitor)
	{
		// Visit every FormComponent nested under this form
		visitChildren(FormComponent.class, new IVisitor()
		{
			public Object component(final Component component)
			{
				visitor.formComponent((FormComponent)component);
				return CONTINUE_TRAVERSAL;
			}
		});
		/**
		 * TODO Post 1.2 General: Maybe we should re-think how Borders are
		 * implemented, because there are just too many exceptions in the code
		 * base because of borders. This time it is to solve the problem tested
		 * in BoxBorderTestPage_3 where the Form is defined in the box border
		 * and the FormComponents are in the "body". Thus, the formComponents
		 * are not childs of the form. They are rather childs of the border, as
		 * the Form itself.
		 */
		if (getParent() instanceof Border)
		{
			// Also visit FormComponent siblings of this form inside the border
			MarkupContainer border = getParent();
			Iterator iter = border.iterator();
			while (iter.hasNext())
			{
				Component child = (Component)iter.next();
				if (child instanceof FormComponent)
				{
					visitor.formComponent((FormComponent)child);
				}
			}
		}
	}
/**
* If a default button was set on this form, this method will be called to
* render an extra field with an invisible style so that pressing enter in
* one of the textfields will do a form submit using this button. This
* method is overridable as what we do is best effort only, and may not what
* you want in specific situations. So if you have specific usability
* concerns, or want to follow another strategy, you may override this
* method.
*
* @param markupStream
* The markup stream
* @param openTag
* The open tag for the body
*/
protected void appendDefaultButtonField(final MarkupStream markupStream,
final ComponentTag openTag)
{
AppendingStringBuffer buffer = new AppendingStringBuffer();
// get the value, first seeing whether the value attribute is set
// by a model
String value = defaultButton.getModelObjectAsString();
if (value == null || "".equals(value))
{
// nope it isn't; try to read from the attributes
// note that we're only trying lower case here
value = defaultButton.getMarkupAttributes().getString("value");
}
// append the button
String userAgent = ((WebClientInfo)getSession().getClientInfo()).getUserAgent();
buffer.append("<input type=\"submit\" value=\"").append(value).append("\" name=\"").append(
defaultButton.getInputName()).append("\"");
if (userAgent != null && userAgent.indexOf("MSIE") != -1)
{
buffer.append("style=\"width: 0px\"");
}
else
{
buffer.append(" style=\"display: none\"");
}
buffer.append("\" />");
getResponse().write(buffer);
}
	/**
	 * Template method to allow clients to do any processing (like recording the
	 * current model so that, in case onSubmit does further validation, the
	 * model can be rolled back) before the actual updating of form component
	 * models is done.
	 */
	protected void beforeUpdateFormComponentModels()
	{
		// Intentionally empty: hook for subclasses.
	}
	/**
	 * Called (by the default implementation of 'process') when all fields
	 * validated, the form was updated and its data was allowed to be
	 * persisted. It is meant for delegating further processing to clients.
	 * <p>
	 * This implementation first finds out whether the form processing was
	 * triggered by a nested button of this form. If that is the case, that
	 * button's onSubmit is called first.
	 * </p>
	 * <p>
	 * Regardless of whether a submitting button was found, the form's onSubmit
	 * method is called next.
	 * </p>
	 *
	 * @param submittingButton
	 *            the button that triggered this form processing, or null if the
	 *            processing was triggered by something else (like a non-Wicket
	 *            submit button or a javascript execution)
	 */
	protected void delegateSubmit(Button submittingButton)
	{
		// when the given button is not null, it means that it was the
		// submitting button; give it first shot
		if (submittingButton != null)
		{
			submittingButton.onSubmit();
		}
		// Model was successfully updated with valid data
		onSubmit();
	}
	/**
	 * Gets the button which submitted this form. First searches Button
	 * children of this form, then SubmitLinks anywhere on the page that
	 * target this form.
	 *
	 * @return The button which submitted this form or null if the processing
	 *         was not triggered by a registered button component
	 */
	protected final Button findSubmittingButton()
	{
		Button button = (Button)visitChildren(Button.class, new IVisitor()
		{
			public Object component(final Component component)
			{
				// Get button
				final Button button = (Button)component;
				// Check for button-name or button-name.x request string
				// (image buttons submit name.x/name.y coordinates)
				if (getRequest().getParameter(button.getInputName()) != null
						|| getRequest().getParameter(button.getInputName() + ".x") != null)
				{
					if (!button.isVisible())
					{
						throw new WicketRuntimeException("Submit Button " + button.getInputName()
								+ " (path=" + button.getPageRelativePath() + ") is not visible");
					}
					return button;
				}
				return CONTINUE_TRAVERSAL;
			}
		});
		if (button == null)
		{
			// No nested Button matched; try SubmitLinks on the whole page
			// that reference this form.
			button = (Button)getPage().visitChildren(SubmitLink.class, new IVisitor()
			{
				public Object component(final Component component)
				{
					// Get button
					final SubmitLink button = (SubmitLink)component;
					// Check for button-name or button-name.x request string
					if (button.getForm() == Form.this
							&& (getRequest().getParameter(button.getInputName()) != null || getRequest()
									.getParameter(button.getInputName() + ".x") != null))
					{
						if (!button.isVisible())
						{
							throw new WicketRuntimeException("Submit Button is not visible");
						}
						return button;
					}
					return CONTINUE_TRAVERSAL;
				}
			});
		}
		return button;
	}
	/**
	 * Gets the form component persistence manager. Note that a new
	 * CookieValuePersister is created on every call; override to supply a
	 * different persister.
	 *
	 * @return The form component value persister
	 */
	protected IValuePersister getValuePersister()
	{
		return new CookieValuePersister();
	}
/**
* Gets whether the current form has any error registered.
*
* @return True if this form has at least one error.
*/
protected final boolean hasError()
{
// if this form itself has an error message
if (hasErrorMessage())
{
return true;
}
// the form doesn't have any errors, now check any nested form
// components
return anyFormComponentError();
}
/**
* @see wicket.Component#internalOnModelChanged()
*/
@Override
protected void internalOnModelChanged()
{
// Visit all the form components and validate each
visitFormComponents(new FormComponent.IVisitor()
{
public void formComponent(final FormComponent formComponent)
{
// If form component is using form model
if (formComponent.sameRootModel(Form.this))
{
formComponent.modelChanged();
}
}
});
}
/**
* Mark each form component on this form invalid.
*/
protected final void markFormComponentsInvalid()
{
// call invalidate methods of all nested form components
visitFormComponents(new FormComponent.IVisitor()
{
public void formComponent(final FormComponent formComponent)
{
if (formComponent.isVisibleInHierarchy())
{
formComponent.invalid();
}
}
});
}
/**
 * Marks every form component on this form that is visible in the
 * hierarchy as valid.
 */
protected final void markFormComponentsValid()
{
    final FormComponent.IVisitor validator = new FormComponent.IVisitor()
    {
        public void formComponent(final FormComponent fc)
        {
            if (fc.isVisibleInHierarchy())
            {
                fc.valid();
            }
        }
    };
    visitFormComponents(validator);
}
/**
 * Builds the id (also used as the name attribute) of one of the hidden
 * fields rendered by this form. Only 'fs' (fake submit) and 'ws' (wicket
 * state) are supported at present.
 *
 * @param hiddenFieldName
 *            The name of the hiddenfield
 * @return The name and id of the hidden field.
 */
public final String getHiddenFieldId(String hiddenFieldName)
{
    final StringBuilder id = new StringBuilder(getMarkupId());
    id.append(":hf:").append(hiddenFieldName);
    return id.toString();
}
/**
 * Writes the form's hidden support fields into the response before the
 * body renders: a "fake submit" field (filled by anchor/SubmitLink
 * javascript) and a "wicketState" field, plus the default-button markup
 * when a visible, enabled default button is set.
 *
 * @param markupStream
 *            The markup stream
 * @param openTag
 *            The open tag for the body
 */
@Override
protected void onComponentTagBody(final MarkupStream markupStream, final ComponentTag openTag)
{
    // hidden field that receives the interface url for anchor submits
    String fs = getHiddenFieldId(HIDDEN_FIELD_FAKE_SUBMIT);
    getResponse().write(
        new AppendingStringBuffer(
            "\n<div style=\"display:none\"><input type=\"hidden\" name=\"").append(fs)
            .append("\" id=\"").append(fs).append("\"/>"));
    // hidden field carrying client-side wicket state on submit
    String ws = getHiddenFieldId(HIDDEN_FIELD_WICKET_STATE);
    getResponse().write(
        new AppendingStringBuffer("\n<input type=\"hidden\" name=\"wicketState\" id=\"")
            .append(ws).append("\"/></div>"));
    // if a default button was set, handle the rendering of that
    if (defaultButton != null && defaultButton.isVisibleInHierarchy()
        && defaultButton.isEnabled())
    {
        appendDefaultButtonField(markupStream, openTag);
    }
    // do the rest of the processing
    super.onComponentTagBody(markupStream, openTag);
}
/**
 * Writes the form tag's attributes: forces method="post", computes the
 * action url (a bookmarkable/stateless one when hinted and possible) and
 * keeps the multipart enctype consistent with the Java-side flag.
 *
 * @see wicket.Component#onComponentTag(ComponentTag)
 */
@Override
protected void onComponentTag(final ComponentTag tag)
{
    checkComponentTag(tag, "form");
    setOutputMarkupId(true);
    super.onComponentTag(tag);
    // If the javascript id is already generated then keep using it, even
    // if that happened before the first render, because a component may
    // already use it to submit the form. This should be fixed when we
    // pre-parse the markup so that we know the id up front.
    tag.put("method", "post");
    Page page = getPage();
    boolean addAction = true;
    if (bookmarkableHint && page.isStateless())
    {
        try
        {
            // only a page with a default or PageParameters constructor
            // can be addressed through a bookmarkable url;
            // getConstructor throws when no such constructor exists
            if (page.getClass().getConstructor((Class[])null) != null
                || page.getClass().getConstructor(new Class[] { PageParameters.class }) != null)
            {
                BookmarkableListenerInterfaceRequestTarget target = new BookmarkableListenerInterfaceRequestTarget(
                    page.getPageMap().getName(), page.getClass(), new PageParameters(), this,
                    IFormSubmitListener.INTERFACE);
                tag.put("action", urlFor(target));
                addAction = false;
            }
        }
        catch (Exception e)
        {
            // no suitable constructor; fall back to the stateful action url
        }
    }
    if (addAction)
    {
        // escape ampersands so the action url is valid markup; the
        // original replaced "&" with itself, which was a no-op
        tag.put("action", Strings.replaceAll(urlFor(IFormSubmitListener.INTERFACE), "&",
            "&amp;"));
    }
    if (multiPart)
    {
        tag.put("enctype", "multipart/form-data");
    }
    else
    {
        // sanity check: the markup may declare multipart even though it
        // was not set explicitly in Java
        String enctype = (String)tag.getAttributes().get("enctype");
        if ("multipart/form-data".equalsIgnoreCase(enctype))
        {
            setMultiPart(true);
        }
    }
}
/**
 * A form can only be stateless when the bookmarkable hint is set and its
 * page has a default or PageParameters constructor; in every other case
 * the form reports itself stateful.
 *
 * @see wicket.MarkupContainer#isStateless()
 */
@Override
protected boolean isStateless()
{
    if (bookmarkableHint)
    {
        try
        {
            Page page = getPage();
            // getConstructor throws when no matching constructor exists,
            // so reaching the body means a suitable constructor is present
            if (page.getClass().getConstructor((Class[])null) != null
                || page.getClass().getConstructor(new Class[] { PageParameters.class }) != null)
            {
                return super.isStateless();
            }
        }
        catch (Exception e)
        {
            // no suitable constructor: fall through and report stateful
        }
    }
    return false;
}
/**
 * Hook invoked when processing of this form produced a validation error.
 * Override to react beyond the default display of validation messages;
 * this default implementation does nothing.
 */
protected void onError()
{
    // intentionally empty - subclasses may override
}
/**
 * Renders the form, first switching it to multipart mode when any visible
 * child form component is itself multipart.
 *
 * @see wicket.Component#onRender(MarkupStream)
 */
@Override
protected void onRender(final MarkupStream markupStream)
{
    final FormComponent.IVisitor multipartDetector = new FormComponent.IVisitor()
    {
        public void formComponent(FormComponent fc)
        {
            if (fc.isVisible() && fc.isMultiPart())
            {
                setMultiPart(true);
            }
        }
    };
    visitFormComponents(multipartDetector);
    super.onRender(markupStream);
}
/**
 * Hook invoked when the form was submitted and validated successfully.
 * This default implementation does nothing; subclasses implement their
 * submit behavior here.
 */
protected void onSubmit()
{
    // intentionally empty - subclasses may override
}
/**
 * Runs the full form processing sequence: validation, marking of nested
 * components, model update and optional persistence. Overriding this to
 * replace the whole algorithm is possible but not recommended; see the
 * class documentation for details.
 *
 * @return False if the form had an error
 */
public boolean process()
{
    // run validation
    validate();
    if (hasError())
    {
        // flag every nested component and give subclasses a hook
        markFormComponentsInvalid();
        onError();
        return false;
    }
    // all input was accepted
    markFormComponentsValid();
    // interception point for clients before models are touched
    beforeUpdateFormComponentModels();
    // push submitted values into the component models
    updateFormComponentModels();
    // store values (e.g. in cookies) where requested
    persistFormComponentData();
    return true;
}
/**
 * Pushes the values sent with the current request into the model of every
 * form component that is visible in the hierarchy, enabled, allowed to be
 * enabled and valid.
 *
 * @see wicket.markup.html.form.FormComponent#updateModel()
 */
protected final void updateFormComponentModels()
{
    final FormComponent.IVisitor updater = new FormComponent.IVisitor()
    {
        public void formComponent(final FormComponent fc)
        {
            final boolean updatable = fc.isVisibleInHierarchy() && fc.isEnabled()
                && fc.isValid() && fc.isEnableAllowed();
            if (updatable)
            {
                // Potentially update the model
                fc.updateModel();
            }
        }
    };
    visitFormComponents(updater);
}
/**
 * Clears the raw input of every nested {@link FormComponent} that is
 * visible in the hierarchy; typically used to reset a form.
 */
public final void clearInput()
{
    final FormComponent.IVisitor inputClearer = new FormComponent.IVisitor()
    {
        public void formComponent(final FormComponent fc)
        {
            if (fc.isVisibleInHierarchy())
            {
                fc.clearInput();
            }
        }
    };
    visitFormComponents(inputClearer);
}
/**
 * Validates the form by running, in order: required-input checks, type
 * conversion, per-component validators and finally form-level validators.
 * Typically called before any models are updated.
 */
protected void validate()
{
    // order matters: later stages assume the earlier ones already ran
    validateRequired();
    validateConversion();
    validateValidators();
    validateFormValidators();
}
/**
 * Triggers the required-input check on the form components via a
 * validation visitor.
 */
private void validateRequired()
{
    final ValidationVisitor requiredCheck = new ValidationVisitor()
    {
        @Override
        public void validate(final FormComponent formComponent)
        {
            formComponent.validateRequired();
        }
    };
    visitFormComponents(requiredCheck);
}
/**
 * Triggers type conversion on the form components via a validation
 * visitor.
 */
private void validateConversion()
{
    final ValidationVisitor converter = new ValidationVisitor()
    {
        @Override
        public void validate(final FormComponent formComponent)
        {
            formComponent.convert();
        }
    };
    visitFormComponents(converter);
}
/**
 * Triggers all IValidator validators added to the form components via a
 * validation visitor.
 */
private void validateValidators()
{
    final ValidationVisitor validatorRunner = new ValidationVisitor()
    {
        @Override
        public void validate(final FormComponent formComponent)
        {
            formComponent.validateValidators();
        }
    };
    visitFormComponents(validatorRunner);
}
/**
 * Runs every {@link IFormValidator} registered on this form. A validator
 * is skipped when any of its dependent form components is invalid.
 */
private void validateFormValidators()
{
    final int count = formValidators_size();
    outer: for (int index = 0; index < count; index++)
    {
        final IFormValidator formValidator = formValidators_get(index);
        final FormComponent[] dependents = formValidator.getDependentFormComponents();
        if (dependents != null)
        {
            for (int d = 0; d < dependents.length; d++)
            {
                if (!dependents[d].isValid())
                {
                    // at least one dependency failed; skip this validator
                    continue outer;
                }
            }
        }
        formValidator.validate(this);
    }
}
/**
 * Finds out whether any child component of this form has an error
 * message registered.
 *
 * @return true when at least one child carries an error message
 */
private boolean anyFormComponentError()
{
    final Object value = visitChildren(new IVisitor()
    {
        public Object component(final Component component)
        {
            if (component.hasErrorMessage())
            {
                // found an error; no need to look any further
                return STOP_TRAVERSAL;
            }
            // Traverse all children
            return CONTINUE_TRAVERSAL;
        }
    });
    // the traversal only stops early when an error was found; the
    // original's "? true : false" ternary was redundant
    return value == IVisitor.STOP_TRAVERSAL;
}
/**
 * When this form is multipart, replaces the current request with a
 * multipart web request so upload parameters are parsed correctly. Upload
 * failures are turned into feedback error messages rather than exceptions.
 *
 * @return False if form is multipart and upload failed
 */
private final boolean handleMultiPart()
{
    if (multiPart)
    {
        // Change the request to a multipart web request so parameters are
        // parsed out correctly
        try
        {
            final WebRequest multipartWebRequest = ((WebRequest)getRequest())
                .newMultipartWebRequest(this.maxSize);
            getRequestCycle().setRequest(multipartWebRequest);
        }
        catch (WicketRuntimeException wre)
        {
            // only upload failures are handled here; rethrow anything else
            if (wre.getCause() == null || !(wre.getCause() instanceof FileUploadException))
            {
                throw wre;
            }
            FileUploadException e = (FileUploadException)wre.getCause();
            // Create model with exception and maximum size values, used to
            // interpolate the feedback message
            final Map<String, Serializable> model = new HashMap<String, Serializable>();
            model.put("exception", e);
            model.put("maxSize", maxSize);
            if (e instanceof SizeLimitExceededException)
            {
                // Resource key should be <form-id>.uploadTooLarge to
                // override default message
                final String defaultValue = "Upload must be less than " + maxSize;
                String msg = getString(getId() + "." + UPLOAD_TOO_LARGE_RESOURCE_KEY, Model
                    .valueOf(model), defaultValue);
                error(msg);
                // log the stack trace only at debug level; it is expected
                // noise in production
                if (log.isDebugEnabled())
                {
                    log.error(msg, e);
                }
                else
                {
                    log.error(msg);
                }
            }
            else
            {
                // Resource key should be <form-id>.uploadFailed to override
                // default message
                final String defaultValue = "Upload failed: " + e.getLocalizedMessage();
                String msg = getString(getId() + "." + UPLOAD_FAILED_RESOURCE_KEY, Model
                    .valueOf(model), defaultValue);
                error(msg);
                log.error(msg, e);
            }
            // don't process the form if there is a FileUploadException
            return false;
        }
    }
    return true;
}
/**
 * Persists (e.g. as a Cookie) FormComponent data to be reloaded and
 * re-assigned to the FormComponent automatically when the page is visited
 * by the user next time.
 *
 * @see wicket.markup.html.form.FormComponent#updateModel()
 */
private void persistFormComponentData()
{
    // Cannot add cookies to request cycle unless it accepts them
    // We could conceivably be HTML over some other protocol!
    if (getRequestCycle() instanceof WebRequestCycle)
    {
        // The persistence manager responsible to persist and retrieve
        // FormComponent data
        final IValuePersister persister = getValuePersister();
        // Search for FormComponent children. Ignore all other
        visitFormComponents(new FormComponent.IVisitor()
        {
            public void formComponent(final FormComponent formComponent)
            {
                if (formComponent.isVisibleInHierarchy())
                {
                    // If persistence is switched on for that FormComponent
                    // ...
                    if (formComponent.isPersistent())
                    {
                        // Save component's data (e.g. in a cookie)
                        persister.save(formComponent);
                    }
                    else
                    {
                        // Remove component's data (e.g. cookie)
                        persister.clear(formComponent);
                    }
                }
            }
        });
    }
}
/**
 * Method for dispatching/calling an interface on a page from the given
 * url. Used by {@link wicket.markup.html.form.Form#onFormSubmitted()} for
 * dispatching events.
 *
 * @param page
 *            The page where the event should be called on.
 * @param url
 *            The url which describes the component path and the interface
 *            to be called.
 */
private void dispatchEvent(final Page page, final String url)
{
    RequestCycle rc = RequestCycle.get();
    IRequestCycleProcessor processor = rc.getProcessor();
    // decode the stored url as if it were the current request
    final RequestParameters requestParameters = processor.getRequestCodingStrategy().decode(
        new FormDispatchRequest(rc.getRequest(), url));
    IRequestTarget rt = processor.resolve(rc, requestParameters);
    // only listener-interface targets can be dispatched here
    if (rt instanceof ListenerInterfaceRequestTarget)
    {
        ListenerInterfaceRequestTarget interfaceTarget = ((ListenerInterfaceRequestTarget)rt);
        interfaceTarget.getRequestListenerInterface().invoke(page, interfaceTarget.getTarget());
    }
    else
    {
        throw new WicketRuntimeException(
            "Attempt to access unknown request listener interface "
                + requestParameters.getInterfaceName());
    }
}
/**
 * Informs all the form's nested FormComponents that are visible in the
 * hierarchy that new user input is available in the Request.
 */
private void inputChanged()
{
    final FormComponent.IVisitor inputNotifier = new FormComponent.IVisitor()
    {
        public void formComponent(final FormComponent fc)
        {
            if (fc.isVisibleInHierarchy())
            {
                fc.inputChanged();
            }
        }
    };
    visitFormComponents(inputNotifier);
}
/**
 * Generates a piece of javascript that stores the given url in the
 * fake-submit hidden field and then submits this form.
 *
 * Warning: This code should only be called in the rendering phase for form
 * components inside the form because it uses the css/javascript id of the
 * form which can be stored in the markup.
 *
 * @param url
 *            The interface url that has to be stored in the hidden field
 *            and submitted
 * @return The javascript code that submits the form.
 */
public final CharSequence getJsForInterfaceUrl(CharSequence url)
{
    // both element lookups rely on the ids rendered in onComponentTagBody
    // and onComponentTag respectively
    return new AppendingStringBuffer("document.getElementById('").append(
        getHiddenFieldId(HIDDEN_FIELD_FAKE_SUBMIT)).append("').value='").append(url)
        .append("';document.getElementById('").append(getMarkupId()).append(
            "').submit();");
}
/**
 * A synthetic {@link Request} that wraps the real request but takes its
 * url and query parameters from the value posted in the fake-submit
 * hidden field, so the request coding strategy can decode it like a
 * normal request.
 */
class FormDispatchRequest extends Request
{
    /** The wrapped request; used for locale and path lookups. */
    private final Request realRequest;

    /** The decoded dispatch url. */
    private final String url;

    /** Query parameters parsed from the dispatch url. */
    private final Map<String, Object> params = new HashMap<String, Object>(4);

    /**
     * Construct.
     *
     * @param realRequest
     * @param url
     */
    public FormDispatchRequest(final Request realRequest, final String url)
    {
        this.realRequest = realRequest;
        this.url = realRequest.decodeURL(url);
        // parse the query string into the params map; a parameter without
        // '=' is stored with an empty value.
        // NOTE(review): values get no additional url-decoding beyond what
        // decodeURL already did - confirm that is intended
        String queryPart = this.url.substring(this.url.indexOf("?") + 1);
        StringTokenizer paramsSt = new StringTokenizer(queryPart, "&");
        while (paramsSt.hasMoreTokens())
        {
            String param = paramsSt.nextToken();
            int equalsSign = param.indexOf("=");
            if (equalsSign >= 0)
            {
                String paramName = param.substring(0, equalsSign);
                String value = param.substring(equalsSign + 1);
                params.put(paramName, value);
            }
            else
            {
                params.put(param, "");
            }
        }
    }

    /**
     * @see wicket.Request#getLocale()
     */
    @Override
    public Locale getLocale()
    {
        return realRequest.getLocale();
    }

    /**
     * @see wicket.Request#getParameter(java.lang.String)
     */
    @Override
    public String getParameter(String key)
    {
        return (String)params.get(key);
    }

    /**
     * @see wicket.Request#getParameterMap()
     */
    @Override
    public Map<String, Object> getParameterMap()
    {
        return params;
    }

    /**
     * @see wicket.Request#getParameters(java.lang.String)
     */
    @Override
    public String[] getParameters(String key)
    {
        // single-valued: the dispatch url never carries repeated keys
        String param = (String)params.get(key);
        if (param != null)
        {
            return new String[] { param };
        }
        return new String[0];
    }

    /**
     * @see wicket.Request#getPath()
     */
    @Override
    public String getPath()
    {
        return realRequest.getPath();
    }

    /**
     * @see wicket.Request#getRelativeURL()
     */
    @Override
    public String getRelativeURL()
    {
        // NOTE(review): assumes the url contains a '/' after position 1;
        // a url without one would make substring throw - confirm callers
        // always pass an absolute path
        return url.substring(url.indexOf("/", 1));
    }

    /**
     * @see wicket.Request#getURL()
     */
    @Override
    public String getURL()
    {
        return url;
    }
}
/**
 * Returns the prefix used when building validator keys. This allows a form
 * to use a separate "set" of keys. For example if prefix "short" is
 * returned, validator key short.RequiredValidator will be tried instead of
 * the RequiredValidator key.
 * <p>
 * This can be useful when different designs are used for a form. In a form
 * where error messages are displayed next to their respective form
 * components as opposed to at the top of the form, the ${label} attribute
 * is of little use and only causes redundant information to appear in the
 * message. Forms like these can return the "short" (or any other string)
 * validator prefix and declare key: short.RequiredValidator=required to
 * override the longer message which is usually declared like this:
 * RequiredValidator=${label} is a required field
 * <p>
 * The returned prefix will be used for all form components. The prefix can
 * also be overridden on form component level by overriding
 * {@link FormComponent#getValidatorKeyPrefix()}
 *
 * @return prefix prepended to validator keys
 */
public String getValidatorKeyPrefix()
{
    // no prefix by default; subclasses override to supply one
    return null;
}
/**
 * Adds a form validator to the form.
 *
 * @see IFormValidator
 * @param validator
 *            validator
 */
public void add(IFormValidator validator)
{
    if (validator != null)
    {
        formValidators_add(validator);
        return;
    }
    throw new IllegalArgumentException("validator argument cannot be null");
}
/**
 * Adds a validator to the formValidators store. For memory efficiency the
 * store is either null (empty), a single IFormValidator instance, or an
 * IFormValidator[] array.
 *
 * @param validator
 *            The form validator to add (never null here; checked by
 *            {@link #add(IFormValidator)})
 */
private void formValidators_add(final IFormValidator validator)
{
    if (this.formValidators == null)
    {
        // first validator: store the bare instance, no array needed
        this.formValidators = validator;
    }
    else
    {
        // grow into (or extend) an array holding size + 1 elements
        final int size = formValidators_size();
        final IFormValidator[] validators = new IFormValidator[size + 1];
        if (this.formValidators instanceof IFormValidator[])
        {
            // bulk-copy instead of the original per-element loop that
            // re-dispatched through formValidators_get on every index
            System.arraycopy(this.formValidators, 0, validators, 0, size);
        }
        else
        {
            validators[0] = (IFormValidator)this.formValidators;
        }
        validators[size] = validator;
        this.formValidators = validators;
    }
}
/**
 * Gets a form validator from the formValidators store (which may be an
 * array of IFormValidators or a single instance, for efficiency) at the
 * given index.
 *
 * @param index
 *            The index of the validator to get
 * @return The form validator
 */
private IFormValidator formValidators_get(int index)
{
    if (this.formValidators == null)
    {
        throw new IndexOutOfBoundsException();
    }
    if (this.formValidators instanceof IFormValidator[])
    {
        return ((IFormValidator[])formValidators)[index];
    }
    // single-instance case: the index is NOT range-checked here; callers
    // are expected to keep index < formValidators_size()
    return (IFormValidator)formValidators;
}
/**
 * Returns the number of validators in the formValidators store (which may
 * be an array of IFormValidators or a single instance, for efficiency).
 *
 * @return the validator count
 */
private int formValidators_size()
{
    final Object store = this.formValidators;
    if (store == null)
    {
        return 0;
    }
    return (store instanceof IFormValidator[]) ? ((IFormValidator[])store).length : 1;
}
/**
 * Registers an error feedback message for this component, interpolating
 * ${key} variables in the message from the given map.
 *
 * @param error
 *            error message
 * @param args
 *            argument replacement map for ${key} variables
 */
public final void error(String error, Map args)
{
    error(new MapVariableInterpolator(error, args).toString());
}
/**
 * Tells whether this form tries to generate a bookmarkable (stateless)
 * link to itself as its action url.
 *
 * @return the bookmarkable hint flag
 */
public final boolean isBookmarkableHint()
{
    return this.bookmarkableHint;
}
/**
 * Sets the hint that this form may generate a bookmarkable link as its
 * action url. The bookmarkable url is only generated when the page itself
 * is stateless (a stateless action url makes no sense otherwise) and the
 * page has a default or a PageParameters constructor (so it really is a
 * bookmarkable page).
 *
 * If you set this to true and the page is bookmarkable and stateless,
 * then be aware that in onSubmit you no longer have the previous state:
 * you only have the state of a newly created form/page plus the submit
 * parameters.
 *
 * @param bookmarkableHint
 *            whether to attempt a bookmarkable action url
 * @return This
 */
public final Form setBookmarkableHint(boolean bookmarkableHint)
{
    this.bookmarkableHint = bookmarkableHint;
    return this;
}
}
|
removed stale javadoc
git-svn-id: ac804e38dcddf5e42ac850d29d9218b7df6087b7@461251 13f79535-47bb-0310-9956-ffa450edef68
|
wicket/src/java/wicket/markup/html/form/Form.java
|
removed stale javadoc
|
|
Java
|
apache-2.0
|
f8a8cd03a1e2a8fcc32ae40ac35b8e9e814a5cc9
| 0
|
wso2/devstudio-tooling-esb,nwnpallewela/devstudio-tooling-esb,prabushi/devstudio-tooling-esb,susinda/devstudio-tooling-esb,wso2/devstudio-tooling-esb,prabushi/devstudio-tooling-esb,prabushi/devstudio-tooling-esb,nwnpallewela/devstudio-tooling-esb,sohaniwso2/devstudio-tooling-esb,sohaniwso2/devstudio-tooling-esb,wso2/devstudio-tooling-esb,susinda/devstudio-tooling-esb,prabushi/devstudio-tooling-esb,wso2/devstudio-tooling-esb
|
package org.wso2.developerstudio.datamapper.diagram.edit.parts;
import java.util.ArrayList;
import java.util.List;
import org.apache.commons.lang.StringUtils;
import org.eclipse.draw2d.Clickable;
import org.eclipse.draw2d.ColorConstants;
import org.eclipse.draw2d.Figure;
import org.eclipse.draw2d.IFigure;
import org.eclipse.draw2d.ImageFigure;
import org.eclipse.draw2d.Label;
import org.eclipse.draw2d.MarginBorder;
import org.eclipse.draw2d.MouseEvent;
import org.eclipse.draw2d.MouseListener;
import org.eclipse.draw2d.MouseMotionListener;
import org.eclipse.draw2d.PositionConstants;
import org.eclipse.draw2d.RectangleFigure;
import org.eclipse.draw2d.Shape;
import org.eclipse.draw2d.StackLayout;
import org.eclipse.draw2d.ToolbarLayout;
import org.eclipse.draw2d.geometry.Dimension;
import org.eclipse.gef.EditPart;
import org.eclipse.gef.EditPolicy;
import org.eclipse.gef.Request;
import org.eclipse.gef.commands.Command;
import org.eclipse.gef.editpolicies.LayoutEditPolicy;
import org.eclipse.gef.editpolicies.NonResizableEditPolicy;
import org.eclipse.gef.palette.PaletteContainer;
import org.eclipse.gef.palette.ToolEntry;
import org.eclipse.gef.requests.CreateRequest;
import org.eclipse.gmf.runtime.diagram.ui.editparts.AbstractBorderedShapeEditPart;
import org.eclipse.gmf.runtime.diagram.ui.editparts.IBorderItemEditPart;
import org.eclipse.gmf.runtime.diagram.ui.editparts.IGraphicalEditPart;
import org.eclipse.gmf.runtime.diagram.ui.editpolicies.EditPolicyRoles;
import org.eclipse.gmf.runtime.diagram.ui.figures.BorderItemLocator;
import org.eclipse.gmf.runtime.draw2d.ui.figures.ConstrainedToolbarLayout;
import org.eclipse.gmf.runtime.draw2d.ui.figures.WrappingLabel;
import org.eclipse.gmf.runtime.gef.ui.figures.DefaultSizeNodeFigure;
import org.eclipse.gmf.runtime.gef.ui.figures.NodeFigure;
import org.eclipse.gmf.runtime.notation.View;
import org.eclipse.gmf.tooling.runtime.edit.policies.reparent.CreationEditPolicyWithCustomReparent;
import org.eclipse.jface.resource.ImageDescriptor;
import org.eclipse.swt.SWT;
import org.eclipse.swt.graphics.Color;
import org.eclipse.swt.graphics.Font;
import org.eclipse.swt.widgets.Display;
import org.eclipse.ui.plugin.AbstractUIPlugin;
import org.wso2.developerstudio.datamapper.PropertyKeyValuePair;
import org.wso2.developerstudio.datamapper.TreeNode;
import org.wso2.developerstudio.datamapper.diagram.edit.parts.custom.CustomNonResizableEditPolicyEx;
import org.wso2.developerstudio.datamapper.diagram.part.DataMapperVisualIDRegistry;
/**
* @generated
*/
public class TreeNode3EditPart extends AbstractBorderedShapeEditPart {
/** Icon shown for nodes that have children. */
private static final String PARENT_ICON = "icons/gmf/parent.gif";

// NOTE(review): points at the same image as PARENT_ICON - confirm a
// dedicated array icon was not intended here
private static final String ARRAY_ICON = "icons/gmf/parent.gif";

/** Plug-in id used to resolve icon resources. */
private static final String ORG_WSO2_DEVELOPERSTUDIO_VISUALDATAMAPPER_DIAGRAM = "org.wso2.developerstudio.visualdatamapper.diagram";

/** JSON schema property key: value type of array items. */
private static final String JSON_SCHEMA_ARRAY_ITEMS_VALUE_TYPE = "items_value_type";

/** JSON schema property key: value type of an object. */
private static final String JSON_SCHEMA_OBJECT_VALUE_TYPE = "object_value_type";

/** JSON schema property key: type of array items. */
private static final String JSON_SCHEMA_ARRAY_ITEMS_TYPE = "items_type";

/** Sentinel string used by the model for an absent/null type. */
private static final String NULL_VALUE = "null";

/** JSON schema property key: node type. */
public static final String JSON_SCHEMA_TYPE = "type";

/** JSON schema type name for arrays. */
public static final String JSON_SCHEMA_ARRAY = "array";

/** JSON schema type name for objects. */
public static final String JSON_SCHEMA_OBJECT = "object";

/**
 * Child figures of the primary shape, captured during {@link #activate()}.
 *
 * @generated NOT
 */
List<IFigure> childrenIFigure;

/**
 * Guards the one-time figure setup in {@link #activate()}.
 *
 * @generated NOT
 */
boolean isActivated = false;

/**
 * @generated NOT
 */
NodeFigure figure;

/**
 * @generated
 */
public static final int VISUAL_ID = 3011;

/**
 * @generated
 */
protected IFigure contentPane;

/**
 * @generated
 */
protected IFigure primaryShape;
/**
 * Creates the edit part for the given notation view.
 *
 * @param view the notation element backing this edit part
 * @generated
 */
public TreeNode3EditPart(View view) {
    super(view);
}
/**
 * On first activation, snapshots the children of the primary shape into
 * {@link #childrenIFigure}, rebuilds the shape's child list in the same
 * order, and fixes the preferred size of the first child.
 *
 * @generated NOT
 */
@Override
public void activate() {
    super.activate();
    if (!isActivated) {
        List<IFigure> figures = new ArrayList<IFigure>();
        childrenIFigure = new ArrayList<IFigure>();
        int count = getPrimaryShape().getChildren().size();
        // drain the child list (always taking the current head) while
        // remembering the figures in order
        for (int i = 0; i < count; i++) {
            IFigure figure = (IFigure) getPrimaryShape().getChildren().get(0);
            figures.add(figure);
            childrenIFigure.add(figure);
            getPrimaryShape().getChildren().remove(figure);
        }
        // re-attach the figures in their original order
        for (int i = 0; i < count; i++) {
            getPrimaryShape().getChildren().add(figures.get(i));
        }
        // NOTE(review): assumes the first child is the 100x40 header
        // figure - confirm against TreeNodeFigure's structure
        ((Figure) (getPrimaryShape().getChildren().get(0))).setPreferredSize(100, 40);
        // NOTE(review): the last entry is deliberately dropped from the
        // snapshot - confirm which child this excludes and why
        childrenIFigure.remove(childrenIFigure.size() - 1);
        isActivated = true;
    }
}
/**
 * Installs the edit policies for this node: creation with custom
 * reparenting, semantic and canonical policies, a layout policy, and a
 * selection policy with dragging disabled; the connection-handles and
 * popup-bar roles are removed.
 *
 * @generated NOT
 */
protected void createDefaultEditPolicies() {
    installEditPolicy(EditPolicyRoles.CREATION_ROLE, new CreationEditPolicyWithCustomReparent(
            org.wso2.developerstudio.datamapper.diagram.part.DataMapperVisualIDRegistry.TYPED_INSTANCE));
    super.createDefaultEditPolicies();
    installEditPolicy(EditPolicyRoles.SEMANTIC_ROLE,
            new org.wso2.developerstudio.datamapper.diagram.edit.policies.TreeNode3ItemSemanticEditPolicy());
    installEditPolicy(EditPolicyRoles.CANONICAL_ROLE,
            new org.wso2.developerstudio.datamapper.diagram.edit.policies.TreeNode3CanonicalEditPolicy());
    installEditPolicy(EditPolicy.LAYOUT_ROLE, createLayoutEditPolicy());
    removeEditPolicy(org.eclipse.gmf.runtime.diagram.ui.editpolicies.EditPolicyRoles.CONNECTION_HANDLES_ROLE);
    /* Disable dragging and resizing */
    NonResizableEditPolicy selectionPolicy = new CustomNonResizableEditPolicyEx();
    selectionPolicy.setDragAllowed(false);
    installEditPolicy(EditPolicy.PRIMARY_DRAG_ROLE, selectionPolicy);
    /* remove balloon */
    removeEditPolicy(org.eclipse.gmf.runtime.diagram.ui.editpolicies.EditPolicyRoles.POPUPBAR_ROLE);
}
/**
 * This edit part is always selectable in the diagram.
 *
 * @see org.eclipse.gmf.runtime.diagram.ui.editparts.GraphicalEditPart#isSelectable()
 * @return always true
 */
@Override
public boolean isSelectable() {
    return true;
}
/**
 * Creates a layout policy that delegates each child's drag handling to the
 * child's own PRIMARY_DRAG_ROLE policy (falling back to a
 * NonResizableEditPolicy) and forbids moving or creating children here.
 *
 * @generated NOT
 */
protected LayoutEditPolicy createLayoutEditPolicy() {
    org.eclipse.gmf.runtime.diagram.ui.editpolicies.LayoutEditPolicy lep = new org.eclipse.gmf.runtime.diagram.ui.editpolicies.LayoutEditPolicy() {
        protected EditPolicy createChildEditPolicy(EditPart child) {
            EditPolicy result = child.getEditPolicy(EditPolicy.PRIMARY_DRAG_ROLE);
            if (result == null) {
                result = new NonResizableEditPolicy();
            }
            return result;
        }

        // children cannot be moved within this node
        protected Command getMoveChildrenCommand(Request request) {
            return null;
        }

        // children cannot be created through this layout policy
        protected Command getCreateCommand(CreateRequest request) {
            return null;
        }
    };
    return lep;
}
/**
 * Creates and stores the primary figure of this node.
 *
 * @return the freshly created tree node figure
 * @generated NOT
 */
protected IFigure createNodeShape() {
    primaryShape = new TreeNodeFigure();
    return primaryShape;
}
/**
 * Typed accessor for the primary figure.
 *
 * @return the primary shape cast to {@link TreeNodeFigure}
 * @generated NOT
 */
public TreeNodeFigure getPrimaryShape() {
    final IFigure shape = primaryShape;
    return (TreeNodeFigure) shape;
}
/**
 * Walks up the edit-part hierarchy until the enclosing Input or Output
 * box, the diagram root, or null is reached.
 *
 * @return the enclosing box, the root edit part, or null
 */
private EditPart getParentBox() {
    EditPart current = this.getParent();
    while (current != null
            && !(current instanceof DataMapperRootEditPart)
            && !(current instanceof InputEditPart)
            && !(current instanceof OutputEditPart)) {
        current = current.getParent();
    }
    return current;
}
/**
 * Attaches the fixed children of this node: the name label, and the in/out
 * connector nodes. A connector is rendered as a real border item only when
 * the node can take part in a mapping; otherwise it is emptied so no
 * anchor shows. In-nodes are suppressed inside the Input box, out-nodes
 * inside the Output box.
 *
 * NOTE(review): the in-node and out-node branches are near-identical
 * copies - a shared private helper would remove the duplication. Also,
 * the empty-connector paths deliberately fall through to return false,
 * which sends the child on to super.addChildVisual - confirm intended.
 *
 * @generated
 */
protected boolean addFixedChild(EditPart childEditPart) {
    String type = getNodeType();
    EditPart temp = this.getParentBox();
    if (childEditPart instanceof TreeNodeName3EditPart) {
        ((TreeNodeName3EditPart) childEditPart).setLabel(getPrimaryShape().getFigureTreeNodeNameFigure());
        return true;
    }
    if (childEditPart instanceof InNodeEditPart) {
        // nodes inside the Input box never accept incoming connections
        if (temp instanceof InputEditPart) {
            createEmptyInNode(childEditPart);
        } else {
            //If an element has children, then disable the innode connector arrow
            if (((TreeNode) ((View) getModel()).getElement()).getNode().size() > 0) {
                String value = getNodeValue(type);
                // If an element has values then enable the connector arrow
                if (StringUtils.isNotEmpty(value)) {
                    return createInNode(childEditPart);
                }else {
                    createEmptyInNode(childEditPart);
                }
            } else {
                if (type.equals(JSON_SCHEMA_OBJECT) || type.equals(JSON_SCHEMA_ARRAY)) {
                    String itemsType = getItemsType();
                    // If an element has values then enable the connector
                    // arrow
                    if (itemsType.equals(NULL_VALUE)) {
                        createEmptyInNode(childEditPart);
                    } else {
                        return createInNode(childEditPart);
                    }
                } else {
                    if (type.equals(NULL_VALUE)) {
                        // If type is null, then disable the in node
                        // connector
                        createEmptyInNode(childEditPart);
                    } else {
                        return createInNode(childEditPart);
                    }
                }
            }
        }
    }
    if (childEditPart instanceof OutNodeEditPart) {
        // nodes inside the Output box never emit outgoing connections
        if (temp instanceof OutputEditPart) {
            createEmptyOutNode(childEditPart);
        } else {
            //If an element has children, then disable the outnode connector arrow
            if (((TreeNode) ((View) getModel()).getElement()).getNode().size() > 0) {
                String value = getNodeValue(type);
                // If an element has values then enable the connector arrow
                if (StringUtils.isNotEmpty(value)) {
                    return createOutNode(childEditPart);
                }else {
                    createEmptyOutNode(childEditPart);
                }
            } else {
                if (type.equals(JSON_SCHEMA_OBJECT) || type.equals(JSON_SCHEMA_ARRAY)) {
                    String itemsType = getItemsType();
                    // If an element has values then enable the connector
                    // arrow
                    if (itemsType.equals(NULL_VALUE)) {
                        createEmptyOutNode(childEditPart);
                    } else {
                        return createOutNode(childEditPart);
                    }
                } else {
                    if (type.equals(NULL_VALUE)) {
                        // If type is null, then disable the out node
                        // connector
                        createEmptyOutNode(childEditPart);
                    } else {
                        return createOutNode(childEditPart);
                    }
                }
            }
        }
    }
    return false;
}
/**
 * Looks up the "items_type" property of the backing tree node.
 *
 * @return the first matching property value, or "" when absent
 */
public String getItemsType() {
    TreeNode node = (TreeNode) ((View) getModel()).getElement();
    for (PropertyKeyValuePair pair : node.getProperties()) {
        if (pair.getKey().equals(JSON_SCHEMA_ARRAY_ITEMS_TYPE)) {
            return pair.getValue();
        }
    }
    return "";
}
/**
 * Replaces the contents of the in-node connector figure with a blank
 * figure so no connection anchor is shown.
 *
 * @param childEditPart the in-node edit part being suppressed
 */
private void createEmptyInNode(EditPart childEditPart) {
    NodeFigure connector = (NodeFigure) ((InNodeEditPart) childEditPart).getFigure();
    connector.removeAll();
    connector.add(new Figure());
}
/**
 * Replaces the contents of the out-node connector figure with a blank
 * figure so no connection anchor is shown.
 *
 * @param childEditPart the out-node edit part being suppressed
 */
private void createEmptyOutNode(EditPart childEditPart) {
    NodeFigure connector = (NodeFigure) ((OutNodeEditPart) childEditPart).getFigure();
    connector.removeAll();
    connector.add(new Figure());
}
/**
 * Attaches the in-node connector figure as a border item on the west side
 * of this node.
 *
 * @param childEditPart the in-node edit part
 * @return always true (the child was handled)
 */
private boolean createInNode(EditPart childEditPart) {
    IFigure connector = ((InNodeEditPart) childEditPart).getFigure();
    getBorderedFigure().getBorderItemContainer()
            .add(connector, new BorderItemLocator(getMainFigure(), PositionConstants.WEST));
    return true;
}
/**
 * Attaches the out-node connector figure as a border item on the east
 * side of this node.
 *
 * @param childEditPart the out-node edit part
 * @return always true (the child was handled)
 */
private boolean createOutNode(EditPart childEditPart) {
    IFigure connector = ((OutNodeEditPart) childEditPart).getFigure();
    getBorderedFigure().getBorderItemContainer()
            .add(connector, new BorderItemLocator(getMainFigure(), PositionConstants.EAST));
    return true;
}
/**
 * Looks up the "type" property of the backing tree node.
 *
 * @return the first matching property value, or "" when absent
 */
public String getNodeType() {
    TreeNode node = (TreeNode) ((View) getModel()).getElement();
    for (PropertyKeyValuePair pair : node.getProperties()) {
        if (pair.getKey().equals(JSON_SCHEMA_TYPE)) {
            return pair.getValue();
        }
    }
    return "";
}
/**
 * Resolves the value-type property for the given node type: for "array"
 * nodes the "items_value_type" property, for "object" nodes the
 * "object_value_type" property, "" for anything else or when absent.
 *
 * @param type the node's JSON schema type
 * @return the matching property value, or ""
 */
public String getNodeValue(String type) {
    final String key;
    if (type.equals(JSON_SCHEMA_ARRAY)) {
        key = JSON_SCHEMA_ARRAY_ITEMS_VALUE_TYPE;
    } else if (type.equals(JSON_SCHEMA_OBJECT)) {
        key = JSON_SCHEMA_OBJECT_VALUE_TYPE;
    } else {
        return "";
    }
    TreeNode node = (TreeNode) ((View) getModel()).getElement();
    for (PropertyKeyValuePair pair : node.getProperties()) {
        if (pair.getKey().equals(key)) {
            return pair.getValue();
        }
    }
    return "";
}
/**
 * Detaches a fixed child: the name label needs no figure work, while the
 * in/out connector figures are removed from the border item container.
 *
 * @param childEditPart the child being removed
 * @return true when the child was one of the fixed children
 * @generated NOT
 */
protected boolean removeFixedChild(EditPart childEditPart) {
    if (childEditPart instanceof TreeNodeName3EditPart) {
        return true;
    }
    IFigure borderFigure = null;
    if (childEditPart instanceof InNodeEditPart) {
        borderFigure = ((InNodeEditPart) childEditPart).getFigure();
    } else if (childEditPart instanceof OutNodeEditPart) {
        borderFigure = ((OutNodeEditPart) childEditPart).getFigure();
    }
    if (borderFigure != null) {
        getBorderedFigure().getBorderItemContainer().remove(borderFigure);
        return true;
    }
    return false;
}
/**
 * Adds a child's visual, letting the fixed-child handling claim it first;
 * anything else is appended at the end by the superclass.
 *
 * @generated NOT
 */
protected void addChildVisual(EditPart childEditPart, int index) {
    if (!addFixedChild(childEditPart)) {
        super.addChildVisual(childEditPart, -1);
    }
}
/**
 * Removes a child's visual, letting the fixed-child handling claim it
 * first; anything else is removed by the superclass.
 *
 * @generated NOT
 */
protected void removeChildVisual(EditPart childEditPart) {
    if (!removeFixedChild(childEditPart)) {
        super.removeChildVisual(childEditPart);
    }
}
/**
 * Border items live in the border item container; everything else goes
 * into the regular content pane.
 *
 * @generated NOT
 */
protected IFigure getContentPaneFor(IGraphicalEditPart editPart) {
    return (editPart instanceof IBorderItemEditPart)
            ? getBorderedFigure().getBorderItemContainer()
            : getContentPane();
}
/**
* @generated
*/
protected NodeFigure createNodePlate() {
DefaultSizeNodeFigure result = new DefaultSizeNodeFigure(40, 40);
return result;
}
/**
* Creates figure for this edit part.
*
* Body of this method does not depend on settings in generation model so
* you may safely remove <i>generated</i> tag and modify it.
*
* @generated
*/
protected NodeFigure createMainFigure() {
NodeFigure figure = createNodePlate();
figure.setLayoutManager(new StackLayout());
IFigure shape = createNodeShape();
figure.add(shape);
contentPane = setupContentPane(shape);
return figure;
}
/**
* Default implementation treats passed figure as content pane. Respects
* layout one may have set for generated figure.
*
* @param nodeShape
* instance of generated figure class
* @generated
*/
protected IFigure setupContentPane(IFigure nodeShape) {
if (nodeShape.getLayoutManager() == null) {
ConstrainedToolbarLayout layout = new ConstrainedToolbarLayout();
layout.setSpacing(5);
nodeShape.setLayoutManager(layout);
}
return nodeShape; // use nodeShape itself as contentPane
}
/**
* @generated
*/
public IFigure getContentPane() {
if (contentPane != null) {
return contentPane;
}
return super.getContentPane();
}
/**
* @generated
*/
protected void setForegroundColor(Color color) {
if (primaryShape != null) {
primaryShape.setForegroundColor(color);
}
}
/**
* @generated
*/
protected void setBackgroundColor(Color color) {
if (primaryShape != null) {
primaryShape.setBackgroundColor(color);
}
}
/**
* @generated
*/
protected void setLineWidth(int width) {
if (primaryShape instanceof Shape) {
((Shape) primaryShape).setLineWidth(width);
}
}
/**
* @generated
*/
protected void setLineType(int style) {
if (primaryShape instanceof Shape) {
((Shape) primaryShape).setLineStyle(style);
}
}
/**
* @generated
*/
public EditPart getPrimaryChildEditPart() {
return getChildBySemanticHint(DataMapperVisualIDRegistry.getType(TreeNodeName3EditPart.VISUAL_ID));
}
/**
* @generated NOT
*/
public class TreeNodeFigure extends RectangleFigure {
// Plugin-relative icon paths used for the node's label icon.
private static final String ELEMENT_ICON = "icons/gmf/symbol_element_of.gif";
private static final String ARRAY_ICON = "icons/gmf/arrays.jpg";
private static final String OBJECT_ICON = "icons/gmf/object.jpg";
private static final String ATTRIBUTE_ICON = "icons/gmf/AttributeIcon.png";
// Plugin id used to resolve the icon paths above.
private static final String ORG_WSO2_DEVELOPERSTUDIO_VISUALDATAMAPPER_DIAGRAM = "org.wso2.developerstudio.visualdatamapper.diagram";
// JSON-schema property key/values inspected when choosing the icon.
private static final String JSON_SCHEMA_TYPE = "type";
private static final String JSON_SCHEMA_ARRAY = "array";
private static final String JSON_SCHEMA_OBJECT = "object";
// Attribute names are prefixed with '@' in the model.
private static final String PREFIX = "@";
/**
 * Label figure carrying the node name; exposed via
 * getFigureTreeNodeNameFigure() for the name edit part.
 *
 * @generated
 */
private WrappingLabel fFigureTreeNodeNameFigure;
/**
 * Whether the node's children are currently shown; toggled by repaint().
 *
 * @generated NOT
 */
boolean isExpanded = true;
/**
 * Clickable handle whose contents are swapped on expand/collapse.
 *
 * @generated NOT
 */
ClickNode clickNode;
/**
 * Builds the node figure: a transparent, vertical toolbar container whose
 * actual contents are created by createContents().
 *
 * @generated NOT
 */
public TreeNodeFigure() {
	ToolbarLayout verticalLayout = new ToolbarLayout();
	verticalLayout.setStretchMinorAxis(true);
	verticalLayout.setMinorAlignment(ToolbarLayout.ALIGN_TOPLEFT);
	verticalLayout.setVertical(true);
	setLayoutManager(verticalLayout);
	// The container itself draws nothing; child figures provide all visuals.
	setOpaque(false);
	setFill(false);
	setOutline(false);
	createContents();
}
/**
 * Builds the visible row for this node: an indentation spacer sized by the
 * node's tree level, followed by an icon + name label. Also installs mouse
 * listeners that highlight the row and arm the palette's link tool while
 * the pointer is inside the figure.
 *
 * Icon choice: '@'-prefixed names get the attribute icon, "array"/"object"
 * typed nodes their respective icons, anything else the plain element icon.
 *
 * @generated NOT
 */
private void createContents() {
	RectangleFigure row = new RectangleFigure();
	ToolbarLayout horizontalLayout = new ToolbarLayout();
	horizontalLayout.setVertical(false);
	row.setLayoutManager(horizontalLayout);
	row.setPreferredSize(100, 3);
	row.setBorder(null);
	row.setOpaque(true);
	// Spacer that pushes the label right according to the nesting level.
	RectangleFigure indent = new RectangleFigure();
	indent.setBorder(null);
	indent.setOpaque(true);
	ImageDescriptor mainImgDescCollapse = AbstractUIPlugin
			.imageDescriptorFromPlugin(ORG_WSO2_DEVELOPERSTUDIO_VISUALDATAMAPPER_DIAGRAM, ELEMENT_ICON);
	ImageDescriptor attributeImgDesc = AbstractUIPlugin
			.imageDescriptorFromPlugin(ORG_WSO2_DEVELOPERSTUDIO_VISUALDATAMAPPER_DIAGRAM, ATTRIBUTE_ICON);
	ImageDescriptor arrayImgDesc = AbstractUIPlugin
			.imageDescriptorFromPlugin(ORG_WSO2_DEVELOPERSTUDIO_VISUALDATAMAPPER_DIAGRAM, ARRAY_ICON);
	ImageDescriptor objectImgDesc = AbstractUIPlugin
			.imageDescriptorFromPlugin(ORG_WSO2_DEVELOPERSTUDIO_VISUALDATAMAPPER_DIAGRAM, OBJECT_ICON);
	final ImageFigure mainImg = new ImageFigure(mainImgDescCollapse.createImage());
	mainImg.setSize(new Dimension(10, 8));
	ImageFigure attributeImg = new ImageFigure(attributeImgDesc.createImage());
	attributeImg.setSize(new Dimension(10, 8));
	ImageFigure arrayImg = new ImageFigure(arrayImgDesc.createImage());
	arrayImg.setSize(new Dimension(10, 8));
	ImageFigure objectImg = new ImageFigure(objectImgDesc.createImage());
	// FIX: arrayImg was previously sized twice while objectImg was never sized.
	objectImg.setSize(new Dimension(10, 8));
	// NOTE(review): four *ImageRectangle helper figures were previously built
	// here (partly wired to the wrong images) but never attached to any
	// parent figure; that dead code has been removed.
	fFigureTreeNodeNameFigure = new WrappingLabel();
	String name = ((TreeNode) ((View) getModel()).getElement()).getName();
	String type = null;
	for (PropertyKeyValuePair keyValue : ((TreeNode) ((View) getModel()).getElement()).getProperties()) {
		if (keyValue.getKey().equals(JSON_SCHEMA_TYPE)) {
			type = keyValue.getValue();
			break;
		}
	}
	int count = ((TreeNode) ((View) getModel()).getElement()).getLevel();
	fFigureTreeNodeNameFigure.setText(name);
	fFigureTreeNodeNameFigure.setForegroundColor(ColorConstants.black);
	fFigureTreeNodeNameFigure.setFont(new Font(null, "Arial", 10, SWT.BOLD));
	// Strip the '@' attribute prefix for display purposes.
	String newName;
	if (StringUtils.isNotEmpty(name) && name.startsWith(PREFIX)) {
		newName = name.split(PREFIX)[1];
	} else {
		newName = name;
	}
	indent.setPreferredSize((count - 1) * 22, 3);
	final Label nodeLabel = new Label();
	if (StringUtils.isNotEmpty(name) && name.startsWith(PREFIX)) {
		nodeLabel.setIcon(attributeImg.getImage());
	} else if (type != null && type.equals(JSON_SCHEMA_ARRAY)) {
		nodeLabel.setIcon(arrayImg.getImage());
	} else if (type != null && type.equals(JSON_SCHEMA_OBJECT)) {
		nodeLabel.setIcon(objectImg.getImage());
	} else {
		nodeLabel.setIcon(mainImg.getImage());
	}
	Display display = Display.getCurrent();
	final Color black = display.getSystemColor(SWT.COLOR_BLACK);
	nodeLabel.setForegroundColor(black);
	nodeLabel.setText(newName);
	nodeLabel.setSize(new Dimension(100, 5));
	// Highlight on any interaction; arm the palette's first link tool while
	// hovering so a drag from the node starts a mapping.
	this.addMouseMotionListener(new MouseMotionListener() {
		@Override
		public void mouseDragged(MouseEvent me) {
			highlightElementOnSelection();
		}

		@Override
		public void mouseEntered(MouseEvent me) {
			highlightElementOnSelection();
			getEditDomain().getPaletteViewer().setActiveTool((ToolEntry) (((PaletteContainer) getEditDomain()
					.getPaletteViewer().getPaletteRoot().getChildren().get(1)).getChildren().get(0)));
		}

		@Override
		public void mouseExited(MouseEvent me) {
			removeHighlight();
			getEditDomain().getPaletteViewer().setActiveTool(null);
		}

		@Override
		public void mouseHover(MouseEvent me) {
			highlightElementOnSelection();
		}

		@Override
		public void mouseMoved(MouseEvent me) {
			// no-op
		}
	});
	this.addMouseListener(new MouseListener() {
		@Override
		public void mouseReleased(MouseEvent me) {
			removeHighlight();
		}

		@Override
		public void mousePressed(MouseEvent me) {
			highlightElementOnSelection();
		}

		@Override
		public void mouseDoubleClicked(MouseEvent me) {
			highlightElementOnSelection();
		}
	});
	row.setOutline(false);
	indent.setOutline(false);
	row.add(indent);
	row.add(nodeLabel);
	row.setFill(false);
	indent.setFill(false);
	this.setFill(false);
	this.setOutline(false);
	this.add(row);
}
/**
 * Returns the wrapping label that displays this node's name.
 *
 * @generated
 */
public WrappingLabel getFigureTreeNodeNameFigure() {
	return fFigureTreeNodeNameFigure;
}

/**
 * Expands or collapses this node's children inside the primary shape and
 * swaps the expand/collapse image shown by the click handle.
 *
 * @param Expanded current expansion state (true = expanded, so collapse)
 * @param image    the handle image to show after toggling
 * @generated NOT
 */
public void repaint(boolean Expanded, ImageFigure image) {
	if (!Expanded) {
		// Currently collapsed: swap the handle image and re-attach the
		// cached child figures.
		// NOTE(review): children are re-added in reverse index order —
		// confirm this restores the intended visual order.
		clickNode.setContents(image);
		isExpanded = true;
		for (int i = childrenIFigure.size() - 1; i >= 0; i--) {
			getPrimaryShape().getChildren().add(childrenIFigure.get(i));
		}
	} else {
		// Currently expanded: swap the handle image and detach the cached
		// child figures from the primary shape.
		clickNode.setContents(image);
		isExpanded = false;
		for (int i = 0; i < childrenIFigure.size(); i++) {
			getPrimaryShape().getChildren().remove(childrenIFigure.get(i));
		}
	}
}

/**
 * Clickable handle used as the expand/collapse toggle; merely widens the
 * visibility of setContents so repaint() can swap the displayed image.
 *
 * @generated NOT
 */
public class ClickNode extends Clickable {
	public ClickNode(ImageFigure image) {
		this.setContents(image);
	}

	@Override
	protected void setContents(IFigure contents) {
		super.setContents(contents);
	}
}
/**
 * Rebuilds the node's label after a rename: strips a leading '@' from the
 * display text, picks the icon matching the (possibly changed) schema
 * type, and swaps the old label figure for the new one.
 *
 * @param name the new element name (may carry the '@' attribute prefix)
 * @param type the element's JSON-schema type; may be null
 */
public void renameElement(String name, String type) {
	ImageDescriptor mainImgDescCollapse = AbstractUIPlugin
			.imageDescriptorFromPlugin(ORG_WSO2_DEVELOPERSTUDIO_VISUALDATAMAPPER_DIAGRAM, ELEMENT_ICON);
	ImageDescriptor attributeImgDesc = AbstractUIPlugin
			.imageDescriptorFromPlugin(ORG_WSO2_DEVELOPERSTUDIO_VISUALDATAMAPPER_DIAGRAM, ATTRIBUTE_ICON);
	ImageDescriptor arrayImgDesc = AbstractUIPlugin
			.imageDescriptorFromPlugin(ORG_WSO2_DEVELOPERSTUDIO_VISUALDATAMAPPER_DIAGRAM, ARRAY_ICON);
	ImageDescriptor objectImgDesc = AbstractUIPlugin
			.imageDescriptorFromPlugin(ORG_WSO2_DEVELOPERSTUDIO_VISUALDATAMAPPER_DIAGRAM, OBJECT_ICON);
	final ImageFigure mainImg = new ImageFigure(mainImgDescCollapse.createImage());
	mainImg.setSize(new Dimension(10, 8));
	ImageFigure attributeImg = new ImageFigure(attributeImgDesc.createImage());
	attributeImg.setSize(new Dimension(10, 8));
	ImageFigure arrayImg = new ImageFigure(arrayImgDesc.createImage());
	// FIX: attributeImg was sized three times while arrayImg and objectImg
	// were never sized; size each icon exactly once.
	arrayImg.setSize(new Dimension(10, 8));
	ImageFigure objectImg = new ImageFigure(objectImgDesc.createImage());
	objectImg.setSize(new Dimension(10, 8));
	Label nodeLabel = new Label();
	// Strip the '@' attribute prefix for display purposes.
	String newName;
	if (StringUtils.isNotEmpty(name) && name.startsWith(PREFIX)) {
		newName = name.split(PREFIX)[1];
	} else {
		newName = name;
	}
	if (StringUtils.isNotEmpty(name) && name.startsWith(PREFIX)) {
		nodeLabel.setIcon(attributeImg.getImage());
	} else if (type != null && type.equals(JSON_SCHEMA_ARRAY)) {
		nodeLabel.setIcon(arrayImg.getImage());
	} else if (type != null && type.equals(JSON_SCHEMA_OBJECT)) {
		nodeLabel.setIcon(objectImg.getImage());
	} else {
		nodeLabel.setIcon(mainImg.getImage());
	}
	Display display = Display.getCurrent();
	Color black = display.getSystemColor(SWT.COLOR_BLACK);
	nodeLabel.setForegroundColor(black);
	nodeLabel.setText(newName);
	nodeLabel.setSize(new Dimension(100, 5));
	// Swap the old label (index 1, after the indent spacer) for the new one.
	RectangleFigure rectFigure = (RectangleFigure) this.getChildren().get(0);
	List<Figure> childrenList = rectFigure.getChildren();
	rectFigure.remove(childrenList.get(1));
	rectFigure.add(nodeLabel);
}
/**
 * Recolors the node label to the selection (teal) color.
 *
 * NOTE(review): a fresh SWT Color is allocated on every call and never
 * disposed; consider caching it as a shared constant.
 */
public void highlightElementOnSelection() {
	// Removed the unused "Display display" local present in the original.
	RectangleFigure rectFigure = (RectangleFigure) this.getChildren().get(0);
	List<Figure> childrenList = rectFigure.getChildren();
	Color highlightColor = new Color(null, 0, 125, 133);
	Label nodeLabel = (Label) childrenList.get(1);
	nodeLabel.setForegroundColor(highlightColor);
	// Re-insert the label so the new color is repainted.
	rectFigure.remove(childrenList.get(1));
	rectFigure.add(nodeLabel);
}

/**
 * Restores the node label to its normal (black) foreground color.
 */
public void removeHighlight() {
	RectangleFigure rectFigure = (RectangleFigure) this.getChildren().get(0);
	List<Figure> childrenList = rectFigure.getChildren();
	Color black = Display.getCurrent().getSystemColor(SWT.COLOR_BLACK);
	Label nodeLabel = (Label) childrenList.get(1);
	nodeLabel.setForegroundColor(black);
	rectFigure.remove(childrenList.get(1));
	rectFigure.add(nodeLabel);
}
}
/** Delegates a rename of this element's label to the primary shape. */
public void renameElementItem(String newName, String type) {
	getPrimaryShape().renameElement(newName, type);
}

/** Clears the selection highlight on this element's label. */
public void removeHighlightOnElem() {
	getPrimaryShape().removeHighlight();
}

/** Applies the selection highlight to this element's label. */
public void highlightElementItem() {
	getPrimaryShape().highlightElementOnSelection();
}
}
|
plugins/org.wso2.developerstudio.visualdatamapper.diagram/src/org/wso2/developerstudio/datamapper/diagram/edit/parts/TreeNode3EditPart.java
|
package org.wso2.developerstudio.datamapper.diagram.edit.parts;
import java.util.ArrayList;
import java.util.List;
import org.apache.commons.lang.StringUtils;
import org.eclipse.draw2d.Clickable;
import org.eclipse.draw2d.ColorConstants;
import org.eclipse.draw2d.Figure;
import org.eclipse.draw2d.IFigure;
import org.eclipse.draw2d.ImageFigure;
import org.eclipse.draw2d.Label;
import org.eclipse.draw2d.MarginBorder;
import org.eclipse.draw2d.MouseEvent;
import org.eclipse.draw2d.MouseListener;
import org.eclipse.draw2d.MouseMotionListener;
import org.eclipse.draw2d.PositionConstants;
import org.eclipse.draw2d.RectangleFigure;
import org.eclipse.draw2d.Shape;
import org.eclipse.draw2d.StackLayout;
import org.eclipse.draw2d.ToolbarLayout;
import org.eclipse.draw2d.geometry.Dimension;
import org.eclipse.gef.EditPart;
import org.eclipse.gef.EditPolicy;
import org.eclipse.gef.Request;
import org.eclipse.gef.commands.Command;
import org.eclipse.gef.editpolicies.LayoutEditPolicy;
import org.eclipse.gef.editpolicies.NonResizableEditPolicy;
import org.eclipse.gef.palette.PaletteContainer;
import org.eclipse.gef.palette.ToolEntry;
import org.eclipse.gef.requests.CreateRequest;
import org.eclipse.gmf.runtime.diagram.ui.editparts.AbstractBorderedShapeEditPart;
import org.eclipse.gmf.runtime.diagram.ui.editparts.IBorderItemEditPart;
import org.eclipse.gmf.runtime.diagram.ui.editparts.IGraphicalEditPart;
import org.eclipse.gmf.runtime.diagram.ui.editpolicies.EditPolicyRoles;
import org.eclipse.gmf.runtime.diagram.ui.figures.BorderItemLocator;
import org.eclipse.gmf.runtime.draw2d.ui.figures.ConstrainedToolbarLayout;
import org.eclipse.gmf.runtime.draw2d.ui.figures.WrappingLabel;
import org.eclipse.gmf.runtime.gef.ui.figures.DefaultSizeNodeFigure;
import org.eclipse.gmf.runtime.gef.ui.figures.NodeFigure;
import org.eclipse.gmf.runtime.notation.View;
import org.eclipse.gmf.tooling.runtime.edit.policies.reparent.CreationEditPolicyWithCustomReparent;
import org.eclipse.jface.resource.ImageDescriptor;
import org.eclipse.swt.SWT;
import org.eclipse.swt.graphics.Color;
import org.eclipse.swt.graphics.Font;
import org.eclipse.swt.widgets.Display;
import org.eclipse.ui.plugin.AbstractUIPlugin;
import org.wso2.developerstudio.datamapper.PropertyKeyValuePair;
import org.wso2.developerstudio.datamapper.TreeNode;
import org.wso2.developerstudio.datamapper.diagram.edit.parts.custom.CustomNonResizableEditPolicyEx;
import org.wso2.developerstudio.datamapper.diagram.part.DataMapperVisualIDRegistry;
/**
* @generated
*/
public class TreeNode3EditPart extends AbstractBorderedShapeEditPart {
// Icon path for parent nodes.
private static final String PARENT_ICON = "icons/gmf/parent.gif";
// NOTE(review): identical to PARENT_ICON — looks like a copy/paste leftover;
// confirm whether a dedicated array icon was intended here.
private static final String ARRAY_ICON = "icons/gmf/parent.gif";
// Plugin id used to resolve icon paths.
private static final String ORG_WSO2_DEVELOPERSTUDIO_VISUALDATAMAPPER_DIAGRAM = "org.wso2.developerstudio.visualdatamapper.diagram";
// JSON-schema property keys and sentinel values read from the TreeNode model.
private static final String JSON_SCHEMA_ARRAY_ITEMS_VALUE_TYPE = "items_value_type";
private static final String JSON_SCHEMA_OBJECT_VALUE_TYPE = "object_value_type";
private static final String JSON_SCHEMA_ARRAY_ITEMS_TYPE = "items_type";
private static final String NULL_VALUE = "null";
public static final String JSON_SCHEMA_TYPE = "type";
public static final String JSON_SCHEMA_ARRAY = "array";
public static final String JSON_SCHEMA_OBJECT = "object";
/**
 * Child figures cached by activate() so they can be detached/re-attached
 * when the node is collapsed/expanded (see TreeNodeFigure.repaint).
 *
 * @generated NOT
 */
List<IFigure> childrenIFigure;
/**
 * Guards activate() so the child-figure cache is built only once.
 *
 * @generated NOT
 */
boolean isActivated = false;
/**
 * @generated NOT
 */
NodeFigure figure;
/**
 * @generated
 */
public static final int VISUAL_ID = 3011;
/**
 * @generated
 */
protected IFigure contentPane;
/**
 * @generated
 */
protected IFigure primaryShape;
/**
 * Creates the edit part for the given notation view.
 *
 * @param view the notation view backing this tree node
 * @generated
 */
public TreeNode3EditPart(View view) {
	super(view);
}
/**
 * On first activation, caches this node's child figures so they can later
 * be detached/re-attached when the node is collapsed/expanded.
 *
 * @generated NOT
 */
@Override
public void activate() {
	super.activate();
	if (!isActivated) {
		List<IFigure> figures = new ArrayList<IFigure>();
		childrenIFigure = new ArrayList<IFigure>();
		int count = getPrimaryShape().getChildren().size();
		// Drain the primary shape's children (always removing index 0),
		// recording each one both locally and in the collapse cache.
		for (int i = 0; i < count; i++) {
			IFigure figure = (IFigure) getPrimaryShape().getChildren().get(0);
			figures.add(figure);
			childrenIFigure.add(figure);
			getPrimaryShape().getChildren().remove(figure);
		}
		// Re-attach the figures in their original order.
		for (int i = 0; i < count; i++) {
			getPrimaryShape().getChildren().add(figures.get(i));
		}
		// Fix the preferred size of the first child figure.
		((Figure) (getPrimaryShape().getChildren().get(0))).setPreferredSize(100, 40);
		// The last cached figure is excluded from the collapse cache —
		// presumably the node's own row rather than a collapsible child;
		// TODO confirm.
		childrenIFigure.remove(childrenIFigure.size() - 1);
		isActivated = true;
	}
}
/**
 * Installs the edit policies for this node, removes the ones that do not
 * apply to tree nodes (connection handles, popup bar), and disables
 * dragging/resizing via a custom selection policy.
 *
 * @generated NOT
 */
protected void createDefaultEditPolicies() {
	// Creation policy must be installed before the defaults so reparenting
	// uses the data-mapper visual-id mapping.
	installEditPolicy(EditPolicyRoles.CREATION_ROLE, new CreationEditPolicyWithCustomReparent(
			org.wso2.developerstudio.datamapper.diagram.part.DataMapperVisualIDRegistry.TYPED_INSTANCE));
	super.createDefaultEditPolicies();
	installEditPolicy(EditPolicyRoles.SEMANTIC_ROLE,
			new org.wso2.developerstudio.datamapper.diagram.edit.policies.TreeNode3ItemSemanticEditPolicy());
	installEditPolicy(EditPolicyRoles.CANONICAL_ROLE,
			new org.wso2.developerstudio.datamapper.diagram.edit.policies.TreeNode3CanonicalEditPolicy());
	installEditPolicy(EditPolicy.LAYOUT_ROLE, createLayoutEditPolicy());
	// Tree nodes expose no connection handles.
	removeEditPolicy(org.eclipse.gmf.runtime.diagram.ui.editpolicies.EditPolicyRoles.CONNECTION_HANDLES_ROLE);
	/* Disable dragging and resizing */
	NonResizableEditPolicy selectionPolicy = new CustomNonResizableEditPolicyEx();
	selectionPolicy.setDragAllowed(false);
	installEditPolicy(EditPolicy.PRIMARY_DRAG_ROLE, selectionPolicy);
	/* remove balloon */
	removeEditPolicy(org.eclipse.gmf.runtime.diagram.ui.editpolicies.EditPolicyRoles.POPUPBAR_ROLE);
}
/*
 * (non-Javadoc)
 *
 * @see org.eclipse.gmf.runtime.diagram.ui.editparts.GraphicalEditPart#
 * isSelectable()
 *
 */
@Override
public boolean isSelectable() {
	// Tree nodes are always selectable so users can start mappings from them.
	return true;
}
/**
 * Layout policy keeping children static: move and create requests yield no
 * command, and children without their own drag policy fall back to a plain
 * NonResizableEditPolicy.
 *
 * @generated NOT
 */
protected LayoutEditPolicy createLayoutEditPolicy() {
	return new org.eclipse.gmf.runtime.diagram.ui.editpolicies.LayoutEditPolicy() {
		protected EditPolicy createChildEditPolicy(EditPart child) {
			EditPolicy childPolicy = child.getEditPolicy(EditPolicy.PRIMARY_DRAG_ROLE);
			return childPolicy != null ? childPolicy : new NonResizableEditPolicy();
		}

		protected Command getMoveChildrenCommand(Request request) {
			return null; // children cannot be moved
		}

		protected Command getCreateCommand(CreateRequest request) {
			return null; // nothing can be created inside a tree node
		}
	};
}
/**
 * Instantiates the custom tree-node figure and remembers it as the primary
 * shape.
 *
 * @generated NOT
 */
protected IFigure createNodeShape() {
	primaryShape = new TreeNodeFigure();
	return primaryShape;
}

/**
 * @generated
 */
public TreeNodeFigure getPrimaryShape() {
	return (TreeNodeFigure) primaryShape;
}

/**
 * Walks up the edit-part hierarchy until the enclosing Input/Output box is
 * found, stopping at the diagram root or a null parent.
 *
 * @return the enclosing Input/Output edit part, the diagram root, or null
 */
private EditPart getParentBox() {
	EditPart current = getParent();
	while (current != null && !(current instanceof DataMapperRootEditPart)) {
		if (current instanceof InputEditPart || current instanceof OutputEditPart) {
			return current;
		}
		current = current.getParent();
	}
	return current;
}
/**
 * Wires the fixed children of this node: the name label and the in/out
 * border connectors. Connector arrows are suppressed (replaced with empty
 * figures) wherever a connection at this node would not be meaningful:
 * in-nodes inside the input box, out-nodes inside the output box, parents
 * whose children carry the values, and nodes of type "null".
 *
 * @return true when the child was handled as a fixed child
 * @generated NOT
 */
protected boolean addFixedChild(EditPart childEditPart) {
	String type = getNodeType();
	EditPart temp = this.getParentBox();
	if (childEditPart instanceof TreeNodeName3EditPart) {
		((TreeNodeName3EditPart) childEditPart).setLabel(getPrimaryShape().getFigureTreeNodeNameFigure());
		return true;
	}
	if (childEditPart instanceof InNodeEditPart) {
		if (temp instanceof InputEditPart) {
			// Input-side elements never receive incoming links.
			createEmptyInNode(childEditPart);
		} else {
			// If an element has children, then disable the innode connector arrow
			if (((TreeNode) ((View) getModel()).getElement()).getNode().size() > 0) {
				String value = getNodeValue(type);
				// If an element has values then enable the connector arrow
				if (StringUtils.isNotEmpty(value)) {
					return createInNode(childEditPart);
				} else {
					createEmptyInNode(childEditPart);
				}
			} else {
				if (type.equals(JSON_SCHEMA_OBJECT) || type.equals(JSON_SCHEMA_ARRAY)) {
					String itemsType = getItemsType();
					// If an element has values then enable the connector arrow
					if (itemsType.equals(NULL_VALUE)) {
						createEmptyInNode(childEditPart);
					} else {
						return createInNode(childEditPart);
					}
				} else {
					if (type.equals(NULL_VALUE)) {
						// If type is null, then disable the in node connector
						createEmptyInNode(childEditPart);
					} else {
						return createInNode(childEditPart);
					}
				}
			}
		}
	}
	if (childEditPart instanceof OutNodeEditPart) {
		if (temp instanceof OutputEditPart) {
			// FIX: was createEmptyInNode, which casts this OutNodeEditPart
			// child to InNodeEditPart and would throw ClassCastException.
			createEmptyOutNode(childEditPart);
		} else {
			// If an element has children, then disable the outnode connector arrow
			if (((TreeNode) ((View) getModel()).getElement()).getNode().size() > 0) {
				String value = getNodeValue(type);
				// If an element has values then enable the connector arrow
				if (StringUtils.isNotEmpty(value)) {
					return createOutNode(childEditPart);
				} else {
					createEmptyOutNode(childEditPart);
				}
			} else {
				if (type.equals(JSON_SCHEMA_OBJECT) || type.equals(JSON_SCHEMA_ARRAY)) {
					String itemsType = getItemsType();
					// If an element has values then enable the connector arrow
					if (itemsType.equals(NULL_VALUE)) {
						createEmptyOutNode(childEditPart);
					} else {
						return createOutNode(childEditPart);
					}
				} else {
					if (type.equals(NULL_VALUE)) {
						// If type is null, then disable the out node connector
						createEmptyOutNode(childEditPart);
					} else {
						return createOutNode(childEditPart);
					}
				}
			}
		}
	}
	return false;
}
/**
 * Reads the "items_type" property of the underlying TreeNode.
 *
 * @return the array items type, or "" when the property is absent
 */
public String getItemsType() {
	for (PropertyKeyValuePair keyValue : ((TreeNode) ((View) getModel()).getElement()).getProperties()) {
		if (keyValue.getKey().equals(JSON_SCHEMA_ARRAY_ITEMS_TYPE)) {
			return keyValue.getValue();
		}
	}
	return "";
}

/**
 * Replaces the in-node connector figure's contents with an empty figure,
 * hiding the incoming connection arrow.
 */
private void createEmptyInNode(EditPart childEditPart) {
	NodeFigure connectorFigure = (NodeFigure) ((InNodeEditPart) childEditPart).getFigure();
	connectorFigure.removeAll();
	connectorFigure.add(new Figure());
}

/**
 * Replaces the out-node connector figure's contents with an empty figure,
 * hiding the outgoing connection arrow.
 */
private void createEmptyOutNode(EditPart childEditPart) {
	NodeFigure connectorFigure = (NodeFigure) ((OutNodeEditPart) childEditPart).getFigure();
	connectorFigure.removeAll();
	connectorFigure.add(new Figure());
}

/**
 * Anchors the in-node connector on the west (left) border of this node.
 *
 * @return always true
 */
private boolean createInNode(EditPart childEditPart) {
	IFigure childFigure = ((InNodeEditPart) childEditPart).getFigure();
	getBorderedFigure().getBorderItemContainer()
			.add(childFigure, new BorderItemLocator(getMainFigure(), PositionConstants.WEST));
	return true;
}

/**
 * Anchors the out-node connector on the east (right) border of this node.
 *
 * @return always true
 */
private boolean createOutNode(EditPart childEditPart) {
	IFigure childFigure = ((OutNodeEditPart) childEditPart).getFigure();
	getBorderedFigure().getBorderItemContainer()
			.add(childFigure, new BorderItemLocator(getMainFigure(), PositionConstants.EAST));
	return true;
}
/**
 * Reads the "type" entry from the underlying TreeNode's property pairs.
 *
 * @return the JSON-schema type of this node, or "" when no "type" property exists
 */
public String getNodeType() {
	for (PropertyKeyValuePair keyValue : ((TreeNode) ((View) getModel()).getElement()).getProperties()) {
		if (keyValue.getKey().equals(JSON_SCHEMA_TYPE)) {
			return keyValue.getValue();
		}
	}
	return "";
}

/**
 * Resolves the "value type" property for this node based on its
 * JSON-schema type: arrays store it under "items_value_type", objects
 * under "object_value_type"; other types carry no value-type property.
 *
 * @param type the node's JSON-schema type (e.g. "array", "object")
 * @return the matching property value, or "" when absent or not applicable
 */
public String getNodeValue(String type) {
	// The two original branches differed only in the property key; pick the
	// key once and run a single lookup instead of two duplicated loops.
	String key;
	if (type.equals(JSON_SCHEMA_ARRAY)) {
		key = JSON_SCHEMA_ARRAY_ITEMS_VALUE_TYPE;
	} else if (type.equals(JSON_SCHEMA_OBJECT)) {
		key = JSON_SCHEMA_OBJECT_VALUE_TYPE;
	} else {
		return "";
	}
	for (PropertyKeyValuePair keyValue : ((TreeNode) ((View) getModel()).getElement()).getProperties()) {
		if (keyValue.getKey().equals(key)) {
			return keyValue.getValue();
		}
	}
	return "";
}
/**
 * Undoes the wiring performed by addFixedChild for the node's fixed
 * children: the name label and the in/out border connectors.
 *
 * @generated
 */
protected boolean removeFixedChild(EditPart childEditPart) {
	if (childEditPart instanceof TreeNodeName3EditPart) {
		// The label lives inside the primary shape; no figure to detach.
		return true;
	} else if (childEditPart instanceof InNodeEditPart) {
		getBorderedFigure().getBorderItemContainer().remove(((InNodeEditPart) childEditPart).getFigure());
		return true;
	} else if (childEditPart instanceof OutNodeEditPart) {
		getBorderedFigure().getBorderItemContainer().remove(((OutNodeEditPart) childEditPart).getFigure());
		return true;
	}
	return false;
}

/**
 * Adds a child's figure, routing fixed children (label, border nodes)
 * through addFixedChild and everything else to the default content pane.
 *
 * @generated
 */
protected void addChildVisual(EditPart childEditPart, int index) {
	if (!addFixedChild(childEditPart)) {
		super.addChildVisual(childEditPart, -1);
	}
}

/**
 * Removes a child's figure, mirroring addChildVisual.
 *
 * @generated
 */
protected void removeChildVisual(EditPart childEditPart) {
	if (!removeFixedChild(childEditPart)) {
		super.removeChildVisual(childEditPart);
	}
}

/**
 * Border items are hosted by the border-item container; all other children
 * go to the regular content pane.
 *
 * @generated
 */
protected IFigure getContentPaneFor(IGraphicalEditPart editPart) {
	return editPart instanceof IBorderItemEditPart
			? getBorderedFigure().getBorderItemContainer()
			: getContentPane();
}

/**
 * Creates the 40x40 node plate the main figure is stacked on.
 *
 * @generated
 */
protected NodeFigure createNodePlate() {
	return new DefaultSizeNodeFigure(40, 40);
}

/**
 * Creates figure for this edit part.
 *
 * Body of this method does not depend on settings in generation model so
 * you may safely remove <i>generated</i> tag and modify it.
 *
 * @generated
 */
protected NodeFigure createMainFigure() {
	NodeFigure plate = createNodePlate();
	plate.setLayoutManager(new StackLayout());
	IFigure shape = createNodeShape();
	plate.add(shape);
	contentPane = setupContentPane(shape);
	return plate;
}

/**
 * Default implementation treats passed figure as content pane. Respects
 * layout one may have set for generated figure.
 *
 * @param nodeShape
 *            instance of generated figure class
 * @generated
 */
protected IFigure setupContentPane(IFigure nodeShape) {
	if (nodeShape.getLayoutManager() == null) {
		// Only install a layout when the figure did not bring its own.
		ConstrainedToolbarLayout spacedLayout = new ConstrainedToolbarLayout();
		spacedLayout.setSpacing(5);
		nodeShape.setLayoutManager(spacedLayout);
	}
	return nodeShape; // the node shape itself serves as the content pane
}

/**
 * @generated
 */
public IFigure getContentPane() {
	return contentPane != null ? contentPane : super.getContentPane();
}

/**
 * Forwards the foreground color to the primary shape, if created.
 *
 * @generated
 */
protected void setForegroundColor(Color color) {
	if (primaryShape == null) {
		return;
	}
	primaryShape.setForegroundColor(color);
}

/**
 * Forwards the background color to the primary shape, if created.
 *
 * @generated
 */
protected void setBackgroundColor(Color color) {
	if (primaryShape == null) {
		return;
	}
	primaryShape.setBackgroundColor(color);
}

/**
 * Applies the line width when the primary shape is an outline-drawing Shape.
 *
 * @generated
 */
protected void setLineWidth(int width) {
	if (primaryShape instanceof Shape) {
		((Shape) primaryShape).setLineWidth(width);
	}
}

/**
 * Applies the line style when the primary shape is an outline-drawing Shape.
 *
 * @generated
 */
protected void setLineType(int style) {
	if (primaryShape instanceof Shape) {
		((Shape) primaryShape).setLineStyle(style);
	}
}

/**
 * @generated
 */
public EditPart getPrimaryChildEditPart() {
	String labelHint = DataMapperVisualIDRegistry.getType(TreeNodeName3EditPart.VISUAL_ID);
	return getChildBySemanticHint(labelHint);
}
/**
* @generated NOT
*/
public class TreeNodeFigure extends RectangleFigure {
// Plugin-relative icon paths used for the node's label icon.
private static final String ELEMENT_ICON = "icons/gmf/symbol_element_of.gif";
private static final String ARRAY_ICON = "icons/gmf/arrays.jpg";
private static final String OBJECT_ICON = "icons/gmf/object.jpg";
private static final String ATTRIBUTE_ICON = "icons/gmf/AttributeIcon.png";
// Plugin id used to resolve the icon paths above.
private static final String ORG_WSO2_DEVELOPERSTUDIO_VISUALDATAMAPPER_DIAGRAM = "org.wso2.developerstudio.visualdatamapper.diagram";
// JSON-schema property key/values inspected when choosing the icon.
private static final String JSON_SCHEMA_TYPE = "type";
private static final String JSON_SCHEMA_ARRAY = "array";
private static final String JSON_SCHEMA_OBJECT = "object";
// Attribute names are prefixed with '@' in the model.
private static final String PREFIX = "@";
/**
 * Label figure carrying the node name; exposed via
 * getFigureTreeNodeNameFigure() for the name edit part.
 *
 * @generated
 */
private WrappingLabel fFigureTreeNodeNameFigure;
/**
 * Whether the node's children are currently shown; toggled by repaint().
 *
 * @generated NOT
 */
boolean isExpanded = true;
/**
 * Clickable handle whose contents are swapped on expand/collapse.
 *
 * @generated NOT
 */
ClickNode clickNode;
/**
 * Builds the node figure: a transparent, vertical toolbar container whose
 * actual contents are created by createContents().
 *
 * @generated NOT
 */
public TreeNodeFigure() {
	ToolbarLayout verticalLayout = new ToolbarLayout();
	verticalLayout.setStretchMinorAxis(true);
	verticalLayout.setMinorAlignment(ToolbarLayout.ALIGN_TOPLEFT);
	verticalLayout.setVertical(true);
	setLayoutManager(verticalLayout);
	// The container itself draws nothing; child figures provide all visuals.
	setOpaque(false);
	setFill(false);
	setOutline(false);
	createContents();
}
/**
* @generated NOT
*/
private void createContents() {
RectangleFigure figure = new RectangleFigure();
ToolbarLayout l = new ToolbarLayout();
l.setVertical(false);
figure.setLayoutManager(l);
figure.setPreferredSize(100, 3);
figure.setBorder(null);
figure.setOpaque(true);
RectangleFigure figure2 = new RectangleFigure();
figure2.setBorder(null);
figure2.setOpaque(true);
ImageDescriptor mainImgDescCollapse = AbstractUIPlugin
.imageDescriptorFromPlugin(ORG_WSO2_DEVELOPERSTUDIO_VISUALDATAMAPPER_DIAGRAM, ELEMENT_ICON);// plus
ImageDescriptor attributeImgDesc = AbstractUIPlugin
.imageDescriptorFromPlugin(ORG_WSO2_DEVELOPERSTUDIO_VISUALDATAMAPPER_DIAGRAM, ATTRIBUTE_ICON);
ImageDescriptor arrayImgDesc = AbstractUIPlugin
.imageDescriptorFromPlugin(ORG_WSO2_DEVELOPERSTUDIO_VISUALDATAMAPPER_DIAGRAM, ARRAY_ICON);
ImageDescriptor objectImgDesc = AbstractUIPlugin
.imageDescriptorFromPlugin(ORG_WSO2_DEVELOPERSTUDIO_VISUALDATAMAPPER_DIAGRAM, OBJECT_ICON);
final ImageFigure mainImg = new ImageFigure(mainImgDescCollapse.createImage());
mainImg.setSize(new Dimension(10, 8));
ImageFigure attributeImg = new ImageFigure(attributeImgDesc.createImage()); // attribute
// symbole
// figure
attributeImg.setSize(new Dimension(10, 8));
ImageFigure arrayImg = new ImageFigure(arrayImgDesc.createImage()); // array
// symbole
// figure
arrayImg.setSize(new Dimension(10, 8));
ImageFigure objectImg = new ImageFigure(objectImgDesc.createImage()); // array
// symbole
// figure
arrayImg.setSize(new Dimension(10, 8));
RectangleFigure mainImageRectangle = new RectangleFigure();
mainImageRectangle.setBackgroundColor(new Color(null, 255, 255, 255));
mainImageRectangle.setPreferredSize(new Dimension(10, 7));
mainImageRectangle.add(mainImg);
mainImageRectangle.setBorder(new MarginBorder(1, 1, 1, 1));
RectangleFigure attributeImageRectangle = new RectangleFigure();
attributeImageRectangle.setBackgroundColor(new Color(null, 255, 255, 255));
attributeImageRectangle.setPreferredSize(new Dimension(10, 7));
attributeImageRectangle.add(attributeImg);
mainImageRectangle.setBorder(new MarginBorder(1, 1, 1, 1));
RectangleFigure arrayImageRectangle = new RectangleFigure();
arrayImageRectangle.setBackgroundColor(new Color(null, 255, 255, 255));
arrayImageRectangle.setPreferredSize(new Dimension(10, 7));
arrayImageRectangle.add(attributeImg);
arrayImageRectangle.setBorder(new MarginBorder(1, 1, 1, 1));
RectangleFigure objectImageRectangle = new RectangleFigure();
objectImageRectangle.setBackgroundColor(new Color(null, 255, 255, 255));
objectImageRectangle.setPreferredSize(new Dimension(10, 7));
objectImageRectangle.add(attributeImg);
objectImageRectangle.setBorder(new MarginBorder(1, 1, 1, 1));
fFigureTreeNodeNameFigure = new WrappingLabel();
/*
* String name = (((TreeNode) ((View)
* getModel()).getElement()).getName()).split(",")[1]; int count =
* Integer.parseInt((((TreeNode) ((View)
* getModel()).getElement()).getName()) .split(",")[0]);
*/
String name = (((TreeNode) ((View) getModel()).getElement()).getName());
String type = null;
for (PropertyKeyValuePair keyValue : (((TreeNode) ((View) getModel()).getElement()).getProperties())) {
if (keyValue.getKey().equals(JSON_SCHEMA_TYPE)) {
type = keyValue.getValue();
break;
}
}
int count = ((TreeNode) ((View) getModel()).getElement()).getLevel();
fFigureTreeNodeNameFigure.setText(name);
fFigureTreeNodeNameFigure.setForegroundColor(ColorConstants.black);
fFigureTreeNodeNameFigure.setFont(new Font(null, "Arial", 10, SWT.BOLD));
String newName = null;
if (StringUtils.isNotEmpty(name) && name.startsWith(PREFIX)) {
String[] fullName = name.split(PREFIX);
newName = fullName[1];
} else {
newName = name;
}
figure2.setPreferredSize((count - 1) * 22, 3);
final Label nodeLabel = new Label();
if (StringUtils.isNotEmpty(name) && name.startsWith(PREFIX)) {
nodeLabel.setIcon(attributeImg.getImage());
} else if (type != null && type.equals(JSON_SCHEMA_ARRAY)) {
nodeLabel.setIcon(arrayImg.getImage());
} else if (type != null && type.equals(JSON_SCHEMA_OBJECT)) {
nodeLabel.setIcon(objectImg.getImage());
} else {
nodeLabel.setIcon(mainImg.getImage());
}
Display display = Display.getCurrent();
final Color black = display.getSystemColor(SWT.COLOR_BLACK);
nodeLabel.setForegroundColor(black);
nodeLabel.setText(newName);
nodeLabel.setSize(new Dimension(100, 5));
this.addMouseMotionListener(new MouseMotionListener() {
@Override
public void mouseDragged(MouseEvent me) {
highlightElementOnSelection();
}
@Override
public void mouseEntered(MouseEvent me) {
highlightElementOnSelection();
getEditDomain().getPaletteViewer().setActiveTool((ToolEntry) (((PaletteContainer) getEditDomain()
.getPaletteViewer().getPaletteRoot().getChildren().get(1)).getChildren().get(0)));
}
@Override
public void mouseExited(MouseEvent me) {
removeHighlight();
getEditDomain().getPaletteViewer().setActiveTool(null);
}
@Override
public void mouseHover(MouseEvent me) {
highlightElementOnSelection();
}
@Override
public void mouseMoved(MouseEvent me) {
}
});
this.addMouseListener(new MouseListener() {
@Override
public void mouseReleased(MouseEvent me) {
removeHighlight();
}
@Override
public void mousePressed(MouseEvent me) {
highlightElementOnSelection();
}
@Override
public void mouseDoubleClicked(MouseEvent me) {
highlightElementOnSelection();
}
});
figure.setOutline(false);
figure2.setOutline(false);
figure.add(figure2);
figure.add(nodeLabel);
figure.setFill(false);
figure2.setFill(false);
this.setFill(false);
this.setOutline(false);
this.add(figure);
}
/**
* @generated
*/
public WrappingLabel getFigureTreeNodeNameFigure() {
    // Plain accessor for the label that carries this tree node's display name.
    return this.fFigureTreeNodeNameFigure;
}
/**
 * Toggles this tree node between its expanded and collapsed states.
 * <p>
 * The flag passed in is the node's <em>current</em> state: when called with
 * {@code false} (currently collapsed) the child figures are re-added to the
 * primary shape and {@code isExpanded} is set; when called with {@code true}
 * they are removed and the flag is cleared. In both cases the clickable
 * expand/collapse icon is replaced with {@code image}.
 *
 * @param expanded current expansion state of the node (renamed from
 *                 {@code Expanded} to follow lowerCamelCase convention;
 *                 binary-compatible — parameter names are not part of the
 *                 Java ABI)
 * @param image    icon to show in the clickable expand/collapse node
 * @generated NOT
 */
public void repaint(boolean expanded, ImageFigure image) {
    if (!expanded) {
        // Collapsed -> expand: restore children in reverse so original order is kept.
        clickNode.setContents(image);
        isExpanded = true;
        for (int i = childrenIFigure.size() - 1; i >= 0; i--) {
            getPrimaryShape().getChildren().add(childrenIFigure.get(i));
        }
    } else {
        // Expanded -> collapse: detach every child figure.
        clickNode.setContents(image);
        isExpanded = false;
        for (int i = 0; i < childrenIFigure.size(); i++) {
            getPrimaryShape().getChildren().remove(childrenIFigure.get(i));
        }
    }
}
/**
 * Clickable figure wrapping the expand/collapse icon of a tree node.
 *
 * @generated NOT
 */
public class ClickNode extends Clickable {
// Creates the clickable node showing the given icon.
public ClickNode(ImageFigure image) {
this.setContents(image);
}
@Override
// Re-declared (still protected) so that the enclosing figure class can swap
// the icon via clickNode.setContents(...) when toggling expand/collapse;
// Clickable.setContents() is declared in another package and — presumably —
// would otherwise be inaccessible from the enclosing class. TODO confirm.
protected void setContents(IFigure contents) {
super.setContents(contents);
}
}
/**
 * Rebuilds this node's label (icon + display text) after the underlying tree
 * node was renamed.
 * <p>
 * Recreates the four possible icons, strips the attribute {@code PREFIX}
 * marker from the name for display, picks the icon matching the node kind,
 * and swaps the old label (child index 1 of the node rectangle) for the new
 * one.
 *
 * @param name new (possibly PREFIX-marked) node name
 * @param type JSON schema type of the node, may be {@code null}
 */
public void renameElement(String name, String type) {
    ImageDescriptor mainImgDescCollapse = AbstractUIPlugin
            .imageDescriptorFromPlugin(ORG_WSO2_DEVELOPERSTUDIO_VISUALDATAMAPPER_DIAGRAM, ELEMENT_ICON);
    ImageDescriptor attributeImgDesc = AbstractUIPlugin
            .imageDescriptorFromPlugin(ORG_WSO2_DEVELOPERSTUDIO_VISUALDATAMAPPER_DIAGRAM, ATTRIBUTE_ICON);
    ImageDescriptor arrayImgDesc = AbstractUIPlugin
            .imageDescriptorFromPlugin(ORG_WSO2_DEVELOPERSTUDIO_VISUALDATAMAPPER_DIAGRAM, ARRAY_ICON);
    ImageDescriptor objectImgDesc = AbstractUIPlugin
            .imageDescriptorFromPlugin(ORG_WSO2_DEVELOPERSTUDIO_VISUALDATAMAPPER_DIAGRAM, OBJECT_ICON);
    final ImageFigure mainImg = new ImageFigure(mainImgDescCollapse.createImage());
    mainImg.setSize(new Dimension(10, 8));
    ImageFigure attributeImg = new ImageFigure(attributeImgDesc.createImage()); // attribute symbol figure
    attributeImg.setSize(new Dimension(10, 8));
    ImageFigure arrayImg = new ImageFigure(arrayImgDesc.createImage()); // array symbol figure
    // FIX: previously re-sized attributeImg here, leaving arrayImg at its default size.
    arrayImg.setSize(new Dimension(10, 8));
    ImageFigure objectImg = new ImageFigure(objectImgDesc.createImage()); // object symbol figure
    // FIX: previously re-sized attributeImg here, leaving objectImg at its default size.
    objectImg.setSize(new Dimension(10, 8));
    Label nodeLabel = new Label();
    // Attribute names carry a PREFIX marker; strip it for display.
    String newName = null;
    if (StringUtils.isNotEmpty(name) && name.startsWith(PREFIX)) {
        String[] fullName = name.split(PREFIX);
        newName = fullName[1];
    } else {
        newName = name;
    }
    // Icon choice: attribute marker wins, then JSON schema type, else the generic element icon.
    if (StringUtils.isNotEmpty(name) && name.startsWith(PREFIX)) {
        nodeLabel.setIcon(attributeImg.getImage());
    } else if (type != null && type.equals(JSON_SCHEMA_ARRAY)) {
        nodeLabel.setIcon(arrayImg.getImage());
    } else if (type != null && type.equals(JSON_SCHEMA_OBJECT)) {
        nodeLabel.setIcon(objectImg.getImage());
    } else {
        nodeLabel.setIcon(mainImg.getImage());
    }
    Display display = Display.getCurrent();
    Color black = display.getSystemColor(SWT.COLOR_BLACK);
    nodeLabel.setForegroundColor(black);
    nodeLabel.setText(newName);
    nodeLabel.setSize(new Dimension(100, 5));
    // Replace the old label (child index 1 of the node rectangle) with the rebuilt one.
    RectangleFigure rectFigure = (RectangleFigure) this.getChildren().get(0);
    List<Figure> childrenList = rectFigure.getChildren();
    rectFigure.remove(childrenList.get(1));
    rectFigure.add(nodeLabel);
}
/**
 * Recolors this node's label (child index 1 of the node rectangle) to teal
 * to mark it as selected/hovered.
 */
public void highlightElementOnSelection() {
    RectangleFigure rectFigure = (RectangleFigure) this.getChildren().get(0);
    List<Figure> childrenList = rectFigure.getChildren();
    // FIX: removed an unused "Display display = Display.getCurrent();" local —
    // the teal color below is allocated directly, not taken from the display.
    // NOTE(review): this SWT Color is allocated on every call and never
    // disposed; consider caching a single shared instance — TODO confirm.
    Color bckGrndColor = new Color(null, 0, 125, 133);
    Label newLabel = (Label) childrenList.get(1);
    newLabel.setForegroundColor(bckGrndColor);
    // Remove and re-add the label — presumably to force a repaint; verify.
    rectFigure.remove(childrenList.get(1));
    rectFigure.add(newLabel);
}
/**
 * Restores this node's label (child index 1 of the node rectangle) to the
 * default black foreground, undoing {@link #highlightElementOnSelection()}.
 */
public void removeHighlight() {
    final RectangleFigure container = (RectangleFigure) this.getChildren().get(0);
    final List<Figure> children = container.getChildren();
    final Color defaultColor = Display.getCurrent().getSystemColor(SWT.COLOR_BLACK);
    final Label label = (Label) children.get(1);
    label.setForegroundColor(defaultColor);
    // Detach and re-attach the label, exactly as the highlight path does.
    container.remove(children.get(1));
    container.add(label);
}
}
// Delegates renaming of this edit part's label to the primary shape figure.
public void renameElementItem(String newName, String type) {
getPrimaryShape().renameElement(newName, type);
}
// Delegates highlight removal to the primary shape figure.
public void removeHighlightOnElem() {
getPrimaryShape().removeHighlight();
}
// Delegates selection highlighting to the primary shape figure.
public void highlightElementItem() {
getPrimaryShape().highlightElementOnSelection();
}
}
|
Correction to TreeNode3EditPart for connectors
|
plugins/org.wso2.developerstudio.visualdatamapper.diagram/src/org/wso2/developerstudio/datamapper/diagram/edit/parts/TreeNode3EditPart.java
|
Correction to TreeNode3EditPart for connectors
|
|
Java
|
apache-2.0
|
9178b8b3649407a56bcbe50bbf06f0b998844d9b
| 0
|
iqrfsdk/jsimply,iqrfsdk/jsimply
|
/*
* Copyright 2016 MICRORISC s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.microrisc.simply.iqrf.dpa.v22x.examples.services;
import com.microrisc.simply.Node;
import com.microrisc.simply.SimplyException;
import com.microrisc.simply.iqrf.dpa.DPA_Network;
import com.microrisc.simply.iqrf.dpa.DPA_Node;
import com.microrisc.simply.iqrf.dpa.DPA_Simply;
import com.microrisc.simply.iqrf.dpa.v22x.DPA_SimplyFactory;
import com.microrisc.simply.iqrf.dpa.v22x.services.node.load_code.LoadCodeProcessingInfo;
import com.microrisc.simply.iqrf.dpa.v22x.services.node.load_code.LoadCodeResult;
import com.microrisc.simply.iqrf.dpa.v22x.services.node.load_code.LoadCodeService;
import com.microrisc.simply.iqrf.dpa.v22x.services.node.load_code.LoadCodeServiceParameters;
import com.microrisc.simply.iqrf.dpa.v22x.types.LoadingCodeProperties;
import com.microrisc.simply.services.ServiceResult;
import java.io.File;
import java.util.Collection;
import java.util.LinkedList;
/**
* Loading code into one specified node.
*
* @author Michal Konopa
* @author Martin Strouhal
*/
public class LoadCodeServiceExample {
// Shared Simply instance; created in main() and destroyed before exit.
private static DPA_Simply simply = null;
// prints out specified message, destroys the Simply and exits
private static void printMessageAndExit(String message) {
System.out.println(message);
if ( simply != null) {
simply.destroy();
}
System.exit(1);
}
public static void main(String[] args) {
// creating Simply instance
try {
simply = DPA_SimplyFactory.getSimply("config" + File.separator + "simply" + File.separator + "Simply.properties");
} catch ( SimplyException ex ) {
printMessageAndExit("Error while creating Simply: " + ex.getMessage());
}
// getting network 1
DPA_Network network1 = simply.getNetwork("1", DPA_Network.class);
if ( network1 == null ) {
printMessageAndExit("Network 1 doesn't exist");
}
// getting coordinator
DPA_Node coordinator = network1.getNode("0");
if ( coordinator == null ) {
printMessageAndExit("Coordinator doesn't exist.");
}
// getting Load Code Service on node 0
LoadCodeService loadCodeService = coordinator.getService(LoadCodeService.class);
if ( loadCodeService == null ) {
printMessageAndExit("Coordinator doesn't support Load Code Service.");
}
// loading code
// Loads the Custom DPA Handler hex image at address 0x0800, letting the
// service compute and match the checksum as part of the load.
ServiceResult<LoadCodeResult, LoadCodeProcessingInfo> serviceResult
= loadCodeService.loadCode(
new LoadCodeServiceParameters(
"config" + File.separator + "custom-dpa-handlers" + File.separator + "CustomDpaHandler-LED-Green-On-7xD-V228-160912.hex",
0x0800,
LoadingCodeProperties.LoadingAction.ComputeAndMatchChecksumWithCodeLoading,
LoadingCodeProperties.LoadingContent.Hex
)
);
/*
Alternative: load the red-LED handler instead. (A comma missing after the
.hex path in this snippet has been added so it compiles if uncommented.)
ServiceResult<LoadCodeResult, LoadCodeProcessingInfo> serviceResult
= loadCodeService.loadCode(
new LoadCodeServiceParameters(
"config" + File.separator + "custom-dpa-handlers" + File.separator + "CustomDpaHandler-LED-Red-On-7xD-V228-160912.hex",
0x0800,
LoadingCodeProperties.LoadingAction.ComputeAndMatchChecksumWithCodeLoading,
LoadingCodeProperties.LoadingContent.Hex
)
);
*/
// getting results
if ( serviceResult.getStatus() == ServiceResult.Status.SUCCESSFULLY_COMPLETED ) {
System.out.println("Code successfully loaded.");
} else {
System.out.println("Code load was NOT successful.");
// find out details
LoadCodeProcessingInfo procInfo = serviceResult.getProcessingInfo();
System.out.println(procInfo);
// ...
}
simply.destroy();
}
}
|
simply-modules/simply-iqrf-dpa-v22x-examples/src/main/java/com/microrisc/simply/iqrf/dpa/v22x/examples/services/LoadCodeServiceExample.java
|
/*
* Copyright 2016 MICRORISC s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.microrisc.simply.iqrf.dpa.v22x.examples.services;
import com.microrisc.simply.Node;
import com.microrisc.simply.SimplyException;
import com.microrisc.simply.iqrf.dpa.DPA_Network;
import com.microrisc.simply.iqrf.dpa.DPA_Node;
import com.microrisc.simply.iqrf.dpa.DPA_Simply;
import com.microrisc.simply.iqrf.dpa.v22x.DPA_SimplyFactory;
import com.microrisc.simply.iqrf.dpa.v22x.services.node.load_code.LoadCodeProcessingInfo;
import com.microrisc.simply.iqrf.dpa.v22x.services.node.load_code.LoadCodeResult;
import com.microrisc.simply.iqrf.dpa.v22x.services.node.load_code.LoadCodeService;
import com.microrisc.simply.iqrf.dpa.v22x.services.node.load_code.LoadCodeServiceParameters;
import com.microrisc.simply.iqrf.dpa.v22x.types.LoadingCodeProperties;
import com.microrisc.simply.services.ServiceResult;
import java.io.File;
import java.util.Collection;
import java.util.LinkedList;
/**
* Usage of Load Code Service.
*
* @author Michal Konopa
* @author Martin Strouhal
*/
public class LoadCodeServiceExample {
// Shared Simply instance; created in main() and destroyed before exit.
private static DPA_Simply simply = null;
// prints out specified message, destroys the Simply and exits
private static void printMessageAndExit(String message) {
System.out.println(message);
if ( simply != null) {
simply.destroy();
}
System.exit(1);
}
public static void main(String[] args) {
// creating Simply instance
try {
simply = DPA_SimplyFactory.getSimply("config" + File.separator + "simply" + File.separator + "Simply.properties");
} catch ( SimplyException ex ) {
printMessageAndExit("Error while creating Simply: " + ex.getMessage());
}
// getting network 1
DPA_Network network1 = simply.getNetwork("1", DPA_Network.class);
if ( network1 == null ) {
printMessageAndExit("Network 1 doesn't exist");
}
// getting coordinator
DPA_Node coordinator = network1.getNode("0");
if ( coordinator == null ) {
printMessageAndExit("Coordinator doesn't exist.");
}
// getting Load Code Service on node 0
LoadCodeService loadCodeService = coordinator.getService(LoadCodeService.class);
if ( loadCodeService == null ) {
printMessageAndExit("Coordinator doesn't support Load Code Service.");
}
// Target node(s) for the load; here only node "1" of network 1.
Collection<Node> targetNodes = new LinkedList<>();
targetNodes.add( network1.getNode("1") );
// loading code
// Loads the Custom DPA Handler hex image at address 0x0800 into the target
// nodes, letting the service compute and match the checksum during the load.
ServiceResult<LoadCodeResult, LoadCodeProcessingInfo> serviceResult
= loadCodeService.loadCode(
new LoadCodeServiceParameters(
"config" + File.separator + "custom-dpa-handlers" + File.separator + "CustomDpaHandler-LED-Green-On-7xD-V228-160912.hex",
0x0800,
LoadingCodeProperties.LoadingAction.ComputeAndMatchChecksumWithCodeLoading,
LoadingCodeProperties.LoadingContent.Hex,
targetNodes
)
);
/*
Alternative: load the red-LED handler instead. (A comma missing after the
.hex path in this snippet has been added so it compiles if uncommented.)
ServiceResult<LoadCodeResult, LoadCodeProcessingInfo> serviceResult
= loadCodeService.loadCode(
new LoadCodeServiceParameters(
"config" + File.separator + "custom-dpa-handlers" + File.separator + "CustomDpaHandler-LED-Red-On-7xD-V228-160912.hex",
0x0800,
LoadingCodeProperties.LoadingAction.ComputeAndMatchChecksumWithCodeLoading,
LoadingCodeProperties.LoadingContent.Hex
)
);
*/
// getting results
if ( serviceResult.getStatus() == ServiceResult.Status.SUCCESSFULLY_COMPLETED ) {
System.out.println("Code successfully loaded.");
} else {
System.out.println("Code load was NOT successful.");
// find out details
LoadCodeProcessingInfo procInfo = serviceResult.getProcessingInfo();
System.out.println(procInfo);
// ...
}
simply.destroy();
}
}
|
Simply IQRF DPA v22x:
- improved Load Code Service example for loading code into one node
|
simply-modules/simply-iqrf-dpa-v22x-examples/src/main/java/com/microrisc/simply/iqrf/dpa/v22x/examples/services/LoadCodeServiceExample.java
|
Simply IQRF DPA v22x: - improved Load Code Service example for loading code into one node
|
|
Java
|
apache-2.0
|
c98234f0dfe7158439f4cac2cc460d5fb9e64b3a
| 0
|
alvinkwekel/camel,gnodet/camel,adessaigne/camel,pmoerenhout/camel,adessaigne/camel,tdiesler/camel,adessaigne/camel,mcollovati/camel,tadayosi/camel,nikhilvibhav/camel,tadayosi/camel,pmoerenhout/camel,christophd/camel,cunningt/camel,nicolaferraro/camel,nicolaferraro/camel,apache/camel,tadayosi/camel,nikhilvibhav/camel,pax95/camel,tdiesler/camel,pax95/camel,tadayosi/camel,nicolaferraro/camel,apache/camel,cunningt/camel,adessaigne/camel,adessaigne/camel,alvinkwekel/camel,christophd/camel,pax95/camel,pmoerenhout/camel,cunningt/camel,pmoerenhout/camel,apache/camel,mcollovati/camel,nicolaferraro/camel,cunningt/camel,pmoerenhout/camel,christophd/camel,apache/camel,mcollovati/camel,pax95/camel,pax95/camel,gnodet/camel,cunningt/camel,alvinkwekel/camel,alvinkwekel/camel,gnodet/camel,tdiesler/camel,tdiesler/camel,gnodet/camel,cunningt/camel,apache/camel,tdiesler/camel,christophd/camel,tadayosi/camel,christophd/camel,tadayosi/camel,gnodet/camel,adessaigne/camel,pax95/camel,nikhilvibhav/camel,pmoerenhout/camel,apache/camel,nikhilvibhav/camel,tdiesler/camel,christophd/camel,mcollovati/camel
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.salesforce.internal.streaming;
import java.util.HashMap;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.atomic.AtomicLong;
import java.util.stream.Stream;
import org.apache.camel.CamelException;
import org.apache.camel.component.salesforce.SalesforceComponent;
import org.apache.camel.component.salesforce.SalesforceConsumer;
import org.apache.camel.component.salesforce.SalesforceEndpoint;
import org.apache.camel.component.salesforce.SalesforceEndpointConfig;
import org.apache.camel.component.salesforce.SalesforceHttpClient;
import org.apache.camel.component.salesforce.api.SalesforceException;
import org.apache.camel.component.salesforce.internal.SalesforceSession;
import org.apache.camel.support.service.ServiceSupport;
import org.cometd.bayeux.Message;
import org.cometd.bayeux.client.ClientSessionChannel;
import org.cometd.client.BayeuxClient;
import org.cometd.client.BayeuxClient.State;
import org.cometd.client.transport.ClientTransport;
import org.cometd.client.transport.LongPollingTransport;
import org.eclipse.jetty.client.api.Request;
import org.eclipse.jetty.http.HttpHeader;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static java.util.concurrent.TimeUnit.MILLISECONDS;
import static java.util.concurrent.TimeUnit.SECONDS;
import static org.cometd.bayeux.Channel.META_CONNECT;
import static org.cometd.bayeux.Channel.META_DISCONNECT;
import static org.cometd.bayeux.Channel.META_HANDSHAKE;
import static org.cometd.bayeux.Channel.META_SUBSCRIBE;
import static org.cometd.bayeux.Channel.META_UNSUBSCRIBE;
import static org.cometd.bayeux.Message.ERROR_FIELD;
import static org.cometd.bayeux.Message.SUBSCRIPTION_FIELD;
public class SubscriptionHelper extends ServiceSupport {
static final ReplayExtension REPLAY_EXTENSION = new ReplayExtension();
private static final Logger LOG = LoggerFactory.getLogger(SubscriptionHelper.class);
private static final int CONNECT_TIMEOUT = 110;
private static final int CHANNEL_TIMEOUT = 40;
private static final String FAILURE_FIELD = "failure";
private static final String EXCEPTION_FIELD = "exception";
private static final String SFDC_FIELD = "sfdc";
private static final String FAILURE_REASON_FIELD = "failureReason";
private static final int DISCONNECT_INTERVAL = 5000;
private static final String SERVER_TOO_BUSY_ERROR = "503::";
BayeuxClient client;
private final SalesforceComponent component;
private final SalesforceSession session;
private final long timeout = 60 * 1000L;
private final Map<SalesforceConsumer, ClientSessionChannel.MessageListener> listenerMap;
private final long maxBackoff;
private final long backoffIncrement;
private ClientSessionChannel.MessageListener handshakeListener;
private ClientSessionChannel.MessageListener connectListener;
private ClientSessionChannel.MessageListener disconnectListener;
private volatile String handshakeError;
private volatile Exception handshakeException;
private volatile String connectError;
private volatile Exception connectException;
private volatile boolean reconnecting;
private final AtomicLong restartBackoff;
public SubscriptionHelper(final SalesforceComponent component) throws SalesforceException {
this.component = component;
this.session = component.getSession();
this.listenerMap = new ConcurrentHashMap<>();
restartBackoff = new AtomicLong(0);
backoffIncrement = component.getConfig().getBackoffIncrement();
maxBackoff = component.getConfig().getMaxBackoff();
}
/**
 * Creates the Bayeux client, registers META_HANDSHAKE / META_CONNECT /
 * META_DISCONNECT listeners (lazily, on first start), performs the
 * handshake, and waits up to CONNECT_TIMEOUT seconds for the CONNECTED
 * state, translating any recorded handshake/connect failure into a
 * CamelException.
 */
@Override
protected void doStart() throws Exception {
// create CometD client
this.client = createClient(component);
// reset all error conditions
handshakeError = null;
handshakeException = null;
connectError = null;
connectException = null;
// listener for handshake error or exception
if (handshakeListener == null) {
// first start
handshakeListener = new ClientSessionChannel.MessageListener() {
public void onMessage(ClientSessionChannel channel, Message message) {
LOG.debug("[CHANNEL:META_HANDSHAKE]: {}", message);
if (!message.isSuccessful()) {
LOG.warn("Handshake failure: {}", message);
// Record the failure in volatile fields so the waitFor() check below can report it.
handshakeError = (String)message.get(ERROR_FIELD);
handshakeException = getFailure(message);
if (handshakeError != null) {
// refresh oauth token, if it's a 401 error
if (handshakeError.startsWith("401::")) {
try {
LOG.info("Refreshing OAuth token...");
session.login(session.getAccessToken());
LOG.info("Refreshed OAuth token for re-handshake");
} catch (SalesforceException e) {
LOG.warn("Error renewing OAuth token on 401 error: " + e.getMessage(), e);
}
}
// On 403, drop the session entirely so the restart performs a clean login.
if (handshakeError.startsWith("403::")) {
try {
LOG.info("Cleaning session (logout) from SalesforceSession before restarting client");
session.logout();
} catch (SalesforceException e) {
LOG.warn("Error while cleaning session: " + e.getMessage(), e);
}
}
}
// restart if handshake fails for any reason
restartClient();
} else if (!listenerMap.isEmpty()) {
// Successful handshake with consumers already registered means this is a
// reconnect; flag it so the connect listener re-subscribes them.
reconnecting = true;
}
}
};
}
client.getChannel(META_HANDSHAKE).addListener(handshakeListener);
// listener for connect error
if (connectListener == null) {
connectListener = new ClientSessionChannel.MessageListener() {
public void onMessage(ClientSessionChannel channel, Message message) {
LOG.debug("[CHANNEL:META_CONNECT]: {}", message);
if (!message.isSuccessful()) {
LOG.warn("Connect failure: {}", message);
connectError = (String)message.get(ERROR_FIELD);
connectException = getFailure(message);
} else if (reconnecting) {
reconnecting = false;
LOG.debug("Refreshing subscriptions to {} channels on reconnect", listenerMap.size());
// reconnected to Salesforce, subscribe to existing
// channels
// Snapshot + clear the map first: subscribe() re-populates it on success.
final Map<SalesforceConsumer, ClientSessionChannel.MessageListener> map = new HashMap<>();
map.putAll(listenerMap);
listenerMap.clear();
for (Map.Entry<SalesforceConsumer, ClientSessionChannel.MessageListener> entry : map.entrySet()) {
final SalesforceConsumer consumer = entry.getKey();
final String topicName = consumer.getTopicName();
subscribe(topicName, consumer);
}
}
}
};
}
client.getChannel(META_CONNECT).addListener(connectListener);
// handle fatal disconnects by reconnecting asynchronously
if (disconnectListener == null) {
disconnectListener = new ClientSessionChannel.MessageListener() {
@Override
public void onMessage(ClientSessionChannel clientSessionChannel, Message message) {
restartClient();
}
};
}
client.getChannel(META_DISCONNECT).addListener(disconnectListener);
// connect to Salesforce cometd endpoint
client.handshake();
final long waitMs = MILLISECONDS.convert(CONNECT_TIMEOUT, SECONDS);
if (!client.waitFor(waitMs, BayeuxClient.State.CONNECTED)) {
// Prefer the most specific recorded failure; fall back to a generic timeout.
if (handshakeException != null) {
throw new CamelException(String.format("Exception during HANDSHAKE: %s", handshakeException.getMessage()), handshakeException);
} else if (handshakeError != null) {
throw new CamelException(String.format("Error during HANDSHAKE: %s", handshakeError));
} else if (connectException != null) {
throw new CamelException(String.format("Exception during CONNECT: %s", connectException.getMessage()), connectException);
} else if (connectError != null) {
throw new CamelException(String.format("Error during CONNECT: %s", connectError));
} else {
throw new CamelException(String.format("Handshake request timeout after %s seconds", CONNECT_TIMEOUT));
}
}
}
/**
 * Schedules an asynchronous restart of the Bayeux client on the shared HTTP
 * client's executor: waits for the current client to disconnect, pauses for
 * an increasing backoff interval, then stops and restarts this helper.
 * Notifies all registered consumers once the accumulated backoff exceeds the
 * configured maximum.
 */
private void restartClient() {
// launch a new restart command
final SalesforceHttpClient httpClient = component.getConfig().getHttpClient();
httpClient.getExecutor().execute(new Runnable() {
@Override
public void run() {
LOG.info("Restarting on unexpected disconnect from Salesforce...");
boolean abort = false;
// wait for disconnect
LOG.debug("Waiting to disconnect...");
while (!client.isDisconnected()) {
try {
Thread.sleep(DISCONNECT_INTERVAL);
} catch (InterruptedException e) {
LOG.error("Aborting restart on interrupt!");
// NOTE(review): the loop does not break here, so waiting continues until
// the client reports disconnected; the interrupt status is also not
// restored (Thread.currentThread().interrupt()) — confirm both are intended.
abort = true;
}
}
if (!abort) {
// update restart attempt backoff
final long backoff = restartBackoff.getAndAdd(backoffIncrement);
if (backoff > maxBackoff) {
LOG.error("Restart aborted after exceeding {} msecs backoff", maxBackoff);
abort = true;
} else {
// pause before restart attempt
LOG.debug("Pausing for {} msecs before restart attempt", backoff);
try {
Thread.sleep(backoff);
} catch (InterruptedException e) {
LOG.error("Aborting restart on interrupt!");
abort = true;
}
}
if (!abort) {
Exception lastError = new SalesforceException("Unknown error", null);
try {
// reset client
doStop();
// register listeners and restart
doStart();
} catch (Exception e) {
LOG.error("Error restarting: " + e.getMessage(), e);
lastError = e;
}
if (client != null && client.isHandshook()) {
LOG.info("Successfully restarted!");
// reset backoff interval
// NOTE(review): subscribe() resets restartBackoff to 0, while this path
// seeds it with client.getBackoffIncrement() — verify the asymmetry is
// deliberate.
restartBackoff.set(client.getBackoffIncrement());
} else {
LOG.error("Failed to restart after pausing for {} msecs", backoff);
if ((backoff + backoffIncrement) > maxBackoff) {
// notify all consumers
String abortMsg = "Aborting restart attempt due to: " + lastError.getMessage();
SalesforceException ex = new SalesforceException(abortMsg, lastError);
for (SalesforceConsumer consumer : listenerMap.keySet()) {
consumer.handleException(abortMsg, ex);
}
}
}
}
}
}
});
}
/**
 * Extracts the failure cause carried by a Bayeux message: the top-level
 * "exception" entry, the nested "failure"/"exception" entry, or — failing
 * both — a {@link SalesforceException} built from the sfdc failure reason.
 * Returns {@code null} if the message carries no recognizable failure.
 */
@SuppressWarnings("unchecked")
private static Exception getFailure(Message message) {
    Exception exception = null;
    if (message.get(EXCEPTION_FIELD) != null) {
        exception = (Exception) message.get(EXCEPTION_FIELD);
    } else if (message.get(FAILURE_FIELD) != null) {
        // Consistency fix: use the declared field constants instead of repeating
        // the "failure"/"exception" string literals inline.
        exception = (Exception) ((Map<String, Object>) message.get(FAILURE_FIELD)).get(EXCEPTION_FIELD);
    } else {
        String failureReason = getFailureReason(message);
        if (failureReason != null) {
            exception = new SalesforceException(failureReason, null);
        }
    }
    return exception;
}
/**
 * Detaches the channel listeners, disconnects the Bayeux client (aborting it
 * if disconnect does not complete within the timeout), and releases it.
 */
@Override
protected void doStop() throws Exception {
    client.getChannel(META_DISCONNECT).removeListener(disconnectListener);
    client.getChannel(META_CONNECT).removeListener(connectListener);
    client.getChannel(META_HANDSHAKE).removeListener(handshakeListener);
    client.disconnect();
    if (!client.waitFor(timeout, State.DISCONNECTED)) {
        LOG.warn("Could not disconnect client connected to: {} after: {} msec.", getEndpointUrl(component), timeout);
        client.abort();
    }
    client = null;
    LOG.debug("Stopped the helper and destroyed the client");
}
/**
 * Builds a new {@link BayeuxClient} for the Salesforce CometD endpoint,
 * using the component's shared Jetty HTTP client and a long-polling
 * transport that injects the current OAuth access token into every request.
 */
static BayeuxClient createClient(final SalesforceComponent component) throws SalesforceException {
// use default Jetty client from SalesforceComponent, its shared by all
// consumers
final SalesforceHttpClient httpClient = component.getConfig().getHttpClient();
Map<String, Object> options = new HashMap<>();
options.put(ClientTransport.MAX_NETWORK_DELAY_OPTION, httpClient.getTimeout());
if (component.getLongPollingTransportProperties() != null) {
// NOTE(review): user-supplied transport properties replace the map wholesale,
// discarding the MAX_NETWORK_DELAY_OPTION set above unless the user re-adds
// it — confirm this is intended.
options = component.getLongPollingTransportProperties();
}
final SalesforceSession session = component.getSession();
// check login access token
if (session.getAccessToken() == null && !component.getLoginConfig().isLazyLogin()) {
session.login(null);
}
LongPollingTransport transport = new LongPollingTransport(options, httpClient) {
@Override
protected void customize(Request request) {
super.customize(request);
//accessToken might be null due to lazy login
String accessToken = session.getAccessToken();
if (accessToken == null) {
try {
accessToken = session.login(null);
} catch (SalesforceException e) {
// customize() cannot throw checked exceptions, so wrap the login failure
throw new RuntimeException(e);
}
}
request.getHeaders().put(HttpHeader.AUTHORIZATION, "OAuth " + accessToken);
}
};
BayeuxClient client = new BayeuxClient(getEndpointUrl(component), transport);
// added eagerly to check for support during handshake
client.addExtension(REPLAY_EXTENSION);
return client;
}
/**
 * Subscribes the given consumer to its Streaming API topic.
 * <p>
 * Registers a per-consumer message listener on the topic channel plus a
 * one-shot META_SUBSCRIBE listener reporting the outcome: on success the
 * consumer is remembered (so it can be re-subscribed after reconnect) and
 * the backoff counter reset; on a temporary "server too busy" failure the
 * subscription is retried after a growing backoff delay; on any other
 * failure the consumer's exception handler is invoked.
 */
public void subscribe(final String topicName, final SalesforceConsumer consumer) {
// create subscription for consumer
final String channelName = getChannelName(topicName);
setupReplay((SalesforceEndpoint)consumer.getEndpoint());
// channel message listener
LOG.info("Subscribing to channel {}...", channelName);
final ClientSessionChannel.MessageListener listener = new ClientSessionChannel.MessageListener() {
@Override
public void onMessage(ClientSessionChannel channel, Message message) {
LOG.debug("Received Message: {}", message);
// convert CometD message to Camel Message
consumer.processMessage(channel, message);
}
};
final ClientSessionChannel clientChannel = client.getChannel(channelName);
// listener for subscription
final ClientSessionChannel.MessageListener subscriptionListener = new ClientSessionChannel.MessageListener() {
public void onMessage(ClientSessionChannel channel, Message message) {
LOG.debug("[CHANNEL:META_SUBSCRIBE]: {}", message);
// META_SUBSCRIBE is shared by all subscriptions; only react to our channel.
final String subscribedChannelName = message.get(SUBSCRIPTION_FIELD).toString();
if (channelName.equals(subscribedChannelName)) {
if (!message.isSuccessful()) {
String error = (String)message.get(ERROR_FIELD);
if (error == null) {
error = "Missing error message";
}
Exception failure = getFailure(message);
String msg = String.format("Error subscribing to %s: %s", topicName, failure != null ? failure.getMessage() : error);
boolean abort = true;
if (isTemporaryError(message)) {
LOG.warn(msg);
// retry after delay
final long backoff = restartBackoff.getAndAdd(backoffIncrement);
if (backoff > maxBackoff) {
LOG.error("Subscribe aborted after exceeding {} msecs backoff", maxBackoff);
} else {
abort = false;
try {
LOG.debug("Pausing for {} msecs before subscribe attempt", backoff);
// NOTE(review): this sleep runs on the CometD message-dispatch thread,
// blocking other channel callbacks for the backoff duration — confirm
// this is acceptable.
Thread.sleep(backoff);
final SalesforceHttpClient httpClient = component.getConfig().getHttpClient();
httpClient.getExecutor().execute(new Runnable() {
@Override
public void run() {
subscribe(topicName, consumer);
}
});
} catch (InterruptedException e) {
LOG.warn("Aborting subscribe on interrupt!", e);
}
}
}
if (abort && client != null) {
consumer.handleException(msg, new SalesforceException(msg, failure));
}
} else {
// remember subscription
LOG.info("Subscribed to channel {}", subscribedChannelName);
listenerMap.put(consumer, listener);
// reset backoff interval
restartBackoff.set(0);
}
// remove this subscription listener
if (client != null) {
client.getChannel(META_SUBSCRIBE).removeListener(this);
} else {
LOG.warn("Trying to handle a subscription message but the client is already destroyed");
}
}
}
};
client.getChannel(META_SUBSCRIBE).addListener(subscriptionListener);
// subscribe asynchronously
clientChannel.subscribe(listener);
}
/**
 * Whether the message signals a transient overload: a failure reason
 * starting with the "503::" server-too-busy marker.
 */
private static boolean isTemporaryError(Message message) {
    final String reason = getFailureReason(message);
    return reason != null && reason.startsWith(SERVER_TOO_BUSY_ERROR);
}
/**
 * Reads the Salesforce-specific failure reason from the message's ext data
 * ({@code ext.sfdc.failureReason}), or {@code null} if not present.
 */
@SuppressWarnings("unchecked") // ext map values are untyped; cast mirrors getFailure()
private static String getFailureReason(Message message) {
    String failureReason = null;
    if (message.getExt() != null) {
        Map<String, Object> sfdcFields = (Map<String, Object>) message.getExt().get(SFDC_FIELD);
        if (sfdcFields != null) {
            failureReason = (String) sfdcFields.get(FAILURE_REASON_FIELD);
        }
    }
    return failureReason;
}
/**
 * Registers the replay id resolved for the endpoint's topic with the shared
 * {@code REPLAY_EXTENSION}; does nothing when no replay id is configured.
 */
void setupReplay(final SalesforceEndpoint endpoint) {
    final String topicName = endpoint.getTopicName();
    determineReplayIdFor(endpoint, topicName).ifPresent(replayIdValue -> {
        final String channelName = getChannelName(topicName);
        LOG.info("Set Replay extension to replay from `{}` for channel `{}`", replayIdValue, channelName);
        REPLAY_EXTENSION.addChannelReplayId(channelName, replayIdValue);
    });
}
/**
 * Resolves the replay id for a topic. Priority (highest first): explicit endpoint
 * replayId, endpoint per-topic map, component per-topic map, endpoint default,
 * component default. Per-topic maps are keyed either by topic name or channel name.
 */
static Optional<Long> determineReplayIdFor(final SalesforceEndpoint endpoint, final String topicName) {
    final String channelName = getChannelName(topicName);

    final SalesforceEndpointConfig endpointConfig = endpoint.getConfiguration();
    final Map<String, Long> endpointIds = endpointConfig.getInitialReplayIdMap();

    final SalesforceEndpointConfig componentConfig = endpoint.getComponent().getConfig();
    final Map<String, Long> componentIds = componentConfig.getInitialReplayIdMap();

    // endpoint values take priority over component values; the default values have
    // the lowest priority, after the per-topic values
    return Stream
            .of(endpoint.getReplayId(),
                endpointIds.getOrDefault(topicName, endpointIds.get(channelName)),
                componentIds.getOrDefault(topicName, componentIds.get(channelName)),
                endpointConfig.getDefaultReplayId(),
                componentConfig.getDefaultReplayId())
            .filter(Objects::nonNull)
            .findFirst();
}
/**
 * Maps a topic name to its CometD channel name: ensures a leading '/', and
 * prefixes "topic/" when the name has no channel-type segment of its own.
 */
static String getChannelName(final String topicName) {
    final String prefix = topicName.charAt(0) == '/' ? "" : "/";
    // a '/' after the first character means the name already carries its type segment
    final boolean alreadyQualified = topicName.indexOf('/', 1) > 0;
    if (alreadyQualified) {
        return prefix + topicName;
    }
    return prefix + "topic/" + topicName;
}
/**
 * Unsubscribes the given consumer from the channel derived from {@code topicName}.
 * Registers a temporary META_UNSUBSCRIBE listener, issues the unsubscribe, and waits
 * up to CHANNEL_TIMEOUT seconds for confirmation.
 *
 * NOTE(review): when Salesforce answers with an unsuccessful message the latch is
 * still counted down, so {@code latch.await} returns true and the recorded
 * error/failure is never thrown — errors only surface on timeout. Confirm whether
 * that is intended.
 *
 * @throws CamelException on confirmation timeout (with any recorded failure as cause)
 */
public void unsubscribe(String topicName, SalesforceConsumer consumer) throws CamelException {
    // channel name
    final String channelName = getChannelName(topicName);
    // listen for unsubscribe error
    final CountDownLatch latch = new CountDownLatch(1);
    // single-element arrays let the anonymous listener write results back to this frame
    final String[] unsubscribeError = {null};
    final Exception[] unsubscribeFailure = {null};
    final ClientSessionChannel.MessageListener unsubscribeListener = new ClientSessionChannel.MessageListener() {
        public void onMessage(ClientSessionChannel channel, Message message) {
            LOG.debug("[CHANNEL:META_UNSUBSCRIBE]: {}", message);
            Object subscription = message.get(SUBSCRIPTION_FIELD);
            if (subscription != null) {
                String unsubscribedChannelName = subscription.toString();
                // only react to the confirmation for our own channel
                if (channelName.equals(unsubscribedChannelName)) {
                    if (!message.isSuccessful()) {
                        unsubscribeError[0] = (String)message.get(ERROR_FIELD);
                        unsubscribeFailure[0] = getFailure(message);
                    } else {
                        // forget subscription
                        LOG.info("Unsubscribed from channel {}", unsubscribedChannelName);
                    }
                    latch.countDown();
                }
            }
        }
    };
    client.getChannel(META_UNSUBSCRIBE).addListener(unsubscribeListener);
    try {
        // unsubscribe from channel; a missing map entry means we never subscribed
        final ClientSessionChannel.MessageListener listener = listenerMap.remove(consumer);
        if (listener != null) {
            LOG.info("Unsubscribing from channel {}...", channelName);
            final ClientSessionChannel clientChannel = client.getChannel(channelName);
            clientChannel.unsubscribe(listener);
            // confirm unsubscribe
            try {
                if (!latch.await(CHANNEL_TIMEOUT, SECONDS)) {
                    String message;
                    if (unsubscribeFailure[0] != null) {
                        message = String.format("Error unsubscribing from topic %s: %s", topicName, unsubscribeFailure[0].getMessage());
                    } else if (unsubscribeError[0] != null) {
                        message = String.format("Error unsubscribing from topic %s: %s", topicName, unsubscribeError[0]);
                    } else {
                        message = String.format("Timeout error unsubscribing from topic %s after %s seconds", topicName, CHANNEL_TIMEOUT);
                    }
                    throw new CamelException(message, unsubscribeFailure[0]);
                }
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
                // probably shutting down, forget unsubscribe and return
            }
        }
    } finally {
        // always drop the temporary META_UNSUBSCRIBE listener
        client.getChannel(META_UNSUBSCRIBE).removeListener(unsubscribeListener);
    }
}
/**
 * Builds the CometD endpoint URL for the component's API version.
 * In API version 36.0 replay is only available on the dedicated
 * {@code /cometd/replay/<version>} endpoint, so that path is used whenever any
 * replay option (default replay id or per-topic map) is configured.
 */
static String getEndpointUrl(final SalesforceComponent component) {
    final SalesforceEndpointConfig config = component.getConfig();
    final String instanceUrl = component.getSession().getInstanceUrl();
    // parseDouble avoids the needless boxing of Double.valueOf(..) == 36.0;
    // "36.0" parses to an exact binary value, so == is safe here
    if (Double.parseDouble(config.getApiVersion()) == 36.0) {
        final boolean replayOptionsPresent = config.getDefaultReplayId() != null
                || !config.getInitialReplayIdMap().isEmpty();
        if (replayOptionsPresent) {
            return instanceUrl + "/cometd/replay/" + config.getApiVersion();
        }
    }
    return instanceUrl + "/cometd/" + config.getApiVersion();
}
}
|
components/camel-salesforce/camel-salesforce-component/src/main/java/org/apache/camel/component/salesforce/internal/streaming/SubscriptionHelper.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.salesforce.internal.streaming;
import java.util.HashMap;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.atomic.AtomicLong;
import java.util.stream.Stream;
import org.apache.camel.CamelException;
import org.apache.camel.component.salesforce.SalesforceComponent;
import org.apache.camel.component.salesforce.SalesforceConsumer;
import org.apache.camel.component.salesforce.SalesforceEndpoint;
import org.apache.camel.component.salesforce.SalesforceEndpointConfig;
import org.apache.camel.component.salesforce.SalesforceHttpClient;
import org.apache.camel.component.salesforce.api.SalesforceException;
import org.apache.camel.component.salesforce.internal.SalesforceSession;
import org.apache.camel.support.service.ServiceSupport;
import org.cometd.bayeux.Message;
import org.cometd.bayeux.client.ClientSessionChannel;
import org.cometd.client.BayeuxClient;
import org.cometd.client.BayeuxClient.State;
import org.cometd.client.transport.ClientTransport;
import org.cometd.client.transport.LongPollingTransport;
import org.eclipse.jetty.client.api.Request;
import org.eclipse.jetty.http.HttpHeader;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static java.util.concurrent.TimeUnit.MILLISECONDS;
import static java.util.concurrent.TimeUnit.SECONDS;
import static org.cometd.bayeux.Channel.META_CONNECT;
import static org.cometd.bayeux.Channel.META_DISCONNECT;
import static org.cometd.bayeux.Channel.META_HANDSHAKE;
import static org.cometd.bayeux.Channel.META_SUBSCRIBE;
import static org.cometd.bayeux.Channel.META_UNSUBSCRIBE;
import static org.cometd.bayeux.Message.ERROR_FIELD;
import static org.cometd.bayeux.Message.SUBSCRIPTION_FIELD;
/**
 * Manages the CometD (Bayeux) connection to the Salesforce Streaming API on behalf of all
 * {@link SalesforceConsumer}s of a component: handshake, automatic restart with incremental
 * backoff, per-topic subscribe/unsubscribe, and replay-id bookkeeping via {@link ReplayExtension}.
 *
 * NOTE(review): listener callbacks arrive on CometD/HTTP-client threads, hence the volatile
 * fields, the ConcurrentHashMap and the AtomicLong below.
 */
public class SubscriptionHelper extends ServiceSupport {
    // Shared replay extension; added to the client eagerly so support is negotiated at handshake.
    static final ReplayExtension REPLAY_EXTENSION = new ReplayExtension();

    private static final Logger LOG = LoggerFactory.getLogger(SubscriptionHelper.class);

    // Seconds to wait for the initial handshake/connect and for channel (un)subscribe confirmation.
    private static final int CONNECT_TIMEOUT = 110;
    private static final int CHANNEL_TIMEOUT = 40;

    // Keys used in Bayeux messages sent by Salesforce.
    private static final String FAILURE_FIELD = "failure";
    private static final String EXCEPTION_FIELD = "exception";
    private static final String SFDC_FIELD = "sfdc";
    private static final String FAILURE_REASON_FIELD = "failureReason";

    // Poll interval (msecs) while waiting for a disconnect to complete.
    private static final int DISCONNECT_INTERVAL = 5000;
    // Error prefix marking a transient "server too busy" condition worth retrying.
    private static final String SERVER_TOO_BUSY_ERROR = "503::";

    BayeuxClient client;

    private final SalesforceComponent component;
    private final SalesforceSession session;
    // Max msecs to wait for a clean disconnect in doStop().
    private final long timeout = 60 * 1000L;

    // Active subscriptions: consumer -> its channel message listener.
    private final Map<SalesforceConsumer, ClientSessionChannel.MessageListener> listenerMap;
    private final long maxBackoff;
    private final long backoffIncrement;

    private ClientSessionChannel.MessageListener handshakeListener;
    private ClientSessionChannel.MessageListener connectListener;
    private ClientSessionChannel.MessageListener disconnectListener;

    // Last error/exception observed during handshake/connect; reset on every doStart().
    private volatile String handshakeError;
    private volatile Exception handshakeException;
    private volatile String connectError;
    private volatile Exception connectException;

    // True between a successful re-handshake and the connect that re-establishes subscriptions.
    private volatile boolean reconnecting;
    // Accumulated backoff (msecs) applied before restart/subscribe retry attempts.
    private final AtomicLong restartBackoff;

    public SubscriptionHelper(final SalesforceComponent component) throws SalesforceException {
        this.component = component;
        this.session = component.getSession();
        this.listenerMap = new ConcurrentHashMap<>();
        restartBackoff = new AtomicLong(0);
        backoffIncrement = component.getConfig().getBackoffIncrement();
        maxBackoff = component.getConfig().getMaxBackoff();
    }

    /**
     * Creates the Bayeux client, installs the handshake/connect/disconnect listeners (once),
     * performs the handshake and waits up to CONNECT_TIMEOUT seconds for CONNECTED.
     */
    @Override
    protected void doStart() throws Exception {
        // create CometD client
        this.client = createClient(component);
        // reset all error conditions
        handshakeError = null;
        handshakeException = null;
        connectError = null;
        connectException = null;
        // listener for handshake error or exception
        if (handshakeListener == null) {
            // first start
            handshakeListener = new ClientSessionChannel.MessageListener() {
                public void onMessage(ClientSessionChannel channel, Message message) {
                    LOG.debug("[CHANNEL:META_HANDSHAKE]: {}", message);
                    if (!message.isSuccessful()) {
                        LOG.warn("Handshake failure: {}", message);
                        handshakeError = (String)message.get(ERROR_FIELD);
                        handshakeException = getFailure(message);
                        if (handshakeError != null) {
                            // refresh oauth token, if it's a 401 error
                            if (handshakeError.startsWith("401::")) {
                                try {
                                    LOG.info("Refreshing OAuth token...");
                                    session.login(session.getAccessToken());
                                    LOG.info("Refreshed OAuth token for re-handshake");
                                } catch (SalesforceException e) {
                                    LOG.warn("Error renewing OAuth token on 401 error: " + e.getMessage(), e);
                                }
                            }
                            // 403 means the session itself is invalid; drop it before restarting
                            if (handshakeError.startsWith("403::")) {
                                try {
                                    LOG.info("Cleaning session (logout) from SalesforceSession before restarting client");
                                    session.logout();
                                } catch (SalesforceException e) {
                                    LOG.warn("Error while cleaning session: " + e.getMessage(), e);
                                }
                            }
                        }
                        // restart if handshake fails for any reason
                        restartClient();
                    } else if (!listenerMap.isEmpty()) {
                        // successful re-handshake with live subscriptions: re-subscribe on next connect
                        reconnecting = true;
                    }
                }
            };
        }
        client.getChannel(META_HANDSHAKE).addListener(handshakeListener);
        // listener for connect error
        if (connectListener == null) {
            connectListener = new ClientSessionChannel.MessageListener() {
                public void onMessage(ClientSessionChannel channel, Message message) {
                    LOG.debug("[CHANNEL:META_CONNECT]: {}", message);
                    if (!message.isSuccessful()) {
                        LOG.warn("Connect failure: {}", message);
                        connectError = (String)message.get(ERROR_FIELD);
                        connectException = getFailure(message);
                    } else if (reconnecting) {
                        reconnecting = false;
                        LOG.debug("Refreshing subscriptions to {} channels on reconnect", listenerMap.size());
                        // reconnected to Salesforce, subscribe to existing
                        // channels
                        final Map<SalesforceConsumer, ClientSessionChannel.MessageListener> map = new HashMap<>();
                        map.putAll(listenerMap);
                        listenerMap.clear();
                        for (Map.Entry<SalesforceConsumer, ClientSessionChannel.MessageListener> entry : map.entrySet()) {
                            final SalesforceConsumer consumer = entry.getKey();
                            final String topicName = consumer.getTopicName();
                            subscribe(topicName, consumer);
                        }
                    }
                }
            };
        }
        client.getChannel(META_CONNECT).addListener(connectListener);
        // handle fatal disconnects by reconnecting asynchronously
        if (disconnectListener == null) {
            disconnectListener = new ClientSessionChannel.MessageListener() {
                @Override
                public void onMessage(ClientSessionChannel clientSessionChannel, Message message) {
                    restartClient();
                }
            };
        }
        client.getChannel(META_DISCONNECT).addListener(disconnectListener);
        // connect to Salesforce cometd endpoint
        client.handshake();
        final long waitMs = MILLISECONDS.convert(CONNECT_TIMEOUT, SECONDS);
        if (!client.waitFor(waitMs, BayeuxClient.State.CONNECTED)) {
            // report the most specific failure recorded by the listeners above
            if (handshakeException != null) {
                throw new CamelException(String.format("Exception during HANDSHAKE: %s", handshakeException.getMessage()), handshakeException);
            } else if (handshakeError != null) {
                throw new CamelException(String.format("Error during HANDSHAKE: %s", handshakeError));
            } else if (connectException != null) {
                throw new CamelException(String.format("Exception during CONNECT: %s", connectException.getMessage()), connectException);
            } else if (connectError != null) {
                throw new CamelException(String.format("Error during CONNECT: %s", connectError));
            } else {
                throw new CamelException(String.format("Handshake request timeout after %s seconds", CONNECT_TIMEOUT));
            }
        }
    }

    // launch an async task to restart
    // Waits for disconnect, applies incremental backoff (aborting past maxBackoff),
    // then stops and re-starts this helper on an HTTP-client executor thread.
    private void restartClient() {
        // launch a new restart command
        final SalesforceHttpClient httpClient = component.getConfig().getHttpClient();
        httpClient.getExecutor().execute(new Runnable() {
            @Override
            public void run() {
                LOG.info("Restarting on unexpected disconnect from Salesforce...");
                boolean abort = false;
                // wait for disconnect
                LOG.debug("Waiting to disconnect...");
                while (!client.isDisconnected()) {
                    try {
                        Thread.sleep(DISCONNECT_INTERVAL);
                    } catch (InterruptedException e) {
                        LOG.error("Aborting restart on interrupt!");
                        abort = true;
                    }
                }
                if (!abort) {
                    // update restart attempt backoff
                    final long backoff = restartBackoff.getAndAdd(backoffIncrement);
                    if (backoff > maxBackoff) {
                        LOG.error("Restart aborted after exceeding {} msecs backoff", maxBackoff);
                        abort = true;
                    } else {
                        // pause before restart attempt
                        LOG.debug("Pausing for {} msecs before restart attempt", backoff);
                        try {
                            Thread.sleep(backoff);
                        } catch (InterruptedException e) {
                            LOG.error("Aborting restart on interrupt!");
                            abort = true;
                        }
                    }
                    if (!abort) {
                        Exception lastError = new SalesforceException("Unknown error", null);
                        try {
                            // reset client
                            doStop();
                            // register listeners and restart
                            doStart();
                        } catch (Exception e) {
                            LOG.error("Error restarting: " + e.getMessage(), e);
                            lastError = e;
                        }
                        if (client != null && client.isHandshook()) {
                            LOG.info("Successfully restarted!");
                            // reset backoff interval
                            restartBackoff.set(client.getBackoffIncrement());
                        } else {
                            LOG.error("Failed to restart after pausing for {} msecs", backoff);
                            if ((backoff + backoffIncrement) > maxBackoff) {
                                // notify all consumers
                                String abortMsg = "Aborting restart attempt due to: " + lastError.getMessage();
                                SalesforceException ex = new SalesforceException(abortMsg, lastError);
                                for (SalesforceConsumer consumer : listenerMap.keySet()) {
                                    consumer.handleException(abortMsg, ex);
                                }
                            }
                        }
                    }
                }
            }
        });
    }

    /**
     * Extracts a Throwable from a Bayeux message: the "exception" field, the nested
     * "failure.exception", or a SalesforceException built from ext.sfdc.failureReason.
     * Returns null if no failure information is present.
     */
    @SuppressWarnings("unchecked")
    private static Exception getFailure(Message message) {
        Exception exception = null;
        if (message.get(EXCEPTION_FIELD) != null) {
            exception = (Exception)message.get(EXCEPTION_FIELD);
        } else if (message.get(FAILURE_FIELD) != null) {
            exception = (Exception)((Map<String, Object>)message.get("failure")).get("exception");
        } else {
            String failureReason = getFailureReason(message);
            if (failureReason != null) {
                exception = new SalesforceException(failureReason, null);
            }
        }
        return exception;
    }

    /**
     * Removes the meta-channel listeners, disconnects (waiting up to {@code timeout} msecs,
     * aborting the client on failure) and drops the client reference.
     */
    @Override
    protected void doStop() throws Exception {
        client.getChannel(META_DISCONNECT).removeListener(disconnectListener);
        client.getChannel(META_CONNECT).removeListener(connectListener);
        client.getChannel(META_HANDSHAKE).removeListener(handshakeListener);
        client.disconnect();
        boolean disconnected = client.waitFor(timeout, State.DISCONNECTED);
        if (!disconnected) {
            LOG.warn("Could not disconnect client connected to: {} after: {} msec.", getEndpointUrl(component), timeout);
            client.abort();
        }
        client = null;
        LOG.debug("Stopped the helper and destroyed the client");
    }

    /**
     * Builds a BayeuxClient over the component's shared Jetty HTTP client, adding the
     * OAuth access token to every request (logging in lazily when needed) and
     * registering the replay extension.
     */
    static BayeuxClient createClient(final SalesforceComponent component) throws SalesforceException {
        // use default Jetty client from SalesforceComponent, its shared by all
        // consumers
        final SalesforceHttpClient httpClient = component.getConfig().getHttpClient();
        Map<String, Object> options = new HashMap<>();
        options.put(ClientTransport.MAX_NETWORK_DELAY_OPTION, httpClient.getTimeout());
        if (component.getLongPollingTransportProperties() != null) {
            options = component.getLongPollingTransportProperties();
        }
        final SalesforceSession session = component.getSession();
        // check login access token
        if (session.getAccessToken() == null && !component.getLoginConfig().isLazyLogin()) {
            session.login(null);
        }
        LongPollingTransport transport = new LongPollingTransport(options, httpClient) {
            @Override
            protected void customize(Request request) {
                super.customize(request);
                //accessToken might be null due to lazy login
                String accessToken = session.getAccessToken();
                if (accessToken == null) {
                    try {
                        accessToken = session.login(null);
                    } catch (SalesforceException e) {
                        throw new RuntimeException(e);
                    }
                }
                request.getHeaders().put(HttpHeader.AUTHORIZATION, "OAuth " + accessToken);
            }
        };
        BayeuxClient client = new BayeuxClient(getEndpointUrl(component), transport);
        // added eagerly to check for support during handshake
        client.addExtension(REPLAY_EXTENSION);
        return client;
    }

    /**
     * Subscribes the consumer to the channel derived from {@code topicName}. Subscription
     * failures that are transient (503::) are retried asynchronously with backoff; other
     * failures are reported to the consumer.
     */
    public void subscribe(final String topicName, final SalesforceConsumer consumer) {
        // create subscription for consumer
        final String channelName = getChannelName(topicName);
        setupReplay((SalesforceEndpoint)consumer.getEndpoint());
        // channel message listener
        LOG.info("Subscribing to channel {}...", channelName);
        final ClientSessionChannel.MessageListener listener = new ClientSessionChannel.MessageListener() {
            @Override
            public void onMessage(ClientSessionChannel channel, Message message) {
                LOG.debug("Received Message: {}", message);
                // convert CometD message to Camel Message
                consumer.processMessage(channel, message);
            }
        };
        final ClientSessionChannel clientChannel = client.getChannel(channelName);
        // listener for subscription
        final ClientSessionChannel.MessageListener subscriptionListener = new ClientSessionChannel.MessageListener() {
            public void onMessage(ClientSessionChannel channel, Message message) {
                LOG.debug("[CHANNEL:META_SUBSCRIBE]: {}", message);
                // NOTE(review): SUBSCRIPTION_FIELD may be absent on some meta messages
                // (the unsubscribe listener null-checks it) — toString() would NPE here.
                final String subscribedChannelName = message.get(SUBSCRIPTION_FIELD).toString();
                if (channelName.equals(subscribedChannelName)) {
                    if (!message.isSuccessful()) {
                        String error = (String)message.get(ERROR_FIELD);
                        if (error == null) {
                            error = "Missing error message";
                        }
                        Exception failure = getFailure(message);
                        String msg = String.format("Error subscribing to %s: %s", topicName, failure != null ? failure.getMessage() : error);
                        boolean abort = true;
                        if (isTemporaryError(message)) {
                            LOG.warn(msg);
                            // retry after delay
                            final long backoff = restartBackoff.getAndAdd(backoffIncrement);
                            if (backoff > maxBackoff) {
                                LOG.error("Subscribe aborted after exceeding {} msecs backoff", maxBackoff);
                            } else {
                                abort = false;
                                try {
                                    LOG.debug("Pausing for {} msecs before subscribe attempt", backoff);
                                    Thread.sleep(backoff);
                                    // re-subscribe on an executor thread, not the CometD callback thread
                                    final SalesforceHttpClient httpClient = component.getConfig().getHttpClient();
                                    httpClient.getExecutor().execute(new Runnable() {
                                        @Override
                                        public void run() {
                                            subscribe(topicName, consumer);
                                        }
                                    });
                                } catch (InterruptedException e) {
                                    LOG.warn("Aborting subscribe on interrupt!", e);
                                }
                            }
                        }
                        if (abort && client != null) {
                            consumer.handleException(msg, new SalesforceException(msg, failure));
                        }
                    } else {
                        // remember subscription
                        LOG.info("Subscribed to channel {}", subscribedChannelName);
                        listenerMap.put(consumer, listener);
                        // reset backoff interval
                        restartBackoff.set(0);
                    }
                    // remove this subscription listener
                    if (client != null) {
                        client.getChannel(META_SUBSCRIBE).removeListener(this);
                    } else {
                        LOG.warn("Trying to handle a subscription message but the client is already destroyed");
                    }
                }
            }
        };
        client.getChannel(META_SUBSCRIBE).addListener(subscriptionListener);
        // subscribe asynchronously
        clientChannel.subscribe(listener);
    }

    // True when the failure reason marks a transient "server too busy" (503::) condition.
    private static boolean isTemporaryError(Message message) {
        String failureReason = getFailureReason(message);
        return failureReason != null && failureReason.startsWith(SERVER_TOO_BUSY_ERROR);
    }

    // Extracts ext.sfdc.failureReason from the message, or null when not present.
    private static String getFailureReason(Message message) {
        String failureReason = null;
        if (message.getExt() != null) {
            Map<String, Object> sfdcFields = (Map<String, Object>) message.getExt().get(SFDC_FIELD);
            if (sfdcFields != null) {
                failureReason = (String) sfdcFields.get(FAILURE_REASON_FIELD);
            }
        }
        return failureReason;
    }

    // Registers the replay id resolved for the endpoint's topic with the replay extension.
    void setupReplay(final SalesforceEndpoint endpoint) {
        final String topicName = endpoint.getTopicName();
        final Optional<Long> replayId = determineReplayIdFor(endpoint, topicName);
        if (replayId.isPresent()) {
            final String channelName = getChannelName(topicName);
            final Long replayIdValue = replayId.get();
            LOG.info("Set Replay extension to replay from `{}` for channel `{}`", replayIdValue, channelName);
            REPLAY_EXTENSION.addChannelReplayId(channelName, replayIdValue);
        }
    }

    /**
     * Resolves the replay id for a topic. Priority (highest first): explicit endpoint
     * replayId, endpoint per-topic map, component per-topic map, endpoint default,
     * component default. Per-topic maps are keyed by topic name or channel name.
     */
    static Optional<Long> determineReplayIdFor(final SalesforceEndpoint endpoint, final String topicName) {
        final String channelName = getChannelName(topicName);
        final Long replayId = endpoint.getReplayId();
        final SalesforceComponent component = endpoint.getComponent();
        final SalesforceEndpointConfig endpointConfiguration = endpoint.getConfiguration();
        final Map<String, Long> endpointInitialReplayIdMap = endpointConfiguration.getInitialReplayIdMap();
        final Long endpointReplayId = endpointInitialReplayIdMap.getOrDefault(topicName, endpointInitialReplayIdMap.get(channelName));
        final Long endpointDefaultReplayId = endpointConfiguration.getDefaultReplayId();
        final SalesforceEndpointConfig componentConfiguration = component.getConfig();
        final Map<String, Long> componentInitialReplayIdMap = componentConfiguration.getInitialReplayIdMap();
        final Long componentReplayId = componentInitialReplayIdMap.getOrDefault(topicName, componentInitialReplayIdMap.get(channelName));
        final Long componentDefaultReplayId = componentConfiguration.getDefaultReplayId();
        // endpoint values take priority over component values; the default values
        // have the lowest priority, after the per-topic values
        return Stream.of(replayId, endpointReplayId, componentReplayId, endpointDefaultReplayId, componentDefaultReplayId).filter(Objects::nonNull).findFirst();
    }

    /**
     * Maps a topic name to its CometD channel name: ensures a leading '/', prefixes
     * "topic/" when the name has no type segment, and appends "__e" for event channels.
     *
     * NOTE(review): the hardcoded "__e" suffix only fits *custom* platform events;
     * standard platform event API names carry no suffix — confirm against the
     * Salesforce platform-events documentation.
     */
    static String getChannelName(final String topicName) {
        final StringBuilder channelName = new StringBuilder();
        if (topicName.charAt(0) != '/') {
            channelName.append('/');
        }
        if (topicName.indexOf('/', 1) > 0) {
            channelName.append(topicName);
        } else {
            channelName.append("topic/");
            channelName.append(topicName);
        }
        final int typeIdx = channelName.indexOf("/", 1);
        if ("event".equals(channelName.substring(1, typeIdx)) && !topicName.endsWith("__e")) {
            channelName.append("__e");
        }
        return channelName.toString();
    }

    /**
     * Unsubscribes the consumer from the channel derived from {@code topicName},
     * waiting up to CHANNEL_TIMEOUT seconds for confirmation.
     *
     * NOTE(review): an unsuccessful confirmation also counts the latch down, so the
     * recorded error is only thrown on timeout — verify this is intended.
     */
    public void unsubscribe(String topicName, SalesforceConsumer consumer) throws CamelException {
        // channel name
        final String channelName = getChannelName(topicName);
        // listen for unsubscribe error
        final CountDownLatch latch = new CountDownLatch(1);
        // single-element arrays let the anonymous listener write results back
        final String[] unsubscribeError = {null};
        final Exception[] unsubscribeFailure = {null};
        final ClientSessionChannel.MessageListener unsubscribeListener = new ClientSessionChannel.MessageListener() {
            public void onMessage(ClientSessionChannel channel, Message message) {
                LOG.debug("[CHANNEL:META_UNSUBSCRIBE]: {}", message);
                Object subscription = message.get(SUBSCRIPTION_FIELD);
                if (subscription != null) {
                    String unsubscribedChannelName = subscription.toString();
                    if (channelName.equals(unsubscribedChannelName)) {
                        if (!message.isSuccessful()) {
                            unsubscribeError[0] = (String)message.get(ERROR_FIELD);
                            unsubscribeFailure[0] = getFailure(message);
                        } else {
                            // forget subscription
                            LOG.info("Unsubscribed from channel {}", unsubscribedChannelName);
                        }
                        latch.countDown();
                    }
                }
            }
        };
        client.getChannel(META_UNSUBSCRIBE).addListener(unsubscribeListener);
        try {
            // unsubscribe from channel
            final ClientSessionChannel.MessageListener listener = listenerMap.remove(consumer);
            if (listener != null) {
                LOG.info("Unsubscribing from channel {}...", channelName);
                final ClientSessionChannel clientChannel = client.getChannel(channelName);
                clientChannel.unsubscribe(listener);
                // confirm unsubscribe
                try {
                    if (!latch.await(CHANNEL_TIMEOUT, SECONDS)) {
                        String message;
                        if (unsubscribeFailure[0] != null) {
                            message = String.format("Error unsubscribing from topic %s: %s", topicName, unsubscribeFailure[0].getMessage());
                        } else if (unsubscribeError[0] != null) {
                            message = String.format("Error unsubscribing from topic %s: %s", topicName, unsubscribeError[0]);
                        } else {
                            message = String.format("Timeout error unsubscribing from topic %s after %s seconds", topicName, CHANNEL_TIMEOUT);
                        }
                        throw new CamelException(message, unsubscribeFailure[0]);
                    }
                } catch (InterruptedException e) {
                    Thread.currentThread().interrupt();
                    // probably shutting down, forget unsubscribe and return
                }
            }
        } finally {
            client.getChannel(META_UNSUBSCRIBE).removeListener(unsubscribeListener);
        }
    }

    /**
     * Builds the CometD endpoint URL; in API version 36.0 replay requires the
     * dedicated /cometd/replay/ endpoint whenever replay options are configured.
     */
    static String getEndpointUrl(final SalesforceComponent component) {
        // In version 36.0 replay is only enabled on a separate endpoint
        if (Double.valueOf(component.getConfig().getApiVersion()) == 36.0) {
            boolean replayOptionsPresent = component.getConfig().getDefaultReplayId() != null || !component.getConfig().getInitialReplayIdMap().isEmpty();
            if (replayOptionsPresent) {
                return component.getSession().getInstanceUrl() + "/cometd/replay/" + component.getConfig().getApiVersion();
            }
        }
        return component.getSession().getInstanceUrl() + "/cometd/" + component.getConfig().getApiVersion();
    }
}
|
CAMEL-15338 salesforce Standard Platform Events support (#4050)
removed hardcoded suffix for salesforce Platform Event Channel.
API names of standard platform events, such as AssetTokenEvent, don’t include a suffix. See https://developer.salesforce.com/docs/atlas.en-us.platform_events.meta/platform_events/platform_events_define_ui.htm
Hardcoding the suffix in the channel name results in subscription failures for standard Salesforce events.
|
components/camel-salesforce/camel-salesforce-component/src/main/java/org/apache/camel/component/salesforce/internal/streaming/SubscriptionHelper.java
|
CAMEL-15338 salesforce Standard Platform Events support (#4050)
|
|
Java
|
apache-2.0
|
470bfcec12517aa19d96f4a4c5cd806805c0ef3b
| 0
|
Akeshihiro/dsworkbench,Torridity/dsworkbench,extremeCrazyCoder/dsworkbench,extremeCrazyCoder/dsworkbench,Akeshihiro/dsworkbench,Torridity/dsworkbench,extremeCrazyCoder/dsworkbench,Torridity/dsworkbench,Akeshihiro/dsworkbench,Torridity/dsworkbench,Akeshihiro/dsworkbench,extremeCrazyCoder/dsworkbench
|
/*
* Copyright 2015 Torridity.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package de.tor.tribes.util.village;
import de.tor.tribes.control.ManageableType;
import de.tor.tribes.io.DataHolder;
import de.tor.tribes.types.FightReport;
import de.tor.tribes.types.ext.Village;
import de.tor.tribes.util.BuildingSettings;
import de.tor.tribes.util.ServerSettings;
import java.awt.Color;
import java.util.Arrays;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.jdom2.Element;
/**
* @author extremeCrazyCoder
*
* Class to hold extra information of a Village like
* spy information
* watchtower / church Range
*/
public class KnownVillage extends ManageableType {
private static Logger logger = LogManager.getLogger("KnownVillage");
// Building levels indexed like BuildingSettings.BUILDING_NAMES; -1 means "level unknown".
private int[] buildings;
// The village this extra information belongs to.
private Village village;
// Unix timestamp (seconds) of the last information update — see updateTime().
private long lastUpdate;
/**
 * Creates an empty information record for the given village; all building
 * levels start out unknown (-1) and the update timestamp is set to "now".
 */
public KnownVillage(Village pVillage) {
    this.village = pVillage;
    buildings = new int[BuildingSettings.BUILDING_NAMES.length];
    Arrays.fill(buildings, -1);
    updateTime();
}

/**
 * Restores an information record from its XML representation.
 */
public KnownVillage(Element e) {
    buildings = new int[BuildingSettings.BUILDING_NAMES.length];
    Arrays.fill(buildings, -1);
    loadFromXml(e);
}
/**
 * Restores village reference, timestamp and building levels from XML.
 * Missing or unparsable building attributes are logged and left at -1.
 */
@Override
public final void loadFromXml(Element pElement) {
    this.village = DataHolder.getSingleton().getVillagesById()
            .get(Integer.parseInt(pElement.getChild("id").getText()));
    this.lastUpdate = Long.parseLong(pElement.getChild("update").getText());

    Element buildingElm = pElement.getChild("buildings");
    for (int idx = 0; idx < BuildingSettings.BUILDING_NAMES.length; idx++) {
        String val = buildingElm.getAttributeValue(BuildingSettings.BUILDING_NAMES[idx]);
        if (val == null) {
            logger.debug("property null: {}", BuildingSettings.BUILDING_NAMES[idx]);
            continue;
        }
        try {
            this.buildings[idx] = Integer.parseInt(val);
        } catch (NumberFormatException e) {
            this.buildings[idx] = -1;
            logger.debug("unable to decode property: {} with {}", BuildingSettings.BUILDING_NAMES[idx], val, e);
        }
    }
}
/**
 * Serializes this record (village id, update timestamp, building levels) to XML.
 *
 * @return the XML element, or {@code null} if serialization fails (the failure is
 *         now logged instead of being silently swallowed)
 */
@Override
public Element toXml(String elementName) {
    Element kVillage = new Element(elementName);
    try {
        kVillage.addContent(new Element("id").setText(Integer.toString(village.getId())));
        kVillage.addContent(new Element("update").setText(Long.toString(lastUpdate)));

        Element buildingsE = new Element("buildings");
        for (int i = 0; i < BuildingSettings.BUILDING_NAMES.length; i++) {
            buildingsE.setAttribute(BuildingSettings.BUILDING_NAMES[i], Integer.toString(buildings[i]));
        }
        kVillage.addContent(buildingsE);
    } catch (Exception e) {
        // keep the null-on-failure contract for callers, but never fail silently
        logger.error("Failed to serialize KnownVillage to XML", e);
        return null;
    }
    return kVillage;
}
/**
 * @return the village this extra information belongs to
 */
public Village getVillage() {
    return village;
}
/**
 * Merges building information from another record for the same village.
 * If this record is strictly newer, only unknown (-1) levels are filled in from
 * the other; otherwise every known level of the other record wins and the
 * update timestamp is taken over.
 */
public void updateInformation(KnownVillage other) {
    final boolean otherIsNewer = lastUpdate <= other.getLastUpdate();
    for (int i = 0; i < BuildingSettings.BUILDING_NAMES.length; i++) {
        final int otherLevel = other.getBuildingLevelByName(BuildingSettings.BUILDING_NAMES[i]);
        // newer other: copy all known levels; newer this: fill only our gaps
        if (otherIsNewer ? otherLevel != -1 : buildings[i] == -1) {
            buildings[i] = otherLevel;
        }
    }
    if (otherIsNewer) {
        lastUpdate = other.getLastUpdate();
    }
}
/**
 * @return the church range of this village, or -1 if the church level is unknown
 */
public int getChurchRange() {
    final int level = getBuildingLevelByName("church");
    return level == -1 ? -1 : BuildingSettings.CHURCH_RANGE[level];
}

/**
 * @return the watchtower range of this village, or -1 if the watchtower level is unknown
 */
public double getWatchtowerRange() {
    final int level = getBuildingLevelByName("watchtower");
    return level == -1 ? -1 : BuildingSettings.WATCHTOWER_RANGE[level];
}

/**
 * @return the color used for drawing this village's range (the owning tribe's marker color)
 */
public Color getRangeColor() {
    return village.getTribe().getMarkerColor();
}

/** @return true if a church level greater than zero is known for this village */
public boolean hasChurch() {
    return getBuildingLevelByName("church") > 0;
}

/** @return true if a watchtower level greater than zero is known for this village */
public boolean hasWatchtower() {
    return getBuildingLevelByName("watchtower") > 0;
}
public void setChurchLevel(int pLevel) {
if(!ServerSettings.getSingleton().isChurch()) {
logger.info("Tried to set Church level " + pLevel + "on server without");
return;
}
setBuildingLevelByName("church", pLevel);
updateTime();
}
public void removeChurchInfo() {
if(!ServerSettings.getSingleton().isChurch()) {
logger.info("Tried to remove Church on server without");
return;
}
setBuildingLevelByName("church", -1);
updateTime();
}
public void setWatchtowerLevel(int pLevel) {
if(!ServerSettings.getSingleton().isWatchtower()) {
logger.info("Tried to set Watchtower level " + pLevel + "on server without");
return;
}
setBuildingLevelByName("watchtower", pLevel);
updateTime();
}
/**
 * Clears the stored watchtower level (resets it to "unknown").
 * Ignored (with an info log) on servers where the watchtower is disabled.
 */
public void removeWatchtowerInfo() {
    if (!ServerSettings.getSingleton().isWatchtower()) {
        // completed the previously truncated log message
        logger.info("Tried to remove Watchtower info on server without watchtower");
        return;
    }
    setBuildingLevelByName("watchtower", -1);
    updateTime();
}
/** @return unix timestamp (seconds) of the last information update */
public long getLastUpdate() {
    return this.lastUpdate;
}
// Stamps this record with the current unix time in seconds, matching the
// resolution used by getLastUpdate() and updateInformation().
private void updateTime() {
    lastUpdate = System.currentTimeMillis() / 1000L;
}
/**
 * Returns the stored level of the given building.
 * Returns -1 if there is no information about that building stored and
 * -2 if the building name is unknown (logged as info).
 *
 * @param pBuilding the name of the building
 * @return the stored level, or a negative error code as described above
 */
public int getBuildingLevelByName(String pBuilding) {
    int id = BuildingSettings.getBuildingIdByName(pBuilding);
    if(id == -2) {
        logger.info("Building " + pBuilding + " not found");
        return -2;
    }
    return buildings[id];
}
/**
 * Stores a building level, looked up by building name.
 * Unknown building names and invalid levels are logged and ignored.
 *
 * @param pBuilding the name of the building
 * @param pLevel    the level to store
 */
public void setBuildingLevelByName(String pBuilding, int pLevel) {
    final int buildingId = BuildingSettings.getBuildingIdByName(pBuilding);
    if (buildingId == -2) {
        logger.info("Building " + pBuilding + " not found");
    } else if (!BuildingSettings.isBuildingLevelValid(pBuilding, pLevel)) {
        logger.error("Building level invalid " + pBuilding + ": " + pLevel);
    } else {
        buildings[buildingId] = pLevel;
    }
}
/**
 * Stores a building level by internal building id.
 * Invalid levels are logged and ignored.
 *
 * @param pBuildingId the internal id of the building
 * @param pLevel      the level to store
 */
public void setBuildingLevelById(int pBuildingId, int pLevel) {
    if (BuildingSettings.isBuildingLevelValid(pBuildingId, pLevel)) {
        buildings[pBuildingId] = pLevel;
    } else {
        logger.error("Building level invalid " + pBuildingId + ": " + pLevel);
    }
}
// Shown in UI lists/combo boxes; delegates to the village's full name.
@Override
public String toString() {
    return village.getFullName();
}
/**
 * Updates the stored building levels from a fight report.
 *
 * With full building espionage every spied building that can actually be
 * built on this server is taken over. Without building espionage only the
 * wall level after the fight is stored (wall damage is reported even
 * without spies).
 *
 * @param pReport the report to read building information from
 */
void updateInformation(FightReport pReport) {
    if (pReport.getSpyLevel() >= pReport.SPY_LEVEL_BUILDINGS) {
        for(int i = 0; i < buildings.length; i++) {
            if(pReport.getBuilding(i) != -1) {
                // building was spied
                if(BuildingSettings.getMaxBuildingLevel(BuildingSettings.BUILDING_NAMES[i]) > 0) {
                    // building can actually be built on this server
                    setBuildingLevelById(i, pReport.getBuilding(i));
                    updateTime();
                }
            }
        }
    } else if (pReport.getWallAfter() != -1) {
        // set wall destruction (works also without spying)
        setBuildingLevelByName("wall", pReport.getWallAfter());
    }
}
/**
 * Computes the remaining farm space of this village.
 *
 * @return the maximum farm space for the current farm level minus the
 *         population used by all known buildings, or -1 if the farm
 *         level has not been read yet
 */
public int getFarmSpace() {
    if(getBuildingLevelByName("farm") < 0) return -1; //building not yet read
    int maxFarmSpace = BuildingSettings.getMaxFarmSpace(getBuildingLevelByName("farm"));
    int buildingPop = 0;
    for(int i = 0; i < BuildingSettings.BUILDING_NAMES.length; i++) {
        // NOTE(review): buildings[i] may still be -1 (unknown level) here;
        // assumes BuildingSettings.getPopUsageById() tolerates that - TODO confirm
        logger.trace("Building Farm {} / {} / {}", i, buildings[i], BuildingSettings.getPopUsageById(i, buildings[i]));
        buildingPop += BuildingSettings.getPopUsageById(i, buildings[i]);
    }
    logger.debug("Getting Farm Space {} / {} / {}", village.getCoordAsString(), maxFarmSpace, buildingPop);
    return maxFarmSpace - buildingPop;
}
}
|
Core/src/main/java/de/tor/tribes/util/village/KnownVillage.java
|
/*
* Copyright 2015 Torridity.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package de.tor.tribes.util.village;
import de.tor.tribes.control.ManageableType;
import de.tor.tribes.io.DataHolder;
import de.tor.tribes.types.FightReport;
import de.tor.tribes.types.ext.Village;
import de.tor.tribes.util.BuildingSettings;
import de.tor.tribes.util.ServerSettings;
import java.awt.Color;
import java.util.Arrays;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.jdom2.Element;
/**
* @author extremeCrazyCoder
*
* Class to hold extra information of a Village like
* spy information
* watchtower / church Range
*/
public class KnownVillage extends ManageableType {
private static Logger logger = LogManager.getLogger("KnownVillage");
private int[] buildings;
private Village village;
private long lastUpdate;
public KnownVillage(Village pVillage) {
buildings = new int[BuildingSettings.BUILDING_NAMES.length];
Arrays.fill(buildings, -1);
this.village = pVillage;
updateTime();
}
public KnownVillage(Element e) {
buildings = new int[BuildingSettings.BUILDING_NAMES.length];
Arrays.fill(buildings, -1);
loadFromXml(e);
}
@Override
public final void loadFromXml(Element pElement) {
this.village = DataHolder.getSingleton().getVillagesById().get(Integer.parseInt(pElement.getChild("id").getText()));
this.lastUpdate = Long.parseLong(pElement.getChild("update").getText());
Element buildingElm = pElement.getChild("buildings");
for(int i = 0; i < BuildingSettings.BUILDING_NAMES.length; i++) {
String val = buildingElm.getAttributeValue(BuildingSettings.BUILDING_NAMES[i]);
if(val != null) {
try {
this.buildings[i] = Integer.parseInt(val);
} catch(NumberFormatException e) {
this.buildings[i] = -1;
logger.debug("unable to decode property: {} with {}", BuildingSettings.BUILDING_NAMES[i], val, e);
}
}
else
logger.debug("property null: {}", BuildingSettings.BUILDING_NAMES[i]);
}
}
@Override
public Element toXml(String elementName) {
Element kVillage = new Element(elementName);
try {
kVillage.addContent(new Element("id").setText(Integer.toString(village.getId())));
kVillage.addContent(new Element("update").setText(Long.toString(lastUpdate)));
Element buildingsE = new Element("buildings");
for(int i = 0; i < BuildingSettings.BUILDING_NAMES.length; i++) {
buildingsE.setAttribute(BuildingSettings.BUILDING_NAMES[i], Integer.toString(buildings[i]));
}
kVillage.addContent(buildingsE);
} catch (Exception e) {
return null;
}
return kVillage;
}
public Village getVillage() {
return village;
}
public void updateInformation(KnownVillage other) {
if(lastUpdate > other.getLastUpdate()) {
//This is newer.... Just get Information that has not been discovered here
for(int i = 0; i < BuildingSettings.BUILDING_NAMES.length; i++) {
if(buildings[i] == -1) {
buildings[i] = other.getBuildingLevelByName(BuildingSettings.BUILDING_NAMES[i]);
}
}
}
else {
//Other is newer ... Copy everything that is valid
for(int i = 0; i < BuildingSettings.BUILDING_NAMES.length; i++) {
int level = other.getBuildingLevelByName(BuildingSettings.BUILDING_NAMES[i]);
if(level != -1) {
buildings[i] = level;
}
}
lastUpdate = other.getLastUpdate();
}
}
/**
 * @return the church range in fields, or -1 if the church level is
 *         unknown or the building name could not be resolved
 */
public int getChurchRange() {
    int level = getBuildingLevelByName("church");
    // also guard against -2 ("building name not found"), which would
    // otherwise be used as a negative array index
    if (level < 0) return -1;
    return BuildingSettings.CHURCH_RANGE[level];
}
/**
 * @return the watchtower range in fields, or -1 if the watchtower level
 *         is unknown or the building name could not be resolved
 */
public double getWatchtowerRange() {
    int level = getBuildingLevelByName("watchtower");
    // also guard against -2 ("building name not found")
    if (level < 0) return -1;
    return BuildingSettings.WATCHTOWER_RANGE[level];
}
/**
* @return the rangeColor
*/
public Color getRangeColor() {
return village.getTribe().getMarkerColor();
}
public boolean hasChurch() {
return getBuildingLevelByName("church") > 0;
}
public boolean hasWatchtower() {
return getBuildingLevelByName("watchtower") > 0;
}
/**
 * Stores the church level for this village and refreshes the update time.
 * Ignored (with an info log) on servers where the church is disabled.
 *
 * @param pLevel the new church level
 */
public void setChurchLevel(int pLevel) {
    if (!ServerSettings.getSingleton().isChurch()) {
        // fixed the malformed log message (missing spaces/trailing word)
        logger.info("Tried to set Church level " + pLevel + " on server without church");
        return;
    }
    setBuildingLevelByName("church", pLevel);
    updateTime();
}
/**
 * Clears the stored church level (resets it to "unknown").
 * Ignored (with an info log) on servers where the church is disabled.
 */
public void removeChurchInfo() {
    if (!ServerSettings.getSingleton().isChurch()) {
        // completed the previously truncated log message
        logger.info("Tried to remove Church info on server without church");
        return;
    }
    setBuildingLevelByName("church", -1);
    updateTime();
}
/**
 * Stores the watchtower level for this village and refreshes the update time.
 * Ignored (with an info log) on servers where the watchtower is disabled.
 *
 * @param pLevel the new watchtower level
 */
public void setWatchtowerLevel(int pLevel) {
    // bug fix: this method previously checked isChurch(), so watchtower
    // updates were wrongly rejected on servers that have watchtowers but
    // no churches; it must check its own feature flag like setChurchLevel()
    if (!ServerSettings.getSingleton().isWatchtower()) {
        logger.info("Tried to set Watchtower level " + pLevel + " on server without watchtower");
        return;
    }
    setBuildingLevelByName("watchtower", pLevel);
    updateTime();
}
/**
 * Clears the stored watchtower level (resets it to "unknown").
 * Ignored (with an info log) on servers where the watchtower is disabled.
 */
public void removeWatchtowerInfo() {
    // bug fix: previously checked isChurch() instead of isWatchtower(),
    // wrongly rejecting the operation on watchtower-only servers
    if (!ServerSettings.getSingleton().isWatchtower()) {
        logger.info("Tried to remove Watchtower info on server without watchtower");
        return;
    }
    setBuildingLevelByName("watchtower", -1);
    updateTime();
}
public long getLastUpdate() {
return lastUpdate;
}
private void updateTime() {
lastUpdate = System.currentTimeMillis() / 1000L;
}
/**
* This Function returns the Level of a given Building
* will return -1 if there is no information about that building stored
* will return -2 if the building has not been found (logged as info)
* @param building: Name of the Building
* @return: The Level
*/
public int getBuildingLevelByName(String pBuilding) {
int id = BuildingSettings.getBuildingIdByName(pBuilding);
if(id == -2) {
logger.info("Building " + pBuilding + " not found");
return -2;
}
return buildings[id];
}
public void setBuildingLevelByName(String pBuilding, int pLevel) {
int id = BuildingSettings.getBuildingIdByName(pBuilding);
if(id == -2) {
logger.info("Building " + pBuilding + " not found");
return;
}
if(!BuildingSettings.isBuildingLevelValid(pBuilding, pLevel)) {
logger.error("Building level invalid " + pBuilding + ": " + pLevel);
return;
}
buildings[id] = pLevel;
}
public void setBuildingLevelById(int pBuildingId, int pLevel) {
if(!BuildingSettings.isBuildingLevelValid(pBuildingId, pLevel)) {
logger.error("Building level invalid " + pBuildingId + ": " + pLevel);
return;
}
buildings[pBuildingId] = pLevel;
}
@Override
public String toString() {
return village.getFullName();
}
void updateInformation(FightReport pReport) {
if (pReport.getSpyLevel() >= pReport.SPY_LEVEL_BUILDINGS) {
for(int i = 0; i < buildings.length; i++) {
if(pReport.getBuilding(i) != -1) {
//Building was spyed
if(BuildingSettings.getMaxBuildingLevel(BuildingSettings.BUILDING_NAMES[i]) > 0) {
//Building can be build
setBuildingLevelById(i, pReport.getBuilding(i));
updateTime();
}
}
}
} else if (pReport.getWallAfter() != -1) {
// set wall destruction (works also without spying)
setBuildingLevelByName("wall", pReport.getWallAfter());
}
}
public int getFarmSpace() {
if(getBuildingLevelByName("farm") < 0) return -1; //building not yet read
int maxFarmSpace = BuildingSettings.getMaxFarmSpace(getBuildingLevelByName("farm"));
int buildingPop = 0;
for(int i = 0; i < BuildingSettings.BUILDING_NAMES.length; i++) {
logger.trace("Building Farm {} / {} / {}", i, buildings[i], BuildingSettings.getPopUsageById(i, buildings[i]));
buildingPop += BuildingSettings.getPopUsageById(i, buildings[i]);
}
logger.debug("Getting Farm Space {} / {} / {}", village.getCoordAsString(), maxFarmSpace, buildingPop);
return maxFarmSpace - buildingPop;
}
}
|
Fix #67
|
Core/src/main/java/de/tor/tribes/util/village/KnownVillage.java
|
Fix #67
|
|
Java
|
apache-2.0
|
18e83f8962df379e005eca313a5d56f0808c4689
| 0
|
b2ihealthcare/snow-owl,b2ihealthcare/snow-owl,IHTSDO/snow-owl,IHTSDO/snow-owl,IHTSDO/snow-owl,b2ihealthcare/snow-owl,b2ihealthcare/snow-owl,IHTSDO/snow-owl
|
/*
* Copyright 2011-2015 B2i Healthcare Pte Ltd, http://b2i.sg
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.b2international.snowowl.snomed.datastore.quicksearch;
import java.util.Map;
import com.b2international.snowowl.core.ApplicationContext;
import com.b2international.snowowl.core.api.IBranchPath;
import com.b2international.snowowl.core.quicksearch.FullQuickSearchElement;
import com.b2international.snowowl.core.quicksearch.QuickSearchContentResult;
import com.b2international.snowowl.core.terminology.ComponentCategory;
import com.b2international.snowowl.datastore.IBranchPathMap;
import com.b2international.snowowl.datastore.quicksearch.IQuickSearchContentProvider;
import com.b2international.snowowl.snomed.SnomedConstants.Concepts;
import com.b2international.snowowl.snomed.SnomedPackage;
import com.b2international.snowowl.snomed.common.SnomedTerminologyComponentConstants;
import com.b2international.snowowl.snomed.datastore.SnomedTerminologyBrowser;
import com.b2international.snowowl.snomed.datastore.id.SnomedIdentifier;
import com.b2international.snowowl.snomed.datastore.id.SnomedIdentifiers;
import com.google.common.collect.ImmutableList;
/**
 * Quick search provider that offers a "missing" SNOMED CT concept entry
 * when the query expression is a valid concept identifier that does not
 * yet exist on the current branch.
 *
 * @since 4.4
 */
public class MissingSnomedComponentQuickSearchContentProvider implements IQuickSearchContentProvider {

    @Override
    public QuickSearchContentResult getComponents(String queryExpression, IBranchPathMap branchPathMap, int limit, Map<String, Object> configuration) {
        try {
            // throws IllegalArgumentException for malformed identifiers
            final SnomedIdentifier identifier = SnomedIdentifiers.of(queryExpression);
            if (ComponentCategory.CONCEPT.equals(identifier.getComponentCategory())) {
                final IBranchPath branch = branchPathMap.getBranchPath(SnomedPackage.eINSTANCE);
                final SnomedTerminologyBrowser browser = ApplicationContext.getInstance().getServiceChecked(SnomedTerminologyBrowser.class);
                if (!browser.exists(branch, queryExpression)) {
                    // offer the id itself as a missing concept under the root concept
                    final FullQuickSearchElement element = new FullQuickSearchElement(queryExpression, Concepts.ROOT_CONCEPT, queryExpression, false, SnomedTerminologyComponentConstants.CONCEPT);
                    return new QuickSearchContentResult(1, ImmutableList.of(element));
                }
            }
        } catch (IllegalArgumentException e) {
            // ignore invalid SNOMED CT IDs and return empty result
        }
        return new QuickSearchContentResult();
    }
}
|
snomed/com.b2international.snowowl.snomed.datastore/src/com/b2international/snowowl/snomed/datastore/quicksearch/MissingSnomedComponentQuickSearchContentProvider.java
|
/*
* Copyright 2011-2015 B2i Healthcare Pte Ltd, http://b2i.sg
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.b2international.snowowl.snomed.datastore.quicksearch;
import java.util.Map;
import com.b2international.snowowl.core.ApplicationContext;
import com.b2international.snowowl.core.api.IBranchPath;
import com.b2international.snowowl.core.quicksearch.CompactQuickSearchElement;
import com.b2international.snowowl.core.quicksearch.QuickSearchContentResult;
import com.b2international.snowowl.datastore.IBranchPathMap;
import com.b2international.snowowl.datastore.quicksearch.IQuickSearchContentProvider;
import com.b2international.snowowl.snomed.SnomedConstants.Concepts;
import com.b2international.snowowl.snomed.SnomedPackage;
import com.b2international.snowowl.snomed.datastore.SnomedTerminologyBrowser;
import com.b2international.snowowl.snomed.datastore.id.SnomedIdentifiers;
import com.google.common.collect.ImmutableList;
/**
 * Quick search provider that offers a "missing" SNOMED CT component entry
 * when the query expression is a syntactically valid SNOMED CT identifier
 * that does not exist on the current branch.
 *
 * @since 4.4
 */
public class MissingSnomedComponentQuickSearchContentProvider implements IQuickSearchContentProvider {

    @Override
    public QuickSearchContentResult getComponents(String queryExpression, IBranchPathMap branchPathMap, int limit, Map<String, Object> configuration) {
        try {
            // NOTE(review): presumably throws IllegalArgumentException for
            // malformed identifiers - confirm against SnomedIdentifiers.validate
            SnomedIdentifiers.validate(queryExpression);
            final IBranchPath branch = branchPathMap.getBranchPath(SnomedPackage.eINSTANCE);
            if (!ApplicationContext.getInstance().getServiceChecked(SnomedTerminologyBrowser.class).exists(branch, queryExpression)) {
                // offer the id itself as a missing concept under the root concept
                return new QuickSearchContentResult(1, ImmutableList.of(new CompactQuickSearchElement(queryExpression, Concepts.ROOT_CONCEPT, queryExpression, false)));
            }
        } catch (IllegalArgumentException e) {
            // ignore invalid SNOMED CT IDs and return empty result
        }
        return new QuickSearchContentResult();
    }
}
|
[quicksearch] Fix validation of missing component IDs
|
snomed/com.b2international.snowowl.snomed.datastore/src/com/b2international/snowowl/snomed/datastore/quicksearch/MissingSnomedComponentQuickSearchContentProvider.java
|
[quicksearch] Fix validation of missing component IDs
|
|
Java
|
apache-2.0
|
78e79856af264d621659168abaace2c7b0eae1bf
| 0
|
jcshen007/cloudstack,jcshen007/cloudstack,cinderella/incubator-cloudstack,GabrielBrascher/cloudstack,argv0/cloudstack,DaanHoogland/cloudstack,argv0/cloudstack,mufaddalq/cloudstack-datera-driver,DaanHoogland/cloudstack,resmo/cloudstack,jcshen007/cloudstack,resmo/cloudstack,DaanHoogland/cloudstack,cinderella/incubator-cloudstack,resmo/cloudstack,cinderella/incubator-cloudstack,wido/cloudstack,cinderella/incubator-cloudstack,DaanHoogland/cloudstack,mufaddalq/cloudstack-datera-driver,DaanHoogland/cloudstack,cinderella/incubator-cloudstack,mufaddalq/cloudstack-datera-driver,mufaddalq/cloudstack-datera-driver,wido/cloudstack,jcshen007/cloudstack,GabrielBrascher/cloudstack,wido/cloudstack,resmo/cloudstack,jcshen007/cloudstack,GabrielBrascher/cloudstack,GabrielBrascher/cloudstack,GabrielBrascher/cloudstack,DaanHoogland/cloudstack,resmo/cloudstack,jcshen007/cloudstack,resmo/cloudstack,wido/cloudstack,resmo/cloudstack,jcshen007/cloudstack,GabrielBrascher/cloudstack,argv0/cloudstack,DaanHoogland/cloudstack,mufaddalq/cloudstack-datera-driver,mufaddalq/cloudstack-datera-driver,wido/cloudstack,wido/cloudstack,wido/cloudstack,argv0/cloudstack,GabrielBrascher/cloudstack,argv0/cloudstack,argv0/cloudstack
|
/**
* Copyright (C) 2010 Cloud.com. All rights reserved.
*
* This software is licensed under the GNU General Public License v3 or later.
*
* It is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or any later
version.
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
*/
package com.cloud.capacity;
import java.util.List;
import org.apache.log4j.Logger;
import com.cloud.agent.Listener;
import com.cloud.agent.api.AgentControlAnswer;
import com.cloud.agent.api.AgentControlCommand;
import com.cloud.agent.api.Answer;
import com.cloud.agent.api.Command;
import com.cloud.agent.api.StartupCommand;
import com.cloud.agent.api.StartupStorageCommand;
import com.cloud.capacity.dao.CapacityDao;
import com.cloud.capacity.dao.CapacityDaoImpl;
import com.cloud.exception.ConnectionException;
import com.cloud.host.HostVO;
import com.cloud.host.Status;
import com.cloud.storage.Storage;
import com.cloud.utils.db.SearchCriteria;
/**
 * Agent listener that records the allocated-storage capacity of storage
 * hosts when they connect to the management server.
 */
public class StorageCapacityListener implements Listener {

    CapacityDao _capacityDao;
    float _overProvisioningFactor = 1.0f;

    /**
     * @param _capacityDao            dao used to persist capacity entries
     * @param _overProvisioningFactor multiplier applied to the reported total size
     */
    public StorageCapacityListener(CapacityDao _capacityDao,
            float _overProvisioningFactor) {
        super();
        this._capacityDao = _capacityDao;
        this._overProvisioningFactor = _overProvisioningFactor;
    }

    @Override
    public boolean processAnswers(long agentId, long seq, Answer[] answers) {
        return false; // answers are not handled by this listener
    }

    @Override
    public boolean processCommands(long agentId, long seq, Command[] commands) {
        return false; // commands are not handled by this listener
    }

    @Override
    public AgentControlAnswer processControlCommand(long agentId,
            AgentControlCommand cmd) {
        return null; // control commands are not handled by this listener
    }

    /**
     * Persists a storage-allocated capacity entry when a storage host connects.
     * Non-storage startups are ignored.
     */
    @Override
    public void processConnect(HostVO server, StartupCommand startup, boolean forRebalance) throws ConnectionException {
        if (!(startup instanceof StartupStorageCommand)) {
            return;
        }
        // bug fix: the previous code built a SearchCriteria and ran a DAO
        // search whose result ("capacities") was never used - dead code left
        // over from a removed delete loop; the pointless query is gone
        StartupStorageCommand ssCmd = (StartupStorageCommand) startup;
        if (ssCmd.getResourceType() == Storage.StorageResourceType.STORAGE_HOST) {
            CapacityVO capacity = new CapacityVO(server.getId(),
                    server.getDataCenterId(), server.getPodId(), server.getClusterId(), 0L,
                    (long) (server.getTotalSize() * _overProvisioningFactor),
                    CapacityVO.CAPACITY_TYPE_STORAGE_ALLOCATED);
            _capacityDao.persist(capacity);
        }
    }

    @Override
    public boolean processDisconnect(long agentId, Status state) {
        return false; // disconnects are not handled by this listener
    }

    @Override
    public boolean isRecurring() {
        return false;
    }

    @Override
    public int getTimeout() {
        return 0;
    }

    @Override
    public boolean processTimeout(long agentId, long seq) {
        return false;
    }
}
|
server/src/com/cloud/capacity/StorageCapacityListener.java
|
/**
* Copyright (C) 2010 Cloud.com. All rights reserved.
*
* This software is licensed under the GNU General Public License v3 or later.
*
* It is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or any later
version.
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
*/
package com.cloud.capacity;
import java.util.List;
import com.cloud.agent.Listener;
import com.cloud.agent.api.AgentControlAnswer;
import com.cloud.agent.api.AgentControlCommand;
import com.cloud.agent.api.Answer;
import com.cloud.agent.api.Command;
import com.cloud.agent.api.StartupCommand;
import com.cloud.agent.api.StartupStorageCommand;
import com.cloud.capacity.dao.CapacityDao;
import com.cloud.exception.ConnectionException;
import com.cloud.host.HostVO;
import com.cloud.host.Status;
import com.cloud.storage.Storage;
import com.cloud.utils.db.SearchCriteria;
public class StorageCapacityListener implements Listener {
CapacityDao _capacityDao;
float _overProvisioningFactor = 1.0f;
public StorageCapacityListener(CapacityDao _capacityDao,
float _overProvisioningFactor) {
super();
this._capacityDao = _capacityDao;
this._overProvisioningFactor = _overProvisioningFactor;
}
@Override
public boolean processAnswers(long agentId, long seq, Answer[] answers) {
return false;
}
@Override
public boolean processCommands(long agentId, long seq, Command[] commands) {
return false;
}
@Override
public AgentControlAnswer processControlCommand(long agentId,
AgentControlCommand cmd) {
return null;
}
@Override
public void processConnect(HostVO server, StartupCommand startup, boolean forRebalance) throws ConnectionException {
if (!(startup instanceof StartupStorageCommand)) {
return;
}
SearchCriteria<CapacityVO> capacitySC = _capacityDao.createSearchCriteria();
capacitySC.addAnd("hostOrPoolId", SearchCriteria.Op.EQ, server.getId());
capacitySC.addAnd("dataCenterId", SearchCriteria.Op.EQ,
server.getDataCenterId());
capacitySC.addAnd("podId", SearchCriteria.Op.EQ, server.getPodId());
List<CapacityVO> capacities = _capacityDao.search(capacitySC, null);
// remove old entries, we'll recalculate them anyway
if ((capacities != null) && !capacities.isEmpty()) {
for (CapacityVO capacity : capacities) {
_capacityDao.remove(capacity.getId());
}
}
StartupStorageCommand ssCmd = (StartupStorageCommand) startup;
if (ssCmd.getResourceType() == Storage.StorageResourceType.STORAGE_HOST) {
CapacityVO capacity = new CapacityVO(server.getId(),
server.getDataCenterId(), server.getPodId(), server.getClusterId(), 0L,
(long) (server.getTotalSize() * _overProvisioningFactor),
CapacityVO.CAPACITY_TYPE_STORAGE_ALLOCATED);
_capacityDao.persist(capacity);
}
}
@Override
public boolean processDisconnect(long agentId, Status state) {
return false;
}
@Override
public boolean isRecurring() {
return false;
}
@Override
public int getTimeout() {
return 0;
}
@Override
public boolean processTimeout(long agentId, long seq) {
return false;
}
}
|
Bug 13059: Removed the code which was deleting capacity entries on every MS restart.
Reviewed-By: Kishan
|
server/src/com/cloud/capacity/StorageCapacityListener.java
|
Bug 13059: Removed the code which was deleting capacity entries on every MS restart. Reviewed-By: Kishan
|
|
Java
|
bsd-3-clause
|
c9344649869df97a7c952481145f130a3b5d2ebd
| 0
|
sba1/bio-ontology-zp
|
package de.charite.zpgen;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
/**
* Provides a method to walk a ZFIN file. Calls {@link ZFINVisitor#visit(ZFINEntry)} for each encountered entry.
*
* See http://zfin.org/downloads for a current format.
*
* @author Sebastian Bauer
* @author Sebastian Koehler
*/
public class ZFINWalker {
/**
* The current file format for pheno.txt as of: 8 Sep 2013<br>
* 0 Gene ID<br>
* 1 Entrez Zebrafish Gene ID<br>
* 2 Entrez Human Gene ID<br>
* 3 ZFIN Gene Symbol<br>
* 4 Affected Structure or Process 1 subterm OBO ID<br>
* 5 Affected Structure or Process 1 subterm name<br>
* 6 Post-Composed Relationship ID<br>
* 7 Post-Composed Relationship Name<br>
* 8 Affected Structure or Process 1 superterm OBO ID<br>
* 9 Affected Structure or Process 1 superterm name<br>
* 10 Phenotype Keyword OBO ID<br>
* 11 Phenotype Quality<br>
* 12 Phenotype Tag<br>
* 13 Affected Structure or Process 2 subterm OBO ID<br>
* 14 Affected Structure or Process 2 subterm name<br>
* 15 Post-Composed Relationship ID<br>
* 16 Post-Composed Relationship Name<br>
* 17 Affected Structure or Process 2 superterm OBO ID<br>
* 18 Affected Structure or Process 2 superterm name<br>
*/
/**
* The current file format for phenotype.txt as of: 2015<br>
* 0 Genotype ID <br>
* 1 Genotype Name <br>
* 2 Start Stage ID <br>
* 3 Start Stage Name<br>
* 4 End Stage ID<br>
* 5 End Stage Name <br>
* 6 Affected Structure or Process 1 subterm ID <br>
* 7 Affected Structure or Process 1 subterm Name <br>
* 8 Post-composed Relationship ID <br>
* 9 Post-composed Relationship Name <br>
* 10 Affected Structure or Process 1 superterm ID <br>
* 11 Affected Structure or Process 1 superterm Name <br>
* 12 Phenotype Keyword ID <br>
* 13 Phenotype Keyword Name <br>
* 14 Phenotype Tag <br>
* 15 Affected Structure or Process 2 subterm ID <br>
* 16 Affected Structure or Process 2 subterm name <br>
* 17 Post-composed Relationship (rel) ID <br>
* 18 Post-composed Relationship (rel) Name <br>
* 19 Affected Structure or Process 2 superterm ID <br>
* 20 Affected Structure or Process 2 superterm name <br>
* 21 Publication ID <br>
* 22 Environment ID<br>
*/
// Utility class; never instantiated.
private ZFINWalker() {
};
// Column indices of the pipe- or tab-separated ZFIN pheno file
// (see the format description in the class comment above).
static int COLUMN_ZFIN_GENE_ID = 0;
static int COLUMN_TERM1_SUBTERM_ID = 4;
static int COLUMN_TERM1_SUBTERM_NAME = 5;
static int COLUMN_TERM1_SUPERTERM_ID = 8;
static int COLUMN_TERM1_SUPERTERM_NAME = 9;
static int COLUMN_TERM2_SUBTERM_ID = 13;
static int COLUMN_TERM2_SUBTERM_NAME = 14;
static int COLUMN_TERM2_SUPERTERM_ID = 17;
static int COLUMN_TERM2_SUPERTERM_NAME = 18;
static int COLUMN_PATO_ID = 10;
static int COLUMN_PATO_NAME = 11;
static int COLUMN_PATO_MODIFIER = 12;
/**
 * Walks the given ZFIN file and calls the visitor for every parsed entry.
 * Lines may be pipe- or tab-separated; each is mapped to a {@link ZFINEntry}.
 *
 * NOTE(review): any parse error terminates the whole JVM via System.exit(1)
 * rather than propagating an exception - consider rethrowing instead.
 *
 * @param input   stream over the ZFIN pheno file
 * @param visitor callback invoked once per parsed entry
 * @throws IOException on read errors
 */
static public void walk(InputStream input, ZFINVisitor visitor) throws IOException {
    BufferedReader in = new BufferedReader(new InputStreamReader(input));
    String line;
    while ((line = in.readLine()) != null) {
        try {
            ZFINEntry entry = new ZFINEntry();
            String[] sp = null;
            // the file format uses '|' in newer dumps and tabs in older ones
            if (line.contains("|"))
                sp = line.split("\\|", -1);
            else
                sp = line.split("\t", -1);
            entry.geneZfinID = sp[COLUMN_ZFIN_GENE_ID];
            entry.entity1SupertermId = sp[COLUMN_TERM1_SUPERTERM_ID];
            entry.entity1SupertermName = sp[COLUMN_TERM1_SUPERTERM_NAME];
            entry.entity1SubtermId = sp[COLUMN_TERM1_SUBTERM_ID];
            entry.entity1SubtermName = sp[COLUMN_TERM1_SUBTERM_NAME];
            entry.entity2SupertermId = sp[COLUMN_TERM2_SUPERTERM_ID];
            entry.entity2SupertermName = sp[COLUMN_TERM2_SUPERTERM_NAME];
            entry.entity2SubtermId = sp[COLUMN_TERM2_SUBTERM_ID];
            entry.entity2SubtermName = sp[COLUMN_TERM2_SUBTERM_NAME];
            entry.patoID = sp[COLUMN_PATO_ID];
            entry.patoName = sp[COLUMN_PATO_NAME];
            entry.isAbnormal = sp[COLUMN_PATO_MODIFIER].equalsIgnoreCase("abnormal");
            visitor.visit(entry);
        } catch (Exception e) {
            System.out.println("Problem in line: " + line);
            e.printStackTrace();
            System.exit(1);
        }
    }
}
}
|
zpgen/src/main/java/de/charite/zpgen/ZFINWalker.java
|
package de.charite.zpgen;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
/**
* Provides a method to walkes a ZFIN file.
* Calls {@link ZFINVisitor#visit(ZFINEntry)} for each encountered entry.
*
* See http://zfin.org/downloads for a current format.
*
* @author Sebastian Bauer
* @author Sebastian Koehler
*/
public class ZFINWalker
{
private ZFINWalker() {};
/*
* The current file format as of: 8 Sep 2013
* 0 Gene ID
* 1 Entrez Zebrafish Gene ID
* 2 Entrez Human Gene ID
* 3 ZFIN Gene Symbol
* 4 Affected Structure or Process 1 subterm OBO ID
* 5 Affected Structure or Process 1 subterm name
* 6 Post-Composed Relationship ID
* 7 Post-Composed Relationship Name
* 8 Affected Structure or Process 1 superterm OBO ID
* 9 Affected Structure or Process 1 superterm name
* 10 Phenotype Keyword OBO ID
* 11 Phenotype Quality
* 12 Phenotype Tag
* 13 Affected Structure or Process 2 subterm OBO ID
* 14 Affected Structure or Process 2 subterm name
* 15 Post-Composed Relationship ID
* 16 Post-Composed Relationship Name
* 17 Affected Structure or Process 2 superterm OBO ID
* 18 Affected Structure or Process 2 superterm name
*/
static int COLUMN_ZFIN_GENE_ID = 0;
static int COLUMN_TERM1_SUBTERM_ID = 4;
static int COLUMN_TERM1_SUBTERM_NAME = 5;
static int COLUMN_TERM1_SUPERTERM_ID = 8;
static int COLUMN_TERM1_SUPERTERM_NAME = 9;
static int COLUMN_TERM2_SUBTERM_ID = 13;
static int COLUMN_TERM2_SUBTERM_NAME = 14;
static int COLUMN_TERM2_SUPERTERM_ID = 17;
static int COLUMN_TERM2_SUPERTERM_NAME = 18;
static int COLUMN_PATO_ID = 10;
static int COLUMN_PATO_NAME = 11;
static int COLUMN_PATO_MODIFIER = 12;
static public void walk(InputStream input, ZFINVisitor visitor) throws IOException
{
BufferedReader in = new BufferedReader(new InputStreamReader(input));
String line;
while ((line = in.readLine()) != null)
{
try{
ZFINEntry entry = new ZFINEntry();
String [] sp = null;
if (line.contains("|"))
sp = line.split("\\|",-1);
else
sp = line.split("\t",-1);
entry.geneZfinID = sp[COLUMN_ZFIN_GENE_ID];
entry.entity1SupertermId = sp[COLUMN_TERM1_SUPERTERM_ID];
entry.entity1SupertermName = sp[COLUMN_TERM1_SUPERTERM_NAME];
entry.entity1SubtermId = sp[COLUMN_TERM1_SUBTERM_ID];
entry.entity1SubtermName = sp[COLUMN_TERM1_SUBTERM_NAME];
entry.entity2SupertermId = sp[COLUMN_TERM2_SUPERTERM_ID];
entry.entity2SupertermName = sp[COLUMN_TERM2_SUPERTERM_NAME];
entry.entity2SubtermId = sp[COLUMN_TERM2_SUBTERM_ID];
entry.entity2SubtermName = sp[COLUMN_TERM2_SUBTERM_NAME];
entry.patoID = sp[COLUMN_PATO_ID];
entry.patoName = sp[COLUMN_PATO_NAME];
entry.isAbnormal = sp[COLUMN_PATO_MODIFIER].equalsIgnoreCase("abnormal");
visitor.visit(entry);
}
catch (Exception e) {
System.out.println("Problem in line: "+line);
e.printStackTrace();
System.exit(1);
}
}
}
}
|
preparation for parsing additional file phenotype.txt instead of
pheno.txt
|
zpgen/src/main/java/de/charite/zpgen/ZFINWalker.java
|
preparation for parsing additional file phenotype.txt instead of pheno.txt
|
|
Java
|
bsd-3-clause
|
01b333d841a872e4cb57696b7a5c83885887de51
| 0
|
agmip/quadui
|
package org.agmip.ui.quadui;
import java.io.File;
import java.io.FileInputStream;
import java.util.ArrayList;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.Scanner;
import java.util.zip.GZIPInputStream;
import java.util.zip.ZipEntry;
import java.util.zip.ZipFile;
import org.agmip.dome.DomeUtil;
import org.agmip.dome.Engine;
import org.agmip.translators.csv.DomeInput;
import org.agmip.util.MapUtil;
import org.apache.pivot.util.concurrent.Task;
import org.codehaus.jackson.map.ObjectMapper;
import org.codehaus.jackson.type.TypeReference;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class ApplyDomeTask extends Task<HashMap> {
private static Logger log = LoggerFactory.getLogger(ApplyDomeTask.class);
private HashMap<String, HashMap<String, Object>> ovlDomes = new HashMap<String, HashMap<String, Object>>();
private HashMap<String, HashMap<String, Object>> stgDomes = new HashMap<String, HashMap<String, Object>>();
private HashMap<String, Object> linkDomes = new HashMap<String, Object>();
private HashMap<String, String> ovlLinks = new HashMap<String, String>();
private HashMap<String, String> stgLinks = new HashMap<String, String>();
private LinkedHashMap<String, String> orgOvlLinks = new LinkedHashMap<String, String>();
private LinkedHashMap<String, String> orgStgLinks = new LinkedHashMap<String, String>();
// private HashMap<String, ArrayList<String>> wthLinks = new HashMap<String, ArrayList<String>>();
// private HashMap<String, ArrayList<String>> soilLinks = new HashMap<String, ArrayList<String>>();
private HashMap source;
private String mode;
private boolean autoApply;
/**
 * Prepares the DOME application task by loading the link file and the
 * overlay/strategy DOME files up front.
 *
 * @param linkFile     path to the DOME link file (CSV or ACEB)
 * @param fieldFile    path to the field-overlay DOME file
 * @param strategyFile path to the seasonal-strategy DOME file (only read in "strategy" mode)
 * @param mode         run mode; "strategy" additionally loads strategy domes
 * @param m            the raw translated data set this task will modify
 * @param autoApply    when true, the single loaded DOME is applied to every entry
 */
public ApplyDomeTask(String linkFile, String fieldFile, String strategyFile, String mode, HashMap m, boolean autoApply) {
    this.source = m;
    this.mode = mode;
    this.autoApply = autoApply;
    // Setup the domes here.
    loadDomeLinkFile(linkFile);
    log.debug("link csv: {}", ovlLinks);
    if (mode.equals("strategy")) {
        loadDomeFile(strategyFile, stgDomes);
    }
    loadDomeFile(fieldFile, ovlDomes);
}
/**
 * Loads the DOME link file (CSV or gzipped-JSON ".aceb") into
 * {@code linkDomes}, then splits it into the overlay link map
 * ({@code ovlLinks}) and strategy link map ({@code stgLinks}).
 * Leaves {@code linkDomes} null when the file has an unsupported extension.
 */
private void loadDomeLinkFile(String fileName) {
    String fileNameTest = fileName.toUpperCase();
    log.debug("Loading LINK file: {}", fileName);
    linkDomes = null;
    try {
        // ZIP handling was never finished; kept for reference.
        // if (fileNameTest.endsWith(".ZIP")) {
        // log.debug("Entering Zip file handling");
        // ZipFile z = null;
        // try {
        // z = new ZipFile(fileName);
        // Enumeration entries = z.entries();
        // while (entries.hasMoreElements()) {
        // // Do we handle nested zips? Not yet.
        // ZipEntry entry = (ZipEntry) entries.nextElement();
        // File zipFileName = new File(entry.getName());
        // if (zipFileName.getName().toLowerCase().endsWith(".csv") && ! zipFileName.getName().startsWith(".")) {
        // log.debug("Processing file: {}", zipFileName.getName());
        // DomeInput translator = new DomeInput();
        // translator.readCSV(z.getInputStream(entry));
        // HashMap<String, Object> link = translator.getDome();
        // log.debug("link info: {}", link.toString());
        // if (!link.isEmpty()) {
        // if (link.containsKey("link_overlay")) {
        // // Combine csv link
        // }
        // if (link.containsKey("link_stragty")) {
        // // Combine csv link
        // }
        // }
        // }
        // }
        // z.close();
        // } catch (Exception ex) {
        // log.error("Error processing DOME file: {}", ex.getMessage());
        // HashMap<String, Object> d = new HashMap<String, Object>();
        // d.put("errors", ex.getMessage());
        // }
        // } else
        if (fileNameTest.endsWith(".CSV")) {
            log.debug("Entering single CSV file DOME handling");
            DomeInput translator = new DomeInput();
            linkDomes = (HashMap<String, Object>) translator.readFile(fileName);
        }
        else if (fileNameTest.endsWith(".ACEB")) {
            log.debug("Entering single ACEB file DOME handling");
            ObjectMapper mapper = new ObjectMapper();
            // "\\A" delimiter slurps the whole gzipped stream in one token.
            String json = new Scanner(new GZIPInputStream(new FileInputStream(fileName)), "UTF-8").useDelimiter("\\A").next();
            linkDomes = mapper.readValue(json, new TypeReference<HashMap<String, Object>>() {});
            // The ACEB file wraps the link map in a single-entry outer map.
            linkDomes = (HashMap) linkDomes.values().iterator().next();
        }
        if (linkDomes != null) {
            log.debug("link info: {}", linkDomes.toString());
            try {
                if (!linkDomes.isEmpty()) {
                    if (linkDomes.containsKey("link_overlay")) {
                        ovlLinks = (HashMap<String, String>) linkDomes.get("link_overlay");
                    }
                    // "link_stragty" (sic) is the key used by the data format; do not "fix" it.
                    if (linkDomes.containsKey("link_stragty")) {
                        stgLinks = (HashMap<String, String>) linkDomes.get("link_stragty");
                    }
                }
            } catch (Exception ex) {
                // NOTE(review): the error map 'd' is built but never returned or
                // stored, so this failure is effectively swallowed — confirm intent.
                log.error("Error processing DOME file: {}", ex.getMessage());
                HashMap<String, Object> d = new HashMap<String, Object>();
                d.put("errors", ex.getMessage());
            }
        }
    } catch (Exception ex) {
        // NOTE(review): same swallowed-error pattern as above.
        log.error("Error processing DOME file: {}", ex.getMessage());
        HashMap<String, Object> d = new HashMap<String, Object>();
        d.put("errors", ex.getMessage());
    }
}
/**
 * Collects the linked DOME ids for one data entry, looking the entry up by
 * experiment name, weather station id and soil id (in that order) and
 * joining the non-empty results with "|".
 */
private String getLinkIds(String domeType, HashMap entry) {
    String byExname = getLinkIds(domeType, "EXNAME", MapUtil.getValueOr(entry, "exname", ""));
    String byWstId = getLinkIds(domeType, "WST_ID", MapUtil.getValueOr(entry, "wst_id", ""));
    String bySoilId = getLinkIds(domeType, "SOIL_ID", MapUtil.getValueOr(entry, "soil_id", ""));
    StringBuilder joined = new StringBuilder();
    for (String part : new String[] {byExname, byWstId, bySoilId}) {
        if (!part.equals("")) {
            if (joined.length() > 0) {
                joined.append("|");
            }
            joined.append(part);
        }
    }
    return joined.toString();
}
/**
 * Resolves the DOME ids linked to a single id of the given type
 * ("EXNAME", "WST_ID" or "SOIL_ID"). Candidate link keys are tried from the
 * most generic ("<type>_ALL") to the most specific ("<type>_<id>"), with
 * intermediate forms for generated scenario/seasonal names; every hit is
 * appended, separated by "|".
 */
private String getLinkIds(String domeType, String idType, String id) {
    HashMap<String, String> links;
    if (domeType.equals("strategy")) {
        links = stgLinks;
    } else if (domeType.equals("overlay")) {
        links = ovlLinks;
    } else {
        return "";
    }
    if (links.isEmpty() || id.equals("")) {
        return "";
    }
    ArrayList<String> candidates = new ArrayList();
    candidates.add(idType + "_ALL");
    if (domeType.equals("strategy") && id.matches("[^_]+_\\d+$")) {
        // Strategy mode: also try the base name without the "_<n>" suffix.
        candidates.add(idType + "_" + id.replaceAll("_\\d+$", ""));
    } else if (domeType.equals("overlay") && id.matches(".+_\\d+__\\d+$")) {
        // Overlay mode on a generated scenario name: try both partially and
        // fully stripped forms of the "_<n>__<m>" suffix.
        candidates.add(idType + "_" + id.replaceAll("__\\d+$", ""));
        candidates.add(idType + "_" + id.replaceAll("_\\d+__\\d+$", ""));
    }
    candidates.add(idType + "_" + id);
    StringBuilder domeIds = new StringBuilder();
    for (String key : candidates) {
        if (links.containsKey(key)) {
            if (domeIds.length() > 0) {
                domeIds.append("|");
            }
            domeIds.append(links.get(key));
        }
    }
    return domeIds.toString();
}
/**
 * Records which DOME ids were applied to this entry in the original-link map
 * for the given dome type, keyed by experiment name when available, otherwise
 * by soil id, otherwise by weather station id.
 */
private void setOriLinkIds(HashMap entry, String domeIds, String domeType) {
    HashMap<String, String> links;
    if (domeType.equals("strategy")) {
        links = orgStgLinks;
    } else if (domeType.equals("overlay")) {
        links = orgOvlLinks;
    } else {
        return;
    }
    String exname = MapUtil.getValueOr(entry, "exname", "");
    boolean seasonalApplied = "Y".equals(MapUtil.getValueOr(entry, "seasonal_dome_applied", ""));
    if (exname.matches(".+_\\d+__\\d+$") && seasonalApplied) {
        // Strip the generated "__<m>" scenario suffix so the link points back
        // at the original experiment name.
        exname = exname.replaceAll("__\\d+$", "");
    }
    if (!exname.equals("")) {
        links.put("EXNAME_" + exname, domeIds);
        return;
    }
    String soilId = MapUtil.getValueOr(entry, "soil_id", "");
    if (!soilId.equals("")) {
        links.put("SOIL_ID_" + soilId, domeIds);
        return;
    }
    String wstId = MapUtil.getValueOr(entry, "wst_id", "");
    if (!wstId.equals("")) {
        links.put("WST_ID_" + wstId, domeIds);
    }
}
/**
 * Loads DOME definitions from a file into the given map, keyed by the name
 * produced by {@link DomeUtil#generateDomeName}. Supports three formats:
 * a ZIP of CSVs, a single CSV, or a gzipped-JSON ".aceb" file.
 * Domes whose generated name is the placeholder "----" are skipped.
 */
private void loadDomeFile(String fileName, HashMap<String, HashMap<String, Object>> domes) {
    String fileNameTest = fileName.toUpperCase();
    log.info("Loading DOME file: {}", fileName);
    if (fileNameTest.endsWith(".ZIP")) {
        log.debug("Entering Zip file handling");
        ZipFile z = null;
        try {
            z = new ZipFile(fileName);
            Enumeration entries = z.entries();
            while (entries.hasMoreElements()) {
                // Do we handle nested zips? Not yet.
                ZipEntry entry = (ZipEntry) entries.nextElement();
                File zipFileName = new File(entry.getName());
                // Skip hidden files (e.g. macOS "._" resource forks) and non-CSV entries.
                if (zipFileName.getName().toLowerCase().endsWith(".csv") && !zipFileName.getName().startsWith(".")) {
                    log.debug("Processing file: {}", zipFileName.getName());
                    DomeInput translator = new DomeInput();
                    translator.readCSV(z.getInputStream(entry));
                    HashMap<String, Object> dome = translator.getDome();
                    log.debug("dome info: {}", dome.toString());
                    String domeName = DomeUtil.generateDomeName(dome);
                    if (!domeName.equals("----")) {
                        domes.put(domeName, new HashMap<String, Object>(dome));
                    }
                }
            }
            z.close();
        } catch (Exception ex) {
            // NOTE(review): error map 'd' is discarded, so failures here only
            // surface in the log — confirm this is intended.
            log.error("Error processing DOME file: {}", ex.getMessage());
            HashMap<String, Object> d = new HashMap<String, Object>();
            d.put("errors", ex.getMessage());
        }
    } else if (fileNameTest.endsWith(".CSV")) {
        log.debug("Entering single CSV file DOME handling");
        try {
            DomeInput translator = new DomeInput();
            HashMap<String, Object> dome = (HashMap<String, Object>) translator.readFile(fileName);
            String domeName = DomeUtil.generateDomeName(dome);
            log.debug("Dome name: {}", domeName);
            log.debug("Dome layout: {}", dome.toString());
            domes.put(domeName, dome);
        } catch (Exception ex) {
            log.error("Error processing DOME file: {}", ex.getMessage());
            HashMap<String, Object> d = new HashMap<String, Object>();
            d.put("errors", ex.getMessage());
        }
    } else if (fileNameTest.endsWith(".ACEB")) {
        log.debug("Entering single ACEB file DOME handling");
        try {
            ObjectMapper mapper = new ObjectMapper();
            // "\\A" delimiter reads the entire decompressed stream at once.
            String json = new Scanner(new GZIPInputStream(new FileInputStream(fileName)), "UTF-8").useDelimiter("\\A").next();
            HashMap<String, HashMap<String, Object>> tmp = mapper.readValue(json, new TypeReference<HashMap<String, HashMap<String, Object>>>() {});
            // domes.putAll(tmp);
            // Re-key each dome by its generated name rather than the file's keys.
            for (HashMap dome : tmp.values()) {
                String domeName = DomeUtil.generateDomeName(dome);
                if (!domeName.equals("----")) {
                    domes.put(domeName, new HashMap<String, Object>(dome));
                }
            }
            log.debug("Domes layout: {}", domes.toString());
        } catch (Exception ex) {
            log.error("Error processing DOME file: {}", ex.getMessage());
            HashMap<String, Object> d = new HashMap<String, Object>();
            d.put("errors", ex.getMessage());
        }
    }
}
@Override
/**
 * Applies the loaded DOMEs to the source data set and returns the result.
 * The returned map carries "domeoutput" (the modified data), plus "linkDomes",
 * "ovlDomes" and "stgDomes" bookkeeping entries, or an "errors" entry on failure.
 * In "strategy" mode seasonal strategy domes are applied first (generating new
 * experiments), then field overlays are applied to every entry.
 */
public HashMap<String, Object> execute() {
    // First extract all the domes and put them in a HashMap by DOME_NAME
    // Then read the DOME_NAME field of the CSV file.
    // Split the DOME_NAME, and then apply sequentially to the HashMap.
    // PLEASE NOTE: This can be a massive undertaking if the source map
    // is really large. Need to find optimization points.
    HashMap<String, Object> output = new HashMap<String, Object>();
    //HashMap<String, ArrayList<HashMap<String, String>>> dome;
    // Load the dome
    if (ovlDomes.isEmpty() && stgDomes.isEmpty()) {
        // Nothing to do: pass the source through untouched.
        log.info("No DOME to apply.");
        HashMap<String, Object> d = new HashMap<String, Object>();
        //d.put("domeinfo", new HashMap<String, String>());
        d.put("domeoutput", source);
        return d;
    }
    if (autoApply) {
        // Auto-apply requires exactly one dome per category, since the same
        // dome is applied to every entry.
        HashMap<String, Object> d = new HashMap<String, Object>();
        if (ovlDomes.size() > 1) {
            log.error("Auto-Apply feature only allows one field overlay file per run");
            d.put("errors", "Auto-Apply feature only allows one field overlay file per run");
            return d;
        } else if (stgDomes.size() > 1) {
            log.error("Auto-Apply feature only allows one seasonal strategy file per run");
            d.put("errors", "Auto-Apply feature only allows one seasonal strategy file per run");
            return d;
        }
    }
    // Flatten the data and apply the dome.
    Engine domeEngine;
    ArrayList<HashMap<String, Object>> flattenedData = MapUtil.flatPack(source);
    boolean noExpMode = false;
    if (flattenedData.isEmpty()) {
        // No experiments: fall back to operating on raw soil/weather records.
        log.info("No experiment data detected, will try Weather and Soil data only mode");
        noExpMode = true;
        flattenedData.addAll(MapUtil.getRawPackageContents(source, "soils"));
        flattenedData.addAll(MapUtil.getRawPackageContents(source, "weathers"));
        // flatSoilAndWthData(flattenedData, "soil");
        // flatSoilAndWthData(flattenedData, "weather");
        if (flattenedData.isEmpty()) {
            HashMap<String, Object> d = new HashMap<String, Object>();
            log.error("No data found from input file, no DOME will be applied for data set {}", source.toString());
            d.put("errors", "Loaded raw data is invalid, please check input files");
            return d;
        }
    }
    // ---- Phase 1: seasonal strategy domes (strategy mode only) ----
    if (mode.equals("strategy")) {
        log.debug("Domes: {}", stgDomes.toString());
        log.debug("Entering Strategy mode!");
        if (!noExpMode) {
            // Fix up weather/soil references before generation, then re-flatten.
            updateWthReferences(updateExpReferences(true));
            flattenedData = MapUtil.flatPack(source);
        }
        // int cnt = 0;
        // for (HashMap<String, Object> entry : MapUtil.getRawPackageContents(source, "experiments")) {
        //
        // log.debug("Exp at {}: {}, {}",
        // cnt,
        // entry.get("wst_id"),
        // entry.get("clim_id"),
        // ((HashMap) MapUtil.getObjectOr(entry, "weather", new HashMap())).get("wst_id"),
        // ((HashMap) MapUtil.getObjectOr(entry, "weather", new HashMap())).get("clim_id")
        // );
        // cnt++;
        // }
        String stgDomeName = "";
        if (autoApply) {
            // There is exactly one strategy dome here (checked above); grab its name.
            for (String domeName : stgDomes.keySet()) {
                stgDomeName = domeName;
            }
            log.info("Auto apply seasonal strategy: {}", stgDomeName);
        }
        Engine generatorEngine;
        ArrayList<HashMap<String, Object>> strategyResults = new ArrayList<HashMap<String, Object>>();
        for (HashMap<String, Object> entry : flattenedData) {
            if (autoApply) {
                entry.put("seasonal_strategy", stgDomeName);
            }
            // Link-file assignments override the entry's own seasonal_strategy field.
            String domeName = getLinkIds("strategy", entry);
            if (domeName.equals("")) {
                domeName = MapUtil.getValueOr(entry, "seasonal_strategy", "");
            } else {
                entry.put("seasonal_strategy", domeName);
                log.debug("Apply seasonal strategy domes from link csv: {}", domeName);
            }
            setOriLinkIds(entry, domeName, "strategy");
            String tmp[] = domeName.split("[|]");
            String strategyName;
            if (tmp.length > 1) {
                // Only the first strategy dome is applied; the rest are recorded as failed.
                log.warn("Multiple seasonal strategy dome is not supported yet, only the first dome will be applied");
                for (int i = 1; i < tmp.length; i++) {
                    setFailedDomeId(entry, "seasonal_dome_failed", tmp[i]);
                }
            }
            strategyName = tmp[0];
            log.info("Apply DOME {} for {}", strategyName, MapUtil.getValueOr(entry, "exname", MapUtil.getValueOr(entry, "soil_id", MapUtil.getValueOr(entry, "wst_id", "<Unknow>"))));
            log.debug("Looking for ss: {}", strategyName);
            if (!strategyName.equals("")) {
                if (stgDomes.containsKey(strategyName)) {
                    log.debug("Found strategyName");
                    entry.put("dome_applied", "Y");
                    entry.put("seasonal_dome_applied", "Y");
                    generatorEngine = new Engine(stgDomes.get(strategyName), true);
                    if (!noExpMode) {
                        // Check if there is no weather or soil data matched with experiment
                        if (((HashMap) MapUtil.getObjectOr(entry, "weather", new HashMap())).isEmpty()) {
                            log.warn("No scenario weather data found for: [{}]", MapUtil.getValueOr(entry, "exname", "N/A"));
                        }
                        if (((HashMap) MapUtil.getObjectOr(entry, "soil", new HashMap())).isEmpty()) {
                            log.warn("No soil data found for: [{}]", MapUtil.getValueOr(entry, "exname", "N/A"));
                        }
                    }
                    // The generator may expand one entry into many scenario entries.
                    ArrayList<HashMap<String, Object>> newEntries = generatorEngine.applyStg(flatSoilAndWthData(entry, noExpMode));
                    log.debug("New Entries to add: {}", newEntries.size());
                    strategyResults.addAll(newEntries);
                } else {
                    log.error("Cannot find strategy: {}", strategyName);
                    setFailedDomeId(entry, "seasonal_dome_failed", strategyName);
                }
            }
        }
        log.debug("=== FINISHED GENERATION ===");
        log.debug("Generated count: {}", strategyResults.size());
        // Replace the original experiments with the generated scenarios, then re-flatten.
        ArrayList<HashMap<String, Object>> exp = MapUtil.getRawPackageContents(source, "experiments");
        exp.clear();
        exp.addAll(strategyResults);
        flattenedData = MapUtil.flatPack(source);
        if (noExpMode) {
            flattenedData.addAll(MapUtil.getRawPackageContents(source, "soils"));
            flattenedData.addAll(MapUtil.getRawPackageContents(source, "weathers"));
        }
    }
    // ---- Phase 2: field overlay domes (all modes) ----
    if (!noExpMode) {
        if (mode.equals("strategy")) {
            updateExpReferences(false);
        } else {
            updateWthReferences(updateExpReferences(false));
        }
        flattenedData = MapUtil.flatPack(source);
    }
    String ovlDomeName = "";
    if (autoApply) {
        // Exactly one overlay dome exists here (checked above); grab its name.
        for (String domeName : ovlDomes.keySet()) {
            ovlDomeName = domeName;
        }
        log.info("Auto apply field overlay: {}", ovlDomeName);
    }
    int cnt = 0;
    for (HashMap<String, Object> entry : flattenedData) {
        log.debug("Exp at {}: {}, {}, {}",
                cnt,
                entry.get("wst_id"),
                ((HashMap) MapUtil.getObjectOr(entry, "weather", new HashMap())).get("wst_id"),
                ((HashMap) MapUtil.getObjectOr(entry, "weather", new HashMap())).get("clim_id"));
        cnt++;
        if (autoApply) {
            entry.put("field_overlay", ovlDomeName);
        }
        // Link-file assignments override the entry's own field_overlay field.
        String domeName = getLinkIds("overlay", entry);
        if (domeName.equals("")) {
            domeName = MapUtil.getValueOr(entry, "field_overlay", "");
        } else {
            entry.put("field_overlay", domeName);
            log.debug("Apply field overlay domes from link csv: {}", domeName);
        }
        setOriLinkIds(entry, domeName, "overlay");
        if (!domeName.equals("")) {
            // Unlike strategies, multiple overlay domes are applied in sequence.
            String tmp[] = domeName.split("[|]");
            int tmpLength = tmp.length;
            for (int i = 0; i < tmpLength; i++) {
                String tmpDomeId = tmp[i].toUpperCase();
                log.info("Apply DOME {} for {}", tmpDomeId, MapUtil.getValueOr(entry, "exname", MapUtil.getValueOr(entry, "soil_id", MapUtil.getValueOr(entry, "wst_id", "<Unknow>"))));
                log.debug("Looking for dome_name: {}", tmpDomeId);
                if (ovlDomes.containsKey(tmpDomeId)) {
                    domeEngine = new Engine(ovlDomes.get(tmpDomeId));
                    entry.put("dome_applied", "Y");
                    entry.put("field_dome_applied", "Y");
                    domeEngine.apply(flatSoilAndWthData(entry, noExpMode));
                    // Generator commands are only valid in strategy domes; warn if present.
                    ArrayList<String> strategyList = domeEngine.getGenerators();
                    if (!strategyList.isEmpty()) {
                        log.warn("The following DOME commands in the field overlay file are ignored : {}", strategyList.toString());
                    }
                    if (!noExpMode && !mode.equals("strategy")) {
                        // Check if there is no weather or soil data matched with experiment
                        if (((HashMap) MapUtil.getObjectOr(entry, "weather", new HashMap())).isEmpty()) {
                            log.warn("No baseline weather data found for: [{}]", MapUtil.getValueOr(entry, "exname", "N/A"));
                        }
                        if (((HashMap) MapUtil.getObjectOr(entry, "soil", new HashMap())).isEmpty()) {
                            log.warn("No soil data found for: [{}]", MapUtil.getValueOr(entry, "exname", "N/A"));
                        }
                    }
                } else {
                    log.error("Cannot find overlay: {}", tmpDomeId);
                    setFailedDomeId(entry, "field_dome_failed", tmpDomeId);
                }
            }
        }
    }
    // ---- Assemble the result map ----
    if (noExpMode) {
        output.put("domeoutput", source);
    } else {
        output.put("domeoutput", MapUtil.bundle(flattenedData));
    }
    if (linkDomes != null && !linkDomes.isEmpty()) {
        output.put("linkDomes", linkDomes);
    } else {
        // No link file was loaded; report the links recorded while applying domes.
        linkDomes = new HashMap<String, Object>();
        linkDomes.put("link_overlay", orgOvlLinks);
        linkDomes.put("link_stragty", orgStgLinks);
        output.put("linkDomes", linkDomes);
    }
    if (ovlDomes != null && !ovlDomes.isEmpty()) {
        output.put("ovlDomes", ovlDomes);
    }
    if (stgDomes != null && !stgDomes.isEmpty()) {
        output.put("stgDomes", stgDomes);
    }
    return output;
}
// private void flatSoilAndWthData(ArrayList<HashMap<String, Object>> flattenedData, String key) {
// ArrayList<HashMap<String, Object>> arr = MapUtil.getRawPackageContents(source, key + "s");
// for (HashMap<String, Object> data : arr) {
// HashMap<String, Object> tmp = new HashMap<String, Object>();
// tmp.put(key, data);
// flattenedData.add(tmp);
// }
// }
/**
 * In no-experiment mode, wraps a raw soil or weather record under the
 * "soil"/"weather" key the DOME engine expects; otherwise (or for records of
 * unknown shape) returns the data unchanged.
 */
private HashMap<String, Object> flatSoilAndWthData(HashMap<String, Object> data, boolean noExpFlg) {
    if (!noExpFlg) {
        // Experiment entries already carry soil/weather sub-maps.
        return data;
    }
    String wrapperKey = null;
    if (data.containsKey("dailyWeather")) {
        wrapperKey = "weather";
    } else if (data.containsKey("soilLayer")) {
        wrapperKey = "soil";
    }
    if (wrapperKey == null) {
        return data;
    }
    HashMap<String, Object> wrapped = new HashMap<String, Object>();
    wrapped.put(wrapperKey, data);
    return wrapped;
}
/**
 * Records a failed DOME id on the entry under the given key, appending with a
 * "|" separator when earlier failures are already recorded.
 *
 * Bug fix: the original null-check branches were inverted — an existing
 * failure list was overwritten with only the new id, and the very first
 * failure was stored as the literal string "null|&lt;id&gt;".
 */
private void setFailedDomeId(HashMap data, String failKey, String failId) {
    String failIds = (String) data.get(failKey);
    if (failIds == null) {
        // First failure for this key.
        data.put(failKey, failId);
    } else {
        // Append to the existing "|"-separated failure list.
        data.put(failKey, failIds + "|" + failId);
    }
}
/**
 * Pre-scans the experiments and lets each linked DOME update the experiment's
 * weather/soil reference variables before the dome is actually applied.
 *
 * @param isStgDome true to scan seasonal strategy domes (at most one per
 *                  experiment), false for field overlay domes (no limit)
 * @return true if any experiment's CLIM_ID changed (or was normalized),
 *         which forces 8-character WST_IDs downstream
 */
private boolean updateExpReferences(boolean isStgDome) {
    ArrayList<HashMap<String, Object>> expArr = MapUtil.getRawPackageContents(source, "experiments");
    boolean isClimIDchanged = false;
    HashMap<String, HashMap<String, Object>> domes;
    String linkid;
    String domeKey;
    int maxDomeNum;
    if (isStgDome) {
        domes = stgDomes;
        linkid = "strategy";
        domeKey = "seasonal_strategy";
        maxDomeNum = 1;
    } else {
        domes = ovlDomes;
        linkid = "field";
        domeKey = "field_overlay";
        maxDomeNum = Integer.MAX_VALUE;
    }
    // Pre-scan the seasnal DOME to update reference variables
    String autoDomeName = "";
    if (autoApply) {
        // Only one dome exists in auto-apply mode; grab its name.
        for (String domeName : domes.keySet()) {
            autoDomeName = domeName;
        }
    }
    for (HashMap<String, Object> exp : expArr) {
        // NOTE(review): linkid is "field" here but getLinkIds only recognizes
        // "strategy"/"overlay", so for overlays this always returns "" and the
        // entry's own field falls through — confirm whether that is intended.
        String domeName = getLinkIds(linkid, exp);
        if (domeName.equals("")) {
            if (autoApply) {
                domeName = autoDomeName;
            } else {
                domeName = MapUtil.getValueOr(exp, domeKey, "");
            }
        }
        if (!domeName.equals("")) {
            String tmp[] = domeName.split("[|]");
            int tmpLength = Math.min(tmp.length, maxDomeNum);
            for (int i = 0; i < tmpLength; i++) {
                String tmpDomeId = tmp[i].toUpperCase();
                log.debug("Looking for dome_name: {}", tmpDomeId);
                if (domes.containsKey(tmpDomeId)) {
                    log.debug("Found DOME {}", tmpDomeId);
                    Engine domeEngine = new Engine(domes.get(tmpDomeId));
                    isClimIDchanged = domeEngine.updateWSRef(exp, isStgDome, mode.equals("strategy"));
                    // Check if the wst_id is switch to 8-bit long version
                    String wst_id = MapUtil.getValueOr(exp, "wst_id", "");
                    if (isStgDome && wst_id.length() < 8) {
                        // Normalize short station ids to the 8-char form with
                        // the baseline climate suffix.
                        exp.put("wst_id", wst_id + "0XXX");
                        exp.put("clim_id", "0XXX");
                        isClimIDchanged = true;
                    }
                    log.debug("New exp linkage: {}", exp);
                }
            }
        }
    }
    return isClimIDchanged;
}
/**
 * Normalizes weather-station ids (WST_ID) in the weather records to match the
 * references fixed up by {@link #updateExpReferences}. In strategy mode, or
 * when a CLIM_ID changed, or for non-baseline climates, WST_IDs are extended
 * to the 8-character "&lt;station&gt;&lt;clim&gt;" form; otherwise baseline
 * records with a unique station keep the short 4-character id.
 */
private void updateWthReferences(boolean isClimIDchanged) {
    ArrayList<HashMap<String, Object>> wthArr = MapUtil.getRawPackageContents(source, "weathers");
    boolean isStrategy = mode.equals("strategy");
    // Baseline records that may be reverted to a 4-char id, keyed by station.
    HashMap<String, HashMap> unfixedWths = new HashMap();
    // Stations with multiple baselines: these must stay 8-char to disambiguate.
    HashSet<String> fixedWths = new HashSet();
    for (HashMap<String, Object> wth : wthArr) {
        String wst_id = MapUtil.getValueOr(wth, "wst_id", "");
        String clim_id = MapUtil.getValueOr(wth, "clim_id", "");
        if (clim_id.equals("")) {
            // Derive the climate id from an 8-char WST_ID, default to baseline "0XXX".
            if (wst_id.length() == 8) {
                clim_id = wst_id.substring(4, 8);
            } else {
                clim_id = "0XXX";
            }
        }
        // If user assign CLIM_ID in the DOME, or find non-baseline data in the overlay mode, then switch WST_ID to 8-bit version
        if (isStrategy || isClimIDchanged || !clim_id.startsWith("0")) {
            if (wst_id.length() < 8) {
                wth.put("wst_id", wst_id + clim_id);
            }
        } else {
            // Temporally switch all the WST_ID to 8-bit in the data set
            if (wst_id.length() < 8) {
                wth.put("wst_id", wst_id + clim_id);
            } else {
                wst_id = wst_id.substring(0, 4);
            }
            // Check if there is multiple baseline record for one site
            if (unfixedWths.containsKey(wst_id)) {
                log.warn("There is multiple baseline weather data for site [{}], please choose a particular baseline via field overlay DOME", wst_id);
                unfixedWths.remove(wst_id);
                fixedWths.add(wst_id);
            } else {
                if (!fixedWths.contains(wst_id)) {
                    unfixedWths.put(wst_id, wth);
                }
            }
        }
    }
    // If no CLIM_ID provided in the overlay mode, then switch the baseline WST_ID to 4-bit.
    if (!isStrategy && !unfixedWths.isEmpty()) {
        for (String wst_id : unfixedWths.keySet()) {
            unfixedWths.get(wst_id).put("wst_id", wst_id);
        }
    }
}
}
|
src/main/java/org/agmip/ui/quadui/ApplyDomeTask.java
|
package org.agmip.ui.quadui;
import java.io.File;
import java.io.FileInputStream;
import java.util.ArrayList;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Scanner;
import java.util.zip.GZIPInputStream;
import java.util.zip.ZipEntry;
import java.util.zip.ZipFile;
import org.agmip.dome.DomeUtil;
import org.agmip.dome.Engine;
import org.agmip.translators.csv.DomeInput;
import org.agmip.util.MapUtil;
import org.apache.pivot.util.concurrent.Task;
import org.codehaus.jackson.map.ObjectMapper;
import org.codehaus.jackson.type.TypeReference;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class ApplyDomeTask extends Task<HashMap> {
private static Logger log = LoggerFactory.getLogger(ApplyDomeTask.class);
private HashMap<String, HashMap<String, Object>> ovlDomes = new HashMap<String, HashMap<String, Object>>();
private HashMap<String, HashMap<String, Object>> stgDomes = new HashMap<String, HashMap<String, Object>>();
private HashMap<String, Object> linkDomes = new HashMap<String, Object>();
private HashMap<String, String> ovlLinks = new HashMap<String, String>();
private HashMap<String, String> stgLinks = new HashMap<String, String>();
private HashMap<String, String> orgOvlLinks = new HashMap<String, String>();
private HashMap<String, String> orgStgLinks = new HashMap<String, String>();
// private HashMap<String, ArrayList<String>> wthLinks = new HashMap<String, ArrayList<String>>();
// private HashMap<String, ArrayList<String>> soilLinks = new HashMap<String, ArrayList<String>>();
private HashMap source;
private String mode;
private boolean autoApply;
public ApplyDomeTask(String linkFile, String fieldFile, String strategyFile, String mode, HashMap m, boolean autoApply) {
this.source = m;
this.mode = mode;
this.autoApply = autoApply;
// Setup the domes here.
loadDomeLinkFile(linkFile);
log.debug("link csv: {}", ovlLinks);
if (mode.equals("strategy")) {
loadDomeFile(strategyFile, stgDomes);
}
loadDomeFile(fieldFile, ovlDomes);
}
private void loadDomeLinkFile(String fileName) {
String fileNameTest = fileName.toUpperCase();
log.debug("Loading LINK file: {}", fileName);
linkDomes = null;
try {
// if (fileNameTest.endsWith(".ZIP")) {
// log.debug("Entering Zip file handling");
// ZipFile z = null;
// try {
// z = new ZipFile(fileName);
// Enumeration entries = z.entries();
// while (entries.hasMoreElements()) {
// // Do we handle nested zips? Not yet.
// ZipEntry entry = (ZipEntry) entries.nextElement();
// File zipFileName = new File(entry.getName());
// if (zipFileName.getName().toLowerCase().endsWith(".csv") && ! zipFileName.getName().startsWith(".")) {
// log.debug("Processing file: {}", zipFileName.getName());
// DomeInput translator = new DomeInput();
// translator.readCSV(z.getInputStream(entry));
// HashMap<String, Object> link = translator.getDome();
// log.debug("link info: {}", link.toString());
// if (!link.isEmpty()) {
// if (link.containsKey("link_overlay")) {
// // Combine csv link
// }
// if (link.containsKey("link_stragty")) {
// // Combine csv link
// }
// }
// }
// }
// z.close();
// } catch (Exception ex) {
// log.error("Error processing DOME file: {}", ex.getMessage());
// HashMap<String, Object> d = new HashMap<String, Object>();
// d.put("errors", ex.getMessage());
// }
// } else
if (fileNameTest.endsWith(".CSV")) {
log.debug("Entering single CSV file DOME handling");
DomeInput translator = new DomeInput();
linkDomes = (HashMap<String, Object>) translator.readFile(fileName);
}
else if (fileNameTest.endsWith(".ACEB")) {
log.debug("Entering single ACEB file DOME handling");
ObjectMapper mapper = new ObjectMapper();
String json = new Scanner(new GZIPInputStream(new FileInputStream(fileName)), "UTF-8").useDelimiter("\\A").next();
linkDomes = mapper.readValue(json, new TypeReference<HashMap<String, Object>>() {});
linkDomes = (HashMap) linkDomes.values().iterator().next();
}
if (linkDomes != null) {
log.debug("link info: {}", linkDomes.toString());
try {
if (!linkDomes.isEmpty()) {
if (linkDomes.containsKey("link_overlay")) {
ovlLinks = (HashMap<String, String>) linkDomes.get("link_overlay");
}
if (linkDomes.containsKey("link_stragty")) {
stgLinks = (HashMap<String, String>) linkDomes.get("link_stragty");
}
}
} catch (Exception ex) {
log.error("Error processing DOME file: {}", ex.getMessage());
HashMap<String, Object> d = new HashMap<String, Object>();
d.put("errors", ex.getMessage());
}
}
} catch (Exception ex) {
log.error("Error processing DOME file: {}", ex.getMessage());
HashMap<String, Object> d = new HashMap<String, Object>();
d.put("errors", ex.getMessage());
}
}
private String getLinkIds(String domeType, HashMap entry) {
String exname = MapUtil.getValueOr(entry, "exname", "");
String wst_id = MapUtil.getValueOr(entry, "wst_id", "");
String soil_id = MapUtil.getValueOr(entry, "soil_id", "");
String linkIdsExp = getLinkIds(domeType, "EXNAME", exname);
String linkIdsWst = getLinkIds(domeType, "WST_ID", wst_id);
String linkIdsSoil = getLinkIds(domeType, "SOIL_ID", soil_id);
String ret = "";
if (!linkIdsExp.equals("")) {
ret += linkIdsExp + "|";
}
if (!linkIdsWst.equals("")) {
ret += linkIdsWst + "|";
}
if (!linkIdsSoil.equals("")) {
ret += linkIdsSoil;
}
if (ret.endsWith("|")) {
ret = ret.substring(0, ret.length() - 1);
}
return ret;
}
private String getLinkIds(String domeType, String idType, String id) {
HashMap<String, String> links;
if (domeType.equals("strategy")) {
links = stgLinks;
} else if (domeType.equals("overlay")) {
links = ovlLinks;
} else {
return "";
}
if (links.isEmpty() || id.equals("")) {
return "";
}
String linkIds = "";
ArrayList<String> altLinkIds = new ArrayList();
altLinkIds.add(idType + "_ALL");
if (id.matches(".+_\\d+$") && domeType.equals("overlay")) {
altLinkIds.add(idType + "_" + id.replaceAll("_\\d+$", ""));
} else if (id.matches(".+_\\d+__\\d+$") && domeType.equals("strategy")) {
altLinkIds.add(idType + "_" + id.replaceAll("_\\d+__\\d+$", ""));
}
altLinkIds.add(idType + "_" + id);
for (String linkId : altLinkIds) {
if (links.containsKey(linkId)) {
linkIds += links.get(linkId) + "|";
}
}
if (linkIds.endsWith("|")) {
linkIds = linkIds.substring(0, linkIds.length() - 1);
}
return linkIds;
}
private void setOriLinkIds(HashMap entry, String domeIds, String domeType) {
HashMap<String, String> links;
if (domeType.equals("strategy")) {
links = orgStgLinks;
} else if (domeType.equals("overlay")) {
links = orgOvlLinks;
} else {
return;
}
String exname = MapUtil.getValueOr(entry, "exname", "");
if (!exname.equals("")) {
links.put("EXNAME_" + exname, domeIds);
} else {
String soil_id = MapUtil.getValueOr(entry, "soil_id", "");
String wst_id = MapUtil.getValueOr(entry, "wst_id", "");
if (!soil_id.equals("")) {
links.put("SOIL_ID_" + soil_id, domeIds);
} else if (!wst_id.equals("")) {
links.put("WST_ID_" + wst_id, domeIds);
}
}
}
private void loadDomeFile(String fileName, HashMap<String, HashMap<String, Object>> domes) {
String fileNameTest = fileName.toUpperCase();
log.info("Loading DOME file: {}", fileName);
if (fileNameTest.endsWith(".ZIP")) {
log.debug("Entering Zip file handling");
ZipFile z = null;
try {
z = new ZipFile(fileName);
Enumeration entries = z.entries();
while (entries.hasMoreElements()) {
// Do we handle nested zips? Not yet.
ZipEntry entry = (ZipEntry) entries.nextElement();
File zipFileName = new File(entry.getName());
if (zipFileName.getName().toLowerCase().endsWith(".csv") && !zipFileName.getName().startsWith(".")) {
log.debug("Processing file: {}", zipFileName.getName());
DomeInput translator = new DomeInput();
translator.readCSV(z.getInputStream(entry));
HashMap<String, Object> dome = translator.getDome();
log.debug("dome info: {}", dome.toString());
String domeName = DomeUtil.generateDomeName(dome);
if (!domeName.equals("----")) {
domes.put(domeName, new HashMap<String, Object>(dome));
}
}
}
z.close();
} catch (Exception ex) {
log.error("Error processing DOME file: {}", ex.getMessage());
HashMap<String, Object> d = new HashMap<String, Object>();
d.put("errors", ex.getMessage());
}
} else if (fileNameTest.endsWith(".CSV")) {
log.debug("Entering single CSV file DOME handling");
try {
DomeInput translator = new DomeInput();
HashMap<String, Object> dome = (HashMap<String, Object>) translator.readFile(fileName);
String domeName = DomeUtil.generateDomeName(dome);
log.debug("Dome name: {}", domeName);
log.debug("Dome layout: {}", dome.toString());
domes.put(domeName, dome);
} catch (Exception ex) {
log.error("Error processing DOME file: {}", ex.getMessage());
HashMap<String, Object> d = new HashMap<String, Object>();
d.put("errors", ex.getMessage());
}
} else if (fileNameTest.endsWith(".ACEB")) {
log.debug("Entering single ACEB file DOME handling");
try {
ObjectMapper mapper = new ObjectMapper();
String json = new Scanner(new GZIPInputStream(new FileInputStream(fileName)), "UTF-8").useDelimiter("\\A").next();
HashMap<String, HashMap<String, Object>> tmp = mapper.readValue(json, new TypeReference<HashMap<String, HashMap<String, Object>>>() {});
// domes.putAll(tmp);
for (HashMap dome : tmp.values()) {
String domeName = DomeUtil.generateDomeName(dome);
if (!domeName.equals("----")) {
domes.put(domeName, new HashMap<String, Object>(dome));
}
}
log.debug("Domes layout: {}", domes.toString());
} catch (Exception ex) {
log.error("Error processing DOME file: {}", ex.getMessage());
HashMap<String, Object> d = new HashMap<String, Object>();
d.put("errors", ex.getMessage());
}
}
}
@Override
public HashMap<String, Object> execute() {
    // First extract all the domes and put them in a HashMap by DOME_NAME.
    // Then read the DOME_NAME field of the CSV file.
    // Split the DOME_NAME, and then apply sequentially to the HashMap.
    // PLEASE NOTE: This can be a massive undertaking if the source map
    // is really large. Need to find optimization points.
    // Returns {"domeoutput": ...} plus dome/link metadata, or {"errors": ...}.
    HashMap<String, Object> output = new HashMap<String, Object>();
    //HashMap<String, ArrayList<HashMap<String, String>>> dome;
    // Load the dome
    if (ovlDomes.isEmpty() && stgDomes.isEmpty()) {
        log.info("No DOME to apply.");
        HashMap<String, Object> d = new HashMap<String, Object>();
        //d.put("domeinfo", new HashMap<String, String>());
        d.put("domeoutput", source);
        return d;
    }
    // Auto-apply supports at most one overlay and one strategy DOME per run.
    if (autoApply) {
        HashMap<String, Object> d = new HashMap<String, Object>();
        if (ovlDomes.size() > 1) {
            log.error("Auto-Apply feature only allows one field overlay file per run");
            d.put("errors", "Auto-Apply feature only allows one field overlay file per run");
            return d;
        } else if (stgDomes.size() > 1) {
            log.error("Auto-Apply feature only allows one seasonal strategy file per run");
            d.put("errors", "Auto-Apply feature only allows one seasonal strategy file per run");
            return d;
        }
    }
    // Flatten the data and apply the dome.
    Engine domeEngine;
    ArrayList<HashMap<String, Object>> flattenedData = MapUtil.flatPack(source);
    // noExpMode: no experiment records present — operate on raw soil/weather only.
    boolean noExpMode = false;
    if (flattenedData.isEmpty()) {
        log.info("No experiment data detected, will try Weather and Soil data only mode");
        noExpMode = true;
        flattenedData.addAll(MapUtil.getRawPackageContents(source, "soils"));
        flattenedData.addAll(MapUtil.getRawPackageContents(source, "weathers"));
        // flatSoilAndWthData(flattenedData, "soil");
        // flatSoilAndWthData(flattenedData, "weather");
        if (flattenedData.isEmpty()) {
            HashMap<String, Object> d = new HashMap<String, Object>();
            log.error("No data found from input file, no DOME will be applied for data set {}", source.toString());
            d.put("errors", "Loaded raw data is invalid, please check input files");
            return d;
        }
    }
    if (mode.equals("strategy")) {
        log.debug("Domes: {}", stgDomes.toString());
        log.debug("Entering Strategy mode!");
        if (!noExpMode) {
            updateWthReferences(updateExpReferences(true));
            flattenedData = MapUtil.flatPack(source);
        }
        // int cnt = 0;
        // for (HashMap<String, Object> entry : MapUtil.getRawPackageContents(source, "experiments")) {
        //
        // log.debug("Exp at {}: {}, {}",
        // cnt,
        // entry.get("wst_id"),
        // entry.get("clim_id"),
        // ((HashMap) MapUtil.getObjectOr(entry, "weather", new HashMap())).get("wst_id"),
        // ((HashMap) MapUtil.getObjectOr(entry, "weather", new HashMap())).get("clim_id")
        // );
        // cnt++;
        // }
        String stgDomeName = "";
        if (autoApply) {
            // At most one key exists here (guaranteed by the size check above),
            // so this loop just extracts it.
            for (String domeName : stgDomes.keySet()) {
                stgDomeName = domeName;
            }
            log.info("Auto apply seasonal strategy: {}", stgDomeName);
        }
        Engine generatorEngine;
        ArrayList<HashMap<String, Object>> strategyResults = new ArrayList<HashMap<String, Object>>();
        for (HashMap<String, Object> entry : flattenedData) {
            if (autoApply) {
                entry.put("seasonal_strategy", stgDomeName);
            }
            // Link CSV assignments take precedence over the entry's own field.
            String domeName = getLinkIds("strategy", entry);
            if (domeName.equals("")) {
                domeName = MapUtil.getValueOr(entry, "seasonal_strategy", "");
            } else {
                entry.put("seasonal_strategy", domeName);
                log.debug("Apply seasonal strategy domes from link csv: {}", domeName);
            }
            setOriLinkIds(entry, domeName, "strategy");
            String tmp[] = domeName.split("[|]");
            String strategyName;
            if (tmp.length > 1) {
                log.warn("Multiple seasonal strategy dome is not supported yet, only the first dome will be applied");
                for (int i = 1; i < tmp.length; i++) {
                    setFailedDomeId(entry, "seasonal_dome_failed", tmp[i]);
                }
            }
            strategyName = tmp[0];
            log.info("Apply DOME {} for {}", strategyName, MapUtil.getValueOr(entry, "exname", MapUtil.getValueOr(entry, "soil_id", MapUtil.getValueOr(entry, "wst_id", "<Unknow>"))));
            log.debug("Looking for ss: {}", strategyName);
            if (!strategyName.equals("")) {
                if (stgDomes.containsKey(strategyName)) {
                    log.debug("Found strategyName");
                    entry.put("dome_applied", "Y");
                    entry.put("seasonal_dome_applied", "Y");
                    generatorEngine = new Engine(stgDomes.get(strategyName), true);
                    if (!noExpMode) {
                        // Check if there is no weather or soil data matched with experiment
                        if (((HashMap) MapUtil.getObjectOr(entry, "weather", new HashMap())).isEmpty()) {
                            log.warn("No scenario weather data found for: [{}]", MapUtil.getValueOr(entry, "exname", "N/A"));
                        }
                        if (((HashMap) MapUtil.getObjectOr(entry, "soil", new HashMap())).isEmpty()) {
                            log.warn("No soil data found for: [{}]", MapUtil.getValueOr(entry, "exname", "N/A"));
                        }
                    }
                    // The generator engine expands one entry into many scenarios.
                    ArrayList<HashMap<String, Object>> newEntries = generatorEngine.applyStg(flatSoilAndWthData(entry, noExpMode));
                    log.debug("New Entries to add: {}", newEntries.size());
                    strategyResults.addAll(newEntries);
                } else {
                    log.error("Cannot find strategy: {}", strategyName);
                    setFailedDomeId(entry, "seasonal_dome_failed", strategyName);
                }
            }
        }
        log.debug("=== FINISHED GENERATION ===");
        log.debug("Generated count: {}", strategyResults.size());
        // Replace the original experiments with the generated scenarios, then re-flatten.
        ArrayList<HashMap<String, Object>> exp = MapUtil.getRawPackageContents(source, "experiments");
        exp.clear();
        exp.addAll(strategyResults);
        flattenedData = MapUtil.flatPack(source);
        if (noExpMode) {
            flattenedData.addAll(MapUtil.getRawPackageContents(source, "soils"));
            flattenedData.addAll(MapUtil.getRawPackageContents(source, "weathers"));
        }
    }
    if (!noExpMode) {
        if (mode.equals("strategy")) {
            updateExpReferences(false);
        } else {
            updateWthReferences(updateExpReferences(false));
        }
        flattenedData = MapUtil.flatPack(source);
    }
    // Field-overlay pass (runs in both overlay and strategy modes).
    String ovlDomeName = "";
    if (autoApply) {
        for (String domeName : ovlDomes.keySet()) {
            ovlDomeName = domeName;
        }
        log.info("Auto apply field overlay: {}", ovlDomeName);
    }
    int cnt = 0;
    for (HashMap<String, Object> entry : flattenedData) {
        log.debug("Exp at {}: {}, {}, {}",
            cnt,
            entry.get("wst_id"),
            ((HashMap) MapUtil.getObjectOr(entry, "weather", new HashMap())).get("wst_id"),
            ((HashMap) MapUtil.getObjectOr(entry, "weather", new HashMap())).get("clim_id"));
        cnt++;
        if (autoApply) {
            entry.put("field_overlay", ovlDomeName);
        }
        String domeName = getLinkIds("overlay", entry);
        if (domeName.equals("")) {
            domeName = MapUtil.getValueOr(entry, "field_overlay", "");
        } else {
            entry.put("field_overlay", domeName);
            log.debug("Apply field overlay domes from link csv: {}", domeName);
        }
        setOriLinkIds(entry, domeName, "overlay");
        if (!domeName.equals("")) {
            // Multiple overlay DOMEs may be chained with '|' and are applied in order.
            String tmp[] = domeName.split("[|]");
            int tmpLength = tmp.length;
            for (int i = 0; i < tmpLength; i++) {
                String tmpDomeId = tmp[i].toUpperCase();
                log.info("Apply DOME {} for {}", tmpDomeId, MapUtil.getValueOr(entry, "exname", MapUtil.getValueOr(entry, "soil_id", MapUtil.getValueOr(entry, "wst_id", "<Unknow>"))));
                log.debug("Looking for dome_name: {}", tmpDomeId);
                if (ovlDomes.containsKey(tmpDomeId)) {
                    domeEngine = new Engine(ovlDomes.get(tmpDomeId));
                    entry.put("dome_applied", "Y");
                    entry.put("field_dome_applied", "Y");
                    domeEngine.apply(flatSoilAndWthData(entry, noExpMode));
                    // Generator commands are only valid in seasonal-strategy DOMEs.
                    ArrayList<String> strategyList = domeEngine.getGenerators();
                    if (!strategyList.isEmpty()) {
                        log.warn("The following DOME commands in the field overlay file are ignored : {}", strategyList.toString());
                    }
                    if (!noExpMode && !mode.equals("strategy")) {
                        // Check if there is no weather or soil data matched with experiment
                        if (((HashMap) MapUtil.getObjectOr(entry, "weather", new HashMap())).isEmpty()) {
                            log.warn("No baseline weather data found for: [{}]", MapUtil.getValueOr(entry, "exname", "N/A"));
                        }
                        if (((HashMap) MapUtil.getObjectOr(entry, "soil", new HashMap())).isEmpty()) {
                            log.warn("No soil data found for: [{}]", MapUtil.getValueOr(entry, "exname", "N/A"));
                        }
                    }
                } else {
                    log.error("Cannot find overlay: {}", tmpDomeId);
                    setFailedDomeId(entry, "field_dome_failed", tmpDomeId);
                }
            }
        }
    }
    if (noExpMode) {
        output.put("domeoutput", source);
    } else {
        output.put("domeoutput", MapUtil.bundle(flattenedData));
    }
    if (linkDomes != null && !linkDomes.isEmpty()) {
        output.put("linkDomes", linkDomes);
    } else {
        linkDomes = new HashMap<String, Object>();
        linkDomes.put("link_overlay", orgOvlLinks);
        linkDomes.put("link_stragty", orgStgLinks);
        output.put("linkDomes", linkDomes);
    }
    if (ovlDomes != null && !ovlDomes.isEmpty()) {
        output.put("ovlDomes", ovlDomes);
    }
    if (stgDomes != null && !stgDomes.isEmpty()) {
        output.put("stgDomes", stgDomes);
    }
    return output;
}
// private void flatSoilAndWthData(ArrayList<HashMap<String, Object>> flattenedData, String key) {
// ArrayList<HashMap<String, Object>> arr = MapUtil.getRawPackageContents(source, key + "s");
// for (HashMap<String, Object> data : arr) {
// HashMap<String, Object> tmp = new HashMap<String, Object>();
// tmp.put(key, data);
// flattenedData.add(tmp);
// }
// }
/**
 * In no-experiment mode, wraps a bare weather or soil record under the key
 * that the DOME engine expects ("weather"/"soil"); otherwise the record is
 * returned unchanged. Record type is detected by its characteristic sub-key.
 */
private HashMap<String, Object> flatSoilAndWthData(HashMap<String, Object> data, boolean noExpFlg) {
    if (!noExpFlg) {
        return data;
    }
    String wrapKey = null;
    if (data.containsKey("dailyWeather")) {
        wrapKey = "weather";
    } else if (data.containsKey("soilLayer")) {
        wrapKey = "soil";
    }
    if (wrapKey == null) {
        // Neither marker present — pass the record through untouched.
        return data;
    }
    HashMap<String, Object> wrapped = new HashMap<String, Object>();
    wrapped.put(wrapKey, data);
    return wrapped;
}
/**
 * Records a DOME id that failed to apply, appending it to any previously
 * recorded ids under {@code failKey} using '|' as a separator.
 *
 * The original null-check was inverted: an existing value was overwritten
 * with only the newest id, and a missing value produced the literal string
 * "null|&lt;id&gt;".
 */
private void setFailedDomeId(HashMap data, String failKey, String failId) {
    String failIds = (String) data.get(failKey);
    if (failIds == null) {
        // First failure for this key.
        data.put(failKey, failId);
    } else {
        // Append, preserving earlier failures.
        data.put(failKey, failIds + "|" + failId);
    }
}
/**
 * Pre-scans the DOMEs linked to each experiment and lets the DOME engine
 * rewrite the weather/soil reference variables (WST_ID / CLIM_ID).
 *
 * @param isStgDome true to scan seasonal-strategy DOMEs, false for field overlays
 * @return true if any CLIM_ID was changed during the scan
 */
private boolean updateExpReferences(boolean isStgDome) {
    ArrayList<HashMap<String, Object>> expArr = MapUtil.getRawPackageContents(source, "experiments");
    boolean isClimIDchanged = false;
    HashMap<String, HashMap<String, Object>> domes;
    String linkid;
    String domeKey;
    int maxDomeNum;
    if (isStgDome) {
        domes = stgDomes;
        linkid = "strategy";
        domeKey = "seasonal_strategy";
        maxDomeNum = 1; // only the first strategy DOME is honored
    } else {
        domes = ovlDomes;
        linkid = "field";
        domeKey = "field_overlay";
        maxDomeNum = Integer.MAX_VALUE;
    }
    // Pre-scan the seasonal DOME to update reference variables
    String autoDomeName = "";
    if (autoApply) {
        // At most one DOME exists in auto-apply mode; this extracts its name.
        for (String domeName : domes.keySet()) {
            autoDomeName = domeName;
        }
    }
    for (HashMap<String, Object> exp : expArr) {
        String domeName = getLinkIds(linkid, exp);
        if (domeName.equals("")) {
            if (autoApply) {
                domeName = autoDomeName;
            } else {
                domeName = MapUtil.getValueOr(exp, domeKey, "");
            }
        }
        if (!domeName.equals("")) {
            String tmp[] = domeName.split("[|]");
            int tmpLength = Math.min(tmp.length, maxDomeNum);
            for (int i = 0; i < tmpLength; i++) {
                String tmpDomeId = tmp[i].toUpperCase();
                log.debug("Looking for dome_name: {}", tmpDomeId);
                if (domes.containsKey(tmpDomeId)) {
                    log.debug("Found DOME {}", tmpDomeId);
                    Engine domeEngine = new Engine(domes.get(tmpDomeId));
                    // NOTE(review): plain assignment overwrites the flag on every
                    // match, so a change detected for an earlier experiment can be
                    // lost by a later one — confirm whether |= was intended.
                    isClimIDchanged = domeEngine.updateWSRef(exp, isStgDome, mode.equals("strategy"));
                    // Check if the wst_id is switch to 8-bit long version
                    String wst_id = MapUtil.getValueOr(exp, "wst_id", "");
                    if (isStgDome && wst_id.length() < 8) {
                        exp.put("wst_id", wst_id + "0XXX");
                        exp.put("clim_id", "0XXX");
                        isClimIDchanged = true;
                    }
                    log.debug("New exp linkage: {}", exp);
                }
            }
        }
    }
    return isClimIDchanged;
}
/**
 * Normalizes WST_ID / CLIM_ID on the weather records after the experiment
 * references have been updated (8-character vs 4-character site ids).
 *
 * @param isClimIDchanged true if updateExpReferences changed any CLIM_ID
 */
private void updateWthReferences(boolean isClimIDchanged) {
    ArrayList<HashMap<String, Object>> wthArr = MapUtil.getRawPackageContents(source, "weathers");
    boolean isStrategy = mode.equals("strategy");
    // unfixedWths: candidate baseline records that may be reverted to 4-char ids;
    // fixedWths: sites with multiple baselines, which must keep the 8-char form.
    HashMap<String, HashMap> unfixedWths = new HashMap();
    HashSet<String> fixedWths = new HashSet();
    for (HashMap<String, Object> wth : wthArr) {
        String wst_id = MapUtil.getValueOr(wth, "wst_id", "");
        String clim_id = MapUtil.getValueOr(wth, "clim_id", "");
        if (clim_id.equals("")) {
            // Derive CLIM_ID from the tail of an 8-char WST_ID; default "0XXX".
            if (wst_id.length() == 8) {
                clim_id = wst_id.substring(4, 8);
            } else {
                clim_id = "0XXX";
            }
        }
        // If user assign CLIM_ID in the DOME, or find non-baseline data in the overlay mode, then switch WST_ID to 8-bit version
        if (isStrategy || isClimIDchanged || !clim_id.startsWith("0")) {
            if (wst_id.length() < 8) {
                wth.put("wst_id", wst_id + clim_id);
            }
        } else {
            // Temporally switch all the WST_ID to 8-bit in the data set
            if (wst_id.length() < 8) {
                wth.put("wst_id", wst_id + clim_id);
            } else {
                wst_id = wst_id.substring(0, 4);
            }
            // Check if there is multiple baseline record for one site
            if (unfixedWths.containsKey(wst_id)) {
                log.warn("There is multiple baseline weather data for site [{}], please choose a particular baseline via field overlay DOME", wst_id);
                unfixedWths.remove(wst_id);
                fixedWths.add(wst_id);
            } else {
                if (!fixedWths.contains(wst_id)) {
                    unfixedWths.put(wst_id, wth);
                }
            }
        }
    }
    // If no CLIM_ID provided in the overlay mode, then switch the baseline WST_ID to 4-bit.
    if (!isStrategy && !unfixedWths.isEmpty()) {
        for (String wst_id : unfixedWths.keySet()) {
            unfixedWths.get(wst_id).put("wst_id", wst_id);
        }
    }
}
}
|
Update to keep the order of records in order to make sure the hash id will not be changed by random order of records
|
src/main/java/org/agmip/ui/quadui/ApplyDomeTask.java
|
Update to keep the order of records in order to make sure the hash id will not be changed by random order of records
|
|
Java
|
bsd-3-clause
|
f669badb70f1552f03e004d2c58606a38dd38eea
| 0
|
ctubbsii/jline2,scala/scala-jline,scala/scala-jline,fantasy86/jline2,tkruse/jline2,kaulkie/jline2,msaxena2/jline2,renew-tgi/jline2,DALDEI/jline2
|
/*
* Copyright (c) 2002-2007, Marc Prud'hommeaux. All rights reserved.
*
* This software is distributable under the BSD license. See the terms of the
* BSD license in the documentation provided with this software.
*/
package jline.console;
import java.awt.*;
import java.awt.datatransfer.Clipboard;
import java.awt.datatransfer.DataFlavor;
import java.awt.datatransfer.Transferable;
import java.awt.datatransfer.UnsupportedFlavorException;
import java.awt.event.ActionListener;
import java.io.BufferedReader;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileDescriptor;
import java.io.FileInputStream;
import java.io.FilterInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.Reader;
import java.io.Writer;
import java.net.URL;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.ListIterator;
import java.util.Map;
import java.util.ResourceBundle;
import jline.Terminal;
import jline.TerminalFactory;
import jline.console.completer.CandidateListCompletionHandler;
import jline.console.completer.Completer;
import jline.console.completer.CompletionHandler;
import jline.console.history.History;
import jline.console.history.MemoryHistory;
import jline.internal.Configuration;
import jline.internal.InputStreamReader;
import jline.internal.Log;
import org.fusesource.jansi.AnsiOutputStream;
/**
* A reader for console applications. It supports custom tab-completion,
* saveable command history, and command line editing. On some platforms,
* platform-specific commands will need to be issued before the reader will
* function properly. See {@link jline.Terminal#init} for convenience
* methods for issuing platform-specific setup commands.
*
* @author <a href="mailto:mwp1@cornell.edu">Marc Prud'hommeaux</a>
* @author <a href="mailto:jason@planet57.com">Jason Dillon</a>
* @author <a href="mailto:gnodet@gmail.com">Guillaume Nodet</a>
*/
public class ConsoleReader
{
public static final String JLINE_NOBELL = "jline.nobell";
public static final char BACKSPACE = '\b';
public static final char RESET_LINE = '\r';
public static final char KEYBOARD_BELL = '\07';
public static final char NULL_MASK = 0;
public static final int TAB_WIDTH = 4;
private static final ResourceBundle
resources = ResourceBundle.getBundle(CandidateListCompletionHandler.class.getName());
private final Terminal terminal;
private InputStream in;
private final Writer out;
private final CursorBuffer buf = new CursorBuffer();
private String prompt;
private boolean expandEvents = true;
private Character mask;
private Character echoCharacter;
private StringBuffer searchTerm = null;
private String previousSearchTerm = "";
private int searchIndex = -1;
private Reader reader;
private String encoding;
private boolean recording;
private String macro = "";
private String appName;
private URL inputrcUrl;
private ConsoleKeys consoleKeys;
private boolean skipLF = false;
/** Creates a reader on the process's standard input and output with defaults. */
public ConsoleReader() throws IOException {
    this(null, new FileInputStream(FileDescriptor.in), System.out, null);
}
/** Creates a reader on the given streams with a default terminal. */
public ConsoleReader(final InputStream in, final OutputStream out) throws
    IOException
{
    this(null, in, out, null);
}
/** Creates a reader on the given streams and terminal with a default app name. */
public ConsoleReader(final InputStream in, final OutputStream out, final Terminal term) throws
    IOException
{
    this(null, in, out, term);
}
/**
 * Primary constructor: wires up the terminal, encoding, output writer,
 * wrapped input stream, and the inputrc-derived key bindings.
 *
 * @param appName application name used for keymap lookup (null → "JLine")
 * @param in      raw input stream (wrapped by {@link #setInput})
 * @param out     raw output stream (wrapped per terminal requirements)
 * @param term    terminal to use, or null to auto-detect via TerminalFactory
 * @throws IOException if the input stream cannot be wrapped
 */
public ConsoleReader(final String appName, final InputStream in, final OutputStream out, final Terminal term) throws
    IOException
{
    this.appName = appName != null ? appName : "JLine";
    // The original read "encoding != null ? encoding : Configuration.getEncoding()",
    // but the encoding field is still null at this point, so the ternary always
    // fell through to the configured default; state that directly.
    this.encoding = Configuration.getEncoding();
    this.terminal = term != null ? term : TerminalFactory.get();
    this.out = new OutputStreamWriter(terminal.wrapOutIfNeeded(out), this.encoding);
    setInput( in );
    // Resolve the inputrc location: explicit property first, then ~/.inputrc.
    this.inputrcUrl = Configuration.getUrlFrom(
        Configuration.getString(Configuration.JLINE_INPUTRC,
            Configuration.getUrlFrom(new File(Configuration.getUserHome(),
                Configuration.INPUT_RC)).toExternalForm()));
    consoleKeys = new ConsoleKeys(appName, inputrcUrl);
}
/** @return the currently active key map. */
public KeyMap getKeys() {
    return consoleKeys.getKeys();
}
/**
 * Installs the input stream, wrapped so that bulk reads consume at most one
 * byte at a time, and rebuilds the character reader with the current encoding.
 */
void setInput(final InputStream in) throws IOException {
    final InputStream wrapped = terminal.wrapInIfNeeded( in );
    // Wrap the input stream so that characters are only read one by one
    this.in = new FilterInputStream(wrapped) {
        @Override
        public int read(byte[] b, int off, int len) throws IOException {
            // Standard InputStream.read(byte[],int,int) argument checks.
            if (b == null) {
                throw new NullPointerException();
            } else if (off < 0 || len < 0 || len > b.length - off) {
                throw new IndexOutOfBoundsException();
            } else if (len == 0) {
                return 0;
            }
            // Delegate to single-byte read() so at most one byte is
            // consumed per call, regardless of the requested length.
            int c = read();
            if (c == -1) {
                return -1;
            }
            b[off] = (byte)c;
            return 1;
        }
    };
    this.reader = new InputStreamReader( this.in, encoding );
}
/** @return the (wrapped, one-byte-at-a-time) input stream in use. */
public InputStream getInput() {
    return in;
}
/** @return the writer used for all console output. */
public Writer getOutput() {
    return out;
}
/** @return the terminal this reader operates on. */
public Terminal getTerminal() {
    return terminal;
}
/** @return the live edit buffer (contents plus cursor position). */
public CursorBuffer getCursorBuffer() {
    return buf;
}
/** Enables or disables bash-style history event expansion (!!, !n, ^a^b). */
public void setExpandEvents(final boolean expand) {
    this.expandEvents = expand;
}
/** @return whether history event expansion is enabled. */
public boolean getExpandEvents() {
    return expandEvents;
}
/** Sets the prompt string drawn before the edit buffer. */
public void setPrompt(final String prompt) {
    this.prompt = prompt;
}
/** @return the current prompt string, possibly null. */
public String getPrompt() {
    return prompt;
}
/**
* Set the echo character. For example, to have "*" entered when a password is typed:
* <p/>
* <pre>
* myConsoleReader.setEchoCharacter(new Character('*'));
* </pre>
* <p/>
* Setting the character to
* <p/>
* <pre>
* null
* </pre>
* <p/>
* will restore normal character echoing. Setting the character to
* <p/>
* <pre>
* new Character(0)
* </pre>
* <p/>
* will cause nothing to be echoed.
*
* @param c the character to echo to the console in place of the typed character.
*/
public void setEchoCharacter(final Character c) {
    // null = normal echo; Character(0) = echo nothing (see javadoc above).
    this.echoCharacter = c;
}
/**
* Returns the echo character.
*/
public Character getEchoCharacter() {
    // See setEchoCharacter for the meaning of null vs Character(0).
    return echoCharacter;
}
/**
* Erase the current line.
*
* @return false if we failed (e.g., the buffer was empty)
*/
protected final boolean resetLine() throws IOException {
    // Nothing to erase when the cursor sits at the start of the buffer.
    boolean hasContent = buf.cursor != 0;
    if (hasContent) {
        backspaceAll();
    }
    return hasContent;
}
/** Absolute screen position: visible width of the prompt's last line plus the buffer cursor. */
int getCursorPosition() {
    // FIXME: does not handle anything but a line with a prompt absolute position
    String prompt = getPrompt();
    // ANSI escapes in the prompt contribute no width, hence stripAnsi.
    return ((prompt == null) ? 0 : stripAnsi(lastLine(prompt)).length()) + buf.cursor;
}
/**
* Returns the text after the last '\n'.
* prompt is returned if no '\n' characters are present.
* null is returned if prompt is null.
*/
private String lastLine(String str) {
    // Null-safe: returns "" for null input (the javadoc above mentions null,
    // but the empty string is what is actually produced).
    if (str == null) {
        return "";
    }
    int newlineAt = str.lastIndexOf("\n");
    return (newlineAt < 0) ? str : str.substring(newlineAt + 1);
}
private String stripAnsi(String str) {
    // Filter the string through jansi's AnsiOutputStream, which drops
    // escape sequences; on any I/O failure fall back to the raw string.
    if (str == null) return "";
    ByteArrayOutputStream plain = new ByteArrayOutputStream();
    try {
        AnsiOutputStream filter = new AnsiOutputStream(plain);
        filter.write(str.getBytes());
        filter.flush();
    } catch (IOException e) {
        return str;
    }
    return plain.toString();
}
/**
* Move the cursor position to the specified absolute index.
*/
public final boolean setCursorPosition(final int position) throws IOException {
    // True only if the cursor actually moved.
    return moveCursor(position - buf.cursor) != 0;
}
/**
* Set the current buffer's content to the specified {@link String}. The
* visual console will be modified to show the current buffer.
*
* @param buffer the new contents of the buffer.
*/
/**
 * Replaces the buffer contents with {@code buffer}, redrawing only from the
 * first character that differs to minimize terminal output.
 */
private void setBuffer(final String buffer) throws IOException {
    // don't bother modifying it if it is unchanged
    if (buffer.equals(buf.buffer.toString())) {
        return;
    }
    // obtain the difference between the current buffer and the new one
    int sameIndex = 0;
    for (int i = 0, l1 = buffer.length(), l2 = buf.buffer.length(); (i < l1)
        && (i < l2); i++) {
        if (buffer.charAt(i) == buf.buffer.charAt(i)) {
            sameIndex++;
        }
        else {
            break;
        }
    }
    int diff = buf.cursor - sameIndex;
    if (diff < 0) { // we can't backspace here so try from the end of the buffer
        moveToEnd();
        diff = buf.buffer.length() - sameIndex;
    }
    backspace(diff); // go back for the differences
    killLine(); // clear to the end of the line
    buf.buffer.setLength(sameIndex); // the new length
    putString(buffer.substring(sameIndex)); // append the differences
}
private void setBuffer(final CharSequence buffer) throws IOException {
    // Convert and delegate to the String overload.
    setBuffer(String.valueOf(buffer));
}
/**
* Output put the prompt + the current buffer
*/
public final void drawLine() throws IOException {
    String prompt = getPrompt();
    if (prompt != null) {
        print(prompt);
    }
    print(buf.buffer.toString());
    if (buf.length() != buf.cursor) { // not at end of line
        // NOTE(review): moves back length-cursor-1 columns — confirm the -1
        // offset is intended here.
        back(buf.length() - buf.cursor - 1);
    }
    // force drawBuffer to check for weird wrap (after clear screen)
    drawBuffer();
}
/**
* Clear the line and redraw it.
*/
public final void redrawLine() throws IOException {
    // Carriage return moves the cursor back to column 0 before redrawing.
    print(RESET_LINE);
    // flush();
    drawLine();
}
/**
* Clear the buffer and add its contents to the history.
*
* @return the former contents of the buffer.
*/
final String finishBuffer() throws IOException { // FIXME: Package protected because used by tests
    String str = buf.buffer.toString();
    String historyLine = str;
    if (expandEvents) {
        str = expandEvents(str);
        // Escape '!' so the stored history line will not re-expand later.
        historyLine = str.replaceAll("\\!", "\\\\!");
    }
    // we only add it to the history if the buffer is not empty
    // and if mask is null, since having a mask typically means
    // the string was a password. We clear the mask after this call
    if (str.length() > 0) {
        if (mask == null && isHistoryEnabled()) {
            history.add(historyLine);
        }
        else {
            mask = null;
        }
    }
    history.moveToEnd();
    // Reset the edit buffer for the next line.
    buf.buffer.setLength(0);
    buf.cursor = 0;
    return str;
}
/**
* Expand event designator such as !!, !#, !3, etc...
* See http://www.gnu.org/software/bash/manual/html_node/Event-Designators.html
*
* @param str
* @return
*/
protected String expandEvents(String str) throws IOException {
    // Scans str once, replacing bash-style event designators; backslash
    // escapes the next character. Throws IllegalArgumentException when a
    // referenced event cannot be found.
    StringBuilder sb = new StringBuilder();
    boolean escaped = false;
    for (int i = 0; i < str.length(); i++) {
        char c = str.charAt(i);
        if (escaped) {
            // Previous char was '\': emit this char literally.
            sb.append(c);
            escaped = false;
            continue;
        } else if (c == '\\') {
            escaped = true;
            continue;
        } else {
            escaped = false;
        }
        switch (c) {
            case '!':
                if (i + 1 < str.length()) {
                    c = str.charAt(++i);
                    boolean neg = false;
                    String rep = null;
                    int i1, idx;
                    switch (c) {
                        case '!':
                            // "!!": the previous command.
                            if (history.size() == 0) {
                                throw new IllegalArgumentException("!!: event not found");
                            }
                            rep = history.get(history.index() - 1).toString();
                            break;
                        case '#':
                            // "!#": the line typed so far (duplicates sb).
                            sb.append(sb.toString());
                            break;
                        case '?':
                            // "!?string": most recent command containing string.
                            i1 = str.indexOf('?', i + 1);
                            if (i1 < 0) {
                                i1 = str.length();
                            }
                            String sc = str.substring(i + 1, i1);
                            i = i1;
                            idx = searchBackwards(sc);
                            if (idx < 0) {
                                throw new IllegalArgumentException("!?" + sc + ": event not found");
                            } else {
                                rep = history.get(idx).toString();
                            }
                            break;
                        case ' ':
                        case '\t':
                            // '!' followed by whitespace is literal.
                            sb.append('!');
                            sb.append(c);
                            break;
                        case '-':
                            // "!-n": n-th previous command.
                            neg = true;
                            i++;
                            // fall through
                        case '0':
                        case '1':
                        case '2':
                        case '3':
                        case '4':
                        case '5':
                        case '6':
                        case '7':
                        case '8':
                        case '9':
                            // "!n": command number n (absolute or relative).
                            i1 = i;
                            for (; i < str.length(); i++) {
                                c = str.charAt(i);
                                if (c < '0' || c > '9') {
                                    break;
                                }
                            }
                            idx = 0;
                            try {
                                idx = Integer.parseInt(str.substring(i1, i));
                            } catch (NumberFormatException e) {
                                throw new IllegalArgumentException((neg ? "!-" : "!") + str.substring(i1, i) + ": event not found");
                            }
                            if (neg) {
                                if (idx < history.size()) {
                                    rep = (history.get(history.index() - idx)).toString();
                                } else {
                                    throw new IllegalArgumentException((neg ? "!-" : "!") + str.substring(i1, i) + ": event not found");
                                }
                            } else {
                                if (idx >= history.index() - history.size() && idx < history.index()) {
                                    rep = (history.get(idx)).toString();
                                } else {
                                    throw new IllegalArgumentException((neg ? "!-" : "!") + str.substring(i1, i) + ": event not found");
                                }
                            }
                            break;
                        default:
                            // "!prefix": most recent command starting with prefix.
                            String ss = str.substring(i);
                            i = str.length();
                            idx = searchBackwards(ss, history.index(), true);
                            if (idx < 0) {
                                throw new IllegalArgumentException("!" + ss + ": event not found");
                            } else {
                                rep = history.get(idx).toString();
                            }
                            break;
                    }
                    if (rep != null) {
                        sb.append(rep);
                    }
                } else {
                    sb.append(c);
                }
                break;
            case '^':
                // "^old^new": quick substitution on the previous command,
                // only honored at the very start of the line.
                if (i == 0) {
                    int i1 = str.indexOf('^', i + 1);
                    int i2 = str.indexOf('^', i1 + 1);
                    if (i2 < 0) {
                        i2 = str.length();
                    }
                    if (i1 > 0 && i2 > 0) {
                        String s1 = str.substring(i + 1, i1);
                        String s2 = str.substring(i1 + 1, i2);
                        String s = history.get(history.index() - 1).toString().replace(s1, s2);
                        sb.append(s);
                        i = i2 + 1;
                        break;
                    }
                }
                sb.append(c);
                break;
            default:
                sb.append(c);
                break;
        }
    }
    if (escaped) {
        // Trailing lone backslash is kept.
        sb.append('\\');
    }
    String result = sb.toString();
    if (!str.equals(result)) {
        // Echo the expanded line so the user sees what will run.
        print(result);
        println();
        flush();
    }
    return result;
}
/**
* Write out the specified string to the buffer and the output stream.
*/
public final void putString(final CharSequence str) throws IOException {
    buf.write(str);
    if (mask == null) {
        // no masking
        print(str);
    } else if (mask == NULL_MASK) {
        // don't print anything
    } else {
        // Echo the mask character once per input character.
        print(mask, str.length());
    }
    drawBuffer();
}
/**
* Redraw the rest of the buffer from the cursor onwards. This is necessary
* for inserting text into the buffer.
*
* @param clear the number of characters to clear after the end of the buffer
*/
private void drawBuffer(final int clear) throws IOException {
    // debug ("drawBuffer: " + clear);
    if (buf.cursor == buf.length() && clear == 0) {
        // Nothing after the cursor and nothing to clear — no redraw needed.
    } else {
        char[] chars = buf.buffer.substring(buf.cursor).toCharArray();
        if (mask != null) {
            // Masked input: echo the mask character instead of real text.
            Arrays.fill(chars, mask);
        }
        if (terminal.hasWeirdWrap()) {
            // need to determine if wrapping will occur:
            int width = terminal.getWidth();
            int pos = getCursorPosition();
            for (int i = 0; i < chars.length; i++) {
                print(chars[i]);
                if ((pos + i + 1) % width == 0) {
                    print(32); // move cursor to next line by printing dummy space
                    print(13); // CR / not newline.
                }
            }
        } else {
            print(chars);
        }
        clearAhead(clear, chars.length);
        if (terminal.isAnsiSupported()) {
            if (chars.length > 0) {
                back(chars.length);
            }
        } else {
            back(chars.length);
        }
    }
    if (terminal.hasWeirdWrap()) {
        int width = terminal.getWidth();
        // best guess on whether the cursor is in that weird location...
        // Need to do this without calling ansi cursor location methods
        // otherwise it breaks paste of wrapped lines in xterm.
        if (getCursorPosition() > 0 && (getCursorPosition() % width == 0)
                && buf.cursor == buf.length() && clear == 0) {
            // the following workaround is reverse-engineered from looking
            // at what bash sent to the terminal in the same situation
            print(32); // move cursor to next line by printing dummy space
            print(13); // CR / not newline.
        }
    }
}
/**
* Redraw the rest of the buffer from the cursor onwards. This is necessary
* for inserting text into the buffer.
*/
private void drawBuffer() throws IOException {
    // Redraw with nothing extra to clear.
    drawBuffer(0);
}
/**
* Clear ahead the specified number of characters without moving the cursor.
*
* @param num the number of characters to clear
* @param delta the difference between the internal cursor and the screen
* cursor - if > 0, assume some stuff was printed and weird wrap has to be
* checked
*/
private void clearAhead(final int num, int delta) throws IOException {
    if (num == 0) {
        return;
    }
    if (terminal.isAnsiSupported()) {
        int width = terminal.getWidth();
        int screenCursorCol = getCursorPosition() + delta;
        // clear current line
        printAnsiSequence("K");
        // if cursor+num wraps, then we need to clear the line(s) below too
        int curCol = screenCursorCol % width;
        int endCol = (screenCursorCol + num - 1) % width;
        int lines = num / width;
        if (endCol < curCol) lines++;
        for (int i = 0; i < lines; i++) {
            printAnsiSequence("B"); // cursor down
            printAnsiSequence("2K"); // erase entire line
        }
        for (int i = 0; i < lines; i++) {
            printAnsiSequence("A"); // cursor back up
        }
        return;
    }
    // Non-ANSI fallback: print blank extra characters
    print(' ', num);
    // we need to flush here so a "clever" console doesn't just ignore the redundancy
    // of a space followed by a backspace.
    // flush();
    // reset the visual cursor
    back(num);
    // flush();
}
/**
* Move the visual cursor backwards without modifying the buffer cursor.
*/
protected void back(final int num) throws IOException {
    if (num == 0) return;
    if (terminal.isAnsiSupported()) {
        // Compute how many rows up and which column to land on when the
        // line has wrapped across the terminal width.
        int width = getTerminal().getWidth();
        int cursor = getCursorPosition();
        int realCursor = cursor + num;
        int realCol = realCursor % width;
        int newCol = cursor % width;
        int moveup = num / width;
        int delta = realCol - newCol;
        if (delta < 0) moveup++;
        if (moveup > 0) {
            printAnsiSequence(moveup + "A"); // cursor up
        }
        printAnsiSequence((1 + newCol) + "G"); // absolute column (1-based)
        return;
    }
    // Non-ANSI fallback: emit raw backspaces.
    print(BACKSPACE, num);
    // flush();
}
/**
* Flush the console output stream. This is important for printout out single characters (like a backspace or
* keyboard) that we want the console to handle immediately.
*/
public void flush() throws IOException {
    // Push any buffered output through to the terminal.
    out.flush();
}
/** Backspaces over everything before the cursor; returns the count removed. */
private int backspaceAll() throws IOException {
    return backspace(Integer.MAX_VALUE);
}
/**
* Issue <em>num</em> backspaces.
*
* @return the number of characters backed up
*/
private int backspace(final int num) throws IOException {
    if (buf.cursor == 0) {
        return 0;
    }
    int count = 0;
    int termwidth = getTerminal().getWidth();
    // Remember which screen row we started on to detect line unwrap.
    int lines = getCursorPosition() / termwidth;
    // moveCursor returns a negative delta when moving left; negate for a count.
    count = moveCursor(-1 * num) * -1;
    buf.buffer.delete(buf.cursor, buf.cursor + count);
    if (getCursorPosition() / termwidth != lines) {
        if (terminal.isAnsiSupported()) {
            // debug("doing backspace redraw: " + getCursorPosition() + " on " + termwidth + ": " + lines);
            printAnsiSequence("K");
            // if cursor+num wraps, then we need to clear the line(s) below too
            // last char printed is one pos less than cursor so we subtract
            // one
            /*
            // TODO: fixme (does not work - test with reverse search with wrapping line and CTRL-E)
            int endCol = (getCursorPosition() + num - 1) % termwidth;
            int curCol = getCursorPosition() % termwidth;
            if (endCol < curCol) lines++;
            for (int i = 1; i < lines; i++) {
                printAnsiSequence("B");
                printAnsiSequence("2K");
            }
            for (int i = 1; i < lines; i++) {
                printAnsiSequence("A");
            }
            return count;
            */
        }
    }
    drawBuffer(count);
    return count;
}
/**
* Issue a backspace.
*
* @return true if successful
*/
public boolean backspace() throws IOException {
    // True iff exactly one character was removed.
    return backspace(1) == 1;
}
/** Moves the cursor to the end of the buffer; returns true if it moved. */
protected boolean moveToEnd() throws IOException {
    return moveCursor(buf.length() - buf.cursor) > 0;
}
/**
* Delete the character at the current position and redraw the remainder of the buffer.
*/
/**
 * Deletes the character under the cursor and redraws the remainder of the
 * buffer; returns false when the buffer is empty or the cursor is at the end.
 */
private boolean deleteCurrentCharacter() throws IOException {
    boolean deletable = buf.length() != 0 && buf.cursor != buf.length();
    if (deletable) {
        buf.buffer.deleteCharAt(buf.cursor);
        drawBuffer(1); // one trailing cell must be cleared
    }
    return deletable;
}
/** Moves the cursor back over delimiters, then to the start of the previous word. */
private boolean previousWord() throws IOException {
    while (isDelimiter(buf.current()) && (moveCursor(-1) != 0)) {
        // nothing
    }
    while (!isDelimiter(buf.current()) && (moveCursor(-1) != 0)) {
        // nothing
    }
    return true;
}
/** Moves the cursor forward over delimiters, then past the end of the next word. */
private boolean nextWord() throws IOException {
    while (isDelimiter(buf.nextChar()) && (moveCursor(1) != 0)) {
        // nothing
    }
    while (!isDelimiter(buf.nextChar()) && (moveCursor(1) != 0)) {
        // nothing
    }
    return true;
}
/** Deletes backwards: trailing delimiters first, then the previous word. */
private boolean deletePreviousWord() throws IOException {
    while (isDelimiter(buf.current()) && backspace()) {
        // nothing
    }
    while (!isDelimiter(buf.current()) && backspace()) {
        // nothing
    }
    return true;
}
/** Deletes forwards: leading delimiters first, then the next word. */
private boolean deleteNextWord() throws IOException {
    while (isDelimiter(buf.nextChar()) && delete()) {
        // nothing
    }
    while (!isDelimiter(buf.nextChar()) && delete()) {
        // nothing
    }
    return true;
}
/** Capitalizes the word at/after the cursor: first letter upper, rest lower. */
private boolean capitalizeWord() throws IOException {
    boolean first = true;
    int i = 1;
    char c;
    // Walk until a delimiter or end of buffer, rewriting case in place.
    while (buf.cursor + i - 1 < buf.length() && !isDelimiter((c = buf.buffer.charAt(buf.cursor + i - 1)))) {
        buf.buffer.setCharAt(buf.cursor + i - 1, first ? Character.toUpperCase(c) : Character.toLowerCase(c));
        first = false;
        i++;
    }
    drawBuffer();
    moveCursor(i - 1); // leave the cursor just past the word
    return true;
}
/** Upper-cases the word at/after the cursor and advances the cursor past it. */
private boolean upCaseWord() throws IOException {
    int offset = 1;
    while (buf.cursor + offset - 1 < buf.length()) {
        char ch = buf.buffer.charAt(buf.cursor + offset - 1);
        if (isDelimiter(ch)) {
            break;
        }
        buf.buffer.setCharAt(buf.cursor + offset - 1, Character.toUpperCase(ch));
        offset++;
    }
    drawBuffer();
    moveCursor(offset - 1);
    return true;
}
private boolean downCaseWord() throws IOException {
int i = 1;
char c;
while (buf.cursor + i - 1 < buf.length() && !isDelimiter((c = buf.buffer.charAt(buf.cursor + i - 1)))) {
buf.buffer.setCharAt(buf.cursor + i - 1, Character.toLowerCase(c));
i++;
}
drawBuffer();
moveCursor(i - 1);
return true;
}
/**
 * Swap the character before the cursor with the character under the
 * cursor, then advance the cursor one position (readline's C-t).
 *
 * @return true if the transpose happened; false at the very beginning or
 *         very end of the line where there is no pair to swap
 * @throws IOException on terminal write failure
 */
private boolean transposeChars() throws IOException {
    int pos = buf.cursor;
    if (pos == 0 || pos == buf.buffer.length()) {
        return false;
    }
    // Swap buffer[pos-1] and buffer[pos].
    char previous = buf.buffer.charAt(pos - 1);
    buf.buffer.setCharAt(pos - 1, buf.buffer.charAt(pos));
    buf.buffer.setCharAt(pos, previous);
    // Step back to redraw both characters, then land one past the pair.
    moveInternal(-1);
    drawBuffer();
    moveInternal(2);
    return true;
}
/**
 * Check whether the currently active keymap is the one registered under
 * the given name.
 *
 * @param name the keymap name to test against (e.g. "emacs", "vi-insert")
 * @return true if the named keymap exists and is the active one
 */
public boolean isKeyMap(String name) {
    KeyMap named = consoleKeys.getKeyMaps().get(name);
    // Identity comparison is the only available check: the KeyMap API does
    // not expose the active map's name, so compare object references.
    return named != null && named == consoleKeys.getKeys();
}
/**
 * The equivalent of hitting <RET>: move to end of line, emit a
 * newline, and hand back the finished buffer contents.
 *
 * @return the completed line of text
 * @throws IOException on terminal write failure
 */
public String accept() throws IOException {
    moveToEnd();
    println(); // output newline
    flush();
    return finishBuffer();
}
/**
 * Move the cursor by {@code num} positions, clamped to the buffer bounds.
 *
 * @param num negative to move left by abs(num), positive to move right
 * @return the number of positions actually moved (may be less than
 *         requested when the cursor hits either end of the buffer)
 */
public int moveCursor(final int num) throws IOException {
    // Already pinned at the edge we are being pushed towards: no movement.
    if ((buf.cursor == 0 && num <= 0)
            || (buf.cursor == buf.buffer.length() && num >= 0)) {
        return 0;
    }
    // Clamp the requested distance to the buffer boundaries.
    int distance = num;
    if (buf.cursor + distance < 0) {
        distance = -buf.cursor;
    }
    else if (buf.cursor + distance > buf.buffer.length()) {
        distance = buf.buffer.length() - buf.cursor;
    }
    moveInternal(distance);
    return distance;
}
/**
 * Move the cursor <i>where</i> characters, without checking the current buffer.
 *
 * Two strategies, chosen by terminal capability:
 * - ANSI terminals: compute the target row/column from the cursor position
 *   and terminal width and emit cursor-movement escape sequences.
 * - Non-ANSI terminals: move left by emitting backspaces, or move right by
 *   re-printing the characters being skipped over (or the mask character).
 *
 * @param where the number of characters to move to the right or left.
 */
private void moveInternal(final int where) throws IOException {
// debug ("move cursor " + where + " ("
// + buf.cursor + " => " + (buf.cursor + where) + ")");
buf.cursor += where;
if (terminal.isAnsiSupported()) {
if (where < 0) {
back(Math.abs(where));
} else {
int width = getTerminal().getWidth();
int cursor = getCursorPosition();
// Row before and after the move, assuming lines wrap at 'width'.
int oldLine = (cursor - where) / width;
int newLine = cursor / width;
if (newLine > oldLine) {
if (terminal.hasWeirdWrap()) {
// scroll up if at bottom
// note:
// on rxvt cywgin terminal.getHeight() is incorrect
// MacOs xterm does not seem to support scrolling
if (getCurrentAnsiRow() == terminal.getHeight()) {
printAnsiSequence((newLine - oldLine) + "S");
}
}
// CSI n B: move the cursor down n rows.
printAnsiSequence((newLine - oldLine) + "B");
}
// CSI n G: jump to absolute column (1-based) within the row.
printAnsiSequence(1 +(cursor % width) + "G");
}
// flush();
return;
}
char c;
if (where < 0) {
// Moving left: emit one backspace per display column. Tabs occupy
// TAB_WIDTH columns, everything else one.
int len = 0;
for (int i = buf.cursor; i < buf.cursor - where; i++) {
if (buf.buffer.charAt(i) == '\t') {
len += TAB_WIDTH;
}
else {
len++;
}
}
char chars[] = new char[len];
Arrays.fill(chars, BACKSPACE);
out.write(chars);
return;
}
else if (buf.cursor == 0) {
return;
}
else if (mask != null) {
// Masked input: re-print the mask character instead of the real text.
c = mask;
}
else {
// Moving right without a mask: re-print the skipped-over characters.
print(buf.buffer.substring(buf.cursor - where, buf.cursor).toCharArray());
return;
}
// null character mask: don't output anything
if (mask == NULL_MASK) {
return;
}
print(c, Math.abs(where));
}
// FIXME: replace() is not used
/**
 * Replace the {@code num} characters immediately before the cursor with
 * {@code replacement}, redraw, and leave the cursor after the new text.
 *
 * @param num the number of characters before the cursor to replace
 * @param replacement the text to substitute
 * @return true on success, false if a terminal I/O error occurred
 */
public final boolean replace(final int num, final String replacement) {
    buf.buffer.replace(buf.cursor - num, buf.cursor, replacement);
    try {
        moveCursor(-num);
        // Clear trailing columns if the replacement is shorter than the original.
        drawBuffer(Math.max(0, num - replacement.length()));
        moveCursor(replacement.length());
    }
    catch (IOException e) {
        // Route through the library logger instead of printStackTrace(),
        // consistent with the error handling used elsewhere (e.g. paste()).
        Log.error("Failed to replace text: ", e);
        return false;
    }
    return true;
}
/**
 * Read a single character from the console.
 *
 * @return the character read, or -1 on EOF
 */
public final int readCharacter() throws IOException {
    int ch = reader.read();
    if (ch >= 0) {
        Log.trace("Keystroke: ", ch);
        // clear any echo characters
        clearEcho(ch);
    }
    return ch;
}

/**
 * Erase whatever the terminal echoed for the given character code and
 * redraw the buffer over it.
 *
 * @return the number of echoed columns cleared (0 when echo is off)
 */
private int clearEcho(final int c) throws IOException {
    // Nothing to do when the terminal is not echoing keystrokes.
    if (!terminal.isEchoEnabled()) {
        return 0;
    }
    int num = countEchoCharacters(c);
    back(num);
    drawBuffer(num);
    return num;
}
/**
 * Number of display columns the terminal echoed for the given character.
 * Tabs depend on the current cursor column; everything else uses the
 * printable representation's length.
 */
private int countEchoCharacters(final int c) {
    if (c == 9) { // tab: distance to the next 8-column tab stop
        int tabStop = 8; // will this ever be different?
        int position = getCursorPosition();
        return tabStop - (position % tabStop);
    }
    return getPrintableCharacters(c).length();
}
/**
 * Return the characters that will be printed when the specified
 * character is echoed to the screen: the character itself for plain
 * printables, caret notation (^X, ^?) for controls, and M- prefixes for
 * the high half.
 *
 * Adapted from cat by Torbjorn Granlund, as repeated in stty by David MacKenzie.
 *
 * Fix: the plain-printable branch previously did {@code sbuff.append(ch)}
 * with an {@code int}, which appends the decimal digits of the code point
 * (e.g. "97" for 'a') rather than the character itself, making
 * countEchoCharacters() report the wrong width. It now casts to char.
 */
private StringBuilder getPrintableCharacters(final int ch) {
    StringBuilder sbuff = new StringBuilder();
    if (ch >= 32) {
        if (ch < 127) {
            // Plain printable character: echoes as itself (one column).
            sbuff.append((char) ch);
        }
        else if (ch == 127) {
            // DEL echoes as ^?
            sbuff.append('^');
            sbuff.append('?');
        }
        else {
            // High half (>= 128): meta prefix M- plus the low-half form.
            sbuff.append('M');
            sbuff.append('-');
            if (ch >= (128 + 32)) {
                if (ch < (128 + 127)) {
                    sbuff.append((char) (ch - 128));
                }
                else {
                    sbuff.append('^');
                    sbuff.append('?');
                }
            }
            else {
                sbuff.append('^');
                sbuff.append((char) (ch - 128 + 64));
            }
        }
    }
    else {
        // Control character (< 32): caret notation, e.g. 1 -> ^A.
        sbuff.append('^');
        sbuff.append((char) (ch + 64));
    }
    return sbuff;
}
/**
 * Read characters until one of the {@code allowed} characters is typed,
 * and return it. Characters outside the allowed set are silently ignored.
 *
 * @param allowed the set of acceptable characters
 * @return the first typed character that is in the allowed set
 */
public final int readCharacter(final char... allowed) throws IOException {
    // Sort a defensive copy: Arrays.sort on the parameter would mutate the
    // caller's array when one is passed explicitly.
    final char[] sorted = allowed.clone();
    Arrays.sort(sorted); // binarySearch requires a sorted array
    char c;
    while (Arrays.binarySearch(sorted, c = (char) readCharacter()) < 0) {
        // keep reading until an allowed character arrives
    }
    return c;
}
//
// Key Bindings
//
public static final String JLINE_COMPLETION_THRESHOLD = "jline.completion.threshold";
//
// Line Reading
//
/**
 * Read the next line with no explicit prompt and no mask.
 */
public String readLine() throws IOException {
    return readLine((String) null);
}

/**
 * Read the next line with the given character mask. A null mask echoes
 * characters normally; a mask of 0 echoes nothing.
 */
public String readLine(final Character mask) throws IOException {
    return readLine(null, mask);
}

/**
 * Read the next line using the given prompt and no mask.
 */
public String readLine(final String prompt) throws IOException {
    return readLine(prompt, null);
}
/**
 * Switch the active keymap by name. Supported keymaps are "emacs",
 * "vi-insert", "vi-move".
 *
 * @param name the name of the keymap to switch to
 * @return true if the keymap was set, or false if the name is not
 *         recognized
 */
public boolean setKeyMap(String name) {
    KeyMap map = consoleKeys.getKeyMaps().get(name);
    if (map == null) {
        return false;
    }
    consoleKeys.setKeys(map);
    return true;
}
/**
 * Read a line from the <i>in</i> {@link InputStream}, and return the line
 * (without any trailing newlines).
 *
 * This is the main interactive entry point. It runs a key-binding state
 * machine: bytes are accumulated into a key sequence, resolved against the
 * active keymap, and dispatched either in NORMAL mode or in incremental
 * reverse-search (SEARCH) mode, until an ACCEPT_LINE operation or EOF.
 *
 * @param prompt The prompt to issue to the console, may be null.
 * @param mask   the echo mask; null echoes normally, 0 echoes nothing
 * @return A line that is read from the terminal, or null if there was null input (e.g., <i>CTRL-D</i>
 * was pressed).
 */
public String readLine(String prompt, final Character mask) throws IOException {
// prompt may be null
// mask may be null
// FIXME: This blows, each call to readLine will reset the console's state which doesn't seem very nice.
this.mask = mask;
if (prompt != null) {
setPrompt(prompt);
}
else {
prompt = getPrompt();
}
try {
if (!terminal.isSupported()) {
beforeReadLine(prompt, mask);
}
if (prompt != null && prompt.length() > 0) {
out.write(prompt);
out.flush();
}
// if the terminal is unsupported, just use plain-java reading
if (!terminal.isSupported()) {
return readLineSimple();
}
String originalPrompt = this.prompt;
final int NORMAL = 1;
final int SEARCH = 2;
int state = NORMAL;
boolean success = true;
// sb accumulates the multi-byte key sequence typed so far; pushBackChar
// holds characters to be re-processed (stored in reverse order).
StringBuilder sb = new StringBuilder();
List<Character> pushBackChar = new ArrayList<Character>();
while (true) {
int c = pushBackChar.isEmpty() ? readCharacter() : pushBackChar.remove( pushBackChar.size() - 1 );
if (c == -1) {
return null;
}
sb.append( (char) c );
if (recording) {
macro += (char) c;
}
// Resolve the accumulated sequence against the active keymap.
Object o = getKeys().getBound( sb );
if (o == Operation.DO_LOWERCASE_VERSION) {
sb.setLength( sb.length() - 1);
sb.append( Character.toLowerCase( (char) c ));
o = getKeys().getBound( sb );
}
if ( o instanceof KeyMap ) {
// Prefix of a longer binding: keep reading characters.
continue;
}
// Unbound sequence: peel characters off the end looking for the
// longest bound prefix, pushing the rest back for re-processing.
while ( o == null && sb.length() > 0 ) {
c = sb.charAt( sb.length() - 1 );
sb.setLength( sb.length() - 1 );
Object o2 = getKeys().getBound( sb );
if ( o2 instanceof KeyMap ) {
o = ((KeyMap) o2).getAnotherKey();
if ( o == null ) {
continue;
} else {
pushBackChar.add( (char) c );
}
}
}
if ( o == null ) {
continue;
}
Log.trace("Binding: ", o);
// Handle macros
if (o instanceof String) {
String macro = (String) o;
for (int i = 0; i < macro.length(); i++) {
pushBackChar.add(macro.charAt(macro.length() - 1 - i));
}
sb.setLength( 0 );
continue;
}
// Handle custom callbacks
if (o instanceof ActionListener) {
((ActionListener) o).actionPerformed(null);
sb.setLength( 0 );
continue;
}
// Search mode.
//
// Note that we have to do this first, because if there is a command
// not linked to a search command, we leave the search mode and fall
// through to the normal state.
if (state == SEARCH) {
int cursorDest = -1;
switch ( ((Operation) o )) {
case ABORT:
state = NORMAL;
break;
case REVERSE_SEARCH_HISTORY:
if (searchTerm.length() == 0) {
searchTerm.append(previousSearchTerm);
}
if (searchIndex == -1) {
searchIndex = searchBackwards(searchTerm.toString());
} else {
searchIndex = searchBackwards(searchTerm.toString(), searchIndex);
}
break;
case BACKWARD_DELETE_CHAR:
if (searchTerm.length() > 0) {
searchTerm.deleteCharAt(searchTerm.length() - 1);
searchIndex = searchBackwards(searchTerm.toString());
}
break;
case SELF_INSERT:
searchTerm.appendCodePoint(c);
searchIndex = searchBackwards(searchTerm.toString());
break;
default:
// Set buffer and cursor position to the found string.
if (searchIndex != -1) {
history.moveTo(searchIndex);
// set cursor position to the found string
cursorDest = history.current().toString().indexOf(searchTerm.toString());
}
state = NORMAL;
break;
}
// if we're still in search mode, print the search status
if (state == SEARCH) {
if (searchTerm.length() == 0) {
printSearchStatus("", "");
searchIndex = -1;
} else {
if (searchIndex == -1) {
beep();
} else {
printSearchStatus(searchTerm.toString(), history.get(searchIndex).toString());
}
}
}
// otherwise, restore the line
else {
restoreLine(originalPrompt, cursorDest);
}
}
// NOTE: a command that exits SEARCH mode above falls through here and
// is also dispatched as a normal editing operation.
if (state == NORMAL) {
if ( o instanceof Operation) {
switch ( ((Operation) o )) {
case COMPLETE: // tab
success = complete();
break;
case POSSIBLE_COMPLETIONS:
printCompletionCandidates();
success = true;
break;
case BEGINNING_OF_LINE:
success = setCursorPosition(0);
break;
case KILL_LINE: // CTRL-K
success = killLine();
break;
case KILL_WHOLE_LINE:
success = setCursorPosition(0) && killLine();
break;
case CLEAR_SCREEN: // CTRL-L
success = clearScreen();
break;
case OVERWRITE_MODE:
buf.setOverTyping(!buf.isOverTyping());
break;
case SELF_INSERT:
putString( sb );
success = true;
break;
case ACCEPT_LINE:
return accept();
/*
 * VI_MOVE_ACCEPT_LINE is the result of an ENTER
 * while in move mode. This is the same as a normal
 * ACCEPT_LINE, except that we need to enter
 * insert mode as well.
 */
case VI_MOVE_ACCEPT_LINE:
consoleKeys.setKeys(
consoleKeys.getKeyMaps().get(KeyMap.VI_INSERT));
return accept();
case BACKWARD_WORD:
success = previousWord();
break;
case FORWARD_WORD:
success = nextWord();
break;
case PREVIOUS_HISTORY:
success = moveHistory(false);
break;
/*
 * According to bash/readline move through history
 * in "vi" mode will move the cursor to the
 * start of the line. If there is no previous
 * history, then the cursor doesn't move.
 */
case VI_PREVIOUS_HISTORY:
success = moveHistory(false)
&& setCursorPosition(0);
break;
case NEXT_HISTORY:
success = moveHistory(true);
break;
/*
 * According to bash/readline move through history
 * in "vi" mode will move the cursor to the
 * start of the line. If there is no next history,
 * then the cursor doesn't move.
 */
case VI_NEXT_HISTORY:
success = moveHistory(true)
&& setCursorPosition(0);
break;
case BACKWARD_DELETE_CHAR: // backspace
success = backspace();
break;
case EXIT_OR_DELETE_CHAR:
if (buf.buffer.length() == 0) {
return null;
}
success = deleteCurrentCharacter();
break;
case DELETE_CHAR: // delete
success = deleteCurrentCharacter();
break;
case BACKWARD_CHAR:
success = moveCursor(-1) != 0;
break;
case FORWARD_CHAR:
success = moveCursor(1) != 0;
break;
case UNIX_LINE_DISCARD:
success = resetLine();
break;
case UNIX_WORD_RUBOUT:
case BACKWARD_KILL_WORD:
// in theory, those are slightly different
success = deletePreviousWord();
break;
case KILL_WORD:
success = deleteNextWord();
break;
case BEGINNING_OF_HISTORY:
success = history.moveToFirst();
if (success) {
setBuffer(history.current());
}
break;
case END_OF_HISTORY:
success = history.moveToLast();
if (success) {
setBuffer(history.current());
}
break;
case REVERSE_SEARCH_HISTORY:
// Enter incremental search mode, seeded with the current buffer.
if (searchTerm != null) {
previousSearchTerm = searchTerm.toString();
}
searchTerm = new StringBuffer(buf.buffer);
state = SEARCH;
if (searchTerm.length() > 0) {
searchIndex = searchBackwards(searchTerm.toString());
if (searchIndex == -1) {
beep();
}
printSearchStatus(searchTerm.toString(),
searchIndex > -1 ? history.get(searchIndex).toString() : "");
} else {
searchIndex = -1;
printSearchStatus("", "");
}
break;
case CAPITALIZE_WORD:
success = capitalizeWord();
break;
case UPCASE_WORD:
success = upCaseWord();
break;
case DOWNCASE_WORD:
success = downCaseWord();
break;
case END_OF_LINE:
success = moveToEnd();
break;
case TAB_INSERT:
putString( "\t" );
success = true;
break;
case RE_READ_INIT_FILE:
consoleKeys.loadKeys(appName, inputrcUrl);
success = true;
break;
case START_KBD_MACRO:
recording = true;
break;
case END_KBD_MACRO:
recording = false;
// Trim the END_KBD_MACRO keystrokes themselves from the recording.
macro = macro.substring(0, macro.length() - sb.length());
break;
case CALL_LAST_KBD_MACRO:
// Replay the macro by pushing its characters back (reversed,
// since pushBackChar is consumed from the end).
for (int i = 0; i < macro.length(); i++) {
pushBackChar.add(macro.charAt(macro.length() - 1 - i));
}
sb.setLength( 0 );
break;
case VI_EDITING_MODE:
consoleKeys.setViEditMode(true);
consoleKeys.setKeys(consoleKeys.getKeyMaps()
.get(KeyMap.VI_INSERT));
break;
case VI_MOVEMENT_MODE:
moveCursor(-1);
consoleKeys.setKeys(
consoleKeys.getKeyMaps().get(KeyMap.VI_MOVE));
success = true;
break;
case VI_INSERTION_MODE:
consoleKeys.setKeys(consoleKeys.getKeyMaps()
.get(KeyMap.VI_INSERT));
success = true;
break;
case VI_APPEND_MODE:
moveCursor(1);
consoleKeys.setKeys(
consoleKeys.getKeyMaps().get(KeyMap.VI_INSERT));
success = true;
break;
case VI_APPEND_EOL:
success = moveToEnd();
consoleKeys.setKeys (
consoleKeys.getKeyMaps().get(KeyMap.VI_INSERT));
break;
/*
 * Handler for CTRL-D. Attempts to follow readline
 * behavior. If the line is empty, then it is an EOF
 * otherwise it is as if the user hit enter.
 */
case VI_EOF_MAYBE:
if (buf.buffer.length() == 0) {
return null;
}
return accept();
case TRANSPOSE_CHARS:
success = transposeChars ();
break;
case EMACS_EDITING_MODE:
consoleKeys.setViEditMode(false);
consoleKeys.setKeys(
consoleKeys.getKeyMaps().get(KeyMap.EMACS));
success = true;
break;
default:
int i = 0;
break;
}
}
}
if (!success) {
beep();
}
sb.setLength( 0 );
flush();
}
}
finally {
if (!terminal.isSupported()) {
afterReadLine();
}
}
}
/**
 * Fallback line reader for unsupported terminals: plain byte-at-a-time
 * reading with CR/LF/CRLF handling and no editing.
 */
private String readLineSimple() throws IOException {
    StringBuilder buff = new StringBuilder();
    // A previous read ended on '\r'; swallow the '\n' half of a CRLF pair.
    if (skipLF) {
        skipLF = false;
        int i = readCharacter();
        if (i == -1 || i == '\r') {
            return buff.toString();
        }
        if (i != '\n') {
            buff.append((char) i);
        }
    }
    for (;;) {
        int i = readCharacter();
        if (i == -1 || i == '\n') {
            return buff.toString();
        }
        if (i == '\r') {
            // Remember to skip a following '\n' on the next call.
            skipLF = true;
            return buff.toString();
        }
        buff.append((char) i);
    }
}
//
// Completion
//
private final List<Completer> completers = new LinkedList<Completer>();
private CompletionHandler completionHandler = new CandidateListCompletionHandler();
/**
 * Add the specified {@link jline.console.completer.Completer} to the list of handlers for tab-completion.
 *
 * @param completer the {@link jline.console.completer.Completer} to add
 * @return true if it was successfully added
 */
public boolean addCompleter(final Completer completer) {
    return completers.add(completer);
}

/**
 * Remove the specified {@link jline.console.completer.Completer} from the list of handlers for tab-completion.
 *
 * @param completer The {@link Completer} to remove
 * @return True if it was successfully removed
 */
public boolean removeCompleter(final Completer completer) {
    return completers.remove(completer);
}

/**
 * Returns an unmodifiable list of all the completers.
 */
public Collection<Completer> getCompleters() {
    return Collections.unmodifiableList(completers);
}

/**
 * Set the handler invoked when tab-completion produces candidates.
 *
 * @param handler the handler, never null
 * @throws NullPointerException if handler is null
 */
public void setCompletionHandler(final CompletionHandler handler) {
    // Explicit check instead of 'assert': asserts are no-ops unless the
    // JVM runs with -ea, which would let a null slip through to complete().
    if (handler == null) {
        throw new NullPointerException("handler must not be null");
    }
    this.completionHandler = handler;
}

/**
 * Returns the current tab-completion handler.
 */
public CompletionHandler getCompletionHandler() {
    return this.completionHandler;
}
/**
 * Use the registered completers to modify the buffer with the appropriate
 * completions. The first completer that produces a position wins.
 *
 * @return true if completion candidates were found and the handler applied them
 */
protected boolean complete() throws IOException {
    if (completers.isEmpty()) {
        return false;
    }
    List<CharSequence> candidates = new LinkedList<CharSequence>();
    String bufstr = buf.buffer.toString();
    int cursor = buf.cursor;
    int position = -1;
    for (Completer comp : completers) {
        position = comp.complete(bufstr, cursor, candidates);
        if (position != -1) {
            break;
        }
    }
    if (candidates.isEmpty()) {
        return false;
    }
    return getCompletionHandler().complete(this, candidates, position);
}
/**
 * Print all completion candidates for the current buffer position without
 * modifying the buffer, then redraw the line.
 */
protected void printCompletionCandidates() throws IOException {
    if (completers.isEmpty()) {
        return;
    }
    List<CharSequence> candidates = new LinkedList<CharSequence>();
    String bufstr = buf.buffer.toString();
    int cursor = buf.cursor;
    // First completer that matches supplies the candidates.
    for (Completer comp : completers) {
        if (comp.complete(bufstr, cursor, candidates) != -1) {
            break;
        }
    }
    CandidateListCompletionHandler.printCandidates(this, candidates);
    drawLine();
}
/**
 * The number of tab-completion candidates above which a warning will be
 * prompted before showing all the candidates.
 */
private int autoprintThreshold = Integer.getInteger(JLINE_COMPLETION_THRESHOLD, 100); // same default as bash

/**
 * @param threshold the number of candidates to print without issuing a warning.
 */
public void setAutoprintThreshold(final int threshold) {
    this.autoprintThreshold = threshold;
}

/**
 * @return the number of candidates to print without issuing a warning.
 */
public int getAutoprintThreshold() {
    return this.autoprintThreshold;
}

// When true, candidate listings longer than the terminal are paged.
private boolean paginationEnabled;

/**
 * Enable or disable pagination when the number of rows of candidates
 * exceeds the height of the terminal.
 */
public void setPaginationEnabled(final boolean enabled) {
    this.paginationEnabled = enabled;
}

/**
 * @return whether pagination is used when the candidate rows exceed the
 *         terminal height.
 */
public boolean isPaginationEnabled() {
    return this.paginationEnabled;
}
//
// History
//
// Command history backing store; defaults to in-memory only.
private History history = new MemoryHistory();

/** Replace the history backing store. */
public void setHistory(final History history) {
    this.history = history;
}

/** @return the history backing store. */
public History getHistory() {
    return this.history;
}

private boolean historyEnabled = true;

/**
 * Whether or not to add new commands to the history buffer.
 */
public void setHistoryEnabled(final boolean enabled) {
    this.historyEnabled = enabled;
}

/**
 * Whether or not to add new commands to the history buffer.
 */
public boolean isHistoryEnabled() {
    return this.historyEnabled;
}

/**
 * Step forward or backward through history and load the entry into the
 * edit buffer.
 *
 * @param next true to move towards newer entries, false towards older
 * @return false if there is no entry in that direction
 */
private boolean moveHistory(final boolean next) throws IOException {
    boolean moved = next ? history.next() : history.previous();
    if (!moved) {
        return false;
    }
    setBuffer(history.current());
    return true;
}
//
// Printing
//
public static final String CR = System.getProperty("line.separator");
/**
 * Output the specified character to the output stream without manipulating
 * the current buffer. Tabs are expanded to TAB_WIDTH spaces.
 */
private void print(final int c) throws IOException {
    if (c == '\t') {
        char[] spaces = new char[TAB_WIDTH];
        Arrays.fill(spaces, ' ');
        out.write(spaces);
    }
    else {
        out.write(c);
    }
}

/**
 * Output the specified characters to the output stream without
 * manipulating the current buffer. Tabs are expanded to TAB_WIDTH spaces.
 */
private void print(final char... buff) throws IOException {
    // First pass: compute the expanded width.
    int len = 0;
    for (char c : buff) {
        len += (c == '\t') ? TAB_WIDTH : 1;
    }
    char[] chars;
    if (len == buff.length) {
        // No tabs present: write the input array as-is.
        chars = buff;
    }
    else {
        // Second pass: copy, expanding each tab into spaces.
        chars = new char[len];
        int pos = 0;
        for (char c : buff) {
            if (c == '\t') {
                Arrays.fill(chars, pos, pos + TAB_WIDTH, ' ');
                pos += TAB_WIDTH;
            }
            else {
                chars[pos++] = c;
            }
        }
    }
    out.write(chars);
}
/**
 * Output the given character {@code num} times.
 */
private void print(final char c, final int num) throws IOException {
    if (num == 1) {
        print(c);
        return;
    }
    char[] chars = new char[num];
    Arrays.fill(chars, c);
    print(chars);
}

/**
 * Output the specified string to the output stream (but not the buffer).
 */
public final void print(final CharSequence s) throws IOException {
    assert s != null;
    print(s.toString().toCharArray());
}

/**
 * Output the specified string followed by a platform newline.
 */
public final void println(final CharSequence s) throws IOException {
    assert s != null;
    print(s.toString().toCharArray());
    println();
}

/**
 * Output a platform-dependant newline.
 */
public final void println() throws IOException {
    print(CR);
    // flush();
}
//
// Actions
//
/**
 * Issue a delete.
 *
 * @return true if successful
 */
public final boolean delete() throws IOException {
    return delete(1) == 1;
}

// FIXME: delete(int) only used by above + the return is always 1 and num is ignored
/**
 * Issue <em>num</em> deletes.
 *
 * NOTE: despite the name and parameter, this deletes exactly one
 * character at the cursor and always returns 1 (see FIXME above).
 * If the cursor is at the end of the buffer, StringBuilder.delete is a
 * no-op, but drawBuffer(1) is still called.
 *
 * @return the number of characters backed up
 */
private int delete(final int num) throws IOException {
    // TODO: Try to use jansi for this
    /* Commented out because of DWA-2949:
    if (buf.cursor == 0) {
        return 0;
    }
    */
    buf.buffer.delete(buf.cursor, buf.cursor + 1);
    drawBuffer(1);
    return 1;
}
/**
 * Kill the buffer ahead of the current cursor position (readline's C-k).
 *
 * @return true if any text was killed; false when the cursor is already
 *         at (or past) the end of the buffer
 */
public boolean killLine() throws IOException {
    int cp = buf.cursor;
    int len = buf.buffer.length();
    if (cp >= len) {
        return false;
    }
    int num = len - cp;
    // Blank the killed region on screen first.
    clearAhead(num, 0);
    // Remove the region in one range delete instead of the previous
    // one-deleteCharAt-per-character loop (equivalent result, O(n) total).
    buf.buffer.delete(cp, len);
    return true;
}
/**
 * Clear the screen by issuing the ANSI "clear screen" code, home the
 * cursor, and repaint the current line.
 *
 * @return false when the terminal does not understand ANSI sequences
 */
public boolean clearScreen() throws IOException {
    if (!terminal.isAnsiSupported()) {
        return false;
    }
    printAnsiSequence("2J");   // erase entire display
    printAnsiSequence("1;1H"); // home the cursor at row 1, column 1
    redrawLine();
    return true;
}

/**
 * Issue an audible keyboard bell, unless bells are disabled via the
 * jline.nobell system property.
 */
public void beep() throws IOException {
    if (!Configuration.getBoolean(JLINE_NOBELL, true)) {
        print(KEYBOARD_BELL);
        // need to flush so the console actually beeps
        flush();
    }
}
/**
 * Paste the contents of the clipboard into the console buffer
 *
 * Best-effort: on headless systems or systems without X the AWT clipboard
 * may be unavailable; every failure path returns false rather than throwing.
 *
 * @return true if clipboard contents pasted
 */
public boolean paste() throws IOException {
    Clipboard clipboard;
    try { // May throw ugly exception on system without X
        clipboard = Toolkit.getDefaultToolkit().getSystemClipboard();
    }
    catch (Exception e) {
        return false;
    }
    if (clipboard == null) {
        return false;
    }
    Transferable transferable = clipboard.getContents(null);
    if (transferable == null) {
        return false;
    }
    try {
        Object content = transferable.getTransferData(DataFlavor.plainTextFlavor);
        // This fix was suggested in bug #1060649 at
        // http://sourceforge.net/tracker/index.php?func=detail&aid=1060649&group_id=64033&atid=506056
        // to get around the deprecated DataFlavor.plainTextFlavor, but it
        // raises a UnsupportedFlavorException on Mac OS X
        if (content == null) {
            try {
                content = new DataFlavor().getReaderForText(transferable);
            }
            catch (Exception e) {
                // ignore
            }
        }
        if (content == null) {
            return false;
        }
        String value;
        if (content instanceof Reader) {
            // TODO: we might want instead connect to the input stream
            // so we can interpret individual lines
            // Join the reader's lines back together with '\n'.
            value = "";
            String line;
            BufferedReader read = new BufferedReader((Reader) content);
            while ((line = read.readLine()) != null) {
                if (value.length() > 0) {
                    value += "\n";
                }
                value += line;
            }
        }
        else {
            value = content.toString();
        }
        if (value == null) {
            return true;
        }
        putString(value);
        return true;
    }
    catch (UnsupportedFlavorException e) {
        Log.error("Paste failed: ", e);
        return false;
    }
}
//
// Triggered Actions
//
// Character -> callback map consulted during input pre-processing.
private final Map<Character, ActionListener> triggeredActions = new HashMap<Character, ActionListener>();

/**
 * Adding a triggered Action allows to give another course of action if a
 * character passed the pre-processing.
 * <p/>
 * Say you want to close the application if the user enters q.
 * addTriggerAction('q', new ActionListener(){ System.exit(0); }); would do the trick.
 */
public void addTriggeredAction(final char c, final ActionListener listener) {
    triggeredActions.put(c, listener);
}
//
// Formatted Output
//
/**
 * Output the specified {@link Collection} in proper columns.
 *
 * Items are padded to the widest item plus 3 and packed left-to-right into
 * rows no wider than the terminal. When pagination is enabled, output
 * pauses every (height - 1) lines with a "more" prompt: ENTER advances one
 * line, 'q' cancels, any other key shows the next page.
 */
public void printColumns(final Collection<? extends CharSequence> items) throws IOException {
    if (items == null || items.isEmpty()) {
        return;
    }
    int width = getTerminal().getWidth();
    int height = getTerminal().getHeight();
    // Column width = widest item + 3 separator spaces.
    int maxWidth = 0;
    for (CharSequence item : items) {
        maxWidth = Math.max(maxWidth, item.length());
    }
    maxWidth = maxWidth + 3;
    Log.debug("Max width: ", maxWidth);
    // Lines remaining before the next pagination pause.
    int showLines;
    if (isPaginationEnabled()) {
        showLines = height - 1; // page limit
    }
    else {
        showLines = Integer.MAX_VALUE;
    }
    StringBuilder buff = new StringBuilder();
    for (CharSequence item : items) {
        // Row is full: flush it, then maybe pause for pagination.
        if ((buff.length() + maxWidth) > width) {
            println(buff);
            buff.setLength(0);
            if (--showLines == 0) {
                // Overflow
                print(resources.getString("DISPLAY_MORE"));
                flush();
                int c = readCharacter();
                if (c == '\r' || c == '\n') {
                    // one step forward
                    showLines = 1;
                }
                else if (c != 'q') {
                    // page forward
                    showLines = height - 1;
                }
                // Erase the "more" prompt before continuing.
                back(resources.getString("DISPLAY_MORE").length());
                if (c == 'q') {
                    // cancel
                    break;
                }
            }
        }
        // NOTE: toString() is important here due to AnsiString being retarded
        buff.append(item.toString());
        // Pad to the column width.
        for (int i = 0; i < (maxWidth - item.length()); i++) {
            buff.append(' ');
        }
    }
    // Flush the final, possibly partial, row.
    if (buff.length() > 0) {
        println(buff);
    }
}
//
// Non-supported Terminal Support
//
// Background thread that repeatedly overwrites masked input on
// unsupported terminals; null when no masking is active.
private Thread maskThread;

/**
 * Masking support for terminals jline cannot control directly: spawn a
 * high-priority daemon thread that every 3ms rewrites the prompt line
 * (carriage return + prompt + blanks + carriage return + prompt) so any
 * echoed password characters are visually wiped.
 *
 * @param prompt the prompt being displayed
 * @param mask   the mask character; null means no masking, so no thread
 */
private void beforeReadLine(final String prompt, final Character mask) {
    if (mask != null && maskThread == null) {
        // "\r" returns to column 0; the spaces overwrite echoed characters.
        final String fullPrompt = "\r" + prompt
            + "                 "
            + "                 "
            + "                 "
            + "\r" + prompt;
        maskThread = new Thread()
        {
            public void run() {
                while (!interrupted()) {
                    try {
                        Writer out = getOutput();
                        out.write(fullPrompt);
                        out.flush();
                        sleep(3);
                    }
                    catch (IOException e) {
                        return;
                    }
                    catch (InterruptedException e) {
                        return;
                    }
                }
            }
        };
        maskThread.setPriority(Thread.MAX_PRIORITY);
        maskThread.setDaemon(true);
        maskThread.start();
    }
}

/**
 * Stop and discard the mask-erasing thread started by beforeReadLine.
 */
private void afterReadLine() {
    if (maskThread != null && maskThread.isAlive()) {
        maskThread.interrupt();
    }
    maskThread = null;
}
/**
 * Erases the current line with the existing prompt, then redraws the line
 * with the provided prompt and buffer
 *
 * Implementation note: the current prompt is temporarily appended to the
 * edit buffer so that backspaceAll() erases prompt and text together,
 * then the new prompt is installed and the line redrawn.
 *
 * @param prompt
 *            the new prompt
 * @param buffer
 *            the buffer to be drawn
 * @param cursorDest
 *            where you want the cursor set when the line has been drawn.
 *            -1 for end of line.
 * */
public void resetPromptLine(String prompt, String buffer, int cursorDest) throws IOException {
    // move cursor to end of line
    moveToEnd();
    // backspace all text, including prompt
    buf.buffer.append(this.prompt);
    buf.cursor += this.prompt.length();
    this.prompt = "";
    backspaceAll();
    this.prompt = prompt;
    redrawLine();
    setBuffer(buffer);
    // move cursor to destination (-1 will move to end of line)
    if (cursorDest < 0) cursorDest = buffer.length();
    setCursorPosition(cursorDest);
    flush();
}

/**
 * Display the incremental-search prompt "(reverse-i-search)`term':" with
 * the current match as the line content, cursor on the matched substring.
 */
public void printSearchStatus(String searchTerm, String match) throws IOException {
    String prompt = "(reverse-i-search)`" + searchTerm + "': ";
    String buffer = match;
    int cursorDest = match.indexOf(searchTerm);
    resetPromptLine(prompt, buffer, cursorDest);
}

/**
 * Leave search mode: restore the original prompt around the current
 * buffer contents, placing the cursor at cursorDest (-1 = end of line).
 */
public void restoreLine(String originalPrompt, int cursorDest) throws IOException {
    // TODO move cursor to matched string
    String prompt = lastLine(originalPrompt);
    String buffer = buf.buffer.toString();
    resetPromptLine(prompt, buffer, cursorDest);
}
//
// History search
//
/**
 * Search backward in history from a given position (substring match).
 *
 * @param searchTerm substring to search for.
 * @param startIndex the index from which on to search
 * @return index where this substring has been found, or -1 else.
 */
public int searchBackwards(String searchTerm, int startIndex) {
    return searchBackwards(searchTerm, startIndex, false);
}

/**
 * Search backwards in history from the current position (substring match).
 *
 * @param searchTerm substring to search for.
 * @return index where the substring has been found, or -1 else.
 */
public int searchBackwards(String searchTerm) {
    return searchBackwards(searchTerm, history.index());
}

/**
 * Search backwards in history from the given position.
 *
 * @param searchTerm the text to look for
 * @param startIndex the history index to start from
 * @param startsWith when true, match entries that start with the term;
 *                   otherwise match entries that contain it anywhere
 * @return the index of the first matching entry, or -1 when none match
 */
public int searchBackwards(String searchTerm, int startIndex, boolean startsWith) {
    ListIterator<History.Entry> it = history.entries(startIndex);
    while (it.hasPrevious()) {
        History.Entry entry = it.previous();
        String value = entry.value().toString();
        boolean matched = startsWith
                ? value.startsWith(searchTerm)
                : value.contains(searchTerm);
        if (matched) {
            return entry.index();
        }
    }
    return -1;
}
//
// Helpers
//
/**
 * Checks to see if the specified character is a word delimiter. Anything
 * that is not a letter or digit counts as a delimiter.
 *
 * @param c the character to test
 * @return true if it is a delimiter
 */
private boolean isDelimiter(final char c) {
    return !Character.isLetterOrDigit(c);
}

/**
 * Emit an ANSI control sequence: ESC '[' followed by the given body.
 */
private void printAnsiSequence(String sequence) throws IOException {
    print(27);  // ESC
    print('[');
    print(sequence);
    flush(); // helps with step debugging
}
// return column position, reported by the terminal
// Sends the DSR (device status report) query ESC[6n and parses the
// reply ESC[{ROW};{COLUMN}R read back from the input stream.
private int getCurrentPosition() {
    // check for ByteArrayInputStream to disable for unit tests
    if (terminal.isAnsiSupported() && !(in instanceof ByteArrayInputStream)) {
        try {
            printAnsiSequence("6n");
            flush();
            StringBuffer b = new StringBuffer(8);
            // position is sent as <ESC>[{ROW};{COLUMN}R
            int r;
            while((r = in.read()) > -1 && r != 'R') {
                if (r != 27 && r != '[') {
                    b.append((char) r);
                }
            }
            // b now holds "ROW;COLUMN"; take the column half.
            String[] pos = b.toString().split(";");
            return Integer.parseInt(pos[1]);
        } catch (Exception x) {
            // no luck
        }
    }
    return -1; // TODO: throw exception instead?
}

// return row position, reported by the terminal
// needed to know whether to scroll up on cursor move in last col for weird
// wrapping terminals - not tested for anything else
// Same DSR round-trip as getCurrentPosition(), but returns the row half.
private int getCurrentAnsiRow() {
    // check for ByteArrayInputStream to disable for unit tests
    if (terminal.isAnsiSupported() && !(in instanceof ByteArrayInputStream)) {
        try {
            printAnsiSequence("6n");
            flush();
            StringBuffer b = new StringBuffer(8);
            // position is sent as <ESC>[{ROW};{COLUMN}R
            int r;
            while((r = in.read()) > -1 && r != 'R') {
                if (r != 27 && r != '[') {
                    b.append((char) r);
                }
            }
            String[] pos = b.toString().split(";");
            return Integer.parseInt(pos[0]);
        } catch (Exception x) {
            // no luck
        }
    }
    return -1; // TODO: throw exception instead?
}
}
|
src/main/java/jline/console/ConsoleReader.java
|
/*
* Copyright (c) 2002-2007, Marc Prud'hommeaux. All rights reserved.
*
* This software is distributable under the BSD license. See the terms of the
* BSD license in the documentation provided with this software.
*/
package jline.console;
import java.awt.*;
import java.awt.datatransfer.Clipboard;
import java.awt.datatransfer.DataFlavor;
import java.awt.datatransfer.Transferable;
import java.awt.datatransfer.UnsupportedFlavorException;
import java.awt.event.ActionListener;
import java.io.BufferedReader;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileDescriptor;
import java.io.FileInputStream;
import java.io.FilterInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.Reader;
import java.io.Writer;
import java.net.URL;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.ListIterator;
import java.util.Map;
import java.util.ResourceBundle;
import jline.Terminal;
import jline.TerminalFactory;
import jline.console.completer.CandidateListCompletionHandler;
import jline.console.completer.Completer;
import jline.console.completer.CompletionHandler;
import jline.console.history.History;
import jline.console.history.MemoryHistory;
import jline.internal.Configuration;
import jline.internal.InputStreamReader;
import jline.internal.Log;
import org.fusesource.jansi.AnsiOutputStream;
/**
* A reader for console applications. It supports custom tab-completion,
* saveable command history, and command line editing. On some platforms,
* platform-specific commands will need to be issued before the reader will
* function properly. See {@link jline.Terminal#init} for convenience
* methods for issuing platform-specific setup commands.
*
* @author <a href="mailto:mwp1@cornell.edu">Marc Prud'hommeaux</a>
* @author <a href="mailto:jason@planet57.com">Jason Dillon</a>
* @author <a href="mailto:gnodet@gmail.com">Guillaume Nodet</a>
*/
public class ConsoleReader
{
public static final String JLINE_NOBELL = "jline.nobell";   // system property: disable the audible bell
public static final char BACKSPACE = '\b';
public static final char RESET_LINE = '\r';
public static final char KEYBOARD_BELL = '\07';             // BEL control character
public static final char NULL_MASK = 0;                     // mask value meaning "echo nothing at all"
public static final int TAB_WIDTH = 4;
private static final ResourceBundle
    resources = ResourceBundle.getBundle(CandidateListCompletionHandler.class.getName());
private final Terminal terminal;
private InputStream in;                 // wrapped one-byte-at-a-time input; see setInput
private final Writer out;
private final CursorBuffer buf = new CursorBuffer();    // edit buffer + logical cursor
private String prompt;
private boolean expandEvents = true;    // whether bash-style "!" history expansion is enabled
private Character mask;                 // echo mask for the current readLine; null = normal echo
private Character echoCharacter;
private StringBuffer searchTerm = null; // incremental-search term; null when not searching
private String previousSearchTerm = "";
private int searchIndex = -1;           // history index of the current search hit; -1 = no hit
private Reader reader;
private String encoding;
private boolean recording;              // true while a keyboard macro is being recorded
private String macro = "";              // the recorded (or last recorded) keyboard macro
private String appName;
private URL inputrcUrl;                 // location of the inputrc key-binding file
private ConsoleKeys consoleKeys;
private boolean skipLF = false;
/** Creates a reader on stdin/stdout with an auto-detected terminal. */
public ConsoleReader() throws IOException {
    this(null, new FileInputStream(FileDescriptor.in), System.out, null);
}
/** Creates a reader on the given streams with an auto-detected terminal. */
public ConsoleReader(final InputStream in, final OutputStream out) throws
    IOException
{
    this(null, in, out, null);
}
/** Creates a reader on the given streams and terminal, with the default app name. */
public ConsoleReader(final InputStream in, final OutputStream out, final Terminal term) throws
    IOException
{
    this(null, in, out, term);
}
/**
 * Primary constructor: wires the input/output streams to a terminal and loads
 * the key bindings from the user's inputrc (or the built-in defaults).
 *
 * @param appName application name used to select inputrc sections; null means "JLine"
 * @param in      the stream keystrokes are read from
 * @param out     the stream console output is written to
 * @param term    the terminal to use; null means auto-detect
 * @throws IOException if the streams cannot be wrapped
 */
public ConsoleReader(final String appName, final InputStream in, final OutputStream out, final Terminal term) throws
    IOException
{
    this.appName = appName != null ? appName : "JLine";
    // Fix: the original "encoding != null ? encoding : ..." read the still
    // unassigned (hence always null) encoding field, so the configured
    // encoding was unconditionally used; state that directly.
    this.encoding = Configuration.getEncoding();
    this.terminal = term != null ? term : TerminalFactory.get();
    this.out = new OutputStreamWriter(terminal.wrapOutIfNeeded(out), this.encoding);
    setInput( in );
    // Resolve the inputrc location: the jline.inputrc system property wins,
    // falling back to ~/.inputrc.
    this.inputrcUrl = Configuration.getUrlFrom(
        Configuration.getString(Configuration.JLINE_INPUTRC,
            Configuration.getUrlFrom(new File(Configuration.getUserHome(),
                Configuration.INPUT_RC)).toExternalForm()));
    consoleKeys = new ConsoleKeys(appName, inputrcUrl);
}
/** @return the currently active key map */
public KeyMap getKeys() {
    return consoleKeys.getKeys();
}
/**
 * Installs the input stream, wrapping it so that bulk reads deliver at most
 * one byte at a time (keystroke-by-keystroke processing), and builds the
 * character {@link Reader} on top of it using the configured encoding.
 */
void setInput(final InputStream in) throws IOException {
    final InputStream wrapped = terminal.wrapInIfNeeded( in );
    // Wrap the input stream so that characters are only read one by one
    this.in = new FilterInputStream(wrapped) {
        @Override
        public int read(byte[] b, int off, int len) throws IOException {
            // Standard InputStream.read(byte[],int,int) argument checks.
            if (b == null) {
                throw new NullPointerException();
            } else if (off < 0 || len < 0 || len > b.length - off) {
                throw new IndexOutOfBoundsException();
            } else if (len == 0) {
                return 0;
            }
            // Delegate to single-byte read so at most one byte is returned.
            int c = read();
            if (c == -1) {
                return -1;
            }
            b[off] = (byte)c;
            return 1;
        }
    };
    this.reader = new InputStreamReader( this.in, encoding );
}
/** @return the (wrapped) input stream keystrokes are read from */
public InputStream getInput() {
    return in;
}
/** @return the writer console output goes to */
public Writer getOutput() {
    return out;
}
/** @return the terminal this reader is attached to */
public Terminal getTerminal() {
    return terminal;
}
/** @return the live edit buffer (contents + cursor position) */
public CursorBuffer getCursorBuffer() {
    return buf;
}
/** Enables or disables bash-style "!" history event expansion. */
public void setExpandEvents(final boolean expand) {
    this.expandEvents = expand;
}
/** @return true if history event expansion is enabled */
public boolean getExpandEvents() {
    return expandEvents;
}
/** Sets the prompt printed before the edit buffer. */
public void setPrompt(final String prompt) {
    this.prompt = prompt;
}
/** @return the current prompt, may be null */
public String getPrompt() {
    return prompt;
}
/**
 * Set the echo character. For example, to have "*" entered when a password is typed:
 * <p/>
 * <pre>
 * myConsoleReader.setEchoCharacter(new Character('*'));
 * </pre>
 * <p/>
 * Setting the character to
 * <p/>
 * <pre>
 * null
 * </pre>
 * <p/>
 * will restore normal character echoing. Setting the character to
 * <p/>
 * <pre>
 * new Character(0)
 * </pre>
 * <p/>
 * will cause nothing to be echoed.
 *
 * @param c the character to echo to the console in place of the typed character,
 *          null for normal echo, {@code (char) 0} for no echo at all.
 */
public void setEchoCharacter(final Character c) {
    this.echoCharacter = c;
}
/**
 * Returns the echo character.
 *
 * @return the echo character, or null if normal echoing is in effect
 */
public Character getEchoCharacter() {
    return echoCharacter;
}
/**
 * Erase the current line by backspacing over everything before the cursor.
 *
 * @return false if there was nothing to erase (cursor at column 0)
 */
protected final boolean resetLine() throws IOException {
    boolean hasContent = buf.cursor != 0;
    if (hasContent) {
        backspaceAll();
    }
    return hasContent;
}
/**
 * Absolute on-screen cursor position: length of the prompt's last line
 * (with ANSI escapes stripped) plus the buffer cursor.
 */
int getCursorPosition() {
    // FIXME: does not handle anything but a line with a prompt absolute position
    String prompt = getPrompt();
    return ((prompt == null) ? 0 : stripAnsi(lastLine(prompt)).length()) + buf.cursor;
}
/**
 * Returns the text after the last '\n'.
 * str is returned unchanged if no '\n' characters are present.
 * An empty string (not null) is returned if str is null.
 */
private String lastLine(String str) {
    if (str == null) return "";
    int last = str.lastIndexOf("\n");
    if (last >= 0) {
        return str.substring(last + 1, str.length());
    }
    return str;
}
/**
 * Strips ANSI escape sequences from a string by routing its bytes through
 * Jansi's {@code AnsiOutputStream}, which swallows the escapes.
 * Returns "" for null input; returns the input unchanged on I/O failure.
 * NOTE(review): uses the platform default charset for the byte round-trip —
 * presumably adequate for escape stripping, but verify for non-ASCII prompts.
 */
private String stripAnsi(String str) {
    if (str == null) return "";
    try {
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        AnsiOutputStream aos = new AnsiOutputStream(baos);
        aos.write(str.getBytes());
        aos.flush();
        return baos.toString();
    } catch (IOException e) {
        // best-effort: fall back to the unstripped string
        return str;
    }
}
/**
 * Move the cursor to the specified absolute buffer index.
 *
 * @param position the target index within the buffer
 * @return true if the cursor actually moved
 */
public final boolean setCursorPosition(final int position) throws IOException {
    int moved = moveCursor(position - buf.cursor);
    return moved != 0;
}
/**
 * Set the current buffer's content to the specified {@link String}. The
 * visual console will be modified to show the current buffer.
 *
 * Only the differing suffix is redrawn: the common prefix of the old and
 * new contents is kept, the rest is erased and reprinted.
 *
 * @param buffer the new contents of the buffer.
 */
private void setBuffer(final String buffer) throws IOException {
    // don't bother modifying it if it is unchanged
    if (buffer.equals(buf.buffer.toString())) {
        return;
    }
    // obtain the difference between the current buffer and the new one
    int sameIndex = 0;
    for (int i = 0, l1 = buffer.length(), l2 = buf.buffer.length(); (i < l1)
        && (i < l2); i++) {
        if (buffer.charAt(i) == buf.buffer.charAt(i)) {
            sameIndex++;
        }
        else {
            break;
        }
    }
    int diff = buf.cursor - sameIndex;
    if (diff < 0) { // we can't backspace here so try from the end of the buffer
        moveToEnd();
        diff = buf.buffer.length() - sameIndex;
    }
    backspace(diff); // go back for the differences
    killLine(); // clear to the end of the line
    buf.buffer.setLength(sameIndex); // the new length
    putString(buffer.substring(sameIndex)); // append the differences
}
/** CharSequence convenience overload; String.valueOf also tolerates null. */
private void setBuffer(final CharSequence buffer) throws IOException {
    setBuffer(String.valueOf(buffer));
}
/**
 * Output put the prompt + the current buffer
 */
public final void drawLine() throws IOException {
    String prompt = getPrompt();
    if (prompt != null) {
        print(prompt);
    }
    print(buf.buffer.toString());
    if (buf.length() != buf.cursor) { // not at end of line
        // NOTE(review): backs up one short of the cursor; the trailing
        // drawBuffer() call repositions from there — confirm the off-by-one
        // interplay before touching either count.
        back(buf.length() - buf.cursor - 1);
    }
    // force drawBuffer to check for weird wrap (after clear screen)
    drawBuffer();
}
/**
 * Clear the line and redraw it.
 */
public final void redrawLine() throws IOException {
    print(RESET_LINE);  // carriage return back to column 0
    // flush();
    drawLine();
}
/**
 * Clear the buffer and add its contents to the history.
 *
 * Applies history event expansion (if enabled) before storing; literal '!'
 * characters are re-escaped in the stored history line.
 *
 * @return the former contents of the buffer.
 */
final String finishBuffer() throws IOException { // FIXME: Package protected because used by tests
    String str = buf.buffer.toString();
    String historyLine = str;
    if (expandEvents) {
        str = expandEvents(str);
        historyLine = str.replaceAll("\\!", "\\\\!");
    }
    // we only add it to the history if the buffer is not empty
    // and if mask is null, since having a mask typically means
    // the string was a password. We clear the mask after this call
    if (str.length() > 0) {
        if (mask == null && isHistoryEnabled()) {
            history.add(historyLine);
        }
        else {
            mask = null;
        }
    }
    history.moveToEnd();
    buf.buffer.setLength(0);
    buf.cursor = 0;
    return str;
}
/**
 * Expand event designator such as !!, !#, !3, etc...
 * See http://www.gnu.org/software/bash/manual/html_node/Event-Designators.html
 *
 * Supported forms: {@code !!} (previous line), {@code !#} (line so far),
 * {@code !?s} (search), {@code !n} / {@code !-n} (absolute/relative history
 * index), {@code !prefix} (prefix search) and {@code ^old^new} substitution.
 * A backslash escapes the following character.
 *
 * @param str the raw input line
 * @return the expanded line; if it differs from the input it is also echoed
 * @throws IllegalArgumentException if a referenced event is not found
 */
protected String expandEvents(String str) throws IOException {
    StringBuilder sb = new StringBuilder();
    boolean escaped = false;
    for (int i = 0; i < str.length(); i++) {
        char c = str.charAt(i);
        if (escaped) {
            // previous char was '\': copy this one verbatim
            sb.append(c);
            escaped = false;
            continue;
        } else if (c == '\\') {
            escaped = true;
            continue;
        } else {
            escaped = false;
        }
        switch (c) {
            case '!':
                if (i + 1 < str.length()) {
                    c = str.charAt(++i);
                    boolean neg = false;
                    String rep = null;    // the replacement text, if any
                    int i1, idx;
                    switch (c) {
                        case '!':   // !! : previous history line
                            if (history.size() == 0) {
                                throw new IllegalArgumentException("!!: event not found");
                            }
                            rep = history.get(history.index() - 1).toString();
                            break;
                        case '#':   // !# : the line typed so far
                            sb.append(sb.toString());
                            break;
                        case '?':   // !?s : most recent line containing s
                            i1 = str.indexOf('?', i + 1);
                            if (i1 < 0) {
                                i1 = str.length();
                            }
                            String sc = str.substring(i + 1, i1);
                            i = i1;
                            idx = searchBackwards(sc);
                            if (idx < 0) {
                                throw new IllegalArgumentException("!?" + sc + ": event not found");
                            } else {
                                rep = history.get(idx).toString();
                            }
                            break;
                        case ' ':
                        case '\t':  // "! " is not an event designator
                            sb.append('!');
                            sb.append(c);
                            break;
                        case '-':   // !-n : n lines back
                            neg = true;
                            i++;
                            // fall through
                        case '0':
                        case '1':
                        case '2':
                        case '3':
                        case '4':
                        case '5':
                        case '6':
                        case '7':
                        case '8':
                        case '9':   // !n : absolute history index
                            i1 = i;
                            // consume the run of digits
                            for (; i < str.length(); i++) {
                                c = str.charAt(i);
                                if (c < '0' || c > '9') {
                                    break;
                                }
                            }
                            idx = 0;
                            try {
                                idx = Integer.parseInt(str.substring(i1, i));
                            } catch (NumberFormatException e) {
                                throw new IllegalArgumentException((neg ? "!-" : "!") + str.substring(i1, i) + ": event not found");
                            }
                            if (neg) {
                                if (idx < history.size()) {
                                    rep = (history.get(history.index() - idx)).toString();
                                } else {
                                    throw new IllegalArgumentException((neg ? "!-" : "!") + str.substring(i1, i) + ": event not found");
                                }
                            } else {
                                if (idx >= history.index() - history.size() && idx < history.index()) {
                                    rep = (history.get(idx)).toString();
                                } else {
                                    throw new IllegalArgumentException((neg ? "!-" : "!") + str.substring(i1, i) + ": event not found");
                                }
                            }
                            break;
                        default:    // !prefix : most recent line starting with prefix
                            String ss = str.substring(i);
                            i = str.length();
                            idx = searchBackwards(ss, history.index(), true);
                            if (idx < 0) {
                                throw new IllegalArgumentException("!" + ss + ": event not found");
                            } else {
                                rep = history.get(idx).toString();
                            }
                            break;
                    }
                    if (rep != null) {
                        sb.append(rep);
                    }
                } else {
                    // trailing '!' with nothing after it: literal
                    sb.append(c);
                }
                break;
            case '^':
                // ^old^new : quick substitution on the previous line,
                // only recognized at the start of the input
                if (i == 0) {
                    int i1 = str.indexOf('^', i + 1);
                    int i2 = str.indexOf('^', i1 + 1);
                    if (i2 < 0) {
                        i2 = str.length();
                    }
                    if (i1 > 0 && i2 > 0) {
                        String s1 = str.substring(i + 1, i1);
                        String s2 = str.substring(i1 + 1, i2);
                        String s = history.get(history.index() - 1).toString().replace(s1, s2);
                        sb.append(s);
                        i = i2 + 1;
                        break;
                    }
                }
                sb.append(c);
                break;
            default:
                sb.append(c);
                break;
        }
    }
    if (escaped) {
        // dangling trailing backslash is kept literally
        sb.append('\\');
    }
    String result = sb.toString();
    if (!str.equals(result)) {
        // echo the expanded line, like bash does
        print(result);
        println();
        flush();
    }
    return result;
}
/**
 * Write out the specified string to the buffer and the output stream,
 * honoring the echo mask (null = echo as-is, NULL_MASK = echo nothing,
 * otherwise echo the mask character once per input character).
 */
public final void putString(final CharSequence str) throws IOException {
    buf.write(str);
    if (mask == null) {
        // no masking
        print(str);
    } else if (mask == NULL_MASK) {
        // don't print anything
    } else {
        print(mask, str.length());
    }
    drawBuffer();
}
/**
 * Redraw the rest of the buffer from the cursor onwards. This is necessary
 * for inserting text into the buffer.
 *
 * @param clear the number of characters to clear after the end of the buffer
 */
private void drawBuffer(final int clear) throws IOException {
    // debug ("drawBuffer: " + clear);
    if (buf.cursor == buf.length() && clear == 0) {
        // nothing after the cursor and nothing to clear: no-op
    } else {
        char[] chars = buf.buffer.substring(buf.cursor).toCharArray();
        if (mask != null) {
            // masked input: echo the mask char instead of the real text
            Arrays.fill(chars, mask);
        }
        if (terminal.hasWeirdWrap()) {
            // need to determine if wrapping will occur:
            int width = terminal.getWidth();
            int pos = getCursorPosition();
            for (int i = 0; i < chars.length; i++) {
                print(chars[i]);
                if ((pos + i + 1) % width == 0) {
                    print(32); // move cursor to next line by printing dummy space
                    print(13); // CR / not newline.
                }
            }
        } else {
            print(chars);
        }
        clearAhead(clear, chars.length);
        if (terminal.isAnsiSupported()) {
            if (chars.length > 0) {
                back(chars.length);
            }
        } else {
            back(chars.length);
        }
    }
    if (terminal.hasWeirdWrap()) {
        int width = terminal.getWidth();
        // best guess on whether the cursor is in that weird location...
        // Need to do this without calling ansi cursor location methods
        // otherwise it breaks paste of wrapped lines in xterm.
        if (getCursorPosition() > 0 && (getCursorPosition() % width == 0)
            && buf.cursor == buf.length() && clear == 0) {
            // the following workaround is reverse-engineered from looking
            // at what bash sent to the terminal in the same situation
            print(32); // move cursor to next line by printing dummy space
            print(13); // CR / not newline.
        }
    }
}
/**
 * Redraw the rest of the buffer from the cursor onwards. This is necessary
 * for inserting text into the buffer. Equivalent to {@code drawBuffer(0)}.
 */
private void drawBuffer() throws IOException {
    drawBuffer(0);
}
/**
 * Clear ahead the specified number of characters without moving the cursor.
 *
 * @param num the number of characters to clear
 * @param delta the difference between the internal cursor and the screen
 * cursor - if > 0, assume some stuff was printed and weird wrap has to be
 * checked
 */
private void clearAhead(final int num, int delta) throws IOException {
    if (num == 0) {
        return;
    }
    if (terminal.isAnsiSupported()) {
        int width = terminal.getWidth();
        int screenCursorCol = getCursorPosition() + delta;
        // clear current line (EL: erase to end of line)
        printAnsiSequence("K");
        // if cursor+num wraps, then we need to clear the line(s) below too
        int curCol = screenCursorCol % width;
        int endCol = (screenCursorCol + num - 1) % width;
        int lines = num / width;
        if (endCol < curCol) lines++;
        // move down and erase each wrapped line...
        for (int i = 0; i < lines; i++) {
            printAnsiSequence("B");
            printAnsiSequence("2K");
        }
        // ...then move back up to where we started
        for (int i = 0; i < lines; i++) {
            printAnsiSequence("A");
        }
        return;
    }
    // non-ANSI fallback: print blank extra characters
    print(' ', num);
    // we need to flush here so a "clever" console doesn't just ignore the redundancy
    // of a space followed by a backspace.
    // flush();
    // reset the visual cursor
    back(num);
    // flush();
}
/**
 * Move the visual cursor backwards without modifying the buffer cursor.
 * On ANSI terminals this handles moving up across wrapped lines; otherwise
 * it emits raw backspaces.
 */
protected void back(final int num) throws IOException {
    if (num == 0) return;
    if (terminal.isAnsiSupported()) {
        int width = getTerminal().getWidth();
        int cursor = getCursorPosition();
        int realCursor = cursor + num;   // on-screen position before the move
        int realCol = realCursor % width;
        int newCol = cursor % width;
        int moveup = num / width;
        int delta = realCol - newCol;
        if (delta < 0) moveup++;         // crossing a wrap boundary needs one more row
        if (moveup > 0) {
            printAnsiSequence(moveup + "A");   // CUU: cursor up
        }
        printAnsiSequence((1 + newCol) + "G"); // CHA: absolute column (1-based)
        return;
    }
    print(BACKSPACE, num);
    // flush();
}
/**
 * Flush the console output stream. This is important for printing out single
 * characters (like a backspace or keyboard bell) that we want the console to
 * handle immediately.
 */
public void flush() throws IOException {
    out.flush();
}
/** Backspace over everything before the cursor; returns the count erased. */
private int backspaceAll() throws IOException {
    return backspace(Integer.MAX_VALUE);
}
/**
 * Issue <em>num</em> backspaces.
 *
 * Deletes up to {@code num} characters before the cursor from the buffer
 * and repairs the display, including a line-wrap redraw on ANSI terminals.
 *
 * @return the number of characters backed up
 */
private int backspace(final int num) throws IOException {
    if (buf.cursor == 0) {
        return 0;
    }
    int count = 0;
    int termwidth = getTerminal().getWidth();
    int lines = getCursorPosition() / termwidth;
    // moveCursor returns the (negative) distance moved; count is positive
    count = moveCursor(-1 * num) * -1;
    buf.buffer.delete(buf.cursor, buf.cursor + count);
    if (getCursorPosition() / termwidth != lines) {
        // we crossed a wrap boundary while backing up
        if (terminal.isAnsiSupported()) {
            // debug("doing backspace redraw: " + getCursorPosition() + " on " + termwidth + ": " + lines);
            printAnsiSequence("K");
            // if cursor+num wraps, then we need to clear the line(s) below too
            // last char printed is one pos less than cursor so we subtract
            // one
            /*
            // TODO: fixme (does not work - test with reverse search with wrapping line and CTRL-E)
            int endCol = (getCursorPosition() + num - 1) % termwidth;
            int curCol = getCursorPosition() % termwidth;
            if (endCol < curCol) lines++;
            for (int i = 1; i < lines; i++) {
                printAnsiSequence("B");
                printAnsiSequence("2K");
            }
            for (int i = 1; i < lines; i++) {
                printAnsiSequence("A");
            }
            return count;
            */
        }
    }
    drawBuffer(count);
    return count;
}
/**
 * Issue a backspace.
 *
 * @return true if successful (one character was actually erased)
 */
public boolean backspace() throws IOException {
    return backspace(1) == 1;
}
/** Move the cursor to the end of the buffer; false if it did not move. */
protected boolean moveToEnd() throws IOException {
    return moveCursor(buf.length() - buf.cursor) > 0;
}
/**
 * Delete the character at the current position and redraw the remainder of the buffer.
 *
 * @return false if the buffer is empty or the cursor is at the end
 */
private boolean deleteCurrentCharacter() throws IOException {
    if (buf.length() == 0 || buf.cursor == buf.length()) {
        return false;
    }
    buf.buffer.deleteCharAt(buf.cursor);
    drawBuffer(1);  // one stale trailing character to clear
    return true;
}
/** Move the cursor to the start of the previous word (skip delimiters, then the word). */
private boolean previousWord() throws IOException {
    while (isDelimiter(buf.current()) && (moveCursor(-1) != 0)) {
        // nothing
    }
    while (!isDelimiter(buf.current()) && (moveCursor(-1) != 0)) {
        // nothing
    }
    return true;
}
/** Move the cursor past the end of the next word (skip delimiters, then the word). */
private boolean nextWord() throws IOException {
    while (isDelimiter(buf.nextChar()) && (moveCursor(1) != 0)) {
        // nothing
    }
    while (!isDelimiter(buf.nextChar()) && (moveCursor(1) != 0)) {
        // nothing
    }
    return true;
}
/** Delete backwards through any delimiters, then through the previous word. */
private boolean deletePreviousWord() throws IOException {
    while (isDelimiter(buf.current()) && backspace()) {
        // nothing
    }
    while (!isDelimiter(buf.current()) && backspace()) {
        // nothing
    }
    return true;
}
/** Delete forwards through any delimiters, then through the next word. */
private boolean deleteNextWord() throws IOException {
    while (isDelimiter(buf.nextChar()) && delete()) {
        // nothing
    }
    while (!isDelimiter(buf.nextChar()) && delete()) {
        // nothing
    }
    return true;
}
/**
 * Capitalize the word at/after the cursor: first letter upper-cased, the
 * rest lower-cased; the cursor ends up after the word.
 */
private boolean capitalizeWord() throws IOException {
    boolean first = true;
    int i = 1;
    char c;
    while (buf.cursor + i - 1< buf.length() && !isDelimiter((c = buf.buffer.charAt(buf.cursor + i - 1)))) {
        buf.buffer.setCharAt(buf.cursor + i - 1, first ? Character.toUpperCase(c) : Character.toLowerCase(c));
        first = false;
        i++;
    }
    drawBuffer();
    moveCursor(i - 1);
    return true;
}
/** Upper-case from the cursor to the end of the current word; cursor ends after it. */
private boolean upCaseWord() throws IOException {
    int i = 1;
    char c;
    while (buf.cursor + i - 1 < buf.length() && !isDelimiter((c = buf.buffer.charAt(buf.cursor + i - 1)))) {
        buf.buffer.setCharAt(buf.cursor + i - 1, Character.toUpperCase(c));
        i++;
    }
    drawBuffer();
    moveCursor(i - 1);
    return true;
}
/** Lower-case from the cursor to the end of the current word; cursor ends after it. */
private boolean downCaseWord() throws IOException {
    int i = 1;
    char c;
    while (buf.cursor + i - 1 < buf.length() && !isDelimiter((c = buf.buffer.charAt(buf.cursor + i - 1)))) {
        buf.buffer.setCharAt(buf.cursor + i - 1, Character.toLowerCase(c));
        i++;
    }
    drawBuffer();
    moveCursor(i - 1);
    return true;
}
/**
 * Performs character transpose. The character prior to the cursor and the
 * character under the cursor are swapped and the cursor is advanced one
 * character unless you are already at the end of the line.
 *
 * @return true if the operation succeeded, false otherwise (e.g. transpose
 * cannot happen at the beginning of the line).
 * @throws IOException
 */
private boolean transposeChars() throws IOException {
    if (buf.cursor == 0 || buf.cursor == buf.buffer.length())
    {
        return false;
    }
    int first = buf.cursor-1;
    int second = buf.cursor;
    char tmp = buf.buffer.charAt (first);
    buf.buffer.setCharAt(first, buf.buffer.charAt(second));
    buf.buffer.setCharAt(second, tmp);
    // step back one, redraw the swapped pair, then advance past it
    moveInternal(-1);
    drawBuffer();
    moveInternal(2);
    return true;
}
/**
 * Tests whether the key map with the given name is the one currently in use.
 *
 * NOTE: compares by identity; there is no cleaner way to ask ConsoleKeys
 * which named map is active.
 *
 * @param name the key map name to check
 * @return true if that named map exists and is the active one
 */
public boolean isKeyMap(String name) {
    KeyMap active = consoleKeys.getKeys();
    KeyMap named = consoleKeys.getKeyMaps().get(name);
    return named != null && active == named;
}
/**
 * The equivalent of hitting <RET>: the line is considered complete, a
 * newline is emitted, and the buffer is cleared into the history.
 *
 * @return The completed line of text.
 * @throws IOException
 */
public String accept() throws IOException {
    moveToEnd();
    println(); // output newline
    flush();
    return finishBuffer();
}
/**
 * Move the cursor <i>num</i> characters (negative = left, positive = right),
 * clamped to the buffer boundaries.
 *
 * @param num If less than 0, move abs(<i>num</i>) to the left, otherwise move <i>num</i> to the right.
 * @return The number of positions actually moved
 */
public int moveCursor(final int num) throws IOException {
    // Already pinned against the edge we are pushing towards: nothing to do.
    if (buf.cursor == 0 && num <= 0) {
        return 0;
    }
    if (buf.cursor == buf.buffer.length() && num >= 0) {
        return 0;
    }
    // Clamp the requested move into [-cursor, length - cursor].
    int where = Math.max(-buf.cursor, Math.min(num, buf.buffer.length() - buf.cursor));
    moveInternal(where);
    return where;
}
/**
 * Move the cursor <i>where</i> characters, without checking the current buffer.
 *
 * Updates the buffer cursor first, then moves the visual cursor: via ANSI
 * sequences when supported (handling line wrap and weird-wrap scrolling),
 * otherwise by emitting backspaces or re-printing the skipped characters.
 *
 * @param where the number of characters to move to the right or left.
 */
private void moveInternal(final int where) throws IOException {
    // debug ("move cursor " + where + " ("
    // + buf.cursor + " => " + (buf.cursor + where) + ")");
    buf.cursor += where;
    if (terminal.isAnsiSupported()) {
        if (where < 0) {
            back(Math.abs(where));
        } else {
            int width = getTerminal().getWidth();
            int cursor = getCursorPosition();
            int oldLine = (cursor - where) / width;
            int newLine = cursor / width;
            if (newLine > oldLine) {
                if (terminal.hasWeirdWrap()) {
                    // scroll up if at bottom
                    // note:
                    // on rxvt cywgin terminal.getHeight() is incorrect
                    // MacOs xterm does not seem to support scrolling
                    if (getCurrentAnsiRow() == terminal.getHeight()) {
                        printAnsiSequence((newLine - oldLine) + "S");
                    }
                }
                printAnsiSequence((newLine - oldLine) + "B");   // cursor down
            }
            printAnsiSequence(1 +(cursor % width) + "G");       // absolute column
        }
        // flush();
        return;
    }
    char c;
    if (where < 0) {
        // moving left: emit one backspace per column, tabs count TAB_WIDTH
        int len = 0;
        for (int i = buf.cursor; i < buf.cursor - where; i++) {
            if (buf.buffer.charAt(i) == '\t') {
                len += TAB_WIDTH;
            }
            else {
                len++;
            }
        }
        char chars[] = new char[len];
        Arrays.fill(chars, BACKSPACE);
        out.write(chars);
        return;
    }
    else if (buf.cursor == 0) {
        return;
    }
    else if (mask != null) {
        c = mask;
    }
    else {
        // moving right without a mask: re-print the characters stepped over
        print(buf.buffer.substring(buf.cursor - where, buf.cursor).toCharArray());
        return;
    }
    // null character mask: don't output anything
    if (mask == NULL_MASK) {
        return;
    }
    print(c, Math.abs(where));
}
// FIXME: replace() is not used
/**
 * Replace the {@code num} characters before the cursor with
 * {@code replacement}, redrawing the affected region.
 *
 * @return true on success, false if the redraw failed with an IOException
 */
public final boolean replace(final int num, final String replacement) {
    buf.buffer.replace(buf.cursor - num, buf.cursor, replacement);
    try {
        moveCursor(-num);
        // clear any leftover columns when the replacement is shorter
        drawBuffer(Math.max(0, num - replacement.length()));
        moveCursor(replacement.length());
    }
    catch (IOException e) {
        e.printStackTrace();
        return false;
    }
    return true;
}
/**
 * Read a character from the console.
 *
 * @return the character, or -1 if an EOF is received.
 */
public final int readCharacter() throws IOException {
    int c = reader.read();
    if (c >= 0) {
        Log.trace("Keystroke: ", c);
        // clear any echo characters the terminal may have printed itself
        clearEcho(c);
    }
    return c;
}
/**
 * Clear the echoed characters for the specified character code.
 *
 * @return the number of columns that were cleared (0 if the terminal
 *         was not echoing)
 */
private int clearEcho(final int c) throws IOException {
    // if the terminal is not echoing, then ignore
    if (!terminal.isEchoEnabled()) {
        return 0;
    }
    // otherwise, clear: back up over the echo and repaint the buffer tail
    int num = countEchoCharacters(c);
    back(num);
    drawBuffer(num);
    return num;
}
/**
 * Number of columns the terminal echoed for character {@code c}:
 * distance to the next tab stop for TAB, otherwise the length of its
 * printable representation.
 */
private int countEchoCharacters(final int c) {
    // tabs as special: we need to determine the number of spaces
    // to cancel based on what out current cursor position is
    if (c == 9) {
        int tabStop = 8; // will this ever be different?
        int position = getCursorPosition();
        return tabStop - (position % tabStop);
    }
    return getPrintableCharacters(c).length();
}
/**
 * Return the characters that will be printed when the specified character
 * is echoed to the screen: printable ASCII as itself, DEL as "^?", control
 * characters as "^X", and high-bit characters with an "M-" prefix.
 *
 * Adapted from cat by Torbjorn Granlund, as repeated in stty by David MacKenzie.
 *
 * @param ch the character code that was read
 * @return the printable representation; its length is the echoed width
 */
private StringBuilder getPrintableCharacters(final int ch) {
    StringBuilder sbuff = new StringBuilder();
    if (ch >= 32) {
        if (ch < 127) {
            // Printable ASCII: echo the character itself. The cast is the
            // fix: append(int) would append the decimal representation
            // (e.g. "97" for 'a'), making the reported echo width wrong.
            sbuff.append((char) ch);
        }
        else if (ch == 127) {
            // DEL
            sbuff.append('^');
            sbuff.append('?');
        }
        else {
            // high-bit ("meta") characters
            sbuff.append('M');
            sbuff.append('-');
            if (ch >= (128 + 32)) {
                if (ch < (128 + 127)) {
                    sbuff.append((char) (ch - 128));
                }
                else {
                    sbuff.append('^');
                    sbuff.append('?');
                }
            }
            else {
                sbuff.append('^');
                sbuff.append((char) (ch - 128 + 64));
            }
        }
    }
    else {
        // control characters: caret notation
        sbuff.append('^');
        sbuff.append((char) (ch + 64));
    }
    return sbuff;
}
/**
 * Read characters until one of the allowed characters is typed; all other
 * input is silently discarded.
 *
 * @param allowed the set of acceptable characters
 * @return the first accepted character
 */
public final int readCharacter(final char... allowed) throws IOException {
    // if we restrict to a limited set and the current character is not in the set, then try again.
    char c;
    Arrays.sort(allowed); // always need to sort before binarySearch
    while (Arrays.binarySearch(allowed, c = (char) readCharacter()) < 0) {
        // nothing
    }
    return c;
}
//
// Key Bindings
//
public static final String JLINE_COMPLETION_THRESHOLD = "jline.completion.threshold";
//
// Line Reading
//
/**
 * Read the next line and return the contents of the buffer.
 * Uses the currently configured prompt and normal echoing.
 */
public String readLine() throws IOException {
    return readLine((String) null);
}
/**
 * Read the next line with the specified character mask. If null, then
 * characters will be echoed. If 0, then no characters will be echoed.
 */
public String readLine(final Character mask) throws IOException {
    return readLine(null, mask);
}
/** Read the next line using the given prompt, with normal echoing. */
public String readLine(final String prompt) throws IOException {
    return readLine(prompt, null);
}
/**
 * Sets the current keymap by name. Supported keymaps are "emacs",
 * "vi-insert", "vi-move".
 * @param name The name of the keymap to switch to
 * @return true if the keymap was set, or false if the keymap is
 * not recognized.
 */
public boolean setKeyMap(String name) {
    KeyMap target = consoleKeys.getKeyMaps().get(name);
    boolean recognized = target != null;
    if (recognized) {
        consoleKeys.setKeys(target);
    }
    return recognized;
}
/**
* Read a line from the <i>in</i> {@link InputStream}, and return the line
* (without any trailing newlines).
*
* @param prompt The prompt to issue to the console, may be null.
* @return A line that is read from the terminal, or null if there was null input (e.g., <i>CTRL-D</i>
* was pressed).
*/
public String readLine(String prompt, final Character mask) throws IOException {
// prompt may be null
// mask may be null
// FIXME: This blows, each call to readLine will reset the console's state which doesn't seem very nice.
this.mask = mask;
if (prompt != null) {
setPrompt(prompt);
}
else {
prompt = getPrompt();
}
try {
if (!terminal.isSupported()) {
beforeReadLine(prompt, mask);
}
if (prompt != null && prompt.length() > 0) {
out.write(prompt);
out.flush();
}
// if the terminal is unsupported, just use plain-java reading
if (!terminal.isSupported()) {
return readLineSimple();
}
String originalPrompt = this.prompt;
final int NORMAL = 1;
final int SEARCH = 2;
int state = NORMAL;
boolean success = true;
StringBuilder sb = new StringBuilder();
List<Character> pushBackChar = new ArrayList<Character>();
while (true) {
int c = pushBackChar.isEmpty() ? readCharacter() : pushBackChar.remove( pushBackChar.size() - 1 );
if (c == -1) {
return null;
}
sb.append( (char) c );
if (recording) {
macro += (char) c;
}
Object o = getKeys().getBound( sb );
if (o == Operation.DO_LOWERCASE_VERSION) {
sb.setLength( sb.length() - 1);
sb.append( Character.toLowerCase( (char) c ));
o = getKeys().getBound( sb );
}
if ( o instanceof KeyMap ) {
continue;
}
while ( o == null && sb.length() > 0 ) {
c = sb.charAt( sb.length() - 1 );
sb.setLength( sb.length() - 1 );
Object o2 = getKeys().getBound( sb );
if ( o2 instanceof KeyMap ) {
o = ((KeyMap) o2).getAnotherKey();
if ( o == null ) {
continue;
} else {
pushBackChar.add( (char) c );
}
}
}
if ( o == null ) {
continue;
}
Log.trace("Binding: ", o);
// Handle macros
if (o instanceof String) {
String macro = (String) o;
for (int i = 0; i < macro.length(); i++) {
pushBackChar.add(macro.charAt(macro.length() - 1 - i));
}
sb.setLength( 0 );
continue;
}
// Handle custom callbacks
if (o instanceof ActionListener) {
((ActionListener) o).actionPerformed(null);
sb.setLength( 0 );
continue;
}
// Search mode.
//
// Note that we have to do this first, because if there is a command
// not linked to a search command, we leave the search mode and fall
// through to the normal state.
if (state == SEARCH) {
int cursorDest = -1;
switch ( ((Operation) o )) {
case ABORT:
state = NORMAL;
break;
case REVERSE_SEARCH_HISTORY:
if (searchTerm.length() == 0) {
searchTerm.append(previousSearchTerm);
}
if (searchIndex == -1) {
searchIndex = searchBackwards(searchTerm.toString());
} else {
searchIndex = searchBackwards(searchTerm.toString(), searchIndex);
}
break;
case BACKWARD_DELETE_CHAR:
if (searchTerm.length() > 0) {
searchTerm.deleteCharAt(searchTerm.length() - 1);
searchIndex = searchBackwards(searchTerm.toString());
}
break;
case SELF_INSERT:
searchTerm.appendCodePoint(c);
searchIndex = searchBackwards(searchTerm.toString());
break;
default:
// Set buffer and cursor position to the found string.
if (searchIndex != -1) {
history.moveTo(searchIndex);
// set cursor position to the found string
cursorDest = history.current().toString().indexOf(searchTerm.toString());
}
state = NORMAL;
break;
}
// if we're still in search mode, print the search status
if (state == SEARCH) {
if (searchTerm.length() == 0) {
printSearchStatus("", "");
searchIndex = -1;
} else {
if (searchIndex == -1) {
beep();
} else {
printSearchStatus(searchTerm.toString(), history.get(searchIndex).toString());
}
}
}
// otherwise, restore the line
else {
restoreLine(originalPrompt, cursorDest);
}
}
if (state == NORMAL) {
if ( o instanceof Operation) {
switch ( ((Operation) o )) {
case COMPLETE: // tab
success = complete();
break;
case POSSIBLE_COMPLETIONS:
printCompletionCandidates();
success = true;
break;
case BEGINNING_OF_LINE:
success = setCursorPosition(0);
break;
case KILL_LINE: // CTRL-K
success = killLine();
break;
case KILL_WHOLE_LINE:
success = setCursorPosition(0) && killLine();
break;
case CLEAR_SCREEN: // CTRL-L
success = clearScreen();
break;
case OVERWRITE_MODE:
buf.setOverTyping(!buf.isOverTyping());
break;
case SELF_INSERT:
putString( sb );
success = true;
break;
case ACCEPT_LINE:
return accept();
/*
* VI_MOVE_ACCEPT_LINE is the result of an ENTER
* while in move mode. This is the same as a normal
* ACCEPT_LINE, except that we need to enter
* insert mode as well.
*/
case VI_MOVE_ACCEPT_LINE:
consoleKeys.setKeys(
consoleKeys.getKeyMaps().get(KeyMap.VI_INSERT));
return accept();
case BACKWARD_WORD:
success = previousWord();
break;
case FORWARD_WORD:
success = nextWord();
break;
case PREVIOUS_HISTORY:
success = moveHistory(false);
break;
/*
* According to bash/readline move through history
* in "vi" mode will move the cursor to the
* start of the line. If there is no previous
* history, then the cursor doesn't move.
*/
case VI_PREVIOUS_HISTORY:
success = moveHistory(false)
&& setCursorPosition(0);
break;
case NEXT_HISTORY:
success = moveHistory(true);
break;
/*
* According to bash/readline move through history
* in "vi" mode will move the cursor to the
* start of the line. If there is no next history,
* then the cursor doesn't move.
*/
case VI_NEXT_HISTORY:
success = moveHistory(true)
&& setCursorPosition(0);
break;
case BACKWARD_DELETE_CHAR: // backspace
success = backspace();
break;
case EXIT_OR_DELETE_CHAR:
if (buf.buffer.length() == 0) {
return null;
}
success = deleteCurrentCharacter();
break;
case DELETE_CHAR: // delete
success = deleteCurrentCharacter();
break;
case BACKWARD_CHAR:
success = moveCursor(-1) != 0;
break;
case FORWARD_CHAR:
success = moveCursor(1) != 0;
break;
case UNIX_LINE_DISCARD:
success = resetLine();
break;
case UNIX_WORD_RUBOUT:
case BACKWARD_KILL_WORD:
// in theory, those are slightly different
success = deletePreviousWord();
break;
case KILL_WORD:
success = deleteNextWord();
break;
case BEGINNING_OF_HISTORY:
success = history.moveToFirst();
if (success) {
setBuffer(history.current());
}
break;
case END_OF_HISTORY:
success = history.moveToLast();
if (success) {
setBuffer(history.current());
}
break;
case REVERSE_SEARCH_HISTORY:
if (searchTerm != null) {
previousSearchTerm = searchTerm.toString();
}
searchTerm = new StringBuffer(buf.buffer);
state = SEARCH;
if (searchTerm.length() > 0) {
searchIndex = searchBackwards(searchTerm.toString());
if (searchIndex == -1) {
beep();
}
printSearchStatus(searchTerm.toString(),
searchIndex > -1 ? history.get(searchIndex).toString() : "");
} else {
searchIndex = -1;
printSearchStatus("", "");
}
break;
case CAPITALIZE_WORD:
success = capitalizeWord();
break;
case UPCASE_WORD:
success = upCaseWord();
break;
case DOWNCASE_WORD:
success = downCaseWord();
break;
case END_OF_LINE:
success = moveToEnd();
break;
case TAB_INSERT:
putString( "\t" );
success = true;
break;
case RE_READ_INIT_FILE:
consoleKeys.loadKeys(appName, inputrcUrl);
success = true;
break;
case START_KBD_MACRO:
recording = true;
break;
case END_KBD_MACRO:
recording = false;
macro = macro.substring(0, macro.length() - sb.length());
break;
case CALL_LAST_KBD_MACRO:
for (int i = 0; i < macro.length(); i++) {
pushBackChar.add(macro.charAt(macro.length() - 1 - i));
}
sb.setLength( 0 );
break;
case VI_EDITING_MODE:
consoleKeys.setViEditMode(true);
consoleKeys.setKeys(consoleKeys.getKeyMaps()
.get(KeyMap.VI_INSERT));
break;
case VI_MOVEMENT_MODE:
moveCursor(-1);
consoleKeys.setKeys(
consoleKeys.getKeyMaps().get(KeyMap.VI_MOVE));
success = true;
break;
case VI_INSERTION_MODE:
consoleKeys.setKeys(consoleKeys.getKeyMaps()
.get(KeyMap.VI_INSERT));
success = true;
break;
case VI_APPEND_MODE:
moveCursor(1);
consoleKeys.setKeys(
consoleKeys.getKeyMaps().get(KeyMap.VI_INSERT));
success = true;
break;
case VI_APPEND_EOL:
success = moveToEnd();
consoleKeys.setKeys (
consoleKeys.getKeyMaps().get(KeyMap.VI_INSERT));
break;
/*
* Handler for CTRL-D. Attempts to follow readline
* behavior. If the line is empty, then it is an EOF
* otherwise it is as if the user hit enter.
*/
case VI_EOF_MAYBE:
if (buf.buffer.length() == 0) {
return null;
}
return accept();
case TRANSPOSE_CHARS:
success = transposeChars ();
break;
case EMACS_EDITING_MODE:
consoleKeys.setViEditMode(false);
consoleKeys.setKeys(
consoleKeys.getKeyMaps().get(KeyMap.EMACS));
success = true;
break;
default:
int i = 0;
break;
}
}
}
if (!success) {
beep();
}
sb.setLength( 0 );
flush();
}
}
finally {
if (!terminal.isSupported()) {
afterReadLine();
}
}
}
/**
 * Fallback line reader for terminals without advanced support.
 * Consumes characters until EOF or a line terminator, handling LF, CR and
 * CRLF (a pending CR sets {@code skipLF} so the following LF is swallowed).
 */
private String readLineSimple() throws IOException {
    final StringBuilder line = new StringBuilder();
    if (skipLF) {
        // Previous call ended on CR: discard one trailing LF if present.
        skipLF = false;
        final int c = readCharacter();
        if (c == -1 || c == '\r') {
            return line.toString();
        }
        if (c != '\n') {
            line.append((char) c);
        }
    }
    for (;;) {
        final int c = readCharacter();
        if (c == -1 || c == '\n') {
            return line.toString();
        }
        if (c == '\r') {
            // Remember to skip a following LF on the next invocation.
            skipLF = true;
            return line.toString();
        }
        line.append((char) c);
    }
}
//
// Completion
//
/** Registered tab-completion strategies, consulted in insertion order. */
private final List<Completer> completers = new LinkedList<Completer>();
/** Strategy deciding how matching candidates are applied to the buffer. */
private CompletionHandler completionHandler = new CandidateListCompletionHandler();
/**
 * Add the specified {@link jline.console.completer.Completer} to the list of handlers for tab-completion.
 *
 * @param completer the {@link jline.console.completer.Completer} to add
 * @return true if it was successfully added
 */
public boolean addCompleter(final Completer completer) {
    return completers.add(completer);
}
/**
 * Remove the specified {@link jline.console.completer.Completer} from the list of handlers for tab-completion.
 *
 * @param completer The {@link Completer} to remove
 * @return True if it was successfully removed
 */
public boolean removeCompleter(final Completer completer) {
    return completers.remove(completer);
}
/**
 * Returns an unmodifiable list of all the completers.
 */
public Collection<Completer> getCompleters() {
    return Collections.unmodifiableList(completers);
}
/** Sets the completion handler; nullness is only checked via assert. */
public void setCompletionHandler(final CompletionHandler handler) {
    assert handler != null;
    this.completionHandler = handler;
}
/** Returns the current completion handler. */
public CompletionHandler getCompletionHandler() {
    return this.completionHandler;
}
/**
 * Use the completers to modify the buffer with the appropriate completions.
 * The first completer that returns a position other than -1 wins.
 *
 * @return true if successful
 */
protected boolean complete() throws IOException {
    if (completers.isEmpty()) {
        return false;
    }
    final List<CharSequence> candidates = new LinkedList<CharSequence>();
    final String line = buf.buffer.toString();
    final int cursor = buf.cursor;
    int position = -1;
    for (Completer completer : completers) {
        position = completer.complete(line, cursor, candidates);
        if (position != -1) {
            break;
        }
    }
    if (candidates.isEmpty()) {
        return false;
    }
    return getCompletionHandler().complete(this, candidates, position);
}
/**
 * Prints the completion candidates for the current buffer without modifying
 * it, then redraws the input line. Candidates come from the first completer
 * that reports a match.
 */
protected void printCompletionCandidates() throws IOException {
    if (completers.isEmpty()) {
        return;
    }
    final List<CharSequence> candidates = new LinkedList<CharSequence>();
    final String line = buf.buffer.toString();
    final int cursor = buf.cursor;
    for (Completer completer : completers) {
        if (completer.complete(line, cursor, candidates) != -1) {
            break;
        }
    }
    CandidateListCompletionHandler.printCandidates(this, candidates);
    drawLine();
}
/**
 * The number of tab-completion candidates above which a warning will be
 * prompted before showing all the candidates.
 */
private int autoprintThreshold = Integer.getInteger(JLINE_COMPLETION_THRESHOLD, 100); // same default as bash
/**
 * @param threshold the number of candidates to print without issuing a warning.
 */
public void setAutoprintThreshold(final int threshold) {
    this.autoprintThreshold = threshold;
}
/**
 * @return the number of candidates to print without issuing a warning.
 */
public int getAutoprintThreshold() {
    return autoprintThreshold;
}
// When true, candidate listings taller than the terminal are paged.
private boolean paginationEnabled;
/**
 * Whether to use pagination when the number of rows of candidates exceeds the height of the terminal.
 */
public void setPaginationEnabled(final boolean enabled) {
    this.paginationEnabled = enabled;
}
/**
 * Whether to use pagination when the number of rows of candidates exceeds the height of the terminal.
 */
public boolean isPaginationEnabled() {
    return paginationEnabled;
}
//
// History
//
// Command-history backing store; defaults to an in-memory implementation.
private History history = new MemoryHistory();
public void setHistory(final History history) {
    this.history = history;
}
public History getHistory() {
    return history;
}
private boolean historyEnabled = true;
/**
 * Whether or not to add new commands to the history buffer.
 */
public void setHistoryEnabled(final boolean enabled) {
    this.historyEnabled = enabled;
}
/**
 * Whether or not to add new commands to the history buffer.
 */
public boolean isHistoryEnabled() {
    return historyEnabled;
}
/**
 * Move up or down the history tree and load the selected entry into the
 * buffer.
 *
 * @param next true to move forward, false to move backward
 * @return false when there is no entry in the requested direction
 */
private boolean moveHistory(final boolean next) throws IOException {
    final boolean moved = next ? history.next() : history.previous();
    if (!moved) {
        return false;
    }
    setBuffer(history.current());
    return true;
}
//
// Printing
//
/** Platform-dependent line separator, emitted by {@link #println()}. */
public static final String CR = System.getProperty("line.separator");
/**
 * Output the specified character to the output stream without manipulating the current buffer.
 * Tabs are expanded to {@code TAB_WIDTH} spaces.
 */
private void print(final int c) throws IOException {
    if (c == '\t') {
        char chars[] = new char[TAB_WIDTH];
        Arrays.fill(chars, ' ');
        out.write(chars);
        return;
    }
    out.write(c);
}
/**
 * Output the specified characters to the output stream without manipulating the current buffer.
 * Tabs are expanded to {@code TAB_WIDTH} spaces and the result is written in one call.
 */
private void print(final char... buff) throws IOException {
    // First pass: compute the expanded length so the buffer is sized once.
    int len = 0;
    for (char c : buff) {
        if (c == '\t') {
            len += TAB_WIDTH;
        }
        else {
            len++;
        }
    }
    char chars[];
    if (len == buff.length) {
        // No tabs present; write the input array as-is.
        chars = buff;
    }
    else {
        // Second pass: copy, replacing each tab with TAB_WIDTH spaces.
        chars = new char[len];
        int pos = 0;
        for (char c : buff) {
            if (c == '\t') {
                Arrays.fill(chars, pos, pos + TAB_WIDTH, ' ');
                pos += TAB_WIDTH;
            }
            else {
                chars[pos] = c;
                pos++;
            }
        }
    }
    out.write(chars);
}
/** Output {@code num} copies of the character {@code c}. */
private void print(final char c, final int num) throws IOException {
    if (num == 1) {
        print(c);
    }
    else {
        char[] chars = new char[num];
        Arrays.fill(chars, c);
        print(chars);
    }
}
/**
 * Output the specified string to the output stream (but not the buffer).
 */
public final void print(final CharSequence s) throws IOException {
    assert s != null;
    print(s.toString().toCharArray());
}
/** Output the specified string followed by a platform-dependent newline. */
public final void println(final CharSequence s) throws IOException {
    assert s != null;
    print(s.toString().toCharArray());
    println();
}
/**
 * Output a platform-dependant newline.
 */
public final void println() throws IOException {
    print(CR);
    // flush();
}
//
// Actions
//
/**
 * Issue a delete.
 *
 * @return true if successful
 */
public final boolean delete() throws IOException {
    return delete(1) == 1;
}
// FIXME: delete(int) only used by above + the return is always 1 and num is ignored
/**
 * Issue <em>num</em> deletes.
 * NOTE(review): despite the signature, exactly one character after the cursor
 * is deleted and the return value is always 1; {@code num} is ignored.
 *
 * @return the number of characters backed up
 */
private int delete(final int num) throws IOException {
    // TODO: Try to use jansi for this
    /* Commented out because of DWA-2949:
    if (buf.cursor == 0) {
        return 0;
    }
    */
    buf.buffer.delete(buf.cursor, buf.cursor + 1);
    drawBuffer(1);
    return 1;
}
/**
 * Kill the buffer ahead of the current cursor position.
 *
 * @return true if anything was removed
 */
public boolean killLine() throws IOException {
    final int start = buf.cursor;
    final int end = buf.buffer.length();
    if (start >= end) {
        return false;
    }
    // Erase the tail on screen, then drop it from the buffer in one call.
    clearAhead(end - start, 0);
    buf.buffer.delete(start, end);
    return true;
}
/**
 * Clear the screen by issuing the ANSI "clear screen" code.
 * Returns false (no-op) on terminals without ANSI support.
 */
public boolean clearScreen() throws IOException {
    if (!terminal.isAnsiSupported()) {
        return false;
    }
    // send the ANSI code to clear the screen
    printAnsiSequence("2J");
    // then send the ANSI code to go to position 1,1
    printAnsiSequence("1;1H");
    redrawLine();
    return true;
}
/**
 * Issue an audible keyboard bell.
 * Only sounds when the {@code JLINE_NOBELL} setting is false (its default
 * here is true, i.e. silent unless explicitly enabled).
 */
public void beep() throws IOException {
    if (!Configuration.getBoolean(JLINE_NOBELL, true)) {
        print(KEYBOARD_BELL);
        // need to flush so the console actually beeps
        flush();
    }
}
/**
 * Paste the contents of the clipboard into the console buffer.
 * Best-effort: returns false on any failure to obtain clipboard content
 * (e.g. headless systems without X), true once content has been inserted.
 *
 * @return true if clipboard contents pasted
 */
public boolean paste() throws IOException {
    Clipboard clipboard;
    try { // May throw ugly exception on system without X
        clipboard = Toolkit.getDefaultToolkit().getSystemClipboard();
    }
    catch (Exception e) {
        return false;
    }
    if (clipboard == null) {
        return false;
    }
    Transferable transferable = clipboard.getContents(null);
    if (transferable == null) {
        return false;
    }
    try {
        Object content = transferable.getTransferData(DataFlavor.plainTextFlavor);
        // This fix was suggested in bug #1060649 at
        // http://sourceforge.net/tracker/index.php?func=detail&aid=1060649&group_id=64033&atid=506056
        // to get around the deprecated DataFlavor.plainTextFlavor, but it
        // raises a UnsupportedFlavorException on Mac OS X
        if (content == null) {
            try {
                content = new DataFlavor().getReaderForText(transferable);
            }
            catch (Exception e) {
                // ignore — fall through with content still null
            }
        }
        if (content == null) {
            return false;
        }
        String value;
        if (content instanceof Reader) {
            // TODO: we might want instead connect to the input stream
            // so we can interpret individual lines
            value = "";
            String line;
            BufferedReader read = new BufferedReader((Reader) content);
            while ((line = read.readLine()) != null) {
                if (value.length() > 0) {
                    value += "\n";
                }
                value += line;
            }
        }
        else {
            value = content.toString();
        }
        if (value == null) {
            return true;
        }
        putString(value);
        return true;
    }
    catch (UnsupportedFlavorException e) {
        Log.error("Paste failed: ", e);
        return false;
    }
}
//
// Triggered Actions
//
// Maps an input character to a listener fired when that character passes
// pre-processing.
private final Map<Character, ActionListener> triggeredActions = new HashMap<Character, ActionListener>();
/**
 * Adding a triggered Action allows to give another course of action if a character passed the pre-processing.
 * <p/>
 * Say you want to close the application if the user enters q.
 * addTriggerAction('q', new ActionListener(){ System.exit(0); }); would do the trick.
 */
public void addTriggeredAction(final char c, final ActionListener listener) {
    triggeredActions.put(c, listener);
}
//
// Formatted Output
//
/**
 * Output the specified {@link Collection} in proper columns.
 * When pagination is enabled, output pauses every (terminal height - 1)
 * lines and waits for a key: ENTER advances one line, 'q' cancels, any
 * other key shows the next page.
 */
public void printColumns(final Collection<? extends CharSequence> items) throws IOException {
    if (items == null || items.isEmpty()) {
        return;
    }
    int width = getTerminal().getWidth();
    int height = getTerminal().getHeight();
    // Column width = widest item plus padding.
    int maxWidth = 0;
    for (CharSequence item : items) {
        maxWidth = Math.max(maxWidth, item.length());
    }
    maxWidth = maxWidth + 3;
    Log.debug("Max width: ", maxWidth);
    int showLines;
    if (isPaginationEnabled()) {
        showLines = height - 1; // page limit
    }
    else {
        showLines = Integer.MAX_VALUE;
    }
    StringBuilder buff = new StringBuilder();
    for (CharSequence item : items) {
        if ((buff.length() + maxWidth) > width) {
            // Current row is full: emit it and maybe pause for pagination.
            println(buff);
            buff.setLength(0);
            if (--showLines == 0) {
                // Overflow
                print(resources.getString("DISPLAY_MORE"));
                flush();
                int c = readCharacter();
                if (c == '\r' || c == '\n') {
                    // one step forward
                    showLines = 1;
                }
                else if (c != 'q') {
                    // page forward
                    showLines = height - 1;
                }
                back(resources.getString("DISPLAY_MORE").length());
                if (c == 'q') {
                    // cancel
                    break;
                }
            }
        }
        // NOTE: toString() is important here; AnsiString instances misbehave
        // when appended directly.
        buff.append(item.toString());
        for (int i = 0; i < (maxWidth - item.length()); i++) {
            buff.append(' ');
        }
    }
    if (buff.length() > 0) {
        println(buff);
    }
}
//
// Non-supported Terminal Support
//
// Background thread that keeps overwriting the input line to hide echoed
// characters on terminals that cannot disable echo.
private Thread maskThread;
/**
 * Starts the prompt-masking thread if a mask character is in effect and no
 * masking thread is already running. The thread repeatedly rewrites the
 * prompt line (every ~3 ms) until interrupted.
 */
private void beforeReadLine(final String prompt, final Character mask) {
    if (mask != null && maskThread == null) {
        // CR + prompt, padding to blank echoed input, then CR + prompt again.
        final String fullPrompt = "\r" + prompt
            + " "
            + " "
            + " "
            + "\r" + prompt;
        maskThread = new Thread()
        {
            public void run() {
                while (!interrupted()) {
                    try {
                        Writer out = getOutput();
                        out.write(fullPrompt);
                        out.flush();
                        sleep(3);
                    }
                    catch (IOException e) {
                        return;
                    }
                    catch (InterruptedException e) {
                        return;
                    }
                }
            }
        };
        maskThread.setPriority(Thread.MAX_PRIORITY);
        maskThread.setDaemon(true);
        maskThread.start();
    }
}
/** Stops and discards the prompt-masking thread, if one is running. */
private void afterReadLine() {
    if (maskThread != null && maskThread.isAlive()) {
        maskThread.interrupt();
    }
    maskThread = null;
}
/**
 * Erases the current line with the existing prompt, then redraws the line
 * with the provided prompt and buffer
 * @param prompt
 *          the new prompt
 * @param buffer
 *          the buffer to be drawn
 * @param cursorDest
 *          where you want the cursor set when the line has been drawn.
 *          -1 for end of line.
 * */
public void resetPromptLine(String prompt, String buffer, int cursorDest) throws IOException {
    // move cursor to end of line
    moveToEnd();
    // backspace all text, including prompt: the prompt is temporarily folded
    // into the buffer so backspaceAll() wipes it as well.
    buf.buffer.append(this.prompt);
    buf.cursor += this.prompt.length();
    this.prompt = "";
    backspaceAll();
    this.prompt = prompt;
    redrawLine();
    setBuffer(buffer);
    // move cursor to destination (-1 will move to end of line)
    if (cursorDest < 0) cursorDest = buffer.length();
    setCursorPosition(cursorDest);
    flush();
}
/** Redraws the line in reverse-i-search style with the cursor at the match. */
public void printSearchStatus(String searchTerm, String match) throws IOException {
    String prompt = "(reverse-i-search)`" + searchTerm + "': ";
    String buffer = match;
    // NOTE(review): indexOf can return -1 when the term is absent from the
    // match; resetPromptLine then places the cursor at end of line.
    int cursorDest = match.indexOf(searchTerm);
    resetPromptLine(prompt, buffer, cursorDest);
}
/** Restores the normal prompt (last line of the original) after a search. */
public void restoreLine(String originalPrompt, int cursorDest) throws IOException {
    // TODO move cursor to matched string
    String prompt = lastLine(originalPrompt);
    String buffer = buf.buffer.toString();
    resetPromptLine(prompt, buffer, cursorDest);
}
//
// History search
//
/**
 * Search backward in history from a given position.
 *
 * @param searchTerm substring to search for.
 * @param startIndex the index from which on to search
 * @return index where this substring has been found, or -1 else.
 */
public int searchBackwards(String searchTerm, int startIndex) {
    return searchBackwards(searchTerm, startIndex, false);
}
/**
 * Search backwards in history from the current position.
 *
 * @param searchTerm substring to search for.
 * @return index where the substring has been found, or -1 else.
 */
public int searchBackwards(String searchTerm) {
    return searchBackwards(searchTerm, history.index());
}
/**
 * Search backwards in history from {@code startIndex}.
 *
 * @param startsWith when true, match entries that start with the term;
 *                   otherwise match entries that contain it anywhere.
 * @return index of the first matching entry walking backwards, or -1.
 */
public int searchBackwards(String searchTerm, int startIndex, boolean startsWith) {
    ListIterator<History.Entry> it = history.entries(startIndex);
    while (it.hasPrevious()) {
        History.Entry e = it.previous();
        if (startsWith) {
            if (e.value().toString().startsWith(searchTerm)) {
                return e.index();
            }
        } else {
            if (e.value().toString().contains(searchTerm)) {
                return e.index();
            }
        }
    }
    return -1;
}
//
// Helpers
//
/**
 * Checks to see if the specified character is a delimiter. We consider a
 * character a delimiter if it is anything but a letter or digit.
 *
 * @param c The character to test
 * @return True if it is a delimiter
 */
private boolean isDelimiter(final char c) {
    return !(Character.isLetter(c) || Character.isDigit(c));
}
/** Emits an ANSI escape sequence: ESC, '[', then the given code, and flushes. */
private void printAnsiSequence(String sequence) throws IOException {
    print(27);
    print('[');
    print(sequence);
    flush(); // helps with step debugging
}
// return column position, reported by the terminal
/**
 * Queries the terminal for the current cursor position via the ANSI DSR
 * sequence ("ESC[6n"); the terminal replies with "ESC[{ROW};{COLUMN}R".
 * Shared by {@link #getCurrentPosition()} and {@link #getCurrentAnsiRow()},
 * which previously duplicated this logic verbatim.
 *
 * @param field 0 for the row, 1 for the column of the reported position
 * @return the requested coordinate, or -1 when unavailable
 */
private int queryCursorPosition(final int field) {
    // check for ByteArrayInputStream to disable for unit tests
    if (terminal.isAnsiSupported() && !(in instanceof ByteArrayInputStream)) {
        try {
            printAnsiSequence("6n");
            flush();
            StringBuffer b = new StringBuffer(8);
            // position is sent as <ESC>[{ROW};{COLUMN}R; strip ESC and '['
            int r;
            while((r = in.read()) > -1 && r != 'R') {
                if (r != 27 && r != '[') {
                    b.append((char) r);
                }
            }
            String[] pos = b.toString().split(";");
            return Integer.parseInt(pos[field]);
        } catch (Exception x) {
            // no luck
        }
    }
    return -1; // TODO: throw exception instead?
}
/** Returns the cursor column reported by the terminal, or -1. */
private int getCurrentPosition() {
    return queryCursorPosition(1);
}
// return row position, reported by the terminal
// needed to know whether to scroll up on cursor move in last col for weird
// wrapping terminals - not tested for anything else
private int getCurrentAnsiRow() {
    return queryCursorPosition(0);
}
}
|
Oops. Minor code formatting change.
It is tough to switch styles throughout the day.
|
src/main/java/jline/console/ConsoleReader.java
|
Oops. Minor code formatting change. It is tough to switch styles throughout the day.
|
|
Java
|
apache-2.0
|
d424cf5ce41b7dbd80385e1669f47b05a4c6d4a6
| 0
|
chunlinyao/fop,chunlinyao/fop,chunlinyao/fop,chunlinyao/fop,chunlinyao/fop,apache/fop,apache/fop,apache/fop,apache/fop,apache/fop
|
src/java/org/apache/fop/render/afp/modca/EndPageGroup.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/* $Id$ */
package org.apache.fop.render.afp.modca;
import java.io.IOException;
import java.io.OutputStream;
/**
 * The End Named Page Group (ENG) structured field terminates a page group that was
 * initiated by a Begin Named Page Group structured field.
 *
 * Note: This object will be used to represent an ENG
 * structured field. It is necessary as you can't end
 * a PageGroup because you don't know where the group
 * will end (as this is controlled by the tags in the FO).
 * <p>
 *
 */
public class EndPageGroup extends AbstractNamedAFPObject {
    /**
     * Main constructor
     * @param groupId the group id
     */
    public EndPageGroup(String groupId) {
        super(groupId);
        if (log.isDebugEnabled()) {
            log.debug("A ENG is being created for group: " + groupId);
        }
    }
    /**
     * Accessor method to write the AFP datastream for the End Page Group.
     * @param os The stream to write to
     * @throws java.io.IOException thrown if an I/O exception of some sort has occurred
     */
    public void writeDataStream(OutputStream os)
        throws IOException {
        byte[] data = new byte[17];
        data[0] = 0x5A;        // Structured field identifier
        data[1] = 0x00;        // Length byte 1 (total length 0x0010 after the identifier)
        data[2] = 0x10;        // Length byte 2
        data[3] = (byte) 0xD3; // Structured field id byte 1
        data[4] = (byte) 0xA9; // Structured field id byte 2
        data[5] = (byte) 0xAD; // Structured field id byte 3
        data[6] = 0x00;        // Flags
        data[7] = 0x00;        // Reserved
        data[8] = 0x00;        // Reserved
        // Copy the group name into the trailing bytes (replaces manual loop).
        System.arraycopy(nameBytes, 0, data, 9, nameBytes.length);
        os.write(data);
    }
}
|
Appears to be unused/unreferenced and superseded by PageGroup
git-svn-id: 102839466c3b40dd9c7e25c0a1a6d26afc40150a@610021 13f79535-47bb-0310-9956-ffa450edef68
|
src/java/org/apache/fop/render/afp/modca/EndPageGroup.java
|
Appears to be unused/unreferenced and superseded by PageGroup
|
||
Java
|
mit
|
c076bf0abc29d8b7b2c7f8a71e77b6b5d16e0d6e
| 0
|
r1chardj0n3s/pycode-minecraft
|
package net.mechanicalcat.pycode.init;
import net.minecraft.init.Blocks;
import net.minecraft.init.Items;
import net.minecraft.item.EnumDyeColor;
import net.minecraft.item.Item;
import net.minecraft.item.ItemStack;
import net.minecraftforge.fml.common.registry.GameRegistry;
public class ModCrafting {
    /**
     * Registers every crafting recipe added by the mod: the shaped Python
     * Block, Python Wand and Python Hand recipes, and the shapeless Python
     * Book (wand + writable book).
     */
    public static void register() {
        GameRegistry.addShapedRecipe(
                new ItemStack(ModBlocks.python_block),
                "CLC",
                "LRY",
                "CYC",
                'C', Blocks.COBBLESTONE,
                'L', dye(EnumDyeColor.BLUE),
                'Y', dye(EnumDyeColor.YELLOW),
                'R', Items.REDSTONE);
        GameRegistry.addShapedRecipe(
                new ItemStack(ModItems.python_wand),
                " L",
                " RY",
                "S ",
                'S', Items.STICK,
                'L', dye(EnumDyeColor.BLUE),
                'Y', dye(EnumDyeColor.YELLOW),
                'R', Items.REDSTONE);
        GameRegistry.addShapedRecipe(
                new ItemStack(ModItems.python_hand),
                " L ",
                "WRY",
                " W ",
                'W', Blocks.WOOL,
                'L', dye(EnumDyeColor.BLUE),
                'Y', dye(EnumDyeColor.YELLOW),
                'R', Items.REDSTONE);
        GameRegistry.addShapelessRecipe(
                new ItemStack(ModItems.python_book),
                ModItems.python_wand,
                Items.WRITABLE_BOOK);
    }

    /** Builds a single dye {@link ItemStack} of the given colour. */
    private static ItemStack dye(EnumDyeColor color) {
        return new ItemStack(Items.DYE, 1, color.getDyeDamage());
    }
}
|
main/java/net/mechanicalcat/pycode/init/ModCrafting.java
|
package net.mechanicalcat.pycode.init;
import net.minecraft.init.Blocks;
import net.minecraft.init.Items;
import net.minecraft.item.EnumDyeColor;
import net.minecraft.item.ItemStack;
import net.minecraftforge.fml.common.registry.GameRegistry;
public class ModCrafting {
    /**
     * Registers the mod's crafting recipes: the shaped Python Block and
     * Python Wand recipes, and the shapeless Python Book (wand + book).
     */
    public static void register() {
        GameRegistry.addShapedRecipe(
            new ItemStack(ModBlocks.python_block),
            "CLC",
            "LRY",
            "CYC",
            'C', Blocks.COBBLESTONE,
            'L', new ItemStack(Items.DYE, 1, EnumDyeColor.BLUE.getDyeDamage()),
            'Y', new ItemStack(Items.DYE, 1, EnumDyeColor.YELLOW.getDyeDamage()),
            'R', Items.REDSTONE
        );
        GameRegistry.addShapedRecipe(
            new ItemStack(ModItems.python_wand),
            " L",
            " RY",
            "S ",
            'S', Items.STICK,
            'L', new ItemStack(Items.DYE, 1, EnumDyeColor.BLUE.getDyeDamage()),
            'Y', new ItemStack(Items.DYE, 1, EnumDyeColor.YELLOW.getDyeDamage()),
            'R', Items.REDSTONE
        );
        GameRegistry.addShapelessRecipe(
            new ItemStack(ModItems.python_book),
            ModItems.python_wand,
            Items.WRITABLE_BOOK
        );
    }
}
|
add hand recipe
|
main/java/net/mechanicalcat/pycode/init/ModCrafting.java
|
add hand recipe
|
|
Java
|
mit
|
7c2153911834710437bb6f1a66b73e615c8c160e
| 0
|
codahale/shore
|
package com.codahale.shore;
import static com.google.common.base.Preconditions.*;
import java.util.Properties;
import java.util.Map.Entry;
import java.util.logging.Logger;
import net.jcip.annotations.Immutable;
import org.mortbay.jetty.Connector;
import org.mortbay.jetty.Handler;
import org.mortbay.jetty.Server;
import org.mortbay.jetty.servlet.Context;
import org.mortbay.jetty.servlet.FilterHolder;
import org.mortbay.jetty.servlet.ServletHolder;
import com.codahale.shore.modules.HibernateModule;
import com.google.common.base.Functions;
import com.google.common.base.Joiner;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterables;
import com.google.inject.Guice;
import com.google.inject.Injector;
import com.google.inject.Module;
import com.sun.jersey.guice.spi.container.servlet.GuiceContainer;
import com.wideplay.warp.persist.SessionFilter;
/**
 * Starts a new Jetty server and runs a Shore application.
 *
 * @author coda
 *
 */
@Immutable
public class ServerCommand implements Runnable {
    // Time, in ms, Jetty is given to drain in-flight requests at shutdown.
    private static final int GRACEFUL_SHUTDOWN_PERIOD = 5000; //ms
    private static final Logger LOGGER = Logger.getLogger(ServerCommand.class.getCanonicalName());
    private final AbstractConfiguration configuration;
    private final int port;
    private final Properties properties;
    /**
     * Creates a new {@link ServerCommand}.
     *
     * @param configuration
     *          the application's configuration
     * @param port
     *          the port to listen on
     * @param properties
     *          the connection properties
     */
    public ServerCommand(AbstractConfiguration configuration, int port, Properties properties) {
        this.configuration = checkNotNull(configuration);
        this.port = port;
        this.properties = checkNotNull(properties);
    }
    public AbstractConfiguration getConfiguration() {
        return configuration;
    }
    public int getPort() {
        return port;
    }
    public Properties getProperties() {
        return properties;
    }
    /**
     * Configures and starts the Jetty server, then blocks until it stops.
     * Startup failures are rethrown as unchecked exceptions.
     */
    @Override
    public void run() {
        final Server server = new Server();
        configuration.configure();
        server.addConnector(buildConnector());
        server.addHandler(buildContext(buildServletHolder()));
        // Don't advertise the server version in response headers.
        server.setSendServerVersion(false);
        server.setGracefulShutdown(GRACEFUL_SHUTDOWN_PERIOD);
        server.setStopAtShutdown(true);
        configuration.configureServer(server);
        try {
            server.start();
            server.join();
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }
    /** Builds the configured connector, bound to {@link #getPort()}. */
    private Connector buildConnector() {
        final Connector connector = configuration.getConnector();
        connector.setPort(port);
        return connector;
    }
    /** Builds the root ("/") context wiring the servlet plus all filters. */
    private Context buildContext(ServletHolder servletHolder) {
        final Context root = new Context();
        root.setContextPath("/");
        root.addServlet(servletHolder, "/*");
        for (Entry<FilterHolder, String> filter : configuration.getServletFilters().entrySet()) {
            root.addFilter(filter.getKey(), filter.getValue(), Handler.DEFAULT);
        }
        // Hibernate session-per-request filter wraps every request.
        root.addFilter(SessionFilter.class, "/*", Handler.DEFAULT);
        configuration.configureContext(root);
        return root;
    }
    /** Builds the Jersey servlet holder backed by the Guice injector. */
    private ServletHolder buildServletHolder() {
        final ServletHolder servletHolder = new ServletHolder(new GuiceContainer(buildInjector()));
        servletHolder.setInitParameter("com.sun.jersey.config.property.packages", getResourcePackages());
        LOGGER.info("Configured resource packages: " + configuration.getResourcePackages());
        return servletHolder;
    }
    /** Joins the resource package names with ';' as Jersey expects. */
    private String getResourcePackages() {
        return Joiner.on(";").join(Iterables.transform(
            configuration.getResourcePackages(),
            Functions.toStringFunction()
        ));
    }
    /** Creates the injector from configured modules plus the Hibernate module. */
    private Injector buildInjector() {
        return Guice.createInjector(
            configuration.getStage(),
            Iterables.concat(
                configuration.getModules(),
                ImmutableList.of(buildHibernateModule())
            )
        );
    }
    /** Builds the Hibernate persistence module from the connection properties. */
    private Module buildHibernateModule() {
        return new HibernateModule(LOGGER, properties, configuration.getEntityPackages());
    }
}
|
src/main/java/com/codahale/shore/ServerCommand.java
|
package com.codahale.shore;
import static com.google.common.base.Preconditions.*;
import java.util.Properties;
import java.util.Map.Entry;
import java.util.logging.Logger;
import net.jcip.annotations.Immutable;
import org.mortbay.jetty.Connector;
import org.mortbay.jetty.Server;
import org.mortbay.jetty.servlet.Context;
import org.mortbay.jetty.servlet.FilterHolder;
import org.mortbay.jetty.servlet.ServletHolder;
import com.codahale.shore.modules.HibernateModule;
import com.google.common.base.Functions;
import com.google.common.base.Joiner;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterables;
import com.google.inject.Guice;
import com.google.inject.Injector;
import com.google.inject.Module;
import com.sun.jersey.guice.spi.container.servlet.GuiceContainer;
import com.wideplay.warp.persist.SessionFilter;
/**
* Starts a new Jetty server and runs a Shore application.
*
* @author coda
*
*/
@Immutable
public class ServerCommand implements Runnable {
private static final int GRACEFUL_SHUTDOWN_PERIOD = 5000; //ms
private static final Logger LOGGER = Logger.getLogger(ServerCommand.class.getCanonicalName());
private final AbstractConfiguration configuration;
private final int port;
private final Properties properties;
/**
* Creates a new {@link ServerCommand}.
*
* @param configuration
* the application's configuration
* @param port
* the port to listen on
* @param properties
* the connection properties
*/
public ServerCommand(AbstractConfiguration configuration, int port, Properties properties) {
this.configuration = checkNotNull(configuration);
this.port = port;
this.properties = checkNotNull(properties);
}
public AbstractConfiguration getConfiguration() {
return configuration;
}
public int getPort() {
return port;
}
public Properties getProperties() {
return properties;
}
@Override
public void run() {
final Server server = new Server();
configuration.configure();
server.addConnector(buildConnector());
server.addHandler(buildContext(buildServletHolder()));
server.setSendServerVersion(false);
server.setGracefulShutdown(GRACEFUL_SHUTDOWN_PERIOD);
server.setStopAtShutdown(true);
configuration.configureServer(server);
try {
server.start();
server.join();
} catch (Exception e) {
throw new RuntimeException(e);
}
}
private Connector buildConnector() {
final Connector connector = configuration.getConnector();
connector.setPort(port);
return connector;
}
private Context buildContext(ServletHolder servletHolder) {
final Context root = new Context();
root.setContextPath("/");
root.addServlet(servletHolder, "/*");
for (Entry<FilterHolder, String> filter : configuration.getServletFilters().entrySet()) {
root.addFilter(filter.getKey(), filter.getValue(), 0);
}
root.addFilter(SessionFilter.class, "/*", 0);
configuration.configureContext(root);
return root;
}
private ServletHolder buildServletHolder() {
final ServletHolder servletHolder = new ServletHolder(new GuiceContainer(buildInjector()));
servletHolder.setInitParameter("com.sun.jersey.config.property.packages", getResourcePackages());
LOGGER.info("Configured resource packages: " + configuration.getResourcePackages());
return servletHolder;
}
private String getResourcePackages() {
return Joiner.on(";").join(Iterables.transform(
configuration.getResourcePackages(),
Functions.toStringFunction()
));
}
private Injector buildInjector() {
return Guice.createInjector(
configuration.getStage(),
Iterables.concat(
configuration.getModules(),
ImmutableList.of(buildHibernateModule())
)
);
}
private Module buildHibernateModule() {
return new HibernateModule(LOGGER, properties, configuration.getEntityPackages());
}
}
|
Replaced magic numbers with Jetty constants.
|
src/main/java/com/codahale/shore/ServerCommand.java
|
Replaced magic numbers with Jetty constants.
|
|
Java
|
mit
|
4ff13d64d08cc6eafffb82c41081ff139a7e363a
| 0
|
kotmw0701/Splatoon-in-Minecraft-Rewrite
|
package jp.kotmw.splatoon.mainweapons;
import java.util.Random;
import org.bukkit.Bukkit;
import org.bukkit.ChatColor;
import org.bukkit.entity.EntityType;
import org.bukkit.entity.Player;
import org.bukkit.entity.Snowball;
import org.bukkit.event.EventHandler;
import org.bukkit.event.Listener;
import org.bukkit.event.block.Action;
import org.bukkit.event.entity.EntityDamageByEntityEvent;
import org.bukkit.event.entity.ProjectileHitEvent;
import org.bukkit.event.player.PlayerInteractEvent;
import org.bukkit.inventory.ItemStack;
import org.bukkit.scheduler.BukkitRunnable;
import org.bukkit.util.Vector;
import jp.kotmw.splatoon.Main;
import jp.kotmw.splatoon.gamedatas.DataStore;
import jp.kotmw.splatoon.gamedatas.DataStore.WeaponType;
import jp.kotmw.splatoon.gamedatas.PlayerData;
import jp.kotmw.splatoon.gamedatas.WeaponData;
import jp.kotmw.splatoon.maingame.MainGame;
import jp.kotmw.splatoon.mainweapons.threads.ShooterRunnable;
import jp.kotmw.splatoon.manager.Paint;
public class Shooter implements Listener {

    /**
     * Fires the shooter when a player right-clicks while holding a registered
     * shooter-type weapon: validates the held item, checks the player's ink
     * (stored in the XP bar) and schedules the repeating fire task.
     */
    @EventHandler
    public void onInteract(PlayerInteractEvent e) {
        if(!DataStore.hasPlayerData(e.getPlayer().getName()))
            return;
        if(DataStore.getPlayerData(e.getPlayer().getName()).getArena() == null)
            return;
        Action action = e.getAction();
        // Only right-clicks fire; ignore left-clicks and pressure plates.
        if(action == Action.LEFT_CLICK_AIR
            || action == Action.LEFT_CLICK_BLOCK
            || action == Action.PHYSICAL)
            return;
        Player player = e.getPlayer();
        ItemStack item = player.getInventory().getItemInMainHand();
        PlayerData data = DataStore.getPlayerData(player.getName());
        if(DataStore.getWeapondata(data.getWeapon()).getType() != WeaponType.Shooter)
            return;
        // The held item must be the player's registered weapon item.
        if(data.isAllCancel()
            || item == null
            || item.getType() != DataStore.getWeapondata(data.getWeapon()).getItemtype()
            || !item.hasItemMeta()
            || item.getItemMeta().getLore().size() < 5
            || !item.getItemMeta().getDisplayName().equalsIgnoreCase(data.getWeapon()))
            return;
        WeaponData weapondata = DataStore.getWeapondata(data.getWeapon());
        if(player.getExp() < weapondata.getCost()) {
            // Out of ink: flash a warning title instead of firing.
            MainGame.sendTitle(data, 0, 5, 0, " ", ChatColor.RED+"インクがありません!");
            return;
        }
        int tick = 1;
        // Fast weapons (fire speed below 5 ticks) get a longer burst window so
        // the repeating task keeps firing between click events.
        if(weapondata.getFirespeed() < 5)
            tick = tick + (5 - weapondata.getFirespeed());
        if(data.getTask() == null) {
            BukkitRunnable task = new ShooterRunnable(player.getName());
            task.runTaskTimer(Main.main, 0, weapondata.getFirespeed());
            data.setTask(task);
        }
        data.setTick(tick);
    }

    /**
     * Paints the map around a shooter projectile's impact point.
     */
    @EventHandler
    public void onHit(ProjectileHitEvent e) {
        if(!(e.getEntity() instanceof Snowball)
            || !(e.getEntity().getShooter() instanceof Player))
            return;
        Player player = (Player) e.getEntity().getShooter();
        if(!DataStore.hasPlayerData(player.getName()))
            return;
        if(DataStore.getPlayerData(player.getName()).getArena() == null)
            return;
        PlayerData data = DataStore.getPlayerData(player.getName());
        if(DataStore.getWeapondata(data.getWeapon()).getType() != WeaponType.Shooter)
            return;
        Paint.SpherePaint(e.getEntity().getLocation(), DataStore.getWeapondata(data.getWeapon()).getRadius(), data);
    }

    /**
     * Applies the weapon's configured damage when a shooter snowball hits an
     * enemy player; self-hits and same-team hits are ignored.
     */
    @EventHandler
    public void onDamage(EntityDamageByEntityEvent e) {
        if(e.getDamager() instanceof Snowball && DataStore.hasPlayerData(e.getEntity().getName())) {
            Snowball ball = (Snowball) e.getDamager();
            if(!(ball.getShooter() instanceof Player))
                return;
            if(!(e.getEntity() instanceof Player))
                return;
            Player player = (Player) e.getEntity(), shooter = (Player) ball.getShooter();
            // Bug fix: compare player names with equals() — '==' tested object
            // identity on Strings and could fail to detect a self-hit.
            if(!DataStore.hasPlayerData(shooter.getName())
                || player.getName().equals(shooter.getName())
                || DataStore.getPlayerData(player.getName()).getTeamid() == DataStore.getPlayerData(shooter.getName()).getTeamid())
                return;
            WeaponData data = DataStore.getWeapondata(DataStore.getPlayerData(shooter.getName()).getWeapon());
            if(data.getType() != WeaponType.Shooter)
                return;
            e.setDamage(data.getDamage());
        }
    }

    /**
     * Cancels snowball damage against armour stands when the snowball was
     * fired by a tracked player.
     */
    @EventHandler
    public void onArmorstanddamage(EntityDamageByEntityEvent e) {
        if(e.getEntity().getType() != EntityType.ARMOR_STAND || !(e.getDamager() instanceof Snowball))
            return;
        Snowball ball = (Snowball) e.getDamager();
        if(!(ball.getShooter() instanceof Player) || !DataStore.hasPlayerData(((Player)ball.getShooter()).getName()))
            return;
        e.setCancelled(true);
    }

    /**
     * Fires one shot for the given player: spends ink, paints around the
     * muzzle and launches a snowball with a small random horizontal spread.
     */
    public static void shoot(PlayerData data) {
        Player player = Bukkit.getPlayer(data.getName());
        WeaponData weapon = DataStore.getWeapondata(data.getWeapon());
        player.setExp((float) (player.getExp() - weapon.getCost()));
        Paint.SpherePaint(player.getLocation(), DataStore.getWeapondata(data.getWeapon()).getRadius(), data);
        Random random = new Random();
        int angle = weapon.getAngle() * 100;
        // NOTE(review): nextInt(angle)/100 uses integer division, so the x100
        // scaling gains no extra precision — confirm whether /100.0 was
        // intended before changing gameplay behaviour.
        double x = Math.toRadians((random.nextInt(angle) / 100) - ((weapon.getAngle() - 1) / 2));
        double z = Math.toRadians((random.nextInt(angle) / 100) - ((weapon.getAngle() - 1) / 2));
        Vector direction = player.getLocation().getDirection().clone();
        MainGame.sync(() -> {
            Snowball snowball = player.launchProjectile(Snowball.class);
            Vector vec = new Vector(x, 0, z), vec2 = new Vector(direction.getX() * 0.75, direction.getY() * 0.75, direction.getZ() * 0.75);
            vec2.add(vec);
            snowball.setVelocity(vec2);
        });
    }

    /*
     * Shooter design notes (translated from the original Japanese):
     *  - the fire rate is configurable in ticks
     *  - the paint radius at both the launch and impact points is configurable
     *
     * Spread randomisation: for, say, a 12-degree cone the range is split into
     * -6..+6 degrees so shots scatter on both sides of the aim direction; each
     * sampled angle is converted to a vector and added to the X/Z velocity.
     */
}
|
src/main/java/jp/kotmw/splatoon/mainweapons/Shooter.java
|
package jp.kotmw.splatoon.mainweapons;
import java.util.Random;
import org.bukkit.Bukkit;
import org.bukkit.ChatColor;
import org.bukkit.entity.EntityType;
import org.bukkit.entity.Player;
import org.bukkit.entity.Snowball;
import org.bukkit.event.EventHandler;
import org.bukkit.event.Listener;
import org.bukkit.event.block.Action;
import org.bukkit.event.entity.EntityDamageByEntityEvent;
import org.bukkit.event.entity.ProjectileHitEvent;
import org.bukkit.event.player.PlayerInteractEvent;
import org.bukkit.inventory.ItemStack;
import org.bukkit.scheduler.BukkitRunnable;
import org.bukkit.util.Vector;
import jp.kotmw.splatoon.Main;
import jp.kotmw.splatoon.gamedatas.DataStore;
import jp.kotmw.splatoon.gamedatas.DataStore.WeaponType;
import jp.kotmw.splatoon.gamedatas.PlayerData;
import jp.kotmw.splatoon.gamedatas.WeaponData;
import jp.kotmw.splatoon.maingame.MainGame;
import jp.kotmw.splatoon.mainweapons.threads.ShooterRunnable;
import jp.kotmw.splatoon.manager.Paint;
public class Shooter implements Listener {

    /**
     * Fires the shooter when a player right-clicks while holding a registered
     * shooter-type weapon: validates the held item, checks the player's ink
     * (stored in the XP bar) and schedules the repeating fire task.
     */
    @EventHandler
    public void onInteract(PlayerInteractEvent e) {
        if(!DataStore.hasPlayerData(e.getPlayer().getName()))
            return;
        if(DataStore.getPlayerData(e.getPlayer().getName()).getArena() == null)
            return;
        Action action = e.getAction();
        // Only right-clicks fire; ignore left-clicks and pressure plates.
        if(action == Action.LEFT_CLICK_AIR
            || action == Action.LEFT_CLICK_BLOCK
            || action == Action.PHYSICAL)
            return;
        Player player = e.getPlayer();
        ItemStack item = player.getInventory().getItemInMainHand();
        PlayerData data = DataStore.getPlayerData(player.getName());
        if(DataStore.getWeapondata(data.getWeapon()).getType() != WeaponType.Shooter)
            return;
        // The held item must be the player's registered weapon item.
        if(data.isAllCancel()
            || item == null
            || item.getType() != DataStore.getWeapondata(data.getWeapon()).getItemtype()
            || !item.hasItemMeta()
            || item.getItemMeta().getLore().size() < 5
            || !item.getItemMeta().getDisplayName().equalsIgnoreCase(data.getWeapon()))
            return;
        WeaponData weapondata = DataStore.getWeapondata(data.getWeapon());
        if(player.getExp() < weapondata.getCost()) {
            // Out of ink: flash a warning title instead of firing.
            MainGame.sendTitle(data, 0, 5, 0, " ", ChatColor.RED+"インクがありません!");
            return;
        }
        int tick = 1;
        // Fast weapons (fire speed below 5 ticks) get a longer burst window so
        // the repeating task keeps firing between click events.
        if(weapondata.getFirespeed() < 5)
            tick = tick + (5 - weapondata.getFirespeed());
        if(data.getTask() == null) {
            BukkitRunnable task = new ShooterRunnable(player.getName());
            task.runTaskTimer(Main.main, 0, weapondata.getFirespeed());
            data.setTask(task);
        }
        data.setTick(tick);
    }

    /**
     * Paints the map around a shooter projectile's impact point.
     */
    @EventHandler
    public void onHit(ProjectileHitEvent e) {
        if(!(e.getEntity() instanceof Snowball)
            || !(e.getEntity().getShooter() instanceof Player))
            return;
        Player player = (Player) e.getEntity().getShooter();
        if(!DataStore.hasPlayerData(player.getName()))
            return;
        if(DataStore.getPlayerData(player.getName()).getArena() == null)
            return;
        PlayerData data = DataStore.getPlayerData(player.getName());
        if(DataStore.getWeapondata(data.getWeapon()).getType() != WeaponType.Shooter)
            return;
        Paint.SpherePaint(e.getEntity().getLocation(), DataStore.getWeapondata(data.getWeapon()).getRadius(), data);
    }

    /**
     * Applies the weapon's configured damage when a shooter snowball hits an
     * enemy player; self-hits and same-team hits are ignored.
     */
    @EventHandler
    public void onDamage(EntityDamageByEntityEvent e) {
        if(e.getDamager() instanceof Snowball && DataStore.hasPlayerData(e.getEntity().getName())) {
            Snowball ball = (Snowball) e.getDamager();
            if(!(ball.getShooter() instanceof Player))
                return;
            if(!(e.getEntity() instanceof Player))
                return;
            Player player = (Player) e.getEntity(), shooter = (Player) ball.getShooter();
            // Bug fix: compare player names with equals() — '==' tested object
            // identity on Strings and could fail to detect a self-hit.
            if(!DataStore.hasPlayerData(shooter.getName())
                || player.getName().equals(shooter.getName())
                || DataStore.getPlayerData(player.getName()).getTeamid() == DataStore.getPlayerData(shooter.getName()).getTeamid())
                return;
            WeaponData data = DataStore.getWeapondata(DataStore.getPlayerData(shooter.getName()).getWeapon());
            if(data.getType() != WeaponType.Shooter)
                return;
            e.setDamage(data.getDamage());
        }
    }

    /**
     * Cancels snowball damage against armour stands when the snowball was
     * fired by a tracked player.
     */
    @EventHandler
    public void onArmorstanddamage(EntityDamageByEntityEvent e) {
        if(e.getEntity().getType() != EntityType.ARMOR_STAND || !(e.getDamager() instanceof Snowball))
            return;
        Snowball ball = (Snowball) e.getDamager();
        if(!(ball.getShooter() instanceof Player) || !DataStore.hasPlayerData(((Player)ball.getShooter()).getName()))
            return;
        e.setCancelled(true);
    }

    /**
     * Fires one shot for the given player: spends ink, paints around the
     * muzzle and launches a snowball with a small random horizontal spread.
     */
    public static void shoot(PlayerData data) {
        Player player = Bukkit.getPlayer(data.getName());
        WeaponData weapon = DataStore.getWeapondata(data.getWeapon());
        player.setExp((float) (player.getExp() - weapon.getCost()));
        Paint.SpherePaint(player.getLocation(), DataStore.getWeapondata(data.getWeapon()).getRadius(), data);
        Random random = new Random();
        int angle = weapon.getAngle() * 100;
        // NOTE(review): nextInt(angle)/100 uses integer division, so the x100
        // scaling gains no extra precision — confirm whether /100.0 was
        // intended before changing gameplay behaviour.
        double x = Math.toRadians((random.nextInt(angle) / 100) - ((weapon.getAngle() - 1) / 2));
        double z = Math.toRadians((random.nextInt(angle) / 100) - ((weapon.getAngle() - 1) / 2));
        Vector direction = player.getLocation().getDirection().clone();
        MainGame.sync(() -> {
            Snowball snowball = player.launchProjectile(Snowball.class);
            Vector vec = new Vector(x, 0, z), vec2 = new Vector(direction.getX() * 0.75, direction.getY() * 0.75, direction.getZ() * 0.75);
            vec2.add(vec);
            snowball.setVelocity(vec2);
        });
    }

    /*
     * Shooter design notes (translated from the original Japanese):
     *  - the fire rate is configurable in ticks
     *  - the paint radius at both the launch and impact points is configurable
     *
     * Spread randomisation: for, say, a 12-degree cone the range is split into
     * -6..+6 degrees so shots scatter on both sides of the aim direction; each
     * sampled angle is converted to a vector and added to the X/Z velocity.
     */
}
|
Update Shooter.java
|
src/main/java/jp/kotmw/splatoon/mainweapons/Shooter.java
|
Update Shooter.java
|
|
Java
|
mit
|
e42d90d17e48f76e35139e4839776fd84b054238
| 0
|
dalifreire/takes,simonjenga/takes,yegor256/takes,simonjenga/takes,xupyprmv/takes,yegor256/takes,xupyprmv/takes,dalifreire/takes
|
/**
* The MIT License (MIT)
*
* Copyright (c) 2014-2016 Yegor Bugayenko
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included
* in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package org.takes.facets.auth.social;
import com.jcabi.http.request.JdkRequest;
import com.jcabi.http.response.JsonResponse;
import com.jcabi.http.response.RestResponse;
import com.jcabi.http.response.XmlResponse;
import java.io.IOException;
import java.net.HttpURLConnection;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import javax.json.JsonObject;
import lombok.EqualsAndHashCode;
import org.takes.HttpException;
import org.takes.Request;
import org.takes.Response;
import org.takes.facets.auth.Identity;
import org.takes.facets.auth.Pass;
import org.takes.misc.Href;
import org.takes.misc.Opt;
import org.takes.rq.RqHref;
/**
 * Github OAuth landing/callback page.
 *
 * <p>Exchanges the OAuth "code" query parameter for an access token and then
 * resolves the token to a Github identity.
 *
 * <p>The class is immutable and thread-safe.
 *
 * @author Yegor Bugayenko (yegor@teamed.io)
 * @version $Id$
 * @since 0.1
 * @checkstyle MultipleStringLiteralsCheck (500 lines)
 */
@EqualsAndHashCode(of = { "app", "key" })
public final class PsGithub implements Pass {

    /**
     * Access token.
     */
    private static final String ACCESS_TOKEN = "access_token";

    /**
     * Code.
     */
    private static final String CODE = "code";

    /**
     * Login.
     */
    private static final String LOGIN = "login";

    /**
     * App name.
     */
    private final transient String app;

    /**
     * Key.
     */
    private final transient String key;

    /**
     * GitHub OAuth url.
     */
    private final transient String github;

    /**
     * GitHub API url.
     */
    private final transient String api;

    /**
     * Ctor.
     * @param gapp Github app
     * @param gkey Github key
     */
    public PsGithub(final String gapp, final String gkey) {
        this(gapp, gkey, "https://github.com", "https://api.github.com");
    }

    /**
     * Ctor.
     * @param gapp Github app
     * @param gkey Github key
     * @param gurl Github OAuth server
     * @param aurl Github API server
     * @checkstyle ParameterNumberCheck (2 lines)
     */
    PsGithub(final String gapp, final String gkey,
        final String gurl, final String aurl) {
        this.app = gapp;
        this.key = gkey;
        this.github = gurl;
        this.api = aurl;
    }

    @Override
    public Opt<Identity> enter(final Request request)
        throws IOException {
        final Href href = new RqHref.Base(request).href();
        final Iterator<String> codes = href.param(PsGithub.CODE).iterator();
        if (!codes.hasNext()) {
            throw new HttpException(
                HttpURLConnection.HTTP_BAD_REQUEST,
                "code is not provided by Github"
            );
        }
        final String token = this.token(href.toString(), codes.next());
        return new Opt.Single<Identity>(this.fetch(token));
    }

    @Override
    public Response exit(final Response response,
        final Identity identity) {
        return response;
    }

    /**
     * Get user name from Github, with the token provided.
     * @param token Github access token
     * @return The user found in Github
     * @throws IOException If fails
     */
    private Identity fetch(final String token) throws IOException {
        final Href target = new Href(this.api)
            .path("user")
            .with(PsGithub.ACCESS_TOKEN, token);
        final JsonObject json = new JdkRequest(target.toString())
            .header("accept", "application/json")
            .fetch().as(RestResponse.class)
            .assertStatus(HttpURLConnection.HTTP_OK)
            .as(JsonResponse.class).json().readObject();
        return PsGithub.parse(json);
    }

    /**
     * Retrieve Github access token.
     * @param home Home of this page
     * @param code Github "authorization code"
     * @return The token
     * @throws IOException If failed
     */
    private String token(final String home, final String code)
        throws IOException {
        final String uri = new Href(this.github)
            .path(PsGithub.LOGIN).path("oauth").path(PsGithub.ACCESS_TOKEN)
            .toString();
        final List<String> tokens = new JdkRequest(uri)
            .method("POST")
            .header("Accept", "application/xml")
            .body()
            .formParam("client_id", this.app)
            .formParam("redirect_uri", home)
            .formParam("client_secret", this.key)
            .formParam(PsGithub.CODE, code)
            .back()
            .fetch().as(RestResponse.class)
            .assertStatus(HttpURLConnection.HTTP_OK)
            .as(XmlResponse.class)
            .xml().xpath("/OAuth/access_token/text()");
        final Iterator<String> found = tokens.iterator();
        if (!found.hasNext()) {
            throw new HttpException(
                HttpURLConnection.HTTP_BAD_REQUEST, "No access token"
            );
        }
        return found.next();
    }

    /**
     * Make identity from JSON object.
     * @param json JSON received from Github
     * @return Identity found
     */
    private static Identity parse(final JsonObject json) {
        final String urn = String.format("urn:github:%d", json.getInt("id"));
        final Map<String, String> props =
            new HashMap<String, String>(json.size());
        // @checkstyle MultipleStringLiteralsCheck (1 line)
        props.put(PsGithub.LOGIN, json.getString(PsGithub.LOGIN, "unknown"));
        props.put("avatar", json.getString("avatar_url", "#"));
        return new Identity.Simple(urn, props);
    }
}
|
src/main/java/org/takes/facets/auth/social/PsGithub.java
|
/**
* The MIT License (MIT)
*
* Copyright (c) 2014-2016 Yegor Bugayenko
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included
* in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package org.takes.facets.auth.social;
import com.jcabi.http.request.JdkRequest;
import com.jcabi.http.response.JsonResponse;
import com.jcabi.http.response.RestResponse;
import com.jcabi.http.response.XmlResponse;
import java.io.IOException;
import java.net.HttpURLConnection;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import javax.json.JsonObject;
import lombok.EqualsAndHashCode;
import org.takes.HttpException;
import org.takes.Request;
import org.takes.Response;
import org.takes.facets.auth.Identity;
import org.takes.facets.auth.Pass;
import org.takes.misc.Href;
import org.takes.misc.Opt;
import org.takes.rq.RqHref;
/**
 * Github OAuth landing/callback page.
 *
 * <p>The class is immutable and thread-safe.
 *
 * @author Yegor Bugayenko (yegor@teamed.io)
 * @version $Id$
 * @since 0.1
 * @checkstyle MultipleStringLiteralsCheck (500 lines)
 */
@EqualsAndHashCode(of = { "app", "key" })
public final class PsGithub implements Pass {

    /**
     * Access token parameter name and path segment, extracted to a constant
     * to avoid duplicated (and typo-prone) string literals.
     */
    private static final String ACCESS_TOKEN = "access_token";

    /**
     * OAuth "code" query/form parameter name.
     */
    private static final String CODE = "code";

    /**
     * Github "login" path segment and JSON field name.
     */
    private static final String LOGIN = "login";

    /**
     * App name.
     */
    private final transient String app;

    /**
     * Key.
     */
    private final transient String key;

    /**
     * GitHub OAuth url.
     */
    private final transient String github;

    /**
     * GitHub API url.
     */
    private final transient String api;

    /**
     * Ctor.
     * @param gapp Github app
     * @param gkey Github key
     */
    public PsGithub(final String gapp, final String gkey) {
        this(gapp, gkey, "https://github.com", "https://api.github.com");
    }

    /**
     * Ctor.
     * @param gapp Github app
     * @param gkey Github key
     * @param gurl Github OAuth server
     * @param aurl Github API server
     * @checkstyle ParameterNumberCheck (2 lines)
     */
    PsGithub(final String gapp, final String gkey,
        final String gurl, final String aurl) {
        this.app = gapp;
        this.key = gkey;
        this.github = gurl;
        this.api = aurl;
    }

    @Override
    public Opt<Identity> enter(final Request request)
        throws IOException {
        final Href href = new RqHref.Base(request).href();
        final Iterator<String> code = href.param(PsGithub.CODE).iterator();
        if (!code.hasNext()) {
            throw new HttpException(
                HttpURLConnection.HTTP_BAD_REQUEST,
                "code is not provided by Github"
            );
        }
        return new Opt.Single<Identity>(
            this.fetch(this.token(href.toString(), code.next()))
        );
    }

    @Override
    public Response exit(final Response response,
        final Identity identity) {
        return response;
    }

    /**
     * Get user name from Github, with the token provided.
     * @param token Github access token
     * @return The user found in Github
     * @throws IOException If fails
     */
    private Identity fetch(final String token) throws IOException {
        final String uri = new Href(this.api).path("user")
            .with(PsGithub.ACCESS_TOKEN, token).toString();
        return PsGithub.parse(
            new JdkRequest(uri)
                .header("accept", "application/json")
                .fetch().as(RestResponse.class)
                .assertStatus(HttpURLConnection.HTTP_OK)
                .as(JsonResponse.class).json().readObject()
        );
    }

    /**
     * Retrieve Github access token.
     * @param home Home of this page
     * @param code Github "authorization code"
     * @return The token
     * @throws IOException If failed
     */
    private String token(final String home, final String code)
        throws IOException {
        final String uri = new Href(this.github)
            .path(PsGithub.LOGIN).path("oauth").path(PsGithub.ACCESS_TOKEN)
            .toString();
        final List<String> tokens = new JdkRequest(uri)
            .method("POST")
            .header("Accept", "application/xml")
            .body()
            .formParam("client_id", this.app)
            .formParam("redirect_uri", home)
            .formParam("client_secret", this.key)
            .formParam(PsGithub.CODE, code)
            .back()
            .fetch().as(RestResponse.class)
            .assertStatus(HttpURLConnection.HTTP_OK)
            .as(XmlResponse.class)
            .xml().xpath("/OAuth/access_token/text()");
        if (tokens.isEmpty()) {
            throw new HttpException(
                HttpURLConnection.HTTP_BAD_REQUEST, "No access token"
            );
        }
        return tokens.get(0);
    }

    /**
     * Make identity from JSON object.
     * @param json JSON received from Github
     * @return Identity found
     */
    private static Identity parse(final JsonObject json) {
        final Map<String, String> props =
            new HashMap<String, String>(json.size());
        // @checkstyle MultipleStringLiteralsCheck (1 line)
        props.put(PsGithub.LOGIN, json.getString(PsGithub.LOGIN, "unknown"));
        props.put("avatar", json.getString("avatar_url", "#"));
        return new Identity.Simple(
            String.format("urn:github:%d", json.getInt("id")), props
        );
    }
}
|
Fixing AvoidDuplicateLiterals for PsGithub class.
|
src/main/java/org/takes/facets/auth/social/PsGithub.java
|
Fixing AvoidDuplicateLiterals for PsGithub class.
|
|
Java
|
mit
|
e557e277cb55f7c9741451b8b05f5fdf3b870510
| 0
|
OpenAMEE/amee.platform.api
|
package com.amee.restlet.resource;
import com.amee.base.resource.ValidationResult;
import com.amee.restlet.AMEESpringServer;
import java.io.UnsupportedEncodingException;
import java.net.URLDecoder;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.jdom.Document;
import org.jdom.Element;
import org.jdom.JDOMException;
import org.json.JSONException;
import org.json.JSONObject;
import org.restlet.data.MediaType;
import org.restlet.data.Preference;
import org.restlet.data.Request;
import org.restlet.data.Response;
import org.restlet.data.Status;
import org.restlet.ext.json.JsonRepresentation;
import org.restlet.resource.DomRepresentation;
import org.restlet.resource.Representation;
// Base class for Restlet resource managers: exposes request helpers
// (attributes, matrix/query parameters, accepted media types) and translates
// the engine's JSON/DOM "Representation" results into Restlet responses.
public class ResourceManager {
private final Log log = LogFactory.getLog(getClass());
// The resource this manager serves; set once via init().
private GenericResource resource;
// Wires this manager to the resource it serves; called once before use.
public void init(GenericResource resource) {
this.resource = resource;
}
/**
* This is how we tell if the request came via HTTPS as SSL is terminated at the load balancer.
*
* @return true if the current request has come through the secure connector
*/
protected boolean isSecure() {
return getActiveServer().isSecure();
}
// --- Predicates over the "status" field of an engine JSON result. ---
protected boolean isOk(JSONObject result) {
return isStatus(result, "OK");
}
protected boolean isNotFound(JSONObject result) {
return isStatus(result, "NOT_FOUND");
}
protected boolean isNotAuthenticated(JSONObject result) {
return isStatus(result, "NOT_AUTHENTICATED");
}
protected boolean isNotAuthorized(JSONObject result) {
return isStatus(result, "NOT_AUTHORIZED");
}
protected boolean isInternalError(JSONObject result) {
return isStatus(result, "INTERNAL_ERROR");
}
protected boolean isInvalid(JSONObject result) {
return isStatus(result, "INVALID");
}
protected boolean isTimedOut(JSONObject result) {
return isStatus(result, "TIMED_OUT");
}
protected boolean isMediaTypeNotSupported(JSONObject result) {
return isStatus(result, "MEDIA_TYPE_NOT_SUPPORTED");
}
// True when the result is non-null and its "status" field equals the given
// value; a malformed result matches nothing.
protected boolean isStatus(JSONObject result, String status) {
try {
return (result != null) && result.has("status") && result.getString("status").equals(status);
} catch (JSONException e) {
// Swallow.
return false;
}
}
// Copies the named request attributes into a plain map, stripping matrix
// parameters and URL-decoding values (while preserving literal '+').
protected Map<String, String> getAttributes() {
Map<String, String> attributes = new HashMap<String, String>();
for (String attributeName : getAttributeNames()) {
if (getRequest().getAttributes().containsKey(attributeName)) {
Object a = getRequest().getAttributes().get(attributeName);
if (a instanceof String) {
// This removes any matrix parameters.
String value = ((String) a).split(";")[0];
try {
// URLDecoder decodes application/x-www-form-urlencoded Strings, which should only appear in the body of a POST.
// It decodes "+" symbols to spaces, which breaks ISO time formats that include a "+", so we manually encode them
// here and immediately decode them again in order to preserve them.
value = URLDecoder.decode(value.replace("+", "%2B"), "UTF-8").replace("%2B", "+");
} catch (UnsupportedEncodingException e) {
log.warn("getAttributes() Caught UnsupportedEncodingException: " + e.getMessage());
}
attributes.put(attributeName, value);
} else {
log.warn("getAttributes() Attribute value is not a String: " + attributeName);
}
} else {
log.warn("getAttributes() Attribute value not found: " + attributeName);
}
}
return attributes;
}
// Matrix parameters of the request reference, as a name/value map.
protected Map<String, String> getMatrixParameters() {
return getRequest().getResourceRef().getMatrixAsForm().getValuesMap();
}
// Query parameters of the request reference, decoded with '+' preserved.
protected Map<String, String> getQueryParameters() {
/*
* The query parameters could be retrieved by calling:
*
* getRequest().getResourceRef().getQueryAsForm().getValuesMap();
*
* The problem with that is that the Reference.getQueryAsForm() method calls a Form constructor which decodes the query string with
* URLDecoder.decode, which is appropriate only for application/x-www-form-urlencoded strings in POST bodies. It decodes "+" symbols
* to spaces, which breaks ISO time formats that include a "+", so we manually encode them here before passing them to the Form
* constructor, and immediately decode them again in order to preserve them.
*/
// Get query string
org.restlet.data.Reference ref = getRequest().getResourceRef();
String query = ref.getQuery(false);
if (query != null) {
// Encode + symbols
org.restlet.data.Form form = new org.restlet.data.Form(query.replace("+", "%2B"));
Map<String, String> params = form.getValuesMap();
// Decode + symbols again. Replacing the value of an existing key is not
// a structural modification, so it is safe during keySet iteration.
for (String param : params.keySet()) {
params.put(param, params.get(param).replace("%2B", "+"));
}
return params;
} else {
return new HashMap<String, String>();
}
}
// Media types accepted by the client, in preference order, as strings.
protected List<String> getAcceptedMediaTypes() {
List<String> acceptedMediaTypes = new ArrayList<String>();
for (Preference<MediaType> p : getRequest().getClientInfo().getAcceptedMediaTypes()) {
acceptedMediaTypes.add(p.getMetadata().toString());
}
return acceptedMediaTypes;
}
// --- Simple accessors delegating to the managed resource. ---
public GenericResource getResource() {
return resource;
}
public Request getRequest() {
return resource.getRequest();
}
public Response getResponse() {
return resource.getResponse();
}
public Set<String> getAttributeNames() {
return resource.getAttributeNames();
}
public void setAttributeNames(Set<String> attributeNames) {
resource.setAttributeNames(attributeNames);
}
public AMEESpringServer getActiveServer() {
return (AMEESpringServer) getRequest().getAttributes().get("activeServer");
}
// Converts an engine JSON result into a Restlet representation, mapping the
// engine status onto the HTTP response status. Only OK and INVALID results
// carry a response body; all other statuses set the HTTP status only.
protected Representation getJsonRepresentation(JSONObject result) {
Representation representation = null;
try {
if (result != null) {
// Add version.
result.put("version", getResource().getSupportedVersion().toString());
// Handle validationResult.
if (result.has("validationResult")) {
getResource().addValidationResult(new ValidationResult(result.getJSONObject("validationResult")));
}
// Handle status.
if (result.has("status")) {
if (isOk(result)) {
representation = new JsonRepresentation(result);
} else if (isInvalid(result)) {
getResponse().setStatus(Status.CLIENT_ERROR_BAD_REQUEST);
representation = new JsonRepresentation(result);
} else if (isNotFound(result)) {
getResponse().setStatus(Status.CLIENT_ERROR_NOT_FOUND);
} else if (isNotAuthenticated(result)) {
getResponse().setStatus(Status.CLIENT_ERROR_UNAUTHORIZED);
} else if (isNotAuthorized(result)) {
getResponse().setStatus(Status.CLIENT_ERROR_FORBIDDEN);
} else if (isTimedOut(result)) {
getResponse().setStatus(Status.SERVER_ERROR_SERVICE_UNAVAILABLE);
} else if (isMediaTypeNotSupported(result)) {
getResponse().setStatus(Status.CLIENT_ERROR_UNSUPPORTED_MEDIA_TYPE);
} else if (isInternalError(result)) {
getResponse().setStatus(Status.SERVER_ERROR_INTERNAL);
} else {
log.warn("getJsonRepresentation() Status code not handled: " + result.getString("status"));
}
}
}
} catch (JSONException e) {
throw new RuntimeException("Caught JSONException: " + e.getMessage(), e);
}
return representation;
}
// DOM counterpart of getJsonRepresentation(): maps the result document's
// Status element onto the HTTP response status. "ecoSpold" root documents
// are returned verbatim with their dedicated media type.
protected Representation getDomRepresentation(Document document) {
Representation representation = null;
if (document != null) {
Element result = document.getRootElement();
if ((result != null) && result.getName().equals("Representation")) {
// Add version.
result.addContent(new Element("Version").setText(getResource().getSupportedVersion().toString()));
// Handle ValidationResult.
if (result.getChild("ValidationResult") != null) {
getResource().addValidationResult(new ValidationResult(result.getChild("ValidationResult")));
}
// Handle status. Only OK and INVALID carry a response body.
if (result.getChild("Status") != null) {
String status = result.getChild("Status").getValue();
try {
if (status.equals("OK")) {
representation = new DomRepresentation(MediaType.APPLICATION_XML, ResourceBuildManager.DOM_OUTPUTTER.output(document));
} else if (status.equals("INVALID")) {
getResponse().setStatus(Status.CLIENT_ERROR_BAD_REQUEST);
representation = new DomRepresentation(MediaType.APPLICATION_XML, ResourceBuildManager.DOM_OUTPUTTER.output(document));
} else if (status.equals("NOT_FOUND")) {
getResponse().setStatus(Status.CLIENT_ERROR_NOT_FOUND);
} else if (status.equals("NOT_AUTHENTICATED")) {
getResponse().setStatus(Status.CLIENT_ERROR_UNAUTHORIZED);
} else if (status.equals("NOT_AUTHORIZED")) {
getResponse().setStatus(Status.CLIENT_ERROR_FORBIDDEN);
} else if (status.equals("TIMED_OUT")) {
getResponse().setStatus(Status.SERVER_ERROR_SERVICE_UNAVAILABLE);
} else if (status.equals("MEDIA_TYPE_NOT_SUPPORTED")) {
getResponse().setStatus(Status.CLIENT_ERROR_UNSUPPORTED_MEDIA_TYPE);
} else if (status.equals("INTERNAL_ERROR")) {
getResponse().setStatus(Status.SERVER_ERROR_INTERNAL);
} else {
log.warn("getDomRepresentation() Status code not handled: " + status);
}
} catch (JDOMException e) {
throw new RuntimeException("Caught JDOMException: " + e.getMessage(), e);
}
}
} else if ((result != null) && result.getName().equals("ecoSpold")) {
try {
representation = new DomRepresentation(MediaType.valueOf("application/x.ecospold+xml"), ResourceBuildManager.DOM_OUTPUTTER.output(document));
} catch (JDOMException e) {
throw new RuntimeException("Caught JDOMException: " + e.getMessage(), e);
}
}
}
return representation;
}
}
|
src/main/java/com/amee/restlet/resource/ResourceManager.java
|
package com.amee.restlet.resource;
import java.io.UnsupportedEncodingException;
import java.net.URLDecoder;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.jdom.Document;
import org.jdom.Element;
import org.jdom.JDOMException;
import org.json.JSONException;
import org.json.JSONObject;
import org.restlet.data.MediaType;
import org.restlet.data.Preference;
import org.restlet.data.Request;
import org.restlet.data.Response;
import org.restlet.data.Status;
import org.restlet.ext.json.JsonRepresentation;
import org.restlet.resource.DomRepresentation;
import org.restlet.resource.Representation;
import com.amee.base.resource.ValidationResult;
import com.amee.restlet.AMEESpringServer;
public class ResourceManager {
private final Log log = LogFactory.getLog(getClass());
private GenericResource resource;
// Wires this manager to the resource it serves; called once before use.
public void init(GenericResource resource) {
this.resource = resource;
}
/**
* This is how we tell if the request came via HTTPS as SSL is terminated at the load balancer.
*
* @return true if the current request has come through the secure connector
*/
protected boolean isSecure() {
// SSL terminates at the load balancer (see Javadoc above), so delegate to
// the active server's connector flag rather than the request itself.
return getActiveServer().isSecure();
}
// True when the engine result reports status "OK".
protected boolean isOk(JSONObject result) {
return isStatus(result, "OK");
}
protected boolean isNotFound(JSONObject result) {
return isStatus(result, "NOT_FOUND");
}
protected boolean isNotAuthenticated(JSONObject result) {
return isStatus(result, "NOT_AUTHENTICATED");
}
protected boolean isNotAuthorized(JSONObject result) {
return isStatus(result, "NOT_AUTHORIZED");
}
protected boolean isInternalError(JSONObject result) {
return isStatus(result, "INTERNAL_ERROR");
}
protected boolean isInvalid(JSONObject result) {
return isStatus(result, "INVALID");
}
protected boolean isTimedOut(JSONObject result) {
return isStatus(result, "TIMED_OUT");
}
protected boolean isMediaTypeNotSupported(JSONObject result) {
return isStatus(result, "MEDIA_TYPE_NOT_SUPPORTED");
}
protected boolean isStatus(JSONObject result, String status) {
try {
return (result != null) && result.has("status") && result.getString("status").equals(status);
} catch (JSONException e) {
// Swallow.
return false;
}
}
protected Map<String, String> getAttributes() {
Map<String, String> attributes = new HashMap<String, String>();
for (String attributeName : getAttributeNames()) {
if (getRequest().getAttributes().containsKey(attributeName)) {
Object a = getRequest().getAttributes().get(attributeName);
if (a instanceof String) {
// This removes any matrix parameters.
String value = ((String) a).split(";")[0];
try {
// URLDecoder decodes application/x-www-form-urlencoded Strings, which should only appear in the body of a POST.
// It decodes "+" symbols to spaces, which breaks ISO time formats that include a "+", so we manually encode them
// here and immediately decode them again in order to preserve them.
value = URLDecoder.decode(value.replace("+", "%2B"), "UTF-8").replace("%2B", "+");
} catch (UnsupportedEncodingException e) {
log.warn("getAttributes() Caught UnsupportedEncodingException: " + e.getMessage());
}
attributes.put(attributeName, value);
} else {
log.warn("getAttributes() Attribute value is not a String: " + attributeName);
}
} else {
log.warn("getAttributes() Attribute value not found: " + attributeName);
}
}
return attributes;
}
protected Map<String, String> getMatrixParameters() {
return getRequest().getResourceRef().getMatrixAsForm().getValuesMap();
}
protected Map<String, String> getQueryParameters() {
/*
* The query parameters could be retrieved by calling:
*
* getRequest().getResourceRef().getQueryAsForm().getValuesMap();
*
* The problem with that is that the Reference.getQueryAsForm() method calls a Form constructor which decodes the query string with
* URLDecoder.decode, which is appropriate only for application/x-www-form-urlencoded strings in POST bodies. It decodes "+" symbols
* to spaces, which breaks ISO time formats that include a "+", so we manually encode them here before passing them to the Form
* constructor, and immediately decode them again in order to preserve them.
*/
// Get query string
org.restlet.data.Reference ref = getRequest().getResourceRef();
String query = ref.getQuery(false);
// Encode + symbols
org.restlet.data.Form form = new org.restlet.data.Form(query.replace("+", "%2B"));
Map<String, String> params = form.getValuesMap();
// Decode + symbols again
for(String param : params.keySet()){
params.put(param, params.get(param).replace("%2B", "+"));
}
return params;
}
protected List<String> getAcceptedMediaTypes() {
List<String> acceptedMediaTypes = new ArrayList<String>();
for (Preference<MediaType> p : getRequest().getClientInfo().getAcceptedMediaTypes()) {
acceptedMediaTypes.add(p.getMetadata().toString());
}
return acceptedMediaTypes;
}
public GenericResource getResource() {
return resource;
}
public Request getRequest() {
return resource.getRequest();
}
public Response getResponse() {
return resource.getResponse();
}
public Set<String> getAttributeNames() {
return resource.getAttributeNames();
}
public void setAttributeNames(Set<String> attributeNames) {
resource.setAttributeNames(attributeNames);
}
public AMEESpringServer getActiveServer() {
return (AMEESpringServer) getRequest().getAttributes().get("activeServer");
}
protected Representation getJsonRepresentation(JSONObject result) {
Representation representation = null;
try {
if (result != null) {
// Add version.
result.put("version", getResource().getSupportedVersion().toString());
// Handle validationResult.
if (result.has("validationResult")) {
getResource().addValidationResult(new ValidationResult(result.getJSONObject("validationResult")));
}
// Handle status.
if (result.has("status")) {
if (isOk(result)) {
representation = new JsonRepresentation(result);
} else if (isInvalid(result)) {
getResponse().setStatus(Status.CLIENT_ERROR_BAD_REQUEST);
representation = new JsonRepresentation(result);
} else if (isNotFound(result)) {
getResponse().setStatus(Status.CLIENT_ERROR_NOT_FOUND);
} else if (isNotAuthenticated(result)) {
getResponse().setStatus(Status.CLIENT_ERROR_UNAUTHORIZED);
} else if (isNotAuthorized(result)) {
getResponse().setStatus(Status.CLIENT_ERROR_FORBIDDEN);
} else if (isTimedOut(result)) {
getResponse().setStatus(Status.SERVER_ERROR_SERVICE_UNAVAILABLE);
} else if (isMediaTypeNotSupported(result)) {
getResponse().setStatus(Status.CLIENT_ERROR_UNSUPPORTED_MEDIA_TYPE);
} else if (isInternalError(result)) {
getResponse().setStatus(Status.SERVER_ERROR_INTERNAL);
} else {
log.warn("getJsonRepresentation() Status code not handled: " + result.getString("status"));
}
}
}
} catch (JSONException e) {
throw new RuntimeException("Caught JSONException: " + e.getMessage(), e);
}
return representation;
}
protected Representation getDomRepresentation(Document document) {
Representation representation = null;
if (document != null) {
Element result = document.getRootElement();
if ((result != null) && result.getName().equals("Representation")) {
// Add version.
result.addContent(new Element("Version").setText(getResource().getSupportedVersion().toString()));
// Handle ValidationResult.
if (result.getChild("ValidationResult") != null) {
getResource().addValidationResult(new ValidationResult(result.getChild("ValidationResult")));
}
// Handle status.
if (result.getChild("Status") != null) {
String status = result.getChild("Status").getValue();
try {
if (status.equals("OK")) {
representation = new DomRepresentation(MediaType.APPLICATION_XML, ResourceBuildManager.DOM_OUTPUTTER.output(document));
} else if (status.equals("INVALID")) {
getResponse().setStatus(Status.CLIENT_ERROR_BAD_REQUEST);
representation = new DomRepresentation(MediaType.APPLICATION_XML, ResourceBuildManager.DOM_OUTPUTTER.output(document));
} else if (status.equals("NOT_FOUND")) {
getResponse().setStatus(Status.CLIENT_ERROR_NOT_FOUND);
} else if (status.equals("NOT_AUTHENTICATED")) {
getResponse().setStatus(Status.CLIENT_ERROR_UNAUTHORIZED);
} else if (status.equals("NOT_AUTHORIZED")) {
getResponse().setStatus(Status.CLIENT_ERROR_FORBIDDEN);
} else if (status.equals("TIMED_OUT")) {
getResponse().setStatus(Status.SERVER_ERROR_SERVICE_UNAVAILABLE);
} else if (status.equals("MEDIA_TYPE_NOT_SUPPORTED")) {
getResponse().setStatus(Status.CLIENT_ERROR_UNSUPPORTED_MEDIA_TYPE);
} else if (status.equals("INTERNAL_ERROR")) {
getResponse().setStatus(Status.SERVER_ERROR_INTERNAL);
} else {
log.warn("getDomRepresentation() Status code not handled: " + status);
}
} catch (JDOMException e) {
throw new RuntimeException("Caught JDOMException: " + e.getMessage(), e);
}
}
} else if ((result != null) && result.getName().equals("ecoSpold")) {
try {
representation = new DomRepresentation(MediaType.valueOf("application/x.ecospold+xml"), ResourceBuildManager.DOM_OUTPUTTER.output(document));
} catch (JDOMException e) {
throw new RuntimeException("Caught JDOMException: " + e.getMessage(), e);
}
}
}
return representation;
}
}
|
Fix NPE when decoding query strings PL-11376
|
src/main/java/com/amee/restlet/resource/ResourceManager.java
|
Fix NPE when decoding query strings PL-11376
|
|
Java
|
mit
|
8b35def1757017a6cc88b7a2d19865e55b819838
| 0
|
cyrille-leclerc/ec2-plugin,databricks/ec2-plugin,jenkinsci/ec2-plugin,vassilevsky/ec2-plugin,mtolan/ec2-plugin,fengxx/ec2-plugin,hudson3-plugins/ec2-plugin,fengxx/ec2-plugin,Khan/ec2-plugin,jenkinsci/ec2-plugin,barretts/ec2-plugin,ydubreuil/ec2-plugin,hudson3-plugins/ec2-plugin,kunickiaj/ec2-plugin,hudson3-plugins/ec2-plugin,kunickiaj/ec2-plugin,mtolan/ec2-plugin,arcivanov/ec2-plugin,barretts/ec2-plugin,vassilevsky/ec2-plugin,Vlatombe/ec2-plugin,ikikko/ec2-plugin,mkozell/ec2-plugin,barretts/ec2-plugin,Khan/ec2-plugin,Vlatombe/ec2-plugin,ydubreuil/ec2-plugin,mkozell/ec2-plugin,ydubreuil/ec2-plugin,fengxx/ec2-plugin,arcivanov/ec2-plugin,kunickiaj/ec2-plugin,Vlatombe/ec2-plugin,Khan/ec2-plugin,databricks/ec2-plugin,ikikko/ec2-plugin,mkozell/ec2-plugin,jenkinsci/ec2-plugin,mkozell/ec2-plugin,arcivanov/ec2-plugin,ydubreuil/ec2-plugin,cyrille-leclerc/ec2-plugin,arcivanov/ec2-plugin,databricks/ec2-plugin,mtolan/ec2-plugin,Khan/ec2-plugin,vassilevsky/ec2-plugin,jenkinsci/ec2-plugin,ikikko/ec2-plugin,Vlatombe/ec2-plugin,ikikko/ec2-plugin,mtolan/ec2-plugin,vassilevsky/ec2-plugin,cyrille-leclerc/ec2-plugin,hudson3-plugins/ec2-plugin,barretts/ec2-plugin,kunickiaj/ec2-plugin,fengxx/ec2-plugin,cyrille-leclerc/ec2-plugin,databricks/ec2-plugin
|
/*
* The MIT License
*
* Copyright (c) 2004-, Kohsuke Kawaguchi, Sun Microsystems, Inc., and a number of other of contributors
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package hudson.plugins.ec2;
import com.amazonaws.AmazonServiceException;
import hudson.Extension;
import hudson.Util;
import hudson.model.Describable;
import hudson.model.TaskListener;
import hudson.model.Descriptor;
import hudson.model.Descriptor.FormException;
import hudson.model.Hudson;
import hudson.model.Label;
import hudson.model.Node;
import hudson.model.labels.LabelAtom;
import hudson.plugins.ec2.util.DeviceMappingParser;
import hudson.util.FormValidation;
import hudson.util.ListBoxModel;
import java.io.IOException;
import java.io.PrintStream;
import java.net.URL;
import java.util.*;
import java.util.logging.Logger;
import javax.servlet.ServletException;
import jenkins.slaves.iterators.api.NodeIterator;
import org.apache.commons.codec.binary.Base64;
import org.apache.commons.lang.StringUtils;
import org.kohsuke.stapler.DataBoundConstructor;
import org.kohsuke.stapler.QueryParameter;
import com.amazonaws.AmazonClientException;
import com.amazonaws.auth.AWSCredentialsProvider;
import com.amazonaws.services.ec2.AmazonEC2;
import com.amazonaws.services.ec2.model.*;
/**
* Template of {@link EC2AbstractSlave} to launch.
*
* @author Kohsuke Kawaguchi
*/
public class SlaveTemplate implements Describable<SlaveTemplate> {
// --- Template configuration, bound from the Jenkins UI via the @DataBoundConstructor. ---
// AMI id the slave is launched from.
public final String ami;
// Human-readable template description, also used in the display name.
public final String description;
// Availability zone to launch in; blank means "let EC2 choose".
public final String zone;
// Spot bidding configuration; null means on-demand provisioning is used.
public final SpotConfiguration spotConfig;
// Comma-separated security group names (or ids, see getEc2SecurityGroups).
public final String securityGroups;
// Remote filesystem root for the slave.
public final String remoteFS;
// EC2 instance type to launch.
public final InstanceType type;
// Jenkins label expression string (never null; normalized via Util.fixNull).
public final String labels;
// Node usage mode (NORMAL / EXCLUSIVE).
public final Node.Mode mode;
// Script run on the slave after it comes up.
public final String initScript;
// Temporary directory on the slave.
public final String tmpDir;
// Raw EC2 user data; may contain ${SLAVE_NAME}/${JENKINS_URL} placeholders (see provisionSpot).
public final String userData;
// Executor count as a string; non-numeric values fall back to a type-based default.
public final String numExecutors;
// Remote user to connect as.
public final String remoteAdmin;
// Extra JVM options for the slave agent.
public final String jvmopts;
// VPC subnet id; blank means EC2-Classic / default networking.
public final String subnetId;
// Idle minutes before the slave is terminated/stopped.
public final String idleTerminationMinutes;
// IAM instance profile ARN to attach; blank means none.
public final String iamInstanceProfile;
// When true, attach instance-store (ephemeral) volumes automatically.
public final boolean useEphemeralDevices;
// User-supplied block device mapping string; parsed by DeviceMappingParser.
public final String customDeviceMapping;
// Max number of instances for this template; Integer.MAX_VALUE means uncapped.
public int instanceCap;
// Stop (rather than terminate) the instance when the node is released.
public final boolean stopOnTerminate;
// EC2 tags applied to launched instances/spot requests; may be null.
private final List<EC2Tag> tags;
public final boolean usePrivateDnsName;
public final boolean associatePublicIp;
// Owning cloud; transient, restored on deserialization.
protected transient EC2Cloud parent;
public final boolean useDedicatedTenancy;
// OS-specific AMI settings (e.g. UnixData with ssh port / root command prefix).
public AMITypeData amiType;
// Launch timeout in seconds; Integer.MAX_VALUE when unparsable/unset.
public int launchTimeout;
// Derived caches, rebuilt in readResolve().
private transient /*almost final*/ Set<LabelAtom> labelSet;
private transient /*almost final*/ Set<String> securityGroupSet;
/*
 * Necessary to handle reading from old configurations. The UnixData object is
 * created in readResolve()
 */
@Deprecated
public transient String sshPort;
@Deprecated
public transient String rootCommandPrefix;
/**
 * Binds all template settings from the Jenkins configuration form.
 * String counts (instance cap, launch timeout) are parsed leniently:
 * a blank instance cap means "uncapped" and an unparsable launch timeout
 * means "effectively no timeout".
 */
@DataBoundConstructor
public SlaveTemplate(String ami, String zone, SpotConfiguration spotConfig, String securityGroups, String remoteFS, InstanceType type, String labelString, Node.Mode mode, String description, String initScript, String tmpDir, String userData, String numExecutors, String remoteAdmin, AMITypeData amiType, String jvmopts, boolean stopOnTerminate, String subnetId, List<EC2Tag> tags, String idleTerminationMinutes, boolean usePrivateDnsName, String instanceCapStr, String iamInstanceProfile, boolean useEphemeralDevices, boolean useDedicatedTenancy, String launchTimeoutStr, boolean associatePublicIp, String customDeviceMapping) {
    this.ami = ami;
    this.zone = zone;
    this.spotConfig = spotConfig;
    this.securityGroups = securityGroups;
    this.remoteFS = remoteFS;
    this.amiType = amiType;
    this.type = type;
    // Normalize nullable strings so later code can assume non-null.
    this.labels = Util.fixNull(labelString);
    this.mode = mode;
    this.description = description;
    this.initScript = initScript;
    this.tmpDir = tmpDir;
    this.userData = userData;
    this.numExecutors = Util.fixNull(numExecutors).trim();
    this.remoteAdmin = remoteAdmin;
    this.jvmopts = jvmopts;
    this.stopOnTerminate = stopOnTerminate;
    this.subnetId = subnetId;
    this.tags = tags;
    this.idleTerminationMinutes = idleTerminationMinutes;
    this.usePrivateDnsName = usePrivateDnsName;
    this.associatePublicIp = associatePublicIp;
    this.useDedicatedTenancy = useDedicatedTenancy;
    // Blank cap means unlimited; NOTE(review): a non-numeric cap still throws
    // NumberFormatException here, unlike launchTimeoutStr below — confirm intended.
    if (null == instanceCapStr || instanceCapStr.equals("")) {
        this.instanceCap = Integer.MAX_VALUE;
    } else {
        this.instanceCap = Integer.parseInt(instanceCapStr);
    }
    // Unparsable (including null) timeout degrades to "no timeout".
    try {
        this.launchTimeout = Integer.parseInt(launchTimeoutStr);
    } catch (NumberFormatException nfe ) {
        this.launchTimeout = Integer.MAX_VALUE;
    }
    this.iamInstanceProfile = iamInstanceProfile;
    this.useEphemeralDevices = useEphemeralDevices;
    this.customDeviceMapping = customDeviceMapping;
    // Rebuild the derived label/security-group caches (same path as deserialization).
    readResolve(); // initialize
}
/**
 * Backward compatible constructor for reloading previous version data
 */
public SlaveTemplate(String ami, String zone, SpotConfiguration spotConfig, String securityGroups, String remoteFS, String sshPort, InstanceType type, String labelString, Node.Mode mode, String description, String initScript, String tmpDir, String userData, String numExecutors, String remoteAdmin, String rootCommandPrefix, String jvmopts, boolean stopOnTerminate, String subnetId, List<EC2Tag> tags, String idleTerminationMinutes, boolean usePrivateDnsName, String instanceCapStr, String iamInstanceProfile, boolean useEphemeralDevices, String launchTimeoutStr)
{
    // Older configurations stored rootCommandPrefix/sshPort directly; wrap them in a
    // UnixData AMI type and default the newer flags (dedicated tenancy, public IP,
    // custom device mapping) to off/none.
    this(ami, zone, spotConfig, securityGroups, remoteFS, type, labelString, mode, description, initScript, tmpDir, userData, numExecutors, remoteAdmin, new UnixData(rootCommandPrefix, sshPort), jvmopts, stopOnTerminate, subnetId, tags, idleTerminationMinutes, usePrivateDnsName, instanceCapStr, iamInstanceProfile, useEphemeralDevices, false, launchTimeoutStr, false, null);
}
/** @return the cloud this template belongs to. */
public EC2Cloud getParent() {
    return parent;
}

/** @return the configured spot bid type, or null when spot provisioning is not configured. */
public String getBidType() {
    return (spotConfig == null) ? null : spotConfig.spotInstanceBidType;
}

public String getLabelString() {
    return labels;
}

public Node.Mode getMode() {
    return mode;
}

/** @return a display name combining the template description and the AMI id. */
public String getDisplayName() {
    return String.format("%s (%s)", description, ami);
}

String getZone() {
    return zone;
}

public String getSecurityGroupString() {
    return securityGroups;
}

public Set<String> getSecurityGroupSet() {
    return securityGroupSet;
}

/**
 * Splits the comma-separated security group string into a set.
 *
 * @return an empty set when no groups are configured
 */
public Set<String> parseSecurityGroups() {
    if (securityGroups == null || securityGroups.trim().isEmpty()) {
        return Collections.emptySet();
    }
    Set<String> groups = new HashSet<String>();
    groups.addAll(Arrays.asList(securityGroups.split("\\s*,\\s*")));
    return groups;
}
/**
 * @return the configured executor count, falling back to a default derived
 *         from the instance type when the configured value is not numeric.
 */
public int getNumExecutors() {
    try {
        return Integer.parseInt(numExecutors);
    } catch (NumberFormatException e) {
        return EC2AbstractSlave.toNumExecutors(type);
    }
}

/**
 * @return the ssh port for unix AMIs, defaulting to 22 when unset or not numeric
 *         (non-unix AMI types always yield the default).
 */
public int getSshPort() {
    String port = "";
    if (amiType.isUnix()) {
        port = ((UnixData) amiType).getSshPort();
    }
    try {
        return Integer.parseInt(port);
    } catch (NumberFormatException e) {
        return 22;
    }
}
public String getRemoteAdmin() {
    return remoteAdmin;
}

/** @return the root command prefix for unix AMI types, or the empty string otherwise. */
public String getRootCommandPrefix() {
    if (amiType.isUnix()) {
        return ((UnixData) amiType).getRootCommandPrefix();
    }
    return "";
}

public String getSubnetId() {
    return subnetId;
}

public boolean getAssociatePublicIp() {
    return associatePublicIp;
}

/** @return an unmodifiable view of the configured tags, or null when none were configured. */
public List<EC2Tag> getTags() {
    return (tags == null) ? null : Collections.unmodifiableList(tags);
}

public String getidleTerminationMinutes() {
    return idleTerminationMinutes;
}

public boolean getUseDedicatedTenancy() {
    return useDedicatedTenancy;
}

public Set<LabelAtom> getLabelSet() {
    return labelSet;
}

public int getInstanceCap() {
    return instanceCap;
}

/** @return the instance cap rendered for the UI; empty string means uncapped. */
public String getInstanceCapStr() {
    return (instanceCap == Integer.MAX_VALUE) ? "" : String.valueOf(instanceCap);
}

/** @return the normalized spot bid price, or null when spot provisioning is not configured. */
public String getSpotMaxBidPrice() {
    return (spotConfig == null) ? null : SpotConfiguration.normalizeBid(spotConfig.spotMaxBidPrice);
}

public String getIamInstanceProfile() {
    return iamInstanceProfile;
}
/**
 * Provisions a new EC2 slave or starts a previously stopped on-demand instance.
 *
 * @return always non-null. This needs to be then added to {@link Hudson#addNode(Node)}.
 */
public EC2AbstractSlave provision(TaskListener listener) throws AmazonClientException, IOException {
    // A spot configuration means we bid for a spot instance; otherwise go on-demand.
    return (this.spotConfig != null) ? provisionSpot(listener) : provisionOndemand(listener);
}
/**
 * Provisions an On-demand EC2 slave by launching a new instance or
 * starting a previously-stopped instance.
 *
 * Builds a RunInstancesRequest from the template settings while, in parallel,
 * accumulating DescribeInstances filters describing the same configuration so
 * a matching stopped/stopping instance can be reused instead of launching.
 */
private EC2AbstractSlave provisionOndemand(TaskListener listener) throws AmazonClientException, IOException {
    PrintStream logger = listener.getLogger();
    AmazonEC2 ec2 = getParent().connect();
    try {
        logger.println("Launching " + ami + " for template " + description);
        LOGGER.info("Launching " + ami + " for template " + description);
        KeyPair keyPair = getKeyPair(ec2);
        RunInstancesRequest riRequest = new RunInstancesRequest(ami, 1, 1);
        InstanceNetworkInterfaceSpecification net = new InstanceNetworkInterfaceSpecification();
        // Either auto-attach ephemeral volumes or apply the user-supplied mapping.
        if (useEphemeralDevices) {
            setupEphemeralDeviceMapping(riRequest);
        }
        else {
            setupCustomDeviceMapping(riRequest);
        }
        // diFilters mirrors the launch settings so we can find reusable stopped instances.
        List<Filter> diFilters = new ArrayList<Filter>();
        diFilters.add(new Filter("image-id").withValues(ami));
        if (StringUtils.isNotBlank(getZone())) {
            Placement placement = new Placement(getZone());
            if (getUseDedicatedTenancy()) {
                placement.setTenancy("dedicated");
            }
            riRequest.setPlacement(placement);
            diFilters.add(new Filter("availability-zone").withValues(getZone()));
        }
        if (StringUtils.isNotBlank(getSubnetId())) {
            // With a public IP the subnet must go on the network interface,
            // otherwise directly on the run request.
            if (getAssociatePublicIp()) {
                net.setSubnetId(getSubnetId());
            }else{
                riRequest.setSubnetId(getSubnetId());
            }
            diFilters.add(new Filter("subnet-id").withValues(getSubnetId()));
            /* If we have a subnet ID then we can only use VPC security groups */
            if (!securityGroupSet.isEmpty()) {
                List<String> group_ids = getEc2SecurityGroups(ec2);
                if (!group_ids.isEmpty()) {
                    if (getAssociatePublicIp()) {
                        net.setGroups(group_ids);
                    }else{
                        riRequest.setSecurityGroupIds(group_ids);
                    }
                    diFilters.add(new Filter("instance.group-id").withValues(group_ids));
                }
            }
        } else {
            /* No subnet: we can use standard security groups by name */
            riRequest.setSecurityGroups(securityGroupSet);
            if (securityGroupSet.size() > 0)
                diFilters.add(new Filter("instance.group-name").withValues(securityGroupSet));
        }
        // EC2 user data must be base64-encoded.
        // NOTE(review): getBytes() uses the platform default charset — confirm UTF-8 assumption.
        String userDataString = Base64.encodeBase64String(userData.getBytes());
        riRequest.setUserData(userDataString);
        riRequest.setKeyName(keyPair.getKeyName());
        diFilters.add(new Filter("key-name").withValues(keyPair.getKeyName()));
        riRequest.setInstanceType(type.toString());
        diFilters.add(new Filter("instance-type").withValues(type.toString()));
        if (getAssociatePublicIp()) {
            net.setAssociatePublicIpAddress(true);
            net.setDeviceIndex(0);
            riRequest.withNetworkInterfaces(net);
        }
        // Build the instance tag set; record whether the user already supplied
        // the jenkins slave-type marker tag so we don't add it twice.
        boolean hasCustomTypeTag = false;
        HashSet<Tag> inst_tags = null;
        if (tags != null && !tags.isEmpty()) {
            inst_tags = new HashSet<Tag>();
            for(EC2Tag t : tags) {
                inst_tags.add(new Tag(t.getName(), t.getValue()));
                diFilters.add(new Filter("tag:"+t.getName()).withValues(t.getValue()));
                if (StringUtils.equals(t.getName(), EC2Tag.TAG_NAME_JENKINS_SLAVE_TYPE)) {
                    hasCustomTypeTag = true;
                }
            }
        }
        if (!hasCustomTypeTag) {
            if (inst_tags == null){
                inst_tags = new HashSet<Tag>();
            }
            inst_tags.add(new Tag(EC2Tag.TAG_NAME_JENKINS_SLAVE_TYPE, "demand"));
        }
        // Look for an existing stopped/stopping instance matching the template.
        DescribeInstancesRequest diRequest = new DescribeInstancesRequest();
        diFilters.add(new Filter("instance-state-name").withValues(InstanceStateName.Stopped.toString(),
                InstanceStateName.Stopping.toString()));
        diRequest.setFilters(diFilters);
        logger.println("Looking for existing instances with describe-instance: "+diRequest);
        LOGGER.fine("Looking for existing instances with describe-instance: "+diRequest);
        DescribeInstancesResult diResult = ec2.describeInstances(diRequest);
        Instance existingInstance = null;
        if (StringUtils.isNotBlank(getIamInstanceProfile())) {
            riRequest.setIamInstanceProfile(new IamInstanceProfileSpecification().withArn(getIamInstanceProfile()));
            // cannot filter on IAM Instance Profile, so search in result
            reservationLoop:
            for (Reservation reservation : diResult.getReservations()) {
                for (Instance instance : reservation.getInstances()) {
                    if (instance.getIamInstanceProfile() != null && instance.getIamInstanceProfile().getArn().equals(getIamInstanceProfile())) {
                        existingInstance = instance;
                        break reservationLoop;
                    }
                }
            }
        } else if (diResult.getReservations().size() > 0) {
            existingInstance = diResult.getReservations().get(0).getInstances().get(0);
        }
        if (existingInstance == null) {
            // Have to create a new instance
            Instance inst = ec2.runInstances(riRequest).getReservation().getInstances().get(0);
            /* Now that we have our instance, we can set tags on it */
            if (inst_tags != null) {
                // Retry tagging: the instance id may not be visible to the tagging
                // API immediately after runInstances (eventual consistency).
                for (int i = 0; i < 5; i++) {
                    try {
                        updateRemoteTags(ec2, inst_tags, inst.getInstanceId());
                        break;
                    } catch (AmazonServiceException e) {
                        if (e.getErrorCode().equals("InvalidInstanceRequestID.NotFound")) {
                            Thread.sleep(5000);
                            continue;
                        }
                        throw e;
                    }
                }
                // That was a remote request - we should also update our local instance data.
                inst.setTags(inst_tags);
            }
            logger.println("No existing instance found - created: "+inst);
            LOGGER.info("No existing instance found - created: "+inst);
            return newOndemandSlave(inst);
        }
        // Reuse path: restart the stopped instance and reattach (or create) its Jenkins node.
        logger.println("Found existing stopped instance: "+existingInstance);
        LOGGER.info("Found existing stopped instance: "+existingInstance);
        List<String> instances = new ArrayList<String>();
        instances.add(existingInstance.getInstanceId());
        StartInstancesRequest siRequest = new StartInstancesRequest(instances);
        StartInstancesResult siResult = ec2.startInstances(siRequest);
        logger.println("Starting existing instance: "+existingInstance+ " result:"+siResult);
        LOGGER.fine("Starting existing instance: "+existingInstance+ " result:"+siResult);
        for (EC2AbstractSlave ec2Node: NodeIterator.nodes(EC2AbstractSlave.class)){
            if (ec2Node.getInstanceId().equals(existingInstance.getInstanceId())) {
                logger.println("Found existing corresponding Jenkins slave: "+ec2Node);
                LOGGER.finer("Found existing corresponding Jenkins slave: "+ec2Node);
                return ec2Node;
            }
        }
        // Existing slave not found
        logger.println("Creating new Jenkins slave for existing instance: "+existingInstance);
        LOGGER.info("Creating new Jenkins slave for existing instance: "+existingInstance);
        return newOndemandSlave(existingInstance);
    } catch (FormException e) {
        throw new AssertionError(); // we should have discovered all configuration issues upfront
    } catch (InterruptedException e) {
        throw new RuntimeException(e);
    }
}
/**
 * Attaches the four instance-store volumes (ephemeral0..ephemeral3) to the first
 * free /dev/xvd* device names not already used by the AMI's own block device mapping.
 */
private void setupEphemeralDeviceMapping(RunInstancesRequest riRequest) {
    // Device names already claimed by the AMI's mapping must not be reused.
    final List<BlockDeviceMapping> oldDeviceMapping = getAmiBlockDeviceMappings();
    final Set<String> occupiedDevices = new HashSet<String>();
    for (final BlockDeviceMapping mapping: oldDeviceMapping ) {
        occupiedDevices.add(mapping.getDeviceName());
    }
    // Virtual names of the ephemeral volumes, assigned in order.
    final List<String> available = new ArrayList<String>(Arrays.asList(
            "ephemeral0", "ephemeral1", "ephemeral2", "ephemeral3"
    ));
    final List<BlockDeviceMapping> newDeviceMapping = new ArrayList<BlockDeviceMapping>(4);
    // Walk /dev/xvdb .. /dev/xvdz, skipping occupied names, until every
    // ephemeral volume has been given a device.
    for (char suffix = 'b'; suffix <= 'z' && !available.isEmpty(); suffix++) {
        final String deviceName = String.format("/dev/xvd%s", suffix);
        if (occupiedDevices.contains(deviceName)) continue;
        final BlockDeviceMapping newMapping = new BlockDeviceMapping()
                .withDeviceName(deviceName)
                .withVirtualName(available.get(0))
        ;
        newDeviceMapping.add(newMapping);
        available.remove(0);
    }
    riRequest.withBlockDeviceMappings(newDeviceMapping);
}
/**
 * Looks up the configured AMI's block device mapping.
 *
 * @throws AmazonClientException when the AMI cannot be found
 */
private List<BlockDeviceMapping> getAmiBlockDeviceMappings() {
    /*
     * AmazonEC2#describeImageAttribute does not work due to a bug
     * https://forums.aws.amazon.com/message.jspa?messageID=231972
     */
    List<Image> images = getParent().connect().describeImages().getImages();
    for (final Image image : images) {
        if (ami.equals(image.getImageId())) {
            return image.getBlockDeviceMappings();
        }
    }
    throw new AmazonClientException("Unable to get AMI device mapping for " + ami);
}
/** Applies the user-supplied block device mapping, if one was configured. */
private void setupCustomDeviceMapping(RunInstancesRequest riRequest) {
    if (!StringUtils.isBlank(customDeviceMapping)) {
        riRequest.setBlockDeviceMappings(DeviceMappingParser.parse(customDeviceMapping));
    }
}
/**
 * Provision a new slave for an EC2 spot instance to call back to Jenkins.
 *
 * Submits a spot instance request built from this template, tags the request,
 * and returns a Jenkins slave node named after a generated UUID (the instance
 * id is not known until the request is fulfilled).
 */
private EC2AbstractSlave provisionSpot(TaskListener listener) throws AmazonClientException, IOException {
    PrintStream logger = listener.getLogger();
    AmazonEC2 ec2 = getParent().connect();
    try {
        logger.println("Launching " + ami + " for template " + description);
        LOGGER.info("Launching " + ami + " for template " + description);
        KeyPair keyPair = getKeyPair(ec2);
        RequestSpotInstancesRequest spotRequest = new RequestSpotInstancesRequest();
        // Validate spot bid before making the request
        if (getSpotMaxBidPrice() == null) {
            throw new AmazonClientException("Invalid Spot price specified: " + getSpotMaxBidPrice());
        }
        spotRequest.setSpotPrice(getSpotMaxBidPrice());
        spotRequest.setInstanceCount(Integer.valueOf(1));
        spotRequest.setType(getBidType());
        LaunchSpecification launchSpecification = new LaunchSpecification();
        InstanceNetworkInterfaceSpecification net = new InstanceNetworkInterfaceSpecification();
        launchSpecification.setImageId(ami);
        launchSpecification.setInstanceType(type);
        if (StringUtils.isNotBlank(getZone())) {
            SpotPlacement placement = new SpotPlacement(getZone());
            launchSpecification.setPlacement(placement);
        }
        if (StringUtils.isNotBlank(getSubnetId())) {
            // With a public IP the subnet goes on the network interface,
            // otherwise directly on the launch specification.
            if (getAssociatePublicIp()) {
                net.setSubnetId(getSubnetId());
            } else {
                launchSpecification.setSubnetId(getSubnetId());
            }
            /* If we have a subnet ID then we can only use VPC security groups */
            if (!securityGroupSet.isEmpty()) {
                List<String> group_ids = getEc2SecurityGroups(ec2);
                if (!group_ids.isEmpty()) {
                    if (getAssociatePublicIp()) {
                        net.setGroups(group_ids);
                    } else {
                        ArrayList<GroupIdentifier> groups = new ArrayList<GroupIdentifier>();
                        for (String group_id : group_ids) {
                            GroupIdentifier group = new GroupIdentifier();
                            group.setGroupId(group_id);
                            groups.add(group);
                        }
                        if (!groups.isEmpty())
                            launchSpecification.setAllSecurityGroups(groups);
                    }
                }
            }
        } else {
            /* No subnet: we can use standard security groups by name */
            if (securityGroupSet.size() > 0)
                launchSpecification.setSecurityGroups(securityGroupSet);
        }
        // The slave must know the Jenkins server to register with as well
        // as the name of the node in Jenkins it should register as. The only
        // way to give information to the Spot slaves is through the ec2 user data
        String jenkinsUrl = Hudson.getInstance().getRootUrl();
        // We must provide a unique node name for the slave to connect to Jenkins.
        // We don't have the EC2 generated instance ID, or the Spot request ID
        // until after the instance is requested, which is then too late to set the
        // user-data for the request. Instead we generate a unique name from UUID
        // so that the slave has a unique name within Jenkins to register to.
        String slaveName = UUID.randomUUID().toString();
        String newUserData = "";
        // We want to allow node configuration with cloud-init and user-data,
        // while maintaining backward compatibility with old ami's
        // The 'new' way is triggered by the presence of '${SLAVE_NAME}'' in the user data
        // (which is not too much to ask)
        if (userData.contains("${SLAVE_NAME}")) {
            // The cloud-init compatible way
            newUserData = new String(userData);
            newUserData = newUserData.replace("${SLAVE_NAME}", slaveName);
            newUserData = newUserData.replace("${JENKINS_URL}", jenkinsUrl);
        } else {
            // The 'old' way - maitain full backward compatibility
            newUserData = "JENKINS_URL=" + jenkinsUrl +
                    "&SLAVE_NAME=" + slaveName +
                    "&USER_DATA=" + Base64.encodeBase64String(userData.getBytes());
        }
        String userDataString = Base64.encodeBase64String(newUserData.getBytes());
        launchSpecification.setUserData(userDataString);
        launchSpecification.setKeyName(keyPair.getKeyName());
        launchSpecification.setInstanceType(type.toString());
        if (getAssociatePublicIp()) {
            net.setAssociatePublicIpAddress(true);
            net.setDeviceIndex(0);
            launchSpecification.withNetworkInterfaces(net);
        }
        // Build the tag set; record whether the user already supplied the
        // jenkins slave-type marker tag so we don't add it twice.
        boolean hasCustomTypeTag = false;
        HashSet<Tag> inst_tags = null;
        if (tags != null && !tags.isEmpty()) {
            inst_tags = new HashSet<Tag>();
            for (EC2Tag t : tags) {
                inst_tags.add(new Tag(t.getName(), t.getValue()));
                if (StringUtils.equals(t.getName(), EC2Tag.TAG_NAME_JENKINS_SLAVE_TYPE)) {
                    hasCustomTypeTag = true;
                }
            }
        }
        if (!hasCustomTypeTag) {
            // FIX: inst_tags is null when no tags are configured, which previously caused a
            // NullPointerException here; provisionOndemand already guards this the same way.
            if (inst_tags == null) {
                inst_tags = new HashSet<Tag>();
            }
            inst_tags.add(new Tag(EC2Tag.TAG_NAME_JENKINS_SLAVE_TYPE, "spot"));
        }
        if (StringUtils.isNotBlank(getIamInstanceProfile())) {
            launchSpecification.setIamInstanceProfile(new IamInstanceProfileSpecification().withArn(getIamInstanceProfile()));
        }
        spotRequest.setLaunchSpecification(launchSpecification);
        // Make the request for a new Spot instance
        RequestSpotInstancesResult reqResult = ec2.requestSpotInstances(spotRequest);
        List<SpotInstanceRequest> reqInstances = reqResult.getSpotInstanceRequests();
        if (reqInstances.size() <= 0) {
            throw new AmazonClientException("No spot instances found");
        }
        SpotInstanceRequest spotInstReq = reqInstances.get(0);
        if (spotInstReq == null) {
            throw new AmazonClientException("Spot instance request is null");
        }
        /* Now that we have our Spot request, we can set tags on it */
        if (inst_tags != null) {
            // Retry tagging: the request id may not be visible to the tagging
            // API immediately after requestSpotInstances (eventual consistency).
            for (int i = 0; i < 5; i++) {
                try {
                    updateRemoteTags(ec2, inst_tags, spotInstReq.getSpotInstanceRequestId());
                    break;
                } catch (AmazonServiceException e) {
                    if (e.getErrorCode().equals("InvalidSpotInstanceRequestID.NotFound")) {
                        Thread.sleep(5000);
                        continue;
                    }
                    throw e;
                }
            }
            // That was a remote request - we should also update our local instance data.
            spotInstReq.setTags(inst_tags);
        }
        logger.println("Spot instance id in provision: " + spotInstReq.getSpotInstanceRequestId());
        LOGGER.info("Spot instance id in provision: " + spotInstReq.getSpotInstanceRequestId());
        return newSpotSlave(spotInstReq, slaveName);
    } catch (FormException e) {
        throw new AssertionError(); // we should have discovered all configuration issues upfront
    } catch (InterruptedException e) {
        throw new RuntimeException(e);
    }
}
/**
 * Wraps the given EC2 instance in a new on-demand Jenkins slave node,
 * copying the connection and lifecycle settings from this template.
 */
protected EC2OndemandSlave newOndemandSlave(Instance inst) throws FormException, IOException {
    return new EC2OndemandSlave(inst.getInstanceId(), description, remoteFS, getNumExecutors(), labels, mode, initScript, tmpDir, remoteAdmin, jvmopts, stopOnTerminate, idleTerminationMinutes, inst.getPublicDnsName(), inst.getPrivateDnsName(), EC2Tag.fromAmazonTags(inst.getTags()), parent.name, usePrivateDnsName, useDedicatedTenancy, getLaunchTimeout(), amiType);
}
protected EC2SpotSlave newSpotSlave(SpotInstanceRequest sir, String name) throws FormException, IOException {
return new EC2SpotSlave(name, sir.getSpotInstanceRequestId(), description, remoteFS, getNumExecutors(), mode, initScript, tmpDir, labels, remoteAdmin, jvmopts, idleTerminationMinutes, EC2Tag.fromAmazonTags(sir.getTags()), parent.name, usePrivateDnsName, getLaunchTimeout(), amiType);
}
/**
 * Resolves the EC2 key pair that matches the private key configured on the parent cloud.
 *
 * @param ec2 connected EC2 client used for the lookup
 * @return the matching key pair, never {@code null}
 * @throws AmazonClientException if no key pair on EC2 matches the configured private key
 */
private KeyPair getKeyPair(AmazonEC2 ec2) throws IOException, AmazonClientException {
    final KeyPair match = parent.getPrivateKey().find(ec2);
    if (match != null) {
        return match;
    }
    throw new AmazonClientException("No matching keypair found on EC2. Is the EC2 private key a valid one?");
}
/**
 * Pushes the given tag set to EC2 for each of the given resource ids
 * (instance ids or spot request ids). This is a remote call; callers are
 * responsible for mirroring the tags onto their local objects.
 */
private void updateRemoteTags(AmazonEC2 ec2, Collection<Tag> inst_tags, String... params) {
    final CreateTagsRequest request = new CreateTagsRequest()
            .withResources(params)
            .withTags(inst_tags);
    ec2.createTags(request);
}
/**
 * Resolves the configured security group names/ids into VPC security group ids
 * usable in a VPC context.
 *
 * Lookup is tried first by "group-name", then (if nothing matched) by "group-id".
 * A group is only accepted if it belongs to a VPC that contains an available
 * subnet matching {@link #getSubnetId()}.
 *
 * @return one group id per configured security group
 * @throws AmazonClientException if any configured group could not be resolved
 *         to a matching VPC security group
 */
private List<String> getEc2SecurityGroups(AmazonEC2 ec2) throws AmazonClientException{
    List<String> group_ids = new ArrayList<String>();
    // First attempt: treat the configured values as group names.
    DescribeSecurityGroupsResult group_result = getSecurityGroupsBy("group-name", securityGroupSet, ec2);
    if (group_result.getSecurityGroups().size() == 0) {
        // Fall back to treating them as group ids.
        group_result = getSecurityGroupsBy("group-id", securityGroupSet, ec2);
    }
    for (SecurityGroup group : group_result.getSecurityGroups()) {
        if (group.getVpcId() != null && !group.getVpcId().isEmpty()) {
            // Accept the group only if its VPC has an available subnet matching ours.
            List<Filter> filters = new ArrayList<Filter>();
            filters.add(new Filter("vpc-id").withValues(group.getVpcId()));
            filters.add(new Filter("state").withValues("available"));
            filters.add(new Filter("subnet-id").withValues(getSubnetId()));
            DescribeSubnetsRequest subnet_req = new DescribeSubnetsRequest();
            subnet_req.withFilters(filters);
            DescribeSubnetsResult subnet_result = ec2.describeSubnets(subnet_req);
            List<Subnet> subnets = subnet_result.getSubnets();
            if(subnets != null && !subnets.isEmpty()) {
                group_ids.add(group.getGroupId());
            }
        }
    }
    // Every configured group must have resolved, otherwise the launch would
    // silently use fewer groups than the user asked for.
    if (securityGroupSet.size() != group_ids.size()) {
        throw new AmazonClientException( "Security groups must all be VPC security groups to work in a VPC context" );
    }
    return group_ids;
}
/**
 * Queries EC2 for security groups whose given attribute ({@code filterName},
 * e.g. "group-name" or "group-id") matches any of {@code filterValues}.
 */
private DescribeSecurityGroupsResult getSecurityGroupsBy(String filterName, Set<String> filterValues, AmazonEC2 ec2) {
    final Filter filter = new Filter(filterName).withValues(filterValues);
    return ec2.describeSecurityGroups(new DescribeSecurityGroupsRequest().withFilters(filter));
}
/**
 * Provisions a new EC2 slave based on the currently running instance on EC2,
 * instead of starting a new one.
 *
 * @param instanceId id of an already-running EC2 instance to wrap as a Jenkins node
 * @throws AmazonClientException if the instance cannot be found (previously this
 *         surfaced as a bare IndexOutOfBoundsException)
 */
public EC2AbstractSlave attach(String instanceId, TaskListener listener) throws AmazonClientException, IOException {
    PrintStream logger = listener.getLogger();
    AmazonEC2 ec2 = getParent().connect();
    try {
        logger.println("Attaching to "+instanceId);
        LOGGER.info("Attaching to "+instanceId);
        DescribeInstancesRequest request = new DescribeInstancesRequest();
        request.setInstanceIds(Collections.singletonList(instanceId));
        // Guard against an empty result so an unknown id produces a meaningful
        // AmazonClientException instead of an IndexOutOfBoundsException.
        List<Reservation> reservations = ec2.describeInstances(request).getReservations();
        if (reservations.isEmpty() || reservations.get(0).getInstances().isEmpty()) {
            throw new AmazonClientException("No instance found with id " + instanceId);
        }
        Instance inst = reservations.get(0).getInstances().get(0);
        return newOndemandSlave(inst);
    } catch (FormException e) {
        // we should have discovered all configuration issues upfront; keep the
        // cause so the stack trace is debuggable if this ever fires.
        throw new AssertionError(e);
    }
}
/**
 * Initializes data structure that we don't persist.
 *
 * Called by XStream after deserialization and by the constructor; rebuilds the
 * transient label and security-group sets and migrates legacy configuration.
 *
 * @return this template, fully initialized
 */
protected Object readResolve() {
    labelSet = Label.parse(labels);
    securityGroupSet = parseSecurityGroups();
    /**
     * In releases of this plugin prior to 1.18, template-specific instance caps could be configured
     * but were not enforced. As a result, it was possible to have the instance cap for a template
     * be configured to 0 (zero) with no ill effects. Starting with version 1.18, template-specific
     * instance caps are enforced, so if a configuration has a cap of zero for a template, no instances
     * will be launched from that template. Since there is no practical value of intentionally setting
     * the cap to zero, this block will override such a setting to a value that means 'no cap'.
     */
    if (instanceCap == 0) {
        instanceCap = Integer.MAX_VALUE;
    }
    // Migration from pre-AMITypeData configs: the deprecated sshPort/rootCommandPrefix
    // fields are folded into a UnixData instance.
    if (amiType == null) {
        amiType = new UnixData(rootCommandPrefix, sshPort);
    }
    return this;
}
/** {@inheritDoc} */
public Descriptor<SlaveTemplate> getDescriptor() {
    return Hudson.getInstance().getDescriptor(getClass());
}
/** @return the launch timeout in seconds; non-positive values mean "no timeout". */
public int getLaunchTimeout() {
    return launchTimeout <= 0 ? Integer.MAX_VALUE : launchTimeout;
}
/** @return the launch timeout for form display; empty string when unbounded. */
public String getLaunchTimeoutStr() {
    if (launchTimeout==Integer.MAX_VALUE) {
        return "";
    } else {
        return String.valueOf(launchTimeout);
    }
}
/** @return true if this template launches Windows slaves. */
public boolean isWindowsSlave()
{
    return amiType.isWindows();
}
/** @return true if this template launches Unix slaves. */
public boolean isUnixSlave()
{
    return amiType.isUnix();
}
/** @return the Windows admin password, or empty string for non-Windows AMIs. */
public String getAdminPassword()
{
    return amiType.isWindows() ? ((WindowsData)amiType).getPassword() : "";
}
/** @return whether WinRM should use HTTPS; always false for non-Windows AMIs. */
private boolean isUseHTTPS() {
    return amiType.isWindows() ? ((WindowsData)amiType).isUseHTTPS() : false;
}
@Extension
public static final class DescriptorImpl extends Descriptor<SlaveTemplate> {
@Override
public String getDisplayName() {
return null;
}
public List<Descriptor<AMITypeData>> getAMITypeDescriptors()
{
return Hudson.getInstance().<AMITypeData,Descriptor<AMITypeData>>getDescriptorList(AMITypeData.class);
}
/**
* Since this shares much of the configuration with {@link EC2Computer}, check its help page, too.
*/
@Override
public String getHelpFile(String fieldName) {
String p = super.getHelpFile(fieldName);
if (p==null)
p = Hudson.getInstance().getDescriptor(EC2OndemandSlave.class).getHelpFile(fieldName);
if (p==null)
p = Hudson.getInstance().getDescriptor(EC2SpotSlave.class).getHelpFile(fieldName);
return p;
}
/***
* Check that the AMI requested is available in the cloud and can be used.
*/
public FormValidation doValidateAmi(
@QueryParameter boolean useInstanceProfileForCredentials,
@QueryParameter String accessId, @QueryParameter String secretKey,
@QueryParameter String ec2endpoint, @QueryParameter String region,
final @QueryParameter String ami) throws IOException {
AWSCredentialsProvider credentialsProvider = EC2Cloud.createCredentialsProvider(useInstanceProfileForCredentials, accessId, secretKey);
AmazonEC2 ec2;
if (region != null) {
ec2 = EC2Cloud.connect(credentialsProvider, AmazonEC2Cloud.getEc2EndpointUrl(region));
} else {
ec2 = EC2Cloud.connect(credentialsProvider, new URL(ec2endpoint));
}
if(ec2!=null) {
try {
List<String> images = new LinkedList<String>();
images.add(ami);
List<String> owners = new LinkedList<String>();
List<String> users = new LinkedList<String>();
DescribeImagesRequest request = new DescribeImagesRequest();
request.setImageIds(images);
request.setOwners(owners);
request.setExecutableUsers(users);
List<Image> img = ec2.describeImages(request).getImages();
if(img==null || img.isEmpty()) {
// de-registered AMI causes an empty list to be returned. so be defensive
// against other possibilities
return FormValidation.error("No such AMI, or not usable with this accessId: "+ami);
}
String ownerAlias = img.get(0).getImageOwnerAlias();
return FormValidation.ok(img.get(0).getImageLocation() +
(ownerAlias != null ? " by " + ownerAlias : ""));
} catch (AmazonClientException e) {
return FormValidation.error(e.getMessage());
}
} else
return FormValidation.ok(); // can't test
}
public FormValidation doCheckLabelString(@QueryParameter String value, @QueryParameter Node.Mode mode) {
if (mode == Node.Mode.EXCLUSIVE && (value == null || value.trim() == "")) {
return FormValidation.warning("You may want to assign labels to this node;" +
" it's marked to only run jobs that are exclusively tied to itself or a label.");
}
return FormValidation.ok();
}
public FormValidation doCheckIdleTerminationMinutes(@QueryParameter String value) {
if (value == null || value.trim() == "") return FormValidation.ok();
try {
int val = Integer.parseInt(value);
if (val >= -59) return FormValidation.ok();
}
catch ( NumberFormatException nfe ) {}
return FormValidation.error("Idle Termination time must be a greater than -59 (or null)");
}
public FormValidation doCheckInstanceCapStr(@QueryParameter String value) {
if (value == null || value.trim() == "") return FormValidation.ok();
try {
int val = Integer.parseInt(value);
if (val > 0) return FormValidation.ok();
} catch ( NumberFormatException nfe ) {}
return FormValidation.error("InstanceCap must be a non-negative integer (or null)");
}
public FormValidation doCheckLaunchTimeoutStr(@QueryParameter String value) {
if (value == null || value.trim() == "") return FormValidation.ok();
try {
int val = Integer.parseInt(value);
if (val >= 0) return FormValidation.ok();
} catch ( NumberFormatException nfe ) {}
return FormValidation.error("Launch Timeout must be a non-negative integer (or null)");
}
public ListBoxModel doFillZoneItems( @QueryParameter boolean useInstanceProfileForCredentials,
@QueryParameter String accessId,
@QueryParameter String secretKey,
@QueryParameter String region)
throws IOException, ServletException
{
AWSCredentialsProvider credentialsProvider = EC2Cloud.createCredentialsProvider(useInstanceProfileForCredentials, accessId, secretKey);
return EC2AbstractSlave.fillZoneItems(credentialsProvider, region);
}
/* Validate the Spot Max Bid Price to ensure that it is a floating point number >= .001 */
public FormValidation doCheckSpotMaxBidPrice( @QueryParameter String spotMaxBidPrice ) {
if(SpotConfiguration.normalizeBid(spotMaxBidPrice) != null){
return FormValidation.ok();
}
return FormValidation.error("Not a correct bid price");
}
// Retrieve the availability zones for the region
private ArrayList<String> getAvailabilityZones(AmazonEC2 ec2) {
ArrayList<String> availabilityZones = new ArrayList<String>();
DescribeAvailabilityZonesResult zones = ec2.describeAvailabilityZones();
List<AvailabilityZone> zoneList = zones.getAvailabilityZones();
for (AvailabilityZone z : zoneList) {
availabilityZones.add(z.getZoneName());
}
return availabilityZones;
}
/**
* Populates the Bid Type Drop down on the slave template config.
* @return
*/
public ListBoxModel doFillBidTypeItems() {
ListBoxModel items = new ListBoxModel();
items.add(SpotInstanceType.OneTime.toString());
items.add(SpotInstanceType.Persistent.toString());
return items;
}
/* Check the current Spot price of the selected instance type for the selected region */
public FormValidation doCurrentSpotPrice( @QueryParameter boolean useInstanceProfileForCredentials,
@QueryParameter String accessId, @QueryParameter String secretKey,
@QueryParameter String region, @QueryParameter String type,
@QueryParameter String zone ) throws IOException, ServletException {
String cp = "";
String zoneStr = "";
// Connect to the EC2 cloud with the access id, secret key, and region queried from the created cloud
AWSCredentialsProvider credentialsProvider = EC2Cloud.createCredentialsProvider(useInstanceProfileForCredentials, accessId, secretKey);
AmazonEC2 ec2 = EC2Cloud.connect(credentialsProvider, AmazonEC2Cloud.getEc2EndpointUrl(region));
if(ec2!=null) {
try {
// Build a new price history request with the currently selected type
DescribeSpotPriceHistoryRequest request = new DescribeSpotPriceHistoryRequest();
// If a zone is specified, set the availability zone in the request
// Else, proceed with no availability zone which will result with the cheapest Spot price
if(getAvailabilityZones(ec2).contains(zone)){
request.setAvailabilityZone(zone);
zoneStr = zone + " availability zone";
} else {
zoneStr = region + " region";
}
/*
* Iterate through the AWS instance types to see if can find a match for the databound
* String type. This is necessary because the AWS API needs the instance type
* string formatted a particular way to retrieve prices and the form gives us the strings
* in a different format. For example "T1Micro" vs "t1.micro".
*/
InstanceType ec2Type = null;
for(InstanceType it : InstanceType.values()){
if (it.name().toString().equals(type)){
ec2Type = it;
break;
}
}
/*
* If the type string cannot be matched with an instance type,
* throw a Form error
*/
if(ec2Type == null){
return FormValidation.error("Could not resolve instance type: " + type);
}
Collection<String> instanceType = new ArrayList<String>();
instanceType.add(ec2Type.toString());
request.setInstanceTypes(instanceType);
request.setStartTime(new Date());
// Retrieve the price history request result and store the current price
DescribeSpotPriceHistoryResult result = ec2.describeSpotPriceHistory(request);
if(!result.getSpotPriceHistory().isEmpty()){
SpotPrice currentPrice = result.getSpotPriceHistory().get(0);
cp = currentPrice.getSpotPrice();
}
} catch (AmazonClientException e) {
return FormValidation.error(e.getMessage());
}
}
/*
* If we could not return the current price of the instance display an error
* Else, remove the additional zeros from the current price and return it to the interface
* in the form of a message
*/
if(cp.isEmpty()){
return FormValidation.error("Could not retrieve current Spot price");
} else {
cp = cp.substring(0, cp.length() - 3);
return FormValidation.ok("The current Spot price for a " + type +
" in the " + zoneStr + " is $" + cp );
}
}
}
private static final Logger LOGGER = Logger.getLogger(SlaveTemplate.class.getName());
}
|
src/main/java/hudson/plugins/ec2/SlaveTemplate.java
|
/*
* The MIT License
*
* Copyright (c) 2004-, Kohsuke Kawaguchi, Sun Microsystems, Inc., and a number of other of contributors
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package hudson.plugins.ec2;
import com.amazonaws.AmazonServiceException;
import hudson.Extension;
import hudson.Util;
import hudson.model.Describable;
import hudson.model.TaskListener;
import hudson.model.Descriptor;
import hudson.model.Descriptor.FormException;
import hudson.model.Hudson;
import hudson.model.Label;
import hudson.model.Node;
import hudson.model.labels.LabelAtom;
import hudson.plugins.ec2.util.DeviceMappingParser;
import hudson.util.FormValidation;
import hudson.util.ListBoxModel;
import java.io.IOException;
import java.io.PrintStream;
import java.net.URL;
import java.util.*;
import java.util.logging.Logger;
import javax.servlet.ServletException;
import jenkins.slaves.iterators.api.NodeIterator;
import org.apache.commons.codec.binary.Base64;
import org.apache.commons.lang.StringUtils;
import org.kohsuke.stapler.DataBoundConstructor;
import org.kohsuke.stapler.QueryParameter;
import com.amazonaws.AmazonClientException;
import com.amazonaws.auth.AWSCredentialsProvider;
import com.amazonaws.services.ec2.AmazonEC2;
import com.amazonaws.services.ec2.model.*;
/**
* Template of {@link EC2AbstractSlave} to launch.
*
* @author Kohsuke Kawaguchi
*/
public class SlaveTemplate implements Describable<SlaveTemplate> {
// ---- Template configuration, bound from the UI via the @DataBoundConstructor ----
public final String ami;                    // AMI id to launch
public final String description;            // human-readable template name
public final String zone;                   // availability zone, may be blank
public final SpotConfiguration spotConfig;  // non-null => provision spot instances
public final String securityGroups;         // comma-separated group names or ids
public final String remoteFS;               // remote workspace root on the slave
public final InstanceType type;             // EC2 instance type
public final String labels;                 // whitespace-separated Jenkins labels
public final Node.Mode mode;                // NORMAL or EXCLUSIVE usage mode
public final String initScript;             // script run once after first boot
public final String tmpDir;
public final String userData;               // EC2 user-data passed to the instance
public final String numExecutors;           // string form; parsed in getNumExecutors()
public final String remoteAdmin;            // login user on the slave
public final String jvmopts;
public final String subnetId;               // VPC subnet, may be blank
public final String idleTerminationMinutes;
public final String iamInstanceProfile;     // IAM instance profile ARN, may be blank
public final boolean useEphemeralDevices;   // attach all ephemeral devices on launch
public final String customDeviceMapping;    // custom block device mapping string
public int instanceCap;                     // max instances for this template (0 migrated to MAX_VALUE in readResolve)
public final boolean stopOnTerminate;       // stop instead of terminate when idle
private final List<EC2Tag> tags;
public final boolean usePrivateDnsName;
public final boolean associatePublicIp;
protected transient EC2Cloud parent;        // owning cloud; set externally, not persisted
public final boolean useDedicatedTenancy;
public AMITypeData amiType;                 // Unix/Windows specifics; may be rebuilt in readResolve
public int launchTimeout;                   // seconds; non-positive means unbounded
private transient /*almost final*/ Set<LabelAtom> labelSet;        // derived from labels in readResolve
private transient /*almost final*/ Set<String> securityGroupSet;   // derived from securityGroups in readResolve
/*
 * Necessary to handle reading from old configurations. The UnixData object is
 * created in readResolve()
 */
@Deprecated
public transient String sshPort;
@Deprecated
public transient String rootCommandPrefix;
/**
 * Data-bound constructor invoked by Jenkins when the template is configured in the UI.
 * String capacity/timeout fields are parsed leniently: blank or unparsable values
 * fall back to "unbounded" (Integer.MAX_VALUE).
 */
@DataBoundConstructor
public SlaveTemplate(String ami, String zone, SpotConfiguration spotConfig, String securityGroups, String remoteFS, InstanceType type, String labelString, Node.Mode mode, String description, String initScript, String tmpDir, String userData, String numExecutors, String remoteAdmin, AMITypeData amiType, String jvmopts, boolean stopOnTerminate, String subnetId, List<EC2Tag> tags, String idleTerminationMinutes, boolean usePrivateDnsName, String instanceCapStr, String iamInstanceProfile, boolean useEphemeralDevices, boolean useDedicatedTenancy, String launchTimeoutStr, boolean associatePublicIp, String customDeviceMapping) {
    this.ami = ami;
    this.zone = zone;
    this.spotConfig = spotConfig;
    this.securityGroups = securityGroups;
    this.remoteFS = remoteFS;
    this.amiType = amiType;
    this.type = type;
    this.labels = Util.fixNull(labelString);
    this.mode = mode;
    this.description = description;
    this.initScript = initScript;
    this.tmpDir = tmpDir;
    this.userData = userData;
    this.numExecutors = Util.fixNull(numExecutors).trim();
    this.remoteAdmin = remoteAdmin;
    this.jvmopts = jvmopts;
    this.stopOnTerminate = stopOnTerminate;
    this.subnetId = subnetId;
    this.tags = tags;
    this.idleTerminationMinutes = idleTerminationMinutes;
    this.usePrivateDnsName = usePrivateDnsName;
    this.associatePublicIp = associatePublicIp;
    this.useDedicatedTenancy = useDedicatedTenancy;
    // Blank cap means "no limit".
    if (null == instanceCapStr || instanceCapStr.equals("")) {
        this.instanceCap = Integer.MAX_VALUE;
    } else {
        this.instanceCap = Integer.parseInt(instanceCapStr);
    }
    // Unparsable timeout (including blank) means "no timeout".
    try {
        this.launchTimeout = Integer.parseInt(launchTimeoutStr);
    } catch (NumberFormatException nfe ) {
        this.launchTimeout = Integer.MAX_VALUE;
    }
    this.iamInstanceProfile = iamInstanceProfile;
    this.useEphemeralDevices = useEphemeralDevices;
    this.customDeviceMapping = customDeviceMapping;
    readResolve(); // initialize transient label/security-group sets
}
/**
 * Backward compatible constructor for reloading previous version data.
 * Wraps the legacy rootCommandPrefix/sshPort pair into a {@link UnixData}
 * and delegates to the primary constructor with the newer options disabled.
 */
public SlaveTemplate(String ami, String zone, SpotConfiguration spotConfig, String securityGroups, String remoteFS, String sshPort, InstanceType type, String labelString, Node.Mode mode, String description, String initScript, String tmpDir, String userData, String numExecutors, String remoteAdmin, String rootCommandPrefix, String jvmopts, boolean stopOnTerminate, String subnetId, List<EC2Tag> tags, String idleTerminationMinutes, boolean usePrivateDnsName, String instanceCapStr, String iamInstanceProfile, boolean useEphemeralDevices, String launchTimeoutStr)
{
    this(ami, zone, spotConfig, securityGroups, remoteFS, type, labelString, mode, description, initScript, tmpDir, userData, numExecutors, remoteAdmin, new UnixData(rootCommandPrefix, sshPort), jvmopts, stopOnTerminate, subnetId, tags, idleTerminationMinutes, usePrivateDnsName, instanceCapStr, iamInstanceProfile, useEphemeralDevices, false, launchTimeoutStr, false, null);
}
/** @return the cloud this template belongs to (set externally; transient). */
public EC2Cloud getParent() {
    return parent;
}
/** @return the spot bid type (one-time/persistent), or null when not a spot template. */
public String getBidType(){
    if(spotConfig == null)
        return null;
    return spotConfig.spotInstanceBidType;
}
/** @return the raw whitespace-separated label string (never null). */
public String getLabelString() {
    return labels;
}
/** @return the node usage mode (NORMAL or EXCLUSIVE). */
public Node.Mode getMode() {
    return mode;
}
/** @return display name combining the description and the AMI id. */
public String getDisplayName() {
    return description+" ("+ami+")";
}
/** @return the configured availability zone, may be blank. */
String getZone() {
    return zone;
}
/** @return the raw comma-separated security groups string as configured. */
public String getSecurityGroupString() {
    return securityGroups;
}
/** @return the parsed security-group set (built in readResolve). */
public Set<String> getSecurityGroupSet() {
    return securityGroupSet;
}
/**
 * Splits the configured comma-separated security groups string into a set,
 * trimming whitespace around each entry. A null or blank configuration yields
 * an empty set.
 */
public Set<String> parseSecurityGroups() {
    if (securityGroups != null && !securityGroups.trim().isEmpty()) {
        return new HashSet<String>(Arrays.asList(securityGroups.split("\\s*,\\s*")));
    }
    return Collections.emptySet();
}
/**
 * @return the configured executor count; falls back to a per-instance-type
 *         default when the configured string is blank or unparsable
 */
public int getNumExecutors() {
    try {
        return Integer.parseInt(numExecutors);
    } catch (NumberFormatException e) {
        return EC2AbstractSlave.toNumExecutors(type);
    }
}
/**
 * @return the SSH port for Unix AMIs, defaulting to 22 when unset or not a
 *         Unix AMI (the blank string fails parsing and falls through)
 */
public int getSshPort() {
    try {
        String sshPort = "";
        if (amiType.isUnix()) {
            sshPort = ((UnixData)amiType).getSshPort();
        }
        return Integer.parseInt(sshPort);
    } catch (NumberFormatException e) {
        return 22;
    }
}
/** @return the login user on the slave, may be null/blank for the default. */
public String getRemoteAdmin() {
    return remoteAdmin;
}
/** @return the root command prefix (e.g. sudo) for Unix AMIs, empty otherwise. */
public String getRootCommandPrefix() {
    return amiType.isUnix() ? ((UnixData)amiType).getRootCommandPrefix() : "";
}
/** @return the VPC subnet id, may be blank for EC2-Classic. */
public String getSubnetId() {
    return subnetId;
}
/** @return whether a public IP should be associated at launch. */
public boolean getAssociatePublicIp() {
    return associatePublicIp;
}
/** @return an unmodifiable view of the configured EC2 tags, or null if none. */
public List<EC2Tag> getTags() {
    if (null == tags) return null;
    return Collections.unmodifiableList(tags);
}
/** @return the idle termination minutes string as configured. */
public String getidleTerminationMinutes() {
    return idleTerminationMinutes;
}
/** @return whether instances are launched with dedicated tenancy. */
public boolean getUseDedicatedTenancy() {
    return useDedicatedTenancy;
}
/** @return the parsed label set (built in readResolve). */
public Set<LabelAtom> getLabelSet(){
    return labelSet;
}
/** @return the instance cap; Integer.MAX_VALUE means unlimited. */
public int getInstanceCap() {
    return instanceCap;
}
/** @return the instance cap for form display; empty string when unlimited. */
public String getInstanceCapStr() {
    if (instanceCap==Integer.MAX_VALUE) {
        return "";
    } else {
        return String.valueOf(instanceCap);
    }
}
/** @return the normalized spot bid price, or null when not a spot template or invalid. */
public String getSpotMaxBidPrice(){
    if (spotConfig == null)
        return null;
    return SpotConfiguration.normalizeBid(spotConfig.spotMaxBidPrice);
}
/** @return the IAM instance profile ARN, may be blank. */
public String getIamInstanceProfile() {
    return iamInstanceProfile;
}
/**
 * Provisions a new EC2 slave or starts a previously stopped on-demand instance.
 *
 * @return always non-null. This needs to be then added to {@link Hudson#addNode(Node)}.
 */
public EC2AbstractSlave provision(TaskListener listener) throws AmazonClientException, IOException {
    // A spot configuration selects the spot path; otherwise fall back to on-demand.
    return (this.spotConfig != null) ? provisionSpot(listener) : provisionOndemand(listener);
}
/**
 * Provisions an On-demand EC2 slave by launching a new instance or
 * starting a previously-stopped instance.
 *
 * Builds a RunInstancesRequest from the template settings while mirroring the
 * same constraints into a DescribeInstances filter list, so an equivalent
 * stopped instance can be reused instead of launching a fresh one.
 *
 * @return the (new or reused) Jenkins slave node, never null
 */
private EC2AbstractSlave provisionOndemand(TaskListener listener) throws AmazonClientException, IOException {
    PrintStream logger = listener.getLogger();
    AmazonEC2 ec2 = getParent().connect();
    try {
        String msg = "Launching " + ami + " for template " + description;
        logger.println(msg);
        LOGGER.info(msg);
        KeyPair keyPair = getKeyPair(ec2);
        // Exactly one instance of the configured AMI (min=1, max=1).
        RunInstancesRequest riRequest = new RunInstancesRequest(ami, 1, 1);
        InstanceNetworkInterfaceSpecification net = new InstanceNetworkInterfaceSpecification();
        if (useEphemeralDevices) {
            setupEphemeralDeviceMapping(riRequest);
        }
        else {
            setupCustomDeviceMapping(riRequest);
        }
        // diFilters mirrors every constraint placed on riRequest; it is used below
        // to search for an equivalent stopped instance to reuse.
        List<Filter> diFilters = new ArrayList<Filter>();
        diFilters.add(new Filter("image-id").withValues(ami));
        if (StringUtils.isNotBlank(getZone())) {
            Placement placement = new Placement(getZone());
            if (getUseDedicatedTenancy()) {
                placement.setTenancy("dedicated");
            }
            riRequest.setPlacement(placement);
            diFilters.add(new Filter("availability-zone").withValues(getZone()));
        }
        if (StringUtils.isNotBlank(getSubnetId())) {
            // With a public IP the subnet must go on the network interface,
            // not on the request itself (EC2 API restriction).
            if (getAssociatePublicIp()) {
                net.setSubnetId(getSubnetId());
            }else{
                riRequest.setSubnetId(getSubnetId());
            }
            diFilters.add(new Filter("subnet-id").withValues(getSubnetId()));
            /* If we have a subnet ID then we can only use VPC security groups */
            if (!securityGroupSet.isEmpty()) {
                List<String> group_ids = getEc2SecurityGroups(ec2);
                if (!group_ids.isEmpty()) {
                    if (getAssociatePublicIp()) {
                        net.setGroups(group_ids);
                    }else{
                        riRequest.setSecurityGroupIds(group_ids);
                    }
                    diFilters.add(new Filter("instance.group-id").withValues(group_ids));
                }
            }
        } else {
            /* No subnet: we can use standard security groups by name */
            riRequest.setSecurityGroups(securityGroupSet);
            if (securityGroupSet.size() > 0)
                diFilters.add(new Filter("instance.group-name").withValues(securityGroupSet));
        }
        // User data must be base64-encoded for the EC2 API.
        String userDataString = Base64.encodeBase64String(userData.getBytes());
        riRequest.setUserData(userDataString);
        riRequest.setKeyName(keyPair.getKeyName());
        diFilters.add(new Filter("key-name").withValues(keyPair.getKeyName()));
        riRequest.setInstanceType(type.toString());
        diFilters.add(new Filter("instance-type").withValues(type.toString()));
        if (getAssociatePublicIp()) {
            net.setAssociatePublicIpAddress(true);
            net.setDeviceIndex(0);
            riRequest.withNetworkInterfaces(net);
        }
        // Collect tags; remember whether the user already supplied the special
        // jenkins-slave-type tag so we don't override it.
        boolean hasCustomTypeTag = false;
        HashSet<Tag> inst_tags = null;
        if (tags != null && !tags.isEmpty()) {
            inst_tags = new HashSet<Tag>();
            for(EC2Tag t : tags) {
                inst_tags.add(new Tag(t.getName(), t.getValue()));
                diFilters.add(new Filter("tag:"+t.getName()).withValues(t.getValue()));
                if (StringUtils.equals(t.getName(), EC2Tag.TAG_NAME_JENKINS_SLAVE_TYPE)) {
                    hasCustomTypeTag = true;
                }
            }
        }
        if (!hasCustomTypeTag) {
            if (inst_tags == null){
                inst_tags = new HashSet<Tag>();
            }
            inst_tags.add(new Tag(EC2Tag.TAG_NAME_JENKINS_SLAVE_TYPE, "demand"));
        }
        DescribeInstancesRequest diRequest = new DescribeInstancesRequest();
        // Only stopped/stopping instances are candidates for reuse.
        diFilters.add(new Filter("instance-state-name").withValues(InstanceStateName.Stopped.toString(),
                InstanceStateName.Stopping.toString()));
        diRequest.setFilters(diFilters);
        msg = "Looking for existing instances with describe-instance: "+diRequest;
        logger.println(msg);
        LOGGER.fine(msg);
        DescribeInstancesResult diResult = ec2.describeInstances(diRequest);
        Instance existingInstance = null;
        if (StringUtils.isNotBlank(getIamInstanceProfile())) {
            riRequest.setIamInstanceProfile(new IamInstanceProfileSpecification().withArn(getIamInstanceProfile()));
            // cannot filter on IAM Instance Profile, so search in result
            reservationLoop:
            for (Reservation reservation : diResult.getReservations()) {
                for (Instance instance : reservation.getInstances()) {
                    if (instance.getIamInstanceProfile() != null && instance.getIamInstanceProfile().getArn().equals(getIamInstanceProfile())) {
                        existingInstance = instance;
                        break reservationLoop;
                    }
                }
            }
        } else if (diResult.getReservations().size() > 0) {
            existingInstance = diResult.getReservations().get(0).getInstances().get(0);
        }
        if (existingInstance == null) {
            // Have to create a new instance
            Instance inst = ec2.runInstances(riRequest).getReservation().getInstances().get(0);
            /* Now that we have our instance, we can set tags on it */
            if (inst_tags != null) {
                // Retry tagging: a fresh instance id may not yet be visible to the
                // tagging API (eventual consistency), so back off and retry up to 5 times.
                for (int i = 0; i < 5; i++) {
                    try {
                        updateRemoteTags(ec2, inst_tags, inst.getInstanceId());
                        break;
                    } catch (AmazonServiceException e) {
                        if (e.getErrorCode().equals("InvalidInstanceRequestID.NotFound")) {
                            Thread.sleep(5000);
                            continue;
                        }
                        throw e;
                    }
                }
                // That was a remote request - we should also update our local instance data.
                inst.setTags(inst_tags);
            }
            msg = "No existing instance found - created: "+inst;
            logger.println(msg);
            LOGGER.info(msg);
            return newOndemandSlave(inst);
        }
        msg = "Found existing stopped instance: "+existingInstance;
        logger.println(msg);
        LOGGER.info(msg);
        List<String> instances = new ArrayList<String>();
        instances.add(existingInstance.getInstanceId());
        StartInstancesRequest siRequest = new StartInstancesRequest(instances);
        StartInstancesResult siResult = ec2.startInstances(siRequest);
        msg = "Starting existing instance: "+existingInstance+ " result:"+siResult;
        logger.println(msg);
        LOGGER.fine(msg);
        // Reuse the Jenkins node that already corresponds to this instance, if any.
        for (EC2AbstractSlave ec2Node: NodeIterator.nodes(EC2AbstractSlave.class)){
            if (ec2Node.getInstanceId().equals(existingInstance.getInstanceId())) {
                msg = "Found existing corresponding Jenkins slave: "+ec2Node;
                logger.println(msg);
                LOGGER.finer(msg);
                return ec2Node;
            }
        }
        // Existing slave not found
        msg = "Creating new Jenkins slave for existing instance: "+existingInstance;
        logger.println(msg);
        LOGGER.info(msg);
        return newOndemandSlave(existingInstance);
    } catch (FormException e) {
        throw new AssertionError(); // we should have discovered all configuration issues upfront
    } catch (InterruptedException e) {
        throw new RuntimeException(e);
    }
}
/**
 * Maps up to four instance-store (ephemeral) volumes onto the run request,
 * choosing device names /dev/xvdb../dev/xvdz that are not already used by
 * the AMI's own block device mapping.
 */
private void setupEphemeralDeviceMapping(RunInstancesRequest riRequest) {
    // Device names already claimed by the AMI itself must be skipped.
    final List<BlockDeviceMapping> oldDeviceMapping = getAmiBlockDeviceMappings();
    final Set<String> occupiedDevices = new HashSet<String>();
    for (final BlockDeviceMapping mapping: oldDeviceMapping ) {
        occupiedDevices.add(mapping.getDeviceName());
    }
    // EC2 exposes at most four ephemeral virtual names.
    final List<String> available = new ArrayList<String>(Arrays.asList(
            "ephemeral0", "ephemeral1", "ephemeral2", "ephemeral3"
    ));
    final List<BlockDeviceMapping> newDeviceMapping = new ArrayList<BlockDeviceMapping>(4);
    // Walk candidate device names until all four ephemeral names are assigned.
    for (char suffix = 'b'; suffix <= 'z' && !available.isEmpty(); suffix++) {
        final String deviceName = String.format("/dev/xvd%s", suffix);
        if (occupiedDevices.contains(deviceName)) continue;
        final BlockDeviceMapping newMapping = new BlockDeviceMapping()
                .withDeviceName(deviceName)
                .withVirtualName(available.get(0))
                ;
        newDeviceMapping.add(newMapping);
        available.remove(0);
    }
    riRequest.withBlockDeviceMappings(newDeviceMapping);
}
/**
 * Fetches the block device mappings declared by this template's AMI.
 *
 * @return the AMI's block device mappings
 * @throws AmazonClientException if the AMI cannot be described
 */
private List<BlockDeviceMapping> getAmiBlockDeviceMappings() {
    /*
     * AmazonEC2#describeImageAttribute does not work due to a bug
     * https://forums.aws.amazon.com/message.jspa?messageID=231972
     */
    // Ask only for this template's AMI instead of listing every image visible
    // to the account (describeImages() with no request is very slow).
    for (final Image image : getParent().connect().describeImages(new DescribeImagesRequest().withImageIds(ami)).getImages()) {
        if (ami.equals(image.getImageId())) {
            return image.getBlockDeviceMappings();
        }
    }
    throw new AmazonClientException("Unable to get AMI device mapping for " + ami);
}
/**
 * Applies the user-supplied custom block device mapping string to the run
 * request, when one is configured; a blank configuration is a no-op.
 */
private void setupCustomDeviceMapping(RunInstancesRequest riRequest) {
    if (StringUtils.isBlank(customDeviceMapping)) {
        return; // nothing configured
    }
    riRequest.setBlockDeviceMappings(DeviceMappingParser.parse(customDeviceMapping));
}
/**
* Provision a new slave for an EC2 spot instance to call back to Jenkins
*/
/**
 * Provisions a new EC2 Spot instance from this template.
 *
 * <p>Builds a spot request from the template configuration (bid price, AMI,
 * instance type, placement, networking, user data, tags), submits it, tags
 * the resulting request, and wraps it in an {@link EC2SpotSlave}.
 *
 * @param listener receives progress output while provisioning
 * @return the newly created spot slave
 * @throws AmazonClientException if the bid price is invalid or the spot request fails
 * @throws IOException if slave creation fails
 */
private EC2AbstractSlave provisionSpot(TaskListener listener) throws AmazonClientException, IOException {
    PrintStream logger = listener.getLogger();
    AmazonEC2 ec2 = getParent().connect();
    try {
        logger.println("Launching " + ami + " for template " + description);
        // Keep the server-side log consistent with the on-demand provisioning path.
        LOGGER.info("Launching " + ami + " for template " + description);
        KeyPair keyPair = getKeyPair(ec2);
        RequestSpotInstancesRequest spotRequest = new RequestSpotInstancesRequest();
        // Validate spot bid before making the request
        if (getSpotMaxBidPrice() == null) {
            throw new AmazonClientException("Invalid Spot price specified: " + getSpotMaxBidPrice());
        }
        spotRequest.setSpotPrice(getSpotMaxBidPrice());
        spotRequest.setInstanceCount(Integer.valueOf(1));
        spotRequest.setType(getBidType());
        LaunchSpecification launchSpecification = new LaunchSpecification();
        InstanceNetworkInterfaceSpecification net = new InstanceNetworkInterfaceSpecification();
        launchSpecification.setImageId(ami);
        launchSpecification.setInstanceType(type);
        if (StringUtils.isNotBlank(getZone())) {
            SpotPlacement placement = new SpotPlacement(getZone());
            launchSpecification.setPlacement(placement);
        }
        if (StringUtils.isNotBlank(getSubnetId())) {
            // With a public IP the subnet must be set on the network interface
            // rather than on the launch specification.
            if (getAssociatePublicIp()) {
                net.setSubnetId(getSubnetId());
            } else {
                launchSpecification.setSubnetId(getSubnetId());
            }
            /* If we have a subnet ID then we can only use VPC security groups */
            if (!securityGroupSet.isEmpty()) {
                List<String> group_ids = getEc2SecurityGroups(ec2);
                if (!group_ids.isEmpty()) {
                    if (getAssociatePublicIp()) {
                        net.setGroups(group_ids);
                    } else {
                        ArrayList<GroupIdentifier> groups = new ArrayList<GroupIdentifier>();
                        for (String group_id : group_ids) {
                            GroupIdentifier group = new GroupIdentifier();
                            group.setGroupId(group_id);
                            groups.add(group);
                        }
                        if (!groups.isEmpty())
                            launchSpecification.setAllSecurityGroups(groups);
                    }
                }
            }
        } else {
            /* No subnet: we can use standard security groups by name */
            if (securityGroupSet.size() > 0)
                launchSpecification.setSecurityGroups(securityGroupSet);
        }
        // The slave must know the Jenkins server to register with as well
        // as the name of the node in Jenkins it should register as. The only
        // way to give information to the Spot slaves is through the ec2 user data
        String jenkinsUrl = Hudson.getInstance().getRootUrl();
        // We must provide a unique node name for the slave to connect to Jenkins.
        // We don't have the EC2 generated instance ID, or the Spot request ID
        // until after the instance is requested, which is then too late to set the
        // user-data for the request. Instead we generate a unique name from UUID
        // so that the slave has a unique name within Jenkins to register to.
        String slaveName = UUID.randomUUID().toString();
        String newUserData = "";
        // We want to allow node configuration with cloud-init and user-data,
        // while maintaining backward compatibility with old ami's.
        // The 'new' way is triggered by the presence of '${SLAVE_NAME}' in the
        // user data (which is not too much to ask).
        if (userData.contains("${SLAVE_NAME}")) {
            // The cloud-init compatible way
            newUserData = new String(userData);
            newUserData = newUserData.replace("${SLAVE_NAME}", slaveName);
            newUserData = newUserData.replace("${JENKINS_URL}", jenkinsUrl);
        } else {
            // The 'old' way - maintain full backward compatibility
            newUserData = "JENKINS_URL=" + jenkinsUrl +
                    "&SLAVE_NAME=" + slaveName +
                    "&USER_DATA=" + Base64.encodeBase64String(userData.getBytes());
        }
        String userDataString = Base64.encodeBase64String(newUserData.getBytes());
        launchSpecification.setUserData(userDataString);
        launchSpecification.setKeyName(keyPair.getKeyName());
        launchSpecification.setInstanceType(type.toString());
        if (getAssociatePublicIp()) {
            net.setAssociatePublicIpAddress(true);
            net.setDeviceIndex(0);
            launchSpecification.withNetworkInterfaces(net);
        }
        // Collect the configured tags, and always identify the instance as a
        // Jenkins spot slave unless the user supplied their own slave-type tag.
        // NOTE: previously inst_tags was left null when no template tags were
        // configured, which made the add() below throw a NullPointerException.
        boolean hasCustomTypeTag = false;
        HashSet<Tag> inst_tags = new HashSet<Tag>();
        if (tags != null && !tags.isEmpty()) {
            for (EC2Tag t : tags) {
                inst_tags.add(new Tag(t.getName(), t.getValue()));
                if (StringUtils.equals(t.getName(), EC2Tag.TAG_NAME_JENKINS_SLAVE_TYPE)) {
                    hasCustomTypeTag = true;
                }
            }
        }
        if (!hasCustomTypeTag) {
            inst_tags.add(new Tag(EC2Tag.TAG_NAME_JENKINS_SLAVE_TYPE, "spot"));
        }
        if (StringUtils.isNotBlank(getIamInstanceProfile())) {
            launchSpecification.setIamInstanceProfile(new IamInstanceProfileSpecification().withArn(getIamInstanceProfile()));
        }
        spotRequest.setLaunchSpecification(launchSpecification);
        // Make the request for a new Spot instance
        RequestSpotInstancesResult reqResult = ec2.requestSpotInstances(spotRequest);
        List<SpotInstanceRequest> reqInstances = reqResult.getSpotInstanceRequests();
        if (reqInstances.size() <= 0) {
            throw new AmazonClientException("No spot instances found");
        }
        SpotInstanceRequest spotInstReq = reqInstances.get(0);
        if (spotInstReq == null) {
            throw new AmazonClientException("Spot instance request is null");
        }
        /* Now that we have our Spot request, we can set tags on it */
        if (!inst_tags.isEmpty()) {
            // The new request may not be visible to the tagging API right away;
            // retry a few times before giving up.
            for (int i = 0; i < 5; i++) {
                try {
                    updateRemoteTags(ec2, inst_tags, spotInstReq.getSpotInstanceRequestId());
                    break;
                } catch (AmazonServiceException e) {
                    if (e.getErrorCode().equals("InvalidSpotInstanceRequestID.NotFound")) {
                        Thread.sleep(5000);
                        continue;
                    }
                    throw e;
                }
            }
            // That was a remote request - we should also update our local instance data.
            spotInstReq.setTags(inst_tags);
        }
        logger.println("Spot instance id in provision: " + spotInstReq.getSpotInstanceRequestId());
        return newSpotSlave(spotInstReq, slaveName);
    } catch (FormException e) {
        throw new AssertionError(); // we should have discovered all configuration issues upfront
    } catch (InterruptedException e) {
        // Restore the interrupt flag so callers can still observe it.
        Thread.currentThread().interrupt();
        throw new RuntimeException(e);
    }
}
/**
 * Wraps a running on-demand EC2 instance in an {@link EC2OndemandSlave},
 * copying this template's configuration (executors, labels, scripts,
 * termination policy, timeouts, AMI type). The long positional argument
 * list is order-sensitive; keep it in sync with the EC2OndemandSlave ctor.
 */
protected EC2OndemandSlave newOndemandSlave(Instance inst) throws FormException, IOException {
return new EC2OndemandSlave(inst.getInstanceId(), description, remoteFS, getNumExecutors(), labels, mode, initScript, tmpDir, remoteAdmin, jvmopts, stopOnTerminate, idleTerminationMinutes, inst.getPublicDnsName(), inst.getPrivateDnsName(), EC2Tag.fromAmazonTags(inst.getTags()), parent.name, usePrivateDnsName, useDedicatedTenancy, getLaunchTimeout(), amiType);
}
/**
 * Wraps an accepted spot instance request in an {@link EC2SpotSlave} under the
 * given pre-generated node name, copying this template's configuration.
 * The long positional argument list is order-sensitive; keep it in sync with
 * the EC2SpotSlave ctor.
 */
protected EC2SpotSlave newSpotSlave(SpotInstanceRequest sir, String name) throws FormException, IOException {
return new EC2SpotSlave(name, sir.getSpotInstanceRequestId(), description, remoteFS, getNumExecutors(), mode, initScript, tmpDir, labels, remoteAdmin, jvmopts, idleTerminationMinutes, EC2Tag.fromAmazonTags(sir.getTags()), parent.name, usePrivateDnsName, getLaunchTimeout(), amiType);
}
/**
 * Resolves the key pair configured on the parent cloud against EC2.
 *
 * @return the matching EC2 key pair, never null
 * @throws AmazonClientException if the configured private key matches no EC2 key pair
 */
private KeyPair getKeyPair(AmazonEC2 ec2) throws IOException, AmazonClientException {
    final KeyPair resolved = parent.getPrivateKey().find(ec2);
    if (resolved != null) {
        return resolved;
    }
    throw new AmazonClientException("No matching keypair found on EC2. Is the EC2 private key a valid one?");
}
/**
 * Pushes the given tags to the EC2 resources identified by {@code params}.
 */
private void updateRemoteTags(AmazonEC2 ec2, Collection<Tag> inst_tags, String... params) {
    final CreateTagsRequest request = new CreateTagsRequest()
            .withResources(params)
            .withTags(inst_tags);
    ec2.createTags(request);
}
/**
 * Resolves the template's configured security groups to VPC security group ids.
 *
 * <p>Groups are looked up first by name and, if nothing matches, by id. A group
 * is accepted only when the configured subnet is an available subnet of the
 * group's VPC.
 *
 * @throws AmazonClientException if any configured group could not be resolved
 *         to a usable VPC security group
 */
private List<String> getEc2SecurityGroups(AmazonEC2 ec2) throws AmazonClientException {
    final List<String> groupIds = new ArrayList<String>();
    DescribeSecurityGroupsResult lookup = getSecurityGroupsBy("group-name", securityGroupSet, ec2);
    if (lookup.getSecurityGroups().isEmpty()) {
        lookup = getSecurityGroupsBy("group-id", securityGroupSet, ec2);
    }
    for (SecurityGroup group : lookup.getSecurityGroups()) {
        final String vpcId = group.getVpcId();
        if (vpcId == null || vpcId.isEmpty()) {
            continue; // not a VPC group; cannot be used in a VPC context
        }
        // Accept the group only if our subnet is available inside its VPC.
        final List<Filter> filters = new ArrayList<Filter>();
        filters.add(new Filter("vpc-id").withValues(vpcId));
        filters.add(new Filter("state").withValues("available"));
        filters.add(new Filter("subnet-id").withValues(getSubnetId()));
        final DescribeSubnetsResult subnetResult =
                ec2.describeSubnets(new DescribeSubnetsRequest().withFilters(filters));
        final List<Subnet> subnets = subnetResult.getSubnets();
        if (subnets != null && !subnets.isEmpty()) {
            groupIds.add(group.getGroupId());
        }
    }
    if (securityGroupSet.size() != groupIds.size()) {
        throw new AmazonClientException("Security groups must all be VPC security groups to work in a VPC context");
    }
    return groupIds;
}
/**
 * Queries EC2 for security groups matching one filter (e.g. "group-name" or
 * "group-id") against the given set of values.
 */
private DescribeSecurityGroupsResult getSecurityGroupsBy(String filterName, Set<String> filterValues, AmazonEC2 ec2) {
    final Filter filter = new Filter(filterName).withValues(filterValues);
    final DescribeSecurityGroupsRequest request = new DescribeSecurityGroupsRequest().withFilters(filter);
    return ec2.describeSecurityGroups(request);
}
/**
 * Provisions a new EC2 slave based on the currently running instance on EC2,
 * instead of starting a new one.
 *
 * @param instanceId the id of the already-running EC2 instance to adopt
 * @throws AmazonClientException if the instance cannot be described
 */
public EC2AbstractSlave attach(String instanceId, TaskListener listener) throws AmazonClientException, IOException {
    final PrintStream logger = listener.getLogger();
    final AmazonEC2 ec2 = getParent().connect();
    try {
        logger.println("Attaching to " + instanceId);
        final DescribeInstancesRequest request = new DescribeInstancesRequest()
                .withInstanceIds(Collections.singletonList(instanceId));
        final Instance existing = ec2.describeInstances(request)
                .getReservations().get(0)
                .getInstances().get(0);
        return newOndemandSlave(existing);
    } catch (FormException e) {
        // All configuration problems should have been discovered upfront.
        throw new AssertionError();
    }
}
/**
 * Initializes data structure that we don't persist.
 * Invoked by Java deserialization when a stored template is loaded.
 */
protected Object readResolve() {
// Derived, non-persisted views of the persisted string fields.
labelSet = Label.parse(labels);
securityGroupSet = parseSecurityGroups();
/**
 * In releases of this plugin prior to 1.18, template-specific instance caps could be configured
 * but were not enforced. As a result, it was possible to have the instance cap for a template
 * be configured to 0 (zero) with no ill effects. Starting with version 1.18, template-specific
 * instance caps are enforced, so if a configuration has a cap of zero for a template, no instances
 * will be launched from that template. Since there is no practical value of intentionally setting
 * the cap to zero, this block will override such a setting to a value that means 'no cap'.
 */
if (instanceCap == 0) {
instanceCap = Integer.MAX_VALUE;
}
// Configurations saved before the AMI-type abstraction existed carry no
// amiType; assume Unix and rebuild it from the legacy fields.
if (amiType == null) {
amiType = new UnixData(rootCommandPrefix, sshPort);
}
return this;
}
// Descriptors are registered with and looked up from the Jenkins singleton
// rather than instantiated directly.
public Descriptor<SlaveTemplate> getDescriptor() {
return Hudson.getInstance().getDescriptor(getClass());
}
/**
 * @return the configured launch timeout in seconds; a non-positive stored
 *         value means "wait forever" and is reported as Integer.MAX_VALUE
 */
public int getLaunchTimeout() {
    if (launchTimeout <= 0) {
        return Integer.MAX_VALUE;
    }
    return launchTimeout;
}
/**
 * @return the launch timeout as shown in the config form; an unlimited
 *         timeout (Integer.MAX_VALUE) is rendered as an empty string
 */
public String getLaunchTimeoutStr() {
    return launchTimeout == Integer.MAX_VALUE ? "" : String.valueOf(launchTimeout);
}
/** @return true if this template launches Windows AMIs. */
public boolean isWindowsSlave()
{
return amiType.isWindows();
}
/** @return true if this template launches Unix AMIs. */
public boolean isUnixSlave()
{
return amiType.isUnix();
}
/** @return the Windows admin password, or "" for non-Windows AMI types. */
public String getAdminPassword()
{
return amiType.isWindows() ? ((WindowsData)amiType).getPassword() : "";
}
/** @return true if WinRM should use HTTPS; always false for non-Windows AMI types. */
private boolean isUseHTTPS() {
return amiType.isWindows() ? ((WindowsData)amiType).isUseHTTPS() : false;
}
@Extension
public static final class DescriptorImpl extends Descriptor<SlaveTemplate> {

    @Override
    public String getDisplayName() {
        // Templates are configured inline on the cloud configuration page;
        // they have no standalone display name.
        return null;
    }

    /** @return all registered AMI type descriptors (Unix, Windows, ...). */
    public List<Descriptor<AMITypeData>> getAMITypeDescriptors()
    {
        return Hudson.getInstance().<AMITypeData,Descriptor<AMITypeData>>getDescriptorList(AMITypeData.class);
    }

    /**
     * Since this shares much of the configuration with {@link EC2Computer}, check its help page, too.
     */
    @Override
    public String getHelpFile(String fieldName) {
        String p = super.getHelpFile(fieldName);
        if (p == null)
            p = Hudson.getInstance().getDescriptor(EC2OndemandSlave.class).getHelpFile(fieldName);
        if (p == null)
            p = Hudson.getInstance().getDescriptor(EC2SpotSlave.class).getHelpFile(fieldName);
        return p;
    }

    /**
     * @return true if the form value is null, empty or whitespace-only.
     *
     * NOTE: the previous checks used {@code value.trim() == ""}, a reference
     * comparison that never matches user-submitted strings, so blank fields
     * fell through to Integer.parseInt and produced spurious errors.
     */
    private static boolean isBlank(String value) {
        return value == null || value.trim().isEmpty();
    }

    /***
     * Check that the AMI requested is available in the cloud and can be used.
     */
    public FormValidation doValidateAmi(
            @QueryParameter boolean useInstanceProfileForCredentials,
            @QueryParameter String accessId, @QueryParameter String secretKey,
            @QueryParameter String ec2endpoint, @QueryParameter String region,
            final @QueryParameter String ami) throws IOException {
        AWSCredentialsProvider credentialsProvider = EC2Cloud.createCredentialsProvider(useInstanceProfileForCredentials, accessId, secretKey);
        AmazonEC2 ec2;
        if (region != null) {
            ec2 = EC2Cloud.connect(credentialsProvider, AmazonEC2Cloud.getEc2EndpointUrl(region));
        } else {
            ec2 = EC2Cloud.connect(credentialsProvider, new URL(ec2endpoint));
        }
        if (ec2 != null) {
            try {
                List<String> images = new LinkedList<String>();
                images.add(ami);
                List<String> owners = new LinkedList<String>();
                List<String> users = new LinkedList<String>();
                DescribeImagesRequest request = new DescribeImagesRequest();
                request.setImageIds(images);
                request.setOwners(owners);
                request.setExecutableUsers(users);
                List<Image> img = ec2.describeImages(request).getImages();
                if (img == null || img.isEmpty()) {
                    // de-registered AMI causes an empty list to be returned. so be defensive
                    // against other possibilities
                    return FormValidation.error("No such AMI, or not usable with this accessId: " + ami);
                }
                String ownerAlias = img.get(0).getImageOwnerAlias();
                return FormValidation.ok(img.get(0).getImageLocation() +
                        (ownerAlias != null ? " by " + ownerAlias : ""));
            } catch (AmazonClientException e) {
                return FormValidation.error(e.getMessage());
            }
        } else
            return FormValidation.ok(); // can't test
    }

    public FormValidation doCheckLabelString(@QueryParameter String value, @QueryParameter Node.Mode mode) {
        if (mode == Node.Mode.EXCLUSIVE && isBlank(value)) {
            return FormValidation.warning("You may want to assign labels to this node;" +
                    " it's marked to only run jobs that are exclusively tied to itself or a label.");
        }
        return FormValidation.ok();
    }

    public FormValidation doCheckIdleTerminationMinutes(@QueryParameter String value) {
        if (isBlank(value)) return FormValidation.ok();
        try {
            int val = Integer.parseInt(value);
            if (val >= -59) return FormValidation.ok();
        }
        catch (NumberFormatException nfe) {
            // not a number: fall through to the error below
        }
        return FormValidation.error("Idle Termination time must be a greater than -59 (or null)");
    }

    public FormValidation doCheckInstanceCapStr(@QueryParameter String value) {
        if (isBlank(value)) return FormValidation.ok();
        try {
            int val = Integer.parseInt(value);
            // NOTE: the check requires a strictly positive cap even though the
            // message says "non-negative"; zero is normalized away elsewhere.
            if (val > 0) return FormValidation.ok();
        } catch (NumberFormatException nfe) {
            // not a number: fall through to the error below
        }
        return FormValidation.error("InstanceCap must be a non-negative integer (or null)");
    }

    public FormValidation doCheckLaunchTimeoutStr(@QueryParameter String value) {
        if (isBlank(value)) return FormValidation.ok();
        try {
            int val = Integer.parseInt(value);
            if (val >= 0) return FormValidation.ok();
        } catch (NumberFormatException nfe) {
            // not a number: fall through to the error below
        }
        return FormValidation.error("Launch Timeout must be a non-negative integer (or null)");
    }

    public ListBoxModel doFillZoneItems(@QueryParameter boolean useInstanceProfileForCredentials,
                                        @QueryParameter String accessId,
                                        @QueryParameter String secretKey,
                                        @QueryParameter String region)
            throws IOException, ServletException
    {
        AWSCredentialsProvider credentialsProvider = EC2Cloud.createCredentialsProvider(useInstanceProfileForCredentials, accessId, secretKey);
        return EC2AbstractSlave.fillZoneItems(credentialsProvider, region);
    }

    /* Validate the Spot Max Bid Price to ensure that it is a floating point number >= .001 */
    public FormValidation doCheckSpotMaxBidPrice(@QueryParameter String spotMaxBidPrice) {
        if (SpotConfiguration.normalizeBid(spotMaxBidPrice) != null) {
            return FormValidation.ok();
        }
        return FormValidation.error("Not a correct bid price");
    }

    // Retrieve the availability zones for the region
    private ArrayList<String> getAvailabilityZones(AmazonEC2 ec2) {
        ArrayList<String> availabilityZones = new ArrayList<String>();
        DescribeAvailabilityZonesResult zones = ec2.describeAvailabilityZones();
        List<AvailabilityZone> zoneList = zones.getAvailabilityZones();
        for (AvailabilityZone z : zoneList) {
            availabilityZones.add(z.getZoneName());
        }
        return availabilityZones;
    }

    /**
     * Populates the Bid Type Drop down on the slave template config.
     */
    public ListBoxModel doFillBidTypeItems() {
        ListBoxModel items = new ListBoxModel();
        items.add(SpotInstanceType.OneTime.toString());
        items.add(SpotInstanceType.Persistent.toString());
        return items;
    }

    /* Check the current Spot price of the selected instance type for the selected region */
    public FormValidation doCurrentSpotPrice(@QueryParameter boolean useInstanceProfileForCredentials,
                                             @QueryParameter String accessId, @QueryParameter String secretKey,
                                             @QueryParameter String region, @QueryParameter String type,
                                             @QueryParameter String zone) throws IOException, ServletException {
        String cp = "";
        String zoneStr = "";
        // Connect to the EC2 cloud with the access id, secret key, and region queried from the created cloud
        AWSCredentialsProvider credentialsProvider = EC2Cloud.createCredentialsProvider(useInstanceProfileForCredentials, accessId, secretKey);
        AmazonEC2 ec2 = EC2Cloud.connect(credentialsProvider, AmazonEC2Cloud.getEc2EndpointUrl(region));
        if (ec2 != null) {
            try {
                // Build a new price history request with the currently selected type
                DescribeSpotPriceHistoryRequest request = new DescribeSpotPriceHistoryRequest();
                // If a zone is specified, set the availability zone in the request
                // Else, proceed with no availability zone which will result with the cheapest Spot price
                if (getAvailabilityZones(ec2).contains(zone)) {
                    request.setAvailabilityZone(zone);
                    zoneStr = zone + " availability zone";
                } else {
                    zoneStr = region + " region";
                }
                /*
                 * Iterate through the AWS instance types to see if can find a match for the databound
                 * String type. This is necessary because the AWS API needs the instance type
                 * string formatted a particular way to retrieve prices and the form gives us the strings
                 * in a different format. For example "T1Micro" vs "t1.micro".
                 */
                InstanceType ec2Type = null;
                for (InstanceType it : InstanceType.values()) {
                    if (it.name().toString().equals(type)) {
                        ec2Type = it;
                        break;
                    }
                }
                /*
                 * If the type string cannot be matched with an instance type,
                 * throw a Form error
                 */
                if (ec2Type == null) {
                    return FormValidation.error("Could not resolve instance type: " + type);
                }
                Collection<String> instanceType = new ArrayList<String>();
                instanceType.add(ec2Type.toString());
                request.setInstanceTypes(instanceType);
                request.setStartTime(new Date());
                // Retrieve the price history request result and store the current price
                DescribeSpotPriceHistoryResult result = ec2.describeSpotPriceHistory(request);
                if (!result.getSpotPriceHistory().isEmpty()) {
                    SpotPrice currentPrice = result.getSpotPriceHistory().get(0);
                    cp = currentPrice.getSpotPrice();
                }
            } catch (AmazonClientException e) {
                return FormValidation.error(e.getMessage());
            }
        }
        /*
         * If we could not return the current price of the instance display an error
         * Else, remove the additional zeros from the current price and return it to the interface
         * in the form of a message
         */
        if (cp.isEmpty()) {
            return FormValidation.error("Could not retrieve current Spot price");
        } else {
            cp = cp.substring(0, cp.length() - 3);
            return FormValidation.ok("The current Spot price for a " + type +
                    " in the " + zoneStr + " is $" + cp);
        }
    }
}
private static final Logger LOGGER = Logger.getLogger(SlaveTemplate.class.getName());
}
|
Small cleanup in logging statements
I cleaned up the calls that log the launch of on-demand and spot
instances so that they now share the same format and both make the
same calls; previously provisionSpot was missing the call to
LOGGER.info.
|
src/main/java/hudson/plugins/ec2/SlaveTemplate.java
|
Small cleanup in logging statements
|
|
Java
|
mit
|
0d66a0d18ca0f2a8909b28d1fbf146afd1666d17
| 0
|
integeruser/jgltut
|
package rosick.mckesson.IV.tut16;
import static org.lwjgl.opengl.GL11.*;
import static org.lwjgl.opengl.GL12.*;
import static org.lwjgl.opengl.GL13.*;
import static org.lwjgl.opengl.GL15.*;
import static org.lwjgl.opengl.GL20.*;
import static org.lwjgl.opengl.GL21.*;
import static org.lwjgl.opengl.GL30.*;
import static org.lwjgl.opengl.GL31.*;
import static org.lwjgl.opengl.GL32.*;
import static org.lwjgl.opengl.GL33.*;
import static org.lwjgl.opengl.EXTTextureFilterAnisotropic.*;
import java.nio.FloatBuffer;
import java.util.ArrayList;
import org.lwjgl.BufferUtils;
import org.lwjgl.input.Keyboard;
import org.lwjgl.opengl.GL12;
import rosick.LWJGLWindow;
import rosick.PortingUtils.BufferableData;
import rosick.jglsdk.framework.Framework;
import rosick.jglsdk.framework.Mesh;
import rosick.jglsdk.framework.Timer;
import rosick.jglsdk.glimg.ImageSet;
import rosick.jglsdk.glimg.ImageSet.Dimensions;
import rosick.jglsdk.glimg.ImageSet.SingleImage;
import rosick.jglsdk.glimg.loaders.Dds;
import rosick.jglsdk.glm.Glm;
import rosick.jglsdk.glm.Mat4;
import rosick.jglsdk.glm.Vec3;
import rosick.jglsdk.glutil.MatrixStack;
/**
* Visit https://github.com/rosickteam/OpenGL for project info, updates and license terms.
*
* IV. Texturing
* 16. Gamma and Textures
* http://www.arcsynthesis.org/gltut/Texturing/Tutorial%2016.html
* @author integeruser
*
* A - toggles gamma correction.
* G - switches to a texture whose mipmaps were properly generated.
* SPACE - presses A and G keys.
* Y - toggles between plane/corridor mesh.
* P - toggles pausing on/off.
* 1,2 - select linear mipmap filtering and anisotropic filtering (using the maximum possible anisotropy).
*/
public class GammaCheckers02 extends LWJGLWindow {

    public static void main(String[] args) {
        new GammaCheckers02().start();
    }

    private final static int FLOAT_SIZE = Float.SIZE / 8;
    private final String TUTORIAL_DATAPATH = "/rosick/mckesson/IV/tut16/data/";

    /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
     * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */

    // Bundles a GL program handle with its per-program uniform location.
    private class ProgramData {
        int theProgram;
        int modelToCameraMatrixUnif;
    }

    private final int g_projectionBlockIndex = 0;
    private final int g_colorTexUnit = 0;

    private ProgramData g_progNoGamma;
    private ProgramData g_progGamma;
    private int g_projectionUniformBuffer;
    private int g_linearTexture;
    private int g_gammaTexture;

    private float g_fzNear = 1.0f;
    private float g_fzFar = 1000.0f;

    private MatrixStack modelMatrix = new MatrixStack();
    // Scratch buffer reused for uploading 4x4 matrices.
    private FloatBuffer tempFloatBuffer16 = BufferUtils.createFloatBuffer(16);

    /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
     * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */

    /**
     * Compiles and links a vertex/fragment shader pair, then wires its
     * Projection uniform block and colorTexture sampler to the shared
     * binding points used by this tutorial.
     */
    private ProgramData loadProgram(String strVertexShader, String strFragmentShader) {
        ArrayList<Integer> shaderList = new ArrayList<>();
        shaderList.add(Framework.loadShader(GL_VERTEX_SHADER, strVertexShader));
        shaderList.add(Framework.loadShader(GL_FRAGMENT_SHADER, strFragmentShader));
        ProgramData data = new ProgramData();
        data.theProgram = Framework.createProgram(shaderList);
        data.modelToCameraMatrixUnif = glGetUniformLocation(data.theProgram, "modelToCameraMatrix");
        int projectionBlock = glGetUniformBlockIndex(data.theProgram, "Projection");
        glUniformBlockBinding(data.theProgram, projectionBlock, g_projectionBlockIndex);
        int colorTextureUnif = glGetUniformLocation(data.theProgram, "colorTexture");
        glUseProgram(data.theProgram);
        glUniform1i(colorTextureUnif, g_colorTexUnit);
        glUseProgram(0);
        return data;
    }

    private void initializePrograms() {
        g_progNoGamma = loadProgram(TUTORIAL_DATAPATH + "PT.vert", TUTORIAL_DATAPATH + "textureNoGamma.frag");
        g_progGamma = loadProgram(TUTORIAL_DATAPATH + "PT.vert", TUTORIAL_DATAPATH + "textureGamma.frag");
    }

    @Override
    protected void init() {
        initializePrograms();
        try {
            g_pCorridor = new Mesh(TUTORIAL_DATAPATH + "Corridor.xml");
            g_pPlane = new Mesh(TUTORIAL_DATAPATH + "BigPlane.xml");
        } catch (Exception exception) {
            exception.printStackTrace();
            // The meshes are required; abort with a failure status.
            // (Previously exit(0), which incorrectly reported success.)
            System.exit(1);
        }
        glEnable(GL_CULL_FACE);
        glCullFace(GL_BACK);
        glFrontFace(GL_CW);
        final float depthZNear = 0.0f;
        final float depthZFar = 1.0f;
        glEnable(GL_DEPTH_TEST);
        glDepthMask(true);
        glDepthFunc(GL_LEQUAL);
        glDepthRange(depthZNear, depthZFar);
        glEnable(GL_DEPTH_CLAMP);
        // Setup our Uniform Buffers
        g_projectionUniformBuffer = glGenBuffers();
        glBindBuffer(GL_UNIFORM_BUFFER, g_projectionUniformBuffer);
        glBufferData(GL_UNIFORM_BUFFER, ProjectionBlock.SIZE, GL_DYNAMIC_DRAW);
        glBindBufferRange(GL_UNIFORM_BUFFER, g_projectionBlockIndex, g_projectionUniformBuffer,
                0, ProjectionBlock.SIZE);
        glBindBuffer(GL_UNIFORM_BUFFER, 0);
        loadCheckerTexture();
        createSamplers();
    }

    @Override
    protected void update() {
        while (Keyboard.next()) {
            boolean particularKeyPressed = false;
            if (Keyboard.getEventKeyState()) {
                switch (Keyboard.getEventKey()) {
                    case Keyboard.KEY_A:
                        g_drawGammaProgram = !g_drawGammaProgram;
                        particularKeyPressed = true;
                        break;
                    case Keyboard.KEY_G:
                        g_drawGammaTexture = !g_drawGammaTexture;
                        particularKeyPressed = true;
                        break;
                    case Keyboard.KEY_SPACE:
                        g_drawGammaProgram = !g_drawGammaProgram;
                        g_drawGammaTexture = !g_drawGammaTexture;
                        particularKeyPressed = true;
                        break;
                    case Keyboard.KEY_Y:
                        g_drawCorridor = !g_drawCorridor;
                        break;
                    case Keyboard.KEY_P:
                        g_camTimer.togglePause();
                        break;
                    case Keyboard.KEY_ESCAPE:
                        leaveMainLoop();
                        break;
                }
                // Keys 1..9 select a sampler, if one with that index exists.
                if (Keyboard.KEY_1 <= Keyboard.getEventKey() && Keyboard.getEventKey() <= Keyboard.KEY_9) {
                    int number = Keyboard.getEventKey() - Keyboard.KEY_1;
                    if (number < NUM_SAMPLERS) {
                        g_currSampler = number;
                    }
                }
            }
            // Report the current gamma settings when A/G/SPACE were pressed.
            if (particularKeyPressed) {
                System.out.printf("----\n");
                System.out.printf("Rendering:\t\t%s\n", g_drawGammaProgram ? "Gamma" : "Linear");
                System.out.printf("Mipmap Generation:\t%s\n", g_drawGammaTexture ? "Gamma" : "Linear");
            }
        }
    }

    @Override
    protected void display() {
        glClearColor(0.75f, 0.75f, 1.0f, 1.0f);
        glClearDepth(1.0f);
        glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
        g_camTimer.update((float) getElapsedTime());
        // Sweep the camera slightly on a looping timer.
        float cyclicAngle = g_camTimer.getAlpha() * 6.28f;
        float hOffset = (float) (Math.cos(cyclicAngle) * 0.25f);
        float vOffset = (float) (Math.sin(cyclicAngle) * 0.25f);
        modelMatrix.clear();
        final Mat4 worldToCamMat = Glm.lookAt(
                new Vec3(hOffset, 1.0f, -64.0f),
                new Vec3(hOffset, -5.0f + vOffset, -44.0f),
                new Vec3(0.0f, 1.0f, 0.0f));
        modelMatrix.applyMatrix(worldToCamMat);
        final ProgramData prog = g_drawGammaProgram ? g_progGamma : g_progNoGamma;
        glUseProgram(prog.theProgram);
        glUniformMatrix4(prog.modelToCameraMatrixUnif, false,
                modelMatrix.top().fillAndFlipBuffer(tempFloatBuffer16));
        glActiveTexture(GL_TEXTURE0 + g_colorTexUnit);
        glBindTexture(GL_TEXTURE_2D, g_drawGammaTexture ? g_gammaTexture : g_linearTexture);
        glBindSampler(g_colorTexUnit, g_samplers[g_currSampler]);
        if (g_drawCorridor) {
            g_pCorridor.render("tex");
        } else {
            g_pPlane.render("tex");
        }
        glBindSampler(g_colorTexUnit, 0);
        glBindTexture(GL_TEXTURE_2D, 0);
        glUseProgram(0);
    }

    @Override
    protected void reshape(int width, int height) {
        MatrixStack persMatrix = new MatrixStack();
        persMatrix.perspective(90.0f, (width / (float) height), g_fzNear, g_fzFar);
        ProjectionBlock projData = new ProjectionBlock();
        projData.cameraToClipMatrix = persMatrix.top();
        glBindBuffer(GL_UNIFORM_BUFFER, g_projectionUniformBuffer);
        glBufferSubData(GL_UNIFORM_BUFFER, 0, projData.fillAndFlipBuffer(tempFloatBuffer16));
        glBindBuffer(GL_UNIFORM_BUFFER, 0);
        glViewport(0, 0, width, height);
    }

    /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
     * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */

    // CPU-side mirror of the shader's Projection uniform block.
    private class ProjectionBlock extends BufferableData<FloatBuffer> {
        Mat4 cameraToClipMatrix;
        static final int SIZE = 16 * FLOAT_SIZE;

        @Override
        public FloatBuffer fillBuffer(FloatBuffer buffer) {
            return cameraToClipMatrix.fillBuffer(buffer);
        }
    }

    private final int NUM_SAMPLERS = 2;

    private Mesh g_pPlane;
    private Mesh g_pCorridor;
    private Timer g_camTimer = new Timer(Timer.Type.TT_LOOP, 5.0f);
    private boolean g_drawCorridor;
    private boolean g_drawGammaTexture;
    private boolean g_drawGammaProgram;
    private int g_samplers[] = new int[NUM_SAMPLERS];
    private int g_currSampler;

    /** Creates the two samplers selectable with keys 1 and 2. */
    private void createSamplers() {
        for (int samplerIx = 0; samplerIx < NUM_SAMPLERS; samplerIx++) {
            g_samplers[samplerIx] = glGenSamplers();
            glSamplerParameteri(g_samplers[samplerIx], GL_TEXTURE_WRAP_S, GL_REPEAT);
            glSamplerParameteri(g_samplers[samplerIx], GL_TEXTURE_WRAP_T, GL_REPEAT);
        }
        // Linear mipmap linear
        glSamplerParameteri(g_samplers[0], GL_TEXTURE_MAG_FILTER, GL_LINEAR);
        glSamplerParameteri(g_samplers[0], GL_TEXTURE_MIN_FILTER, GL_LINEAR_MIPMAP_LINEAR);
        // Max anisotropic
        float maxAniso = glGetFloat(GL_MAX_TEXTURE_MAX_ANISOTROPY_EXT);
        glSamplerParameteri(g_samplers[1], GL_TEXTURE_MAG_FILTER, GL_LINEAR);
        glSamplerParameteri(g_samplers[1], GL_TEXTURE_MIN_FILTER, GL_LINEAR_MIPMAP_LINEAR);
        glSamplerParameterf(g_samplers[1], GL_TEXTURE_MAX_ANISOTROPY_EXT, maxAniso);
    }

    /**
     * Loads the two checkerboard textures: one whose mipmaps were generated in
     * linear space and one generated in gamma (sRGB) space. Both are uploaded
     * as GL_SRGB8 with mipmap levels taken straight from the DDS files.
     */
    private void loadCheckerTexture() {
        try {
            ImageSet pImageSet = Dds.loadFromFile(TUTORIAL_DATAPATH + "checker_linear.dds");
            g_linearTexture = glGenTextures();
            glBindTexture(GL_TEXTURE_2D, g_linearTexture);
            for (int mipmapLevel = 0; mipmapLevel < pImageSet.getMipmapCount(); mipmapLevel++) {
                SingleImage image = pImageSet.getImage(mipmapLevel, 0, 0);
                Dimensions dims = image.getDimensions();
                glTexImage2D(GL_TEXTURE_2D, mipmapLevel, GL_SRGB8, dims.width, dims.height, 0,
                        GL12.GL_BGRA, GL_UNSIGNED_INT_8_8_8_8_REV, image.getImageData());
            }
            glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_BASE_LEVEL, 0);
            glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAX_LEVEL, pImageSet.getMipmapCount() - 1);
            glGenerateMipmap(GL_TEXTURE_2D);
            pImageSet = Dds.loadFromFile(TUTORIAL_DATAPATH + "checker_gamma.dds");
            g_gammaTexture = glGenTextures();
            glBindTexture(GL_TEXTURE_2D, g_gammaTexture);
            for (int mipmapLevel = 0; mipmapLevel < pImageSet.getMipmapCount(); mipmapLevel++) {
                SingleImage image = pImageSet.getImage(mipmapLevel, 0, 0);
                Dimensions dims = image.getDimensions();
                glTexImage2D(GL_TEXTURE_2D, mipmapLevel, GL_SRGB8, dims.width, dims.height, 0,
                        GL12.GL_BGRA, GL_UNSIGNED_INT_8_8_8_8_REV, image.getImageData());
            }
            glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_BASE_LEVEL, 0);
            glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAX_LEVEL, pImageSet.getMipmapCount() - 1);
            glGenerateMipmap(GL_TEXTURE_2D);
            glBindTexture(GL_TEXTURE_2D, 0);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
|
rosick/mckesson/IV/tut16/GammaCheckers02.java
|
package rosick.mckesson.IV.tut16;
import static org.lwjgl.opengl.GL11.*;
import static org.lwjgl.opengl.GL12.*;
import static org.lwjgl.opengl.GL13.*;
import static org.lwjgl.opengl.GL15.*;
import static org.lwjgl.opengl.GL20.*;
import static org.lwjgl.opengl.GL21.*;
import static org.lwjgl.opengl.GL30.*;
import static org.lwjgl.opengl.GL31.*;
import static org.lwjgl.opengl.GL32.*;
import static org.lwjgl.opengl.GL33.*;
import static org.lwjgl.opengl.EXTTextureFilterAnisotropic.*;
import java.nio.FloatBuffer;
import java.util.ArrayList;
import org.lwjgl.BufferUtils;
import org.lwjgl.input.Keyboard;
import org.lwjgl.opengl.GL12;
import rosick.LWJGLWindow;
import rosick.PortingUtils.BufferableData;
import rosick.jglsdk.framework.Framework;
import rosick.jglsdk.framework.Mesh;
import rosick.jglsdk.framework.Timer;
import rosick.jglsdk.glimg.ImageSet;
import rosick.jglsdk.glimg.ImageSet.Dimensions;
import rosick.jglsdk.glimg.ImageSet.SingleImage;
import rosick.jglsdk.glimg.loaders.Dds;
import rosick.jglsdk.glm.Glm;
import rosick.jglsdk.glm.Mat4;
import rosick.jglsdk.glm.Vec3;
import rosick.jglsdk.glutil.MatrixStack;
/**
* Visit https://github.com/rosickteam/OpenGL for project info, updates and license terms.
*
* IV. Texturing
* 16. Gamma and Textures
* http://www.arcsynthesis.org/gltut/Texturing/Tutorial%2016.html
* @author integeruser
*
* A - toggles gamma correction.
* G - switches to a texture whose mipmaps were properly generated.
* SPACE - presses A and G keys.
* Y - toggles between plane/corridor mesh.
* P - toggles pausing on/off.
* 1,2 - select linear mipmap filtering and anisotropic filtering (using the maximum possible anisotropy).
*/
public class GammaCheckers02 extends LWJGLWindow {
// Entry point: creates the tutorial window and enters its main loop.
public static void main(String[] args) {
new GammaCheckers02().start();
}
private final static int FLOAT_SIZE = Float.SIZE / 8;
private final String TUTORIAL_DATAPATH = "/rosick/mckesson/IV/tut16/data/";
/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
// Bundles a GL program handle with its per-program uniform location.
private class ProgramData {
int theProgram;
int modelToCameraMatrixUnif;
}
private final int g_projectionBlockIndex = 0;
private final int g_colorTexUnit = 0;
private ProgramData g_progNoGamma;
private ProgramData g_progGamma;
private int g_projectionUniformBuffer;
private int g_linearTexture;
private int g_gammaTexture;
private float g_fzNear = 1.0f;
private float g_fzFar = 1000.0f;
private MatrixStack modelMatrix = new MatrixStack();
private FloatBuffer tempFloatBuffer16 = BufferUtils.createFloatBuffer(16);
/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
/**
 * Compiles and links a vertex/fragment shader pair, then wires its
 * Projection uniform block and colorTexture sampler to the shared
 * binding points used by this tutorial.
 */
private ProgramData loadProgram(String strVertexShader, String strFragmentShader) {
    ArrayList<Integer> shaders = new ArrayList<>();
    shaders.add(Framework.loadShader(GL_VERTEX_SHADER, strVertexShader));
    shaders.add(Framework.loadShader(GL_FRAGMENT_SHADER, strFragmentShader));
    ProgramData result = new ProgramData();
    result.theProgram = Framework.createProgram(shaders);
    result.modelToCameraMatrixUnif = glGetUniformLocation(result.theProgram, "modelToCameraMatrix");
    // Bind the Projection uniform block to the shared binding index.
    int projectionBlockIndex = glGetUniformBlockIndex(result.theProgram, "Projection");
    glUniformBlockBinding(result.theProgram, projectionBlockIndex, g_projectionBlockIndex);
    // Point the sampler uniform at the texture unit used for the checker texture.
    int colorTextureUnif = glGetUniformLocation(result.theProgram, "colorTexture");
    glUseProgram(result.theProgram);
    glUniform1i(colorTextureUnif, g_colorTexUnit);
    glUseProgram(0);
    return result;
}
// Builds the two render programs: one that writes linear output and one
// that applies gamma correction in the fragment shader.
private void initializePrograms() {
g_progNoGamma = loadProgram(TUTORIAL_DATAPATH + "PT.vert", TUTORIAL_DATAPATH + "textureNoGamma.frag");
g_progGamma = loadProgram(TUTORIAL_DATAPATH + "PT.vert", TUTORIAL_DATAPATH + "textureGamma.frag");
}
@Override
protected void init() {
initializePrograms();
try {
g_pCorridor = new Mesh(TUTORIAL_DATAPATH + "Corridor.xml");
g_pPlane = new Mesh(TUTORIAL_DATAPATH + "BigPlane.xml");
} catch (Exception exception) {
// Meshes are required for rendering; bail out if they fail to load.
exception.printStackTrace();
System.exit(0);
}
// Face culling: meshes here are wound clockwise.
glEnable(GL_CULL_FACE);
glCullFace(GL_BACK);
glFrontFace(GL_CW);
final float depthZNear = 0.0f;
final float depthZFar = 1.0f;
glEnable(GL_DEPTH_TEST);
glDepthMask(true);
glDepthFunc(GL_LEQUAL);
glDepthRange(depthZNear, depthZFar);
glEnable(GL_DEPTH_CLAMP);
// Setup our Uniform Buffers
g_projectionUniformBuffer = glGenBuffers();
glBindBuffer(GL_UNIFORM_BUFFER, g_projectionUniformBuffer);
glBufferData(GL_UNIFORM_BUFFER, ProjectionBlock.SIZE, GL_DYNAMIC_DRAW);
// Bind the whole buffer to the Projection block's binding index.
glBindBufferRange(GL_UNIFORM_BUFFER, g_projectionBlockIndex, g_projectionUniformBuffer,
0, ProjectionBlock.SIZE);
glBindBuffer(GL_UNIFORM_BUFFER, 0);
loadCheckerTexture();
createSamplers();
}
/**
 * Polls keyboard events and updates the demo's toggles:
 * A = gamma vs. linear shader, G = gamma vs. linear mipmaps,
 * SPACE = both at once, Y = corridor vs. plane mesh, P = pause camera,
 * 1..9 = select sampler, ESC = quit.
 * Prints the current rendering/mipmap mode whenever A, G, or SPACE changes it.
 */
@Override
protected void update() {
    // Drain every pending keyboard event this frame.
    while (Keyboard.next()) {
        boolean particularKeyPressed = false;
        if (Keyboard.getEventKeyState()) {
            switch (Keyboard.getEventKey()) {
                case Keyboard.KEY_A:
                    // Toggle gamma-correct fragment shading.
                    g_drawGammaProgram = !g_drawGammaProgram;
                    particularKeyPressed = true;
                    break;
                case Keyboard.KEY_G:
                    // Toggle which mipmap chain (gamma vs. linear) is sampled.
                    g_drawGammaTexture = !g_drawGammaTexture;
                    particularKeyPressed = true;
                    break;
                case Keyboard.KEY_SPACE:
                    // Flip both toggles together.
                    g_drawGammaProgram = !g_drawGammaProgram;
                    g_drawGammaTexture = !g_drawGammaTexture;
                    particularKeyPressed = true;
                    break;
                case Keyboard.KEY_Y:
                    g_drawCorridor = !g_drawCorridor;
                    break;
                case Keyboard.KEY_P:
                    g_camTimer.togglePause();
                    break;
                case Keyboard.KEY_ESCAPE:
                    leaveMainLoop();
                    break;
            }
            // Number keys 1..9 select a sampler, if that many exist.
            if (Keyboard.KEY_1 <= Keyboard.getEventKey() && Keyboard.getEventKey() <= Keyboard.KEY_9) {
                int number = Keyboard.getEventKey() - Keyboard.KEY_1;
                if (number < NUM_SAMPLERS) {
                    g_currSampler = number;
                }
            }
        }
        // Echo the new mode only for the toggles that affect gamma handling.
        if (particularKeyPressed) {
            System.out.printf("----\n");
            System.out.printf("Rendering:\t\t%s\n", g_drawGammaProgram ? "Gamma" : "Linear");
            System.out.printf("Mipmap Generation:\t%s\n", g_drawGammaTexture ? "Gamma" : "Linear");
        }
    }
}
/**
 * Renders one frame: clears the buffers, advances the looping camera timer,
 * sways the camera along a small circle (6.28f is approximately 2*PI, i.e. one
 * full cycle per timer loop), then draws either the corridor or the plane with
 * the currently selected program, texture, and sampler.
 */
@Override
protected void display() {
    glClearColor(0.75f, 0.75f, 1.0f, 1.0f);
    glClearDepth(1.0f);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
    g_camTimer.update((float) getElapsedTime());
    float cyclicAngle = g_camTimer.getAlpha() * 6.28f;
    // Small horizontal/vertical camera sway driven by the timer.
    float hOffset = (float) (Math.cos(cyclicAngle) * 0.25f);
    float vOffset = (float) (Math.sin(cyclicAngle) * 0.25f);
    modelMatrix.clear();
    final Mat4 worldToCamMat = Glm.lookAt(
            new Vec3(hOffset, 1.0f, -64.0f),
            new Vec3(hOffset, -5.0f + vOffset, -44.0f),
            new Vec3(0.0f, 1.0f, 0.0f));
    modelMatrix.applyMatrix(worldToCamMat);
    // Select gamma or linear shading per the A/SPACE toggle.
    final ProgramData prog = g_drawGammaProgram ? g_progGamma : g_progNoGamma;
    glUseProgram(prog.theProgram);
    glUniformMatrix4(prog.modelToCameraMatrixUnif, false,
            modelMatrix.top().fillAndFlipBuffer(tempFloatBuffer16));
    // Bind the chosen checker texture and sampler to the shared texture unit.
    glActiveTexture(GL_TEXTURE0 + g_colorTexUnit);
    glBindTexture(GL_TEXTURE_2D, g_drawGammaTexture ? g_gammaTexture : g_linearTexture);
    glBindSampler(g_colorTexUnit, g_samplers[g_currSampler]);
    if (g_drawCorridor) {
        g_pCorridor.render("tex");
    } else {
        g_pPlane.render("tex");
    }
    // Unbind to leave GL state clean for the next frame.
    glBindSampler(g_colorTexUnit, 0);
    glBindTexture(GL_TEXTURE_2D, 0);
    glUseProgram(0);
}
/**
 * Rebuilds the perspective projection for the new window size, uploads it to
 * the shared "Projection" uniform buffer, and resets the viewport.
 */
@Override
protected void reshape(int width, int height) {
    MatrixStack persMatrix = new MatrixStack();
    // 90-degree vertical FOV; aspect ratio follows the window.
    persMatrix.perspective(90.0f, (width / (float) height), g_fzNear, g_fzFar);
    ProjectionBlock projData = new ProjectionBlock();
    projData.cameraToClipMatrix = persMatrix.top();
    glBindBuffer(GL_UNIFORM_BUFFER, g_projectionUniformBuffer);
    glBufferSubData(GL_UNIFORM_BUFFER, 0, projData.fillAndFlipBuffer(tempFloatBuffer16));
    glBindBuffer(GL_UNIFORM_BUFFER, 0);
    glViewport(0, 0, width, height);
}
/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
/** CPU-side mirror of the shader's "Projection" uniform block: a single mat4. */
private class ProjectionBlock extends BufferableData<FloatBuffer> {
    Mat4 cameraToClipMatrix;
    // 16 floats -> byte size of the uniform block on the GPU.
    static final int SIZE = 16 * FLOAT_SIZE;

    @Override
    public FloatBuffer fillBuffer(FloatBuffer buffer) {
        return cameraToClipMatrix.fillBuffer(buffer);
    }
}
private final int NUM_SAMPLERS = 2;
private Mesh g_pPlane;
private Mesh g_pCorridor;
private Timer g_camTimer = new Timer(Timer.Type.TT_LOOP, 5.0f);
private boolean g_drawCorridor;
private boolean g_drawGammaTexture;
private boolean g_drawGammaProgram;
private int g_samplers[] = new int[NUM_SAMPLERS];
private int g_currSampler;
/**
 * Creates the two samplers selectable with keys 1/2:
 * sampler 0 uses trilinear filtering, sampler 1 adds the hardware's maximum
 * anisotropic filtering on top of trilinear. Both repeat in S and T.
 */
private void createSamplers() {
    for (int samplerIx = 0; samplerIx < NUM_SAMPLERS; samplerIx++) {
        g_samplers[samplerIx] = glGenSamplers();
        glSamplerParameteri(g_samplers[samplerIx], GL_TEXTURE_WRAP_S, GL_REPEAT);
        glSamplerParameteri(g_samplers[samplerIx], GL_TEXTURE_WRAP_T, GL_REPEAT);
    }
    // Sampler 0: linear mipmap linear (trilinear).
    glSamplerParameteri(g_samplers[0], GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glSamplerParameteri(g_samplers[0], GL_TEXTURE_MIN_FILTER, GL_LINEAR_MIPMAP_LINEAR);
    // Sampler 1: trilinear plus max anisotropic filtering supported by the driver.
    float maxAniso = glGetFloat(GL_MAX_TEXTURE_MAX_ANISOTROPY_EXT);
    glSamplerParameteri(g_samplers[1], GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glSamplerParameteri(g_samplers[1], GL_TEXTURE_MIN_FILTER, GL_LINEAR_MIPMAP_LINEAR);
    glSamplerParameterf(g_samplers[1], GL_TEXTURE_MAX_ANISOTROPY_EXT, maxAniso);
}
/**
 * Loads the two checkerboard textures from DDS files and uploads every mipmap
 * level of each: g_linearTexture from checker_linear.dds (uploaded as sRGB)
 * and g_gammaTexture from checker_gamma.dds.
 * Errors are logged and swallowed, leaving the texture ids at their defaults.
 */
private void loadCheckerTexture() {
    try {
        ImageSet pImageSet = Dds.loadFromFile(TUTORIAL_DATAPATH + "checker_linear.dds");
        g_linearTexture = glGenTextures();
        glBindTexture(GL_TEXTURE_2D, g_linearTexture);
        for (int mipmapLevel = 0; mipmapLevel < pImageSet.getMipmapCount(); mipmapLevel++) {
            SingleImage image = pImageSet.getImage(mipmapLevel, 0, 0);
            Dimensions dims = image.getDimensions();
            glTexImage2D(GL_TEXTURE_2D, mipmapLevel, GL_SRGB8, dims.width, dims.height, 0,
                    GL12.GL_BGRA, GL_UNSIGNED_INT_8_8_8_8_REV, image.getImageData());
        }
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_BASE_LEVEL, 0);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAX_LEVEL, pImageSet.getMipmapCount() - 1);
        // NOTE(review): glGenerateMipmap here regenerates levels from level 0,
        // presumably overwriting the hand-authored mip levels just uploaded —
        // confirm this is intended.
        glGenerateMipmap(GL_TEXTURE_2D);
        pImageSet = Dds.loadFromFile(TUTORIAL_DATAPATH + "checker_gamma.dds");
        g_gammaTexture = glGenTextures();
        glBindTexture(GL_TEXTURE_2D, g_gammaTexture);
        for (int mipmapLevel = 0; mipmapLevel < pImageSet.getMipmapCount(); mipmapLevel++) {
            SingleImage image = pImageSet.getImage(mipmapLevel, 0, 0);
            Dimensions dims = image.getDimensions();
            /* GL_RGB8 should be GL_SRGB8 :\ */
            // NOTE(review): the existing comment above flags this internal format
            // as wrong; left unchanged here because switching to GL_SRGB8 alters
            // rendering output — confirm against the tutorial text before fixing.
            glTexImage2D(GL_TEXTURE_2D, mipmapLevel, GL_RGB8, dims.width, dims.height, 0,
                    GL12.GL_BGRA, GL_UNSIGNED_INT_8_8_8_8_REV, image.getImageData());
        }
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_BASE_LEVEL, 0);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAX_LEVEL, pImageSet.getMipmapCount() - 1);
        glGenerateMipmap(GL_TEXTURE_2D);
        glBindTexture(GL_TEXTURE_2D, 0);
    } catch (Exception e) {
        // Best-effort: a failed texture load leaves the demo running untextured.
        e.printStackTrace();
    }
}
}
|
*Changed GL_RGB8 with the correct GL_SRGB8
|
rosick/mckesson/IV/tut16/GammaCheckers02.java
|
*Changed GL_RGB8 with the correct GL_SRGB8
|
|
Java
|
epl-1.0
|
f15ee64ef898f26775b90e4baf19251a76e5b1e0
| 0
|
junit-team/junit,stefanbirkner/junit,kcooney/junit,alb-i986/junit,powazny/junit4,kcooney/junit,panchenko/junit,stefanbirkner/junit,mekwin87/junit4,powazny/junit4,kcooney/junit,powazny/junit4,ashleyfrieze/junit,mekwin87/junit4,mekwin87/junit4,mekwin87/junit4,alb-i986/junit,ashleyfrieze/junit,stefanbirkner/junit,alb-i986/junit,junit-team/junit4,junit-team/junit4,ashleyfrieze/junit,panchenko/junit,panchenko/junit,junit-team/junit,junit-team/junit4,junit-team/junit
|
package org.junit.rules;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.CoreMatchers.notNullValue;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
import java.io.File;
import java.io.IOException;
import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
/**
* <tt>TemporaryFolderUsageTest</tt> provides tests for API usage correctness
* and ensure implementation symmetry of public methods against a root folder.
*/
public class TemporaryFolderUsageTest {

    private TemporaryFolder tempFolder;

    // Lets individual tests declare the exception type and message they expect.
    @Rule
    public final ExpectedException thrown = ExpectedException.none();

    @Before
    public void setUp() {
        tempFolder = new TemporaryFolder();
    }

    @After
    public void tearDown() {
        // Safe even when a test never called create() (delete() is a no-op then).
        tempFolder.delete();
    }

    // --- API misuse: accessors must fail fast before create() is invoked. ---

    @Test(expected = IllegalStateException.class)
    public void getRootShouldThrowIllegalStateExceptionIfCreateWasNotInvoked() {
        new TemporaryFolder().getRoot();
    }

    @Test(expected = IllegalStateException.class)
    public void newFileThrowsIllegalStateExceptionIfCreateWasNotInvoked()
            throws IOException {
        new TemporaryFolder().newFile();
    }

    @Test(expected = IllegalStateException.class)
    public void newFileWithGivenNameThrowsIllegalStateExceptionIfCreateWasNotInvoked()
            throws IOException {
        new TemporaryFolder().newFile("MyFile.txt");
    }

    // --- Collisions with existing files/folders surface as IOException. ---

    @Test
    public void newFileWithGivenFilenameThrowsIOExceptionIfFileExists() throws IOException {
        tempFolder.create();
        tempFolder.newFile("MyFile.txt");
        thrown.expect(IOException.class);
        thrown.expectMessage("a file with the name 'MyFile.txt' already exists in the test folder");
        tempFolder.newFile("MyFile.txt");
    }

    @Test(expected = IllegalStateException.class)
    public void newFolderThrowsIllegalStateExceptionIfCreateWasNotInvoked()
            throws IOException {
        new TemporaryFolder().newFolder();
    }

    @Test(expected = IllegalStateException.class)
    public void newFolderWithGivenPathThrowsIllegalStateExceptionIfCreateWasNotInvoked() throws IOException {
        new TemporaryFolder().newFolder("level1", "level2", "level3");
    }

    @Test
    public void newFolderWithGivenFolderThrowsIOExceptionIfFolderExists() throws IOException {
        tempFolder.create();
        tempFolder.newFolder("level1");
        thrown.expect(IOException.class);
        thrown.expectMessage("a folder with the path 'level1' already exists");
        tempFolder.newFolder("level1");
    }

    @Test
    public void newFolderWithGivenFolderThrowsIOExceptionIfFileExists() throws IOException {
        tempFolder.create();
        File file = new File(tempFolder.getRoot(), "level1");
        assertTrue("Could not create" + file, file.createNewFile());
        thrown.expect(IOException.class);
        thrown.expectMessage("could not create a folder with the path 'level1'");
        tempFolder.newFolder("level1");
    }

    @Test
    public void newFolderWithPathStartingWithFileSeparatorThrowsIOException()
            throws IOException {
        // Build an absolute path in a platform-independent way (drive root on
        // Windows, "/" elsewhere); absolute paths must be rejected.
        String fileAtRoot;
        File[] roots = File.listRoots();
        if (roots != null && roots.length > 0) {
            fileAtRoot = roots[0].getAbsolutePath() + "temp1";
        } else {
            fileAtRoot = File.separator + "temp1";
        }
        tempFolder.create();
        thrown.expect(IOException.class);
        thrown.expectMessage("folder path '/temp1' is not a relative path");
        tempFolder.newFolder(fileAtRoot);
    }

    // --- Nested path creation, with "/" and the platform separator. ---

    @Test
    public void newFolderWithPathContainingFileSeparaterCreatesDirectories()
            throws IOException {
        tempFolder.create();
        tempFolder.newFolder("temp1" + File.separator + "temp2");
        File temp1 = new File(tempFolder.getRoot(), "temp1");
        assertFileIsDirectory(temp1);
        assertFileIsDirectory(new File(temp1, "temp2"));
    }

    @Test
    public void newFolderWithPathContainingForwardSlashCreatesDirectories()
            throws IOException {
        tempFolder.create();
        tempFolder.newFolder("temp1/temp2");
        File temp1 = new File(tempFolder.getRoot(), "temp1");
        assertFileIsDirectory(temp1);
        assertFileIsDirectory(new File(temp1, "temp2"));
    }

    @Test
    public void newFolderWithGivenPathThrowsIOExceptionIfFolderExists() throws IOException {
        tempFolder.create();
        tempFolder.newFolder("level1", "level2", "level3");
        thrown.expect(IOException.class);
        String path = "level1" + File.separator + "level2" + File.separator + "level3";
        thrown.expectMessage("a folder with the path '" + path + "' already exists");
        tempFolder.newFolder("level1", "level2", "level3");
    }

    @Test
    public void newFolderWithGivenEmptyArrayThrowsIllegalArgumentException() throws IOException {
        tempFolder.create();
        thrown.expect(IllegalArgumentException.class);
        thrown.expectMessage("must pass at least one path");
        tempFolder.newFolder(new String[0]);
    }

    @Test
    public void newFolderWithPathsContainingForwardSlashCreatesFullPath()
            throws IOException {
        tempFolder.create();
        tempFolder.newFolder("temp1", "temp2", "temp3/temp4");
        File directory = new File(tempFolder.getRoot(), "temp1");
        assertFileIsDirectory(directory);
        directory = new File(directory, "temp2/temp3/temp4");
        assertFileIsDirectory(directory);
    }

    @Test
    public void newFolderWithPathsContainingFileSeparatorCreatesFullPath()
            throws IOException {
        tempFolder.create();
        tempFolder.newFolder("temp1", "temp2", "temp3" + File.separator + "temp4");
        File directory = new File(tempFolder.getRoot(), "temp1");
        assertFileIsDirectory(directory);
        directory = new File(directory, "temp2/temp3/temp4");
        assertFileIsDirectory(directory);
    }

    // --- Lifecycle: create() and delete(). ---

    @Test
    public void createInitializesRootFolder() throws IOException {
        tempFolder.create();
        assertFileIsDirectory(tempFolder.getRoot());
    }

    @Test
    public void deleteShouldDoNothingIfRootFolderWasNotInitialized() {
        tempFolder.delete();
    }

    @Test
    public void deleteRemovesRootFolder() throws IOException {
        tempFolder.create();
        tempFolder.delete();
        assertFileDoesNotExist(tempFolder.getRoot());
    }

    // --- New files/folders always live under the rule's root folder. ---

    @Test
    public void newRandomFileIsCreatedUnderRootFolder() throws IOException {
        tempFolder.create();
        File f = tempFolder.newFile();
        assertFileExists(f);
        assertFileCreatedUnderRootFolder("Random file", f);
    }

    @Test
    public void newNamedFileIsCreatedUnderRootFolder() throws IOException {
        final String fileName = "SampleFile.txt";
        tempFolder.create();
        File f = tempFolder.newFile(fileName);
        assertFileExists(f);
        assertFileCreatedUnderRootFolder("Named file", f);
        assertThat("file name", f.getName(), equalTo(fileName));
    }

    @Test
    public void newRandomFolderIsCreatedUnderRootFolder() throws IOException {
        tempFolder.create();
        File f = tempFolder.newFolder();
        assertFileIsDirectory(f);
        assertFileCreatedUnderRootFolder("Random folder", f);
    }

    @Test
    public void newNestedFoldersCreatedUnderRootFolder() throws IOException {
        tempFolder.create();
        File f = tempFolder.newFolder("top", "middle", "bottom");
        assertFileIsDirectory(f);
        assertParentFolderForFileIs(f, new File(tempFolder.getRoot(),
                "top/middle"));
        assertParentFolderForFileIs(f.getParentFile(),
                new File(tempFolder.getRoot(), "top"));
        assertFileCreatedUnderRootFolder("top", f.getParentFile()
                .getParentFile());
    }

    @Test
    public void canSetTheBaseFileForATemporaryFolder() throws IOException {
        File tempDir = createTemporaryFolder();
        TemporaryFolder folder = new TemporaryFolder(tempDir);
        folder.create();
        assertThat(tempDir, is(folder.getRoot().getParentFile()));
    }

    // Creates a real directory on disk to act as a parent for the rule's root.
    private File createTemporaryFolder() throws IOException {
        File tempDir = File.createTempFile("junit", "tempFolder");
        assertTrue("Unable to delete temporary file", tempDir.delete());
        assertTrue("Unable to create temp directory", tempDir.mkdir());
        return tempDir;
    }

    // --- Shared assertion helpers. ---

    private void assertFileDoesNotExist(File file) {
        checkFileExists("exists", file, false);
    }

    private void checkFileExists(String msg, File file, boolean exists) {
        assertThat("File is null", file, is(notNullValue()));
        assertThat("File '" + file.getAbsolutePath() + "' " + msg,
                file.exists(), is(exists));
    }

    private void checkFileIsDirectory(String msg, File file, boolean isDirectory) {
        assertThat("File is null", file, is(notNullValue()));
        assertThat("File '" + file.getAbsolutePath() + "' " + msg,
                file.isDirectory(), is(isDirectory));
    }

    private void assertFileExists(File file) {
        checkFileExists("does not exist", file, true);
        checkFileIsDirectory("is a directory", file, false);
    }

    private void assertFileIsDirectory(File file) {
        checkFileExists("does not exist", file, true);
        checkFileIsDirectory("is not a directory", file, true);
    }

    private void assertFileCreatedUnderRootFolder(String msg, File f) {
        assertParentFolderForFileIs(f, tempFolder.getRoot());
    }

    private void assertParentFolderForFileIs(File f, File parentFolder) {
        assertThat("'" + f.getAbsolutePath() + "': not under root",
                f.getParentFile(), is(parentFolder));
    }
}
|
src/test/java/org/junit/rules/TemporaryFolderUsageTest.java
|
package org.junit.rules;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.CoreMatchers.notNullValue;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
import java.io.File;
import java.io.IOException;
import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
/**
* <tt>TemporaryFolderUsageTest</tt> provides tests for API usage correctness
* and ensure implementation symmetry of public methods against a root folder.
*/
public class TemporaryFolderUsageTest {

    private TemporaryFolder tempFolder;

    // Lets individual tests declare the exception type and message they expect.
    @Rule
    public final ExpectedException thrown = ExpectedException.none();

    @Before
    public void setUp() {
        tempFolder = new TemporaryFolder();
    }

    @After
    public void tearDown() {
        // Safe even when a test never called create() (delete() is a no-op then).
        tempFolder.delete();
    }

    @Test(expected = IllegalStateException.class)
    public void getRootShouldThrowIllegalStateExceptionIfCreateWasNotInvoked() {
        new TemporaryFolder().getRoot();
    }

    @Test(expected = IllegalStateException.class)
    public void newFileThrowsIllegalStateExceptionIfCreateWasNotInvoked()
            throws IOException {
        new TemporaryFolder().newFile();
    }

    @Test(expected = IllegalStateException.class)
    public void newFileWithGivenNameThrowsIllegalStateExceptionIfCreateWasNotInvoked()
            throws IOException {
        new TemporaryFolder().newFile("MyFile.txt");
    }

    // Renamed from ...ThrowsIllegalArgumentException...: the body expects
    // IOException, so the name now matches the asserted behavior.
    @Test
    public void newFileWithGivenFilenameThrowsIOExceptionIfFileExists() throws IOException {
        tempFolder.create();
        tempFolder.newFile("MyFile.txt");
        thrown.expect(IOException.class);
        thrown.expectMessage("a file with the name 'MyFile.txt' already exists in the test folder");
        tempFolder.newFile("MyFile.txt");
    }

    @Test(expected = IllegalStateException.class)
    public void newFolderThrowsIllegalStateExceptionIfCreateWasNotInvoked()
            throws IOException {
        new TemporaryFolder().newFolder();
    }

    @Test(expected = IllegalStateException.class)
    public void newFolderWithGivenPathThrowsIllegalStateExceptionIfCreateWasNotInvoked() throws IOException {
        new TemporaryFolder().newFolder("level1", "level2", "level3");
    }

    // Renamed from ...ThrowsIllegalArgumentException...: body expects IOException.
    @Test
    public void newFolderWithGivenFolderThrowsIOExceptionIfFolderExists() throws IOException {
        tempFolder.create();
        tempFolder.newFolder("level1");
        thrown.expect(IOException.class);
        thrown.expectMessage("a folder with the path 'level1' already exists");
        tempFolder.newFolder("level1");
    }

    // Renamed from ...ThrowsIllegalArgumentException...: body expects IOException.
    @Test
    public void newFolderWithGivenFolderThrowsIOExceptionIfFileExists() throws IOException {
        tempFolder.create();
        File file = new File(tempFolder.getRoot(), "level1");
        assertTrue("Could not create" + file, file.createNewFile());
        thrown.expect(IOException.class);
        thrown.expectMessage("could not create a folder with the path 'level1'");
        tempFolder.newFolder("level1");
    }

    @Test
    public void newFolderWithPathStartingWithFileSeparatorThrowsIOException()
            throws IOException {
        // Build an absolute path in a platform-independent way (drive root on
        // Windows, "/" elsewhere); absolute paths must be rejected.
        String fileAtRoot;
        File[] roots = File.listRoots();
        if (roots != null && roots.length > 0) {
            fileAtRoot = roots[0].getAbsolutePath() + "temp1";
        } else {
            fileAtRoot = File.separator + "temp1";
        }
        tempFolder.create();
        thrown.expect(IOException.class);
        thrown.expectMessage("folder path '/temp1' is not a relative path");
        tempFolder.newFolder(fileAtRoot);
    }

    @Test
    public void newFolderWithPathContainingFileSeparaterCreatesDirectories()
            throws IOException {
        tempFolder.create();
        tempFolder.newFolder("temp1" + File.separator + "temp2");
        File temp1 = new File(tempFolder.getRoot(), "temp1");
        assertFileIsDirectory(temp1);
        assertFileIsDirectory(new File(temp1, "temp2"));
    }

    @Test
    public void newFolderWithPathContainingForwardSlashCreatesDirectories()
            throws IOException {
        tempFolder.create();
        tempFolder.newFolder("temp1/temp2");
        File temp1 = new File(tempFolder.getRoot(), "temp1");
        assertFileIsDirectory(temp1);
        assertFileIsDirectory(new File(temp1, "temp2"));
    }

    // Renamed from ...ThrowsIllegalArgumentException...: body expects IOException.
    @Test
    public void newFolderWithGivenPathThrowsIOExceptionIfFolderExists() throws IOException {
        tempFolder.create();
        tempFolder.newFolder("level1", "level2", "level3");
        thrown.expect(IOException.class);
        String path = "level1" + File.separator + "level2" + File.separator + "level3";
        thrown.expectMessage("a folder with the path '" + path + "' already exists");
        tempFolder.newFolder("level1", "level2", "level3");
    }

    @Test
    public void newFolderWithGivenEmptyArrayThrowsIllegalArgumentException() throws IOException {
        tempFolder.create();
        thrown.expect(IllegalArgumentException.class);
        thrown.expectMessage("must pass at least one path");
        tempFolder.newFolder(new String[0]);
    }

    @Test
    public void newFolderWithPathsContainingForwardSlashCreatesFullPath()
            throws IOException {
        tempFolder.create();
        tempFolder.newFolder("temp1", "temp2", "temp3/temp4");
        File directory = new File(tempFolder.getRoot(), "temp1");
        assertFileIsDirectory(directory);
        directory = new File(directory, "temp2/temp3/temp4");
        assertFileIsDirectory(directory);
    }

    @Test
    public void newFolderWithPathsContainingFileSeparatorCreatesFullPath()
            throws IOException {
        tempFolder.create();
        tempFolder.newFolder("temp1", "temp2", "temp3" + File.separator + "temp4");
        File directory = new File(tempFolder.getRoot(), "temp1");
        assertFileIsDirectory(directory);
        directory = new File(directory, "temp2/temp3/temp4");
        assertFileIsDirectory(directory);
    }

    @Test
    public void createInitializesRootFolder() throws IOException {
        tempFolder.create();
        assertFileIsDirectory(tempFolder.getRoot());
    }

    @Test
    public void deleteShouldDoNothingIfRootFolderWasNotInitialized() {
        tempFolder.delete();
    }

    @Test
    public void deleteRemovesRootFolder() throws IOException {
        tempFolder.create();
        tempFolder.delete();
        assertFileDoesNotExist(tempFolder.getRoot());
    }

    @Test
    public void newRandomFileIsCreatedUnderRootFolder() throws IOException {
        tempFolder.create();
        File f = tempFolder.newFile();
        assertFileExists(f);
        assertFileCreatedUnderRootFolder("Random file", f);
    }

    @Test
    public void newNamedFileIsCreatedUnderRootFolder() throws IOException {
        final String fileName = "SampleFile.txt";
        tempFolder.create();
        File f = tempFolder.newFile(fileName);
        assertFileExists(f);
        assertFileCreatedUnderRootFolder("Named file", f);
        assertThat("file name", f.getName(), equalTo(fileName));
    }

    @Test
    public void newRandomFolderIsCreatedUnderRootFolder() throws IOException {
        tempFolder.create();
        File f = tempFolder.newFolder();
        assertFileIsDirectory(f);
        assertFileCreatedUnderRootFolder("Random folder", f);
    }

    @Test
    public void newNestedFoldersCreatedUnderRootFolder() throws IOException {
        tempFolder.create();
        File f = tempFolder.newFolder("top", "middle", "bottom");
        assertFileIsDirectory(f);
        assertParentFolderForFileIs(f, new File(tempFolder.getRoot(),
                "top/middle"));
        assertParentFolderForFileIs(f.getParentFile(),
                new File(tempFolder.getRoot(), "top"));
        assertFileCreatedUnderRootFolder("top", f.getParentFile()
                .getParentFile())
        ;
    }

    @Test
    public void canSetTheBaseFileForATemporaryFolder() throws IOException {
        File tempDir = createTemporaryFolder();
        TemporaryFolder folder = new TemporaryFolder(tempDir);
        folder.create();
        assertThat(tempDir, is(folder.getRoot().getParentFile()));
    }

    // Creates a real directory on disk to act as a parent for the rule's root.
    private File createTemporaryFolder() throws IOException {
        File tempDir = File.createTempFile("junit", "tempFolder");
        assertTrue("Unable to delete temporary file", tempDir.delete());
        assertTrue("Unable to create temp directory", tempDir.mkdir());
        return tempDir;
    }

    // --- Shared assertion helpers. ---

    private void assertFileDoesNotExist(File file) {
        checkFileExists("exists", file, false);
    }

    private void checkFileExists(String msg, File file, boolean exists) {
        assertThat("File is null", file, is(notNullValue()));
        assertThat("File '" + file.getAbsolutePath() + "' " + msg,
                file.exists(), is(exists));
    }

    private void checkFileIsDirectory(String msg, File file, boolean isDirectory) {
        assertThat("File is null", file, is(notNullValue()));
        assertThat("File '" + file.getAbsolutePath() + "' " + msg,
                file.isDirectory(), is(isDirectory));
    }

    private void assertFileExists(File file) {
        checkFileExists("does not exist", file, true);
        checkFileIsDirectory("is a directory", file, false);
    }

    private void assertFileIsDirectory(File file) {
        checkFileExists("does not exist", file, true);
        checkFileIsDirectory("is not a directory", file, true);
    }

    private void assertFileCreatedUnderRootFolder(String msg, File f) {
        assertParentFolderForFileIs(f, tempFolder.getRoot());
    }

    private void assertParentFolderForFileIs(File f, File parentFolder) {
        assertThat("'" + f.getAbsolutePath() + "': not under root",
                f.getParentFile(), is(parentFolder));
    }
}
|
Fix method names in TemporaryFolderUsageTest.
|
src/test/java/org/junit/rules/TemporaryFolderUsageTest.java
|
Fix method names in TemporaryFolderUsageTest.
|
|
Java
|
epl-1.0
|
693c8a0ff2d25766f891602377d99d1a82f8df6e
| 0
|
gnodet/wikitext
|
/*******************************************************************************
* Copyright (c) 2004, 2008 Tasktop Technologies and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Tasktop Technologies - initial API and implementation
*******************************************************************************/
package org.eclipse.mylyn.internal.context.ui;
import org.eclipse.mylyn.internal.context.ui.actions.FocusOutlineAction;
import org.eclipse.swt.widgets.Display;
import org.eclipse.ui.IEditorPart;
import org.eclipse.ui.IPartListener;
import org.eclipse.ui.IWorkbenchPart;
/**
* @author Mik Kersten
*/
public class ContentOutlineManager implements IPartListener {

    public void partBroughtToTop(final IWorkbenchPart part) {
        // Deferred via asyncExec because of bug 261977: the outline view did
        // not filter its contents when a new editor was opened.
        Display.getDefault().asyncExec(new Runnable() {
            public void run() {
                if (!(part instanceof IEditorPart)) {
                    return;
                }
                IEditorPart editor = (IEditorPart) part;
                FocusOutlineAction action = FocusOutlineAction.getOutlineActionForEditor(editor);
                if (action != null) {
                    action.update(editor);
                }
            }
        });
    }

    public void partActivated(IWorkbenchPart part) {
        // ignore
    }

    public void partOpened(IWorkbenchPart part) {
        // ignore
    }

    public void partClosed(IWorkbenchPart partRef) {
        // ignore
    }

    public void partDeactivated(IWorkbenchPart partRef) {
        // ignore
    }
}
|
org.eclipse.mylyn.context.ui/src/org/eclipse/mylyn/internal/context/ui/ContentOutlineManager.java
|
/*******************************************************************************
* Copyright (c) 2004, 2008 Tasktop Technologies and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Tasktop Technologies - initial API and implementation
*******************************************************************************/
package org.eclipse.mylyn.internal.context.ui;
import org.eclipse.mylyn.internal.context.ui.actions.FocusOutlineAction;
import org.eclipse.swt.widgets.Display;
import org.eclipse.ui.IEditorPart;
import org.eclipse.ui.IPartListener;
import org.eclipse.ui.IWorkbenchPart;
/**
* @author Mik Kersten
*/
public class ContentOutlineManager implements IPartListener {

    public void partBroughtToTop(final IWorkbenchPart part) {
        // NOTE(review): deferred via asyncExec — presumably so the part is fully
        // active before the outline action is updated; confirm intent.
        Display.getDefault().asyncExec(new Runnable() {
            public void run() {
                if (part instanceof IEditorPart) {
                    IEditorPart editorPart = (IEditorPart) part;
                    // Only editors that registered a focus-outline action are updated.
                    FocusOutlineAction applyAction = FocusOutlineAction.getOutlineActionForEditor(editorPart);
                    if (applyAction != null) {
                        applyAction.update(editorPart);
                    }
                }
            }
        });
    }

    public void partActivated(IWorkbenchPart part) {
        // ignore
    }

    public void partOpened(IWorkbenchPart part) {
        // ignore
    }

    public void partClosed(IWorkbenchPart partRef) {
        // ignore
    }

    public void partDeactivated(IWorkbenchPart partRef) {
        // ignore
    }
}
|
RESOLVED - bug 261977: [context] outline view does not filter contents when new editor is opened
https://bugs.eclipse.org/bugs/show_bug.cgi?id=261977
|
org.eclipse.mylyn.context.ui/src/org/eclipse/mylyn/internal/context/ui/ContentOutlineManager.java
|
RESOLVED - bug 261977: [context] outline view does not filter contents when new editor is opened https://bugs.eclipse.org/bugs/show_bug.cgi?id=261977
|
|
Java
|
agpl-3.0
|
b28bd01cc53024f175f35da25eee8cca9e544a36
| 0
|
duncte123/SkyBot,duncte123/SkyBot,duncte123/SkyBot,duncte123/SkyBot
|
/*
* Skybot, a multipurpose discord bot
* Copyright (C) 2017 - 2019 Duncan "duncte123" Sterken & Ramid "ramidzkh" Khan
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package ml.duncte123.skybot;
import net.dv8tion.jda.bot.sharding.ShardManager;
import net.dv8tion.jda.core.JDA;
import net.dv8tion.jda.core.JDA.ShardInfo;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
/**
 * Periodically samples the gateway ping of every shard and warns about any
 * shard whose ping has not changed since the previous sample, which the
 * watcher treats as a sign the shard may be down.
 */
class ShardWatcher {

    // One logger per class rather than per instance (standard SLF4J idiom).
    private static final Logger logger = LoggerFactory.getLogger(ShardWatcher.class);

    // Last observed ping, indexed by shard id.
    private final long[] pings;

    ShardWatcher(SkyBot skyBot) {
        final ScheduledExecutorService service = Executors.newSingleThreadScheduledExecutor();
        final int totalShards = skyBot.getShardManager().getShardsTotal();
        this.pings = new long[totalShards];
        // NOTE(review): this executor is never shut down and its worker thread is
        // non-daemon, so it keeps the JVM alive; acceptable for a watcher that
        // lives as long as the bot — confirm intended.
        service.scheduleAtFixedRate(this::checkShards, 10, 10, TimeUnit.MINUTES);
    }

    /** Samples every shard's ping and flags shards whose ping is unchanged. */
    private void checkShards() {
        final ShardManager shardManager = SkyBot.getInstance().getShardManager();
        logger.debug("Checking shards");
        for (final JDA shard : shardManager.getShardCache()) {
            final ShardInfo info = shard.getShardInfo();
            final long ping = shard.getPing();
            final long oldPing = this.pings[info.getShardId()];
            if (oldPing != ping) {
                this.pings[info.getShardId()] = ping;
            } else {
                // Identical ping across a whole interval: the shard may be stalled.
                logger.warn("{} is possibly down", info);
            }
        }
        logger.debug("Checking done");
    }
}
|
src/main/java/ml/duncte123/skybot/ShardWatcher.java
|
/*
* Skybot, a multipurpose discord bot
* Copyright (C) 2017 - 2019 Duncan "duncte123" Sterken & Ramid "ramidzkh" Khan
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package ml.duncte123.skybot;
import net.dv8tion.jda.bot.sharding.ShardManager;
import net.dv8tion.jda.core.JDA;
import net.dv8tion.jda.core.JDA.ShardInfo;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
/**
 * Periodically samples the gateway ping of every shard and warns about any
 * shard whose ping has not changed since the previous sample, which the
 * watcher treats as a sign the shard may be down.
 */
class ShardWatcher {

    // Last observed ping, indexed by shard id.
    private final long[] pings;
    private final Logger logger = LoggerFactory.getLogger(ShardWatcher.class);

    ShardWatcher(SkyBot skyBot) {
        final ScheduledExecutorService service = Executors.newSingleThreadScheduledExecutor();
        final int totalShards = skyBot.getShardManager().getShardsTotal();
        this.pings = new long[totalShards];
        // NOTE(review): the executor is never shut down and its worker thread is
        // non-daemon, so it keeps the JVM alive — confirm intended.
        service.scheduleAtFixedRate(this::checkShards, 5, 5, TimeUnit.MINUTES);
    }

    /** Samples every shard's ping and flags shards whose ping is unchanged. */
    private void checkShards() {
        final ShardManager shardManager = SkyBot.getInstance().getShardManager();
        logger.debug("Checking shards");
        for (final JDA shard : shardManager.getShardCache()) {
            final ShardInfo info = shard.getShardInfo();
            final long ping = shard.getPing();
            final long oldPing = this.pings[info.getShardId()];
            if (oldPing != ping) {
                this.pings[info.getShardId()] = ping;
            } else {
                // Identical ping across a whole interval: the shard may be stalled.
                logger.warn("{} is possibly down", info);
            }
        }
        logger.debug("Checking done");
    }
}
|
Increase shard check timeout
|
src/main/java/ml/duncte123/skybot/ShardWatcher.java
|
Increase shard check timeout
|
|
Java
|
agpl-3.0
|
c3d7dfcad722cbddfe89ac89111a4dc6d9b9a9b8
| 0
|
ColostateResearchServices/kc,iu-uits-es/kc,jwillia/kc-old1,jwillia/kc-old1,mukadder/kc,geothomasp/kcmit,mukadder/kc,iu-uits-es/kc,iu-uits-es/kc,ColostateResearchServices/kc,jwillia/kc-old1,UniversityOfHawaiiORS/kc,ColostateResearchServices/kc,kuali/kc,jwillia/kc-old1,geothomasp/kcmit,geothomasp/kcmit,geothomasp/kcmit,UniversityOfHawaiiORS/kc,mukadder/kc,kuali/kc,kuali/kc,geothomasp/kcmit,UniversityOfHawaiiORS/kc
|
/*
* Copyright 2005-2010 The Kuali Foundation
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ecl1.php
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kuali.kra.institutionalproposal.rules;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.commons.lang.StringUtils;
import org.kuali.kra.award.home.Award;
import org.kuali.kra.common.specialreview.rule.event.SaveSpecialReviewEvent;
import org.kuali.kra.infrastructure.Constants;
import org.kuali.kra.infrastructure.KraServiceLocator;
import org.kuali.kra.institutionalproposal.InstitutionalProposalCustomDataAuditRule;
import org.kuali.kra.institutionalproposal.contacts.InstitutionalProposalCreditSplitBean;
import org.kuali.kra.institutionalproposal.contacts.InstitutionalProposalPersonAuditRule;
import org.kuali.kra.institutionalproposal.contacts.InstitutionalProposalPersonSaveRuleEvent;
import org.kuali.kra.institutionalproposal.contacts.InstitutionalProposalPersonSaveRuleImpl;
import org.kuali.kra.institutionalproposal.customdata.InstitutionalProposalCustomDataRuleImpl;
import org.kuali.kra.institutionalproposal.customdata.InstitutionalProposalSaveCustomDataRuleEvent;
import org.kuali.kra.institutionalproposal.document.InstitutionalProposalDocument;
import org.kuali.kra.institutionalproposal.home.InstitutionalProposal;
import org.kuali.kra.institutionalproposal.home.InstitutionalProposalCostShare;
import org.kuali.kra.institutionalproposal.home.InstitutionalProposalScienceKeyword;
import org.kuali.kra.institutionalproposal.home.InstitutionalProposalUnrecoveredFandA;
import org.kuali.kra.institutionalproposal.specialreview.InstitutionalProposalSpecialReview;
import org.kuali.kra.rule.BusinessRuleInterface;
import org.kuali.kra.rule.event.KraDocumentEventBaseExtension;
import org.kuali.kra.rules.ResearchDocumentRuleBase;
import org.kuali.rice.kns.document.Document;
import org.kuali.rice.kns.service.BusinessObjectService;
import org.kuali.rice.kns.util.ErrorMap;
import org.kuali.rice.kns.util.GlobalVariables;
/**
* This class...
*/
/**
 * Aggregates the save-time and audit-time business rules for an Institutional
 * Proposal document. Each rule area (custom data, unrecovered F&A,
 * sponsor/program, financials, special review, project persons, keywords,
 * account id and cost share) is delegated to its dedicated rule
 * implementation and the individual results are AND-ed together.
 */
public class InstitutionalProposalDocumentRule extends ResearchDocumentRuleBase implements BusinessRuleInterface {
    public static final String DOCUMENT_ERROR_PATH = "document";
    public static final String INSTITUTIONAL_PROPOSAL_ERROR_PATH = "institutionalProposalList[0]";
    public static final String IP_ERROR_PATH = "institutionalProposal";
    public static final boolean VALIDATION_REQUIRED = true;
    public static final boolean CHOMP_LAST_LETTER_S_FROM_COLLECTION_NAME = false;
    private static final String SAVE_SPECIAL_REVIEW_FIELD = "document.institutionalProposalList[0].specialReviews";

    /**
     * Runs dictionary validation followed by every save-time rule area.
     * Returns false immediately if the document is not an
     * {@link InstitutionalProposalDocument}.
     *
     * @see org.kuali.core.rules.DocumentRuleBase#processCustomSaveDocumentBusinessRules(
     *      org.kuali.rice.kns.document.Document)
     */
    @Override
    protected boolean processCustomSaveDocumentBusinessRules(Document document) {
        boolean retval = true;
        ErrorMap errorMap = GlobalVariables.getErrorMap();
        if (!(document instanceof InstitutionalProposalDocument)) {
            return false;
        }
        errorMap.addToErrorPath(DOCUMENT_ERROR_PATH);
        getDictionaryValidationService().validateDocumentAndUpdatableReferencesRecursively(
            document, getMaxDictionaryValidationDepth(),
            VALIDATION_REQUIRED, CHOMP_LAST_LETTER_S_FROM_COLLECTION_NAME);
        errorMap.removeFromErrorPath(DOCUMENT_ERROR_PATH);
        retval &= processSaveInstitutionalProposalCustomDataBusinessRules(document);
        retval &= processUnrecoveredFandABusinessRules(document);
        retval &= processSponsorProgramBusinessRule(document);
        retval &= processInstitutionalProposalBusinessRules(document);
        retval &= processInstitutionalProposalFinancialRules(document);
        retval &= processSpecialReviewBusinessRule(document);
        retval &= processInstitutionalProposalPersonBusinessRules(errorMap, document);
        // Credit split checks run at audit time; see processRunAuditBusinessRules().
        retval &= processKeywordBusinessRule(document);
        retval &= processAccountIdBusinessRule(document);
        retval &= processCostShareRules(document);
        return retval;
    }

    /**
     * Validates the proposal's custom data attributes, pushing the matching
     * error path so validation messages attach to the right fields.
     *
     * @param document the institutional proposal document being saved
     * @return true if the custom data passes validation
     */
    private boolean processSaveInstitutionalProposalCustomDataBusinessRules(Document document) {
        boolean valid = true;
        ErrorMap errorMap = GlobalVariables.getErrorMap();
        InstitutionalProposalDocument institutionalProposalDocument = (InstitutionalProposalDocument) document;
        errorMap.addToErrorPath(DOCUMENT_ERROR_PATH);
        errorMap.addToErrorPath(INSTITUTIONAL_PROPOSAL_ERROR_PATH);
        String errorPath = "institutionalProposalCustomData";
        errorMap.addToErrorPath(errorPath);
        InstitutionalProposalSaveCustomDataRuleEvent event = new InstitutionalProposalSaveCustomDataRuleEvent(errorPath,
            institutionalProposalDocument);
        valid &= new InstitutionalProposalCustomDataRuleImpl().processSaveInstitutionalProposalCustomDataBusinessRules(event);
        errorMap.removeFromErrorPath(errorPath);
        errorMap.removeFromErrorPath(INSTITUTIONAL_PROPOSAL_ERROR_PATH);
        errorMap.removeFromErrorPath(DOCUMENT_ERROR_PATH);
        return valid;
    }

    /**
     * Validates every unrecovered F&A entry on the proposal, using an indexed
     * error path per entry so messages attach to the correct row.
     *
     * @param document the institutional proposal document being saved
     * @return true if all unrecovered F&A entries pass validation
     */
    private boolean processUnrecoveredFandABusinessRules(Document document) {
        boolean valid = true;
        ErrorMap errorMap = GlobalVariables.getErrorMap();
        InstitutionalProposalDocument institutionalProposalDocument = (InstitutionalProposalDocument) document;
        int i = 0;
        List<InstitutionalProposalUnrecoveredFandA> institutionalProposalUnrecoveredFandAs =
            institutionalProposalDocument.getInstitutionalProposal().getInstitutionalProposalUnrecoveredFandAs();
        errorMap.addToErrorPath(DOCUMENT_ERROR_PATH);
        errorMap.addToErrorPath(IP_ERROR_PATH);
        for (InstitutionalProposalUnrecoveredFandA institutionalProposalUnrecoveredFandA : institutionalProposalUnrecoveredFandAs) {
            String errorPath = "institutionalProposalUnrecoveredFandAs[" + i + Constants.RIGHT_SQUARE_BRACKET;
            errorMap.addToErrorPath(errorPath);
            InstitutionalProposalSaveUnrecoveredFandARuleEvent event = new InstitutionalProposalSaveUnrecoveredFandARuleEvent(errorPath,
                institutionalProposalDocument,
                institutionalProposalUnrecoveredFandA);
            valid &= new InstitutionalProposalUnrecoveredFandARuleImpl().processSaveInstitutionalProposalUnrecoveredFandABusinessRules(event);
            errorMap.removeFromErrorPath(errorPath);
            i++;
        }
        errorMap.removeFromErrorPath(IP_ERROR_PATH);
        errorMap.removeFromErrorPath(DOCUMENT_ERROR_PATH);
        return valid;
    }

    /**
     * Runs the audit rules: custom data audit, person audit, and the person
     * credit split recalculations.
     *
     * @see org.kuali.core.rule.DocumentAuditRule#processRunAuditBusinessRules(
     *      org.kuali.rice.kns.document.Document)
     */
    public boolean processRunAuditBusinessRules(Document document) {
        boolean retval = true;
        retval &= new InstitutionalProposalCustomDataAuditRule().processRunAuditBusinessRules(document);
        retval &= new InstitutionalProposalPersonAuditRule().processRunAuditBusinessRules(document);
        retval &= processInstitutionalProposalPersonCreditSplitBusinessRules(document);
        retval &= processInstitutionalProposalPersonUnitCreditSplitBusinessRules(document);
        return retval;
    }

    /**
     * Validates the proposal's project persons.
     *
     * @param errorMap the global error map used to scope error paths
     * @param document the institutional proposal document being saved
     * @return true if the project persons pass validation
     */
    private boolean processInstitutionalProposalPersonBusinessRules(ErrorMap errorMap, Document document) {
        errorMap.addToErrorPath(DOCUMENT_ERROR_PATH);
        errorMap.addToErrorPath(IP_ERROR_PATH);
        InstitutionalProposalPersonSaveRuleEvent event = new InstitutionalProposalPersonSaveRuleEvent("Project Persons", "projectPersons", document);
        boolean success = new InstitutionalProposalPersonSaveRuleImpl().processInstitutionalProposalPersonSaveBusinessRules(event);
        errorMap.removeFromErrorPath(IP_ERROR_PATH);
        errorMap.removeFromErrorPath(DOCUMENT_ERROR_PATH);
        return success;
    }

    /**
     * Recalculates the person credit splits for the proposal.
     *
     * @param document the institutional proposal document being audited
     * @return true if the credit split recalculation succeeds
     */
    private boolean processInstitutionalProposalPersonCreditSplitBusinessRules(Document document) {
        InstitutionalProposalDocument institutionalProposalDocument = (InstitutionalProposalDocument) document;
        return new InstitutionalProposalCreditSplitBean(institutionalProposalDocument).recalculateCreditSplit();
    }

    /**
     * Recalculates the person unit credit splits for the proposal.
     *
     * @param document the institutional proposal document being audited
     * @return true if the credit split recalculation succeeds
     */
    private boolean processInstitutionalProposalPersonUnitCreditSplitBusinessRules(Document document) {
        InstitutionalProposalDocument institutionalProposalDocument = (InstitutionalProposalDocument) document;
        return new InstitutionalProposalCreditSplitBean(institutionalProposalDocument).recalculateCreditSplit();
    }

    /**
     * Rejects the save when two science keywords share the same keyword code
     * (case-insensitive comparison).
     *
     * @param document the institutional proposal document being saved
     * @return true if no duplicate keyword codes are present
     */
    private boolean processKeywordBusinessRule(Document document) {
        InstitutionalProposalDocument institutionalProposalDocument = (InstitutionalProposalDocument) document;
        List<InstitutionalProposalScienceKeyword> keywords = institutionalProposalDocument.getInstitutionalProposal().getKeywords();
        for (InstitutionalProposalScienceKeyword keyword : keywords) {
            for (InstitutionalProposalScienceKeyword keyword2 : keywords) {
                if (keyword == keyword2) {
                    continue;
                } else if (StringUtils.equalsIgnoreCase(keyword.getScienceKeywordCode(), keyword2.getScienceKeywordCode())) {
                    GlobalVariables.getErrorMap().putError("document.institutionalProposalList[0].keyword", "error.proposalKeywords.duplicate");
                    return false;
                }
            }
        }
        return true;
    }

    /**
     * Verifies that the proposal's current account number matches the account
     * number of the award identified by the current award number. The check
     * is skipped when either value is empty or no matching award exists.
     *
     * @param document the institutional proposal document being saved
     * @return true if no account number mismatch is found
     */
    private boolean processAccountIdBusinessRule(Document document) {
        boolean retVal = true;
        InstitutionalProposalDocument institutionalProposalDocument = (InstitutionalProposalDocument) document;
        InstitutionalProposal institutionalProposal = institutionalProposalDocument.getInstitutionalProposal();
        String ipAccountNumber = institutionalProposal.getCurrentAccountNumber();
        String awardNumber = institutionalProposal.getCurrentAwardNumber();
        if (!StringUtils.isEmpty(awardNumber) && !StringUtils.isEmpty(ipAccountNumber)) {
            BusinessObjectService boService = KraServiceLocator.getService(BusinessObjectService.class);
            Map<String, String> fieldValues = new HashMap<String, String>();
            fieldValues.put("awardNumber", awardNumber);
            Collection awardCol = boService.findMatching(Award.class, fieldValues);
            if (!awardCol.isEmpty()) {
                // Only the first matching award is compared against.
                Award award = (Award) awardCol.iterator().next();
                String awardAccountNumber = award.getAccountNumber();
                if (!StringUtils.equalsIgnoreCase(ipAccountNumber, awardAccountNumber)) {
                    GlobalVariables.getMessageMap().putError("document.institutionalProposal.currentAccountNumber",
                        "error.institutionalProposal.accountNumber.invalid", ipAccountNumber);
                    retVal = false;
                }
            }
        }
        return retVal;
    }

    /**
     * Validates sponsor/program information, including the CFDA number format
     * (7 digits with a period in the 3rd character and an optional alpha
     * character in the 7th field).
     *
     * @param document the institutional proposal document being saved
     * @return true if the sponsor/program information passes validation
     */
    private boolean processSponsorProgramBusinessRule(Document document) {
        boolean valid = true;
        InstitutionalProposalDocument institutionalProposalDocument = (InstitutionalProposalDocument) document;
        String errorPath = "institutionalSponsorAndProgram";
        InstitutionalProposalSponsorAndProgramRuleEvent event = new InstitutionalProposalSponsorAndProgramRuleEvent(errorPath,
            institutionalProposalDocument, institutionalProposalDocument.getInstitutionalProposal());
        valid &= new InstitutionalProposalSponsorAndProgramRuleImpl().processInstitutionalProposalSponsorAndProgramRules(event);
        return valid;
    }

    /**
     * Validates the proposal special reviews. Checks the
     * validSpecialReviewApproval table and, on a match, the protocol number,
     * application date and approval date flags.
     *
     * @param document the institutional proposal document being validated
     * @return true if the special reviews pass validation
     */
    private boolean processSpecialReviewBusinessRule(Document document) {
        InstitutionalProposalDocument proposalDocument = (InstitutionalProposalDocument) document;
        List<InstitutionalProposalSpecialReview> specialReviews = proposalDocument.getInstitutionalProposal().getSpecialReviews();
        boolean isProtocolLinkingEnabled
            = getParameterService().getIndicatorParameter("KC-PROTOCOL", "Document", "irb.protocol.institute.proposal.linking.enabled");
        return processRules(new SaveSpecialReviewEvent<InstitutionalProposalSpecialReview>(
            SAVE_SPECIAL_REVIEW_FIELD, proposalDocument, specialReviews, isProtocolLinkingEnabled));
    }

    /**
     * Validates the proposal's financial information.
     *
     * @param document the institutional proposal document being saved
     * @return true if the financial information passes validation
     */
    private boolean processInstitutionalProposalFinancialRules(Document document) {
        boolean valid = true;
        InstitutionalProposalDocument institutionalProposalDocument = (InstitutionalProposalDocument) document;
        String errorPath = "institutionalProposalFinancial";
        InstitutionalProposalFinancialRuleEvent event = new InstitutionalProposalFinancialRuleEvent(errorPath,
            institutionalProposalDocument, institutionalProposalDocument.getInstitutionalProposal());
        valid &= new InstitutionalProposalFinancialRuleImpl().processInstitutionalProposalFinancialRules(event);
        return valid;
    }

    /**
     * Validates information on the Institutional Proposal tab of the
     * Institutional Proposal home page.
     *
     * @param document the institutional proposal document being saved
     * @return true if the proposal information passes validation
     */
    private boolean processInstitutionalProposalBusinessRules(Document document) {
        boolean valid = true;
        InstitutionalProposalDocument institutionalProposalDocument = (InstitutionalProposalDocument) document;
        String errorPath = "institutionalProposal";
        InstitutionalProposalRuleEvent event = new InstitutionalProposalRuleEvent(errorPath,
            institutionalProposalDocument, institutionalProposalDocument.getInstitutionalProposal());
        valid &= new InstitutionalProposalRuleImpl().processInstitutionalProposalRules(event);
        return valid;
    }

    /**
     * Dispatches an event to its own rule implementation.
     *
     * @param event the rule event to process
     * @return the result reported by the event's rule
     */
    public boolean processRules(KraDocumentEventBaseExtension event) {
        return event.getRule().processRules(event);
    }

    /**
     * Validates each cost share entry on the proposal.
     *
     * @param document the institutional proposal document being saved
     * @return true if all cost share entries pass validation
     */
    private boolean processCostShareRules(Document document) {
        boolean valid = true;
        InstitutionalProposalDocument institutionalProposalDocument = (InstitutionalProposalDocument) document;
        String errorPath = "institutionalProposal";
        int i = 0;
        List<InstitutionalProposalCostShare> costShares = institutionalProposalDocument.getInstitutionalProposal().getInstitutionalProposalCostShares();
        for (InstitutionalProposalCostShare costShare : costShares) {
            InstitutionalProposalAddCostShareRuleEvent event = new InstitutionalProposalAddCostShareRuleEvent(errorPath, institutionalProposalDocument, costShare);
            valid &= new InstitutionalProposalAddCostShareRuleImpl().processInstitutionalProposalCostShareBusinessRules(event, i);
            i++;
        }
        return valid;
    }
}
|
src/main/java/org/kuali/kra/institutionalproposal/rules/InstitutionalProposalDocumentRule.java
|
/*
* Copyright 2005-2010 The Kuali Foundation
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ecl1.php
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kuali.kra.institutionalproposal.rules;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.commons.lang.StringUtils;
import org.kuali.kra.award.home.Award;
import org.kuali.kra.common.specialreview.rule.event.SaveSpecialReviewEvent;
import org.kuali.kra.infrastructure.Constants;
import org.kuali.kra.infrastructure.KraServiceLocator;
import org.kuali.kra.institutionalproposal.InstitutionalProposalCustomDataAuditRule;
import org.kuali.kra.institutionalproposal.contacts.InstitutionalProposalCreditSplitBean;
import org.kuali.kra.institutionalproposal.contacts.InstitutionalProposalPersonAuditRule;
import org.kuali.kra.institutionalproposal.contacts.InstitutionalProposalPersonSaveRuleEvent;
import org.kuali.kra.institutionalproposal.contacts.InstitutionalProposalPersonSaveRuleImpl;
import org.kuali.kra.institutionalproposal.customdata.InstitutionalProposalCustomDataRuleImpl;
import org.kuali.kra.institutionalproposal.customdata.InstitutionalProposalSaveCustomDataRuleEvent;
import org.kuali.kra.institutionalproposal.document.InstitutionalProposalDocument;
import org.kuali.kra.institutionalproposal.home.InstitutionalProposal;
import org.kuali.kra.institutionalproposal.home.InstitutionalProposalCostShare;
import org.kuali.kra.institutionalproposal.home.InstitutionalProposalScienceKeyword;
import org.kuali.kra.institutionalproposal.home.InstitutionalProposalUnrecoveredFandA;
import org.kuali.kra.institutionalproposal.specialreview.InstitutionalProposalSpecialReview;
import org.kuali.kra.rule.BusinessRuleInterface;
import org.kuali.kra.rule.event.KraDocumentEventBaseExtension;
import org.kuali.kra.rules.ResearchDocumentRuleBase;
import org.kuali.rice.kns.document.Document;
import org.kuali.rice.kns.service.BusinessObjectService;
import org.kuali.rice.kns.util.ErrorMap;
import org.kuali.rice.kns.util.GlobalVariables;
/**
* This class...
*/
/**
 * Aggregates the save-time and audit-time business rules for an Institutional
 * Proposal document. Each rule area (custom data, unrecovered F&A,
 * sponsor/program, financials, special review, project persons, keywords,
 * account id and cost share) is delegated to its dedicated rule
 * implementation and the individual results are AND-ed together.
 */
public class InstitutionalProposalDocumentRule extends ResearchDocumentRuleBase implements BusinessRuleInterface {
    public static final String DOCUMENT_ERROR_PATH = "document";
    public static final String INSTITUTIONAL_PROPOSAL_ERROR_PATH = "institutionalProposalList[0]";
    public static final String IP_ERROR_PATH = "institutionalProposal";
    public static final boolean VALIDATION_REQUIRED = true;
    public static final boolean CHOMP_LAST_LETTER_S_FROM_COLLECTION_NAME = false;
    private static final String SAVE_SPECIAL_REVIEW_FIELD = "document.institutionalProposalList[0].specialReviews";

    /**
     * Runs dictionary validation followed by every save-time rule area.
     * Returns false immediately if the document is not an
     * {@link InstitutionalProposalDocument}.
     *
     * @see org.kuali.core.rules.DocumentRuleBase#processCustomSaveDocumentBusinessRules(
     *      org.kuali.rice.kns.document.Document)
     */
    @Override
    protected boolean processCustomSaveDocumentBusinessRules(Document document) {
        boolean retval = true;
        ErrorMap errorMap = GlobalVariables.getErrorMap();
        if (!(document instanceof InstitutionalProposalDocument)) {
            return false;
        }
        errorMap.addToErrorPath(DOCUMENT_ERROR_PATH);
        getDictionaryValidationService().validateDocumentAndUpdatableReferencesRecursively(
            document, getMaxDictionaryValidationDepth(),
            VALIDATION_REQUIRED, CHOMP_LAST_LETTER_S_FROM_COLLECTION_NAME);
        errorMap.removeFromErrorPath(DOCUMENT_ERROR_PATH);
        retval &= processSaveInstitutionalProposalCustomDataBusinessRules(document);
        retval &= processUnrecoveredFandABusinessRules(document);
        retval &= processSponsorProgramBusinessRule(document);
        retval &= processInstitutionalProposalBusinessRules(document);
        retval &= processInstitutionalProposalFinancialRules(document);
        retval &= processSpecialReviewBusinessRule(document);
        retval &= processInstitutionalProposalPersonBusinessRules(errorMap, document);
        // Credit split checks run at audit time; see processRunAuditBusinessRules().
        retval &= processKeywordBusinessRule(document);
        retval &= processAccountIdBusinessRule(document);
        retval &= processCostShareRules(document);
        return retval;
    }

    /**
     * Validates the proposal's custom data attributes, pushing the matching
     * error path so validation messages attach to the right fields.
     *
     * @param document the institutional proposal document being saved
     * @return true if the custom data passes validation
     */
    private boolean processSaveInstitutionalProposalCustomDataBusinessRules(Document document) {
        boolean valid = true;
        ErrorMap errorMap = GlobalVariables.getErrorMap();
        InstitutionalProposalDocument institutionalProposalDocument = (InstitutionalProposalDocument) document;
        errorMap.addToErrorPath(DOCUMENT_ERROR_PATH);
        errorMap.addToErrorPath(INSTITUTIONAL_PROPOSAL_ERROR_PATH);
        String errorPath = "institutionalProposalCustomData";
        errorMap.addToErrorPath(errorPath);
        InstitutionalProposalSaveCustomDataRuleEvent event = new InstitutionalProposalSaveCustomDataRuleEvent(errorPath,
            institutionalProposalDocument);
        valid &= new InstitutionalProposalCustomDataRuleImpl().processSaveInstitutionalProposalCustomDataBusinessRules(event);
        errorMap.removeFromErrorPath(errorPath);
        errorMap.removeFromErrorPath(INSTITUTIONAL_PROPOSAL_ERROR_PATH);
        errorMap.removeFromErrorPath(DOCUMENT_ERROR_PATH);
        return valid;
    }

    /**
     * Validates every unrecovered F&A entry on the proposal, using an indexed
     * error path per entry so messages attach to the correct row.
     *
     * @param document the institutional proposal document being saved
     * @return true if all unrecovered F&A entries pass validation
     */
    private boolean processUnrecoveredFandABusinessRules(Document document) {
        boolean valid = true;
        ErrorMap errorMap = GlobalVariables.getErrorMap();
        InstitutionalProposalDocument institutionalProposalDocument = (InstitutionalProposalDocument) document;
        int i = 0;
        List<InstitutionalProposalUnrecoveredFandA> institutionalProposalUnrecoveredFandAs =
            institutionalProposalDocument.getInstitutionalProposal().getInstitutionalProposalUnrecoveredFandAs();
        errorMap.addToErrorPath(DOCUMENT_ERROR_PATH);
        errorMap.addToErrorPath(IP_ERROR_PATH);
        for (InstitutionalProposalUnrecoveredFandA institutionalProposalUnrecoveredFandA : institutionalProposalUnrecoveredFandAs) {
            String errorPath = "institutionalProposalUnrecoveredFandAs[" + i + Constants.RIGHT_SQUARE_BRACKET;
            errorMap.addToErrorPath(errorPath);
            InstitutionalProposalSaveUnrecoveredFandARuleEvent event = new InstitutionalProposalSaveUnrecoveredFandARuleEvent(errorPath,
                institutionalProposalDocument,
                institutionalProposalUnrecoveredFandA);
            valid &= new InstitutionalProposalUnrecoveredFandARuleImpl().processSaveInstitutionalProposalUnrecoveredFandABusinessRules(event);
            errorMap.removeFromErrorPath(errorPath);
            i++;
        }
        errorMap.removeFromErrorPath(IP_ERROR_PATH);
        errorMap.removeFromErrorPath(DOCUMENT_ERROR_PATH);
        return valid;
    }

    /**
     * Runs the audit rules: custom data audit, person audit, and the person
     * credit split recalculations.
     *
     * @see org.kuali.core.rule.DocumentAuditRule#processRunAuditBusinessRules(
     *      org.kuali.rice.kns.document.Document)
     */
    public boolean processRunAuditBusinessRules(Document document) {
        boolean retval = true;
        retval &= new InstitutionalProposalCustomDataAuditRule().processRunAuditBusinessRules(document);
        retval &= new InstitutionalProposalPersonAuditRule().processRunAuditBusinessRules(document);
        retval &= processInstitutionalProposalPersonCreditSplitBusinessRules(document);
        retval &= processInstitutionalProposalPersonUnitCreditSplitBusinessRules(document);
        return retval;
    }

    /**
     * Validates the proposal's project persons.
     *
     * @param errorMap the global error map used to scope error paths
     * @param document the institutional proposal document being saved
     * @return true if the project persons pass validation
     */
    private boolean processInstitutionalProposalPersonBusinessRules(ErrorMap errorMap, Document document) {
        errorMap.addToErrorPath(DOCUMENT_ERROR_PATH);
        errorMap.addToErrorPath(IP_ERROR_PATH);
        InstitutionalProposalPersonSaveRuleEvent event = new InstitutionalProposalPersonSaveRuleEvent("Project Persons", "projectPersons", document);
        boolean success = new InstitutionalProposalPersonSaveRuleImpl().processInstitutionalProposalPersonSaveBusinessRules(event);
        errorMap.removeFromErrorPath(IP_ERROR_PATH);
        errorMap.removeFromErrorPath(DOCUMENT_ERROR_PATH);
        return success;
    }

    /**
     * Recalculates the person credit splits for the proposal.
     *
     * @param document the institutional proposal document being audited
     * @return true if the credit split recalculation succeeds
     */
    private boolean processInstitutionalProposalPersonCreditSplitBusinessRules(Document document) {
        InstitutionalProposalDocument institutionalProposalDocument = (InstitutionalProposalDocument) document;
        return new InstitutionalProposalCreditSplitBean(institutionalProposalDocument).recalculateCreditSplit();
    }

    /**
     * Recalculates the person unit credit splits for the proposal.
     *
     * @param document the institutional proposal document being audited
     * @return true if the credit split recalculation succeeds
     */
    private boolean processInstitutionalProposalPersonUnitCreditSplitBusinessRules(Document document) {
        InstitutionalProposalDocument institutionalProposalDocument = (InstitutionalProposalDocument) document;
        return new InstitutionalProposalCreditSplitBean(institutionalProposalDocument).recalculateCreditSplit();
    }

    /**
     * Rejects the save when two science keywords share the same keyword code
     * (case-insensitive comparison).
     *
     * @param document the institutional proposal document being saved
     * @return true if no duplicate keyword codes are present
     */
    private boolean processKeywordBusinessRule(Document document) {
        InstitutionalProposalDocument institutionalProposalDocument = (InstitutionalProposalDocument) document;
        List<InstitutionalProposalScienceKeyword> keywords = institutionalProposalDocument.getInstitutionalProposal().getKeywords();
        for (InstitutionalProposalScienceKeyword keyword : keywords) {
            for (InstitutionalProposalScienceKeyword keyword2 : keywords) {
                if (keyword == keyword2) {
                    continue;
                } else if (StringUtils.equalsIgnoreCase(keyword.getScienceKeywordCode(), keyword2.getScienceKeywordCode())) {
                    GlobalVariables.getErrorMap().putError("document.institutionalProposalList[0].keyword", "error.proposalKeywords.duplicate");
                    return false;
                }
            }
        }
        return true;
    }

    /**
     * Verifies that the proposal's current account number matches the account
     * number of the award identified by the current award number. The check
     * is skipped when either value is empty or no matching award exists.
     *
     * @param document the institutional proposal document being saved
     * @return true if no account number mismatch is found
     */
    private boolean processAccountIdBusinessRule(Document document) {
        boolean retVal = true;
        InstitutionalProposalDocument institutionalProposalDocument = (InstitutionalProposalDocument) document;
        InstitutionalProposal institutionalProposal = institutionalProposalDocument.getInstitutionalProposal();
        String ipAccountNumber = institutionalProposal.getCurrentAccountNumber();
        String awardNumber = institutionalProposal.getCurrentAwardNumber();
        if (!StringUtils.isEmpty(awardNumber) && !StringUtils.isEmpty(ipAccountNumber)) {
            BusinessObjectService boService = KraServiceLocator.getService(BusinessObjectService.class);
            Map<String, String> fieldValues = new HashMap<String, String>();
            fieldValues.put("awardNumber", awardNumber);
            Collection awardCol = boService.findMatching(Award.class, fieldValues);
            if (!awardCol.isEmpty()) {
                // Only the first matching award is compared against.
                Award award = (Award) awardCol.iterator().next();
                String awardAccountNumber = award.getAccountNumber();
                if (!StringUtils.equalsIgnoreCase(ipAccountNumber, awardAccountNumber)) {
                    GlobalVariables.getMessageMap().putError("document.institutionalProposal.currentAccountNumber",
                        "error.institutionalProposal.accountNumber.invalid", ipAccountNumber);
                    retVal = false;
                }
            }
        }
        return retVal;
    }

    /**
     * Validates sponsor/program information, including the CFDA number format
     * (7 digits with a period in the 3rd character and an optional alpha
     * character in the 7th field).
     *
     * @param document the institutional proposal document being saved
     * @return true if the sponsor/program information passes validation
     */
    private boolean processSponsorProgramBusinessRule(Document document) {
        boolean valid = true;
        InstitutionalProposalDocument institutionalProposalDocument = (InstitutionalProposalDocument) document;
        String errorPath = "institutionalSponsorAndProgram";
        InstitutionalProposalSponsorAndProgramRuleEvent event = new InstitutionalProposalSponsorAndProgramRuleEvent(errorPath,
            institutionalProposalDocument, institutionalProposalDocument.getInstitutionalProposal());
        valid &= new InstitutionalProposalSponsorAndProgramRuleImpl().processInstitutionalProposalSponsorAndProgramRules(event);
        return valid;
    }

    /**
     * Validates the proposal special reviews. Checks the
     * validSpecialReviewApproval table and, on a match, the protocol number,
     * application date and approval date flags.
     *
     * @param document the institutional proposal document being validated
     * @return true if the special reviews pass validation
     */
    private boolean processSpecialReviewBusinessRule(Document document) {
        InstitutionalProposalDocument proposalDocument = (InstitutionalProposalDocument) document;
        List<InstitutionalProposalSpecialReview> specialReviews = proposalDocument.getInstitutionalProposal().getSpecialReviews();
        boolean isProtocolLinkingEnabled
            = getParameterService().getIndicatorParameter("KC-PROTOCOL", "Document", "irb.protocol.institute.proposal.linking.enabled");
        return processRules(new SaveSpecialReviewEvent<InstitutionalProposalSpecialReview>(
            SAVE_SPECIAL_REVIEW_FIELD, proposalDocument, specialReviews, isProtocolLinkingEnabled));
    }

    /**
     * Validates the proposal's financial information.
     *
     * @param document the institutional proposal document being saved
     * @return true if the financial information passes validation
     */
    private boolean processInstitutionalProposalFinancialRules(Document document) {
        boolean valid = true;
        InstitutionalProposalDocument institutionalProposalDocument = (InstitutionalProposalDocument) document;
        String errorPath = "institutionalProposalFinancial";
        InstitutionalProposalFinancialRuleEvent event = new InstitutionalProposalFinancialRuleEvent(errorPath,
            institutionalProposalDocument, institutionalProposalDocument.getInstitutionalProposal());
        valid &= new InstitutionalProposalFinancialRuleImpl().processInstitutionalProposalFinancialRules(event);
        return valid;
    }

    /**
     * Validates information on the Institutional Proposal tab of the
     * Institutional Proposal home page.
     *
     * @param document the institutional proposal document being saved
     * @return true if the proposal information passes validation
     */
    private boolean processInstitutionalProposalBusinessRules(Document document) {
        boolean valid = true;
        InstitutionalProposalDocument institutionalProposalDocument = (InstitutionalProposalDocument) document;
        String errorPath = "institutionalProposal";
        InstitutionalProposalRuleEvent event = new InstitutionalProposalRuleEvent(errorPath,
            institutionalProposalDocument, institutionalProposalDocument.getInstitutionalProposal());
        valid &= new InstitutionalProposalRuleImpl().processInstitutionalProposalRules(event);
        return valid;
    }

    /**
     * Dispatches an event to its own rule implementation.
     *
     * @param event the rule event to process
     * @return the result reported by the event's rule
     */
    public boolean processRules(KraDocumentEventBaseExtension event) {
        return event.getRule().processRules(event);
    }

    /**
     * Validates each cost share entry on the proposal.
     *
     * @param document the institutional proposal document being saved
     * @return true if all cost share entries pass validation
     */
    private boolean processCostShareRules(Document document) {
        boolean valid = true;
        InstitutionalProposalDocument institutionalProposalDocument = (InstitutionalProposalDocument) document;
        String errorPath = "institutionalProposal";
        int i = 0;
        List<InstitutionalProposalCostShare> costShares = institutionalProposalDocument.getInstitutionalProposal().getInstitutionalProposalCostShares();
        for (InstitutionalProposalCostShare costShare : costShares) {
            InstitutionalProposalAddCostShareRuleEvent event = new InstitutionalProposalAddCostShareRuleEvent(errorPath, institutionalProposalDocument, costShare);
            valid &= new InstitutionalProposalAddCostShareRuleImpl().processInstitutionalProposalCostShareBusinessRules(event, i);
            i++;
        }
        return valid;
    }
}
|
remove debug code
|
src/main/java/org/kuali/kra/institutionalproposal/rules/InstitutionalProposalDocumentRule.java
|
remove debug code
|
|
Java
|
agpl-3.0
|
e8a81dda378be7d0adcf60fc0e784f502e5d2136
| 0
|
mnlipp/jgrapes,mnlipp/jgrapes
|
/*
* JGrapes Event Driven Framework
* Copyright (C) 2017 Michael N. Lipp
*
* This program is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, see <http://www.gnu.org/licenses/>.
*/
package org.jgrapes.portal;
import freemarker.template.Configuration;
import freemarker.template.SimpleScalar;
import freemarker.template.Template;
import freemarker.template.TemplateException;
import freemarker.template.TemplateExceptionHandler;
import freemarker.template.TemplateMethodModelEx;
import freemarker.template.TemplateModel;
import freemarker.template.TemplateModelException;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStreamWriter;
import java.io.PipedReader;
import java.io.PipedWriter;
import java.io.Reader;
import java.io.UnsupportedEncodingException;
import java.io.Writer;
import java.net.URI;
import java.net.URISyntaxException;
import java.nio.CharBuffer;
import java.security.Principal;
import java.text.Collator;
import java.text.ParseException;
import java.time.Instant;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.MissingResourceException;
import java.util.Optional;
import java.util.ResourceBundle;
import java.util.ServiceLoader;
import java.util.Set;
import java.util.function.BiFunction;
import java.util.function.Function;
import java.util.stream.StreamSupport;
import javax.json.Json;
import javax.json.JsonReader;
import org.jdrupes.httpcodec.protocols.http.HttpConstants.HttpStatus;
import org.jdrupes.httpcodec.protocols.http.HttpField;
import org.jdrupes.httpcodec.protocols.http.HttpRequest;
import org.jdrupes.httpcodec.protocols.http.HttpResponse;
import org.jdrupes.httpcodec.types.Converters;
import org.jdrupes.httpcodec.types.Directive;
import org.jdrupes.httpcodec.types.MediaType;
import org.jdrupes.json.JsonDecodeException;
import org.jgrapes.core.Channel;
import org.jgrapes.core.CompletionLock;
import org.jgrapes.core.Component;
import org.jgrapes.core.annotation.Handler;
import org.jgrapes.core.events.Error;
import org.jgrapes.http.LanguageSelector.Selection;
import org.jgrapes.http.Session;
import org.jgrapes.http.annotation.RequestHandler;
import org.jgrapes.http.events.GetRequest;
import org.jgrapes.http.events.Response;
import org.jgrapes.http.events.WebSocketAccepted;
import org.jgrapes.io.IOSubchannel;
import org.jgrapes.io.events.Closed;
import org.jgrapes.io.events.Input;
import org.jgrapes.io.events.Output;
import org.jgrapes.io.util.ByteBufferOutputStream;
import org.jgrapes.io.util.CharBufferWriter;
import org.jgrapes.io.util.InputStreamPipeline;
import org.jgrapes.io.util.LinkedIOSubchannel;
import org.jgrapes.io.util.ManagedCharBuffer;
import org.jgrapes.portal.events.JsonInput;
import org.jgrapes.portal.events.JsonOutput;
import org.jgrapes.portal.events.PortalReady;
import org.jgrapes.portal.events.PortletResourceRequest;
import org.jgrapes.portal.events.PortletResourceResponse;
import org.jgrapes.portal.events.SetLocale;
import org.jgrapes.portal.events.SetTheme;
import org.jgrapes.portal.themes.base.Provider;
import org.jgrapes.util.events.KeyValueStoreData;
import org.jgrapes.util.events.KeyValueStoreQuery;
import org.jgrapes.util.events.KeyValueStoreUpdate;
/**
 * Serves the portal's web resources and translates between the
 * HTTP/WebSocket transport and the portal's event-driven channels.
 */
public class PortalView extends Component {

    // The portal component this view renders and forwards events for.
    private Portal portal;
    // Created lazily; see themeLoader() for the OSGi-related reason.
    private ServiceLoader<ThemeProvider> themeLoader;
    // Shared FreeMarker configuration, initialized once for the class.
    private static Configuration fmConfig = null;
    // Optional supplier of additional, application provided resource bundles.
    private Function<Locale,ResourceBundle> resourceBundleSupplier;
    // Last-resort resource lookup when neither the selected nor the base
    // theme provides a resource; defaults to "not found" (null).
    private BiFunction<ThemeProvider,String,InputStream> fallbackResourceSupplier
        = (themeProvider, resource) -> { return null; };
    // Locales for which a locale-specific bundle exists; filled in constructor.
    private Set<Locale> supportedLocales;
    // Fallback theme used when no other theme provider matches.
    private ThemeProvider baseTheme;
    // Template model entries shared by all render requests (made immutable
    // at the end of the constructor).
    private Map<String,Object> portalBaseModel = new HashMap<>();
    // Handed to portlets for building resource URIs routed back here.
    private RenderSupport renderSupport = new RenderSupportImpl();
/**
 * Creates a new view for the given portal, attached to the given channel.
 *
 * @param portal the portal to render
 * @param componentChannel the channel this component listens on
 */
public PortalView(Portal portal, Channel componentChannel) {
    super(componentChannel);
    this.portal = portal;
    // Initialize the shared FreeMarker configuration once.
    if (fmConfig == null) {
        fmConfig = new Configuration(Configuration.VERSION_2_3_26);
        fmConfig.setClassLoaderForTemplateLoading(
            getClass().getClassLoader(), "org/jgrapes/portal");
        fmConfig.setDefaultEncoding("utf-8");
        fmConfig.setTemplateExceptionHandler(
            TemplateExceptionHandler.RETHROW_HANDLER);
        fmConfig.setLogTemplateExceptions(false);
    }
    baseTheme = new Provider();
    // Collect all locales for which a bundle specific to that locale
    // (not merely a fallback) exists.
    supportedLocales = new HashSet<>();
    for (Locale locale: Locale.getAvailableLocales()) {
        if (locale.getLanguage().equals("")) {
            continue;
        }
        // NOTE(review): resourceBundleSupplier is only injected via
        // setResourceBundleSupplier after construction, so it appears to
        // always be null at this point — confirm whether this branch is
        // intended to ever run.
        if (resourceBundleSupplier != null) {
            ResourceBundle rb = resourceBundleSupplier.apply(locale);
            if (rb.getLocale().equals(locale)) {
                supportedLocales.add(locale);
            }
        }
        ResourceBundle rb = ResourceBundle.getBundle(getClass()
            .getPackage().getName() + ".l10n", locale);
        if (rb.getLocale().equals(locale)) {
            supportedLocales.add(locale);
        }
    }
    // Register dynamic request handlers: one for everything below the
    // portal prefix, one redirecting the prefix without trailing slash.
    RequestHandler.Evaluator.add(this, "onGet", portal.prefix() + "**");
    RequestHandler.Evaluator.add(this, "onGetRedirect",
        portal.prefix().getPath().substring(
            0, portal.prefix().getPath().length() - 1));
    // Create portal model
    portalBaseModel.put("resourceUrl", new TemplateMethodModelEx() {
        @Override
        public Object exec(@SuppressWarnings("rawtypes") List arguments)
                throws TemplateModelException {
            @SuppressWarnings("unchecked")
            List<TemplateModel> args = (List<TemplateModel>)arguments;
            if (!(args.get(0) instanceof SimpleScalar)) {
                throw new TemplateModelException("Not a string.");
            }
            // Resolve the given relative path against the portal prefix.
            return portal.prefix().resolve(
                ((SimpleScalar)args.get(0)).getAsString()).getRawPath();
        }
    });
    portalBaseModel = Collections.unmodifiableMap(portalBaseModel);
    // Handlers attached to the portal side channel
    Handler.Evaluator.add(this, "onPortalReady", portal.channel());
    Handler.Evaluator.add(this, "onKeyValueStoreData", portal.channel());
    Handler.Evaluator.add(
        this, "onPortletResourceResponse", portal.channel());
    Handler.Evaluator.add(this, "onJsonOutput", portal.channel());
    Handler.Evaluator.add(this, "onSetLocale", portal.channel());
    Handler.Evaluator.add(this, "onSetTheme", portal.channel());
}
/**
 * Returns the theme provider service loader, creating it on first use.
 * The loader must be created lazily, because otherwise the OSGi
 * service mediator doesn't work properly.
 *
 * @return the service loader for {@link ThemeProvider}s
 */
private ServiceLoader<ThemeProvider> themeLoader() {
    if (themeLoader == null) {
        themeLoader = ServiceLoader.load(ThemeProvider.class);
    }
    return themeLoader;
}
/**
 * Sets a supplier for additional, application provided resource
 * bundles used for localization.
 *
 * @param supplier maps a locale to the bundle for that locale
 */
void setResourceBundleSupplier(
    Function<Locale,ResourceBundle> supplier) {
    this.resourceBundleSupplier = supplier;
}
/**
 * Sets a fallback for theme resource lookups that could not be
 * satisfied by the selected theme or the base theme.
 *
 * @param supplier returns a stream for the resource, or null if unavailable
 */
void setFallbackResourceSupplier(
    BiFunction<ThemeProvider,String,InputStream> supplier) {
    this.fallbackResourceSupplier = supplier;
}
/**
 * Returns the render support handed to portlets for resolving
 * portlet resource URIs.
 *
 * @return the render support
 */
RenderSupport renderSupport() {
    return renderSupport;
}
/**
 * Returns the portal side subchannel linked to the given upstream
 * channel, creating the link on first use.
 *
 * @param channel the upstream (HTTP/WebSocket) subchannel
 * @return the linked portal side subchannel
 */
private LinkedIOSubchannel portalChannel(IOSubchannel channel) {
    @SuppressWarnings("unchecked")
    Optional<LinkedIOSubchannel> linked
        = (Optional<LinkedIOSubchannel>)LinkedIOSubchannel
            .downstreamChannel(portal, channel);
    if (linked.isPresent()) {
        return linked.get();
    }
    return new LinkedIOSubchannel(portal, channel);
}
/**
 * Redirects requests for the portal path without the trailing slash
 * to the canonical portal prefix (301 Moved Permanently).
 *
 * @param event the GET request
 * @param channel the channel the request arrived on
 */
@RequestHandler(dynamic=true)
public void onGetRedirect(GetRequest event, IOSubchannel channel)
        throws InterruptedException, IOException, ParseException {
    HttpResponse response = event.httpRequest().response().get();
    response.setStatus(HttpStatus.MOVED_PERMANENTLY)
        .setContentType("text", "plain", "utf-8")
        .setField(HttpField.LOCATION, portal.prefix());
    fire(new Response(response), channel);
    try {
        // Also provide the redirect target as plain text payload.
        fire(Output.wrap(portal.prefix().toString()
            .getBytes("utf-8"), true), channel);
    } catch (UnsupportedEncodingException e) {
        // Supported by definition
    }
    event.stop();
}
/**
 * Dispatches GET requests below the portal prefix: the prefix itself
 * is either upgraded to a WebSocket or answered with the rendered
 * portal page; sub-paths select portal, theme or portlet resources.
 *
 * @param event the GET request
 * @param channel the channel the request arrived on
 */
@RequestHandler(dynamic=true)
public void onGet(GetRequest event, IOSubchannel channel)
        throws InterruptedException, IOException {
    URI requestUri = event.requestUri();
    // Append trailing slash, if missing
    if ((requestUri.getRawPath() + "/").equals(
            portal.prefix().getRawPath())) {
        requestUri = portal.prefix();
    }
    // Request for portal?
    if (!requestUri.getRawPath().startsWith(portal.prefix().getRawPath())) {
        return;
    }
    // Normalize and evaluate
    requestUri = portal.prefix().relativize(
        URI.create(requestUri.getRawPath()));
    if (requestUri.getRawPath().isEmpty()) {
        // Request for the portal page itself; upgrade to WebSocket if
        // requested, else render the HTML page.
        if (event.httpRequest().findField(
            HttpField.UPGRADE, Converters.STRING_LIST)
            .map(f -> f.value().containsIgnoreCase("websocket"))
            .orElse(false)) {
            channel.setAssociated(this, new PortalInfo());
            channel.respond(new WebSocketAccepted(event));
            event.stop();
            return;
        }
        renderPortal(event, channel);
        return;
    }
    // Sub-paths select the kind of resource to serve.
    URI subUri = uriFromPath("portal-resource/").relativize(requestUri);
    if (!subUri.equals(requestUri)) {
        sendPortalResource(event, channel, subUri.getPath());
        return;
    }
    subUri = uriFromPath("theme-resource/").relativize(requestUri);
    if (!subUri.equals(requestUri)) {
        sendThemeResource(event, channel, subUri.getPath());
        return;
    }
    subUri = uriFromPath("portlet-resource/").relativize(requestUri);
    if (!subUri.equals(requestUri)) {
        requestPortletResource(event, channel, subUri);
        return;
    }
}
/**
 * Renders the portal's HTML page from the FreeMarker template,
 * populating the model with the locale, the supported languages, a
 * localization lookup method and the available themes.
 *
 * @param event the GET request being answered
 * @param channel the channel to send the response on
 */
private void renderPortal(GetRequest event, IOSubchannel channel)
        throws IOException, InterruptedException {
    event.stop();
    // Because language is changed via websocket, locale cookie
    // may be out-dated
    event.associated(Selection.class)
        .ifPresent(s -> s.prefer(s.get()[0]));
    // Prepare response
    HttpResponse response = event.httpRequest().response().get();
    MediaType mediaType = MediaType.builder().setType("text", "html")
        .setParameter("charset", "utf-8").build();
    response.setField(HttpField.CONTENT_TYPE, mediaType);
    response.setStatus(HttpStatus.OK);
    response.setHasPayload(true);
    channel.respond(new Response(response));
    try (Writer out = new OutputStreamWriter(new ByteBufferOutputStream(
            channel, channel.responsePipeline()), "utf-8")) {
        Map<String,Object> portalModel = new HashMap<>(portalBaseModel);
        // Add locale
        final Locale locale = event.associated(Selection.class).map(
            s -> s.get()[0]).orElse(Locale.getDefault());
        portalModel.put("locale", locale);
        // Add supported locales, sorted by their localized labels
        final Collator coll = Collator.getInstance(locale);
        final Comparator<LanguageInfo> comp
            = new Comparator<PortalView.LanguageInfo>() {
            @Override
            public int compare(LanguageInfo o1, LanguageInfo o2) {
                return coll.compare(o1.getLabel(), o2.getLabel());
            }
        };
        LanguageInfo[] languages = supportedLocales.stream()
            .map(l -> new LanguageInfo(l))
            .sorted(comp).toArray(size -> new LanguageInfo[size]);
        portalModel.put("supportedLanguages", languages);
        // Add localization method "_": application provided resources
        // first, then the portal's own bundle, else the key itself.
        final ResourceBundle additionalResources = resourceBundleSupplier == null
            ? null : resourceBundleSupplier.apply(locale);
        final ResourceBundle baseResources = ResourceBundle.getBundle(
            getClass().getPackage().getName() + ".l10n", locale,
            ResourceBundle.Control.getNoFallbackControl(
                ResourceBundle.Control.FORMAT_DEFAULT));
        portalModel.put("_", new TemplateMethodModelEx() {
            @Override
            public Object exec(@SuppressWarnings("rawtypes") List arguments)
                    throws TemplateModelException {
                @SuppressWarnings("unchecked")
                List<TemplateModel> args = (List<TemplateModel>)arguments;
                if (!(args.get(0) instanceof SimpleScalar)) {
                    throw new TemplateModelException("Not a string.");
                }
                String key = ((SimpleScalar)args.get(0)).getAsString();
                // NOTE(review): additionalResources may be null here
                // (when no supplier is configured), which would raise an
                // NPE that the catch below does not cover — confirm that
                // a supplier is always set before rendering.
                try {
                    return additionalResources.getString(key);
                } catch (MissingResourceException e) {
                    // try base resources
                }
                try {
                    return baseResources.getString(key);
                } catch (MissingResourceException e) {
                    // no luck
                }
                return key;
            }
        });
        // Add themes. Doing this on every reload allows themes
        // to be added dynamically. Note that we must load again
        // (not reload) in order for this to work in an OSGi environment.
        themeLoader = ServiceLoader.load(ThemeProvider.class);
        portalModel.put("themeInfos",
            StreamSupport.stream(themeLoader().spliterator(), false)
            .map(t -> new ThemeInfo(t.themeId(), t.themeName()))
            .sorted().toArray(size -> new ThemeInfo[size]));
        Template tpl = fmConfig.getTemplate("portal.ftlh");
        tpl.process(portalModel, out);
    } catch (TemplateException e) {
        throw new IOException(e);
    }
}
/**
 * Serves a resource from this class's package (the portal's own
 * JavaScript, CSS etc.).
 *
 * @param event the GET request
 * @param channel the channel to send the response on
 * @param resource the resource path relative to this class
 */
private void sendPortalResource(GetRequest event, IOSubchannel channel,
        String resource) {
    // Look for content
    InputStream in = this.getClass().getResourceAsStream(resource);
    if (in == null) {
        // Not found; leave the event unhandled for other handlers.
        return;
    }
    // Send header
    HttpResponse response = event.httpRequest().response().get();
    prepareResourceResponse(response, event.requestUri());
    channel.respond(new Response(response));
    // Stream the content on a worker thread to avoid blocking the
    // event pipeline.
    activeEventPipeline().executorService()
        .submit(new InputStreamPipeline(in, channel));
    // Done
    event.stop();
}
/**
 * Serves a resource from the session's selected theme, falling back
 * to the base theme and finally to the configured fallback supplier.
 *
 * @param event the GET request
 * @param channel the channel to send the response on
 * @param resource the theme relative resource path
 */
private void sendThemeResource(GetRequest event, IOSubchannel channel,
        String resource) {
    // Get resource; the session's "themeProvider" attribute selects
    // the theme, otherwise the base theme is used.
    ThemeProvider themeProvider = event.associated(Session.class).flatMap(
        session -> Optional.ofNullable(session.get("themeProvider")).flatMap(
            themeId -> StreamSupport
            .stream(themeLoader().spliterator(), false)
            .filter(t -> t.themeId().equals(themeId)).findFirst()
        )).orElse(baseTheme);
    InputStream resIn;
    try {
        resIn = themeProvider.getResourceAsStream(resource);
    } catch (ResourceNotFoundException e) {
        // Fall back to the base theme, then to the fallback supplier.
        try {
            resIn = baseTheme.getResourceAsStream(resource);
        } catch (ResourceNotFoundException e1) {
            resIn = fallbackResourceSupplier.apply(themeProvider, resource);
            if (resIn == null) {
                // Nothing found anywhere; leave the event unhandled.
                return;
            }
        }
    }
    // Send header
    HttpResponse response = event.httpRequest().response().get();
    prepareResourceResponse(response, event.requestUri());
    channel.respond(new Response(response));
    // Stream the content on a worker thread.
    activeEventPipeline().executorService()
        .submit(new InputStreamPipeline(resIn, channel));
    // Done
    event.stop();
}
/**
 * Prepares a response for delivering a static resource: derives the
 * content type from the request URI, marks the result cacheable for
 * ten minutes and sets status 200 (OK).
 *
 * @param response the response to prepare
 * @param request the request URI the content type is derived from
 */
public static void prepareResourceResponse(
        HttpResponse response, URI request) {
    response.setContentType(request);
    // Allow caches to keep the resource for ten minutes.
    List<Directive> cacheControl = new ArrayList<>();
    cacheControl.add(new Directive("max-age", 600));
    response.setField(HttpField.CACHE_CONTROL, cacheControl);
    response.setField(HttpField.LAST_MODIFIED, Instant.now());
    response.setStatus(HttpStatus.OK);
}
/**
 * Forwards a portlet resource request to the portlets on the portal
 * channel and stops the event if some portlet handled it.
 *
 * @param event the GET request
 * @param channel the channel the request arrived on
 * @param resource path of the form "portletType/portletLocalPath"
 */
private void requestPortletResource(GetRequest event, IOSubchannel channel,
        URI resource) throws InterruptedException {
    String resPath = resource.getPath();
    // Split into the portlet type and the portlet local path.
    int sep = resPath.indexOf('/');
    // Send events to portlets on portal's channel
    if (Boolean.TRUE.equals(newEventPipeline().fire(
        new PortletResourceRequest(resPath.substring(0, sep),
            uriFromPath(resPath.substring(sep + 1)),
            event.httpRequest(), channel), portalChannel(channel))
        .get())) {
        event.stop();
    }
}
/**
 * Forwards WebSocket text input to the JSON decoder associated with
 * the connection, which in turn fires {@link JsonInput} events.
 *
 * @param event the input event with (part of) a JSON message
 * @param channel the channel the input arrived on
 */
@Handler
public void onInput(Input<ManagedCharBuffer> event, IOSubchannel channel)
        throws IOException {
    Optional<PortalInfo> optPortalInfo
        = channel.associated(this, PortalInfo.class);
    if (!optPortalInfo.isPresent()) {
        // Not a portal WebSocket connection.
        return;
    }
    optPortalInfo.get().toEvent(portalChannel(channel),
        event.buffer().backingBuffer(), event.isEndOfRecord());
}
/**
 * Forward the {@link Closed} event to the portal channel so that
 * components on the portal side learn about the closed connection.
 *
 * @param event the event
 * @param channel the channel
 */
@Handler
public void onClosed(Closed event, IOSubchannel channel) {
    fire(new Closed(), portalChannel(channel));
}
/**
 * On portal ready, queries the key/value store for the user's
 * persisted theme selection. A completion lock delays completion of
 * the {@link PortalReady} event (up to 3s) until the store answers.
 *
 * @param event the portal ready event
 * @param channel the portal side channel
 */
@Handler(dynamic=true)
public void onPortalReady(PortalReady event, IOSubchannel channel) {
    String principal = channel.associated(Session.class).map(session ->
        session.getOrDefault(Principal.class, "").toString())
        .orElse("");
    KeyValueStoreQuery query = new KeyValueStoreQuery(
        "/" + principal + "/themeProvider", true);
    channel.setAssociated(this, new CompletionLock(event, 3000));
    fire(query, channel);
}
/**
 * Handles the answer to the theme query fired in onPortalReady: when
 * the persisted theme differs from the session's current one, fires
 * a {@link SetTheme} event to switch.
 *
 * @param event the key/value store data
 * @param channel the portal side channel
 */
@Handler(dynamic=true)
public void onKeyValueStoreData(
        KeyValueStoreData event, IOSubchannel channel)
        throws JsonDecodeException {
    Optional<Session> optSession = channel.associated(Session.class);
    if (!optSession.isPresent()) {
        return;
    }
    Session session = optSession.get();
    String principal = session.getOrDefault(Principal.class, "").toString();
    // Only react to the query fired by onPortalReady.
    if (!event.event().query().equals("/" + principal + "/themeProvider")) {
        return;
    }
    // The awaited answer has arrived; release the PortalReady event.
    channel.associated(this, CompletionLock.class)
        .ifPresent(lock -> lock.remove());
    if (!event.data().values().iterator().hasNext()) {
        // No theme persisted for this user.
        return;
    }
    String requestedThemeId = event.data().values().iterator().next();
    // Determine the theme currently effective for the session.
    ThemeProvider themeProvider = Optional.ofNullable(
        session.get("themeProvider")).flatMap(
            themeId -> StreamSupport
            .stream(themeLoader().spliterator(), false)
            .filter(t -> t.themeId().equals(themeId)).findFirst()
        ).orElse(baseTheme);
    if (!themeProvider.themeId().equals(requestedThemeId)) {
        fire(new SetTheme(requestedThemeId), channel);
    }
}
/**
 * Streams a portlet's resource response back to the browser over the
 * upstream (HTTP) channel.
 *
 * @param event the portlet's response with the resource stream
 * @param channel the linked portal side channel
 */
@Handler(dynamic=true)
public void onPortletResourceResponse(PortletResourceResponse event,
        LinkedIOSubchannel channel) {
    HttpRequest request = event.request().httpRequest();
    // Send header
    HttpResponse response = request.response().get();
    prepareResourceResponse(response, request.requestUri());
    channel.upstreamChannel().respond(new Response(response));
    // Stream the content on a worker thread.
    activeEventPipeline().executorService().submit(
        new InputStreamPipeline(
            event.stream(), channel.upstreamChannel()));
}
/**
 * Records the requested locale as preferred (if it is among the
 * supported ones) and tells the browser to reload the portal page.
 *
 * @param event the set locale event
 * @param channel the linked portal side channel
 */
@Handler(dynamic=true)
public void onSetLocale(SetLocale event, LinkedIOSubchannel channel)
        throws InterruptedException, IOException {
    supportedLocales.stream()
        .filter(l -> l.equals(event.locale())).findFirst()
        .ifPresent(l -> channel.associated(Selection.class)
            .map(s -> s.prefer(l)));
    fire(new JsonOutput("reload"), channel);
}
/**
 * Switches the session's theme, persists the choice in the key/value
 * store and tells the browser to reload the portal page.
 *
 * @param event the set theme event
 * @param channel the linked portal side channel
 */
@Handler(dynamic=true)
public void onSetTheme(SetTheme event, LinkedIOSubchannel channel)
        throws InterruptedException, IOException {
    // Resolve the requested theme id, defaulting to the base theme.
    ThemeProvider themeProvider = StreamSupport
        .stream(themeLoader().spliterator(), false)
        .filter(t -> t.themeId().equals(event.theme())).findFirst()
        .orElse(baseTheme);
    Optional<Session> optSession = channel.associated(Session.class);
    if (optSession.isPresent()) {
        Session session = optSession.get();
        session.put("themeProvider", themeProvider.themeId());
        // Persist per user; wait for the update to complete.
        channel.respond(new KeyValueStoreUpdate().update(
            "/" + session.getOrDefault(Principal.class, "").toString()
            + "/themeProvider", themeProvider.themeId())).get();
    }
    fire(new JsonOutput("reload"), channel);
}
/**
 * Serializes a JSON output event and sends it to the browser over
 * the upstream (WebSocket) channel.
 *
 * @param event the JSON message to send
 * @param channel the linked portal side channel
 */
@Handler(dynamic=true)
public void onJsonOutput(JsonOutput event, LinkedIOSubchannel channel)
        throws InterruptedException, IOException {
    IOSubchannel upstream = channel.upstreamChannel();
    // suppressClose keeps the WebSocket open after this message.
    @SuppressWarnings("resource")
    CharBufferWriter out = new CharBufferWriter(upstream,
        upstream.responsePipeline()).suppressClose();
    event.toJson(out);
    out.close();
}
/**
 * Per WebSocket connection state: collects incoming (possibly
 * fragmented) JSON text and decodes it asynchronously into
 * {@link JsonInput} events.
 */
private class PortalInfo {

    // Write end of the pipe feeding the currently running decoder;
    // null while no message is being decoded.
    private PipedWriter decodeWriter;

    /**
     * Appends the buffer's content to the message being decoded,
     * starting a decoder task for the first fragment and closing the
     * pipe when the final fragment has arrived.
     *
     * @param channel the portal side channel to fire the decoded event on
     * @param buffer the received text
     * @param last whether this is the final fragment of the message
     */
    public void toEvent(IOSubchannel channel, CharBuffer buffer,
            boolean last) throws IOException {
        if (decodeWriter == null) {
            decodeWriter = new PipedWriter();
            PipedReader pipedReader = new PipedReader(
                decodeWriter, buffer.capacity());
            activeEventPipeline().executorService()
                .submit(new DecodeTask(pipedReader, channel));
        }
        decodeWriter.append(buffer);
        if (last) {
            decodeWriter.close();
            decodeWriter = null;
        }
    }

    /**
     * Reads one JSON object from the given reader and fires it as a
     * {@link JsonInput} event; any failure is reported as an
     * {@link Error} event.
     */
    private class DecodeTask implements Runnable {

        private IOSubchannel channel;
        private Reader reader;

        public DecodeTask(Reader reader, IOSubchannel channel) {
            this.reader = reader;
            this.channel = channel;
        }

        /* (non-Javadoc)
         * @see java.lang.Runnable#run()
         */
        @Override
        public void run() {
            try (Reader in = reader) {
                // Local renamed from "reader" to avoid shadowing the
                // field of the same name.
                JsonReader jsonReader = Json.createReader(in);
                fire(new JsonInput(jsonReader.readObject()), channel);
            } catch (Throwable e) {
                // Deliberately broad: any decoding failure becomes an
                // Error event instead of silently killing the worker.
                fire(new Error(null, e));
            }
        }
    }
}
/**
 * Language selection information for the template: a locale together
 * with its display label in that locale's own language.
 */
public static class LanguageInfo {

    private Locale locale;

    /**
     * Creates language information for the given locale.
     *
     * @param locale the locale
     */
    public LanguageInfo(Locale locale) {
        this.locale = locale;
    }

    /**
     * Returns the locale.
     *
     * @return the locale
     */
    public Locale getLocale() {
        return locale;
    }

    /**
     * Returns the locale's display name in the locale's own language,
     * with the first character upper-cased.
     *
     * @return the label
     */
    public String getLabel() {
        String str = locale.getDisplayName(locale);
        // Guard against an empty display name instead of failing
        // with a StringIndexOutOfBoundsException.
        if (str.isEmpty()) {
            return str;
        }
        return Character.toUpperCase(str.charAt(0)) + str.substring(1);
    }
}
/**
 * Theme selection information for the template: a theme id together
 * with its display name, ordered by name (case insensitive).
 */
public static class ThemeInfo implements Comparable<ThemeInfo> {

    private String id;
    private String name;

    /**
     * Creates theme information.
     *
     * @param id the theme's unique id
     * @param name the theme's display name
     */
    public ThemeInfo(String id, String name) {
        this.id = id;
        this.name = name;
    }

    /**
     * Returns the theme id.
     *
     * @return the id
     */
    public String id() {
        return id;
    }

    /**
     * Returns the theme name.
     *
     * @return the name
     */
    public String name() {
        return name;
    }

    /**
     * Orders theme information alphabetically by name, ignoring case.
     */
    @Override
    public int compareTo(ThemeInfo other) {
        return name.compareToIgnoreCase(other.name);
    }
}
/**
 * Create a {@link URI} from a path. This is similar to calling
 * `new URI(null, null, path, null)` with the {@link URISyntaxException}
 * converted to a {@link IllegalArgumentException}.
 *
 * @param path the path
 * @return the uri
 * @throws IllegalArgumentException if the string violates RFC 2396
 */
public static URI uriFromPath(String path) throws IllegalArgumentException {
    try {
        return new URI(null, null, path, null);
    } catch (URISyntaxException cause) {
        throw new IllegalArgumentException(cause);
    }
}
/**
 * Implementation of {@link RenderSupport} handed to portlets for
 * building URIs that are routed back to this portal view.
 */
private class RenderSupportImpl implements RenderSupport {

    /**
     * Resolves the given portlet local URI against the portal's
     * portlet resource prefix for the given portlet type.
     *
     * @see org.jgrapes.portal.RenderSupport#portletResource(java.lang.String, java.net.URI)
     */
    @Override
    public URI portletResource(String portletType, URI uri) {
        return portal.prefix().resolve(uriFromPath(
            "portlet-resource/" + portletType + "/")).resolve(uri);
    }
}
}
|
org.jgrapes.portal/src/org/jgrapes/portal/PortalView.java
|
/*
* JGrapes Event Driven Framework
* Copyright (C) 2017 Michael N. Lipp
*
* This program is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, see <http://www.gnu.org/licenses/>.
*/
package org.jgrapes.portal;
import freemarker.template.Configuration;
import freemarker.template.SimpleScalar;
import freemarker.template.Template;
import freemarker.template.TemplateException;
import freemarker.template.TemplateExceptionHandler;
import freemarker.template.TemplateMethodModelEx;
import freemarker.template.TemplateModel;
import freemarker.template.TemplateModelException;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStreamWriter;
import java.io.PipedReader;
import java.io.PipedWriter;
import java.io.Reader;
import java.io.UnsupportedEncodingException;
import java.io.Writer;
import java.net.URI;
import java.net.URISyntaxException;
import java.nio.CharBuffer;
import java.security.Principal;
import java.text.Collator;
import java.text.ParseException;
import java.time.Instant;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.MissingResourceException;
import java.util.Optional;
import java.util.ResourceBundle;
import java.util.ServiceLoader;
import java.util.Set;
import java.util.function.BiFunction;
import java.util.function.Function;
import java.util.stream.StreamSupport;
import javax.json.Json;
import javax.json.JsonReader;
import org.jdrupes.httpcodec.protocols.http.HttpConstants.HttpStatus;
import org.jdrupes.httpcodec.protocols.http.HttpField;
import org.jdrupes.httpcodec.protocols.http.HttpRequest;
import org.jdrupes.httpcodec.protocols.http.HttpResponse;
import org.jdrupes.httpcodec.types.Converters;
import org.jdrupes.httpcodec.types.Directive;
import org.jdrupes.httpcodec.types.MediaType;
import org.jdrupes.json.JsonDecodeException;
import org.jgrapes.core.Channel;
import org.jgrapes.core.CompletionLock;
import org.jgrapes.core.Component;
import org.jgrapes.core.annotation.Handler;
import org.jgrapes.core.events.Error;
import org.jgrapes.http.LanguageSelector.Selection;
import org.jgrapes.http.Session;
import org.jgrapes.http.annotation.RequestHandler;
import org.jgrapes.http.events.GetRequest;
import org.jgrapes.http.events.Response;
import org.jgrapes.http.events.WebSocketAccepted;
import org.jgrapes.io.IOSubchannel;
import org.jgrapes.io.events.Closed;
import org.jgrapes.io.events.Input;
import org.jgrapes.io.events.Output;
import org.jgrapes.io.util.ByteBufferOutputStream;
import org.jgrapes.io.util.CharBufferWriter;
import org.jgrapes.io.util.InputStreamPipeline;
import org.jgrapes.io.util.LinkedIOSubchannel;
import org.jgrapes.io.util.ManagedCharBuffer;
import org.jgrapes.portal.events.JsonInput;
import org.jgrapes.portal.events.JsonOutput;
import org.jgrapes.portal.events.PortalReady;
import org.jgrapes.portal.events.PortletResourceRequest;
import org.jgrapes.portal.events.PortletResourceResponse;
import org.jgrapes.portal.events.SetLocale;
import org.jgrapes.portal.events.SetTheme;
import org.jgrapes.portal.themes.base.Provider;
import org.jgrapes.util.events.KeyValueStoreData;
import org.jgrapes.util.events.KeyValueStoreQuery;
import org.jgrapes.util.events.KeyValueStoreUpdate;
/**
*
*/
public class PortalView extends Component {
private Portal portal;
private ServiceLoader<ThemeProvider> themeLoader;
private static Configuration fmConfig = null;
private Function<Locale,ResourceBundle> resourceBundleSupplier;
private BiFunction<ThemeProvider,String,InputStream> fallbackResourceSupplier
= (themeProvider, resource) -> { return null; };
private Set<Locale> supportedLocales;
private ThemeProvider baseTheme;
private Map<String,Object> portalBaseModel = new HashMap<>();
private RenderSupport renderSupport = new RenderSupportImpl();
/**
* @param componentChannel
*/
public PortalView(Portal portal, Channel componentChannel) {
super(componentChannel);
this.portal = portal;
if (fmConfig == null) {
fmConfig = new Configuration(Configuration.VERSION_2_3_26);
fmConfig.setClassLoaderForTemplateLoading(
getClass().getClassLoader(), "org/jgrapes/portal");
fmConfig.setDefaultEncoding("utf-8");
fmConfig.setTemplateExceptionHandler(
TemplateExceptionHandler.RETHROW_HANDLER);
fmConfig.setLogTemplateExceptions(false);
}
baseTheme = new Provider();
supportedLocales = new HashSet<>();
for (Locale locale: Locale.getAvailableLocales()) {
if (locale.getLanguage().equals("")) {
continue;
}
if (resourceBundleSupplier != null) {
ResourceBundle rb = resourceBundleSupplier.apply(locale);
if (rb.getLocale().equals(locale)) {
supportedLocales.add(locale);
}
}
ResourceBundle rb = ResourceBundle.getBundle(getClass()
.getPackage().getName() + ".l10n", locale);
if (rb.getLocale().equals(locale)) {
supportedLocales.add(locale);
}
}
RequestHandler.Evaluator.add(this, "onGet", portal.prefix() + "**");
RequestHandler.Evaluator.add(this, "onGetRedirect",
portal.prefix().getPath().substring(
0, portal.prefix().getPath().length() - 1));
// Create portal model
portalBaseModel.put("resourceUrl", new TemplateMethodModelEx() {
@Override
public Object exec(@SuppressWarnings("rawtypes") List arguments)
throws TemplateModelException {
@SuppressWarnings("unchecked")
List<TemplateModel> args = (List<TemplateModel>)arguments;
if (!(args.get(0) instanceof SimpleScalar)) {
throw new TemplateModelException("Not a string.");
}
return portal.prefix().resolve(
((SimpleScalar)args.get(0)).getAsString()).getRawPath();
}
});
portalBaseModel = Collections.unmodifiableMap(portalBaseModel);
// Handlers attached to the portal side channel
Handler.Evaluator.add(this, "onPortalReady", portal.channel());
Handler.Evaluator.add(this, "onKeyValueStoreData", portal.channel());
Handler.Evaluator.add(
this, "onPortletResourceResponse", portal.channel());
Handler.Evaluator.add(this, "onJsonOutput", portal.channel());
Handler.Evaluator.add(this, "onSetLocale", portal.channel());
Handler.Evaluator.add(this, "onSetTheme", portal.channel());
}
/**
* The service loader must be created lazily, else the OSGi
* service mediator doesn't work properly.
*
* @return
*/
private ServiceLoader<ThemeProvider> themeLoader() {
if (themeLoader != null) {
return themeLoader;
}
return themeLoader = ServiceLoader.load(ThemeProvider.class);
}
void setResourceBundleSupplier(
Function<Locale,ResourceBundle> supplier) {
this.resourceBundleSupplier = supplier;
}
void setFallbackResourceSupplier(
BiFunction<ThemeProvider,String,InputStream> supplier) {
this.fallbackResourceSupplier = supplier;
}
RenderSupport renderSupport() {
return renderSupport;
}
private LinkedIOSubchannel portalChannel(IOSubchannel channel) {
@SuppressWarnings("unchecked")
Optional<LinkedIOSubchannel> portalChannel
= (Optional<LinkedIOSubchannel>)LinkedIOSubchannel
.downstreamChannel(portal, channel);
return portalChannel.orElseGet(
() -> new LinkedIOSubchannel(portal, channel));
}
@RequestHandler(dynamic=true)
public void onGetRedirect(GetRequest event, IOSubchannel channel)
throws InterruptedException, IOException, ParseException {
HttpResponse response = event.httpRequest().response().get();
response.setStatus(HttpStatus.MOVED_PERMANENTLY)
.setContentType("text", "plain", "utf-8")
.setField(HttpField.LOCATION, portal.prefix());
fire(new Response(response), channel);
try {
fire(Output.wrap(portal.prefix().toString()
.getBytes("utf-8"), true), channel);
} catch (UnsupportedEncodingException e) {
// Supported by definition
}
event.stop();
}
@RequestHandler(dynamic=true)
public void onGet(GetRequest event, IOSubchannel channel)
throws InterruptedException, IOException {
URI requestUri = event.requestUri();
// Append trailing slash, if missing
if ((requestUri.getRawPath() + "/").equals(
portal.prefix().getRawPath())) {
requestUri = portal.prefix();
}
// Request for portal?
if (!requestUri.getRawPath().startsWith(portal.prefix().getRawPath())) {
return;
}
// Normalize and evaluate
requestUri = portal.prefix().relativize(
URI.create(requestUri.getRawPath()));
if (requestUri.getRawPath().isEmpty()) {
if (event.httpRequest().findField(
HttpField.UPGRADE, Converters.STRING_LIST)
.map(f -> f.value().containsIgnoreCase("websocket"))
.orElse(false)) {
channel.setAssociated(this, new PortalInfo());
channel.respond(new WebSocketAccepted(event));
event.stop();
return;
}
renderPortal(event, channel);
return;
}
URI subUri = uriFromPath("portal-resource/").relativize(requestUri);
if (!subUri.equals(requestUri)) {
sendPortalResource(event, channel, subUri.getPath());
return;
}
subUri = uriFromPath("theme-resource/").relativize(requestUri);
if (!subUri.equals(requestUri)) {
sendThemeResource(event, channel, subUri.getPath());
return;
}
subUri = uriFromPath("portlet-resource/").relativize(requestUri);
if (!subUri.equals(requestUri)) {
requestPortletResource(event, channel, subUri);
return;
}
}
/**
 * Renders the portal page in response to a GET request.
 *
 * Builds the freemarker data model (locale, sorted supported languages,
 * a localization lookup function and the available themes) and streams
 * the processed {@code portal.ftlh} template as the response body.
 *
 * @param event the GET request; stopped because it is handled here
 * @param channel the channel to respond on
 * @throws IOException if writing the response or template processing fails
 * @throws InterruptedException if the sending thread is interrupted
 */
private void renderPortal(GetRequest event, IOSubchannel channel)
        throws IOException, InterruptedException {
    event.stop();
    // Because language is changed via websocket, locale cookie
    // may be out-dated
    event.associated(Selection.class)
            .ifPresent(s -> s.prefer(s.get()[0]));
    // Prepare response
    HttpResponse response = event.httpRequest().response().get();
    MediaType mediaType = MediaType.builder().setType("text", "html")
            .setParameter("charset", "utf-8").build();
    response.setField(HttpField.CONTENT_TYPE, mediaType);
    response.setStatus(HttpStatus.OK);
    response.setHasPayload(true);
    channel.respond(new Response(response));
    try (Writer out = new OutputStreamWriter(new ByteBufferOutputStream(
            channel, channel.responsePipeline()), "utf-8")) {
        Map<String,Object> portalModel = new HashMap<>(portalBaseModel);
        // Add locale; fall back to the JVM default if no selection exists.
        final Locale locale = event.associated(Selection.class).map(
                s -> s.get()[0]).orElse(Locale.getDefault());
        portalModel.put("locale", locale);
        // Add supported locales, sorted by their localized display label.
        final Collator coll = Collator.getInstance(locale);
        final Comparator<LanguageInfo> comp
                = new Comparator<PortalView.LanguageInfo>() {
            @Override
            public int compare(LanguageInfo o1, LanguageInfo o2) {
                return coll.compare(o1.getLabel(), o2.getLabel());
            }
        };
        LanguageInfo[] languages = supportedLocales.stream()
                .map(l -> new LanguageInfo(l))
                .sorted(comp).toArray(size -> new LanguageInfo[size]);
        portalModel.put("supportedLanguages", languages);
        // Add localization: "_" looks a key up in the (optional) additional
        // resources first, then in the portal's base resources, and finally
        // falls back to the key itself.
        final ResourceBundle additionalResources = resourceBundleSupplier == null
                ? null : resourceBundleSupplier.apply(locale);
        final ResourceBundle baseResources = ResourceBundle.getBundle(
                getClass().getPackage().getName() + ".l10n", locale,
                ResourceBundle.Control.getNoFallbackControl(
                        ResourceBundle.Control.FORMAT_DEFAULT));
        portalModel.put("_", new TemplateMethodModelEx() {
            @Override
            public Object exec(@SuppressWarnings("rawtypes") List arguments)
                    throws TemplateModelException {
                @SuppressWarnings("unchecked")
                List<TemplateModel> args = (List<TemplateModel>)arguments;
                if (!(args.get(0) instanceof SimpleScalar)) {
                    throw new TemplateModelException("Not a string.");
                }
                String key = ((SimpleScalar)args.get(0)).getAsString();
                // Guard against a missing supplier: additionalResources may
                // be null (see above); only MissingResourceException was
                // caught before, so this used to throw an NPE.
                if (additionalResources != null) {
                    try {
                        return additionalResources.getString(key);
                    } catch (MissingResourceException e) {
                        // try base resources
                    }
                }
                try {
                    return baseResources.getString(key);
                } catch (MissingResourceException e) {
                    // no luck
                }
                return key;
            }
        });
        // Add themes. Doing this on every reload allows themes
        // to be added dynamically. Note that we must load again
        // (not reload) in order for this to work in an OSGi environment.
        themeLoader = ServiceLoader.load(ThemeProvider.class);
        portalModel.put("themeInfos",
                StreamSupport.stream(themeLoader().spliterator(), false)
                        .map(t -> new ThemeInfo(t.themeId(), t.themeName()))
                        .sorted().toArray(size -> new ThemeInfo[size]));
        Template tpl = fmConfig.getTemplate("portal.ftlh");
        tpl.process(portalModel, out);
    } catch (TemplateException e) {
        throw new IOException(e);
    }
}
/**
 * Serves a static portal resource from this class's class path.
 * If the resource does not exist, the request is left unhandled
 * (the event is not stopped) so another handler may serve it.
 *
 * @param event the GET request
 * @param channel the channel to respond on
 * @param resource the class-path resource name
 */
private void sendPortalResource(GetRequest event, IOSubchannel channel,
        String resource) {
    // Look for content
    InputStream in = this.getClass().getResourceAsStream(resource);
    if (in == null) {
        return;
    }
    // Send header
    HttpResponse response = event.httpRequest().response().get();
    prepareResourceResponse(response, event.requestUri());
    channel.respond(new Response(response));
    // Send content asynchronously; the pipeline copies the stream to the
    // channel and closes it when done.
    activeEventPipeline().executorService()
            .submit(new InputStreamPipeline(in, channel));
    // Done
    event.stop();
}
/**
 * Serves a theme resource, resolving it against (in order): the theme
 * provider selected in the session, the base theme, and finally the
 * configured fallback resource supplier. If none can provide the
 * resource, the request is left unhandled.
 *
 * @param event the GET request
 * @param channel the channel to respond on
 * @param resource the theme-relative resource name
 */
private void sendThemeResource(GetRequest event, IOSubchannel channel,
        String resource) {
    // Get resource: look up the session's "themeProvider" id among the
    // loaded providers, defaulting to the base theme.
    ThemeProvider themeProvider = event.associated(Session.class).flatMap(
        session -> Optional.ofNullable(session.get("themeProvider")).flatMap(
            themeId -> StreamSupport
                .stream(themeLoader().spliterator(), false)
                .filter(t -> t.themeId().equals(themeId)).findFirst()
        )).orElse(baseTheme);
    InputStream resIn;
    try {
        resIn = themeProvider.getResourceAsStream(resource);
    } catch (ResourceNotFoundException e) {
        // Selected theme doesn't have it; try the base theme.
        try {
            resIn = baseTheme.getResourceAsStream(resource);
        } catch (ResourceNotFoundException e1) {
            // Last resort: externally supplied fallback.
            resIn = fallbackResourceSupplier.apply(themeProvider, resource);
            if (resIn == null) {
                return;
            }
        }
    }
    // Send header
    HttpResponse response = event.httpRequest().response().get();
    prepareResourceResponse(response, event.requestUri());
    channel.respond(new Response(response));
    // Send content asynchronously.
    activeEventPipeline().executorService()
            .submit(new InputStreamPipeline(resIn, channel));
    // Done
    event.stop();
}
/**
 * Prepares a response for a static resource: derives the content type
 * from the request URI, allows caching for 10 minutes, sets the current
 * instant as last-modified and the status to 200 OK.
 *
 * @param response the response to fill in
 * @param request the request URI (used for content-type detection)
 */
public static void prepareResourceResponse(
        HttpResponse response, URI request) {
    response.setContentType(request);
    // Set max age in cache-control header
    List<Directive> directives = new ArrayList<>();
    directives.add(new Directive("max-age", 600));
    response.setField(HttpField.CACHE_CONTROL, directives);
    response.setField(HttpField.LAST_MODIFIED, Instant.now());
    response.setStatus(HttpStatus.OK);
}
/**
 * Forwards a portlet resource request to the portlets on the portal
 * channel. The first path segment of {@code resource} is the portlet
 * type, the remainder the portlet-relative resource path. The request
 * event is only stopped if some portlet reports having handled it.
 *
 * @param event the GET request
 * @param channel the channel the request arrived on
 * @param resource the portlet resource URI ("type/path")
 * @throws InterruptedException if waiting for completion is interrupted
 */
private void requestPortletResource(GetRequest event, IOSubchannel channel,
        URI resource) throws InterruptedException {
    String resPath = resource.getPath();
    int sep = resPath.indexOf('/');
    // Send events to portlets on portal's channel
    if (Boolean.TRUE.equals(newEventPipeline().fire(
            new PortletResourceRequest(resPath.substring(0, sep),
                    uriFromPath(resPath.substring(sep + 1)),
                    event.httpRequest(), channel), portalChannel(channel))
            .get())) {
        event.stop();
    }
}
/**
 * Forwards websocket input to the JSON decoder associated with the
 * connection. Input on channels without an associated {@link PortalInfo}
 * is ignored.
 *
 * @param event the input event carrying a character buffer fragment
 * @param channel the channel the input arrived on
 * @throws IOException if feeding the decoder pipe fails
 */
@Handler
public void onInput(Input<ManagedCharBuffer> event, IOSubchannel channel)
        throws IOException {
    Optional<PortalInfo> optPortalInfo
            = channel.associated(this, PortalInfo.class);
    if (!optPortalInfo.isPresent()) {
        return;
    }
    optPortalInfo.get().toEvent(portalChannel(channel),
            event.buffer().backingBuffer(), event.isEndOfRecord());
}
/**
 * Forward the {@link Closed} event to the portal channel.
 *
 * @param event the event
 * @param channel the channel
 */
@Handler
public void onClosed(Closed event, IOSubchannel channel) {
    // A fresh Closed is fired (not the original event) so the portal
    // channel gets its own, independently completable event.
    fire(new Closed(), portalChannel(channel));
}
/**
 * On portal ready, queries the key/value store for the persisted theme
 * preference. A completion lock (3 s timeout) keeps the PortalReady
 * event open until {@code onKeyValueStoreData} releases it, so the theme
 * is known before the portal completes initialization.
 *
 * @param event the portal ready event
 * @param channel the channel the event arrived on
 */
@Handler(dynamic=true)
public void onPortalReady(PortalReady event, IOSubchannel channel) {
    KeyValueStoreQuery query = new KeyValueStoreQuery(
            "/themeProvider", true);
    channel.setAssociated(this, new CompletionLock(event, 3000));
    fire(query, channel);
}
/**
 * Handles the stored theme preference: releases the completion lock set
 * by {@code onPortalReady} and, if the persisted theme id differs from
 * the one currently active in the session, fires a {@link SetTheme} to
 * switch to it.
 *
 * @param event the key/value store data
 * @param channel the channel the event arrived on
 * @throws JsonDecodeException declared for handler compatibility
 */
@Handler(dynamic=true)
public void onKeyValueStoreData(
        KeyValueStoreData event, IOSubchannel channel)
        throws JsonDecodeException {
    if (!event.event().query().equals("/themeProvider")) {
        return;
    }
    // Release the PortalReady completion lock, if still held.
    channel.associated(this, CompletionLock.class)
            .ifPresent(lock -> lock.remove());
    if (!event.data().values().iterator().hasNext()) {
        return;
    }
    String requestedThemeId = event.data().values().iterator().next();
    // Determine the theme currently effective for this session
    // (falls back to the base theme).
    ThemeProvider themeProvider = event.associated(Session.class).flatMap(
        session -> Optional.ofNullable(session.get("themeProvider")).flatMap(
            themeId -> StreamSupport
                .stream(themeLoader().spliterator(), false)
                .filter(t -> t.themeId().equals(themeId)).findFirst()
        )).orElse(baseTheme);
    if (!themeProvider.themeId().equals(requestedThemeId)) {
        fire(new SetTheme(requestedThemeId), channel);
    }
}
/**
 * Streams a portlet's resource back to the client: sends the prepared
 * response header on the upstream (HTTP) channel, then copies the
 * portlet-provided stream asynchronously.
 *
 * @param event the portlet resource response
 * @param channel the linked (portal-side) channel
 */
@Handler(dynamic=true)
public void onPortletResourceResponse(PortletResourceResponse event,
        LinkedIOSubchannel channel) {
    HttpRequest request = event.request().httpRequest();
    // Send header
    HttpResponse response = request.response().get();
    prepareResourceResponse(response, request.requestUri());
    channel.upstreamChannel().respond(new Response(response));
    // Send content
    activeEventPipeline().executorService().submit(
            new InputStreamPipeline(
                    event.stream(), channel.upstreamChannel()));
}
/**
 * Changes the UI language: if the requested locale is supported, marks
 * it as preferred in the channel's locale {@link Selection}, then tells
 * the browser to reload so the new locale takes effect.
 *
 * @param event the set-locale event
 * @param channel the linked (portal-side) channel
 * @throws InterruptedException declared for handler compatibility
 * @throws IOException declared for handler compatibility
 */
@Handler(dynamic=true)
public void onSetLocale(SetLocale event, LinkedIOSubchannel channel)
        throws InterruptedException, IOException {
    supportedLocales.stream()
            .filter(l -> l.equals(event.locale())).findFirst()
            .ifPresent(l -> channel.associated(Selection.class)
                    .map(s -> s.prefer(l)));
    fire(new JsonOutput("reload"), channel);
}
/**
 * Changes the theme: resolves the requested theme id among the loaded
 * providers (falling back to the base theme), stores the choice in the
 * session and persists it per principal in the key/value store, then
 * tells the browser to reload.
 *
 * @param event the set-theme event
 * @param channel the linked (portal-side) channel
 * @throws InterruptedException if waiting for the store update is interrupted
 * @throws IOException declared for handler compatibility
 */
@Handler(dynamic=true)
public void onSetTheme(SetTheme event, LinkedIOSubchannel channel)
        throws InterruptedException, IOException {
    ThemeProvider themeProvider = StreamSupport
            .stream(themeLoader().spliterator(), false)
            .filter(t -> t.themeId().equals(event.theme())).findFirst()
            .orElse(baseTheme);
    Optional<Session> optSession = channel.associated(Session.class);
    if (optSession.isPresent()) {
        Session session = optSession.get();
        session.put("themeProvider", themeProvider.themeId());
        // Blocks (get()) until the persistent update has completed.
        channel.respond(new KeyValueStoreUpdate().update(
                "/" + session.getOrDefault(Principal.class, "").toString()
                + "/themeProvider", themeProvider.themeId())).get();
    }
    fire(new JsonOutput("reload"), channel);
}
/**
 * Serializes an outgoing JSON message to the upstream (websocket)
 * channel. The writer's close is suppressed so the underlying channel
 * stays open; {@code out.close()} only flushes the message.
 *
 * @param event the JSON output event
 * @param channel the linked (portal-side) channel
 * @throws InterruptedException declared for handler compatibility
 * @throws IOException if writing fails
 */
@Handler(dynamic=true)
public void onJsonOutput(JsonOutput event, LinkedIOSubchannel channel)
        throws InterruptedException, IOException {
    IOSubchannel upstream = channel.upstreamChannel();
    @SuppressWarnings("resource")
    CharBufferWriter out = new CharBufferWriter(upstream,
            upstream.responsePipeline()).suppressClose();
    event.toJson(out);
    out.close();
}
/**
 * Per-connection state: collects possibly fragmented websocket text
 * messages and decodes each complete JSON message into a
 * {@code JsonInput} event.
 */
private class PortalInfo {

    // Open while a (possibly fragmented) message is being received;
    // null between messages.
    private PipedWriter decodeWriter;

    /**
     * Appends the buffer to the current message. On the first fragment
     * a decoder task reading from the other end of the pipe is started;
     * on the last fragment the pipe is closed so the decoder sees EOF.
     *
     * @param channel the portal channel to fire the decoded event on
     * @param buffer the received characters
     * @param last whether this is the final fragment of the message
     * @throws IOException if writing to the pipe fails
     */
    public void toEvent(IOSubchannel channel, CharBuffer buffer,
            boolean last) throws IOException {
        if (decodeWriter == null) {
            decodeWriter = new PipedWriter();
            PipedReader reader = new PipedReader(
                    decodeWriter, buffer.capacity());
            activeEventPipeline().executorService()
                    .submit(new DecodeTask(reader, channel));
        }
        decodeWriter.append(buffer);
        if (last) {
            decodeWriter.close();
            decodeWriter = null;
        }
    }

    /**
     * Reads one JSON object from the pipe and fires it as a
     * {@code JsonInput} event; any failure is reported as an Error event.
     */
    private class DecodeTask implements Runnable {

        IOSubchannel channel;
        private Reader reader;

        public DecodeTask(Reader reader, IOSubchannel channel) {
            this.reader = reader;
            this.channel = channel;
        }

        /* (non-Javadoc)
         * @see java.lang.Runnable#run()
         */
        @Override
        public void run() {
            try (Reader in = reader) {
                // NOTE(review): this local "reader" shadows the field of
                // the same name; consider renaming for clarity.
                JsonReader reader = Json.createReader(in);
                fire(new JsonInput(reader.readObject()), channel);
            } catch (Throwable e) {
                fire(new Error(null, e));
            }
        }
    }
}
/**
 * Holds a supported locale together with its capitalized, localized
 * display label for presentation in the language selector.
 */
public static class LanguageInfo {

    /** The wrapped locale; never reassigned. */
    private final Locale locale;

    /**
     * Creates a new instance.
     *
     * @param locale the locale to represent
     */
    public LanguageInfo(Locale locale) {
        this.locale = locale;
    }

    /**
     * @return the locale
     */
    public Locale getLocale() {
        return locale;
    }

    /**
     * Returns the locale's display name (localized in itself) with the
     * first character upper-cased.
     *
     * @return the label, or the empty string if the locale has no
     * display name (e.g. {@link Locale#ROOT}); previously this case
     * threw a StringIndexOutOfBoundsException
     */
    public String getLabel() {
        String str = locale.getDisplayName(locale);
        if (str.isEmpty()) {
            return str;
        }
        return Character.toUpperCase(str.charAt(0)) + str.substring(1);
    }
}
/**
 * Immutable value object describing an installed theme, ordered
 * case-insensitively by its display name.
 */
public static class ThemeInfo implements Comparable<ThemeInfo> {

    /** The theme's unique identifier. */
    private String id;

    /** The theme's human-readable display name. */
    private String name;

    /**
     * Creates a new instance.
     *
     * @param id the theme id
     * @param name the theme display name
     */
    public ThemeInfo(String id, String name) {
        this.id = id;
        this.name = name;
    }

    /**
     * @return the id
     */
    public String id() {
        return id;
    }

    /**
     * @return the name
     */
    public String name() {
        return name;
    }

    /**
     * Orders themes by display name, ignoring case.
     */
    @Override
    public int compareTo(ThemeInfo other) {
        return String.CASE_INSENSITIVE_ORDER.compare(name, other.name());
    }
}
/**
 * Create a {@link URI} from a path. This is similar to calling
 * `new URI(null, null, path, null)` with the {@link URISyntaxException}
 * converted to a {@link IllegalArgumentException}.
 *
 * @param path the path
 * @return the uri
 * @throws IllegalArgumentException if the string violates
 * RFC 2396
 */
public static URI uriFromPath(String path) throws IllegalArgumentException {
    try {
        // Path-only URI; the constructor percent-encodes as needed.
        return new URI(null, null, path, null);
    } catch (URISyntaxException e) {
        throw new IllegalArgumentException(e);
    }
}
/**
 * Render support handed to portlets: resolves portlet-relative resource
 * URIs against the portal's "portlet-resource/&lt;type&gt;/" prefix.
 */
private class RenderSupportImpl implements RenderSupport {

    /* (non-Javadoc)
     * @see org.jgrapes.portal.RenderSupport#portletResource(java.lang.String, java.net.URI)
     */
    @Override
    public URI portletResource(String portletType, URI uri) {
        return portal.prefix().resolve(uriFromPath(
                "portlet-resource/" + portletType + "/")).resolve(uri);
    }
}
}
|
Fixed theme handling.
|
org.jgrapes.portal/src/org/jgrapes/portal/PortalView.java
|
Fixed theme handling.
|
|
Java
|
apache-2.0
|
9409725de2a95e93c8e824fcc1b6b9bc457bc4ad
| 0
|
gamlerhart/adbcj,gamlerhart/adbcj
|
/*
* Copyright (c) 2007 Mike Heath. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.adbcj.support;
import org.adbcj.DbException;
import org.adbcj.DbFuture;
import org.adbcj.DbListener;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.CancellationException;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
/**
 * Default {@link DbFuture} implementation. Completion (result, exception
 * or cancellation) happens at most once; waiting threads block on the
 * future itself, which doubles as the monitor.
 *
 * Thread-safety: all mutable state is guarded by {@code lock}; the
 * completion flags are additionally volatile for cheap fast-path reads.
 */
public class DefaultDbFuture<T> implements DbFuture<T> {

    /** Monitor guarding all state and used for completion signalling. */
    private final Object lock;

    /** Listeners to notify on completion; nulled out once notified. */
    private List<DbListener<T>> otherListeners = new ArrayList<DbListener<T>>(1);

    /**
     * The result of this future.
     */
    private volatile T result;

    /**
     * The exception thrown if there was an error.
     */
    private volatile Throwable exception;

    /**
     * Indicates if the future was cancelled.
     */
    private volatile boolean cancelled;

    /**
     * Indicates if the future has completed or not.
     */
    private volatile boolean done;

    /**
     * Returns an already completed future with a {@code null} result.
     *
     * @return a completed {@code DbFuture<Void>}
     */
    public static DbFuture<Void> completed() {
        // Parameterized instead of the former raw type.
        DefaultDbFuture<Void> f = new DefaultDbFuture<Void>();
        f.setResult(null);
        return f;
    }

    public DefaultDbFuture() {
        this.lock = this;
    }

    /**
     * Adds a completion listener. If the future is already done, the
     * listener is notified immediately (outside the monitor) and is not
     * retained. Previously the listener was also added to the listener
     * list after completion, which caused a NullPointerException (the
     * list is nulled out by {@link #notifyListeners()}) and could
     * double-notify.
     *
     * @param listener the listener, never {@code null}
     * @return this future
     */
    public DbFuture<T> addListener(DbListener<T> listener) {
        if (listener == null) {
            throw new IllegalArgumentException("listener can NOT be null");
        }
        boolean notifyNow = false;
        synchronized (lock) {
            if (done) {
                notifyNow = true;
            } else {
                otherListeners.add(listener);
            }
        }
        if (notifyNow) {
            // Invoke alien code outside the monitor.
            notifyListener(listener);
        }
        return this;
    }

    /**
     * Removes a previously added listener.
     *
     * @param listener the listener, never {@code null}
     * @return {@code true} if the listener was registered and removed
     */
    public boolean removeListener(DbListener<T> listener) {
        if (listener == null) {
            throw new IllegalArgumentException("listener can NOT be null");
        }
        synchronized (lock) {
            // After completion the list has been nulled out; nothing to remove.
            return otherListeners != null && otherListeners.remove(listener);
        }
    }

    /**
     * Attempts to cancel via {@link #doCancel(boolean)}; completes the
     * future and notifies listeners on success.
     */
    public final boolean cancel(boolean mayInterruptIfRunning) {
        if (done) {
            return false;
        }
        synchronized (lock) {
            if (done) {
                return false;
            }
            cancelled = doCancel(mayInterruptIfRunning);
            if (cancelled) {
                done = true;
                lock.notifyAll();
            }
        }
        if (cancelled) {
            notifyListeners();
        }
        return cancelled;
    }

    /**
     * Hook for subclasses; the default implementation is not cancellable.
     */
    protected boolean doCancel(boolean mayInterruptIfRunning) {
        return false;
    }

    /**
     * Waits (uninterruptibly only w.r.t. spurious wakeups) for completion
     * and returns the result.
     */
    public T get() throws InterruptedException, DbException {
        if (done) {
            return getResult();
        }
        synchronized (lock) {
            while (!done) {
                lock.wait();
            }
        }
        return getResult();
    }

    /**
     * Waits up to the given timeout for completion. Waiting is performed
     * in a deadline loop, so spurious wakeups no longer cause a premature
     * {@link TimeoutException} and a sub-millisecond timeout no longer
     * turns into {@code wait(0)} (wait forever).
     */
    public T get(long timeout, TimeUnit unit) throws InterruptedException, DbException, TimeoutException {
        long deadline = System.currentTimeMillis() + unit.toMillis(timeout);
        if (done) {
            return getResult();
        }
        synchronized (lock) {
            while (!done) {
                long remaining = deadline - System.currentTimeMillis();
                if (remaining <= 0) {
                    throw new TimeoutException();
                }
                lock.wait(remaining);
            }
        }
        return getResult();
    }

    /**
     * Waits for completion, deferring any interrupt until the result is
     * available; the interrupt status is restored before returning.
     */
    public T getUninterruptably() throws DbException {
        if (done) {
            return getResult();
        }
        synchronized (lock) {
            boolean interrupted = false;
            try {
                while (!done) {
                    try {
                        lock.wait();
                    } catch (InterruptedException e) {
                        interrupted = true;
                    }
                }
            } finally {
                if (interrupted) {
                    Thread.currentThread().interrupt();
                }
            }
        }
        return getResult();
    }

    /**
     * Maps the completed state to a return value or exception; must only
     * be called once {@code done} is {@code true}.
     */
    private T getResult() throws DbException {
        if (!done) {
            throw new IllegalStateException("Should not be calling this method when future is not done");
        }
        if (exception != null) {
            throw new DbException(exception);
        }
        if (cancelled) {
            throw new CancellationException();
        }
        return result;
    }

    /**
     * Completes the future with a result; may only be called once.
     *
     * @throws IllegalStateException if the future is already completed
     */
    public void setResult(T result) {
        synchronized (lock) {
            // Allow only once.
            if (done) {
                throw new IllegalStateException("Should not set result if future is completed");
            }
            this.result = result;
            done = true;
            lock.notifyAll();
        }
        // Notify outside the monitor, consistent with setException.
        notifyListeners();
    }

    /** Notifies a single listener, unwrapping any checked exception. */
    private void notifyListener(DbListener<T> listener) {
        try {
            listener.onCompletion(this);
        } catch (Throwable t) {
            throw UncheckedThrow.throwUnchecked(t);
        }
    }

    private void notifyListeners() {
        // There won't be any visibility problem or concurrent modification
        // because 'ready' flag will be checked against both addListener and
        // removeListener calls.
        synchronized (lock) {
            if (otherListeners != null) {
                for (DbListener<T> l : otherListeners) {
                    notifyListener(l);
                }
                otherListeners = null;
            }
        }
    }

    public boolean isCancelled() {
        return cancelled;
    }

    public boolean isDone() {
        return done;
    }

    /**
     * Completes the future with an exception; may only be called once.
     *
     * @throws IllegalStateException if the future is already completed
     */
    public void setException(Throwable exception) {
        synchronized (lock) {
            if (done) {
                throw new IllegalStateException("Can't set exception on completed future");
            }
            this.exception = exception;
            done = true;
            lock.notifyAll();
        }
        notifyListeners();
    }
}
|
api/src/main/java/org/adbcj/support/DefaultDbFuture.java
|
/*
* Copyright (c) 2007 Mike Heath. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.adbcj.support;
import org.adbcj.DbException;
import org.adbcj.DbFuture;
import org.adbcj.DbListener;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.CancellationException;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
public class DefaultDbFuture<T> implements DbFuture<T> {
private final Object lock;
private List<DbListener<T>> otherListeners;
/**
* The result of this future.
*/
private volatile T result;
/**
* The exception thrown if there was an error.
*/
private volatile Throwable exception;
/**
* Indicates if the future was cancelled.
*/
private volatile boolean cancelled;
/**
* Indicates if the future has completed or not.
*/
private volatile boolean done;
public static DbFuture<Void> completed() {
DefaultDbFuture f = new DefaultDbFuture();
f.setResult(null);
return f;
}
public DefaultDbFuture() {
this.lock = this;
}
public DbFuture<T> addListener(DbListener<T> listener) {
if (listener == null) {
throw new IllegalArgumentException("listener can NOT be null");
}
boolean notifyNow = true;
if (!done) {
synchronized (lock) {
if (!done) {
notifyNow = false;
if (otherListeners == null) {
otherListeners = new ArrayList<DbListener<T>>(1);
}
otherListeners.add(listener);
}
}
}
if (notifyNow) {
notifyListener(listener);
}
return this;
}
public boolean removeListener(DbListener<T> listener) {
if (listener == null) {
throw new IllegalArgumentException("listener can NOT be null");
}
boolean removed = false;
synchronized (lock) {
if (!done) {
removed = otherListeners.remove(listener);
}
}
return removed;
}
public final boolean cancel(boolean mayInterruptIfRunning) {
if (done) {
return false;
}
synchronized (lock) {
if (done) {
return false;
}
cancelled = doCancel(mayInterruptIfRunning);
if (cancelled) {
done = true;
lock.notifyAll();
}
}
if (cancelled) {
notifyListeners();
}
return cancelled;
}
protected boolean doCancel(boolean mayInterruptIfRunning) {
return false;
}
public T get() throws InterruptedException, DbException {
if (done) {
return getResult();
}
synchronized (lock) {
if (done) {
return getResult();
}
while (!done) {
lock.wait();
}
}
return getResult();
}
public T get(long timeout, TimeUnit unit) throws InterruptedException, DbException, TimeoutException {
long timeoutMillis = unit.toMillis(timeout);
if (done) {
return getResult();
}
synchronized (lock) {
if (done) {
return getResult();
}
lock.wait(timeoutMillis);
if (!done) {
throw new TimeoutException();
}
}
return getResult();
}
public T getUninterruptably() throws DbException {
if (done) {
return getResult();
}
synchronized (lock) {
if (done) {
return getResult();
}
boolean interrupted = false;
try {
while (!done) {
try {
lock.wait();
} catch (InterruptedException e) {
interrupted = true;
}
}
} finally {
if (interrupted) {
Thread.currentThread().interrupt();
}
}
}
return getResult();
}
private T getResult() throws DbException {
if (!done) {
throw new IllegalStateException("Should not be calling this method when future is not done");
}
if (exception != null) {
throw new DbException(exception);
}
if (cancelled) {
throw new CancellationException();
}
return result;
}
public void setResult(T result) {
synchronized (lock) {
// Allow only once.
if (done) {
return;
}
this.result = result;
done = true;
lock.notifyAll();
notifyListeners();
}
}
private void notifyListener(DbListener<T> listener) {
try {
listener.onCompletion(this);
} catch (Throwable t) {
throw UncheckedThrow.throwUnchecked(t);
}
}
private void notifyListeners() {
// There won't be any visibility problem or concurrent modification
// because 'ready' flag will be checked against both addListener and
// removeListener calls.
synchronized (lock) {
if (otherListeners != null) {
for (DbListener<T> l : otherListeners) {
notifyListener(l);
}
otherListeners = null;
}
}
}
public boolean isCancelled() {
return cancelled;
}
public boolean isDone() {
return done;
}
public void setException(Throwable exception) {
synchronized (lock) {
if (done) {
throw new IllegalStateException("Can't set exception on completed future");
}
this.exception = exception;
done = true;
lock.notifyAll();
}
notifyListeners();
}
}
|
removing code
|
api/src/main/java/org/adbcj/support/DefaultDbFuture.java
|
removing code
|
|
Java
|
apache-2.0
|
d5abadad23ace14250ca7aac2fb55854c18e2885
| 0
|
shaneataylor/dita-ot,drmacro/dita-ot,Hasimir/dita-ot,dita-ot/dita-ot,queshaw/dita-ot,infotexture/dita-ot,dita-ot/dita-ot,queshaw/dita-ot,shaneataylor/dita-ot,robander/dita-ot,dita-ot/dita-ot,zanyants/dita-ot,shaneataylor/dita-ot,drmacro/dita-ot,drmacro/dita-ot,dita-ot/dita-ot,robander/dita-ot,infotexture/dita-ot,shaneataylor/dita-ot,zanyants/dita-ot,Hasimir/dita-ot,drmacro/dita-ot,robander/dita-ot,eerohele/dita-ot,doctales/dita-ot,eerohele/dita-ot,zanyants/dita-ot,dita-ot/dita-ot,infotexture/dita-ot,queshaw/dita-ot,drmacro/dita-ot,doctales/dita-ot,infotexture/dita-ot,robander/dita-ot,Hasimir/dita-ot,doctales/dita-ot,Hasimir/dita-ot,robander/dita-ot,queshaw/dita-ot,zanyants/dita-ot,eerohele/dita-ot,eerohele/dita-ot,doctales/dita-ot,infotexture/dita-ot,shaneataylor/dita-ot
|
/*
* This file is part of the DITA Open Toolkit project.
* See the accompanying license.txt file for applicable licenses.
*/
/*
* (c) Copyright IBM Corp. 2004, 2005 All Rights Reserved.
*/
package org.dita.dost.module;
import static org.dita.dost.util.Constants.*;
import static org.dita.dost.util.FileUtils.getRelativePath;
import static org.dita.dost.util.FileUtils.getRelativeUnixPath;
import static org.dita.dost.util.FileUtils.resolve;
import static org.dita.dost.util.Job.*;
import static org.dita.dost.util.Configuration.*;
import static org.dita.dost.util.URLUtils.*;
import static org.dita.dost.util.FilterUtils.*;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.net.URI;
import java.util.*;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.transform.Result;
import javax.xml.transform.Source;
import javax.xml.transform.Transformer;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.dom.DOMSource;
import javax.xml.transform.sax.SAXSource;
import javax.xml.transform.stream.StreamResult;
import org.apache.xerces.xni.grammars.XMLGrammarPool;
import org.apache.xml.resolver.tools.CatalogResolver;
import org.dita.dost.exception.DITAOTException;
import org.dita.dost.exception.DITAOTXMLErrorHandler;
import org.dita.dost.log.MessageUtils;
import org.dita.dost.pipeline.AbstractPipelineInput;
import org.dita.dost.pipeline.AbstractPipelineOutput;
import org.dita.dost.reader.DitaValReader;
import org.dita.dost.reader.GrammarPoolManager;
import org.dita.dost.reader.SubjectSchemeReader;
import org.dita.dost.util.*;
import org.dita.dost.writer.*;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.xml.sax.*;
import org.xml.sax.helpers.XMLFilterImpl;
/**
* DebugAndFilterModule implement the second step in preprocess. It will insert debug
* information into every dita files and filter out the information that is not
* necessary.
*
* @author Zhang, Yuan Peng
*/
public final class DebugAndFilterModule extends AbstractPipelineModuleImpl {
/** Generate {@code xtrf} and {@code xtrc} attributes */
final boolean genDebugInfo = Boolean.parseBoolean(Configuration.configuration.get("generate-debug-attributes"));
/** Absolute input map path. */
private File inputMap;
/** use grammar pool cache */
private boolean gramcache = true;
private boolean setSystemId;
/** Profiling is enabled. */
private boolean profilingEnabled;
private boolean validate;
private String transtype;
private boolean forceUnique;
/** Absolute DITA-OT base path. */
private File ditaDir;
private File ditavalFile;
/** Absolute input directory path. */
private File inputDir;
private FilterUtils filterUtils;
/** Absolute path to current destination file. */
private File outputFile;
private Map<String, Map<String, Set<String>>> validateMap;
private Map<String, Map<String, String>> defaultValueMap;
/** XMLReader instance for parsing dita file */
private XMLReader reader;
/** Absolute path to current source file. */
private File currentFile;
private Map<File, Set<File>> dic;
private SubjectSchemeReader subjectSchemeReader;
private FilterUtils baseFilterUtils;
private ForceUniqueFilter forceUniqueFilter;
private DitaWriterFilter ditaWriterFilter;
/**
 * Runs the debug-and-filter step: reads the pipeline arguments,
 * initializes readers and filters, processes every DITA topic/map
 * (plus conref targets and copy-to sources), performs the copy-to
 * task and persists the job configuration.
 *
 * @param input pipeline input parameters
 * @return always {@code null}
 * @throws DITAOTException if processing fails; the original cause is chained
 */
@Override
public AbstractPipelineOutput execute(final AbstractPipelineInput input) throws DITAOTException {
    if (logger == null) {
        throw new IllegalStateException("Logger not set");
    }
    try {
        readArguments(input);
        init();
        for (final FileInfo f: job.getFileInfo()) {
            if (ATTR_FORMAT_VALUE_DITA.equals(f.format) || ATTR_FORMAT_VALUE_DITAMAP.equals(f.format)
                    || f.isConrefTarget || f.isCopyToSource) {
                processFile(f);
            }
        }
        performCopytoTask();
        job.write();
    } catch (final Exception e) {
        // Chain the cause instead of additionally dumping the stack trace
        // to stderr (the former printStackTrace() duplicated the error).
        throw new DITAOTException("Exception doing debug and filter module processing: " + e.getMessage(), e);
    }
    return null;
}
/**
 * Processes a single file: loads the subject schemes associated with it
 * (refreshing the filter utils if profiling is enabled), builds the XML
 * filter pipe and runs the file through it via an identity transform
 * into the temporary directory.
 *
 * @param f file info of the file to process (path relative to input dir)
 */
private void processFile(final FileInfo f) {
    currentFile = new File(inputDir, f.file.getPath());
    if (!currentFile.exists()) {
        // Assuming this is an copy-to target file, ignore it
        logger.debug("Ignoring a copy-to file " + f.file);
        return;
    }
    outputFile = new File(job.tempDir, f.file.getPath());
    final File outputDir = outputFile.getParentFile();
    if (!outputDir.exists() && !outputDir.mkdirs()) {
        logger.error("Failed to create output directory " + outputDir.getAbsolutePath());
        return;
    }
    logger.info("Processing " + currentFile.getAbsolutePath());
    // Load the subject schemes referenced by this file, if any.
    final Set<File> schemaSet = dic.get(f.file);
    if (schemaSet != null && !schemaSet.isEmpty()) {
        logger.debug("Loading subject schemes");
        subjectSchemeReader.reset();
        for (final File schema : schemaSet) {
            subjectSchemeReader.loadSubjectScheme(new File(FileUtils.resolve(job.tempDir.getAbsolutePath(), schema.getPath()) + SUBJECT_SCHEME_EXTENSION));
        }
        validateMap = subjectSchemeReader.getValidValuesMap();
        defaultValueMap = subjectSchemeReader.getDefaultValueMap();
    } else {
        validateMap = Collections.EMPTY_MAP;
        defaultValueMap = Collections.EMPTY_MAP;
    }
    if (profilingEnabled) {
        filterUtils = baseFilterUtils.refine(subjectSchemeReader.getSubjectSchemeMap());
    }
    OutputStream out = null;
    try {
        out = new FileOutputStream(outputFile);
        reader.setErrorHandler(new DITAOTXMLErrorHandler(currentFile.getAbsolutePath(), logger));
        final TransformerFactory tf = TransformerFactory.newInstance();
        final Transformer serializer = tf.newTransformer();
        // Chain the filters; the XML reader is the pipe's source.
        XMLReader xmlSource = reader;
        for (final XMLFilter filter: getProcessingPipe(currentFile, f.file)) {
            filter.setParent(xmlSource);
            xmlSource = filter;
        }
        // ContentHandler must be reset so e.g. Saxon 9.1 will reassign ContentHandler
        // when reusing filter with multiple Transformers.
        xmlSource.setContentHandler(null);
        final Source source = new SAXSource(xmlSource, new InputSource(currentFile.toURI().toString()));
        final Result result = new StreamResult(out);
        serializer.transform(source, result);
    } catch (final RuntimeException e) {
        throw e;
    } catch (final Exception e) {
        // Log and continue with the next file; only runtime errors abort.
        logger.error(e.getMessage(), e) ;
    } finally {
        if (out != null) {
            try {
                out.close();
            } catch (final Exception e) {
                logger.error(e.getMessage(), e) ;
            }
        }
    }
}
/**
 * Initializes per-run state: writes out the subject schemes, loads the
 * subject-scheme dictionary, sets up the DITAVAL-based filter utils
 * (when profiling is enabled) and creates the XML reader and reusable
 * filters.
 *
 * @throws IOException if reading the subject dictionary fails
 * @throws DITAOTException if subject scheme generation fails
 * @throws SAXException if XML reader setup fails
 */
private void init() throws IOException, DITAOTException, SAXException {
    // Output subject schemas
    outputSubjectScheme();
    subjectSchemeReader = new SubjectSchemeReader();
    subjectSchemeReader.setLogger(logger);
    // file -> subject schemes dictionary produced by earlier steps.
    dic = SubjectSchemeReader.readMapFromXML(new File(job.tempDir, FILE_NAME_SUBJECT_DICTIONARY));
    if (profilingEnabled) {
        final DitaValReader filterReader = new DitaValReader();
        filterReader.setLogger(logger);
        filterReader.initXMLReader(setSystemId);
        Map<FilterKey, Action> filterMap;
        if (ditavalFile != null) {
            filterReader.read(ditavalFile.getAbsoluteFile());
            filterMap = filterReader.getFilterMap();
        } else {
            filterMap = Collections.EMPTY_MAP;
        }
        baseFilterUtils = new FilterUtils(printTranstype.contains(transtype), filterMap);
        baseFilterUtils.setLogger(logger);
    }
    initXmlReader();
    initFilters();
}
/**
 * Init xml reader used for pipeline parsing.
 * Enables (schema) validation when requested, sets the catalog-based
 * entity resolver, and attaches the shared Xerces grammar pool for DTD
 * and schema caching when grammar caching is enabled.
 *
 * @throws SAXException if a mandatory reader feature cannot be set
 */
private void initXmlReader() throws SAXException {
    CatalogUtils.setDitaDir(ditaDir);
    reader = XMLUtils.getXMLReader();
    if (validate) {
        reader.setFeature(FEATURE_VALIDATION, true);
        try {
            reader.setFeature(FEATURE_VALIDATION_SCHEMA, true);
        } catch (final SAXNotRecognizedException e) {
            // Not Xerces, ignore exception
        }
    }
    reader.setFeature(FEATURE_NAMESPACE, true);
    final CatalogResolver resolver = CatalogUtils.getCatalogResolver();
    reader.setEntityResolver(resolver);
    if (gramcache) {
        final XMLGrammarPool grammarPool = GrammarPoolManager.getGrammarPool();
        try {
            reader.setProperty("http://apache.org/xml/properties/internal/grammar-pool", grammarPool);
            logger.info("Using Xerces grammar pool for DTD and schema caching.");
        } catch (final NoClassDefFoundError e) {
            logger.debug("Xerces not available, not using grammar caching");
        } catch (final SAXNotRecognizedException e) {
            logger.warn("Failed to set Xerces grammar pool for parser: " + e.getMessage());
        } catch (final SAXNotSupportedException e) {
            logger.warn("Failed to set Xerces grammar pool for parser: " + e.getMessage());
        }
    }
}
/**
 * Initialize reusable filters.
 * These two filters are created once and reused for every processed
 * file; per-file state is set in {@code getProcessingPipe}.
 */
private void initFilters() {
    forceUniqueFilter = new ForceUniqueFilter();
    forceUniqueFilter.setLogger(logger);
    forceUniqueFilter.setJob(job);
    forceUniqueFilter.setEntityResolver(reader.getEntityResolver());
    ditaWriterFilter = new DitaWriterFilter();
    ditaWriterFilter.setLogger(logger);
    ditaWriterFilter.setJob(job);
    ditaWriterFilter.setEntityResolver(reader.getEntityResolver());
}
/**
 * Get pipe line filters
 *
 * Order: optional debug (xtrf/xtrc) filter, optional profiling filter,
 * validation, normalization, optional force-unique, and finally the
 * DITA writer filter.
 *
 * @param fileToParse absolute path to current file being processed
 * @param inFile relative file path
 * @return the ordered list of filters for this file
 */
private List<XMLFilter> getProcessingPipe(final File fileToParse, final File inFile) {
    final List<XMLFilter> pipe = new ArrayList<XMLFilter>();
    if (genDebugInfo) {
        final DebugFilter debugFilter = new DebugFilter();
        debugFilter.setLogger(logger);
        debugFilter.setInputFile(fileToParse);
        pipe.add(debugFilter);
    }
    // filterUtils is only set when profiling is enabled (see processFile).
    if (filterUtils != null) {
        final ProfilingFilter profilingFilter = new ProfilingFilter();
        profilingFilter.setLogger(logger);
        profilingFilter.setFilterUtils(filterUtils);
        pipe.add(profilingFilter);
    }
    final ValidationFilter validationFilter = new ValidationFilter();
    validationFilter.setLogger(logger);
    validationFilter.setValidateMap(validateMap);
    validationFilter.setCurrentFile(toURI(inFile));
    validationFilter.setJob(job);
    pipe.add(validationFilter);
    final NormalizeFilter normalizeFilter = new NormalizeFilter();
    normalizeFilter.setLogger(logger);
    pipe.add(normalizeFilter);
    if (forceUnique) {
        forceUniqueFilter.setCurrentFile(currentFile);
        pipe.add(forceUniqueFilter);
    }
    // Reused filter instances configured with per-file state.
    ditaWriterFilter.setDefaultValueMap(defaultValueMap);
    ditaWriterFilter.setCurrentFile(currentFile);
    ditaWriterFilter.setOutputFile(outputFile);
    pipe.add(ditaWriterFilter);
    return pipe;
}
/**
 * Reads the module parameters from the pipeline input into fields:
 * base/DITA directories, transtype, profiling flag (defaults to true)
 * and DITAVAL file, grammar-cache/validation/setSystemId/forceUnique
 * flags, and the absolute input directory and map paths.
 *
 * @param input pipeline input parameters
 */
private void readArguments(AbstractPipelineInput input) {
    final String baseDir = input.getAttribute(ANT_INVOKER_PARAM_BASEDIR);
    ditaDir = new File(input.getAttribute(ANT_INVOKER_EXT_PARAM_DITADIR));
    transtype = input.getAttribute(ANT_INVOKER_EXT_PARAM_TRANSTYPE);
    // Profiling is on unless explicitly disabled.
    profilingEnabled = true;
    if (input.getAttribute(ANT_INVOKER_PARAM_PROFILING_ENABLED) != null) {
        profilingEnabled = Boolean.parseBoolean(input.getAttribute(ANT_INVOKER_PARAM_PROFILING_ENABLED));
    }
    if (profilingEnabled) {
        if (input.getAttribute(ANT_INVOKER_PARAM_DITAVAL) != null) {
            ditavalFile = new File(input.getAttribute(ANT_INVOKER_PARAM_DITAVAL));
            // Relative DITAVAL paths are resolved against the base dir.
            if (!ditavalFile.isAbsolute()) {
                ditavalFile = new File(baseDir, ditavalFile.getPath()).getAbsoluteFile();
            }
        }
    }
    gramcache = "yes".equalsIgnoreCase(input.getAttribute(ANT_INVOKER_EXT_PARAM_GRAMCACHE));
    validate = Boolean.valueOf(input.getAttribute("validate"));
    setSystemId = "yes".equals(input.getAttribute(ANT_INVOKER_EXT_PARAN_SETSYSTEMID));
    forceUnique = Boolean.valueOf(input.getAttribute(ANT_INVOKER_EXT_PARAN_FORCE_UNIQUE));
    inputDir = job.getInputDir();
    if (!inputDir.isAbsolute()) {
        inputDir = new File(baseDir, inputDir.getPath()).getAbsoluteFile();
    }
    inputMap = new File(inputDir, job.getInputMap().getPath()).getAbsoluteFile();
}
/**
* Output subject schema file.
*
* @throws DITAOTException if generation files
*/
private void outputSubjectScheme() throws DITAOTException {
    try {
        // Parent -> children relation graph of subject scheme maps, written by an earlier step.
        final Map<File, Set<File>> graph = SubjectSchemeReader.readMapFromXML(new File(job.tempDir, FILE_NAME_SUBJECT_RELATION));
        // Breadth-first walk over the relation graph, seeded with every key.
        final Queue<File> queue = new LinkedList<File>(graph.keySet());
        final Set<File> visitedSet = new HashSet<File>();
        final DocumentBuilder builder = XMLUtils.getDocumentBuilder();
        builder.setEntityResolver(CatalogUtils.getCatalogResolver());
        while (!queue.isEmpty()) {
            final File parent = queue.poll();
            final Set<File> children = graph.get(parent);
            if (children != null) {
                queue.addAll(children);
            }
            // "ROOT" is a synthetic graph root, not a real file; skip it and anything already done.
            if (new File("ROOT").equals(parent) || visitedSet.contains(parent)) {
                continue;
            }
            visitedSet.add(parent);
            // Prefer a previously generated intermediate scheme in the temp dir, if one exists.
            File tmprel = new File(FileUtils.resolve(job.tempDir, parent) + SUBJECT_SCHEME_EXTENSION);
            Document parentRoot;
            if (!tmprel.exists()) {
                // Parent scheme sources are resolved against the input directory.
                final File src = new File(job.getInputDir(), parent.getPath());
                parentRoot = builder.parse(src);
            } else {
                parentRoot = builder.parse(tmprel);
            }
            if (children != null) {
                for (final File childpath: children) {
                    // NOTE(review): child schemes resolve against the input map's parent,
                    // unlike the parent above — confirm this asymmetry is intentional.
                    final Document childRoot = builder.parse(new File(inputMap.getParentFile(), childpath.getPath()));
                    // Propagate subject definitions between parent and child, then persist the child.
                    mergeScheme(parentRoot, childRoot);
                    generateScheme(new File(job.tempDir, childpath.getPath() + SUBJECT_SCHEME_EXTENSION), childRoot);
                }
            }
            //Output parent scheme
            generateScheme(new File(job.tempDir.getAbsoluteFile(), parent.getPath() + SUBJECT_SCHEME_EXTENSION), parentRoot);
        }
    } catch (final Exception e) {
        logger.error(e.getMessage(), e) ;
        throw new DITAOTException(e);
    }
}
/**
 * Merge subject definitions between a parent scheme document and one of its child
 * scheme documents. A parent {@code subjectdef} carrying a keyref has its children
 * copied under the matching definition in the child document; a parent
 * {@code subjectdef} carrying keys has the matching child content copied under it
 * in the parent document. Both documents are modified in place.
 *
 * @param parentRoot parent subject scheme document, modified in place
 * @param childRoot child subject scheme document, modified in place
 */
private void mergeScheme(final Document parentRoot, final Document childRoot) {
    final Queue<Element> pQueue = new LinkedList<Element>();
    pQueue.offer(parentRoot.getDocumentElement());
    while (!pQueue.isEmpty()) {
        final Element pe = pQueue.poll();
        NodeList pList = pe.getChildNodes();
        // Breadth-first traversal of the parent document.
        for (int i = 0; i < pList.getLength(); i++) {
            final Node node = pList.item(i);
            if (node.getNodeType() == Node.ELEMENT_NODE) {
                pQueue.offer((Element)node);
            }
        }
        String value = pe.getAttribute(ATTRIBUTE_NAME_CLASS);
        // Only subjectdef elements take part in the merge.
        if (StringUtils.isEmptyString(value)
                || !SUBJECTSCHEME_SUBJECTDEF.matches(value)) {
            continue;
        }
        if (!StringUtils.isEmptyString(
                value = pe.getAttribute(ATTRIBUTE_NAME_KEYREF))) {
            // extend child scheme
            final Element target = searchForKey(childRoot.getDocumentElement(), value);
            if (target == null) {
                /*
                 * TODO: we have a keyref here to extend into child scheme, but can't
                 * find any matching <subjectdef> in child scheme. Shall we throw out
                 * a warning?
                 *
                 * Not for now, just bypass it.
                 */
                continue;
            }
            // target found
            pList = pe.getChildNodes();
            for (int i = 0; i < pList.getLength(); i++) {
                // Shallow import; skip nodes whose keys already exist under the target.
                final Node tmpnode = childRoot.importNode(pList.item(i), false);
                if (tmpnode.getNodeType() == Node.ELEMENT_NODE
                        && searchForKey(target,
                                ((Element)tmpnode).getAttribute(ATTRIBUTE_NAME_KEYS)) != null) {
                    continue;
                }
                target.appendChild(tmpnode);
            }
        } else if (!StringUtils.isEmptyString(
                value = pe.getAttribute(ATTRIBUTE_NAME_KEYS))) {
            // merge into parent scheme
            final Element target = searchForKey(childRoot.getDocumentElement(), value);
            if (target != null) {
                pList = target.getChildNodes();
                for (int i = 0; i < pList.getLength(); i++) {
                    // Shallow import; skip nodes whose keys already exist under pe.
                    final Node tmpnode = parentRoot.importNode(pList.item(i), false);
                    if (tmpnode.getNodeType() == Node.ELEMENT_NODE
                            && searchForKey(pe,
                                    ((Element)tmpnode).getAttribute(ATTRIBUTE_NAME_KEYS)) != null) {
                        continue;
                    }
                    pe.appendChild(tmpnode);
                }
            }
        }
    }
}
/**
 * Find the first subjectdef element under {@code root} (breadth-first, including
 * {@code root} itself) whose keys attribute equals {@code key}.
 *
 * @param root subtree to search, may be {@code null}
 * @param key key value to look for, may be empty
 * @return matching element, or {@code null} if none found
 */
private Element searchForKey(final Element root, final String key) {
    if (root == null || StringUtils.isEmptyString(key)) {
        return null;
    }
    final Queue<Element> pending = new LinkedList<Element>();
    pending.offer(root);
    while (!pending.isEmpty()) {
        final Element current = pending.poll();
        // Enqueue element children for the breadth-first walk.
        final NodeList children = current.getChildNodes();
        for (int i = 0; i < children.getLength(); i++) {
            final Node child = children.item(i);
            if (child.getNodeType() == Node.ELEMENT_NODE) {
                pending.offer((Element) child);
            }
        }
        // Only subjectdef elements are candidates.
        final String cls = current.getAttribute(ATTRIBUTE_NAME_CLASS);
        if (StringUtils.isEmptyString(cls) || !SUBJECTSCHEME_SUBJECTDEF.matches(cls)) {
            continue;
        }
        final String keys = current.getAttribute(ATTRIBUTE_NAME_KEYS);
        if (!StringUtils.isEmptyString(keys) && keys.equals(key)) {
            return current;
        }
    }
    return null;
}
/**
* Serialize subject scheme file.
*
* @param filename output filepath
* @param root subject scheme document
*
* @throws DITAOTException if generation fails
*/
/**
 * Serialize a subject scheme document to a file, creating parent directories
 * as needed.
 *
 * @param filename output file path
 * @param root subject scheme document to serialize
 * @throws DITAOTException if the directory cannot be created or serialization fails
 */
private void generateScheme(final File filename, final Document root) throws DITAOTException {
    final File dir = filename.getParentFile();
    if (!dir.exists() && !dir.mkdirs()) {
        throw new DITAOTException("Failed to make directory " + dir.getAbsolutePath());
    }
    // try-with-resources guarantees the stream is closed; a failure on close is
    // caught below and surfaced as DITAOTException, while a failure during the
    // transform is no longer masked by a secondary close failure.
    try (final FileOutputStream out = new FileOutputStream(filename)) {
        final Transformer tf = TransformerFactory.newInstance().newTransformer();
        tf.transform(new DOMSource(root), new StreamResult(out));
    } catch (final Exception e) {
        logger.error(e.getMessage(), e);
        throw new DITAOTException(e);
    }
}
/**
* Execute copy-to task, generate copy-to targets base on sources
*/
private void performCopytoTask() {
    // Combined copy-to map: entries recorded in the job, plus any entries
    // generated by the force-unique filter during this run (the latter win).
    final Map<File, File> copytoMap = new HashMap<File, File>();
    for (final Map.Entry<URI, URI> e: job.getCopytoMap().entrySet()) {
        copytoMap.put(toFile(e.getKey()), toFile(e.getValue()));
    }
    if (forceUniqueFilter != null) {
        copytoMap.putAll(forceUniqueFilter.copyToMap);
    }
    for (final Map.Entry<File, File> entry: copytoMap.entrySet()) {
        final File copytoTarget = entry.getKey();
        final File copytoSource = entry.getValue();
        final File srcFile = new File(job.tempDir, copytoSource.getPath());
        final File targetFile = new File(job.tempDir, copytoTarget.getPath());
        if (targetFile.exists()) {
            // Never overwrite an existing file; warn (DOTX064W) and skip.
            logger.warn(MessageUtils.getInstance().getMessage("DOTX064W", copytoTarget.getPath()).toString());
        } else {
            final File inputMapInTemp = new File(job.tempDir, job.getInputMap().getPath()).getAbsoluteFile();
            // Copy while rewriting workdir / path-to-project PIs for the new location.
            copyFileWithPIReplaced(srcFile, targetFile, copytoTarget, inputMapInTemp);
            // add new file info into job
            final FileInfo src = job.getFileInfo(toURI(copytoSource));
            final FileInfo.Builder b = src != null ? new FileInfo.Builder(src) : new FileInfo.Builder();
            final FileInfo dst = b.uri(toURI(copytoTarget)).isCopyToSource(false).build();
            job.add(dst);
        }
    }
}
/**
 * Copy a file while rewriting its workdir and path-to-project processing
 * instruction contents to match the copy-to destination.
 *
 * @param src source file in the temporary directory
 * @param target copy-to destination file in the temporary directory
 * @param copytoTargetFilename relative copy-to target path, used to compute the path-to-project value
 * @param inputMapInTemp absolute path of the input map inside the temporary directory
 */
/**
 * Copy a single file while rewriting its workdir / path-to-project processing
 * instructions so they reflect the copy-to destination.
 *
 * @param src source file
 * @param target destination file
 * @param copytoTargetFilename relative copy-to target path
 * @param inputMapInTemp absolute input map location in the temporary directory
 */
public void copyFileWithPIReplaced(final File src, final File target, final File copytoTargetFilename, final File inputMapInTemp) {
    final File targetDir = target.getParentFile();
    if (!targetDir.exists() && !targetDir.mkdirs()) {
        logger.error("Failed to create copy-to target directory " + targetDir.getAbsolutePath());
        return;
    }
    // The rewritten PIs must point from the new location back to the project root.
    final File path2project = DebugAndFilterModule.getPathtoProject(copytoTargetFilename, target, inputMapInTemp, job);
    final XMLFilter piRewriter = new CopyToFilter(targetDir, path2project);
    logger.info("Processing " + src.getAbsolutePath() + " to " + target.getAbsolutePath());
    try {
        XMLUtils.transform(src, target, Arrays.asList(piRewriter));
    } catch (final DITAOTException e) {
        logger.error("Failed to write copy-to file: " + e.getMessage(), e);
    }
}
/**
* XML filter to rewrite processing instructions to reflect copy-to location. The following processing-instructions are
* processed:
*
* <ul>
* <li>{@link Constants#PI_WORKDIR_TARGET PI_WORKDIR_TARGET}</li>
* <li>{@link Constants#PI_WORKDIR_TARGET_URI PI_WORKDIR_TARGET_URI}</li>
* <li>{@link Constants#PI_PATH2PROJ_TARGET PI_PATH2PROJ_TARGET}</li>
* <li>{@link Constants#PI_PATH2PROJ_TARGET_URI PI_PATH2PROJ_TARGET_URI}</li>
* </ul>
*/
private static final class CopyToFilter extends XMLFilterImpl {

    /** Directory the copied file is written to; used for workdir PIs. May be {@code null}. */
    private final File workdir;
    /** Path from the copied file back to the project root; may be {@code null}. */
    private final File path2project;

    CopyToFilter(final File workdir, final File path2project) {
        super();
        this.workdir = workdir;
        this.path2project = path2project;
    }

    @Override
    public void processingInstruction(final String target, final String data) throws SAXException {
        // Rewrite the PI data for recognized targets; all other PIs pass through unchanged.
        String d = data;
        if(target.equals(PI_WORKDIR_TARGET)) {
            if (workdir != null) {
                try {
                    // NOTE(review): on Windows the canonical path is prefixed with a
                    // forward slash (apparently the expected workdir PI format there);
                    // other platforms emit the canonical path as-is. Confirm against consumers.
                    if (!OS_NAME.toLowerCase().contains(OS_NAME_WINDOWS)) {
                        d = workdir.getCanonicalPath();
                    } else {
                        d = UNIX_SEPARATOR + workdir.getCanonicalPath();
                    }
                } catch (final IOException e) {
                    throw new RuntimeException("Failed to get canonical path for working directory: " + e.getMessage(), e);
                }
            }
        } else if(target.equals(PI_WORKDIR_TARGET_URI)) {
            if (workdir != null) {
                d = workdir.toURI().toString();
            }
        } else if (target.equals(PI_PATH2PROJ_TARGET)) {
            if (path2project != null) {
                d = path2project.getPath();
            }
        } else if (target.equals(PI_PATH2PROJ_TARGET_URI)) {
            if (path2project != null) {
                d = toURI(path2project).toString();
                // The URI form always ends with a separator.
                if (!d.endsWith(URI_SEPARATOR)) {
                    d = d + URI_SEPARATOR;
                }
            }
        }
        // Forward the (possibly rewritten) PI downstream.
        getContentHandler().processingInstruction(target, d);
    }
}
/**
* Get path to base directory
*
* @param filename relative input file path from base directory
* @param traceFilename absolute input file
* @param inputMap absolute path to start file
* @return path to base directory, {@code null} if not available
*/
public static File getPathtoProject(final File filename, final File traceFilename, final File inputMap, final Job job) {
    // For the non-legacy copy-outer generation modes, the path depends on whether the
    // file falls outside the start directory; the legacy OLDSOLUTION mode keeps the
    // original relative-path behavior.
    if (job.getGeneratecopyouter() != Job.Generate.OLDSOLUTION) {
        if (isOutFile(traceFilename, inputMap)) {
            // File lives outside the start directory: compute the path via the output dir.
            return toFile(getRelativePathFromOut(traceFilename.getAbsoluteFile(), job));
        } else {
            // Path of the file's directory relative to the input map, in Unix form.
            return new File(getRelativeUnixPath(traceFilename.getAbsolutePath(), inputMap.getAbsolutePath())).getParentFile();
        }
    } else {
        return getRelativePath(filename);
    }
}
/**
* Just for the overflowing files.
* @param overflowingFile overflowingFile
* @return relative path to out
*/
public static String getRelativePathFromOut(final File overflowingFile, final Job job) {
    // Path of the overflowing file relative to the input file.
    final File relativePath = getRelativePath(job.getInputFile(), overflowingFile);
    final File outputDir = job.getOutputDir().getAbsoluteFile();
    // "index.html" is only a placeholder name used to anchor the relative-path computation.
    final File outputPathName = new File(outputDir, "index.html");
    // Where the file would land under the output directory.
    final File finalOutFilePathName = resolve(outputDir, relativePath.getPath());
    final File finalRelativePathName = getRelativePath(finalOutFilePathName, outputPathName);
    File parentDir = finalRelativePathName.getParentFile();
    if (parentDir == null || parentDir.getPath().isEmpty()) {
        // File resolves next to the anchor: fall back to the current directory.
        parentDir = new File(".");
    }
    // Always returns a directory path ending in the platform separator.
    return parentDir.getPath() + File.separator;
}
/**
* Check if path falls outside start document directory
*
* @param filePathName absolute path to test
* @param inputMap absolute input map path
* @return {@code true} if outside start directory, otherwise {@code false}
*/
/**
 * Check whether a path falls outside the start document directory, i.e. its
 * path relative to the input map is non-empty and climbs upward ("..").
 *
 * @param filePathName absolute path to test
 * @param inputMap absolute input map path
 * @return {@code true} if outside the start directory, otherwise {@code false}
 */
private static boolean isOutFile(final File filePathName, final File inputMap) {
    final String rel = getRelativePath(inputMap.getAbsoluteFile(), filePathName.getAbsoluteFile()).getPath();
    // Equivalent to the De Morgan expansion of !(rel.isEmpty() || !rel.startsWith(".."))
    return !rel.isEmpty() && rel.startsWith("..");
}
}
|
src/main/java/org/dita/dost/module/DebugAndFilterModule.java
|
/*
* This file is part of the DITA Open Toolkit project.
* See the accompanying license.txt file for applicable licenses.
*/
/*
* (c) Copyright IBM Corp. 2004, 2005 All Rights Reserved.
*/
package org.dita.dost.module;
import static org.dita.dost.util.Constants.*;
import static org.dita.dost.util.FileUtils.getRelativePath;
import static org.dita.dost.util.FileUtils.getRelativeUnixPath;
import static org.dita.dost.util.FileUtils.resolve;
import static org.dita.dost.util.Job.*;
import static org.dita.dost.util.Configuration.*;
import static org.dita.dost.util.URLUtils.*;
import static org.dita.dost.util.FilterUtils.*;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.net.URI;
import java.util.*;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.transform.Result;
import javax.xml.transform.Source;
import javax.xml.transform.Transformer;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.dom.DOMSource;
import javax.xml.transform.sax.SAXSource;
import javax.xml.transform.stream.StreamResult;
import org.apache.xerces.xni.grammars.XMLGrammarPool;
import org.apache.xml.resolver.tools.CatalogResolver;
import org.dita.dost.exception.DITAOTException;
import org.dita.dost.exception.DITAOTXMLErrorHandler;
import org.dita.dost.log.MessageUtils;
import org.dita.dost.pipeline.AbstractPipelineInput;
import org.dita.dost.pipeline.AbstractPipelineOutput;
import org.dita.dost.reader.DitaValReader;
import org.dita.dost.reader.GrammarPoolManager;
import org.dita.dost.reader.SubjectSchemeReader;
import org.dita.dost.util.*;
import org.dita.dost.writer.*;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.xml.sax.*;
import org.xml.sax.helpers.XMLFilterImpl;
/**
* DebugAndFilterModule implement the second step in preprocess. It will insert debug
* information into every dita files and filter out the information that is not
* necessary.
*
* @author Zhang, Yuan Peng
*/
public final class DebugAndFilterModule extends AbstractPipelineModuleImpl {
/** Generate {@code xtrf} and {@code xtrc} attributes */
final boolean genDebugInfo = Boolean.parseBoolean(Configuration.configuration.get("generate-debug-attributes"));
/** Absolute input map path. */
private File inputMap;
/** use grammar pool cache */
private boolean gramcache = true;
private boolean setSystemId;
/** Profiling is enabled. */
private boolean profilingEnabled;
private boolean validate;
private String transtype;
private boolean forceUnique;
/** Absolute DITA-OT base path. */
private File ditaDir;
private File ditavalFile;
/** Absolute input directory path. */
private File inputDir;
private FilterUtils filterUtils;
/** Absolute path to current destination file. */
private File outputFile;
private Map<String, Map<String, Set<String>>> validateMap;
private Map<String, Map<String, String>> defaultValueMap;
/** XMLReader instance for parsing dita file */
private XMLReader reader;
/** Absolute path to current source file. */
private File currentFile;
private Map<File, Set<File>> dic;
private SubjectSchemeReader subjectSchemeReader;
private FilterUtils baseFilterUtils;
private ForceUniqueFilter forceUniqueFilter;
private DitaWriterFilter ditaWriterFilter;
@Override
public AbstractPipelineOutput execute(final AbstractPipelineInput input) throws DITAOTException {
if (logger == null) {
throw new IllegalStateException("Logger not set");
}
try {
readArguments(input);
init();
for (final FileInfo f: job.getFileInfo()) {
if (ATTR_FORMAT_VALUE_DITA.equals(f.format) || ATTR_FORMAT_VALUE_DITAMAP.equals(f.format)
|| f.isConrefTarget || f.isCopyToSource) {
processFile(f);
}
}
performCopytoTask();
job.write();
} catch (final Exception e) {
e.printStackTrace();
throw new DITAOTException("Exception doing debug and filter module processing: " + e.getMessage(), e);
}
return null;
}
private void processFile(final FileInfo f) {
currentFile = new File(inputDir, f.file.getPath());
if (!currentFile.exists()) {
// Assuming this is an copy-to target file, ignore it
logger.debug("Ignoring a copy-to file " + f.file);
return;
}
outputFile = new File(job.tempDir, f.file.getPath());
final File outputDir = outputFile.getParentFile();
if (!outputDir.exists() && !outputDir.mkdirs()) {
logger.error("Failed to create output directory " + outputDir.getAbsolutePath());
return;
}
logger.info("Processing " + currentFile.getAbsolutePath());
final Set<File> schemaSet = dic.get(f.file);
if (schemaSet != null && !schemaSet.isEmpty()) {
logger.debug("Loading subject schemes");
subjectSchemeReader.reset();
for (final File schema : schemaSet) {
subjectSchemeReader.loadSubjectScheme(new File(FileUtils.resolve(job.tempDir.getAbsolutePath(), schema.getPath()) + SUBJECT_SCHEME_EXTENSION));
}
validateMap = subjectSchemeReader.getValidValuesMap();
defaultValueMap = subjectSchemeReader.getDefaultValueMap();
} else {
validateMap = Collections.EMPTY_MAP;
defaultValueMap = Collections.EMPTY_MAP;
}
if (profilingEnabled) {
filterUtils = baseFilterUtils.refine(subjectSchemeReader.getSubjectSchemeMap());
}
OutputStream out = null;
try {
out = new FileOutputStream(outputFile);
reader.setErrorHandler(new DITAOTXMLErrorHandler(currentFile.getAbsolutePath(), logger));
final TransformerFactory tf = TransformerFactory.newInstance();
final Transformer serializer = tf.newTransformer();
XMLReader xmlSource = reader;
for (final XMLFilter filter: getProcessingPipe(currentFile, f.file)) {
filter.setParent(xmlSource);
xmlSource = filter;
}
// ContentHandler must be reset so e.g. Saxon 9.1 will reassign ContentHandler
// when reusing filter with multiple Transformers.
xmlSource.setContentHandler(null);
final Source source = new SAXSource(xmlSource, new InputSource(currentFile.toURI().toString()));
final Result result = new StreamResult(out);
serializer.transform(source, result);
} catch (final RuntimeException e) {
throw e;
} catch (final Exception e) {
logger.error(e.getMessage(), e) ;
} finally {
if (out != null) {
try {
out.close();
}catch (final Exception e) {
logger.error(e.getMessage(), e) ;
}
}
}
}
private void init() throws IOException, DITAOTException, SAXException {
// Output subject schemas
outputSubjectScheme();
subjectSchemeReader = new SubjectSchemeReader();
subjectSchemeReader.setLogger(logger);
dic = SubjectSchemeReader.readMapFromXML(new File(job.tempDir, FILE_NAME_SUBJECT_DICTIONARY));
if (profilingEnabled) {
final DitaValReader filterReader = new DitaValReader();
filterReader.setLogger(logger);
filterReader.initXMLReader(setSystemId);
Map<FilterKey, Action> filterMap;
if (ditavalFile != null) {
filterReader.read(ditavalFile.getAbsoluteFile());
filterMap = filterReader.getFilterMap();
} else {
filterMap = Collections.EMPTY_MAP;
}
baseFilterUtils = new FilterUtils(printTranstype.contains(transtype), filterMap);
baseFilterUtils.setLogger(logger);
}
initXmlReader();
initFilters();
}
/**
* Init xml reader used for pipeline parsing.
*/
private void initXmlReader() throws SAXException {
CatalogUtils.setDitaDir(ditaDir);
reader = XMLUtils.getXMLReader();
if (validate) {
reader.setFeature(FEATURE_VALIDATION, true);
try {
reader.setFeature(FEATURE_VALIDATION_SCHEMA, true);
} catch (final SAXNotRecognizedException e) {
// Not Xerces, ignore exception
}
}
reader.setFeature(FEATURE_NAMESPACE, true);
final CatalogResolver resolver = CatalogUtils.getCatalogResolver();
reader.setEntityResolver(resolver);
if (gramcache) {
final XMLGrammarPool grammarPool = GrammarPoolManager.getGrammarPool();
try {
reader.setProperty("http://apache.org/xml/properties/internal/grammar-pool", grammarPool);
logger.info("Using Xerces grammar pool for DTD and schema caching.");
} catch (final NoClassDefFoundError e) {
logger.debug("Xerces not available, not using grammar caching");
} catch (final SAXNotRecognizedException e) {
logger.warn("Failed to set Xerces grammar pool for parser: " + e.getMessage());
} catch (final SAXNotSupportedException e) {
logger.warn("Failed to set Xerces grammar pool for parser: " + e.getMessage());
}
}
}
/**
* Initialize reusable filters.
*/
private void initFilters() {
forceUniqueFilter = new ForceUniqueFilter();
forceUniqueFilter.setLogger(logger);
forceUniqueFilter.setJob(job);
forceUniqueFilter.setEntityResolver(reader.getEntityResolver());
ditaWriterFilter = new DitaWriterFilter();
ditaWriterFilter.setLogger(logger);
ditaWriterFilter.setJob(job);
ditaWriterFilter.setEntityResolver(reader.getEntityResolver());
}
/**
* Get pipe line filters
*
* @param fileToParse absolute path to current file being processed
* @param inFile relative file path
*/
private List<XMLFilter> getProcessingPipe(final File fileToParse, final File inFile) {
final List<XMLFilter> pipe = new ArrayList<XMLFilter>();
if (genDebugInfo) {
final DebugFilter debugFilter = new DebugFilter();
debugFilter.setLogger(logger);
debugFilter.setInputFile(fileToParse);
pipe.add(debugFilter);
}
if (filterUtils != null) {
final ProfilingFilter profilingFilter = new ProfilingFilter();
profilingFilter.setLogger(logger);
profilingFilter.setFilterUtils(filterUtils);
pipe.add(profilingFilter);
}
final ValidationFilter validationFilter = new ValidationFilter();
validationFilter.setLogger(logger);
validationFilter.setValidateMap(validateMap);
validationFilter.setCurrentFile(toURI(inFile));
validationFilter.setJob(job);
pipe.add(validationFilter);
final NormalizeFilter normalizeFilter = new NormalizeFilter();
normalizeFilter.setLogger(logger);
pipe.add(normalizeFilter);
if (forceUnique) {
forceUniqueFilter.setCurrentFile(currentFile);
pipe.add(forceUniqueFilter);
}
ditaWriterFilter.setDefaultValueMap(defaultValueMap);
ditaWriterFilter.setCurrentFile(currentFile);
ditaWriterFilter.setOutputFile(outputFile);
pipe.add(ditaWriterFilter);
return pipe;
}
private void readArguments(AbstractPipelineInput input) {
final String baseDir = input.getAttribute(ANT_INVOKER_PARAM_BASEDIR);
ditaDir = new File(input.getAttribute(ANT_INVOKER_EXT_PARAM_DITADIR));
transtype = input.getAttribute(ANT_INVOKER_EXT_PARAM_TRANSTYPE);
profilingEnabled = true;
if (input.getAttribute(ANT_INVOKER_PARAM_PROFILING_ENABLED) != null) {
profilingEnabled = Boolean.parseBoolean(input.getAttribute(ANT_INVOKER_PARAM_PROFILING_ENABLED));
}
if (profilingEnabled) {
if (input.getAttribute(ANT_INVOKER_PARAM_DITAVAL) != null) {
ditavalFile = new File(input.getAttribute(ANT_INVOKER_PARAM_DITAVAL));
if (!ditavalFile.isAbsolute()) {
ditavalFile = new File(baseDir, ditavalFile.getPath()).getAbsoluteFile();
}
}
}
gramcache = "yes".equalsIgnoreCase(input.getAttribute(ANT_INVOKER_EXT_PARAM_GRAMCACHE));
validate = Boolean.valueOf(input.getAttribute("validate"));
setSystemId = "yes".equals(input.getAttribute(ANT_INVOKER_EXT_PARAN_SETSYSTEMID));
forceUnique = Boolean.valueOf(input.getAttribute(ANT_INVOKER_EXT_PARAN_FORCE_UNIQUE));
inputDir = job.getInputDir();
if (!inputDir.isAbsolute()) {
inputDir = new File(baseDir, inputDir.getPath()).getAbsoluteFile();
}
inputMap = new File(inputDir, job.getInputMap().getPath()).getAbsoluteFile();
}
/**
* Output subject schema file.
*
 * @throws DITAOTException if generation fails
*/
/**
 * Output subject scheme files: walk the subject scheme relation graph breadth-first,
 * merge child schemes with their parents, and serialize every scheme into the
 * temporary directory.
 *
 * @throws DITAOTException if reading, merging, or serializing a scheme fails
 */
private void outputSubjectScheme() throws DITAOTException {
    try {
        // Parent -> children relation graph of subject scheme maps, written by an earlier step.
        final Map<File, Set<File>> graph = SubjectSchemeReader.readMapFromXML(new File(job.tempDir, FILE_NAME_SUBJECT_RELATION));
        final Queue<File> queue = new LinkedList<File>(graph.keySet());
        final Set<File> visitedSet = new HashSet<File>();
        final DocumentBuilder builder = XMLUtils.getDocumentBuilder();
        builder.setEntityResolver(CatalogUtils.getCatalogResolver());
        while (!queue.isEmpty()) {
            final File parent = queue.poll();
            final Set<File> children = graph.get(parent);
            if (children != null) {
                queue.addAll(children);
            }
            // "ROOT" is a synthetic graph root, not a real file; skip it and visited nodes.
            if (new File("ROOT").equals(parent) || visitedSet.contains(parent)) {
                continue;
            }
            visitedSet.add(parent);
            File tmprel = new File(FileUtils.resolve(job.tempDir, parent) + SUBJECT_SCHEME_EXTENSION);
            Document parentRoot;
            if (!tmprel.exists()) {
                // Resolve scheme sources against the input *directory*, not the input
                // map's parent, so schemes referenced from a non-root input map are
                // still found (fix for non-root subject scheme map processing, #1826).
                final File src = new File(job.getInputDir(), parent.getPath());
                parentRoot = builder.parse(src);
            } else {
                parentRoot = builder.parse(tmprel);
            }
            if (children != null) {
                for (final File childpath: children) {
                    final Document childRoot = builder.parse(new File(inputMap.getParentFile(), childpath.getPath()));
                    // Propagate subject definitions between parent and child, then persist the child.
                    mergeScheme(parentRoot, childRoot);
                    generateScheme(new File(job.tempDir, childpath.getPath() + SUBJECT_SCHEME_EXTENSION), childRoot);
                }
            }
            //Output parent scheme
            generateScheme(new File(job.tempDir.getAbsoluteFile(), parent.getPath() + SUBJECT_SCHEME_EXTENSION), parentRoot);
        }
    } catch (final Exception e) {
        logger.error(e.getMessage(), e) ;
        throw new DITAOTException(e);
    }
}
private void mergeScheme(final Document parentRoot, final Document childRoot) {
final Queue<Element> pQueue = new LinkedList<Element>();
pQueue.offer(parentRoot.getDocumentElement());
while (!pQueue.isEmpty()) {
final Element pe = pQueue.poll();
NodeList pList = pe.getChildNodes();
for (int i = 0; i < pList.getLength(); i++) {
final Node node = pList.item(i);
if (node.getNodeType() == Node.ELEMENT_NODE) {
pQueue.offer((Element)node);
}
}
String value = pe.getAttribute(ATTRIBUTE_NAME_CLASS);
if (StringUtils.isEmptyString(value)
|| !SUBJECTSCHEME_SUBJECTDEF.matches(value)) {
continue;
}
if (!StringUtils.isEmptyString(
value = pe.getAttribute(ATTRIBUTE_NAME_KEYREF))) {
// extend child scheme
final Element target = searchForKey(childRoot.getDocumentElement(), value);
if (target == null) {
/*
* TODO: we have a keyref here to extend into child scheme, but can't
* find any matching <subjectdef> in child scheme. Shall we throw out
* a warning?
*
* Not for now, just bypass it.
*/
continue;
}
// target found
pList = pe.getChildNodes();
for (int i = 0; i < pList.getLength(); i++) {
final Node tmpnode = childRoot.importNode(pList.item(i), false);
if (tmpnode.getNodeType() == Node.ELEMENT_NODE
&& searchForKey(target,
((Element)tmpnode).getAttribute(ATTRIBUTE_NAME_KEYS)) != null) {
continue;
}
target.appendChild(tmpnode);
}
} else if (!StringUtils.isEmptyString(
value = pe.getAttribute(ATTRIBUTE_NAME_KEYS))) {
// merge into parent scheme
final Element target = searchForKey(childRoot.getDocumentElement(), value);
if (target != null) {
pList = target.getChildNodes();
for (int i = 0; i < pList.getLength(); i++) {
final Node tmpnode = parentRoot.importNode(pList.item(i), false);
if (tmpnode.getNodeType() == Node.ELEMENT_NODE
&& searchForKey(pe,
((Element)tmpnode).getAttribute(ATTRIBUTE_NAME_KEYS)) != null) {
continue;
}
pe.appendChild(tmpnode);
}
}
}
}
}
private Element searchForKey(final Element root, final String key) {
if (root == null || StringUtils.isEmptyString(key)) {
return null;
}
final Queue<Element> queue = new LinkedList<Element>();
queue.offer(root);
while (!queue.isEmpty()) {
final Element pe = queue.poll();
final NodeList pchildrenList = pe.getChildNodes();
for (int i = 0; i < pchildrenList.getLength(); i++) {
final Node node = pchildrenList.item(i);
if (node.getNodeType() == Node.ELEMENT_NODE) {
queue.offer((Element)node);
}
}
String value = pe.getAttribute(ATTRIBUTE_NAME_CLASS);
if (StringUtils.isEmptyString(value)
|| !SUBJECTSCHEME_SUBJECTDEF.matches(value)) {
continue;
}
value = pe.getAttribute(ATTRIBUTE_NAME_KEYS);
if (StringUtils.isEmptyString(value)) {
continue;
}
if (value.equals(key)) {
return pe;
}
}
return null;
}
/**
* Serialize subject scheme file.
*
* @param filename output filepath
* @param root subject scheme document
*
* @throws DITAOTException if generation fails
*/
private void generateScheme(final File filename, final Document root) throws DITAOTException {
final File p = filename.getParentFile();
if (!p.exists() && !p.mkdirs()) {
throw new DITAOTException("Failed to make directory " + p.getAbsolutePath());
}
FileOutputStream out = null;
try {
out = new FileOutputStream(filename);
final StreamResult res = new StreamResult(out);
final DOMSource ds = new DOMSource(root);
final TransformerFactory tff = TransformerFactory.newInstance();
final Transformer tf = tff.newTransformer();
tf.transform(ds, res);
} catch (final Exception e) {
logger.error(e.getMessage(), e) ;
throw new DITAOTException(e);
} finally {
if (out != null) {
try {
out.close();
} catch (IOException e) {
throw new DITAOTException(e);
}
}
}
}
/**
* Execute copy-to task, generate copy-to targets base on sources
*/
private void performCopytoTask() {
final Map<File, File> copytoMap = new HashMap<File, File>();
for (final Map.Entry<URI, URI> e: job.getCopytoMap().entrySet()) {
copytoMap.put(toFile(e.getKey()), toFile(e.getValue()));
}
if (forceUniqueFilter != null) {
copytoMap.putAll(forceUniqueFilter.copyToMap);
}
for (final Map.Entry<File, File> entry: copytoMap.entrySet()) {
final File copytoTarget = entry.getKey();
final File copytoSource = entry.getValue();
final File srcFile = new File(job.tempDir, copytoSource.getPath());
final File targetFile = new File(job.tempDir, copytoTarget.getPath());
if (targetFile.exists()) {
logger.warn(MessageUtils.getInstance().getMessage("DOTX064W", copytoTarget.getPath()).toString());
} else {
final File inputMapInTemp = new File(job.tempDir, job.getInputMap().getPath()).getAbsoluteFile();
copyFileWithPIReplaced(srcFile, targetFile, copytoTarget, inputMapInTemp);
// add new file info into job
final FileInfo src = job.getFileInfo(toURI(copytoSource));
final FileInfo.Builder b = src != null ? new FileInfo.Builder(src) : new FileInfo.Builder();
final FileInfo dst = b.uri(toURI(copytoTarget)).isCopyToSource(false).build();
job.add(dst);
}
}
}
/**
* Copy files and replace workdir PI contents.
*
* @param src
* @param target
* @param copytoTargetFilename
* @param inputMapInTemp
*/
public void copyFileWithPIReplaced(final File src, final File target, final File copytoTargetFilename, final File inputMapInTemp) {
if (!target.getParentFile().exists() && !target.getParentFile().mkdirs()) {
logger.error("Failed to create copy-to target directory " + target.getParentFile().getAbsolutePath());
return;
}
final File path2project = DebugAndFilterModule.getPathtoProject(copytoTargetFilename, target, inputMapInTemp, job);
final File workdir = target.getParentFile();
XMLFilter filter = new CopyToFilter(workdir, path2project);
logger.info("Processing " + src.getAbsolutePath() + " to " + target.getAbsolutePath());
try {
XMLUtils.transform(src, target, Arrays.asList(filter));
} catch (final DITAOTException e) {
logger.error("Failed to write copy-to file: " + e.getMessage(), e);
}
}
/**
* XML filter to rewrite processing instructions to reflect copy-to location. The following processing-instructions are
* processed:
*
* <ul>
* <li>{@link Constants#PI_WORKDIR_TARGET PI_WORKDIR_TARGET}</li>
* <li>{@link Constants#PI_WORKDIR_TARGET_URI PI_WORKDIR_TARGET_URI}</li>
* <li>{@link Constants#PI_PATH2PROJ_TARGET PI_PATH2PROJ_TARGET}</li>
* <li>{@link Constants#PI_PATH2PROJ_TARGET_URI PI_PATH2PROJ_TARGET_URI}</li>
* </ul>
*/
private static final class CopyToFilter extends XMLFilterImpl {
private final File workdir;
private final File path2project;
CopyToFilter(final File workdir, final File path2project) {
super();
this.workdir = workdir;
this.path2project = path2project;
}
@Override
public void processingInstruction(final String target, final String data) throws SAXException {
String d = data;
if(target.equals(PI_WORKDIR_TARGET)) {
if (workdir != null) {
try {
if (!OS_NAME.toLowerCase().contains(OS_NAME_WINDOWS)) {
d = workdir.getCanonicalPath();
} else {
d = UNIX_SEPARATOR + workdir.getCanonicalPath();
}
} catch (final IOException e) {
throw new RuntimeException("Failed to get canonical path for working directory: " + e.getMessage(), e);
}
}
} else if(target.equals(PI_WORKDIR_TARGET_URI)) {
if (workdir != null) {
d = workdir.toURI().toString();
}
} else if (target.equals(PI_PATH2PROJ_TARGET)) {
if (path2project != null) {
d = path2project.getPath();
}
} else if (target.equals(PI_PATH2PROJ_TARGET_URI)) {
if (path2project != null) {
d = toURI(path2project).toString();
if (!d.endsWith(URI_SEPARATOR)) {
d = d + URI_SEPARATOR;
}
}
}
getContentHandler().processingInstruction(target, d);
}
}
/**
* Get path to base directory
*
* @param filename relative input file path from base directory
* @param traceFilename absolute input file
* @param inputMap absolute path to start file
* @return path to base directory, {@code null} if not available
*/
public static File getPathtoProject(final File filename, final File traceFilename, final File inputMap, final Job job) {
if (job.getGeneratecopyouter() != Job.Generate.OLDSOLUTION) {
if (isOutFile(traceFilename, inputMap)) {
return toFile(getRelativePathFromOut(traceFilename.getAbsoluteFile(), job));
} else {
return new File(getRelativeUnixPath(traceFilename.getAbsolutePath(), inputMap.getAbsolutePath())).getParentFile();
}
} else {
return getRelativePath(filename);
}
}
/**
 * Compute the relative path from an overflowing file (one that falls outside
 * the start directory) back to the output directory. Just for the overflowing files.
 *
 * @param overflowingFile overflowing file
 * @param job current job configuration
 * @return relative path to out, always ending with a file separator
 */
public static String getRelativePathFromOut(final File overflowingFile, final Job job) {
    final File inputRelative = getRelativePath(job.getInputFile(), overflowingFile);
    final File outDir = job.getOutputDir().getAbsoluteFile();
    // "index.html" is only a placeholder file name used to anchor the relative computation.
    final File outAnchor = new File(outDir, "index.html");
    final File resolvedOutFile = resolve(outDir, inputRelative.getPath());
    final File relativeToAnchor = getRelativePath(resolvedOutFile, outAnchor);
    File parent = relativeToAnchor.getParentFile();
    if (parent == null || parent.getPath().isEmpty()) {
        parent = new File(".");
    }
    return parent.getPath() + File.separator;
}
/**
 * Check if path falls outside start document directory
 *
 * @param filePathName absolute path to test
 * @param inputMap absolute input map path
 * @return {@code true} if outside start directory, otherwise {@code false}
 */
private static boolean isOutFile(final File filePathName, final File inputMap) {
    final String relative = getRelativePath(inputMap.getAbsoluteFile(), filePathName.getAbsoluteFile()).getPath();
    // A non-empty relative path that climbs above the map directory marks an "out" file.
    return !relative.isEmpty() && relative.startsWith("..");
}
}
|
Fix non-root subject scheme map processing #1826
|
src/main/java/org/dita/dost/module/DebugAndFilterModule.java
|
Fix non-root subject scheme map processing #1826
|
|
Java
|
apache-2.0
|
fec4641cdc4e7abf620f2058ca15f0abc068efd6
| 0
|
lastaflute/lastaflute
|
/*
* Copyright 2014-2015 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific language
* governing permissions and limitations under the License.
*/
package org.lastaflute.web;
import java.util.function.Supplier;
import javax.annotation.Resource;
import org.dbflute.jdbc.Classification;
import org.dbflute.optional.OptionalThing;
import org.dbflute.util.Srl;
import org.lastaflute.core.exception.ExceptionTranslator;
import org.lastaflute.core.exception.LaApplicationException;
import org.lastaflute.core.time.TimeManager;
import org.lastaflute.db.dbflute.accesscontext.AccessContextArranger;
import org.lastaflute.web.api.ApiManager;
import org.lastaflute.web.callback.ActionHook;
import org.lastaflute.web.callback.ActionRuntime;
import org.lastaflute.web.callback.TypicalEmbeddedKeySupplier;
import org.lastaflute.web.callback.TypicalGodHandActionEpilogue;
import org.lastaflute.web.callback.TypicalGodHandMonologue;
import org.lastaflute.web.callback.TypicalGodHandPrologue;
import org.lastaflute.web.callback.TypicalGodHandResource;
import org.lastaflute.web.callback.TypicalKey.TypicalSimpleEmbeddedKeySupplier;
import org.lastaflute.web.exception.ActionApplicationExceptionHandler;
import org.lastaflute.web.exception.ForcedIllegalTransitionApplicationException;
import org.lastaflute.web.exception.ForcedRequest404NotFoundException;
import org.lastaflute.web.login.LoginManager;
import org.lastaflute.web.login.UserBean;
import org.lastaflute.web.response.ActionResponse;
import org.lastaflute.web.response.HtmlResponse;
import org.lastaflute.web.servlet.request.RequestManager;
import org.lastaflute.web.servlet.request.ResponseManager;
import org.lastaflute.web.servlet.session.SessionManager;
import org.lastaflute.web.util.LaActionRuntimeUtil;
import org.lastaflute.web.util.LaDBFluteUtil;
import org.lastaflute.web.util.LaDBFluteUtil.ClassificationConvertFailureException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * The typical action for your project. <br>
 * You should extend this class when making your project-base action. <br>
 * And you can add methods for all applications.
 * @author jflute
 */
public abstract class TypicalAction extends LastaAction implements ActionHook {
    // ===================================================================================
    //                                                                          Definition
    //                                                                          ==========
    private static final Logger logger = LoggerFactory.getLogger(TypicalAction.class);
    // ===================================================================================
    //                                                                           Attribute
    //                                                                           =========
    /** The manager of time. (NotNull) */
    @Resource
    private TimeManager timeManager;
    /** The translator of exception. (NotNull) */
    @Resource
    private ExceptionTranslator exceptionTranslator;
    /** The manager of request. (NotNull) */
    @Resource
    private RequestManager requestManager;
    /** The manager of response. (NotNull) */
    @Resource
    private ResponseManager responseManager;
    /** The manager of session. (NotNull) */
    @Resource
    private SessionManager sessionManager;
    /** The manager of API. (NotNull) */
    @Resource
    private ApiManager apiManager;
    // ===================================================================================
    //                                                                                Hook
    //                                                                                ====
    // [typical callback process]
    // read the source code for the details
    // (because of no comment here)
    // -----------------------------------------------------
    //                                                Before
    //                                                ------
    /** {@inheritDoc} Fixed framework prologue; customize via the factory methods, not by overriding this. */
    @Override
    public ActionResponse godHandPrologue(ActionRuntime runtimeMeta) { // fixed process
        return createTypicalGodHandPrologue().performPrologue(runtimeMeta);
    }
    /**
     * Create the prologue object that performs the fixed before-action process.
     * @return The new-created prologue. (NotNull)
     */
    protected TypicalGodHandPrologue createTypicalGodHandPrologue() {
        final TypicalGodHandResource resource = createTypicalGodHandResource();
        final AccessContextArranger arranger = newAccessContextArranger();
        return newTypicalGodHandPrologue(resource, arranger, () -> getUserBean(), () -> myAppType());
    }
    /**
     * New the arranger of access context.
     * @return The instance of arranger. (NotNull)
     */
    protected abstract AccessContextArranger newAccessContextArranger();
    /**
     * New the prologue instance. (overridable factory method)
     * @param resource The bundled managers for the god-hand process. (NotNull)
     * @param arranger The arranger of access context. (NotNull)
     * @param userBeanSupplier The supplier of login user bean. (NotNull)
     * @param appTypeSupplier The supplier of application type. (NotNull)
     * @return The new-created prologue. (NotNull)
     */
    protected TypicalGodHandPrologue newTypicalGodHandPrologue(TypicalGodHandResource resource, AccessContextArranger arranger,
            Supplier<OptionalThing<? extends UserBean>> userBeanSupplier, Supplier<String> appTypeSupplier) {
        return new TypicalGodHandPrologue(resource, arranger, userBeanSupplier, appTypeSupplier);
    }
    @Override
    public ActionResponse hookBefore(ActionRuntime runtimeMeta) { // application may override
        return ActionResponse.empty();
    }
    // -----------------------------------------------------
    //                                            on Failure
    //                                            ----------
    /** {@inheritDoc} Fixed framework process handling exceptions thrown by the action. */
    @Override
    public ActionResponse godHandMonologue(ActionRuntime runtimeMeta) { // fixed process
        return createTypicalGodHandMonologue().performMonologue(runtimeMeta);
    }
    /**
     * Create the monologue object that performs the fixed on-failure process.
     * @return The new-created monologue. (NotNull)
     */
    protected TypicalGodHandMonologue createTypicalGodHandMonologue() {
        final TypicalGodHandResource resource = createTypicalGodHandResource();
        final TypicalEmbeddedKeySupplier supplier = newTypicalEmbeddedKeySupplier();
        final ActionApplicationExceptionHandler handler = newActionApplicationExceptionHandler();
        return newTypicalGodHandMonologue(resource, supplier, handler);
    }
    /**
     * New the supplier of embedded message keys. (overridable factory method)
     * @return The new-created supplier. (NotNull)
     */
    protected TypicalEmbeddedKeySupplier newTypicalEmbeddedKeySupplier() {
        return new TypicalSimpleEmbeddedKeySupplier();
    }
    /**
     * New the handler that delegates to {@link #handleApplicationException(LaApplicationException)}.
     * @return The new-created handler. (NotNull)
     */
    protected ActionApplicationExceptionHandler newActionApplicationExceptionHandler() {
        return new ActionApplicationExceptionHandler() {
            public ActionResponse handle(LaApplicationException appEx) {
                return handleApplicationException(appEx);
            }
        };
    }
    /**
     * Handle the application exception before framework's handling process.
     * @param appEx The thrown application exception. (NotNull)
     * @return The response for the exception. (NullAllowed: if null, to next handling step)
     */
    protected ActionResponse handleApplicationException(LaApplicationException appEx) { // application may override
        return ActionResponse.empty();
    }
    /**
     * New the monologue instance. (overridable factory method)
     * @param resource The bundled managers for the god-hand process. (NotNull)
     * @param supplier The supplier of embedded message keys. (NotNull)
     * @param handler The handler of application exception. (NotNull)
     * @return The new-created monologue. (NotNull)
     */
    protected TypicalGodHandMonologue newTypicalGodHandMonologue(TypicalGodHandResource resource, TypicalEmbeddedKeySupplier supplier,
            ActionApplicationExceptionHandler handler) {
        return new TypicalGodHandMonologue(resource, supplier, handler);
    }
    // -----------------------------------------------------
    //                                               Finally
    //                                               -------
    @Override
    public void hookFinally(ActionRuntime runtimeMeta) { // application may override
    }
    /** {@inheritDoc} Fixed framework epilogue executed after the action process. */
    @Override
    public void godHandEpilogue(ActionRuntime runtimeMeta) { // fixed process
        createTypicalGodHandEpilogue().performEpilogue(runtimeMeta);
    }
    /**
     * Create the epilogue object that performs the fixed after-action process.
     * @return The new-created epilogue. (NotNull)
     */
    protected TypicalGodHandActionEpilogue createTypicalGodHandEpilogue() {
        return newTypicalGodHandEpilogue(createTypicalGodHandResource());
    }
    /**
     * New the epilogue instance. (overridable factory method)
     * @param resource The bundled managers for the god-hand process. (NotNull)
     * @return The new-created epilogue. (NotNull)
     */
    protected TypicalGodHandActionEpilogue newTypicalGodHandEpilogue(TypicalGodHandResource resource) {
        return new TypicalGodHandActionEpilogue(resource);
    }
    // -----------------------------------------------------
    //                                      Resource Factory
    //                                      ----------------
    /**
     * Create the resource bundle of managers handed to the god-hand objects.
     * @return The new-created resource. (NotNull)
     */
    protected TypicalGodHandResource createTypicalGodHandResource() {
        final OptionalThing<LoginManager> loginManager = myLoginManager();
        return new TypicalGodHandResource(requestManager, responseManager, sessionManager, loginManager, apiManager, exceptionTranslator);
    }
    // ===================================================================================
    //                                                                           User Info
    //                                                                           =========
    /**
     * Get the bean of login user on session as interface type. (for application)
     * @return The optional thing of found user bean. (NotNull, EmptyAllowed: when not login)
     */
    protected abstract OptionalThing<? extends UserBean> getUserBean();
    /**
     * Get the application type, e.g. for common column.
     * @return The application type basically fixed string. (NotNull)
     */
    protected abstract String myAppType();
    /**
     * Get the user type of this application's login.
     * @return The optional expression of user type. (NotNull, EmptyAllowed: if no login handling)
     */
    protected abstract OptionalThing<String> myUserType();
    /**
     * Get my (application's) login manager. (for framework)
     * @return The optional instance of login manager. (NotNull, EmptyAllowed: if no login handling)
     */
    protected abstract OptionalThing<LoginManager> myLoginManager();
    // ===================================================================================
    //                                                                              Verify
    //                                                                              ======
    // -----------------------------------------------------
    //                                      Verify Parameter
    //                                      ----------------
    // null and empty string are both treated as "not found"
    protected void verifyParameterExists(Object parameter) { // application may call
        if (parameter == null || (parameter instanceof String && ((String) parameter).isEmpty())) {
            handleParameterFailure("Not found the parameter: " + parameter);
        }
    }
    protected void verifyParameterTrue(String msg, boolean expectedBool) { // application may call
        if (!expectedBool) {
            handleParameterFailure(msg);
        }
    }
    protected void handleParameterFailure(String msg) {
        // no server error because it can occur by user's trick easily e.g. changing GET parameter
        lets404(msg);
    }
    // -----------------------------------------------------
    //                                         Verify or ...
    //                                         -------------
    /**
     * Check the condition is true or it throws 404 not found forcedly. <br>
     * You can use this in your action process against invalid URL parameters.
     * @param msg The message for exception message. (NotNull)
     * @param expectedBool The expected determination for your business, true or false. (false: 404 not found)
     */
    protected void verifyTrueOr404NotFound(String msg, boolean expectedBool) { // application may call
        if (!expectedBool) {
            lets404(msg);
        }
    }
    /**
     * Check the condition is true or it throws illegal transition forcedly. <br>
     * You can use this in your action process against strange request parameters.
     * @param msg The message for exception message. (NotNull)
     * @param expectedBool The expected determination for your business, true or false. (false: illegal transition)
     */
    protected void verifyTrueOrIllegalTransition(String msg, boolean expectedBool) { // application may call
        if (!expectedBool) {
            letsIllegalTransition(msg);
        }
    }
    protected HtmlResponse lets404(String msg) { // e.g. used by error handling of validation for GET parameter
        throw new ForcedRequest404NotFoundException(msg);
    }
    protected void letsIllegalTransition(String msg) {
        final String transitionKey = newTypicalEmbeddedKeySupplier().getErrorsAppIllegalTransitionKey();
        throw new ForcedIllegalTransitionApplicationException(msg, transitionKey);
    }
    // ===================================================================================
    //                                                                        Small Helper
    //                                                                        ============
    protected boolean isEmpty(String str) {
        return Srl.is_Null_or_Empty(str);
    }
    protected boolean isNotEmpty(String str) {
        return Srl.is_NotNull_and_NotEmpty(str);
    }
    // -----------------------------------------------------
    //                                        Classification
    //                                        --------------
    /**
     * Does the code exist in the classification?
     * @param cdefType The type of classification definition. (NotNull)
     * @param code The code to be checked. (NullAllowed: if null, returns false)
     * @return The determination, true or false.
     */
    protected boolean isCls(Class<? extends Classification> cdefType, Object code) {
        assertArgumentNotNull("cdefType", cdefType);
        return LaDBFluteUtil.invokeClassificationCodeOf(cdefType, code) != null;
    }
    /**
     * Convert the code to the verified classification.
     * @param <CLS> The type of classification.
     * @param cdefType The type of classification definition. (NotNull)
     * @param code The code to be converted. (NullAllowed: if null or empty, returns empty optional)
     * @return The optional classification for the code. (NotNull, EmptyAllowed)
     */
    protected <CLS extends Classification> OptionalThing<CLS> toCls(Class<CLS> cdefType, Object code) {
        assertArgumentNotNull("cdefType", cdefType);
        if (code == null || (code instanceof String && isEmpty((String) code))) {
            return OptionalThing.ofNullable(null, () -> {
                throw new IllegalStateException("Not found the classification code for " + cdefType.getName() + ": " + code);
            });
        }
        try {
            @SuppressWarnings("unchecked")
            final CLS cls = (CLS) LaDBFluteUtil.toVerifiedClassification(cdefType, code);
            return OptionalThing.of(cls);
        } catch (ClassificationConvertFailureException e) {
            // conversion failure becomes 404 because the code comes from the client request
            final StringBuilder sb = new StringBuilder();
            sb.append("Cannot convert the code to the classification:");
            sb.append("\n[Classification Convert Failure]");
            try {
                sb.append("\n").append(LaActionRuntimeUtil.getActionRuntime());
            } catch (RuntimeException continued) { // just in case
                logger.info("Not found the action runtime when toCls() called: " + cdefType.getName() + ", " + code, continued);
            }
            sb.append("\ncode=").append(code);
            //sb.append("\n").append(e.getClass().getName()).append("\n").append(e.getMessage());
            final String msg = sb.toString();
            throw new ForcedRequest404NotFoundException(msg, e);
        }
    }
    // ===================================================================================
    //                                                                            Document
    //                                                                            ========
    // TODO jflute lastaflute: [C] function: make document()
    /**
     * <pre>
     * [AtMark]Execute
     * public HtmlResponse index() {
     *     ListResultBean&lt;Product&gt; memberList = productBhv.selectList(cb -&gt; {
     *         cb.query().addOrderBy_RegularPrice_Desc();
     *         cb.fetchFirst(3);
     *     });
     *     List&lt;MypageProductBean&gt; beans = memberList.stream().map(member -&gt; {
     *         return new MypageProductBean(member);
     *     }).collect(Collectors.toList());
     *     return asHtml(path_Mypage_MypageJsp).renderWith(data -&gt; {
     *         data.register("beans", beans);
     *     });
     * }
     * </pre>
     */
    protected void documentOfAll() {
    }
    /**
     * <pre>
     * o validate(form, error call): Hibernate Validator's Annotation only
     * o validateMore(form, your validation call, error call): annotation + by-method validation
     *
     * o asHtml(HTML template): return response as HTML by template e.g. JSP
     * o asJson(JSON bean): return response as JSON from bean
     * o asStream(input stream): return response as stream from input stream
     * </pre>
     */
    protected void documentOfMethods() {
    }
    /**
     * <pre>
     * o Cls  : is Classification (CDef)
     * o CDef : is auto-generated ENUM as Classification Definition
     * </pre>
     */
    protected void documentOfWordDictionary() {
    }
}
|
src/main/java/org/lastaflute/web/TypicalAction.java
|
/*
* Copyright 2014-2015 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific language
* governing permissions and limitations under the License.
*/
package org.lastaflute.web;
import java.util.function.Supplier;
import javax.annotation.Resource;
import org.dbflute.jdbc.Classification;
import org.dbflute.optional.OptionalThing;
import org.dbflute.util.Srl;
import org.lastaflute.core.exception.ExceptionTranslator;
import org.lastaflute.core.exception.LaApplicationException;
import org.lastaflute.core.time.TimeManager;
import org.lastaflute.db.dbflute.accesscontext.AccessContextArranger;
import org.lastaflute.web.api.ApiManager;
import org.lastaflute.web.callback.ActionHook;
import org.lastaflute.web.callback.ActionRuntime;
import org.lastaflute.web.callback.TypicalEmbeddedKeySupplier;
import org.lastaflute.web.callback.TypicalGodHandActionEpilogue;
import org.lastaflute.web.callback.TypicalGodHandMonologue;
import org.lastaflute.web.callback.TypicalGodHandPrologue;
import org.lastaflute.web.callback.TypicalGodHandResource;
import org.lastaflute.web.callback.TypicalKey.TypicalSimpleEmbeddedKeySupplier;
import org.lastaflute.web.exception.ActionApplicationExceptionHandler;
import org.lastaflute.web.exception.ForcedIllegalTransitionApplicationException;
import org.lastaflute.web.exception.ForcedRequest404NotFoundException;
import org.lastaflute.web.login.LoginManager;
import org.lastaflute.web.login.UserBean;
import org.lastaflute.web.response.ActionResponse;
import org.lastaflute.web.response.HtmlResponse;
import org.lastaflute.web.servlet.request.RequestManager;
import org.lastaflute.web.servlet.request.ResponseManager;
import org.lastaflute.web.servlet.session.SessionManager;
import org.lastaflute.web.util.LaActionRuntimeUtil;
import org.lastaflute.web.util.LaDBFluteUtil;
import org.lastaflute.web.util.LaDBFluteUtil.ClassificationConvertFailureException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * The typical action for your project. <br>
 * You should extend this class when making your project-base action. <br>
 * And you can add methods for all applications.
 * @author jflute
 */
public abstract class TypicalAction extends LastaAction implements ActionHook {
    // ===================================================================================
    //                                                                          Definition
    //                                                                          ==========
    private static final Logger logger = LoggerFactory.getLogger(TypicalAction.class);
    // ===================================================================================
    //                                                                           Attribute
    //                                                                           =========
    /** The manager of time. (NotNull) */
    @Resource
    private TimeManager timeManager;
    /** The translator of exception. (NotNull) */
    @Resource
    private ExceptionTranslator exceptionTranslator;
    /** The manager of request. (NotNull) */
    @Resource
    private RequestManager requestManager;
    /** The manager of response. (NotNull) */
    @Resource
    private ResponseManager responseManager;
    /** The manager of session. (NotNull) */
    @Resource
    private SessionManager sessionManager;
    /** The manager of API. (NotNull) */
    @Resource
    private ApiManager apiManager;
    // ===================================================================================
    //                                                                                Hook
    //                                                                                ====
    // [typical callback process]
    // read the source code for the details
    // (because of no comment here)
    // -----------------------------------------------------
    //                                                Before
    //                                                ------
    /** {@inheritDoc} Fixed framework prologue; customize via the factory methods, not by overriding this. */
    @Override
    public ActionResponse godHandPrologue(ActionRuntime runtimeMeta) { // fixed process
        return createTypicalGodHandPrologue().performPrologue(runtimeMeta);
    }
    /**
     * Create the prologue object that performs the fixed before-action process.
     * @return The new-created prologue. (NotNull)
     */
    protected TypicalGodHandPrologue createTypicalGodHandPrologue() {
        final TypicalGodHandResource resource = createTypicalGodHandResource();
        final AccessContextArranger arranger = newAccessContextArranger();
        return newTypicalGodHandPrologue(resource, arranger, () -> getUserBean(), () -> myAppType());
    }
    /**
     * New the arranger of access context.
     * @return The instance of arranger. (NotNull)
     */
    protected abstract AccessContextArranger newAccessContextArranger();
    /**
     * New the prologue instance. (overridable factory method)
     * @param resource The bundled managers for the god-hand process. (NotNull)
     * @param arranger The arranger of access context. (NotNull)
     * @param userBeanSupplier The supplier of login user bean. (NotNull)
     * @param appTypeSupplier The supplier of application type. (NotNull)
     * @return The new-created prologue. (NotNull)
     */
    protected TypicalGodHandPrologue newTypicalGodHandPrologue(TypicalGodHandResource resource, AccessContextArranger arranger,
            Supplier<OptionalThing<? extends UserBean>> userBeanSupplier, Supplier<String> appTypeSupplier) {
        return new TypicalGodHandPrologue(resource, arranger, userBeanSupplier, appTypeSupplier);
    }
    @Override
    public ActionResponse hookBefore(ActionRuntime runtimeMeta) { // application may override
        return ActionResponse.empty();
    }
    // -----------------------------------------------------
    //                                            on Failure
    //                                            ----------
    /** {@inheritDoc} Fixed framework process handling exceptions thrown by the action. */
    @Override
    public ActionResponse godHandMonologue(ActionRuntime runtimeMeta) { // fixed process
        return createTypicalGodHandMonologue().performMonologue(runtimeMeta);
    }
    /**
     * Create the monologue object that performs the fixed on-failure process.
     * @return The new-created monologue. (NotNull)
     */
    protected TypicalGodHandMonologue createTypicalGodHandMonologue() {
        final TypicalGodHandResource resource = createTypicalGodHandResource();
        final TypicalEmbeddedKeySupplier supplier = newTypicalEmbeddedKeySupplier();
        final ActionApplicationExceptionHandler handler = newActionApplicationExceptionHandler();
        return newTypicalGodHandMonologue(resource, supplier, handler);
    }
    /**
     * New the supplier of embedded message keys. (overridable factory method)
     * @return The new-created supplier. (NotNull)
     */
    protected TypicalEmbeddedKeySupplier newTypicalEmbeddedKeySupplier() {
        return new TypicalSimpleEmbeddedKeySupplier();
    }
    /**
     * New the handler that delegates to {@link #handleApplicationException(LaApplicationException)}.
     * @return The new-created handler. (NotNull)
     */
    protected ActionApplicationExceptionHandler newActionApplicationExceptionHandler() {
        return new ActionApplicationExceptionHandler() {
            public ActionResponse handle(LaApplicationException appEx) {
                return handleApplicationException(appEx);
            }
        };
    }
    /**
     * Handle the application exception before framework's handling process.
     * @param appEx The thrown application exception. (NotNull)
     * @return The response for the exception. (NullAllowed: if null, to next handling step)
     */
    protected ActionResponse handleApplicationException(LaApplicationException appEx) { // application may override
        return ActionResponse.empty();
    }
    /**
     * New the monologue instance. (overridable factory method)
     * @param resource The bundled managers for the god-hand process. (NotNull)
     * @param supplier The supplier of embedded message keys. (NotNull)
     * @param handler The handler of application exception. (NotNull)
     * @return The new-created monologue. (NotNull)
     */
    protected TypicalGodHandMonologue newTypicalGodHandMonologue(TypicalGodHandResource resource, TypicalEmbeddedKeySupplier supplier,
            ActionApplicationExceptionHandler handler) {
        return new TypicalGodHandMonologue(resource, supplier, handler);
    }
    // -----------------------------------------------------
    //                                               Finally
    //                                               -------
    @Override
    public void hookFinally(ActionRuntime runtimeMeta) { // application may override
    }
    /** {@inheritDoc} Fixed framework epilogue executed after the action process. */
    @Override
    public void godHandEpilogue(ActionRuntime runtimeMeta) { // fixed process
        createTypicalGodHandEpilogue().performEpilogue(runtimeMeta);
    }
    /**
     * Create the epilogue object that performs the fixed after-action process.
     * @return The new-created epilogue. (NotNull)
     */
    protected TypicalGodHandActionEpilogue createTypicalGodHandEpilogue() {
        return newTypicalGodHandEpilogue(createTypicalGodHandResource());
    }
    /**
     * New the epilogue instance. (overridable factory method)
     * @param resource The bundled managers for the god-hand process. (NotNull)
     * @return The new-created epilogue. (NotNull)
     */
    protected TypicalGodHandActionEpilogue newTypicalGodHandEpilogue(TypicalGodHandResource resource) {
        return new TypicalGodHandActionEpilogue(resource);
    }
    // -----------------------------------------------------
    //                                      Resource Factory
    //                                      ----------------
    /**
     * Create the resource bundle of managers handed to the god-hand objects.
     * @return The new-created resource. (NotNull)
     */
    protected TypicalGodHandResource createTypicalGodHandResource() {
        final OptionalThing<LoginManager> loginManager = myLoginManager();
        return new TypicalGodHandResource(requestManager, responseManager, sessionManager, loginManager, apiManager, exceptionTranslator);
    }
    // ===================================================================================
    //                                                                           User Info
    //                                                                           =========
    /**
     * Get the bean of login user on session as interface type. (for application)
     * @return The optional thing of found user bean. (NotNull, EmptyAllowed: when not login)
     */
    protected abstract OptionalThing<? extends UserBean> getUserBean();
    /**
     * Get the application type, e.g. for common column.
     * @return The application type basically fixed string. (NotNull)
     */
    protected abstract String myAppType();
    /**
     * Get the user type of this application's login.
     * @return The optional expression of user type. (NotNull, EmptyAllowed: if no login handling)
     */
    protected abstract OptionalThing<String> myUserType();
    /**
     * Get my (application's) login manager. (for framework)
     * @return The optional instance of login manager. (NotNull, EmptyAllowed: if no login handling)
     */
    protected abstract OptionalThing<LoginManager> myLoginManager();
    // ===================================================================================
    //                                                                              Verify
    //                                                                              ======
    // -----------------------------------------------------
    //                                      Verify Parameter
    //                                      ----------------
    // null and empty string are both treated as "not found"
    protected void verifyParameterExists(Object parameter) { // application may call
        logger.debug("...Verifying the parameter exists: {}", parameter);
        if (parameter == null || (parameter instanceof String && ((String) parameter).isEmpty())) {
            handleParameterFailure("Not found the parameter: parameter=" + parameter);
        }
    }
    protected void verifyParameterTrue(String msg, boolean expectedBool) { // application may call
        logger.debug("...Verifying the parameter is true: {}", expectedBool);
        if (!expectedBool) {
            handleParameterFailure(msg);
        }
    }
    protected void handleParameterFailure(String msg) {
        // no server error because it can occur by user's trick easily e.g. changing GET parameter
        lets404(msg);
    }
    // -----------------------------------------------------
    //                                         Verify or ...
    //                                         -------------
    /**
     * Check the condition is true or it throws 404 not found forcedly. <br>
     * You can use this in your action process against invalid URL parameters.
     * @param msg The message for exception message. (NotNull)
     * @param expectedBool The expected determination for your business, true or false. (false: 404 not found)
     */
    protected void verifyTrueOr404NotFound(String msg, boolean expectedBool) { // application may call
        logger.debug("...Verifying the condition is true or 404 not found: {}", expectedBool);
        if (!expectedBool) {
            lets404(msg);
        }
    }
    /**
     * Check the condition is true or it throws illegal transition forcedly. <br>
     * You can use this in your action process against strange request parameters.
     * @param msg The message for exception message. (NotNull)
     * @param expectedBool The expected determination for your business, true or false. (false: illegal transition)
     */
    protected void verifyTrueOrIllegalTransition(String msg, boolean expectedBool) { // application may call
        logger.debug("...Verifying the condition is true or illegal transition: {}", expectedBool);
        if (!expectedBool) {
            letsIllegalTransition(msg);
        }
    }
    protected HtmlResponse lets404(String msg) { // e.g. used by error handling of validation for GET parameter
        throw new ForcedRequest404NotFoundException(msg);
    }
    protected void letsIllegalTransition(String msg) {
        final String transitionKey = newTypicalEmbeddedKeySupplier().getErrorsAppIllegalTransitionKey();
        throw new ForcedIllegalTransitionApplicationException(msg, transitionKey);
    }
    // ===================================================================================
    //                                                                        Small Helper
    //                                                                        ============
    protected boolean isEmpty(String str) {
        return Srl.is_Null_or_Empty(str);
    }
    protected boolean isNotEmpty(String str) {
        return Srl.is_NotNull_and_NotEmpty(str);
    }
    // -----------------------------------------------------
    //                                        Classification
    //                                        --------------
    /**
     * Does the code exist in the classification?
     * @param cdefType The type of classification definition. (NotNull)
     * @param code The code to be checked. (NullAllowed: if null, returns false)
     * @return The determination, true or false.
     */
    protected boolean isCls(Class<? extends Classification> cdefType, Object code) {
        assertArgumentNotNull("cdefType", cdefType);
        return LaDBFluteUtil.invokeClassificationCodeOf(cdefType, code) != null;
    }
    /**
     * Convert the code to the verified classification.
     * @param <CLS> The type of classification.
     * @param cdefType The type of classification definition. (NotNull)
     * @param code The code to be converted. (NullAllowed: if null or empty, returns empty optional)
     * @return The optional classification for the code. (NotNull, EmptyAllowed)
     */
    protected <CLS extends Classification> OptionalThing<CLS> toCls(Class<CLS> cdefType, Object code) {
        assertArgumentNotNull("cdefType", cdefType);
        if (code == null || (code instanceof String && isEmpty((String) code))) {
            return OptionalThing.ofNullable(null, () -> {
                throw new IllegalStateException("Not found the classification code for " + cdefType.getName() + ": " + code);
            });
        }
        try {
            @SuppressWarnings("unchecked")
            final CLS cls = (CLS) LaDBFluteUtil.toVerifiedClassification(cdefType, code);
            return OptionalThing.of(cls);
        } catch (ClassificationConvertFailureException e) {
            // conversion failure becomes 404 because the code comes from the client request
            final StringBuilder sb = new StringBuilder();
            sb.append("Cannot convert the code to the classification:");
            sb.append("\n[Classification Convert Failure]");
            try {
                sb.append("\n").append(LaActionRuntimeUtil.getActionRuntime());
            } catch (RuntimeException continued) { // just in case
                logger.info("Not found the action runtime when toCls() called: " + cdefType.getName() + ", " + code, continued);
            }
            sb.append("\ncode=").append(code);
            //sb.append("\n").append(e.getClass().getName()).append("\n").append(e.getMessage());
            final String msg = sb.toString();
            throw new ForcedRequest404NotFoundException(msg, e);
        }
    }
    // ===================================================================================
    //                                                                            Document
    //                                                                            ========
    // TODO jflute lastaflute: [C] function: make document()
    /**
     * <pre>
     * [AtMark]Execute
     * public HtmlResponse index() {
     *     ListResultBean&lt;Product&gt; memberList = productBhv.selectList(cb -&gt; {
     *         cb.query().addOrderBy_RegularPrice_Desc();
     *         cb.fetchFirst(3);
     *     });
     *     List&lt;MypageProductBean&gt; beans = memberList.stream().map(member -&gt; {
     *         return new MypageProductBean(member);
     *     }).collect(Collectors.toList());
     *     return asHtml(path_Mypage_MypageJsp).renderWith(data -&gt; {
     *         data.register("beans", beans);
     *     });
     * }
     * </pre>
     */
    protected void documentOfAll() {
    }
    /**
     * <pre>
     * o validate(form, error call): Hibernate Validator's Annotation only
     * o validateMore(form, your validation call, error call): annotation + by-method validation
     *
     * o asHtml(HTML template): return response as HTML by template e.g. JSP
     * o asJson(JSON bean): return response as JSON from bean
     * o asStream(input stream): return response as stream from input stream
     * </pre>
     */
    protected void documentOfMethods() {
    }
    /**
     * <pre>
     * o Cls  : is Classification (CDef)
     * o CDef : is auto-generated ENUM as Classification Definition
     * </pre>
     */
    protected void documentOfWordDictionary() {
    }
}
|
remove verify logging because of logging filter instead
|
src/main/java/org/lastaflute/web/TypicalAction.java
|
remove verify logging because of logging filter instead
|
|
Java
|
apache-2.0
|
e75ddd1ec1ada8d053177744d4777cffc8af03a8
| 0
|
hazuki0x0/YuzuBrowser,hazuki0x0/YuzuBrowser,hazuki0x0/YuzuBrowser,hazuki0x0/YuzuBrowser
|
/*
* Copyright (C) 2017 Hazuki
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package jp.hazuki.yuzubrowser.utils;
import android.net.Uri;
import android.text.TextUtils;
import java.net.IDN;
/**
 * Helpers for turning percent-encoded URLs and punycode hosts into a
 * human-readable form. Components whose decoded text contains the Unicode
 * replacement character are kept in their original encoded form.
 */
public class UrlUtils {
    /** Replacement character that signals a component failed to decode cleanly. */
    private static final char INVALID_CHAR = '\uFFFD';
    /**
     * Decode a URL string for display.
     * @param url the URL to decode; may be null
     * @return the decoded URL, or null when {@code url} is null
     */
    public static String decodeUrl(String url) {
        return url == null ? null : decodeUrl(Uri.parse(url));
    }
    /**
     * Decode a parsed URI for display. Opaque URIs (e.g. "mailto:") are
     * reassembled from scheme, scheme-specific part and fragment; hierarchical
     * URIs are rebuilt component by component with the host punycode-decoded.
     */
    public static String decodeUrl(Uri uri) {
        if (uri.isOpaque()) {
            StringBuilder result = new StringBuilder(uri.getScheme()).append(":");
            String ssp = uri.getSchemeSpecificPart();
            result.append(isValid(ssp) ? ssp : uri.getEncodedSchemeSpecificPart());
            String fragment = uri.getFragment();
            if (!TextUtils.isEmpty(fragment)) {
                result.append("#").append(isValid(fragment) ? fragment : uri.getEncodedFragment());
            }
            return result.toString();
        }
        Uri.Builder rebuilt = uri.buildUpon();
        if (isValid(uri.getQuery()))
            rebuilt.encodedQuery(uri.getQuery());
        if (isValid(uri.getFragment()))
            rebuilt.encodedFragment(uri.getFragment());
        if (isValid(uri.getPath()))
            rebuilt.encodedPath(uri.getPath());
        rebuilt.encodedAuthority(decodeAuthority(uri));
        return rebuilt.build().toString();
    }
    /**
     * Decode only the host portion of a URL.
     * @return the punycode-decoded host, or null when the URL has no host
     */
    public static String decodeUrlHost(String url) {
        String host = Uri.parse(url).getHost();
        return host == null ? null : decodePunyCode(host);
    }
    /** Convert an IDN (punycode) domain to its Unicode representation. */
    private static String decodePunyCode(String domain) {
        return IDN.toUnicode(domain);
    }
    /** Rebuild the authority ("user@host:port") with the host punycode-decoded. */
    private static String decodeAuthority(Uri uri) {
        String host = uri.getHost();
        if (TextUtils.isEmpty(host)) {
            return uri.getEncodedAuthority();
        }
        host = decodePunyCode(host);
        String userInfo = uri.getUserInfo();
        boolean hasUserInfo = !TextUtils.isEmpty(userInfo);
        int port = uri.getPort();
        if (!hasUserInfo && port == -1) {
            return host;
        }
        StringBuilder authority = new StringBuilder();
        if (hasUserInfo) {
            authority.append(isValid(userInfo) ? userInfo : uri.getEncodedUserInfo()).append("@");
        }
        authority.append(host);
        if (port > -1) {
            authority.append(":").append(port);
        }
        return authority.toString();
    }
    /** A component is usable in decoded form when it decoded without replacement characters. */
    private static boolean isValid(String str) {
        return str != null && str.indexOf(INVALID_CHAR) < 0;
    }
}
|
app/src/main/java/jp/hazuki/yuzubrowser/utils/UrlUtils.java
|
/*
* Copyright (C) 2017 Hazuki
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package jp.hazuki.yuzubrowser.utils;
import android.net.Uri;
import android.text.TextUtils;
import java.net.IDN;
/**
 * Helpers for turning percent-encoded / punycode-encoded URLs back into a
 * human-readable form, keeping any component that could not be decoded
 * safely in its original encoded form.
 */
public class UrlUtils {

    /** Placeholder character Uri substitutes when a percent-escape cannot be decoded. */
    private static final char INVALID_CHAR = '\uFFFD';

    /**
     * Decodes a URL string for display.
     *
     * @param url the URL to decode, may be null
     * @return the decoded URL, or null when the input is null
     */
    public static String decodeUrl(String url) {
        if (url == null) return null;
        return decodeUrl(Uri.parse(url));
    }

    /**
     * Decodes a Uri for display.
     * <p>
     * Fix: opaque URIs (e.g. mailto:, javascript:) have no hierarchical
     * components, so rebuilding them via Uri.Builder with
     * encodedQuery/encodedPath/encodedAuthority corrupts them. They are now
     * reassembled directly from scheme, scheme-specific part and fragment.
     */
    public static String decodeUrl(Uri uri) {
        if (uri.isOpaque()) {
            StringBuilder builder = new StringBuilder(uri.getScheme()).append(":");
            if (isValid(uri.getSchemeSpecificPart())) {
                builder.append(uri.getSchemeSpecificPart());
            } else {
                // decoded form contained replacement chars; keep the raw encoding
                builder.append(uri.getEncodedSchemeSpecificPart());
            }
            String fragment = uri.getFragment();
            if (!TextUtils.isEmpty(fragment)) {
                builder.append("#");
                if (isValid(fragment)) {
                    builder.append(fragment);
                } else {
                    builder.append(uri.getEncodedFragment());
                }
            }
            return builder.toString();
        } else {
            // Hierarchical URI: use each decoded component only if it decoded cleanly.
            Uri.Builder decode = uri.buildUpon();
            if (isValid(uri.getQuery()))
                decode.encodedQuery(uri.getQuery());
            if (isValid(uri.getFragment()))
                decode.encodedFragment(uri.getFragment());
            if (isValid(uri.getPath()))
                decode.encodedPath(uri.getPath());
            decode.encodedAuthority(decodeAuthority(uri));
            return decode.build().toString();
        }
    }

    /**
     * Extracts the host of the given URL and decodes any punycode in it.
     *
     * @return the decoded host, or null when the URL has no host
     */
    public static String decodeUrlHost(String url) {
        String host = Uri.parse(url).getHost();
        if (host != null)
            return decodePunyCode(host);
        return null;
    }

    /** Converts a punycode (IDNA) domain to its Unicode form. */
    private static String decodePunyCode(String domain) {
        return IDN.toUnicode(domain);
    }

    /** Rebuilds the authority component with a punycode-decoded host. */
    private static String decodeAuthority(Uri uri) {
        String host = uri.getHost();
        if (TextUtils.isEmpty(host)) {
            // Nothing to decode; keep whatever encoded authority exists (may be null).
            return uri.getEncodedAuthority();
        } else {
            host = decodePunyCode(host);
        }
        String userInfo = uri.getUserInfo();
        boolean noUserInfo = TextUtils.isEmpty(userInfo);
        int port = uri.getPort();
        if (noUserInfo && port == -1)
            return host;
        StringBuilder builder = new StringBuilder();
        if (!noUserInfo) {
            if (isValid(userInfo)) {
                builder.append(userInfo).append("@");
            } else {
                builder.append(uri.getEncodedUserInfo()).append("@");
            }
        }
        builder.append(host);
        if (port > -1) {
            builder.append(":").append(port);
        }
        return builder.toString();
    }

    /** True when the decoded string exists and contains no replacement characters. */
    private static boolean isValid(String str) {
        return str != null && !(str.indexOf(INVALID_CHAR) > -1);
    }
}
|
corresponds to opaque url
|
app/src/main/java/jp/hazuki/yuzubrowser/utils/UrlUtils.java
|
corresponds to opaque url
|
|
Java
|
apache-2.0
|
3b09d32be343f7378ddbfdd9c5ff5182ba29dae6
| 0
|
UnquietCode/Flapi,UnquietCode/Flapi
|
/*********************************************************************
Copyright 2014 the Flapi authors
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
********************************************************************/
package unquietcode.tools.flapi.outline;
import unquietcode.tools.flapi.DescriptorBuilderException;
import unquietcode.tools.flapi.MethodParser;
import unquietcode.tools.flapi.Pair;
import java.util.LinkedHashMap;
import java.util.Map;
/**
* @author Ben Fagin
*/
/**
 * Mutable description of a single builder method: its signature, occurrence
 * bounds, documentation, deprecation state, trigger/implicit flags, and any
 * annotations to emit. Instances are ordered via {@link #keyString()} so
 * sorted collections of methods have a stable, deterministic ordering.
 *
 * @author Ben Fagin
 */
public class MethodInfo implements Comparable<MethodInfo> {
	private Integer minOccurrences;
	private Integer maxOccurrences;
	private String methodSignature;
	private String documentation;
	private boolean isDeprecated = false;
	private String deprecationReason;
	private boolean didTrigger = false;
	private boolean isImplicit = false;

	// never reassigned (copy() uses putAll), so the reference can be final
	private final Map<Object, Map<String, Object>> annotations = new LinkedHashMap<Object, Map<String, Object>>();

	/** @return the minimum number of times the method must be invoked, or null if unset */
	public Integer getMinOccurrences() {
		return minOccurrences;
	}

	public void setMinOccurrences(Integer minOccurrences) {
		this.minOccurrences = minOccurrences;
	}

	/** @return the maximum number of times the method may be invoked, or null if unset */
	public Integer getMaxOccurrences() {
		return maxOccurrences;
	}

	public void setMaxOccurrences(Integer maxOccurrences) {
		this.maxOccurrences = maxOccurrences;
	}

	public boolean isDeprecated() {
		return isDeprecated;
	}

	/** Marks the method deprecated with the given (possibly null) reason. */
	public void setDeprecated(String reason) {
		isDeprecated = true;
		deprecationReason = reason;
	}

	public String getDeprecationReason() {
		return deprecationReason;
	}

	public String getDocumentation() {
		return documentation;
	}

	public void setDocumentation(String documentation) {
		this.documentation = documentation;
	}

	public String getMethodSignature() {
		return methodSignature;
	}

	/** Stores the signature, trimmed of surrounding whitespace. */
	public void setMethodSignature(String methodSignature) {
		this.methodSignature = methodSignature.trim();
	}

	/** @return whether this method triggers a descent into another block */
	public boolean didTrigger() {
		return didTrigger;
	}

	/** One-way switch: once triggered, always triggered. */
	public void setTriggered() {
		didTrigger = true;
	}

	public boolean isImplicit() {
		return isImplicit;
	}

	public void isImplicit(boolean isImplicit) {
		this.isImplicit = isImplicit;
	}

	/**
	 * Records an annotation and its parameters for this method.
	 *
	 * @throws DescriptorBuilderException if the annotation was already added
	 */
	public void addAnnotation(Object annotation, Map<String, Object> params) {
		if (annotations.containsKey(annotation)) {
			throw new DescriptorBuilderException("duplicate annotation: "+annotation);
		}
		annotations.put(annotation, params);
	}

	public Map<Object, Map<String, Object>> getAnnotations() {
		return annotations;
	}

	/** @return a field-by-field copy of this instance (annotation map is shallow-copied) */
	public MethodInfo copy() {
		MethodInfo clone = new MethodInfo();
		copy(clone);
		return clone;
	}

	protected void copy(MethodInfo other) {
		other.minOccurrences = minOccurrences;
		other.maxOccurrences = maxOccurrences;
		other.methodSignature = methodSignature;
		other.documentation = documentation;
		other.isDeprecated = isDeprecated;
		other.deprecationReason = deprecationReason;
		other.didTrigger = didTrigger;
		other.isImplicit = isImplicit;
		other.annotations.putAll(annotations);
	}

	@Override
	public String toString() {
		return methodSignature + "-" + maxOccurrences;
	}

	/*
		Used by sorted collections to provide consistent ordering.
		NOTE: ordering is based on keyString() only and equals() is not
		overridden, so this ordering is not consistent with equals.
	*/
	@Override
	public int compareTo(MethodInfo other) {
		return keyString().compareTo(other.keyString());
	}

	/**
	 * Builds a canonical key from the parsed method signature plus the
	 * trigger/implicit flags, used for deterministic ordering.
	 *
	 * @throws DescriptorBuilderException if the signature cannot be parsed
	 */
	public String keyString() {
		StringBuilder sb = new StringBuilder();

		final MethodParser parser;
		try {
			parser = new MethodParser(methodSignature);
		} catch (MethodParser.ParseException e) {
			// surface parse failures through the library's own exception type
			throw new DescriptorBuilderException(e);
		}

		sb.append(parser.methodName).append("_1");

		boolean first = true;
		for (Pair<MethodParser.JavaType, String> param : parser.params) {
			if (!first) { sb.append("$"); }
			else { first = false; }
			sb.append(param.first.typeName).append("_").append(param.second);
		}

		if (didTrigger) {
			sb.append("_2t");
		}

		if (isImplicit) {
			sb.append("_3t");
		}

		return sb.toString();
	}
}
|
src/main/java/unquietcode/tools/flapi/outline/MethodInfo.java
|
/*********************************************************************
Copyright 2014 the Flapi authors
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
********************************************************************/
package unquietcode.tools.flapi.outline;
import unquietcode.tools.flapi.DescriptorBuilderException;
import unquietcode.tools.flapi.MethodParser;
import unquietcode.tools.flapi.Pair;
import java.util.LinkedHashMap;
import java.util.Map;
/**
* @author Ben Fagin
*/
/**
 * Mutable description of a single builder method: its signature, occurrence
 * bounds, documentation, deprecation state, trigger/implicit flags, and any
 * annotations to emit. Instances are ordered via {@link #keyString()} so
 * sorted collections of methods have a stable, deterministic ordering.
 *
 * @author Ben Fagin
 */
public class MethodInfo implements Comparable<MethodInfo> {
	private Integer minOccurrences;
	private Integer maxOccurrences;
	private String methodSignature;
	private String documentation;
	private boolean isDeprecated = false;
	private String deprecationReason;
	private boolean didTrigger = false;
	private boolean isImplicit = false;

	// never reassigned (copy() uses putAll), so the reference can be final
	private final Map<Object, Map<String, Object>> annotations = new LinkedHashMap<Object, Map<String, Object>>();

	/** @return the minimum number of times the method must be invoked, or null if unset */
	public Integer getMinOccurrences() {
		return minOccurrences;
	}

	public void setMinOccurrences(Integer minOccurrences) {
		this.minOccurrences = minOccurrences;
	}

	/** @return the maximum number of times the method may be invoked, or null if unset */
	public Integer getMaxOccurrences() {
		return maxOccurrences;
	}

	public void setMaxOccurrences(Integer maxOccurrences) {
		this.maxOccurrences = maxOccurrences;
	}

	public boolean isDeprecated() {
		return isDeprecated;
	}

	/** Marks the method deprecated with the given (possibly null) reason. */
	public void setDeprecated(String reason) {
		isDeprecated = true;
		deprecationReason = reason;
	}

	public String getDeprecationReason() {
		return deprecationReason;
	}

	public String getDocumentation() {
		return documentation;
	}

	public void setDocumentation(String documentation) {
		this.documentation = documentation;
	}

	public String getMethodSignature() {
		return methodSignature;
	}

	/** Stores the signature, trimmed of surrounding whitespace. */
	public void setMethodSignature(String methodSignature) {
		this.methodSignature = methodSignature.trim();
	}

	/** @return whether this method triggers a descent into another block */
	public boolean didTrigger() {
		return didTrigger;
	}

	/** One-way switch: once triggered, always triggered. */
	public void setTriggered() {
		didTrigger = true;
	}

	public boolean isImplicit() {
		return isImplicit;
	}

	public void isImplicit(boolean isImplicit) {
		this.isImplicit = isImplicit;
	}

	/**
	 * Records an annotation and its parameters for this method.
	 *
	 * @throws DescriptorBuilderException if the annotation was already added
	 */
	public void addAnnotation(Object annotation, Map<String, Object> params) {
		if (annotations.containsKey(annotation)) {
			throw new DescriptorBuilderException("duplicate annotation: "+annotation);
		}
		annotations.put(annotation, params);
	}

	public Map<Object, Map<String, Object>> getAnnotations() {
		return annotations;
	}

	/** @return a field-by-field copy of this instance (annotation map is shallow-copied) */
	public MethodInfo copy() {
		MethodInfo clone = new MethodInfo();
		copy(clone);
		return clone;
	}

	protected void copy(MethodInfo other) {
		other.minOccurrences = minOccurrences;
		other.maxOccurrences = maxOccurrences;
		other.methodSignature = methodSignature;
		other.documentation = documentation;
		other.isDeprecated = isDeprecated;
		other.deprecationReason = deprecationReason;
		other.didTrigger = didTrigger;
		other.isImplicit = isImplicit;
		other.annotations.putAll(annotations);
	}

	@Override
	public String toString() {
		return methodSignature + "-" + maxOccurrences;
	}

	/*
		Used by sorted collections to provide consistent ordering.
		NOTE: ordering is based on keyString() only and equals() is not
		overridden, so this ordering is not consistent with equals.
	*/
	@Override
	public int compareTo(MethodInfo other) {
		return keyString().compareTo(other.keyString());
	}

	/**
	 * Builds a canonical key from the parsed method signature plus the
	 * trigger/implicit flags, used for deterministic ordering.
	 *
	 * @throws DescriptorBuilderException if the signature cannot be parsed
	 */
	public String keyString() {
		StringBuilder sb = new StringBuilder();

		// Fix: a malformed signature previously let MethodParser.ParseException
		// escape raw; capture it defensively and rethrow as the library's own
		// exception type, consistent with addAnnotation's error handling.
		final MethodParser parser;
		try {
			parser = new MethodParser(methodSignature);
		} catch (MethodParser.ParseException e) {
			throw new DescriptorBuilderException(e);
		}

		sb.append(parser.methodName).append("_1");

		boolean first = true;
		for (Pair<MethodParser.JavaType, String> param : parser.params) {
			if (!first) { sb.append("$"); }
			else { first = false; }
			sb.append(param.first.typeName).append("_").append(param.second);
		}

		if (didTrigger) {
			sb.append("_2t");
		}

		if (isImplicit) {
			sb.append("_3t");
		}

		return sb.toString();
	}
}
|
defensive capture of parse exceptions
|
src/main/java/unquietcode/tools/flapi/outline/MethodInfo.java
|
defensive capture of parse exceptions
|
|
Java
|
apache-2.0
|
55ef0b9902a64b6f0d8c2aeb137a2c7a631ecb17
| 0
|
jahlborn/jackcess
|
/*
Copyright (c) 2005 Health Market Science, Inc.
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
USA
You can contact Health Market Science at info@healthmarketscience.com
or at the following address:
Health Market Science
2700 Horizon Drive
Suite 200
King of Prussia, PA 19406
*/
package com.healthmarketscience.jackcess;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.charset.Charset;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
/**
* Access table (logical) index. Logical indexes are backed for IndexData,
* where one or more logical indexes could be backed by the same data.
*
* @author Tim McCune
*/
public class Index implements Comparable<Index> {

  protected static final Log LOG = LogFactory.getLog(Index.class);

  /** index type for primary key indexes */
  static final byte PRIMARY_KEY_INDEX_TYPE = (byte)1;
  /** index type for foreign key indexes */
  static final byte FOREIGN_KEY_INDEX_TYPE = (byte)2;

  /** flag for indicating that updates should cascade in a foreign key index */
  private static final byte CASCADE_UPDATES_FLAG = (byte)1;
  /** flag for indicating that deletes should cascade in a foreign key index */
  private static final byte CASCADE_DELETES_FLAG = (byte)1;

  /** index table type for the "primary" table in a foreign key index */
  private static final byte PRIMARY_TABLE_TYPE = (byte)1;

  /** indicate an invalid index number for foreign key field */
  private static final int INVALID_INDEX_NUMBER = -1;

  /** the actual data backing this index (more than one index may be backed by
      the same data */
  private final IndexData _data;
  /** 0-based index number */
  private final int _indexNumber;
  /** the type of the index */
  private final byte _indexType;
  /** Index name */
  private String _name;
  /** foreign key reference info, if any */
  private final ForeignKeyReference _reference;

  /**
   * Reads a logical index definition from the given table definition buffer
   * and attaches this index to its backing IndexData.
   * <p>
   * NOTE(review): the sequence and width of the reads below mirror the Jet
   * on-disk index slot layout written by writeDefinitions; the reads are
   * positional, so their order must not be changed.
   *
   * @param tableBuffer table definition buffer, positioned at this index's slot
   * @param indexDatas  previously parsed index data, looked up by slot number
   * @param format      database format, supplies the skip offsets
   */
  protected Index(ByteBuffer tableBuffer, List<IndexData> indexDatas,
                  JetFormat format)
    throws IOException
  {
    ByteUtil.forward(tableBuffer, format.SKIP_BEFORE_INDEX_SLOT); //Forward past Unknown
    _indexNumber = tableBuffer.getInt();
    int indexDataNumber = tableBuffer.getInt();

    // read foreign key reference info
    byte relIndexType = tableBuffer.get();
    int relIndexNumber = tableBuffer.getInt();
    int relTablePageNumber = tableBuffer.getInt();
    byte cascadeUpdatesFlag = tableBuffer.get();
    byte cascadeDeletesFlag = tableBuffer.get();

    _indexType = tableBuffer.get();

    // only materialize the reference for foreign key indexes with a valid
    // related index number
    if((_indexType == FOREIGN_KEY_INDEX_TYPE) &&
       (relIndexNumber != INVALID_INDEX_NUMBER)) {
      _reference = new ForeignKeyReference(
          relIndexType, relIndexNumber, relTablePageNumber,
          (cascadeUpdatesFlag == CASCADE_UPDATES_FLAG),
          (cascadeDeletesFlag == CASCADE_DELETES_FLAG));
    } else {
      _reference = null;
    }

    ByteUtil.forward(tableBuffer, format.SKIP_AFTER_INDEX_SLOT); //Skip past Unknown

    _data = indexDatas.get(indexDataNumber);
    _data.addIndex(this);
  }

  /** @return the shared data backing this logical index */
  public IndexData getIndexData() {
    return _data;
  }

  /** @return the table which owns this index */
  public Table getTable() {
    return getIndexData().getTable();
  }

  /** @return the format of the owning database */
  public JetFormat getFormat() {
    return getTable().getFormat();
  }

  /** @return the page channel of the owning database */
  public PageChannel getPageChannel() {
    return getTable().getPageChannel();
  }

  /** @return the 0-based number of this logical index */
  public int getIndexNumber() {
    return _indexNumber;
  }

  /** @return the flags byte of the backing index data */
  public byte getIndexFlags() {
    return getIndexData().getIndexFlags();
  }

  /** @return the count of unique entries in the backing index data */
  public int getUniqueEntryCount() {
    return getIndexData().getUniqueEntryCount();
  }

  /** @return the buffer offset of the unique entry count in the backing data */
  public int getUniqueEntryCountOffset() {
    return getIndexData().getUniqueEntryCountOffset();
  }

  /** @return the name of this index */
  public String getName() {
    return _name;
  }

  public void setName(String name) {
    _name = name;
  }

  public boolean isPrimaryKey() {
    return _indexType == PRIMARY_KEY_INDEX_TYPE;
  }

  public boolean isForeignKey() {
    return _indexType == FOREIGN_KEY_INDEX_TYPE;
  }

  /** @return the foreign key reference info, or {@code null} if none */
  public ForeignKeyReference getReference() {
    return _reference;
  }

  /**
   * Whether or not {@code null} values are actually recorded in the index.
   */
  public boolean shouldIgnoreNulls() {
    return getIndexData().shouldIgnoreNulls();
  }

  /**
   * Whether or not index entries must be unique.
   * <p>
   * Some notes about uniqueness:
   * <ul>
   * <li>Access does not seem to consider multiple {@code null} entries
   *     invalid for a unique index</li>
   * <li>text indexes collapse case, and Access seems to compare <b>only</b>
   *     the index entry bytes, therefore two strings which differ only in
   *     case <i>will violate</i> the unique constraint</li>
   * </ul>
   */
  public boolean isUnique() {
    return getIndexData().isUnique();
  }

  /**
   * Returns the Columns for this index (unmodifiable)
   */
  public List<IndexData.ColumnDescriptor> getColumns() {
    return getIndexData().getColumns();
  }

  /**
   * Whether or not the complete index state has been read.
   */
  public boolean isInitialized() {
    return getIndexData().isInitialized();
  }

  /**
   * Forces initialization of this index (actual parsing of index pages).
   * normally, the index will not be initialized until the entries are
   * actually needed.
   */
  public void initialize() throws IOException {
    getIndexData().initialize();
  }

  /**
   * Writes the current index state to the database.
   * <p>
   * Forces index initialization.
   */
  public void update() throws IOException {
    getIndexData().update();
  }

  /**
   * Adds a row to this index
   * <p>
   * Forces index initialization.
   *
   * @param row Row to add
   * @param rowId rowId of the row to be added
   */
  public void addRow(Object[] row, RowId rowId)
    throws IOException
  {
    getIndexData().addRow(row, rowId);
  }

  /**
   * Removes a row from this index
   * <p>
   * Forces index initialization.
   *
   * @param row Row to remove
   * @param rowId rowId of the row to be removed
   */
  public void deleteRow(Object[] row, RowId rowId)
    throws IOException
  {
    getIndexData().deleteRow(row, rowId);
  }

  /**
   * Gets a new cursor for this index.
   * <p>
   * Forces index initialization.
   */
  public IndexData.EntryCursor cursor()
    throws IOException
  {
    return cursor(null, true, null, true);
  }

  /**
   * Gets a new cursor for this index, narrowed to the range defined by the
   * given startRow and endRow.
   * <p>
   * Forces index initialization.
   *
   * @param startRow the first row of data for the cursor, or {@code null} for
   *                 the first entry
   * @param startInclusive whether or not startRow is inclusive or exclusive
   * @param endRow the last row of data for the cursor, or {@code null} for
   *               the last entry
   * @param endInclusive whether or not endRow is inclusive or exclusive
   */
  public IndexData.EntryCursor cursor(Object[] startRow,
                                      boolean startInclusive,
                                      Object[] endRow,
                                      boolean endInclusive)
    throws IOException
  {
    return getIndexData().cursor(startRow, startInclusive, endRow,
                                 endInclusive);
  }

  /**
   * Constructs an array of values appropriate for this index from the given
   * column values, expected to match the columns for this index.
   * @return the appropriate sparse array of data
   * @throws IllegalArgumentException if the wrong number of values are
   *         provided
   */
  public Object[] constructIndexRowFromEntry(Object... values)
  {
    return getIndexData().constructIndexRowFromEntry(values);
  }

  /**
   * Constructs an array of values appropriate for this index from the given
   * column value.
   * @return the appropriate sparse array of data or {@code null} if not all
   *         columns for this index were provided
   */
  public Object[] constructIndexRow(String colName, Object value)
  {
    return constructIndexRow(Collections.singletonMap(colName, value));
  }

  /**
   * Constructs an array of values appropriate for this index from the given
   * column values.
   * @return the appropriate sparse array of data or {@code null} if not all
   *         columns for this index were provided
   */
  public Object[] constructIndexRow(Map<String,Object> row)
  {
    return getIndexData().constructIndexRow(row);
  }

  @Override
  public String toString() {
    StringBuilder rtn = new StringBuilder();
    rtn.append("\tName: (").append(getTable().getName()).append(") ")
      .append(_name);
    rtn.append("\n\tNumber: ").append(_indexNumber);
    rtn.append("\n\tIs Primary Key: ").append(isPrimaryKey());
    rtn.append("\n\tIs Foreign Key: ").append(isForeignKey());
    if(_reference != null) {
      rtn.append("\n\tForeignKeyReference: ").append(_reference);
    }
    rtn.append(_data.toString());
    rtn.append("\n\n");
    return rtn.toString();
  }

  /**
   * Orders indexes by index number only. NOTE: equals() is not overridden,
   * so this ordering is not consistent with equals.
   */
  public int compareTo(Index other) {
    if (_indexNumber > other.getIndexNumber()) {
      return 1;
    } else if (_indexNumber < other.getIndexNumber()) {
      return -1;
    } else {
      return 0;
    }
  }

  /**
   * Writes the logical index definitions into a table definition buffer.
   * @param buffer Buffer to write to
   * @param indexes List of IndexBuilders to write definitions for
   */
  protected static void writeDefinitions(
      ByteBuffer buffer, List<IndexBuilder> indexes, Charset charset)
    throws IOException
  {
    // write logical index information
    for(IndexBuilder idx : indexes) {
      buffer.putInt(Table.MAGIC_TABLE_NUMBER); // seemingly constant magic value which matches the table def
      buffer.putInt(idx.getIndexNumber()); // index num
      buffer.putInt(idx.getIndexDataNumber()); // index data num
      buffer.put((byte)0); // related table type
      buffer.putInt(INVALID_INDEX_NUMBER); // related index num
      buffer.putInt(0); // related table definition page number
      buffer.put((byte)0); // cascade updates flag
      buffer.put((byte)0); // cascade deletes flag
      buffer.put(idx.getType()); // index type flags
      buffer.putInt(0); // unknown
    }

    // write index names
    for(IndexBuilder idx : indexes) {
      Table.writeName(buffer, idx.getName(), charset);
    }
  }

  /**
   * Information about a foreign key reference defined in an index (when
   * referential integrity should be enforced).
   */
  public static class ForeignKeyReference
  {
    private final byte _tableType;
    private final int _otherIndexNumber;
    private final int _otherTablePageNumber;
    private final boolean _cascadeUpdates;
    private final boolean _cascadeDeletes;

    public ForeignKeyReference(
        byte tableType, int otherIndexNumber, int otherTablePageNumber,
        boolean cascadeUpdates, boolean cascadeDeletes)
    {
      _tableType = tableType;
      _otherIndexNumber = otherIndexNumber;
      _otherTablePageNumber = otherTablePageNumber;
      _cascadeUpdates = cascadeUpdates;
      _cascadeDeletes = cascadeDeletes;
    }

    /** @return the raw table type byte of the related table */
    public byte getTableType() {
      return _tableType;
    }

    /** @return {@code true} if the related table is the "primary" table */
    public boolean isPrimaryTable() {
      return(getTableType() == PRIMARY_TABLE_TYPE);
    }

    public int getOtherIndexNumber() {
      return _otherIndexNumber;
    }

    public int getOtherTablePageNumber() {
      return _otherTablePageNumber;
    }

    public boolean isCascadeUpdates() {
      return _cascadeUpdates;
    }

    public boolean isCascadeDeletes() {
      return _cascadeDeletes;
    }

    @Override
    public String toString() {
      return new StringBuilder()
        .append("\n\t\tOther Index Number: ").append(_otherIndexNumber)
        .append("\n\t\tOther Table Page Num: ").append(_otherTablePageNumber)
        .append("\n\t\tIs Primary Table: ").append(isPrimaryTable())
        .append("\n\t\tIs Cascade Updates: ").append(isCascadeUpdates())
        .append("\n\t\tIs Cascade Deletes: ").append(isCascadeDeletes())
        .toString();
    }
  }
}
|
src/java/com/healthmarketscience/jackcess/Index.java
|
/*
Copyright (c) 2005 Health Market Science, Inc.
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
USA
You can contact Health Market Science at info@healthmarketscience.com
or at the following address:
Health Market Science
2700 Horizon Drive
Suite 200
King of Prussia, PA 19406
*/
package com.healthmarketscience.jackcess;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.charset.Charset;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
/**
* Access table (logical) index. Logical indexes are backed for IndexData,
* where one or more logical indexes could be backed by the same data.
*
* @author Tim McCune
*/
public class Index implements Comparable<Index> {

  protected static final Log LOG = LogFactory.getLog(Index.class);

  /** index type for primary key indexes */
  static final byte PRIMARY_KEY_INDEX_TYPE = (byte)1;
  /** index type for foreign key indexes */
  static final byte FOREIGN_KEY_INDEX_TYPE = (byte)2;

  /** flag for indicating that updates should cascade in a foreign key index */
  private static final byte CASCADE_UPDATES_FLAG = (byte)1;
  /** flag for indicating that deletes should cascade in a foreign key index */
  private static final byte CASCADE_DELETES_FLAG = (byte)1;

  /** index table type for the "primary" table in a foreign key index */
  private static final byte PRIMARY_TABLE_TYPE = (byte)1;

  /** indicate an invalid index number for foreign key field */
  private static final int INVALID_INDEX_NUMBER = -1;

  /** the actual data backing this index (more than one index may be backed by
      the same data */
  private final IndexData _data;
  /** 0-based index number */
  private final int _indexNumber;
  /** the type of the index */
  private final byte _indexType;
  /** Index name */
  private String _name;
  /** foreign key reference info, if any */
  private final ForeignKeyReference _reference;

  /**
   * Reads a logical index definition from the given table definition buffer
  * and attaches this index to its backing IndexData. The reads below are
   * positional and mirror the slot layout written by writeDefinitions, so
   * their order must not be changed.
   *
   * @param tableBuffer table definition buffer, positioned at this index's slot
   * @param indexDatas  previously parsed index data, looked up by slot number
   * @param format      database format, supplies the skip offsets
   */
  protected Index(ByteBuffer tableBuffer, List<IndexData> indexDatas,
                  JetFormat format)
    throws IOException
  {
    ByteUtil.forward(tableBuffer, format.SKIP_BEFORE_INDEX_SLOT); //Forward past Unknown
    _indexNumber = tableBuffer.getInt();
    int indexDataNumber = tableBuffer.getInt();

    // read foreign key reference info
    byte relIndexType = tableBuffer.get();
    int relIndexNumber = tableBuffer.getInt();
    int relTablePageNumber = tableBuffer.getInt();
    byte cascadeUpdatesFlag = tableBuffer.get();
    byte cascadeDeletesFlag = tableBuffer.get();

    _indexType = tableBuffer.get();

    // only materialize the reference for foreign key indexes with a valid
    // related index number
    if((_indexType == FOREIGN_KEY_INDEX_TYPE) &&
       (relIndexNumber != INVALID_INDEX_NUMBER)) {
      _reference = new ForeignKeyReference(
          relIndexType, relIndexNumber, relTablePageNumber,
          (cascadeUpdatesFlag == CASCADE_UPDATES_FLAG),
          (cascadeDeletesFlag == CASCADE_DELETES_FLAG));
    } else {
      _reference = null;
    }

    ByteUtil.forward(tableBuffer, format.SKIP_AFTER_INDEX_SLOT); //Skip past Unknown

    _data = indexDatas.get(indexDataNumber);
    _data.addIndex(this);
  }

  /** @return the shared data backing this logical index */
  public IndexData getIndexData() {
    return _data;
  }

  /** @return the table which owns this index */
  public Table getTable() {
    return getIndexData().getTable();
  }

  /** @return the format of the owning database */
  public JetFormat getFormat() {
    return getTable().getFormat();
  }

  /** @return the page channel of the owning database */
  public PageChannel getPageChannel() {
    return getTable().getPageChannel();
  }

  /** @return the 0-based number of this logical index */
  public int getIndexNumber() {
    return _indexNumber;
  }

  /** @return the flags byte of the backing index data */
  public byte getIndexFlags() {
    return getIndexData().getIndexFlags();
  }

  /** @return the count of unique entries in the backing index data */
  public int getUniqueEntryCount() {
    return getIndexData().getUniqueEntryCount();
  }

  /** @return the buffer offset of the unique entry count in the backing data */
  public int getUniqueEntryCountOffset() {
    return getIndexData().getUniqueEntryCountOffset();
  }

  /** @return the name of this index */
  public String getName() {
    return _name;
  }

  public void setName(String name) {
    _name = name;
  }

  public boolean isPrimaryKey() {
    return _indexType == PRIMARY_KEY_INDEX_TYPE;
  }

  public boolean isForeignKey() {
    return _indexType == FOREIGN_KEY_INDEX_TYPE;
  }

  /** @return the foreign key reference info, or {@code null} if none */
  public ForeignKeyReference getReference() {
    return _reference;
  }

  /**
   * Whether or not {@code null} values are actually recorded in the index.
   */
  public boolean shouldIgnoreNulls() {
    return getIndexData().shouldIgnoreNulls();
  }

  /**
   * Whether or not index entries must be unique.
   * <p>
   * Some notes about uniqueness:
   * <ul>
   * <li>Access does not seem to consider multiple {@code null} entries
   *     invalid for a unique index</li>
   * <li>text indexes collapse case, and Access seems to compare <b>only</b>
   *     the index entry bytes, therefore two strings which differ only in
   *     case <i>will violate</i> the unique constraint</li>
   * </ul>
   */
  public boolean isUnique() {
    return getIndexData().isUnique();
  }

  /**
   * Returns the Columns for this index (unmodifiable)
   */
  public List<IndexData.ColumnDescriptor> getColumns() {
    return getIndexData().getColumns();
  }

  /**
   * Whether or not the complete index state has been read.
   */
  public boolean isInitialized() {
    return getIndexData().isInitialized();
  }

  /**
   * Forces initialization of this index (actual parsing of index pages).
   * normally, the index will not be initialized until the entries are
   * actually needed.
   */
  public void initialize() throws IOException {
    getIndexData().initialize();
  }

  /**
   * Writes the current index state to the database.
   * <p>
   * Forces index initialization.
   */
  public void update() throws IOException {
    getIndexData().update();
  }

  /**
   * Adds a row to this index
   * <p>
   * Forces index initialization.
   *
   * @param row Row to add
   * @param rowId rowId of the row to be added
   */
  public void addRow(Object[] row, RowId rowId)
    throws IOException
  {
    getIndexData().addRow(row, rowId);
  }

  /**
   * Removes a row from this index
   * <p>
   * Forces index initialization.
   *
   * @param row Row to remove
   * @param rowId rowId of the row to be removed
   */
  public void deleteRow(Object[] row, RowId rowId)
    throws IOException
  {
    getIndexData().deleteRow(row, rowId);
  }

  /**
   * Gets a new cursor for this index.
   * <p>
   * Forces index initialization.
   */
  public IndexData.EntryCursor cursor()
    throws IOException
  {
    return cursor(null, true, null, true);
  }

  /**
   * Gets a new cursor for this index, narrowed to the range defined by the
   * given startRow and endRow.
   * <p>
   * Forces index initialization.
   *
   * @param startRow the first row of data for the cursor, or {@code null} for
   *                 the first entry
   * @param startInclusive whether or not startRow is inclusive or exclusive
   * @param endRow the last row of data for the cursor, or {@code null} for
   *               the last entry
   * @param endInclusive whether or not endRow is inclusive or exclusive
   */
  public IndexData.EntryCursor cursor(Object[] startRow,
                                      boolean startInclusive,
                                      Object[] endRow,
                                      boolean endInclusive)
    throws IOException
  {
    return getIndexData().cursor(startRow, startInclusive, endRow,
                                 endInclusive);
  }

  /**
   * Constructs an array of values appropriate for this index from the given
   * column values, expected to match the columns for this index.
   * @return the appropriate sparse array of data
   * @throws IllegalArgumentException if the wrong number of values are
   *         provided
   */
  public Object[] constructIndexRowFromEntry(Object... values)
  {
    return getIndexData().constructIndexRowFromEntry(values);
  }

  /**
   * Constructs an array of values appropriate for this index from the given
   * column value.
   * @return the appropriate sparse array of data or {@code null} if not all
   *         columns for this index were provided
   */
  public Object[] constructIndexRow(String colName, Object value)
  {
    return constructIndexRow(Collections.singletonMap(colName, value));
  }

  /**
   * Constructs an array of values appropriate for this index from the given
   * column values.
   * @return the appropriate sparse array of data or {@code null} if not all
   *         columns for this index were provided
   */
  public Object[] constructIndexRow(Map<String,Object> row)
  {
    return getIndexData().constructIndexRow(row);
  }

  @Override
  public String toString() {
    StringBuilder rtn = new StringBuilder();
    rtn.append("\tName: (").append(getTable().getName()).append(") ")
      .append(_name);
    rtn.append("\n\tNumber: ").append(_indexNumber);
    rtn.append("\n\tIs Primary Key: ").append(isPrimaryKey());
    rtn.append("\n\tIs Foreign Key: ").append(isForeignKey());
    if(_reference != null) {
      rtn.append("\n\tForeignKeyReference: ").append(_reference);
    }
    rtn.append(_data.toString());
    rtn.append("\n\n");
    return rtn.toString();
  }

  /**
   * Orders indexes by index number only. NOTE: equals() is not overridden,
   * so this ordering is not consistent with equals.
   */
  public int compareTo(Index other) {
    if (_indexNumber > other.getIndexNumber()) {
      return 1;
    } else if (_indexNumber < other.getIndexNumber()) {
      return -1;
    } else {
      return 0;
    }
  }

  /**
   * Writes the logical index definitions into a table definition buffer.
   * @param buffer Buffer to write to
   * @param indexes List of IndexBuilders to write definitions for
   */
  protected static void writeDefinitions(
      ByteBuffer buffer, List<IndexBuilder> indexes, Charset charset)
    throws IOException
  {
    // write logical index information
    for(IndexBuilder idx : indexes) {
      buffer.putInt(Table.MAGIC_TABLE_NUMBER); // seemingly constant magic value which matches the table def
      buffer.putInt(idx.getIndexNumber()); // index num
      buffer.putInt(idx.getIndexDataNumber()); // index data num
      buffer.put((byte)0); // related table type
      buffer.putInt(INVALID_INDEX_NUMBER); // related index num
      buffer.putInt(0); // related table definition page number
      buffer.put((byte)0); // cascade updates flag
      buffer.put((byte)0); // cascade deletes flag
      // FIX: this slot holds the index *type*; previously wrote getFlags(),
      // which put the wrong byte into newly created index definitions
      buffer.put(idx.getType()); // index type flags
      buffer.putInt(0); // unknown
    }

    // write index names
    for(IndexBuilder idx : indexes) {
      Table.writeName(buffer, idx.getName(), charset);
    }
  }

  /**
   * Information about a foreign key reference defined in an index (when
   * referential integrity should be enforced).
   */
  public static class ForeignKeyReference
  {
    private final byte _tableType;
    private final int _otherIndexNumber;
    private final int _otherTablePageNumber;
    private final boolean _cascadeUpdates;
    private final boolean _cascadeDeletes;

    public ForeignKeyReference(
        byte tableType, int otherIndexNumber, int otherTablePageNumber,
        boolean cascadeUpdates, boolean cascadeDeletes)
    {
      _tableType = tableType;
      _otherIndexNumber = otherIndexNumber;
      _otherTablePageNumber = otherTablePageNumber;
      _cascadeUpdates = cascadeUpdates;
      _cascadeDeletes = cascadeDeletes;
    }

    /** @return the raw table type byte of the related table */
    public byte getTableType() {
      return _tableType;
    }

    /** @return {@code true} if the related table is the "primary" table */
    public boolean isPrimaryTable() {
      return(getTableType() == PRIMARY_TABLE_TYPE);
    }

    public int getOtherIndexNumber() {
      return _otherIndexNumber;
    }

    public int getOtherTablePageNumber() {
      return _otherTablePageNumber;
    }

    public boolean isCascadeUpdates() {
      return _cascadeUpdates;
    }

    public boolean isCascadeDeletes() {
      return _cascadeDeletes;
    }

    @Override
    public String toString() {
      return new StringBuilder()
        .append("\n\t\tOther Index Number: ").append(_otherIndexNumber)
        .append("\n\t\tOther Table Page Num: ").append(_otherTablePageNumber)
        .append("\n\t\tIs Primary Table: ").append(isPrimaryTable())
        .append("\n\t\tIs Cascade Updates: ").append(isCascadeUpdates())
        .append("\n\t\tIs Cascade Deletes: ").append(isCascadeDeletes())
        .toString();
    }
  }
}
|
fix writing index type value on index creation
git-svn-id: 3a2409cd0beef11b2606a17fdc4e1262b30a237e@530 f203690c-595d-4dc9-a70b-905162fa7fd2
|
src/java/com/healthmarketscience/jackcess/Index.java
|
fix writing index type value on index creation
|
|
Java
|
apache-2.0
|
c9ff779e0ccdbcad0f94b29040e50bad349f0d25
| 0
|
onyxbits/TextFiction,onyxbits/TextFiction,onyxbits/TextFiction
|
package de.onyxbits.textfiction;
import java.io.File;
import java.io.PrintStream;
import java.lang.reflect.Field;
import java.util.Iterator;
import org.json.JSONArray;
import de.onyxbits.textfiction.input.CompassFragment;
import de.onyxbits.textfiction.input.InputFragment;
import de.onyxbits.textfiction.input.InputProcessor;
import de.onyxbits.textfiction.input.WordExtractor;
import de.onyxbits.textfiction.zengine.GrueException;
import de.onyxbits.textfiction.zengine.StyleRegion;
import de.onyxbits.textfiction.zengine.ZMachine;
import de.onyxbits.textfiction.zengine.ZState;
import de.onyxbits.textfiction.zengine.ZStatus;
import de.onyxbits.textfiction.zengine.ZWindow;
import android.net.Uri;
import android.os.Bundle;
import android.text.SpannableString;
import android.text.style.StyleSpan;
import android.text.style.TypefaceSpan;
import android.text.style.UnderlineSpan;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.view.Window;
import android.view.WindowManager;
import android.view.animation.AnimationUtils;
import android.widget.EditText;
import android.widget.ListView;
import android.widget.ProgressBar;
import android.widget.TextView;
import android.widget.Toast;
import android.widget.ViewFlipper;
import android.speech.tts.TextToSpeech;
import android.speech.tts.TextToSpeech.OnInitListener;
import android.support.v4.app.FragmentActivity;
import android.support.v4.app.FragmentManager;
import android.support.v4.app.NavUtils;
import android.annotation.SuppressLint;
import android.annotation.TargetApi;
import android.app.AlertDialog;
import android.content.DialogInterface;
import android.content.SharedPreferences;
import android.content.SharedPreferences.OnSharedPreferenceChangeListener;
import android.graphics.Typeface;
import android.os.Build;
import android.preference.PreferenceManager;
/**
* The activity where actual gameplay takes place.
*
* @author patrick
*
*/
public class GameActivity extends FragmentActivity implements
DialogInterface.OnClickListener, OnInitListener,
OnSharedPreferenceChangeListener, InputProcessor {
/**
* Name of the file we keep our highlights in
*/
public static final String HIGHLIGHTFILE = "highlights.json";
/**
* This activity must be started through an intent and be passed the filename
* of the game via this extra.
*/
public static final String LOADFILE = "loadfile";
/**
* Optionally, the name of the game may be passed (if none is passed, the
* filename is used as a title).
*/
public static final String GAMETITLE = "gametitle";
/**
* How many items to keep in the messagebuffer at most. Note: this should be
* an odd number so the log starts with a narrator entry.
*/
public static final int MAXMESSAGES = 81;
private static final int PENDING_NONE = 0;
private static final int PENDING_RESTART = 1;
private static final int PENDING_RESTORE = 2;
private static final int PENDING_SAVE = 3;
/**
* Displays the message log
*/
private ListView storyBoard;
/**
* Adapter for the story list
*/
private StoryAdapter messages;
/**
* The "upper window" of the z-machine containing the status part
*/
private TextView statusWindow;
/**
* Holds stuff that needs to survive config changes (e.g. screen rotation).
*/
private RetainerFragment retainerFragment;
/**
* The input prompt
*/
private InputFragment inputFragment;
/**
* On screen compass
*/
private CompassFragment compassFragment;
/**
* Contains story- and status screen.
*/
private ViewFlipper windowFlipper;
/**
* For entering a filename to save the current game as.
*/
private EditText saveName;
/**
* The game playing in this activity
*/
private File storyFile;
/**
* State variable for when we are showing a "confirm" dialog.
*/
private int pendingAction = PENDING_NONE;
/**
* Words we are highligting in the story
*/
private String[] highlighted;
private SharedPreferences prefs;
private TextToSpeech speaker;
private boolean ttsReady;
private WordExtractor wordExtractor;
private ProgressBar loading;
  /**
   * Builds the game screen. Window features must be requested before
   * setContentView(), hence the ordering at the top of this method. Also
   * restores state from the retainer fragment after a configuration change
   * and loads the per-game highlight word list.
   */
  @Override
  protected void onCreate(Bundle savedInstanceState) {
    prefs = PreferenceManager.getDefaultSharedPreferences(this);
    requestWindowFeature(Window.FEATURE_NO_TITLE);
    getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,
        WindowManager.LayoutParams.FLAG_FULLSCREEN);
    // Set the custom theme: the preference stores the *name* of an entry in
    // R.style, resolved via reflection. Falls back to the manifest theme on
    // any failure (missing/renamed style).
    try {
      Field field = R.style.class.getField(prefs.getString("theme", ""));
      setTheme(field.getInt(null));
    }
    catch (Exception e) {
      Log.w(getClass().getName(), e);
    }
    prefs.registerOnSharedPreferenceChangeListener(this);
    super.onCreate(savedInstanceState);
    LayoutInflater infl = getLayoutInflater();
    requestWindowFeature(Window.FEATURE_PROGRESS);
    // The caller must pass the game file path via the LOADFILE extra.
    storyFile = new File(getIntent().getStringExtra(LOADFILE));
    View content = infl.inflate(R.layout.activity_game, null);
    setContentView(content);
    // Check if this is a genuine start or if we are restarting because the
    // device got rotated.
    FragmentManager fm = getSupportFragmentManager();
    inputFragment = (InputFragment) fm.findFragmentById(R.id.fragment_input);
    compassFragment = (CompassFragment) fm
        .findFragmentById(R.id.fragment_compass);
    retainerFragment = (RetainerFragment) fm.findFragmentByTag("retainer");
    if (retainerFragment == null) {
      // First start
      retainerFragment = new RetainerFragment();
      fm.beginTransaction().add(retainerFragment, "retainer").commit();
    }
    else {
      // Likely a restart because of the screen being rotated. This may have
      // happened while loading, so don't figure if we don't have an engine.
      if (retainerFragment.engine != null) {
        figurePromptStyle();
        figureMenuState();
      }
    }
    // Load the highlight file (a JSON string array stored per game).
    try {
      File file = new File(FileUtil.getDataDir(storyFile), HIGHLIGHTFILE);
      JSONArray js = new JSONArray(FileUtil.getContents(file));
      for (int i = 0; i < js.length(); i++) {
        retainerFragment.highlighted.add(js.getString(i));
      }
    }
    catch (Exception e) {
      // No big deal. Probably the first time this game runs -> use defaults
      String[] ini = getResources().getStringArray(R.array.initial_highlights);
      for (String i : ini) {
        retainerFragment.highlighted.add(i);
      }
    }
    highlighted = retainerFragment.highlighted.toArray(new String[0]);
    // Wire up storyboard list, input plumbing and the status flipper.
    storyBoard = (ListView) content.findViewById(R.id.storyboard);
    wordExtractor = new WordExtractor(this);
    wordExtractor.setInputFragment(inputFragment);
    wordExtractor.setInputProcessor(this);
    messages = new StoryAdapter(this, 0, retainerFragment.messageBuffer,
        wordExtractor);
    storyBoard.setAdapter(messages);
    windowFlipper = (ViewFlipper) content.findViewById(R.id.window_flipper);
    statusWindow = (TextView) content.findViewById(R.id.status);
    loading = (ProgressBar) findViewById(R.id.gameloading);
    statusWindow.setText(retainerFragment.upperWindow);
    speaker = new TextToSpeech(this, this);
    // Apply current preference values (font, size, keyclick, ...) once.
    onSharedPreferenceChanged(prefs, "");
  }
@Override
public void onPause() {
if (ttsReady && speaker.isSpeaking()) {
speaker.stop();
}
super.onPause();
}
@Override
public void onDestroy() {
prefs.unregisterOnSharedPreferenceChangeListener(this);
if (ttsReady) {
speaker.shutdown();
}
if (retainerFragment == null || retainerFragment.engine == null) {
if (retainerFragment.postMortem != null) {
// Let's not go into details here. The user won't understand them
// anyways.
Toast
.makeText(this, R.string.msg_corrupt_game_file, Toast.LENGTH_SHORT)
.show();
}
super.onDestroy();
return;
}
if (retainerFragment.postMortem != null) {
// Let's not go into details here. The user won't understand them anyways.
Toast.makeText(this, R.string.msg_corrupt_game_file, Toast.LENGTH_SHORT)
.show();
super.onDestroy();
return;
}
if (retainerFragment.engine.getRunState() == ZMachine.STATE_WAIT_CMD) {
ZState state = new ZState(retainerFragment.engine);
File f = new File(FileUtil.getSaveGameDir(storyFile),
getString(R.string.autosavename));
state.disk_save(f.getPath(), retainerFragment.engine.pc);
}
else {
Toast.makeText(this, R.string.mg_not_at_a_commandprompt,
Toast.LENGTH_LONG).show();
}
super.onDestroy();
}
  /**
   * Populates the options menu from the game menu resource.
   */
  @Override
  public boolean onCreateOptionsMenu(Menu menu) {
    // Inflate the menu; this adds items to the action bar if it is present.
    super.onCreateOptionsMenu(menu);
    getMenuInflater().inflate(R.menu.game, menu);
    return true;
  }
@Override
public boolean onPrepareOptionsMenu(Menu menu) {
super.onPrepareOptionsMenu(menu);
boolean rest = !(retainerFragment == null
|| retainerFragment.engine == null
|| retainerFragment.engine.getRunState() == ZMachine.STATE_RUNNING || retainerFragment.engine
.getRunState() == ZMachine.STATE_INIT);
menu.findItem(R.id.mi_save).setEnabled(rest && inputFragment.isPrompt());
menu.findItem(R.id.mi_restore).setEnabled(rest && inputFragment.isPrompt());
menu.findItem(R.id.mi_restart).setEnabled(rest);
return true;
}
  /**
   * Dispatches options-menu selections. Dialog-based actions (save,
   * restore, restart) only record their intent in {@link #pendingAction};
   * the actual work happens in {@link #onClick} once the user confirms.
   */
  @Override
  public boolean onOptionsItemSelected(MenuItem item) {
    switch (item.getItemId()) {
      case R.id.mi_flip_view: {
        // Toggle between storyboard and status screen.
        flipView(windowFlipper.getCurrentView() != storyBoard);
        return true;
      }
      case R.id.mi_save: {
        // Ask for a save-game name; completion handled in onClick().
        pendingAction = PENDING_SAVE;
        saveName = new EditText(this);
        saveName.setSingleLine(true);
        AlertDialog.Builder builder = new AlertDialog.Builder(this);
        builder.setTitle(R.string.title_save_game)
            .setPositiveButton(android.R.string.ok, this).setView(saveName)
            .show();
        return true;
      }
      case R.id.mi_restore: {
        // Offer the list of existing saves; selection handled in onClick().
        String[] sg = FileUtil.listSaveName(storyFile);
        if (sg.length > 0) {
          pendingAction = PENDING_RESTORE;
          AlertDialog.Builder builder = new AlertDialog.Builder(this);
          builder.setTitle(R.string.title_restore_game).setItems(sg, this)
              .show();
        }
        else {
          Toast.makeText(this, R.string.msg_no_savegames, Toast.LENGTH_SHORT)
              .show();
        }
        return true;
      }
      case R.id.mi_clear_log: {
        retainerFragment.messageBuffer.clear();
        messages.notifyDataSetChanged();
        return true;
      }
      case R.id.mi_help: {
        MainActivity.openUri(this, Uri.parse(getString(R.string.url_help)));
        return true;
      }
      case R.id.mi_restart: {
        // Restart is destructive -> confirm first; handled in onClick().
        pendingAction = PENDING_RESTART;
        AlertDialog.Builder builder = new AlertDialog.Builder(this);
        builder.setTitle(R.string.title_please_confirm)
            .setMessage(R.string.msg_really_restart)
            .setPositiveButton(android.R.string.yes, this)
            .setNegativeButton(android.R.string.no, this).show();
        return true;
      }
      case android.R.id.home:
        // This ID represents the Home or Up button. In the case of this
        // activity, the Up button is shown. Use NavUtils to allow users
        // to navigate up one level in the application structure. For
        // more details, see the Navigation pattern on Android Design:
        //
        // http://developer.android.com/design/patterns/navigation.html#up-vs-back
        //
        NavUtils.navigateUpFromSameTask(this);
        return true;
    }
    return super.onOptionsItemSelected(item);
  }
  /**
   * Feeds user input to the z-machine and runs it until it blocks again.
   * Ignored while the engine is still executing (STATE_RUNNING) or not yet
   * created. Command-line input (but not single keypresses) is echoed into
   * the message log first.
   */
  @Override
  public void executeCommand(char[] inputBuffer) {
    ZMachine engine = retainerFragment.engine;
    if (engine != null && engine.getRunState() != ZMachine.STATE_RUNNING) {
      retainerFragment.engine.fillInputBuffer(inputBuffer);
      if (retainerFragment.engine.getRunState() != ZMachine.STATE_WAIT_CHAR) {
        // Echo the typed command (minus newlines/whitespace) as a "myself"
        // entry in the storyboard.
        String tmp = new String(inputBuffer).replaceAll("\n", "").trim();
        SpannableString ss = new SpannableString(tmp);
        retainerFragment.messageBuffer.add(new StoryItem(ss, StoryItem.MYSELF));
      }
      try {
        retainerFragment.engine.run();
        publishResult();
        if (retainerFragment.engine.saveCalled || retainerFragment.engine.restoreCalled) {
          // This is a really ugly hack to let the user know that the save/restore commands
          // don't work
          Toast.makeText(this,R.string.err_sr_deprecated,Toast.LENGTH_LONG).show();
          retainerFragment.engine.saveCalled = false;
          retainerFragment.engine.restoreCalled = false;
        }
      }
      catch (GrueException e) {
        // Fatal interpreter error: remember it for onDestroy() and bail out.
        retainerFragment.postMortem = e;
        Log.w(getClass().getName(), e);
        finish();
      }
    }
  }
  /**
   * Callback: publish results after the engine has run. Copies the upper
   * (status) window into the status view, converts the lower (story) window
   * output into a styled storyboard entry (bold/italic/fixed spans plus
   * word highlighting), trims the log, scrolls to the newest item and
   * refreshes prompt/menu state.
   */
  public void publishResult() {
    ZWindow upper = retainerFragment.engine.window[1];
    ZWindow lower = retainerFragment.engine.window[0];
    ZStatus status = retainerFragment.engine.status_line;
    String tmp = "";
    boolean showLower = false;
    // Evaluate game status
    if (status != null) {
      // Z3 game -> copy the status bar object into the upper window.
      retainerFragment.engine.update_status_line();
      retainerFragment.upperWindow = status.toString();
      statusWindow.setText(retainerFragment.upperWindow);
    }
    else {
      if (upper.maxCursor > 0) {
        // The normal, "status bar" upper window.
        tmp = upper.stringyfy(upper.startWindow, upper.maxCursor);
      }
      else {
        tmp = "";
      }
      statusWindow.setText(tmp);
      retainerFragment.upperWindow = tmp;
    }
    upper.retrieved();
    // Evaluate story progress
    if (lower.cursor > 0) {
      showLower = true;
      // noPrompt() cuts the trailing input prompt off the frame buffer.
      tmp = new String(lower.frameBuffer, 0, lower.noPrompt());
      if (ttsReady && prefs.getBoolean("narrator", false)) {
        speaker.speak(tmp, TextToSpeech.QUEUE_FLUSH, null);
      }
      SpannableString stmp = new SpannableString(tmp);
      // Walk the linked list of style regions and translate each into an
      // Android text span.
      StyleRegion reg = lower.regions;
      if (reg != null) {
        while (reg != null) {
          if (reg.next == null) {
            // The printer does not "close" the last style since it doesn't know
            // when the last character is printed.
            reg.end = tmp.length() - 1;
          }
          // Did the game style the prompt (which we cut away)?
          reg.end = Math.min(reg.end, tmp.length() - 1);
          switch (reg.style) {
            case ZWindow.BOLD: {
              stmp.setSpan(new StyleSpan(Typeface.BOLD), reg.start, reg.end, 0);
              break;
            }
            case ZWindow.ITALIC: {
              stmp.setSpan(new StyleSpan(Typeface.ITALIC), reg.start, reg.end,
                  0);
              break;
            }
            case ZWindow.FIXED: {
              stmp.setSpan(new TypefaceSpan("monospace"), reg.start, reg.end, 0);
              break;
            }
          }
          reg = reg.next;
        }
      }
      highlight(stmp, highlighted);
      retainerFragment.messageBuffer
          .add(new StoryItem(stmp, StoryItem.NARRATOR));
    }
    lower.retrieved();
    // Throw out old story items.
    while (retainerFragment.messageBuffer.size() > MAXMESSAGES) {
      retainerFragment.messageBuffer.remove(0);
    }
    messages.notifyDataSetChanged();
    // Scroll the storyboard to the latest item.
    if (prefs.getBoolean("smoothscrolling", true)) {
      // NOTE:smoothScroll() does not work properly if the theme defines
      // dividerheight > 0!
      storyBoard
          .smoothScrollToPosition(retainerFragment.messageBuffer.size() - 1);
    }
    else {
      storyBoard.setSelection(retainerFragment.messageBuffer.size() - 1);
    }
    inputFragment.reset();
    // Kinda dirty: assume that the lower window is the important one. If
    // anything got added to it, ensure that it is visible. Otherwise assume
    // that we are dealing with something like a menu and switch the display to
    // display the upperwindow
    flipView(showLower);
    figurePromptStyle();
    figureMenuState();
  }
/**
* Show the correct prompt.
*/
private void figurePromptStyle() {
if (retainerFragment.engine.getRunState() == ZMachine.STATE_WAIT_CHAR
&& inputFragment.isPrompt()) {
inputFragment.toggleInput();
}
if (retainerFragment.engine.getRunState() == ZMachine.STATE_WAIT_CMD
&& !inputFragment.isPrompt()) {
inputFragment.toggleInput();
}
}
/**
* Enable/Disable menu items
*/
@TargetApi(Build.VERSION_CODES.HONEYCOMB)
private void figureMenuState() {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) {
invalidateOptionsMenu();
}
}
/**
* Make either the storyboard or the statusscreen visible
*
* @param showstory
* true to swtich to the story view, false to swtich to the status
* screen. nothing happens if the desired view is already showing.
*/
private void flipView(boolean showstory) {
View now = windowFlipper.getCurrentView();
if (showstory) {
if (now != storyBoard) {
windowFlipper.setInAnimation(AnimationUtils.loadAnimation(this,
R.animator.slide_in_right));
windowFlipper.setOutAnimation(AnimationUtils.loadAnimation(this,
R.animator.slide_out_left));
windowFlipper.showPrevious();
}
}
else {
if (now == storyBoard) {
windowFlipper.setInAnimation(AnimationUtils.loadAnimation(this,
android.R.anim.slide_in_left));
windowFlipper.setOutAnimation(AnimationUtils.loadAnimation(this,
android.R.anim.slide_out_right));
windowFlipper.showPrevious();
}
}
}
@Override
public void onClick(DialogInterface dialog, int which) {
switch (pendingAction) {
case PENDING_RESTART: {
if (which == DialogInterface.BUTTON_POSITIVE) {
retainerFragment.messageBuffer.clear();
try {
retainerFragment.engine.restart();
retainerFragment.engine.run();
}
catch (GrueException e) {
// This should never happen
retainerFragment.postMortem = e;
finish();
}
publishResult();
}
break;
}
case PENDING_SAVE: {
String name = saveName.getEditableText().toString();
name = name.replace('/', '_');
if (name.length() > 0) {
ZState state = new ZState(retainerFragment.engine);
File f = new File(FileUtil.getSaveGameDir(storyFile), name);
state.disk_save(f.getPath(), retainerFragment.engine.pc);
Toast.makeText(this, R.string.msg_game_saved, Toast.LENGTH_SHORT)
.show();
}
}
case PENDING_RESTORE: {
if (which > -1) {
File file = FileUtil.listSaveGames(storyFile)[which];
ZState state = new ZState(retainerFragment.engine);
if (state.restore_from_disk(file.getPath())) {
statusWindow.setText(""); // Wrong, but the best we can do.
retainerFragment.messageBuffer.clear();
messages.notifyDataSetChanged();
retainerFragment.engine.restore(state);
figurePromptStyle();
figureMenuState();
Toast
.makeText(this, R.string.msg_game_restored, Toast.LENGTH_SHORT)
.show();
}
else {
Toast.makeText(this, R.string.msg_restore_failed,
Toast.LENGTH_SHORT).show();
}
}
}
}
pendingAction = PENDING_NONE;
}
@Override
public void onInit(int status) {
ttsReady = (status == TextToSpeech.SUCCESS);
if (ttsReady) {
// Was the game faster to load?
if (retainerFragment != null && retainerFragment.messageBuffer.size() > 0
&& prefs.getBoolean("narrator", false)) {
speaker.speak(retainerFragment.messageBuffer
.get(retainerFragment.messageBuffer.size() - 1).message.toString(),
TextToSpeech.QUEUE_FLUSH, null);
}
}
}
@Override
public void onSharedPreferenceChanged(SharedPreferences prefs, String key) {
String font = prefs.getString("font", "");
if (font.equals("default")) {
messages.setTypeface(Typeface.DEFAULT);
}
if (font.equals("sans")) {
messages.setTypeface(Typeface.SANS_SERIF);
}
if (font.equals("serif")) {
messages.setTypeface(Typeface.SERIF);
}
if (font.equals("monospace")) {
messages.setTypeface(Typeface.MONOSPACE);
}
if (font.equals("comicsans")) {
Typeface tf = Typeface.createFromAsset(getAssets(), "fonts/LDFComicSans.ttf");
messages.setTypeface(tf);
}
if (font.equals("ziggyzoe")) {
Typeface tf = Typeface.createFromAsset(getAssets(), "fonts/ziggyzoe.ttf");
messages.setTypeface(tf);
}
String fontSize = prefs.getString("fontsize", "");
TextView tmp = new TextView(this);
if (fontSize.equals("small")) {
tmp.setTextAppearance(this, android.R.style.TextAppearance_Small);
messages.setTextSize(tmp.getTextSize());
}
if (fontSize.equals("medium")) {
tmp.setTextAppearance(this, android.R.style.TextAppearance_Medium);
messages.setTextSize(tmp.getTextSize());
}
if (fontSize.equals("large")) {
tmp.setTextAppearance(this, android.R.style.TextAppearance_Large);
messages.setTextSize(tmp.getTextSize());
}
inputFragment.setAutoCollapse(prefs.getBoolean("autocollapse", false));
wordExtractor.setKeyclick(prefs.getBoolean("keyclick", false));
compassFragment.setKeyclick(prefs.getBoolean("keyclick", false));
}
@Override
public void toggleTextHighlight(String str) {
int tmp;
String txt = str.toLowerCase();
if (retainerFragment.highlighted.contains(txt)) {
retainerFragment.highlighted.remove(txt);
tmp = R.string.msg_unmarked;
}
else {
retainerFragment.highlighted.add(txt);
tmp = R.string.msg_marked;
}
Toast
.makeText(this, getResources().getString(tmp, txt), Toast.LENGTH_SHORT)
.show();
highlighted = retainerFragment.highlighted.toArray(new String[0]);
Iterator<StoryItem> it = retainerFragment.messageBuffer.listIterator();
while (it.hasNext()) {
highlight(it.next().message, highlighted);
}
messages.notifyDataSetChanged();
try {
JSONArray array = new JSONArray(retainerFragment.highlighted);
File f = new File(FileUtil.getDataDir(storyFile), HIGHLIGHTFILE);
PrintStream ps = new PrintStream(f);
ps.write(array.toString(2).getBytes());
ps.close();
}
catch (Exception e) {
Log.w(getClass().getName(), e);
}
}
@Override
public void utterText(CharSequence txt) {
if (ttsReady) {
if (speaker.isSpeaking() && txt == null) {
speaker.stop();
}
if (txt != null) {
speaker.speak(txt.toString(), TextToSpeech.QUEUE_FLUSH, null);
}
}
}
  /**
   * Add underlines to a text blob. Any existing underlines are removed
   * before new ones are added. Matching is whole-word only: a candidate
   * position is skipped when the character immediately before or after the
   * would-be match is a letter or digit.
   *
   * @param span
   *          the blob to modify (its lowercased characters are compared, so
   *          matching is effectively case-insensitive)
   * @param words
   *          the words to underline (all lowercase!)
   */
  private static void highlight(SpannableString span, String... words) {
    // Drop all previous underline spans before re-highlighting.
    UnderlineSpan old[] = span.getSpans(0, span.length(), UnderlineSpan.class);
    for (UnderlineSpan del : old) {
      span.removeSpan(del);
    }
    char spanChars[] = span.toString().toLowerCase().toCharArray();
    for (String word : words) {
      char[] wc = word.toCharArray();
      // Last start offset at which the word could still fit.
      int last = spanChars.length - wc.length + 1;
      for (int i = 0; i < last; i++) {
        // First check if there is a word-sized gap at spanchars[i] as we don't
        // want to highlight words that are actually just substrings (e.g.
        // "east" in "lEASTwise").
        if ((i > 0 && Character.isLetterOrDigit(spanChars[i - 1]))
            || (i + wc.length != spanChars.length && Character
                .isLetterOrDigit(spanChars[i + wc.length]))) {
          continue;
        }
        // Character-by-character compare of word wc against spanChars[i..].
        int a = i;
        int b = 0;
        while (b < wc.length) {
          if (spanChars[a] != wc[b]) {
            b = 0;
            break;
          }
          a++;
          b++;
        }
        if (b == wc.length) {
          // Full match: underline [i, a) and continue scanning after it.
          span.setSpan(new UnderlineSpan(), i, a, 0);
          i = a;
        }
      }
    }
  }
  /**
   * @return the game file this activity is currently playing
   */
  @Override
  public File getStory() {
    return storyFile;
  }
/**
* Show/hide the spinner indicating that we are currently loading a game
* @param b true to show the spinner.
*/
public void setLoadingVisibility(boolean b) {
try {
loading.setIndeterminate(b);
if (b) {
loading.setVisibility(View.VISIBLE);
}
else {
loading.setVisibility(View.GONE);
}
}
catch (Exception e) {
// TODO: Getting here is a bug! I haven't figured out how to trigger it yet,
// the User message on Google Play for the stack trace reads "crash on resume".
Log.w("TextFiction",e);
}
}
}
|
src/de/onyxbits/textfiction/GameActivity.java
|
package de.onyxbits.textfiction;
import java.io.File;
import java.io.PrintStream;
import java.lang.reflect.Field;
import java.util.Iterator;
import org.json.JSONArray;
import de.onyxbits.textfiction.input.CompassFragment;
import de.onyxbits.textfiction.input.InputFragment;
import de.onyxbits.textfiction.input.InputProcessor;
import de.onyxbits.textfiction.input.WordExtractor;
import de.onyxbits.textfiction.zengine.GrueException;
import de.onyxbits.textfiction.zengine.StyleRegion;
import de.onyxbits.textfiction.zengine.ZMachine;
import de.onyxbits.textfiction.zengine.ZState;
import de.onyxbits.textfiction.zengine.ZStatus;
import de.onyxbits.textfiction.zengine.ZWindow;
import android.net.Uri;
import android.os.Bundle;
import android.text.SpannableString;
import android.text.style.StyleSpan;
import android.text.style.TypefaceSpan;
import android.text.style.UnderlineSpan;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.view.Window;
import android.view.WindowManager;
import android.view.animation.AnimationUtils;
import android.widget.EditText;
import android.widget.ListView;
import android.widget.ProgressBar;
import android.widget.TextView;
import android.widget.Toast;
import android.widget.ViewFlipper;
import android.speech.tts.TextToSpeech;
import android.speech.tts.TextToSpeech.OnInitListener;
import android.support.v4.app.FragmentActivity;
import android.support.v4.app.FragmentManager;
import android.support.v4.app.NavUtils;
import android.annotation.SuppressLint;
import android.annotation.TargetApi;
import android.app.AlertDialog;
import android.content.DialogInterface;
import android.content.SharedPreferences;
import android.content.SharedPreferences.OnSharedPreferenceChangeListener;
import android.graphics.Typeface;
import android.os.Build;
import android.preference.PreferenceManager;
/**
* The activity where actual gameplay takes place.
*
* @author patrick
*
*/
public class GameActivity extends FragmentActivity implements
DialogInterface.OnClickListener, OnInitListener,
OnSharedPreferenceChangeListener, InputProcessor {
/**
* Name of the file we keep our highlights in
*/
public static final String HIGHLIGHTFILE = "highlights.json";
/**
* This activity must be started through an intent and be passed the filename
* of the game via this extra.
*/
public static final String LOADFILE = "loadfile";
/**
* Optionally, the name of the game may be passed (if none is passed, the
* filename is used as a title).
*/
public static final String GAMETITLE = "gametitle";
/**
* How many items to keep in the messagebuffer at most. Note: this should be
* an odd number so the log starts with a narrator entry.
*/
public static final int MAXMESSAGES = 81;
private static final int PENDING_NONE = 0;
private static final int PENDING_RESTART = 1;
private static final int PENDING_RESTORE = 2;
private static final int PENDING_SAVE = 3;
/**
* Displays the message log
*/
private ListView storyBoard;
/**
* Adapter for the story list
*/
private StoryAdapter messages;
/**
* The "upper window" of the z-machine containing the status part
*/
private TextView statusWindow;
/**
* Holds stuff that needs to survive config changes (e.g. screen rotation).
*/
private RetainerFragment retainerFragment;
/**
* The input prompt
*/
private InputFragment inputFragment;
/**
* On screen compass
*/
private CompassFragment compassFragment;
/**
* Contains story- and status screen.
*/
private ViewFlipper windowFlipper;
/**
* For entering a filename to save the current game as.
*/
private EditText saveName;
/**
* The game playing in this activity
*/
private File storyFile;
/**
* State variable for when we are showing a "confirm" dialog.
*/
private int pendingAction = PENDING_NONE;
/**
* Words we are highligting in the story
*/
private String[] highlighted;
private SharedPreferences prefs;
private TextToSpeech speaker;
private boolean ttsReady;
private WordExtractor wordExtractor;
private ProgressBar loading;
  /**
   * Builds the game screen: applies the user-selected theme, restores state
   * from the retainer fragment after a configuration change, loads the
   * per-game highlight word list and wires up all views.
   */
  @Override
  protected void onCreate(Bundle savedInstanceState) {
    prefs = PreferenceManager.getDefaultSharedPreferences(this);
    // Set the custom theme: the preference stores the *name* of an entry in
    // R.style, resolved via reflection; falls back to the manifest theme on
    // any failure.
    try {
      Field field = R.style.class.getField(prefs.getString("theme", ""));
      setTheme(field.getInt(null));
    }
    catch (Exception e) {
      Log.w(getClass().getName(), e);
    }
    prefs.registerOnSharedPreferenceChangeListener(this);
    super.onCreate(savedInstanceState);
    LayoutInflater infl = getLayoutInflater();
    requestWindowFeature(Window.FEATURE_PROGRESS);
    // The caller must pass the game file path via the LOADFILE extra.
    storyFile = new File(getIntent().getStringExtra(LOADFILE));
    View content = infl.inflate(R.layout.activity_game, null);
    setContentView(content);
    // Check if this is a genuine start or if we are restarting because the
    // device got rotated.
    FragmentManager fm = getSupportFragmentManager();
    inputFragment = (InputFragment) fm.findFragmentById(R.id.fragment_input);
    compassFragment = (CompassFragment) fm
        .findFragmentById(R.id.fragment_compass);
    retainerFragment = (RetainerFragment) fm.findFragmentByTag("retainer");
    if (retainerFragment == null) {
      // First start
      retainerFragment = new RetainerFragment();
      fm.beginTransaction().add(retainerFragment, "retainer").commit();
    }
    else {
      // Likely a restart because of the screen being rotated. This may have
      // happened while loading, so don't figure if we don't have an engine.
      if (retainerFragment.engine != null) {
        figurePromptStyle();
        figureMenuState();
      }
    }
    // Load the highlight file (a JSON string array stored per game).
    try {
      File file = new File(FileUtil.getDataDir(storyFile), HIGHLIGHTFILE);
      JSONArray js = new JSONArray(FileUtil.getContents(file));
      for (int i = 0; i < js.length(); i++) {
        retainerFragment.highlighted.add(js.getString(i));
      }
    }
    catch (Exception e) {
      // No big deal. Probably the first time this game runs -> use defaults
      String[] ini = getResources().getStringArray(R.array.initial_highlights);
      for (String i : ini) {
        retainerFragment.highlighted.add(i);
      }
    }
    highlighted = retainerFragment.highlighted.toArray(new String[0]);
    // Window title: use the optional GAMETITLE extra, fall back to the
    // game's filename.
    String title = getIntent().getStringExtra(GAMETITLE);
    if (title == null) {
      title = storyFile.getName();
    }
    setTitle(title);
    setupActionBar(title);
    // Wire up storyboard list, input plumbing and the status flipper.
    storyBoard = (ListView) content.findViewById(R.id.storyboard);
    wordExtractor = new WordExtractor(this);
    wordExtractor.setInputFragment(inputFragment);
    wordExtractor.setInputProcessor(this);
    messages = new StoryAdapter(this, 0, retainerFragment.messageBuffer,
        wordExtractor);
    storyBoard.setAdapter(messages);
    windowFlipper = (ViewFlipper) content.findViewById(R.id.window_flipper);
    statusWindow = (TextView) content.findViewById(R.id.status);
    loading = (ProgressBar) findViewById(R.id.gameloading);
    statusWindow.setText(retainerFragment.upperWindow);
    speaker = new TextToSpeech(this, this);
    // Apply current preference values (font, size, keyclick, ...) once.
    onSharedPreferenceChanged(prefs, "");
  }
@Override
public void onPause() {
if (ttsReady && speaker.isSpeaking()) {
speaker.stop();
}
super.onPause();
}
@Override
public void onDestroy() {
prefs.unregisterOnSharedPreferenceChangeListener(this);
if (ttsReady) {
speaker.shutdown();
}
if (retainerFragment == null || retainerFragment.engine == null) {
if (retainerFragment.postMortem != null) {
// Let's not go into details here. The user won't understand them
// anyways.
Toast
.makeText(this, R.string.msg_corrupt_game_file, Toast.LENGTH_SHORT)
.show();
}
super.onDestroy();
return;
}
if (retainerFragment.postMortem != null) {
// Let's not go into details here. The user won't understand them anyways.
Toast.makeText(this, R.string.msg_corrupt_game_file, Toast.LENGTH_SHORT)
.show();
super.onDestroy();
return;
}
if (retainerFragment.engine.getRunState() == ZMachine.STATE_WAIT_CMD) {
ZState state = new ZState(retainerFragment.engine);
File f = new File(FileUtil.getSaveGameDir(storyFile),
getString(R.string.autosavename));
state.disk_save(f.getPath(), retainerFragment.engine.pc);
}
else {
Toast.makeText(this, R.string.mg_not_at_a_commandprompt,
Toast.LENGTH_LONG).show();
}
super.onDestroy();
}
  /**
   * Set up the {@link android.app.ActionBar}, if the API is available.
   * Currently the bar is simply hidden and the window title reset, so the
   * passed game name goes unused; the commented lines below are the
   * previous up-navigation/subtitle behaviour kept for reference.
   *
   * @param subTitle
   *          should be the game name (currently unused -- see above)
   */
  @TargetApi(Build.VERSION_CODES.HONEYCOMB)
  private void setupActionBar(String subTitle) {
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) {
      //getActionBar().setDisplayHomeAsUpEnabled(true);
      //getActionBar().setSubtitle(subTitle);
      getActionBar().hide();
      setTitle(R.string.app_name);
    }
  }
  /**
   * Populates the options menu from the game menu resource.
   */
  @Override
  public boolean onCreateOptionsMenu(Menu menu) {
    // Inflate the menu; this adds items to the action bar if it is present.
    super.onCreateOptionsMenu(menu);
    getMenuInflater().inflate(R.menu.game, menu);
    return true;
  }
@Override
public boolean onPrepareOptionsMenu(Menu menu) {
    super.onPrepareOptionsMenu(menu);
    // The engine is "resting" when it exists and is neither executing nor
    // still initializing (De Morgan of the original negated disjunction).
    boolean rest = retainerFragment != null
            && retainerFragment.engine != null
            && retainerFragment.engine.getRunState() != ZMachine.STATE_RUNNING
            && retainerFragment.engine.getRunState() != ZMachine.STATE_INIT;
    boolean atPrompt = rest && inputFragment.isPrompt();
    // Save/restore additionally require a command prompt.
    menu.findItem(R.id.mi_save).setEnabled(atPrompt);
    menu.findItem(R.id.mi_restore).setEnabled(atPrompt);
    menu.findItem(R.id.mi_restart).setEnabled(rest);
    return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
    switch (item.getItemId()) {
        case R.id.mi_flip_view: {
            // Toggle between the storyboard and the status screen.
            flipView(windowFlipper.getCurrentView() != storyBoard);
            return true;
        }
        case R.id.mi_save: {
            // Ask for a save name; the actual save happens in onClick().
            pendingAction = PENDING_SAVE;
            saveName = new EditText(this);
            saveName.setSingleLine(true);
            AlertDialog.Builder builder = new AlertDialog.Builder(this);
            builder.setTitle(R.string.title_save_game)
                    .setPositiveButton(android.R.string.ok, this).setView(saveName)
                    .show();
            return true;
        }
        case R.id.mi_restore: {
            // Offer the list of existing save games; restore happens in onClick().
            String[] sg = FileUtil.listSaveName(storyFile);
            if (sg.length > 0) {
                pendingAction = PENDING_RESTORE;
                AlertDialog.Builder builder = new AlertDialog.Builder(this);
                builder.setTitle(R.string.title_restore_game).setItems(sg, this)
                        .show();
            }
            else {
                Toast.makeText(this, R.string.msg_no_savegames, Toast.LENGTH_SHORT)
                        .show();
            }
            return true;
        }
        case R.id.mi_clear_log: {
            retainerFragment.messageBuffer.clear();
            messages.notifyDataSetChanged();
            return true;
        }
        case R.id.mi_help: {
            MainActivity.openUri(this, Uri.parse(getString(R.string.url_help)));
            return true;
        }
        case R.id.mi_restart: {
            // Restart is destructive, so confirm first; handled in onClick().
            pendingAction = PENDING_RESTART;
            AlertDialog.Builder builder = new AlertDialog.Builder(this);
            builder.setTitle(R.string.title_please_confirm)
                    .setMessage(R.string.msg_really_restart)
                    .setPositiveButton(android.R.string.yes, this)
                    .setNegativeButton(android.R.string.no, this).show();
            return true;
        }
        case android.R.id.home:
            // This ID represents the Home or Up button. In the case of this
            // activity, the Up button is shown. Use NavUtils to allow users
            // to navigate up one level in the application structure. For
            // more details, see the Navigation pattern on Android Design:
            //
            // http://developer.android.com/design/patterns/navigation.html#up-vs-back
            //
            NavUtils.navigateUpFromSameTask(this);
            return true;
    }
    return super.onOptionsItemSelected(item);
}
@Override
public void executeCommand(char[] inputBuffer) {
    // Feed player input to the interpreter and run it until it blocks again.
    // Ignored while the engine is still executing.
    ZMachine engine = retainerFragment.engine;
    if (engine != null && engine.getRunState() != ZMachine.STATE_RUNNING) {
        retainerFragment.engine.fillInputBuffer(inputBuffer);
        if (retainerFragment.engine.getRunState() != ZMachine.STATE_WAIT_CHAR) {
            // Echo the typed command into the transcript (skipped for
            // single-keypress input, which would just be noise).
            String tmp = new String(inputBuffer).replaceAll("\n", "").trim();
            SpannableString ss = new SpannableString(tmp);
            retainerFragment.messageBuffer.add(new StoryItem(ss, StoryItem.MYSELF));
        }
        try {
            retainerFragment.engine.run();
            publishResult();
            if (retainerFragment.engine.saveCalled || retainerFragment.engine.restoreCalled) {
                // This is a really ugly hack to let the user know that the save/restore commands
                // don't work
                Toast.makeText(this,R.string.err_sr_deprecated,Toast.LENGTH_LONG).show();
                retainerFragment.engine.saveCalled = false;
                retainerFragment.engine.restoreCalled = false;
            }
        }
        catch (GrueException e) {
            // Fatal interpreter failure: stash it for onDestroy() and bail.
            retainerFragment.postMortem = e;
            Log.w(getClass().getName(), e);
            finish();
        }
    }
}
/**
 * Callback: publish results after the engine has run. Copies the engine's
 * upper window (or Z3 status line) into the status view, appends styled
 * lower-window output to the storyboard, optionally narrates it via TTS,
 * trims the message buffer and updates the prompt/menu state.
 */
public void publishResult() {
    ZWindow upper = retainerFragment.engine.window[1];
    ZWindow lower = retainerFragment.engine.window[0];
    ZStatus status = retainerFragment.engine.status_line;
    String tmp = "";
    boolean showLower = false;
    // Evaluate game status
    if (status != null) {
        // Z3 game -> copy the status bar object into the upper window.
        retainerFragment.engine.update_status_line();
        retainerFragment.upperWindow = status.toString();
        statusWindow.setText(retainerFragment.upperWindow);
    }
    else {
        if (upper.maxCursor > 0) {
            // The normal, "status bar" upper window.
            tmp = upper.stringyfy(upper.startWindow, upper.maxCursor);
        }
        else {
            tmp = "";
        }
        statusWindow.setText(tmp);
        retainerFragment.upperWindow = tmp;
    }
    upper.retrieved();
    // Evaluate story progress
    if (lower.cursor > 0) {
        showLower = true;
        tmp = new String(lower.frameBuffer, 0, lower.noPrompt());
        if (ttsReady && prefs.getBoolean("narrator", false)) {
            speaker.speak(tmp, TextToSpeech.QUEUE_FLUSH, null);
        }
        SpannableString stmp = new SpannableString(tmp);
        // Translate the engine's style regions into Android text spans.
        StyleRegion reg = lower.regions;
        if (reg != null) {
            while (reg != null) {
                if (reg.next == null) {
                    // The printer does not "close" the last style since it doesn't know
                    // when the last character is printed.
                    reg.end = tmp.length() - 1;
                }
                // Did the game style the prompt (which we cut away)?
                reg.end = Math.min(reg.end, tmp.length() - 1);
                switch (reg.style) {
                    case ZWindow.BOLD: {
                        stmp.setSpan(new StyleSpan(Typeface.BOLD), reg.start, reg.end, 0);
                        break;
                    }
                    case ZWindow.ITALIC: {
                        stmp.setSpan(new StyleSpan(Typeface.ITALIC), reg.start, reg.end,
                                0);
                        break;
                    }
                    case ZWindow.FIXED: {
                        stmp.setSpan(new TypefaceSpan("monospace"), reg.start, reg.end, 0);
                        break;
                    }
                }
                reg = reg.next;
            }
        }
        highlight(stmp, highlighted);
        retainerFragment.messageBuffer
                .add(new StoryItem(stmp, StoryItem.NARRATOR));
    }
    lower.retrieved();
    // Throw out old story items.
    while (retainerFragment.messageBuffer.size() > MAXMESSAGES) {
        retainerFragment.messageBuffer.remove(0);
    }
    messages.notifyDataSetChanged();
    // Scroll the storyboard to the latest item.
    if (prefs.getBoolean("smoothscrolling", true)) {
        // NOTE:smoothScroll() does not work properly if the theme defines
        // dividerheight > 0!
        storyBoard
                .smoothScrollToPosition(retainerFragment.messageBuffer.size() - 1);
    }
    else {
        storyBoard.setSelection(retainerFragment.messageBuffer.size() - 1);
    }
    inputFragment.reset();
    // Kinda dirty: assume that the lower window is the important one. If
    // anything got added to it, ensure that it is visible. Otherwise assume
    // that we are dealing with something like a menu and switch the display to
    // display the upperwindow
    flipView(showLower);
    figurePromptStyle();
    figureMenuState();
}
/**
 * Show the correct prompt: single-key input pad while the engine waits for a
 * character, full command prompt while it waits for a command line.
 */
private void figurePromptStyle() {
    int runState = retainerFragment.engine.getRunState();
    boolean prompt = inputFragment.isPrompt();
    // The two states are mutually exclusive, so at most one toggle fires.
    if (runState == ZMachine.STATE_WAIT_CHAR && prompt) {
        inputFragment.toggleInput();
    }
    if (runState == ZMachine.STATE_WAIT_CMD && !prompt) {
        inputFragment.toggleInput();
    }
}
/**
 * Enable/Disable menu items by asking the system to call
 * onPrepareOptionsMenu() again (only possible on Honeycomb and later).
 */
@TargetApi(Build.VERSION_CODES.HONEYCOMB)
private void figureMenuState() {
    if (Build.VERSION.SDK_INT < Build.VERSION_CODES.HONEYCOMB) {
        return;
    }
    invalidateOptionsMenu();
}
/**
 * Make either the storyboard or the statusscreen visible
 *
 * @param showstory
 *          true to swtich to the story view, false to swtich to the status
 *          screen. nothing happens if the desired view is already showing.
 */
private void flipView(boolean showstory) {
    View now = windowFlipper.getCurrentView();
    // NOTE(review): both branches call showPrevious(); with a two-child
    // ViewFlipper that is equivalent to showNext(), but confirm this is
    // intentional and not a copy/paste slip.
    if (showstory) {
        if (now != storyBoard) {
            windowFlipper.setInAnimation(AnimationUtils.loadAnimation(this,
                    R.animator.slide_in_right));
            windowFlipper.setOutAnimation(AnimationUtils.loadAnimation(this,
                    R.animator.slide_out_left));
            windowFlipper.showPrevious();
        }
    }
    else {
        if (now == storyBoard) {
            // Opposite slide direction when going back to the status screen.
            windowFlipper.setInAnimation(AnimationUtils.loadAnimation(this,
                    android.R.anim.slide_in_left));
            windowFlipper.setOutAnimation(AnimationUtils.loadAnimation(this,
                    android.R.anim.slide_out_right));
            windowFlipper.showPrevious();
        }
    }
}
/**
 * Dialog callback shared by the restart/save/restore dialogs; pendingAction
 * records which dialog is currently showing.
 */
@Override
public void onClick(DialogInterface dialog, int which) {
    switch (pendingAction) {
        case PENDING_RESTART: {
            if (which == DialogInterface.BUTTON_POSITIVE) {
                retainerFragment.messageBuffer.clear();
                try {
                    retainerFragment.engine.restart();
                    retainerFragment.engine.run();
                }
                catch (GrueException e) {
                    // This should never happen
                    retainerFragment.postMortem = e;
                    finish();
                }
                publishResult();
            }
            break;
        }
        case PENDING_SAVE: {
            String name = saveName.getEditableText().toString();
            // '/' would escape the save game directory.
            name = name.replace('/', '_');
            if (name.length() > 0) {
                ZState state = new ZState(retainerFragment.engine);
                File f = new File(FileUtil.getSaveGameDir(storyFile), name);
                state.disk_save(f.getPath(), retainerFragment.engine.pc);
                Toast.makeText(this, R.string.msg_game_saved, Toast.LENGTH_SHORT)
                        .show();
            }
            // FIX: was missing, so PENDING_SAVE fell through into
            // PENDING_RESTORE. Harmless today only because BUTTON_POSITIVE is
            // -1 and the restore guard requires which > -1.
            break;
        }
        case PENDING_RESTORE: {
            // For a list dialog, 'which' is the selected item index (>= 0).
            if (which > -1) {
                File file = FileUtil.listSaveGames(storyFile)[which];
                ZState state = new ZState(retainerFragment.engine);
                if (state.restore_from_disk(file.getPath())) {
                    statusWindow.setText(""); // Wrong, but the best we can do.
                    retainerFragment.messageBuffer.clear();
                    messages.notifyDataSetChanged();
                    retainerFragment.engine.restore(state);
                    figurePromptStyle();
                    figureMenuState();
                    Toast
                            .makeText(this, R.string.msg_game_restored, Toast.LENGTH_SHORT)
                            .show();
                }
                else {
                    Toast.makeText(this, R.string.msg_restore_failed,
                            Toast.LENGTH_SHORT).show();
                }
            }
            break;
        }
    }
    pendingAction = PENDING_NONE;
}
/**
 * TextToSpeech init callback: if the engine finished loading before TTS came
 * up, speak the most recent story item now (narrator preference permitting).
 */
@Override
public void onInit(int status) {
    ttsReady = (status == TextToSpeech.SUCCESS);
    if (!ttsReady) {
        return;
    }
    // Was the game faster to load?
    if (retainerFragment == null || retainerFragment.messageBuffer.size() == 0) {
        return;
    }
    if (!prefs.getBoolean("narrator", false)) {
        return;
    }
    int last = retainerFragment.messageBuffer.size() - 1;
    String text = retainerFragment.messageBuffer.get(last).message.toString();
    speaker.speak(text, TextToSpeech.QUEUE_FLUSH, null);
}
/**
 * Applies display/audio preferences. Also invoked directly from onCreate
 * with an empty key to apply every setting at once, so all preferences are
 * (re)read unconditionally.
 */
@Override
public void onSharedPreferenceChanged(SharedPreferences prefs, String key) {
    String font = prefs.getString("font", "");
    if (font.equals("default")) {
        messages.setTypeface(Typeface.DEFAULT);
    }
    else if (font.equals("sans")) {
        messages.setTypeface(Typeface.SANS_SERIF);
    }
    else if (font.equals("serif")) {
        messages.setTypeface(Typeface.SERIF);
    }
    else if (font.equals("monospace")) {
        messages.setTypeface(Typeface.MONOSPACE);
    }
    else if (font.equals("comicsans")) {
        messages.setTypeface(Typeface.createFromAsset(getAssets(), "fonts/LDFComicSans.ttf"));
    }
    else if (font.equals("ziggyzoe")) {
        messages.setTypeface(Typeface.createFromAsset(getAssets(), "fonts/ziggyzoe.ttf"));
    }
    // Resolve the abstract size names via a throwaway TextView styled with
    // the corresponding system text appearance.
    String fontSize = prefs.getString("fontsize", "");
    TextView sizeProbe = new TextView(this);
    if (fontSize.equals("small")) {
        sizeProbe.setTextAppearance(this, android.R.style.TextAppearance_Small);
        messages.setTextSize(sizeProbe.getTextSize());
    }
    else if (fontSize.equals("medium")) {
        sizeProbe.setTextAppearance(this, android.R.style.TextAppearance_Medium);
        messages.setTextSize(sizeProbe.getTextSize());
    }
    else if (fontSize.equals("large")) {
        sizeProbe.setTextAppearance(this, android.R.style.TextAppearance_Large);
        messages.setTextSize(sizeProbe.getTextSize());
    }
    boolean keyclick = prefs.getBoolean("keyclick", false);
    inputFragment.setAutoCollapse(prefs.getBoolean("autocollapse", false));
    wordExtractor.setKeyclick(keyclick);
    compassFragment.setKeyclick(keyclick);
}
/**
 * Toggles highlighting of the given word, re-applies underlines to every
 * story item and persists the highlight list as JSON.
 */
@Override
public void toggleTextHighlight(String str) {
    int tmp;
    String txt = str.toLowerCase();
    if (retainerFragment.highlighted.contains(txt)) {
        retainerFragment.highlighted.remove(txt);
        tmp = R.string.msg_unmarked;
    }
    else {
        retainerFragment.highlighted.add(txt);
        tmp = R.string.msg_marked;
    }
    Toast
            .makeText(this, getResources().getString(tmp, txt), Toast.LENGTH_SHORT)
            .show();
    highlighted = retainerFragment.highlighted.toArray(new String[0]);
    Iterator<StoryItem> it = retainerFragment.messageBuffer.listIterator();
    while (it.hasNext()) {
        highlight(it.next().message, highlighted);
    }
    messages.notifyDataSetChanged();
    // Persist the highlight list; best effort, failures are only logged.
    PrintStream ps = null;
    try {
        JSONArray array = new JSONArray(retainerFragment.highlighted);
        File f = new File(FileUtil.getDataDir(storyFile), HIGHLIGHTFILE);
        ps = new PrintStream(f);
        ps.write(array.toString(2).getBytes());
    }
    catch (Exception e) {
        Log.w(getClass().getName(), e);
    }
    finally {
        // FIX: close in finally — previously the stream leaked when
        // toString(2)/write threw after the file had been opened.
        if (ps != null) {
            ps.close();
        }
    }
}
/**
 * Speak the given text via TTS; a null argument cancels any utterance that
 * is currently playing. No-op while TTS is unavailable.
 */
@Override
public void utterText(CharSequence txt) {
    if (!ttsReady) {
        return;
    }
    if (txt == null) {
        if (speaker.isSpeaking()) {
            speaker.stop();
        }
        return;
    }
    speaker.speak(txt.toString(), TextToSpeech.QUEUE_FLUSH, null);
}
/**
 * Add underlines to a text blob. Any existing underlines are removed before
 * new ones are added. Matching is case-insensitive and whole-word only.
 *
 * @param span
 *          the blob to modify
 * @param words
 *          the words to underline (all lowercase!)
 */
private static void highlight(SpannableString span, String... words) {
    // Strip all previous underlines so toggling a word off works.
    UnderlineSpan old[] = span.getSpans(0, span.length(), UnderlineSpan.class);
    for (UnderlineSpan del : old) {
        span.removeSpan(del);
    }
    char spanChars[] = span.toString().toLowerCase().toCharArray();
    for (String word : words) {
        char[] wc = word.toCharArray();
        // Last start index at which the word could still fit.
        int last = spanChars.length - wc.length + 1;
        for (int i = 0; i < last; i++) {
            // First check if there is a word-sized gap at spanchars[i] as we don't
            // want to highlight words that are actually just substrings (e.g.
            // "east" in "lEASTwise").
            if ((i > 0 && Character.isLetterOrDigit(spanChars[i - 1]))
                    || (i + wc.length != spanChars.length && Character
                            .isLetterOrDigit(spanChars[i + wc.length]))) {
                continue;
            }
            // Character-by-character comparison; b reaches wc.length only on
            // a full match (it is reset to 0 on mismatch).
            int a = i;
            int b = 0;
            while (b < wc.length) {
                if (spanChars[a] != wc[b]) {
                    b = 0;
                    break;
                }
                a++;
                b++;
            }
            if (b == wc.length) {
                span.setSpan(new UnderlineSpan(), i, a, 0);
                i = a; // skip past the match; no overlapping underlines
            }
        }
    }
}
/**
 * Enter or leave fullscreen mode, hiding/showing the action bar where the
 * API level allows it.
 *
 * @param full true for fullscreen, false for the normal layout
 */
@SuppressLint("NewApi")
public void setFullScreen(boolean full) {
    Window window = getWindow();
    int flag = WindowManager.LayoutParams.FLAG_FULLSCREEN;
    if (full) {
        window.addFlags(flag);
    }
    else {
        window.clearFlags(flag);
    }
    if (Build.VERSION.SDK_INT >= 11) {
        if (full) {
            getActionBar().hide();
        }
        else {
            getActionBar().show();
        }
    }
}
/**
 * Back leaves fullscreen first; only a second press actually navigates back.
 */
@Override
public void onBackPressed() {
    int flags = getWindow().getAttributes().flags;
    boolean fullscreen = (flags & WindowManager.LayoutParams.FLAG_FULLSCREEN) != 0;
    if (fullscreen) {
        setFullScreen(false);
    }
    else {
        super.onBackPressed();
    }
}
@Override
public File getStory() {
    // The story file backing this session (presumably assigned during
    // onCreate, which is not fully visible here — confirm).
    return storyFile;
}
/**
 * Show/hide the spinner indicating that we are currently loading a game
 * @param b true to show the spinner.
 */
public void setLoadingVisibility(boolean b) {
    try {
        loading.setIndeterminate(b);
        loading.setVisibility(b ? View.VISIBLE : View.GONE);
    }
    catch (Exception e) {
        // TODO: Getting here is a bug! I haven't figured out how to trigger it yet,
        // the User message on Google Play for the stack trace reads "crash on resume".
        Log.w("TextFiction",e);
    }
}
}
|
Remove the notification bar while playing a story.
|
src/de/onyxbits/textfiction/GameActivity.java
|
Remove the notification bar while playing a story.
|
|
Java
|
apache-2.0
|
c90ad9828eff6b581f7b6b0523a6ecc6a03e3fdb
| 0
|
albertocerqueira/java-utils
|
package com.java.utils;
import com.java.IConverter;
import com.java.exception.ConverterException;
import com.java.regex.RegexValidation;
/**
 * Stateless helper for parsing, testing and rounding double values.
 * Thread-safe: holds no mutable state.
 */
public class DoubleUtils implements IConverter<Double> {

    private static DoubleUtils instance = new DoubleUtils();

    private DoubleUtils() {}

    public static DoubleUtils getInstance() {
        return instance;
    }

    /**
     * Converts the given String into a Double. Accepts both the plain form
     * ("1234.56") and the localized form with thousands dots and a decimal
     * comma ("1.234,56").
     *
     * @param value (String) value to be converted into Double
     * @return (Double) the parsed value, or null for null/empty input
     * @throws ConverterException If an error occurs in the conversion.
     */
    public Double convert(String value) throws ConverterException {
        Double newDouble = null;
        if (value != null && !value.equals("")) {
            try {
                // Double.valueOf instead of the deprecated Double(String) ctor.
                newDouble = Double.valueOf(value);
            } catch (Exception e) {
                try {
                    // Retry assuming "1.234,56" style input.
                    newDouble = Double.valueOf(value.replace(".", "").replace(",", "."));
                } catch (Exception f) {
                    throw new ConverterException(this.getClass(), f);
                }
            }
        }
        return newDouble;
    }

    /**
     * Checks if the String contains only numbers (floats allowed).
     *
     * @param value (String) string to check
     * @return (boolean) true if the string has only number, and false otherwise
     * @see com.java.regex.RegexValidation.OnlyNumbers()
     */
    public static boolean isOnlyNumber(String value) {
        boolean ret = false;
        if (!StringUtils.isBlank(value)) {
            ret = value.matches(RegexValidation.OnlyNumbersFloats.expression());
        }
        return ret;
    }

    /**
     * Check if the string is empty. A purely numeric string that parses to a
     * value <= 0 also counts as blank.
     *
     * Examples:
     * <ul>
     * <li><code>" " == false</code></li>
     * <li><code>"" == true</code></li>
     * <li><code>"0" == true</code></li>
     * <li><code>"0.1" == false</code></li>
     * <li><code>"1" == false</code></li>
     * </ul>
     *
     * @param value (String) string to check
     * @return (boolean) true to empty string and false otherwise
     * @see com.java.utils.StringUtils.isBlank(String)
     */
    public static boolean isBlank(String value) {
        boolean isBlank = StringUtils.isBlank(value);
        if (!isBlank) {
            if (isOnlyNumber(value)) {
                isBlank = isZero(value);
            }
        }
        return isBlank;
    }

    /**
     * @param value string to parse and test
     * @return true if the value parses and is <= 0; false for unparseable
     *         (null/empty) input
     */
    public static boolean isZero(String value) {
        Double d = DoubleUtils.getInstance().convert(value);
        // FIX: convert() returns null for null/empty input; the old code
        // unboxed that null and threw a NullPointerException.
        if (d == null) {
            return false;
        }
        return Double.compare(d.doubleValue(), 0.0) <= 0;
    }

    /** @return true if the value is <= 0. */
    public static boolean isZero(double value) {
        return Double.compare(value, 0.0) <= 0;
    }

    /**
     * Rounds a value to a fixed number of decimal places.
     *
     * @param round value to round
     * @param decimal number of decimal places to keep
     * @param ceilOrFloor 0 to round towards positive infinity (ceil), any
     *          other value to round towards negative infinity (floor)
     * @return the rounded value
     */
    public static double round(double round, int decimal, int ceilOrFloor) {
        double factor = Math.pow(10, decimal);
        round *= factor;
        if (ceilOrFloor == 0) {
            round = Math.ceil(round);
        } else {
            round = Math.floor(round);
        }
        return round / factor;
    }
}
|
src/main/java/com/java/utils/DoubleUtils.java
|
package com.java.utils;
import com.java.IConverter;
import com.java.exception.ConverterException;
import com.java.regex.RegexValidation;
/**
 * Stateless helper for parsing and testing double values.
 */
public class DoubleUtils implements IConverter<Double> {
    // Eagerly-created singleton; the class holds no mutable state.
    private static DoubleUtils instance = new DoubleUtils();
    private DoubleUtils() {}
    public static DoubleUtils getInstance() {
        return instance;
    }
    /**
     * Converts the given String into a Double. Accepts both the plain form
     * ("1234.56") and, as a fallback, the localized form with thousands dots
     * and a decimal comma ("1.234,56").
     *
     * @param value (String) value to be converted into Double
     * @return (Double) the parsed value, or null for null/empty input
     * @throws ConverterException If an error occurs in the conversion.
     */
    public Double convert(String value) throws ConverterException {
        Double newDouble = null;
        if (value != null && !value.equals("")) {
            try {
                newDouble = new Double(value);
            } catch(Exception e) {
                try {
                    // Retry assuming "1.234,56" style input.
                    newDouble = new Double(value.replace(".", "").replace(",", "."));
                } catch(Exception f) {
                    throw new ConverterException(this.getClass(), f);
                }
            }
        }
        return newDouble;
    }
    /**
     * Checks if the String contains only numbers (floats allowed)
     *
     * @param value (String) string to check
     * @return (boolean) true if the string has only number, and false otherwise
     * @see com.java.regex.RegexValidation.OnlyNumbers()
     */
    public static boolean isOnlyNumber(String value) {
        boolean ret = false;
        if (!StringUtils.isBlank(value)) {
            ret = value.matches(RegexValidation.OnlyNumbersFloats.expression());
        }
        return ret;
    }
    /**
     * Check if the string is empty. A purely numeric string that parses to a
     * value <= 0 also counts as blank.
     *
     * Examples:
     * <ul>
     * <li><code>" " == false</code></li>
     * <li><code>"" == true</code></li>
     * <li><code>"0" == true</code></li>
     * <li><code>"0.1" == false</code></li>
     * <li><code>"0.01" == false</code></li>
     * <li><code>"0.001" == false</code></li>
     * <li><code>"1" == false</code></li>
     * </ul>
     *
     * @param value (String) string to check
     * @return (boolean) true to empty string and false otherwise
     * @see com.java.utils.StringUtils.isBlank(String)
     */
    public static boolean isBlank(String value) {
        boolean isBlank = StringUtils.isBlank(value);
        if (!isBlank) {
            if (isOnlyNumber(value)) {
                isBlank = isZero(value);
            }
        }
        return isBlank;
    }
    // NOTE(review): convert() returns null for null/empty input, so this
    // unboxing can throw a NullPointerException when called directly with
    // such a value — confirm callers always pass numeric strings.
    public static boolean isZero(String value) {
        Double d = DoubleUtils.getInstance().convert(value);
        return Double.compare(d, Double.valueOf(0.0)) <= 0;
    }
    // True if the value is <= 0 (not just exactly zero).
    public static boolean isZero(double value) {
        return Double.compare(value, Double.valueOf(0.0)) <= 0;
    }
}
|
Adicionando novo metodo de arredondamento.
|
src/main/java/com/java/utils/DoubleUtils.java
|
Adicionando novo metodo de arredondamento.
|
|
Java
|
bsd-2-clause
|
a823104e73754ea776320d9ed098b92035bdbd28
| 0
|
biovoxxel/imagej,TehSAUCE/imagej,biovoxxel/imagej,biovoxxel/imagej,TehSAUCE/imagej,TehSAUCE/imagej
|
package imagej.ij1bridge;
import java.awt.Rectangle;
import java.awt.image.ColorModel;
import java.util.ArrayList;
import ij.ImageStack;
import ij.process.ImageProcessor;
import imagej.Dimensions;
import imagej.dataset.Dataset;
import imagej.process.Index;
// TODO - for performance could use planeRef's size/cache methods rather than querying dataset all the time
/** BridgeStack will take data from a PlanarDataset and point into it so that ij1 can use it as an ImageStack and make changes to it */
// TODO - for performance could use planeRef's size/cache methods rather than querying dataset all the time
/** BridgeStack will take data from a PlanarDataset and point into it so that ij1 can use it as an ImageStack and make changes to it */
public class BridgeStack extends ImageStack
{
    // **** base interface instance variables
    private Dataset dataset;               // backing imglib dataset
    private ArrayList<Object> planeRefs;   // one pixel-array reference per plane, kept in sync with dataset
    private int[] planeDims;               // {width, height} of every plane
    private ProcessorFactory processorFactory;
    private Object[] planeRefCache;        // lazily rebuilt array view of planeRefs, see getImageArray()

    // *** compatibility instance variables (mirror ij.ImageStack state)
    private final String outOfRange = "stack index out of range: ";
    private double min = Double.MAX_VALUE; // MAX_VALUE doubles as "not yet set" sentinel
    private double max;
    private Rectangle roi;
    private ColorModel cm;
    private float[] cTable;

    // ********* constructor ******************************************************

    public BridgeStack(Dataset ds, ProcessorFactory procFac)
    {
        this.dataset = ds;
        this.processorFactory = procFac;
        this.planeRefs = new ArrayList<Object>();
        // TODO - relaxing for the moment since MetaData code not in place. do some kind of check later.
        //if (ds.getMetaData().getDirectAccessDimensionCount() != 2)
        //	throw new IllegalArgumentException("can't make a BridgeStack on a dataset unless it is organized by plane");
        int[] dimensions = ds.getDimensions();
        int numPlanes = (int) Dimensions.getTotalPlanes(dimensions);
        if (numPlanes <= 0)
            throw new IllegalArgumentException("can't make a BridgeStack on a dataset that has 0 planes");
        this.planeDims = new int[2];
        this.planeDims[0] = dimensions[0];
        this.planeDims[1] = dimensions[1];
        // Remaining dimensions enumerate the planes.
        int[] subDimensions = new int[dimensions.length-2];
        for (int i = 0; i < subDimensions.length; i++)
            subDimensions[i] = dimensions[i+2];
        if (subDimensions.length == 0)
        {
            // 2D dataset: exactly one plane.
            this.planeRefs.add(this.dataset.getData());
        }
        else
        {
            // Walk every plane position and cache its pixel-array reference.
            int[] origin = Index.create(dimensions.length-2);
            int[] position = Index.create(dimensions.length-2);
            while (Index.isValid(position, origin, subDimensions))
            {
                Object planeRef = ds.getSubset(position).getData();
                this.planeRefs.add(planeRef);
                Index.increment(position, origin, subDimensions);
            }
        }
    }

    // ********* private interface ******************************************************

    // NOTE - index in range 0..n-1
    private void insertSlice(int index, String sliceLabel, Object pixels)
    {
        Dataset newSubset = this.dataset.insertNewSubset(index);
        newSubset.setData(pixels);
        this.planeRefs.add(index, pixels); // update our cache
        setSliceLabel(sliceLabel, index+1);
    }

    // ********* public interface ******************************************************

    // TODO - make processors event listeners for addition/deletion of subsets. fixup planePos if needed (or even have proc go away if possible)
    @Override
    public void addSlice(String sliceLabel, Object pixels)
    {
        int end = this.planeRefs.size();
        insertSlice(end, sliceLabel, pixels);
    }

    // TODO - make processors event listeners for addition/deletion of subsets. fixup planePos if needed (or even have proc go away if possible)
    @Override
    public void addSlice(String sliceLabel, ImageProcessor ip)
    {
        if ((ip.getWidth() != getWidth()) ||
            (ip.getHeight() != getHeight()))
            throw new IllegalArgumentException("Dimensions do not match");
        if (this.planeRefs.size() == 0)  // TODO - note this code will never evaluate to true for imglib datasets as imglib constituted 11-20-10
        {
            // First slice defines display calibration for the stack.
            this.cm = ip.getColorModel();
            this.min = ip.getMin();
            this.max = ip.getMax();
        }
        addSlice(sliceLabel, ip.getPixels());
    }

    // TODO - make processors event listeners for addition/deletion of subsets. fixup planePos if needed (or even have proc go away if possible)
    @Override
    public void addSlice(String sliceLabel, ImageProcessor ip, int n)
    {
        if (n<1 || n>this.planeRefs.size())
            throw new IllegalArgumentException(outOfRange+n);
        insertSlice(n-1, sliceLabel, ip.getPixels());
    }

    // TODO - make processors event listeners for addition/deletion of subsets. fixup planePos if needed (or even have proc go away if possible)
    @Override
    public void deleteSlice(int n)
    {
        if (n<1 || n>this.planeRefs.size())
            throw new IllegalArgumentException(outOfRange+n);
        this.dataset.removeSubset(n-1);
        this.planeRefs.remove(n-1);
        this.planeRefCache = null; // invalidate getImageArray() cache
    }

    // TODO - make processors event listeners for addition/deletion of subsets. fixup planePos if needed (or even have proc go away if possible)
    @Override
    public void deleteLastSlice()
    {
        int numPlanes = this.planeRefs.size();
        if (numPlanes > 0)  // TODO - imglib forces this to be true!!! should fail on deleting last plane. address with imglib people.
        {
            int lastPlane = numPlanes - 1;
            this.dataset.removeSubset(lastPlane);
            this.planeRefs.remove(lastPlane);
            this.planeRefCache = null; // invalidate getImageArray() cache
        }
    }

    @Override
    public int getWidth()
    {
        return this.planeDims[0];
    }

    @Override
    public int getHeight()
    {
        return this.planeDims[1];
    }

    @Override
    public void setRoi(Rectangle roi)
    {
        this.roi = roi;
    }

    @Override
    public Rectangle getRoi()
    {
        // Default ROI is the full plane.
        if (this.roi==null)
            return new Rectangle(0, 0, getWidth(), getHeight());
        return this.roi;
    }

    @Override
    /** Updates this stack so its attributes, such as min, max,
        calibration table and color model, are the same as 'ip'. */
    public void update(ImageProcessor ip)
    {
        if (ip!=null)
        {
            this.min = ip.getMin();
            this.max = ip.getMax();
            this.cTable = ip.getCalibrationTable();
            this.cm = ip.getColorModel();
        }
    }

    @Override
    /** Returns the pixel array for the specified slice, were 1<=n<=nslices. */
    public Object getPixels(int n)
    {
        if (n<1 || n>this.planeRefs.size())
            throw new IllegalArgumentException(outOfRange+n);
        return this.planeRefs.get(n-1);
    }

    @Override
    /** Assigns a pixel array to the specified slice,
        were 1<=n<=nslices. */
    public void setPixels(Object pixels, int n)
    {
        if (n<1 || n>this.planeRefs.size())
            throw new IllegalArgumentException(outOfRange+n);
        int[] planePos = Index.getPlanePosition(this.dataset.getDimensions(), n-1);
        this.dataset.getSubset(planePos).setData(pixels);
        this.planeRefs.set(n-1, pixels); // keep cache consistent with dataset
    }

    @Override
    /** Returns the stack as an array of 1D pixel arrays. Note
        that the size of the returned array may be greater than
        the number of slices currently in the stack, with
        unused elements set to null. */
    public Object[] getImageArray()
    {
        if ((this.planeRefCache == null) || (this.planeRefs.size() > this.planeRefCache.length))
        {
            this.planeRefCache = new Object[this.planeRefs.size()];
        }
        return this.planeRefs.toArray(this.planeRefCache);
    }

    @Override
    /** Returns the number of slices in this stack. */
    public int getSize()
    {
        return this.planeRefs.size();
    }

    @Override
    /** Returns the slice labels as an array of Strings. Returns null
        if the stack is empty. */
    public String[] getSliceLabels()
    {
        if (this.planeRefs.size() == 0)
            return null;
        // NOTE - we will return a COPY of the labels. Users should access them readonly.
        // TODO - document.
        String[] labels = new String[this.planeRefs.size()];
        for (int i = 0; i < labels.length; i++)
        {
            int[] planePos = Index.getPlanePosition(this.dataset.getDimensions(), i);
            labels[i] = this.dataset.getSubset(planePos).getMetaData().getLabel();
        }
        return labels;
    }

    @Override
    /** Returns the label of the specified slice, were 1<=n<=nslices.
        Returns null if the slice does not have a label. For DICOM
        and FITS stacks, labels may contain header information. */
    public String getSliceLabel(int n)
    {
        if (n<1 || n>this.planeRefs.size())
            throw new IllegalArgumentException(outOfRange+n);
        int[] planePos = Index.getPlanePosition(this.dataset.getDimensions(), n-1);
        return this.dataset.getSubset(planePos).getMetaData().getLabel();
    }

    @Override
    /** Returns a shortened version (up to the first 60 characters or first newline and
        suffix removed) of the label of the specified slice.
        Returns null if the slice does not have a label. */
    public String getShortSliceLabel(int n)
    {
        String shortLabel = getSliceLabel(n);
        if (shortLabel == null)
            return null;
        int newline = shortLabel.indexOf('\n');
        if (newline == 0)
            return null;
        if (newline > 0)
            shortLabel = shortLabel.substring(0, newline);
        int len = shortLabel.length();
        // Strip a 3-character file extension like ".tif" (dot + non-digit tail).
        if ((len>4) &&
            (shortLabel.charAt(len-4) == '.') &&
            (!Character.isDigit(shortLabel.charAt(len-1))))
            shortLabel = shortLabel.substring(0,len-4);
        if (shortLabel.length() > 60)
            shortLabel = shortLabel.substring(0, 60);
        return shortLabel;
    }

    @Override
    /** Sets the label of the specified slice, were 1<=n<=nslices. */
    public void setSliceLabel(String label, int n)
    {
        if (n<1 || n>this.planeRefs.size())
            throw new IllegalArgumentException(outOfRange+n);
        int[] planePos = Index.getPlanePosition(this.dataset.getDimensions(), n-1);
        this.dataset.getSubset(planePos).getMetaData().setLabel(label);
    }

    @Override
    /** Returns an ImageProcessor for the specified slice,
        where 1<=n<=nslices. Returns null if the stack is empty.
    */
    public ImageProcessor getProcessor(int n)
    {
        if (n<1 || n>this.planeRefs.size())
            throw new IllegalArgumentException(outOfRange+n);
        int[] planePos = Index.getPlanePosition(this.dataset.getDimensions(), n-1);
        ImageProcessor ip = processorFactory.makeProcessor(planePos);
        // TODO : problem - what if we want one processor per plane and return it over and over. here we are hatching new all the time.
        // min == MAX_VALUE means display range was never set; don't propagate it.
        if ((this.min != Double.MAX_VALUE) && (ip!=null))
            ip.setMinAndMax(this.min, this.max);
        if (this.cTable!=null)
            ip.setCalibrationTable(this.cTable);
        return ip;
    }

    @Override
    /** Assigns a new color model to this stack. */
    public void setColorModel(ColorModel cm)
    {
        this.cm = cm;
    }

    @Override
    /** Returns this stack's color model. May return null. */
    public ColorModel getColorModel()
    {
        return this.cm;
    }

    @Override
    /** Returns true if this is a 3-slice RGB stack. */
    public boolean isRGB()
    {
        if ((this.planeRefs.size()==3) &&
            (this.planeRefs.get(0) instanceof byte[]) &&
            ("Red".equals(getSliceLabel(1))))
            return true;
        return false;
    }

    @Override
    /** Returns true if this is a 3-slice HSB stack. */
    public boolean isHSB()
    {
        if ((this.planeRefs.size()==3) &&
            ("Hue".equals(getSliceLabel(1))))
            return true;
        return false;
    }

    @Override
    /** Returns true if this is a virtual (disk resident) stack.
        This method is overridden by the VirtualStack subclass. */
    public boolean isVirtual()
    {
        return false;  // TODO - assuming this means I am not a VirtualStack class. If it means something else then we'll need to query imglib
    }

    @Override
    /** Frees memory by deleting a few slices from the end of the stack. */
    public void trim()
    {
        // Deletes roughly log(size)+1 slices, matching ij.ImageStack behavior.
        int n = (int)Math.round(Math.log(this.planeRefs.size())+1.0);
        for (int i=0; i<n; i++)
        {
            deleteLastSlice();
            System.gc();
        }
    }

    @Override
    public String toString()
    {
        String v = isVirtual()?"(V)":"";
        return ("stack["+getWidth()+"x"+getHeight()+"x"+getSize()+v+"]");
    }

    @Override
    public void flush()
    {
        // NOTE(review): only nulls the cached references; the backing dataset
        // still holds the planes — confirm this is the intended semantics.
        for (int i = 0; i < this.planeRefs.size(); i++)
            this.planeRefs.set(i, null);
    }
}
|
ij1-bridge/src/main/java/imagej/ij1bridge/BridgeStack.java
|
package imagej.ij1bridge;
import java.awt.Rectangle;
import java.awt.image.ColorModel;
import java.util.ArrayList;
import ij.ImageStack;
import ij.process.ImageProcessor;
import imagej.Dimensions;
import imagej.dataset.Dataset;
import imagej.process.Index;
// TODO - for performance could use planeRef's size/cache methods rather than querying dataset all the time
/** BridgeStack will take data from a PlanarDataset and point into it so that ij1 can use it as an ImageStack and make changes to it */
public class BridgeStack extends ImageStack
{
// **** base interface instance variables
private Dataset dataset;
private ArrayList<Object> planeRefs;
private int[] planeDims;
private ProcessorFactory processorFactory;
private Object[] planeRefCache;
// *** compatibility instance variables
private final String outOfRange = "stack index out of range: ";
private double min = Double.MAX_VALUE;
private double max;
private Rectangle roi;
private ColorModel cm;
private float[] cTable;
// ********* constructor ******************************************************
public BridgeStack(Dataset ds, ProcessorFactory procFac)
{
this.dataset = ds;
this.processorFactory = procFac;
this.planeRefs = new ArrayList<Object>();
// TODO - relaxing for the moment since MetaData code not in place. do some kind of check later.
//if (ds.getMetaData().getDirectAccessDimensionCount() != 2)
// throw new IllegalArgumentException("can't make a BridgeStack on a dataset unless it is organized by plane");
int[] dimensions = ds.getDimensions();
int numPlanes = (int) Dimensions.getTotalPlanes(dimensions);
if (numPlanes <= 0)
throw new IllegalArgumentException("can't make a BridgeStack on a dataset that has 0 planes");
this.planeDims = new int[2];
this.planeDims[0] = dimensions[0];
this.planeDims[1] = dimensions[1];
int[] subDimensions = new int[dimensions.length-2];
for (int i = 0; i < subDimensions.length; i++)
subDimensions[i] = dimensions[i+2];
if (subDimensions.length == 0)
{
this.planeRefs.add(this.dataset.getData());
}
else
{
int[] origin = Index.create(dimensions.length-2);
int[] position = Index.create(dimensions.length-2);
while (Index.isValid(position, origin, subDimensions))
{
Object planeRef = ds.getSubset(position).getData();
this.planeRefs.add(planeRef);
Index.increment(position, origin, subDimensions);
}
}
}
// ********* private interface ******************************************************
// NOTE - index in range 0..n-1
private void insertSlice(int index, String sliceLabel, Object pixels)
{
Dataset newSubset = this.dataset.insertNewSubset(index);
newSubset.setData(pixels);
this.planeRefs.add(index, pixels); // update our cache
setSliceLabel(sliceLabel, index+1);
}
// ********* public interface ******************************************************
// TODO - make processors event listeners for addition/deletion of subsets. fixup planePos if needed (or even have proc go away if possible)
@Override
public void addSlice(String sliceLabel, Object pixels)
{
int end = this.planeRefs.size();
insertSlice(end, sliceLabel, pixels);
}
// TODO - make processors event listeners for addition/deletion of subsets. fixup planePos if needed (or even have proc go away if possible)
@Override
public void addSlice(String sliceLabel, ImageProcessor ip)
{
if ((ip.getWidth() != getWidth()) ||
(ip.getHeight() != getHeight()))
throw new IllegalArgumentException("Dimensions do not match");
if (this.planeRefs.size() == 0) // TODO - note this code will never evaluate to true for imglib datasets as imglib constituted 11-20-10
{
this.cm = ip.getColorModel();
this.min = ip.getMin();
this.max = ip.getMax();
}
addSlice(sliceLabel, ip.getPixels());
}
// TODO - make processors event listeners for addition/deletion of subsets. fixup planePos if needed (or even have proc go away if possible)
@Override
public void addSlice(String sliceLabel, ImageProcessor ip, int n)
{
if (n<1 || n>this.planeRefs.size())
throw new IllegalArgumentException(outOfRange+n);
insertSlice(n-1, sliceLabel, ip.getPixels());
}
// TODO - make processors event listeners for addition/deletion of subsets. fixup planePos if needed (or even have proc go away if possible)
@Override
public void deleteSlice(int n)
{
if (n<1 || n>this.planeRefs.size())
throw new IllegalArgumentException(outOfRange+n);
this.dataset.removeSubset(n-1);
this.planeRefs.remove(n-1);
this.planeRefCache = null;
}
// TODO - make processors event listeners for addition/deletion of subsets. fixup planePos if needed (or even have proc go away if possible)
@Override
public void deleteLastSlice()
{
int numPlanes = this.planeRefs.size();
if (numPlanes > 0) // TODO - imglib forces this to be true!!! should fail on deleting last plane. address with imglib people.
{
int lastPlane = numPlanes - 1;
this.dataset.removeSubset(lastPlane);
this.planeRefs.remove(lastPlane);
this.planeRefCache = null;
}
}
@Override
public int getWidth()
{
return this.planeDims[0];
}
@Override
public int getHeight()
{
return this.planeDims[1];
}
@Override
public void setRoi(Rectangle roi)
{
this.roi = roi;
}
@Override
public Rectangle getRoi()
{
if (this.roi==null)
return new Rectangle(0, 0, getWidth(), getHeight());
return this.roi;
}
@Override
/** Updates this stack so its attributes, such as min, max,
calibration table and color model, are the same as 'ip'. */
public void update(ImageProcessor ip)
{
if (ip!=null)
{
this.min = ip.getMin();
this.max = ip.getMax();
this.cTable = ip.getCalibrationTable();
this.cm = ip.getColorModel();
}
}
@Override
/** Returns the pixel array for the specified slice, were 1<=n<=nslices. */
public Object getPixels(int n)
{
if (n<1 || n>this.planeRefs.size())
throw new IllegalArgumentException(outOfRange+n);
return this.planeRefs.get(n-1);
}
@Override
/** Assigns a pixel array to the specified slice,
were 1<=n<=nslices. */
public void setPixels(Object pixels, int n)
{
if (n<1 || n>this.planeRefs.size())
throw new IllegalArgumentException(outOfRange+n);
int[] planePos = Index.getPlanePosition(this.dataset.getDimensions(), n-1);
this.dataset.getSubset(planePos).setData(pixels);
this.planeRefs.set(n-1, pixels);
}
@Override
/** Returns the stack as an array of 1D pixel arrays. Note
that the size of the returned array may be greater than
the number of slices currently in the stack, with
unused elements set to null. */
public Object[] getImageArray()
{
if ((this.planeRefCache == null) || (this.planeRefs.size() > this.planeRefCache.length))
{
this.planeRefCache = new Object[this.planeRefs.size()];
}
return this.planeRefs.toArray(this.planeRefCache);
}
@Override
/** Returns the number of slices in this stack. */
public int getSize()
{
return this.planeRefs.size();
}
@Override
/** Returns the slice labels as an array of Strings. Returns null
if the stack is empty. */
public String[] getSliceLabels()
{
if (this.planeRefs.size() == 0)
return null;
// NOTE - we will return a COPY of the labels. Users should access them readonly.
// TODO - document.
String[] labels = new String[this.planeRefs.size()];
for (int i = 0; i < labels.length; i++)
{
int[] planePos = Index.getPlanePosition(this.dataset.getDimensions(), i);
labels[i] = this.dataset.getSubset(planePos).getMetaData().getLabel();
}
return labels;
}
@Override
/** Returns the label of the specified slice, were 1<=n<=nslices.
Returns null if the slice does not have a label. For DICOM
and FITS stacks, labels may contain header information. */
public String getSliceLabel(int n)
{
if (n<1 || n>this.planeRefs.size())
throw new IllegalArgumentException(outOfRange+n);
int[] planePos = Index.getPlanePosition(this.dataset.getDimensions(), n-1);
return this.dataset.getSubset(planePos).getMetaData().getLabel();
}
@Override
/** Returns a shortened version (up to the first 60 characters or first newline and
suffix removed) of the label of the specified slice.
Returns null if the slice does not have a label. */
public String getShortSliceLabel(int n)
{
String shortLabel = getSliceLabel(n);
if (shortLabel == null)
return null;
int newline = shortLabel.indexOf('\n');
if (newline == 0)
return null;
if (newline > 0)
shortLabel = shortLabel.substring(0, newline);
int len = shortLabel.length();
if ((len>4) &&
(shortLabel.charAt(len-4) == '.') &&
(!Character.isDigit(shortLabel.charAt(len-1))))
shortLabel = shortLabel.substring(0,len-4);
if (shortLabel.length() > 60)
shortLabel = shortLabel.substring(0, 60);
return shortLabel;
}
@Override
/** Sets the label of the specified slice, were 1<=n<=nslices. */
public void setSliceLabel(String label, int n)
{
if (n<1 || n>this.planeRefs.size())
throw new IllegalArgumentException(outOfRange+n);
int[] planePos = Index.getPlanePosition(this.dataset.getDimensions(), n-1);
this.dataset.getSubset(planePos).getMetaData().setLabel(label);
}
@Override
/** Returns an ImageProcessor for the specified slice,
where 1<=n<=nslices. Returns null if the stack is empty.
*/
public ImageProcessor getProcessor(int n)
{
if (n<1 || n>this.planeRefs.size())
throw new IllegalArgumentException(outOfRange+n);
int[] planePos = Index.getPlanePosition(this.dataset.getDimensions(), n-1);
ImageProcessor ip = processorFactory.makeProcessor(planePos);
// TODO : problem - what if we want one processor per plane and return it over and over. here we are hatching new all the time.
if ((this.min != Double.MAX_VALUE) && (ip!=null))
ip.setMinAndMax(this.min, this.max);
if (this.cTable!=null)
ip.setCalibrationTable(this.cTable);
return ip;
}
@Override
/** Assigns a new color model to this stack. */
public void setColorModel(ColorModel cm)
{
this.cm = cm;
}
@Override
/** Returns this stack's color model. May return null. */
public ColorModel getColorModel()
{
return this.cm;
}
@Override
/** Returns true if this is a 3-slice RGB stack. */
public boolean isRGB()
{
if ((this.planeRefs.size()==3) &&
(this.planeRefs.get(0) instanceof byte[]) &&
("Red".equals(getSliceLabel(1))))
return true;
return false;
}
@Override
/** Returns true if this is a 3-slice HSB stack. */
public boolean isHSB()
{
if ((this.planeRefs.size()==3) &&
("Hue".equals(getSliceLabel(1))))
return true;
return false;
}
@Override
/** Returns true if this is a virtual (disk resident) stack.
This method is overridden by the VirtualStack subclass. */
public boolean isVirtual()
{
return false; // TODO - assuming this means I am not a VirtualStack class. If it means something else then we'll need to query imglib
}
@Override
/** Frees memory by deleting a few slices from the end of the stack. */
public void trim()
{
int n = (int)Math.round(Math.log(this.planeRefs.size())+1.0);
for (int i=0; i<n; i++)
{
deleteLastSlice();
System.gc();
}
}
@Override
public String toString()
{
String v = isVirtual()?"(V)":"";
return ("stack["+getWidth()+"x"+getHeight()+"x"+getSize()+v+"]");
}
}
|
add flush() to all ImageStack implementations
This used to be revision r1920.
|
ij1-bridge/src/main/java/imagej/ij1bridge/BridgeStack.java
|
add flush() to all ImageStack implementations
|
|
Java
|
bsd-2-clause
|
4bbe1542f6019b29e50c3c8805422c75277911b4
| 0
|
oblac/jodd,mosoft521/jodd,oblac/jodd,oblac/jodd,oblac/jodd,mosoft521/jodd,mosoft521/jodd,mosoft521/jodd
|
// Copyright (c) 2003-present, Jodd Team (http://jodd.org)
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// 1. Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// 2. Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.
package jodd.io;
import jodd.util.StringUtil;
import org.junit.jupiter.api.*;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.CsvSource;
import java.io.File;
import java.io.IOException;
import java.lang.reflect.Method;
import java.net.URL;
class FileUtilTest {
protected String dataRoot;
protected String utfdataRoot;
@BeforeEach
void setUp() throws Exception {
if (dataRoot != null) {
return;
}
URL data = FileUtilTest.class.getResource("data");
dataRoot = data.getFile();
data = FileUtilTest.class.getResource("utf");
utfdataRoot = data.getFile();
}
@Test
void testFileManipulation() throws IOException {
FileUtil.copy(new File(dataRoot, "sb.data"), new File(dataRoot, "sb1.data"));
assertFalse(FileUtil.isNewer(new File(dataRoot, "sb.data"), new File(dataRoot, "sb1.data")));
assertFalse(FileUtil.isOlder(new File(dataRoot, "sb.data"), new File(dataRoot, "sb1.data")));
FileUtil.delete(new File(dataRoot, "sb1.data"));
}
@Test
void testString() {
String s = "This is a test file\nIt only has\nthree lines!!";
try {
FileUtil.writeString(new File(dataRoot, "test.txt"), s);
} catch (Exception ex) {
fail("FileUtil.writeString " + ex.toString());
}
String s2 = null;
try {
s2 = FileUtil.readString(dataRoot + "/test.txt");
} catch (Exception ex) {
fail("FileUtil.readString " + ex.toString());
}
assertEquals(s, s2);
// test unicode chars (i.e. greater then 255)
char[] buf = s.toCharArray();
buf[0] = 256;
s = new String(buf);
try {
FileUtil.writeString(dataRoot + "/test.txt", s);
} catch (Exception ex) {
fail("FileUtil.writeString " + ex.toString());
}
try {
s2 = FileUtil.readString(dataRoot + "/test.txt");
} catch (Exception ex) {
fail("FileUtil.readString " + ex.toString());
}
assertEquals(s.substring(1), s2.substring(1));
assertEquals(s.charAt(0), s2.charAt(0));
try {
FileUtil.delete(dataRoot + "/test.txt");
} catch (IOException ioex) {
fail("FileUtil.delete" + ioex.toString());
}
}
@Test
void testUnicodeString() {
String s = "This is a test file\nIt only has\nthree lines!!";
char[] buf = s.toCharArray();
buf[0] = 256;
s = new String(buf);
try {
FileUtil.writeString(dataRoot + "/test2.txt", s, "UTF-16");
} catch (Exception ex) {
fail("FileUtil.writeString " + ex.toString());
}
String s2 = null;
try {
s2 = FileUtil.readString(dataRoot + "/test2.txt", "UTF-16");
} catch (Exception ex) {
fail("FileUtil.readString " + ex.toString());
}
assertEquals(s, s2);
try {
FileUtil.delete(dataRoot + "/test2.txt");
} catch (IOException ioex) {
fail("FileUtil.delete" + ioex.toString());
}
}
@Test
void testFileManipulations() {
String root = dataRoot + "/file/";
String tmp = root + "tmp/";
String tmp2 = root + "xxx/";
String tmp3 = root + "zzz/";
// copy
try {
FileUtil.copyFile(root + "a.txt", root + "w.txt");
FileUtil.copyFile(root + "a.png", root + "w.png");
FileUtil.copyFile(root + "a.txt", root + "w.txt");
} catch (IOException ioex) {
fail(ioex.toString());
}
// mkdirs
try {
FileUtil.mkdir(tmp);
FileUtil.mkdirs(tmp + "x/");
FileUtil.copyFileToDir(root + "a.txt", tmp);
FileUtil.copyFileToDir(root + "a.png", tmp);
} catch (IOException ioex) {
fail(ioex.toString());
}
// don't overwrite
try {
FileUtil.copyFileToDir(root + "a.txt", tmp, FileUtil.params().setOverwrite(false));
fail("copy file don't overwrite");
} catch (IOException e) {
// ignore
}
// move
try {
FileUtil.moveFile(root + "w.txt", tmp + "w.txt");
FileUtil.moveFileToDir(root + "w.png", tmp);
} catch (IOException ioex) {
fail(ioex.toString());
}
try {
FileUtil.moveFileToDir(root + "w.png", tmp, FileUtil.cloneParams().setOverwrite(false));
fail("move file don't overwrite");
} catch (IOException e) {
// ignore
}
// delete
try {
FileUtil.deleteFile(tmp + "a.txt");
FileUtil.deleteFile(tmp + "a.png");
FileUtil.deleteFile(tmp + "w.txt");
FileUtil.deleteFile(tmp + "w.png");
} catch (IOException ioex) {
fail(ioex.toString());
}
try {
FileUtil.deleteFile(tmp + "a.txt");
fail("delete file strict delete");
} catch (IOException e) {
// ignore
}
// movedir
try {
FileUtil.moveDir(tmp, tmp2);
} catch (IOException ioex) {
fail(ioex.toString());
}
// copyDir
try {
FileUtil.copyDir(tmp2, tmp3);
} catch (IOException ioex) {
fail(ioex.toString());
}
// deleteDir
try {
FileUtil.deleteDir(tmp2);
FileUtil.deleteDir(tmp3);
} catch (IOException ioex) {
fail(ioex.toString());
}
}
@Test
void testBytes() {
try {
File file = new File(dataRoot + "/file/a.txt");
byte[] bytes = FileUtil.readBytes(dataRoot + "/file/a.txt");
assertEquals(file.length(), bytes.length);
String content = new String(bytes);
content = StringUtil.remove(content, '\r');
assertEquals("test file\n", content);
} catch (IOException ioex) {
fail(ioex.toString());
}
}
@Test
void testUTFReads() throws IOException {
String content = FileUtil.readUTFString(new File(utfdataRoot, "utf-8.txt"));
content = content.replace("\r\n", "\n");
String content8 = FileUtil.readString(new File(utfdataRoot, "utf-8.txt"), "UTF-8");
content8 = content8.replace("\r\n", "\n");
assertEquals(content, content8);
String content1 = FileUtil.readUTFString(new File(utfdataRoot, "utf-16be.txt"));
content1 = content1.replace("\r\n", "\n");
assertEquals(content, content1);
String content16 = FileUtil.readString(new File(utfdataRoot, "utf-16be.txt"), "UTF-16BE");
content16 = content16.replace("\r\n", "\n");
assertEquals(content, content16);
String content162 = FileUtil.readString(new File(utfdataRoot, "utf-16be.txt"), "UTF-16");
content162 = content162.replace("\r\n", "\n");
assertEquals(content, content162);
String content2 = FileUtil.readUTFString(new File(utfdataRoot, "utf-16le.txt"));
content2 = content2.replace("\r\n", "\n");
assertEquals(content, content2);
String content163 = FileUtil.readString(new File(utfdataRoot, "utf-16le.txt"), "UTF-16LE");
content163 = content163.replace("\r\n", "\n");
assertEquals(content, content163);
}
@ParameterizedTest (name = "{index} : FileUtil#{0}")
@CsvSource(
{
"md5, 529a2cfd3346c7fe17b845b6ec90fcfd",
"sha1, 7687b985b2eeff4a981480cead1787bd3f26929c",
"sha256, b2a3dec0059df342e9b33721957fd54221ab7fb7daa99d9f35af729dc2568e51",
"sha384, 1eb67f4b35ae69bbd815dbceee9584c9a65b82e8a209b0a3ab9e6def0a74cf5915228ce32f6154ba5c9ee6dfc66f6414",
"sha512, 4b53d8ca344fc63dd0a69b2ef4c5275279b4c31a834d5e0501a0ab646d1cc56f15e45a019e3f46597be288924b8b6fba19e4ebad1552f5007d56e7f12c3cb1d2"
}
)
@DisplayName(value = "tests for digest-algorithms")
void testDigestAlgorithms(final String method, final String expected) throws Exception {
Method declaredMethod = FileUtil.class.getMethod(method, File.class);
File file = new File(FileUtilTest.class.getResource("data/file/a.png").toURI());
final String actual = (String) declaredMethod.invoke(null, file);
// asserts
assertEquals(expected, actual.toLowerCase());
}
}
|
jodd-core/src/test/java/jodd/io/FileUtilTest.java
|
// Copyright (c) 2003-present, Jodd Team (http://jodd.org)
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// 1. Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// 2. Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.
package jodd.io;
import jodd.util.StringUtil;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.Nested;
import org.junit.jupiter.api.Test;
import java.io.File;
import java.io.IOException;
import java.net.URL;
import static org.junit.jupiter.api.Assertions.*;
class FileUtilTest {
protected String dataRoot;
protected String utfdataRoot;
@BeforeEach
void setUp() throws Exception {
if (dataRoot != null) {
return;
}
URL data = FileUtilTest.class.getResource("data");
dataRoot = data.getFile();
data = FileUtilTest.class.getResource("utf");
utfdataRoot = data.getFile();
}
@Test
void testFileManipulation() throws IOException {
FileUtil.copy(new File(dataRoot, "sb.data"), new File(dataRoot, "sb1.data"));
assertFalse(FileUtil.isNewer(new File(dataRoot, "sb.data"), new File(dataRoot, "sb1.data")));
assertFalse(FileUtil.isOlder(new File(dataRoot, "sb.data"), new File(dataRoot, "sb1.data")));
FileUtil.delete(new File(dataRoot, "sb1.data"));
}
@Test
void testString() {
String s = "This is a test file\nIt only has\nthree lines!!";
try {
FileUtil.writeString(new File(dataRoot, "test.txt"), s);
} catch (Exception ex) {
fail("FileUtil.writeString " + ex.toString());
}
String s2 = null;
try {
s2 = FileUtil.readString(dataRoot + "/test.txt");
} catch (Exception ex) {
fail("FileUtil.readString " + ex.toString());
}
assertEquals(s, s2);
// test unicode chars (i.e. greater then 255)
char[] buf = s.toCharArray();
buf[0] = 256;
s = new String(buf);
try {
FileUtil.writeString(dataRoot + "/test.txt", s);
} catch (Exception ex) {
fail("FileUtil.writeString " + ex.toString());
}
try {
s2 = FileUtil.readString(dataRoot + "/test.txt");
} catch (Exception ex) {
fail("FileUtil.readString " + ex.toString());
}
assertEquals(s.substring(1), s2.substring(1));
assertEquals(s.charAt(0), s2.charAt(0));
try {
FileUtil.delete(dataRoot + "/test.txt");
} catch (IOException ioex) {
fail("FileUtil.delete" + ioex.toString());
}
}
@Test
void testUnicodeString() {
String s = "This is a test file\nIt only has\nthree lines!!";
char[] buf = s.toCharArray();
buf[0] = 256;
s = new String(buf);
try {
FileUtil.writeString(dataRoot + "/test2.txt", s, "UTF-16");
} catch (Exception ex) {
fail("FileUtil.writeString " + ex.toString());
}
String s2 = null;
try {
s2 = FileUtil.readString(dataRoot + "/test2.txt", "UTF-16");
} catch (Exception ex) {
fail("FileUtil.readString " + ex.toString());
}
assertEquals(s, s2);
try {
FileUtil.delete(dataRoot + "/test2.txt");
} catch (IOException ioex) {
fail("FileUtil.delete" + ioex.toString());
}
}
@Test
void testFileManipulations() {
String root = dataRoot + "/file/";
String tmp = root + "tmp/";
String tmp2 = root + "xxx/";
String tmp3 = root + "zzz/";
// copy
try {
FileUtil.copyFile(root + "a.txt", root + "w.txt");
FileUtil.copyFile(root + "a.png", root + "w.png");
FileUtil.copyFile(root + "a.txt", root + "w.txt");
} catch (IOException ioex) {
fail(ioex.toString());
}
// mkdirs
try {
FileUtil.mkdir(tmp);
FileUtil.mkdirs(tmp + "x/");
FileUtil.copyFileToDir(root + "a.txt", tmp);
FileUtil.copyFileToDir(root + "a.png", tmp);
} catch (IOException ioex) {
fail(ioex.toString());
}
// don't overwrite
try {
FileUtil.copyFileToDir(root + "a.txt", tmp, FileUtil.params().setOverwrite(false));
fail("copy file don't overwrite");
} catch (IOException e) {
// ignore
}
// move
try {
FileUtil.moveFile(root + "w.txt", tmp + "w.txt");
FileUtil.moveFileToDir(root + "w.png", tmp);
} catch (IOException ioex) {
fail(ioex.toString());
}
try {
FileUtil.moveFileToDir(root + "w.png", tmp, FileUtil.cloneParams().setOverwrite(false));
fail("move file don't overwrite");
} catch (IOException e) {
// ignore
}
// delete
try {
FileUtil.deleteFile(tmp + "a.txt");
FileUtil.deleteFile(tmp + "a.png");
FileUtil.deleteFile(tmp + "w.txt");
FileUtil.deleteFile(tmp + "w.png");
} catch (IOException ioex) {
fail(ioex.toString());
}
try {
FileUtil.deleteFile(tmp + "a.txt");
fail("delete file strict delete");
} catch (IOException e) {
// ignore
}
// movedir
try {
FileUtil.moveDir(tmp, tmp2);
} catch (IOException ioex) {
fail(ioex.toString());
}
// copyDir
try {
FileUtil.copyDir(tmp2, tmp3);
} catch (IOException ioex) {
fail(ioex.toString());
}
// deleteDir
try {
FileUtil.deleteDir(tmp2);
FileUtil.deleteDir(tmp3);
} catch (IOException ioex) {
fail(ioex.toString());
}
}
@Test
void testBytes() {
try {
File file = new File(dataRoot + "/file/a.txt");
byte[] bytes = FileUtil.readBytes(dataRoot + "/file/a.txt");
assertEquals(file.length(), bytes.length);
String content = new String(bytes);
content = StringUtil.remove(content, '\r');
assertEquals("test file\n", content);
} catch (IOException ioex) {
fail(ioex.toString());
}
}
@Test
void testUTFReads() throws IOException {
String content = FileUtil.readUTFString(new File(utfdataRoot, "utf-8.txt"));
content = content.replace("\r\n", "\n");
String content8 = FileUtil.readString(new File(utfdataRoot, "utf-8.txt"), "UTF-8");
content8 = content8.replace("\r\n", "\n");
assertEquals(content, content8);
String content1 = FileUtil.readUTFString(new File(utfdataRoot, "utf-16be.txt"));
content1 = content1.replace("\r\n", "\n");
assertEquals(content, content1);
String content16 = FileUtil.readString(new File(utfdataRoot, "utf-16be.txt"), "UTF-16BE");
content16 = content16.replace("\r\n", "\n");
assertEquals(content, content16);
String content162 = FileUtil.readString(new File(utfdataRoot, "utf-16be.txt"), "UTF-16");
content162 = content162.replace("\r\n", "\n");
assertEquals(content, content162);
String content2 = FileUtil.readUTFString(new File(utfdataRoot, "utf-16le.txt"));
content2 = content2.replace("\r\n", "\n");
assertEquals(content, content2);
String content163 = FileUtil.readString(new File(utfdataRoot, "utf-16le.txt"), "UTF-16LE");
content163 = content163.replace("\r\n", "\n");
assertEquals(content, content163);
}
@Nested
@DisplayName("tests for digest-algorithms")
class Digest {
private File file;
@BeforeEach
void beforeEach() throws Exception {
file = new File(FileUtilTest.class.getResource("data/file/a.png").toURI());
}
@Test
void testMd5() throws IOException {
final String expected = "529a2cfd3346c7fe17b845b6ec90fcfd".toUpperCase();
final String actual = FileUtil.md5(file);
// asserts
assertEquals(expected, actual);
}
@Test
void testSha1() throws IOException {
final String expected = "7687b985b2eeff4a981480cead1787bd3f26929c".toUpperCase();
final String actual = FileUtil.sha1(file);
// asserts
assertEquals(expected, actual);
}
@Test
void testSha256() throws IOException {
final String expected = "b2a3dec0059df342e9b33721957fd54221ab7fb7daa99d9f35af729dc2568e51".toUpperCase();
final String actual = FileUtil.sha256(file);
// asserts
assertEquals(expected, actual);
}
@Test
void testSha384() throws IOException {
final String expected = "1eb67f4b35ae69bbd815dbceee9584c9a65b82e8a209b0a3ab9e6def0a74cf5915228ce32f6154ba5c9ee6dfc66f6414".toUpperCase();
final String actual = FileUtil.sha384(file);
// asserts
assertEquals(expected, actual);
}
@Test
void testSha512() throws IOException {
final String expected = "4b53d8ca344fc63dd0a69b2ef4c5275279b4c31a834d5e0501a0ab646d1cc56f15e45a019e3f46597be288924b8b6fba19e4ebad1552f5007d56e7f12c3cb1d2".toUpperCase();
final String actual = FileUtil.sha512(file);
// asserts
assertEquals(expected, actual);
}
}
}
|
update tests for digest-algorithms
What test style do you prefer for testing digest algos in FileUtil?
- nested class with several methods for each digest algo
- one parameterized method to test all digest algos
|
jodd-core/src/test/java/jodd/io/FileUtilTest.java
|
update tests for digest-algorithms
|
|
Java
|
bsd-3-clause
|
189bbe60dff78b0153b5347221af5ae52c463f1d
| 0
|
Moliholy/cvmfs-java
|
package com.molina.cvmfs.history;
import com.molina.cvmfs.common.DatabaseObject;
import java.io.File;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
/**
* Wrapper around CernVM-FS 2.1.x repository history databases
*
* @author Jose Molina Colmenero
*/
public class History extends DatabaseObject {
private String schema;
private String fqrn;
public History(File databaseFile) throws IllegalStateException, SQLException {
super(databaseFile);
readProperties();
}
public static History open(String historyPath) throws SQLException {
return new History(new File(historyPath));
}
public String getSchema() {
return schema;
}
public String getFqrn() {
return fqrn;
}
private void readProperties() throws SQLException {
Map<String, Object> properties = readPropertiesTable();
assert (properties.containsKey("schema") &&
properties.get("schema").equals("1.0"));
if (properties.containsKey("fqrn"))
fqrn = (String) properties.get("fqrn");
schema = (String) properties.get("schema");
}
private RevisionTag getTagByQuery(String query) throws SQLException {
Statement statement = createStatement();
ResultSet result = statement.executeQuery(query);
if (result != null && result.next()) {
RevisionTag rt = new RevisionTag(result);
statement.close();
result.close();
return rt;
}
return null;
}
public List<RevisionTag> listTags() throws SQLException {
Statement statement = createStatement();
ResultSet results = statement.executeQuery(RevisionTag.sqlQueryAll());
List<RevisionTag> tags = new ArrayList<RevisionTag>();
while (results.next()) {
tags.add(new RevisionTag(results));
}
results.getStatement().close();
results.close();
return tags;
}
public RevisionTag getTagByName(String name) throws SQLException {
return getTagByQuery(RevisionTag.sqlQueryName(name));
}
public RevisionTag getTagByRevision(int revision) throws SQLException {
return getTagByQuery(RevisionTag.sqlQueryRevision(revision));
}
public RevisionTag getTagByDate(long timestamp) throws SQLException {
return getTagByQuery(RevisionTag.sqlQueryDate(timestamp));
}
}
|
src/main/java/com/molina/cvmfs/history/History.java
|
package com.molina.cvmfs.history;
import com.molina.cvmfs.common.DatabaseObject;
import java.io.File;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
/**
* Wrapper around CernVM-FS 2.1.x repository history databases
*
* @author Jose Molina Colmenero
*/
public class History extends DatabaseObject {
private String schema;
private String fqrn;
public History(File databaseFile) throws IllegalStateException, SQLException {
super(databaseFile);
readProperties();
}
public static History open(String historyPath) throws SQLException {
return new History(new File(historyPath));
}
public String getSchema() {
return schema;
}
public String getFqrn() {
return fqrn;
}
private void readProperties() throws SQLException {
Map<String, Object> properties = readPropertiesTable();
assert (properties.containsKey("schema") &&
properties.get("schema").equals("1.0"));
if (properties.containsKey("fqrn"))
fqrn = (String) properties.get("fqrn");
schema = (String) properties.get("schema");
}
private RevisionTag getTagByQuery(String query) throws SQLException {
ResultSet result = runSQL(query);
if (result != null && result.next()) {
RevisionTag rt = new RevisionTag(result);
result.getStatement().close();
result.close();
return rt;
}
return null;
}
public List<RevisionTag> listTags() throws SQLException {
ResultSet results = runSQL(RevisionTag.sqlQueryAll());
List<RevisionTag> tags = new ArrayList<RevisionTag>();
while (results.next()) {
tags.add(new RevisionTag(results));
}
results.getStatement().close();
results.close();
return tags;
}
public RevisionTag getTagByName(String name) throws SQLException {
return getTagByQuery(RevisionTag.sqlQueryName(name));
}
public RevisionTag getTagByRevision(int revision) throws SQLException {
return getTagByQuery(RevisionTag.sqlQueryRevision(revision));
}
public RevisionTag getTagByDate(long timestamp) throws SQLException {
return getTagByQuery(RevisionTag.sqlQueryDate(timestamp));
}
}
|
Fix new statements
|
src/main/java/com/molina/cvmfs/history/History.java
|
Fix new statements
|
|
Java
|
bsd-3-clause
|
d1ba46a7a75be29d8f105d03dd24108235e103d2
| 0
|
GabrielDancause/jbooktrader,mkoistinen/JBookTrader,mkoistinen/JBookTrader,GabrielDancause/jbooktrader,mkoistinen/JBookTrader,GabrielDancause/jbooktrader
|
package com.jbooktrader.platform.web;
import com.jbooktrader.platform.model.*;
import com.jbooktrader.platform.performance.*;
import com.jbooktrader.platform.position.*;
import com.jbooktrader.platform.startup.*;
import com.jbooktrader.platform.strategy.*;
import com.jbooktrader.platform.util.*;
import com.sun.net.httpserver.*;
import java.io.*;
import java.net.*;
import java.text.*;
public class WebHandler implements HttpHandler {
    /** Directory, relative to the working directory, that static web assets are served from. */
    private static final String WEBROOT = "resources/web";

    /**
     * Entry point for every HTTP request. Serves the strategy overview page for
     * "/" and "/index.html", static assets for known file extensions, and a
     * 404 response for anything else.
     *
     * @param httpExchange the exchange supplied by the embedded HTTP server
     * @throws IOException if writing the response fails
     */
    public void handle(HttpExchange httpExchange) throws IOException {
        String requestURI = httpExchange.getRequestURI().toString().trim();
        String lowerURI = requestURI.toLowerCase();
        String userAgent = httpExchange.getRequestHeaders().getFirst("User-Agent");
        // The User-Agent header may be absent; the original would have thrown a NPE.
        boolean iPhone = userAgent != null && userAgent.contains("iPhone");
        StringBuilder sb = new StringBuilder();
        // The page...
        if (requestURI.equalsIgnoreCase("/") || requestURI.equalsIgnoreCase("/index.html")) {
            sb.append("<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.01//EN\" \"http://www.w3.org/TR/html4/strict.dtd\">\n");
            sb.append("<html>\n");
            sb.append("<head>\n");
            sb.append("<title>JBookTrader Web Console</title>\n");
            if (iPhone) {
                sb.append("<link rel=\"apple-touch-icon\" href=\"apple-touch-icon.png\" />\n");
                sb.append("<meta name=\"viewport\" content=\"width=320; initial-scale=1.0; maximum-scale=1.0; user-scalable=0;\" />\n");
                sb.append("<link media=\"screen\" rel=\"stylesheet\" type=\"text/css\" href=\"iPhone.css\" />\n");
                sb.append("<script type=\"application/x-javascript\" src=\"iPhone.js\"></script> \n");
            }
            else {
                sb.append("<link media=\"screen\" rel=\"stylesheet\" type=\"text/css\" href=\"stylesheet.css\" />\n");
            }
            // Compare mode names with equals(): the original used == on Strings,
            // which only works by accident of interning.
            String mode = Dispatcher.getMode().toString();
            String modeString;
            if ("Trade".equals(mode)) {
                modeString = "Trading";
            }
            else if ("Optimization".equals(mode)) {
                modeString = "Optimizing";
            }
            else {
                modeString = mode + "ing";
            }
            sb.append("</head>\n");
            sb.append("<body>\n");
            sb.append("<h1>\n");
            sb.append(JBookTrader.APP_NAME).append(" : ").append(modeString);
            sb.append("</h1>\n");
            sb.append("<table>");
            sb.append("<tr><th>Strategy</th><th>Position</th><th>Trades</th><th>Max DD</th><th>Net Profit</th></tr>");
            DecimalFormat df = NumberFormatterFactory.getNumberFormatter(0);
            double totalPNL = 0.0;
            for (Strategy strategy : Dispatcher.getTrader().getAssistant().getAllStrategies()) {
                PositionManager positionManager = strategy.getPositionManager();
                PerformanceManager performanceManager = strategy.getPerformanceManager();
                totalPNL += performanceManager.getNetProfit();
                sb.append("<tr>\n");
                sb.append("<td>").append(strategy.getName()).append("</td>");
                sb.append("<td align=\"right\">").append(positionManager.getPosition()).append("</td>");
                sb.append("<td align=\"right\">").append(performanceManager.getTrades()).append("</td>");
                sb.append("<td align=\"right\">").append(df.format(performanceManager.getMaxDrawdown())).append("</td>");
                sb.append("<td align=\"right\">").append(df.format(performanceManager.getNetProfit())).append("</td>\n");
                sb.append("</tr>\n");
            }
            sb.append("<tr><td class=\"summary\" colspan=\"4\">Summary</td>");
            // Close the summary row: the original left this <tr> unterminated.
            sb.append("<td class=\"summary\" style=\"text-align: right\">").append(df.format(totalPNL)).append("</td></tr>\n");
            sb.append("</table>\n");
            sb.append("<p class=\"version\">JBookTrader version ").append(JBookTrader.VERSION).append("</p>\n");
            sb.append("</body>\n");
            sb.append("</html>\n");
        }
        // This handles static files...
        else if (lowerURI.contains(".png") ||
                 lowerURI.contains(".jpg") ||
                 lowerURI.contains(".gif") ||
                 lowerURI.contains(".ico") ||
                 lowerURI.contains(".css") ||
                 lowerURI.contains(".js")) {
            try {
                handleFile(httpExchange, requestURI);
            }
            catch (Exception e) {
                e.printStackTrace();
            }
            return;
        }
        // Anything else is a 404.
        else {
            sb.append("File not found");
            sendResponse(httpExchange, HttpURLConnection.HTTP_NOT_FOUND, sb.toString());
            return;
        }
        sendResponse(httpExchange, HttpURLConnection.HTTP_OK, sb.toString());
    }

    /**
     * Writes a complete text response with the given HTTP status code and
     * closes the response body.
     */
    private static void sendResponse(HttpExchange httpExchange, int statusCode, String body) throws IOException {
        // Content-Length must be the encoded byte count, not String.length():
        // the two differ for non-ASCII content.
        byte[] bytes = body.getBytes();
        httpExchange.sendResponseHeaders(statusCode, bytes.length);
        OutputStream os = httpExchange.getResponseBody();
        try {
            os.write(bytes);
        }
        finally {
            os.close();
        }
    }

    /**
     * Handles HTTP requests for files (images, css, js, etc.)
     * The files must reside in resources/web/
     *
     * @param httpExchange the current exchange
     * @param requestURI   requested path, resolved under {@link #WEBROOT}
     * @throws IOException if sending the response fails
     */
    private void handleFile(HttpExchange httpExchange, String requestURI) throws IOException {
        // Security: the URI is client-controlled; refuse path traversal out of WEBROOT.
        if (requestURI.contains("..")) {
            httpExchange.sendResponseHeaders(HttpURLConnection.HTTP_FORBIDDEN, -1);
            httpExchange.getResponseBody().close();
            return;
        }
        String resource = WEBROOT + requestURI;
        String lowerURI = requestURI.toLowerCase();
        String contentType;
        if (lowerURI.contains(".png")) {
            contentType = "image/png;charset=utf-8";
        }
        else if (lowerURI.contains(".ico")) {
            contentType = "image/x-ico;charset=utf-8";
        }
        else if (lowerURI.contains(".jpg")) {
            contentType = "image/jpeg;charset=utf-8";
        }
        else if (lowerURI.contains(".gif")) {
            contentType = "image/gif;charset=utf-8";
        }
        else if (lowerURI.contains(".css")) {
            contentType = "text/css;charset=utf-8";
        }
        else if (lowerURI.contains(".js")) {
            contentType = "text/javascript;charset=utf-8";
        }
        else {
            contentType = "application/octet-stream;charset=utf-8";
        }
        httpExchange.getResponseHeaders().set("Content-Type", contentType);
        long fileLength = new File(resource).length(); // 0 when the file does not exist
        httpExchange.sendResponseHeaders(HttpURLConnection.HTTP_OK, fileLength);
        OutputStream responseBody = httpExchange.getResponseBody();
        try {
            BufferedInputStream bis = new BufferedInputStream(new FileInputStream(resource));
            try {
                byte[] buffer = new byte[8192];
                int bytesRead;
                while ((bytesRead = bis.read(buffer)) != -1) {
                    responseBody.write(buffer, 0, bytesRead);
                }
            }
            finally {
                bis.close();
            }
        }
        catch (Exception e) {
            System.out.println(e);
        }
        finally {
            responseBody.flush();
            responseBody.close();
        }
    }
}
|
source/com/jbooktrader/platform/web/WebHandler.java
|
package com.jbooktrader.platform.web;
import com.jbooktrader.platform.model.*;
import com.jbooktrader.platform.performance.*;
import com.jbooktrader.platform.position.*;
import com.jbooktrader.platform.startup.*;
import com.jbooktrader.platform.strategy.*;
import com.jbooktrader.platform.util.*;
import com.sun.net.httpserver.*;
import java.io.*;
import java.net.*;
import java.text.*;
public class WebHandler implements HttpHandler {
    /**
     * Renders a plain HTML status page listing every running strategy together
     * with its position, trade count, max drawdown and net profit.
     *
     * @param httpExchange the exchange supplied by the embedded HTTP server
     * @throws IOException if writing the response fails
     */
    public void handle(HttpExchange httpExchange) throws IOException {
        StringBuilder sb = new StringBuilder();
        sb.append("<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.01//EN\" \"http://www.w3.org/TR/html4/strict.dtd\">");
        sb.append("<html>");
        sb.append("<head>");
        sb.append("<title>JBookTrader Web Console</title>");
        sb.append("<style type=\"text/css\">h3 { text-align: center; }</style>");
        sb.append("</head>");
        sb.append("<body>");
        sb.append("<h3>");
        sb.append(JBookTrader.APP_NAME).append(", version ").append(JBookTrader.VERSION);
        sb.append(", ").append(Dispatcher.getMode()).append(" mode");
        sb.append("</h3>");
        sb.append("<table bgcolor=\"#FFFFEE\" cellspacing=\"0\" border=\"1\" width=\"100%\">");
        sb.append("<tr bgcolor=\"#FFCC33\"><th>Strategy<th>Position<th>Trades<th>Max DD<th>Net Profit</tr>");
        DecimalFormat df = NumberFormatterFactory.getNumberFormatter(0);
        for (Strategy strategy : Dispatcher.getTrader().getAssistant().getAllStrategies()) {
            PositionManager positionManager = strategy.getPositionManager();
            PerformanceManager performanceManager = strategy.getPerformanceManager();
            sb.append("<tr>");
            sb.append("<td>").append(strategy.getName()).append("</td>");
            sb.append("<td align=\"right\">").append(positionManager.getPosition()).append("</td>");
            sb.append("<td align=\"right\">").append(performanceManager.getTrades()).append("</td>");
            sb.append("<td align=\"right\">").append(df.format(performanceManager.getMaxDrawdown())).append("</td>");
            sb.append("<td align=\"right\">").append(df.format(performanceManager.getNetProfit())).append("</td>");
            sb.append("</tr>");
        }
        sb.append("</table>");
        sb.append("</body>");
        sb.append("</html>");
        String response = sb.toString();
        // Content-Length must be the encoded byte count, not String.length():
        // the original under-counted for any non-ASCII strategy name.
        byte[] bytes = response.getBytes();
        httpExchange.sendResponseHeaders(HttpURLConnection.HTTP_OK, bytes.length);
        OutputStream os = httpExchange.getResponseBody();
        try {
            os.write(bytes);
        }
        finally {
            os.close();
        }
    }
}
|
Significantly enhanced the capabilities of the Web Access output and added native look and feel support for the iPhone/iPod Touch as a client.
|
source/com/jbooktrader/platform/web/WebHandler.java
|
Significantly enhanced the capabilities of the Web Access output and added native look and feel support for the iPhone/iPod Touch as a client.
|
|
Java
|
bsd-3-clause
|
a2d80db13e318e68bc5c1b93119e1df7807712a0
| 0
|
vietnguyen/dhis2-core,msf-oca-his/dhis2-core,vietnguyen/dhis2-core,hispindia/dhis2-Core,vietnguyen/dhis2-core,hispindia/dhis2-Core,msf-oca-his/dhis-core,msf-oca-his/dhis2-core,msf-oca-his/dhis2-core,hispindia/dhis2-Core,dhis2/dhis2-core,dhis2/dhis2-core,vietnguyen/dhis2-core,msf-oca-his/dhis-core,msf-oca-his/dhis-core,dhis2/dhis2-core,msf-oca-his/dhis-core,vietnguyen/dhis2-core,hispindia/dhis2-Core,msf-oca-his/dhis2-core,hispindia/dhis2-Core,msf-oca-his/dhis2-core,dhis2/dhis2-core,dhis2/dhis2-core,msf-oca-his/dhis-core
|
package org.hisp.dhis.startup;
/*
* Copyright (c) 2004-2018, University of Oslo
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* Neither the name of the HISP project nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.hisp.dhis.jdbc.StatementBuilder;
import org.hisp.dhis.system.startup.AbstractStartupRoutine;
import org.hisp.quick.StatementManager;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.transaction.annotation.Transactional;
/**
* @author Lars Helge Overland
*/
public class InitTableAlteror
extends AbstractStartupRoutine
{
private static final Log log = LogFactory.getLog( InitTableAlteror.class );
@Autowired
private StatementManager statementManager;
@Autowired
private StatementBuilder statementBuilder;
// -------------------------------------------------------------------------
// Execute
// -------------------------------------------------------------------------
@Override
@Transactional
public void execute()
{
executeSql( "update dataelement set domaintype='AGGREGATE' where domaintype='aggregate' or domaintype is null;" );
executeSql( "update dataelement set domaintype='TRACKER' where domaintype='patient';" );
executeSql( "update users set invitation = false where invitation is null" );
executeSql( "update users set selfregistered = false where selfregistered is null" );
executeSql( "update users set externalauth = false where externalauth is null" );
executeSql( "update users set disabled = false where disabled is null" );
executeSql( "alter table dataelement alter column domaintype set not null;" );
executeSql( "alter table programstageinstance alter column status type varchar(25);" );
executeSql( "UPDATE programstageinstance SET status='ACTIVE' WHERE status='0';" );
executeSql( "UPDATE programstageinstance SET status='COMPLETED' WHERE status='1';" );
executeSql( "UPDATE programstageinstance SET status='SKIPPED' WHERE status='5';" );
executeSql( "ALTER TABLE program DROP COLUMN displayonallorgunit" );
upgradeProgramStageDataElements();
updateValueTypes();
updateAggregationTypes();
updateFeatureTypes();
updateValidationRuleEnums();
updateProgramStatus();
removeDeprecatedConfigurationColumns();
updateTimestamps();
updateCompletedBy();
updateRelativePeriods();
executeSql( "ALTER TABLE program ALTER COLUMN \"type\" TYPE varchar(255);" );
executeSql( "update program set \"type\"='WITH_REGISTRATION' where type='1' or type='2'" );
executeSql( "update program set \"type\"='WITHOUT_REGISTRATION' where type='3'" );
// Update userkeyjsonvalue and keyjsonvalue to set new encrypted column to false.
executeSql( "UPDATE keyjsonvalue SET encrypted = false WHERE encrypted IS NULL" );
executeSql( "UPDATE userkeyjsonvalue SET encrypted = false WHERE encrypted IS NULL" );
// Set messages "ticket" properties to non-null values
executeSql( "UPDATE message SET internal = FALSE WHERE internal IS NULL" );
executeSql( "UPDATE messageconversation SET priority = 'NONE' WHERE priority IS NULL" );
executeSql( "UPDATE messageconversation SET status = 'NONE' WHERE status IS NULL" );
updateMessageConversationMessageCount();
// Set OrganisationUnitGroupSet includeSubhierarchyInAnalytics to false where IS NULL
executeSql( "UPDATE orgunitgroupset SET includesubhierarchyinanalytics = FALSE WHERE includesubhierarchyinanalytics IS NULL" );
// Update programstageinstance set deleted = false where deleted = null
executeSql( "UPDATE programstageinstance SET deleted = false WHERE deleted IS NULL" );
executeSql( "alter table programstageinstance alter column deleted set not null" );
executeSql( "create index in_programstageinstance_deleted on programstageinstace(deleted)" );
// Update trackedentityinstance set deleted = false where deleted = null
executeSql( "UPDATE trackedentityinstance SET deleted = false WHERE deleted IS NULL" );
executeSql( "alter table trackedentityinstance alter column deleted set not null" );
executeSql( "create index in_trackedentityinstance_deleted on trackedentityinstance(deleted)" );
// Update programinstance set deleted = false where deleted = null
executeSql( "UPDATE programinstance SET deleted = false WHERE deleted IS NULL" );
executeSql( "alter table programinstance alter column deleted set not null" );
executeSql( "create index in_programinstance_deleted on programinstance(deleted)" );
// Remove DataSet start and end date - replaced by DataInputPeriods
executeSql( "ALTER TABLE dataset drop column startdate" );
executeSql( "ALTER TABLE dataset drop column enddate" );
updateLegendSetAssociationAndDeleteOldAssociation();
// Message Conversation Message Type
updateMessageConversationMessageTypes();
executeSql( "UPDATE expression SET slidingWindow = FALSE WHERE slidingWindow IS NULL" );
executeSql( "UPDATE validationResult set notificationsent = false WHERE notificationsent is null" );
executeSql( "UPDATE trackedentityinstance SET featuretype = 'NONE' WHERE featuretype IS NULL " );
updateTrackedEntityAttributePatternAndTextPattern();
// 2FA fixes for 2.30
executeSql( "UPDATE users set twofa = false where twofa is null" );
executeSql( "ALTER TABLE users alter column twofa set not null" );
// Update trackedentityattribute set skipsynchronization = false where skipsynchronization = null
executeSql( "UPDATE trackedentityattribute SET skipsynchronization = false WHERE skipsynchronization IS NULL" );
executeSql( "ALTER TABLE trackedentityattribute ALTER COLUMN skipsynchronization SET NOT NULL" );
// alter/update lastsynchronized column in trackedentityinstance to: NOT NULL, DEFAULT to_timestamp(0)
executeSql( "UPDATE trackedentityinstance SET lastsynchronized = to_timestamp(0) WHERE lastsynchronized IS NULL;" ); //Do not remove this line if some cleanup will ever happen
executeSql( "ALTER TABLE trackedentityinstance ALTER COLUMN lastsynchronized SET NOT NULL" );
executeSql( "ALTER TABLE trackedentityinstance ALTER COLUMN lastsynchronized SET DEFAULT to_timestamp(0)" );
// alter/update lastsynchronized column in programstageinstance to: NOT NULL, DEFAULT to_timestamp(0)
executeSql( "UPDATE programstageinstance SET lastsynchronized = to_timestamp(0) WHERE lastsynchronized IS NULL" ); //Do not remove this line if some cleanup will ever happen
executeSql( "ALTER TABLE programstageinstance ALTER COLUMN lastsynchronized SET NOT NULL" );
executeSql( "ALTER TABLE programstageinstance ALTER COLUMN lastsynchronized SET DEFAULT to_timestamp(0)" );
// Update trackedentityattribute set skipsynchronization = false where skipsynchronization = null
executeSql( "UPDATE programstagedataelement SET skipsynchronization = false WHERE skipsynchronization IS NULL" );
executeSql( "ALTER TABLE programstagedataelement ALTER COLUMN skipsynchronization SET NOT NULL" );
executeSql( "UPDATE programstage SET featuretype = 'POINT' WHERE capturecoordinates = true AND featuretype IS NULL" );
executeSql( "UPDATE programstage SET featuretype = 'NONE' WHERE capturecoordinates = false AND featuretype IS NULL" );
updateAndRemoveOldProgramStageInstanceCoordinates();
//Remove createddate column from trackedentitycomment table
executeSql( "UPDATE trackedentitycomment SET created = createddate WHERE created IS NOT NULL;" );
executeSql( "ALTER TABLE trackedentitycomment DROP COLUMN createddate;" );
addGenerateUidFunction();
}
    /**
     * Installs (or replaces) a PostgreSQL function {@code generate_uid()} that
     * produces a DHIS2 UID: one letter followed by 10 alphanumeric characters.
     * The first character is drawn from index 11 onward of the chars array,
     * i.e. letters only, so the UID never starts with a digit.
     */
    private void addGenerateUidFunction()
    {
        executeSql(
            "create or replace function generate_uid()\n" +
                " returns text as\n" +
                "$$\n" +
                "declare\n" +
                " chars text [] := '{0,1,2,3,4,5,6,7,8,9,a,b,c,d,e,f,g,h,i,j,k,l,m,n,o,p,q,r,s,t,u,v,w,x,y,z,A,B,C,D,E,F,G,H,I,J,K,L,M,N,O,P,Q,R,S,T,U,V,W,X,Y,Z}';\n" +
                " result text := chars [11 + random() * (array_length(chars, 1) - 11)];\n" +
                "begin\n" +
                " for i in 1..10 loop\n" +
                " result := result || chars [1 + random() * (array_length(chars, 1) - 1)];\n" +
                " end loop;\n" +
                " return result;\n" +
                "end;\n" +
                "$$\n" +
                "language plpgsql;"
        );
    }
private void updateAndRemoveOldProgramStageInstanceCoordinates()
{
executeSql( "UPDATE programstageinstance " +
"SET geometry = ST_GeomFromText('POINT(' || longitude || ' ' || latitude || ')', 4326) " +
"WHERE longitude IS NOT NULL " +
"AND latitude IS NOT NULL" +
"AND geometry IS NULL" );
executeSql( "ALTER TABLE programstageinstance DROP COLUMN latitude " );
executeSql( "ALTER TABLE programstageinstance DROP COLUMN longitude " );
}
    /**
     * Migrates generated tracked entity attributes to the TextPattern model:
     * builds the textpattern JSON, rewrites the legacy '#' pattern to
     * RANDOM(...) syntax, moves unexpired reserved values to the new
     * reservedvalue table and drops the legacy reserved-value table.
     */
    private void updateTrackedEntityAttributePatternAndTextPattern()
    {
        // Create textpattern jsonb
        executeSql( "UPDATE trackedentityattribute SET textpattern = concat('{\"ownerUid\": \"', uid, '\",\"segments\": [{\"parameter\": \"', pattern, '\",\"method\": \"RANDOM\"}],\"ownerObject\": \"TRACKEDENTITYATTRIBUTE\"}')::jsonb WHERE pattern SIMILAR TO '#+' AND generated = true AND textpattern IS NULL" );
        // Update pattern to match new syntax
        executeSql( "UPDATE trackedentityattribute SET pattern = concat('RANDOM(', pattern, ')') WHERE pattern SIMILAR TO '#+' AND generated = true AND textpattern IS NOT NULL" );
        // Move all reserved values into the new table
        executeSql( "INSERT INTO reservedvalue(owneruid, key, value, expires, ownerobject, reservedvalueid) " +
            "SELECT TEA.uid, TEA.pattern, TEARV.value, TEARV.expirydate, 'TRACKEDENTITYATTRIBUTE', nextval('hibernate_sequence') " +
            "FROM trackedentityattributereservedvalue TEARV, trackedentityattribute TEA " +
            "WHERE TEARV.trackedentityattributeid = TEA.trackedentityattributeid " +
            "AND TEARV.expirydate > NOW() " +
            "AND TEARV.trackedentityinstanceid IS NULL" );
        // Drop the old table
        executeSql( "DROP TABLE trackedentityattributereservedvalue" );
    }
    /**
     * Back-fills the messagetype column on existing message conversations.
     * Order matters: each UPDATE only touches rows still NULL, so the
     * classification heuristics are applied most-specific first and the final
     * statement labels the remainder as PRIVATE before adding NOT NULL.
     */
    private void updateMessageConversationMessageTypes()
    {
        // Tickets has status != NONE
        executeSql( "UPDATE messageconversation SET messagetype = 'TICKET' WHERE messagetype IS NULL AND status != 'NONE'" );
        // Validation results existing ValidationResults always start with "Alerts as of%"
        executeSql( "UPDATE messageconversation SET messagetype = 'VALIDATION_RESULT' WHERE messagetype IS NULL AND ( subject LIKE 'Alerts as of%' OR subject LIKE 'DHIS alerts as of%' )" );
        // System Always have no user "owner"
        executeSql( "UPDATE messageconversation SET messagetype = 'SYSTEM' WHERE messagetype IS NULL AND userid IS NULL" );
        // Direct messages is what is left
        executeSql( "UPDATE messageconversation SET messagetype = 'PRIVATE' WHERE messagetype IS NULL" );
        executeSql( "ALTER TABLE messageconversation ALTER COLUMN messagetype set not null" );
    }
private void updateLegendSetAssociationAndDeleteOldAssociation()
{
// Transfer all existing references from dataelement to legendset to new many-to-many table
// Then delete old reference
executeSql( "INSERT INTO dataelementlegendsets (dataelementid, sort_order, legendsetid) SELECT dataelementid, 0, legendsetid FROM dataelement WHERE legendsetid IS NOT NULL" );
executeSql( "ALTER TABLE dataelement DROP COLUMN legendsetid " );
// Transfer all existing references from dataset to legendset to new many-to-many table
// Then delete old reference
executeSql( "INSERT INTO datasetlegendsets (datasetid, sort_order, legendsetid) SELECT datasetid, 0, legendsetid FROM dataset WHERE legendsetid IS NOT NULL" );
executeSql( "ALTER TABLE dataset DROP COLUMN legendsetid " );
// Transfer all existing references from dataset to legendset to new many-to-many table
// Then delete old reference
executeSql( "INSERT INTO indicatorlegendsets (indicatorid, sort_order, legendsetid) SELECT indicatorid, 0, legendsetid FROM indicator WHERE legendsetid IS NOT NULL" );
executeSql( "ALTER TABLE indicator DROP COLUMN legendsetid " );
// Transfer all existing references from dataset to legendset to new many-to-many table
// Then delete old reference
executeSql( "INSERT INTO programindicatorlegendsets (programindicatorid, sort_order, legendsetid) SELECT programindicatorid, 0, legendsetid FROM programindicator WHERE legendsetid IS NOT NULL" );
executeSql( "ALTER TABLE programindicator DROP COLUMN legendsetid " );
// Transfer all existing references from dataset to legendset to new many-to-many table
// Then delete old reference
executeSql( "INSERT INTO programindicatorlegendsets (programindicatorid, sort_order, legendsetid) SELECT programindicatorid, 0, legendsetid FROM programindicator WHERE legendsetid IS NOT NULL" );
executeSql( "ALTER TABLE programindicator DROP COLUMN legendsetid " );
// Transfer all existing references from dataset to legendset to new many-to-many table
// Then delete old reference
executeSql( "INSERT INTO trackedentityattributelegendsets (trackedentityattributeid, sort_order, legendsetid) SELECT trackedentityattributeid, 0, legendsetid FROM trackedentityattribute WHERE legendsetid IS NOT NULL" );
executeSql( "ALTER TABLE trackedentityattribute DROP COLUMN legendsetid " );
}
    /**
     * Back-fills the messagecount column from the messages join table, but only
     * when at least one row still has a NULL count (skips the expensive UPDATE
     * on already-migrated databases).
     */
    private void updateMessageConversationMessageCount()
    {
        // NOTE(review): queryForInteger presumably never returns null here;
        // a null would unbox to a NPE on the comparison — confirm.
        Integer nullCounts = statementManager.getHolder().queryForInteger( "SELECT count(*) from messageconversation WHERE messagecount IS NULL" );
        if ( nullCounts > 0 )
        {
            executeSql( "update messageconversation MC SET messagecount = (SELECT count(MCM.messageconversationid) FROM messageconversation_messages MCM WHERE messageconversationid=MC.messageconversationid)" );
        }
    }
    /**
     * Renames the legacy completeduser data into the completedby column for
     * program instances and program stage instances, then drops the old column.
     */
    private void updateCompletedBy()
    {
        executeSql( "update programinstance set completedby=completeduser where completedby is null" );
        executeSql( "update programstageinstance set completedby=completeduser where completedby is null" );
        executeSql( "alter table programinstance drop column completeduser" );
        executeSql( "alter table programstageinstance drop column completeduser" );
    }
// -------------------------------------------------------------------------
// Supportive methods
// -------------------------------------------------------------------------
    /**
     * Drops configuration columns that are no longer used (including the
     * historical "smptpassword" misspelling). Failures are logged at debug
     * level only, since the columns may already be gone.
     */
    private void removeDeprecatedConfigurationColumns()
    {
        try
        {
            executeSql( "ALTER TABLE configuration DROP COLUMN smptpassword" );
            executeSql( "ALTER TABLE configuration DROP COLUMN smtppassword" );
            executeSql( "ALTER TABLE configuration DROP COLUMN remoteserverurl" );
            executeSql( "ALTER TABLE configuration DROP COLUMN remoteserverusername" );
            executeSql( "ALTER TABLE configuration DROP COLUMN remotepassword" );
            executeSql( "ALTER TABLE configuration DROP COLUMN remoteserverpassword" );
        }
        catch ( Exception ex )
        {
            // executeSql already swallows per-statement errors; this guard is belt-and-braces.
            log.debug( ex );
        }
    }
    /**
     * Migrates legacy "timestamp" columns into created/lastupdated, defaulting
     * any remaining NULLs to now(), then drops the old columns.
     */
    private void updateTimestamps()
    {
        executeSql( "update datavalueaudit set created=timestamp where created is null" );
        // Fallback for rows whose legacy timestamp was itself NULL.
        executeSql( "update datavalueaudit set created=now() where created is null" );
        executeSql( "alter table datavalueaudit drop column timestamp" );
        executeSql( "update trackedentitydatavalue set created=timestamp where created is null" );
        executeSql( "update trackedentitydatavalue set lastupdated=timestamp where lastupdated is null" );
        executeSql( "update trackedentityattributevalue set created=now() where created is null" );
        executeSql( "update trackedentityattributevalue set lastupdated=now() where lastupdated is null" );
        executeSql( "alter table trackedentitydatavalue drop column timestamp" );
    }
    /**
     * Converts numeric programinstance status codes to their enum names,
     * defaulting NULL to ACTIVE.
     */
    private void updateProgramStatus()
    {
        executeSql( "alter table programinstance alter column status type varchar(50)" );
        executeSql( "update programinstance set status='ACTIVE' where status='0'" );
        executeSql( "update programinstance set status='COMPLETED' where status='1'" );
        executeSql( "update programinstance set status='CANCELLED' where status='2'" );
        executeSql( "update programinstance set status='ACTIVE' where status is null" );
    }
    /**
     * Upper-cases legacy validation rule type/importance values to match the
     * enum names, with VALIDATION / MEDIUM as the defaults for empty or NULL.
     */
    private void updateValidationRuleEnums()
    {
        executeSql( "alter table validationrule alter column ruletype type varchar(50)" );
        executeSql( "alter table validationrule alter column importance type varchar(50)" );
        executeSql( "update validationrule set ruletype='VALIDATION' where ruletype='validation'" );
        executeSql( "update validationrule set ruletype='SURVEILLANCE' where ruletype='surveillance'" );
        executeSql( "update validationrule set ruletype='VALIDATION' where ruletype='' or ruletype is null" );
        executeSql( "update validationrule set importance='HIGH' where importance='high'" );
        executeSql( "update validationrule set importance='MEDIUM' where importance='medium'" );
        executeSql( "update validationrule set importance='LOW' where importance='low'" );
        executeSql( "update validationrule set importance='MEDIUM' where importance='' or importance is null" );
    }
    /**
     * Upper-cases legacy organisation unit feature type values to the enum
     * names, defaulting NULL to NONE.
     */
    private void updateFeatureTypes()
    {
        executeSql( "update organisationunit set featuretype='NONE' where featuretype='None'" );
        executeSql( "update organisationunit set featuretype='MULTI_POLYGON' where featuretype='MultiPolygon'" );
        executeSql( "update organisationunit set featuretype='POLYGON' where featuretype='Polygon'" );
        executeSql( "update organisationunit set featuretype='POINT' where featuretype='Point'" );
        executeSql( "update organisationunit set featuretype='SYMBOL' where featuretype='Symbol'" );
        executeSql( "update organisationunit set featuretype='NONE' where featuretype is null" );
    }
    /**
     * Maps legacy lower-case data element aggregation type codes to the enum
     * names, defaulting NULL to SUM.
     */
    private void updateAggregationTypes()
    {
        executeSql( "alter table dataelement alter column aggregationtype type varchar(50)" );
        executeSql( "update dataelement set aggregationtype='SUM' where aggregationtype='sum'" );
        executeSql( "update dataelement set aggregationtype='AVERAGE' where aggregationtype='avg'" );
        executeSql( "update dataelement set aggregationtype='AVERAGE_SUM_ORG_UNIT' where aggregationtype='avg_sum_org_unit'" );
        executeSql( "update dataelement set aggregationtype='AVERAGE_SUM_ORG_UNIT' where aggregationtype='average'" );
        executeSql( "update dataelement set aggregationtype='COUNT' where aggregationtype='count'" );
        executeSql( "update dataelement set aggregationtype='STDDEV' where aggregationtype='stddev'" );
        executeSql( "update dataelement set aggregationtype='VARIANCE' where aggregationtype='variance'" );
        executeSql( "update dataelement set aggregationtype='MIN' where aggregationtype='min'" );
        executeSql( "update dataelement set aggregationtype='MAX' where aggregationtype='max'" );
        executeSql( "update dataelement set aggregationtype='NONE' where aggregationtype='none'" );
        executeSql( "update dataelement set aggregationtype='DEFAULT' where aggregationtype='default'" );
        executeSql( "update dataelement set aggregationtype='CUSTOM' where aggregationtype='custom'" );
        executeSql( "update dataelement set aggregationtype='SUM' where aggregationtype is null" );
    }
    /**
     * Collapses the legacy valuetype/numbertype/texttype triples on data
     * elements, tracked entity attributes, option sets and attributes into the
     * unified ValueType enum names, then drops the obsolete numbertype and
     * texttype columns.
     */
    private void updateValueTypes()
    {
        // Data elements: combine valuetype + numbertype, then drop numbertype.
        executeSql( "alter table dataelement alter column valuetype type varchar(50)" );
        executeSql( "update dataelement set valuetype='NUMBER' where valuetype='int' and numbertype='number'" );
        executeSql( "update dataelement set valuetype='INTEGER' where valuetype='int' and numbertype='int'" );
        executeSql( "update dataelement set valuetype='INTEGER_POSITIVE' where valuetype='int' and numbertype='posInt'" );
        executeSql( "update dataelement set valuetype='INTEGER_POSITIVE' where valuetype='int' and numbertype='positiveNumber'" );
        executeSql( "update dataelement set valuetype='INTEGER_NEGATIVE' where valuetype='int' and numbertype='negInt'" );
        executeSql( "update dataelement set valuetype='INTEGER_NEGATIVE' where valuetype='int' and numbertype='negativeNumber'" );
        executeSql( "update dataelement set valuetype='INTEGER_ZERO_OR_POSITIVE' where valuetype='int' and numbertype='zeroPositiveInt'" );
        executeSql( "update dataelement set valuetype='PERCENTAGE' where valuetype='int' and numbertype='percentage'" );
        executeSql( "update dataelement set valuetype='UNIT_INTERVAL' where valuetype='int' and numbertype='unitInterval'" );
        executeSql( "update dataelement set valuetype='NUMBER' where valuetype='int' and numbertype is null" );
        executeSql( "alter table dataelement drop column numbertype" );
        // Data elements: combine valuetype + texttype, then drop texttype.
        executeSql( "update dataelement set valuetype='TEXT' where valuetype='string' and texttype='text'" );
        executeSql( "update dataelement set valuetype='LONG_TEXT' where valuetype='string' and texttype='longText'" );
        executeSql( "update dataelement set valuetype='TEXT' where valuetype='string' and texttype is null" );
        executeSql( "alter table dataelement drop column texttype" );
        executeSql( "update dataelement set valuetype='DATE' where valuetype='date'" );
        executeSql( "update dataelement set valuetype='DATETIME' where valuetype='datetime'" );
        executeSql( "update dataelement set valuetype='BOOLEAN' where valuetype='bool'" );
        executeSql( "update dataelement set valuetype='TRUE_ONLY' where valuetype='trueOnly'" );
        executeSql( "update dataelement set valuetype='USERNAME' where valuetype='username'" );
        executeSql( "update dataelement set valuetype='NUMBER' where valuetype is null" );
        // Tracked entity attributes.
        executeSql( "update trackedentityattribute set valuetype='TEXT' where valuetype='string'" );
        executeSql( "update trackedentityattribute set valuetype='PHONE_NUMBER' where valuetype='phoneNumber'" );
        executeSql( "update trackedentityattribute set valuetype='EMAIL' where valuetype='email'" );
        executeSql( "update trackedentityattribute set valuetype='NUMBER' where valuetype='number'" );
        executeSql( "update trackedentityattribute set valuetype='NUMBER' where valuetype='int'" );
        executeSql( "update trackedentityattribute set valuetype='LETTER' where valuetype='letter'" );
        executeSql( "update trackedentityattribute set valuetype='BOOLEAN' where valuetype='bool'" );
        executeSql( "update trackedentityattribute set valuetype='TRUE_ONLY' where valuetype='trueOnly'" );
        executeSql( "update trackedentityattribute set valuetype='DATE' where valuetype='date'" );
        executeSql( "update trackedentityattribute set valuetype='TEXT' where valuetype='optionSet'" );
        executeSql( "update trackedentityattribute set valuetype='TEXT' where valuetype='OPTION_SET'" );
        executeSql( "update trackedentityattribute set valuetype='TRACKER_ASSOCIATE' where valuetype='trackerAssociate'" );
        executeSql( "update trackedentityattribute set valuetype='USERNAME' where valuetype='users'" );
        executeSql( "update trackedentityattribute set valuetype='TEXT' where valuetype is null" );
        // Option sets and metadata attributes.
        executeSql( "update optionset set valuetype='TEXT' where valuetype is null" );
        executeSql( "update attribute set valuetype='TEXT' where valuetype='string'" );
        executeSql( "update attribute set valuetype='LONG_TEXT' where valuetype='text'" );
        executeSql( "update attribute set valuetype='BOOLEAN' where valuetype='bool'" );
        executeSql( "update attribute set valuetype='DATE' where valuetype='date'" );
        executeSql( "update attribute set valuetype='NUMBER' where valuetype='number'" );
        executeSql( "update attribute set valuetype='INTEGER' where valuetype='integer'" );
        executeSql( "update attribute set valuetype='INTEGER_POSITIVE' where valuetype='positive_integer'" );
        executeSql( "update attribute set valuetype='INTEGER_NEGATIVE' where valuetype='negative_integer'" );
        executeSql( "update attribute set valuetype='TEXT' where valuetype='option_set'" );
        executeSql( "update attribute set valuetype='TEXT' where valuetype is null" );
    }
/**
 * One-time schema upgrade: copies rows from the legacy
 * {@code programstage_dataelements} join table into the
 * {@code programstagedataelement} entity table, then drops the old table.
 * No-op when the legacy table no longer exists.
 */
private void upgradeProgramStageDataElements()
{
    if ( tableExists( "programstage_dataelements" ) )
    {
        // DB-specific expression that generates the new primary key value.
        String autoIncr = statementBuilder.getAutoIncrementValue();
        String insertSql =
            "insert into programstagedataelement(programstagedataelementid,programstageid,dataelementid,compulsory,allowprovidedelsewhere," +
            "sort_order,displayinreports,programstagesectionid,allowfuturedate,section_sort_order) " + "select " + autoIncr +
            ",programstageid,dataelementid,compulsory,allowprovidedelsewhere,sort_order,displayinreports,programstagesectionid,allowfuturedate,section_sort_order from programstage_dataelements";
        executeSql( insertSql );
        // Legacy table is dropped only after the copy has been attempted.
        String dropSql = "drop table programstage_dataelements";
        executeSql( dropSql );
        log.info( "Upgraded program stage data elements" );
    }
}
/**
 * Back-fills the bi-week flags on existing relativeperiods rows with
 * 'f' (false) so the columns can be treated as non-null by the application.
 * No-op when the table does not exist.
 */
private void updateRelativePeriods()
{
    if ( tableExists( "relativeperiods" ) )
    {
        executeSql( "UPDATE relativeperiods SET thisbiweek='f' WHERE thisbiweek IS NULL" );
        executeSql( "UPDATE relativeperiods SET lastbiweek='f' WHERE lastbiweek IS NULL" );
        executeSql( "UPDATE relativeperiods SET last4biweeks='f' WHERE last4biweeks IS NULL" );
    }
}
/**
 * Executes a single update/DDL statement, treating failure as non-fatal.
 *
 * @param sql the SQL statement to run.
 * @return the number of affected rows, or -1 when execution failed.
 */
private int executeSql( String sql )
{
    try
    {
        final int affectedRows = statementManager.getHolder().executeUpdate( sql );
        return affectedRows;
    }
    catch ( Exception failure )
    {
        // Many of these upgrade statements are expected to fail on databases
        // that were already migrated (column missing, etc.), so failures are
        // only logged at debug level.
        log.debug( failure );
        return -1;
    }
}
/**
 * Tells whether the given table exists by probing it with a trivial query.
 *
 * @param table the table name to probe.
 * @return true when the query succeeds, false otherwise.
 */
private boolean tableExists( String table )
{
    try
    {
        // Any exception (typically "relation does not exist") is taken to
        // mean the table is absent.
        statementManager.getHolder().queryForInteger( "select 1 from " + table );
        return true;
    }
    catch ( Exception ignored )
    {
        return false;
    }
}
}
|
dhis-2/dhis-services/dhis-service-core/src/main/java/org/hisp/dhis/startup/InitTableAlteror.java
|
package org.hisp.dhis.startup;
/*
* Copyright (c) 2004-2018, University of Oslo
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* Neither the name of the HISP project nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.hisp.dhis.jdbc.StatementBuilder;
import org.hisp.dhis.system.startup.AbstractStartupRoutine;
import org.hisp.quick.StatementManager;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.transaction.annotation.Transactional;
/**
* @author Lars Helge Overland
*/
public class InitTableAlteror
extends AbstractStartupRoutine
{
private static final Log log = LogFactory.getLog( InitTableAlteror.class );
@Autowired
private StatementManager statementManager;
@Autowired
private StatementBuilder statementBuilder;
// -------------------------------------------------------------------------
// Execute
// -------------------------------------------------------------------------
/**
 * Runs all one-off schema/data migrations at startup. Each statement is
 * executed through {@link #executeSql(String)}, which swallows failures, so
 * statements that were already applied on a previous run are harmless.
 *
 * BUG FIX: the index statement for programstageinstance.deleted previously
 * referenced the misspelled table "programstageinstace", so the index
 * in_programstageinstance_deleted was silently never created.
 */
@Override
@Transactional
public void execute()
{
    executeSql( "update dataelement set domaintype='AGGREGATE' where domaintype='aggregate' or domaintype is null;" );
    executeSql( "update dataelement set domaintype='TRACKER' where domaintype='patient';" );
    executeSql( "update users set invitation = false where invitation is null" );
    executeSql( "update users set selfregistered = false where selfregistered is null" );
    executeSql( "update users set externalauth = false where externalauth is null" );
    executeSql( "update users set disabled = false where disabled is null" );
    executeSql( "alter table dataelement alter column domaintype set not null;" );
    executeSql( "alter table programstageinstance alter column status type varchar(25);" );
    executeSql( "UPDATE programstageinstance SET status='ACTIVE' WHERE status='0';" );
    executeSql( "UPDATE programstageinstance SET status='COMPLETED' WHERE status='1';" );
    executeSql( "UPDATE programstageinstance SET status='SKIPPED' WHERE status='5';" );
    executeSql( "ALTER TABLE program DROP COLUMN displayonallorgunit" );

    upgradeProgramStageDataElements();
    updateValueTypes();
    updateAggregationTypes();
    updateFeatureTypes();
    updateValidationRuleEnums();
    updateProgramStatus();
    removeDeprecatedConfigurationColumns();
    updateTimestamps();
    updateCompletedBy();
    updateRelativePeriods();

    executeSql( "ALTER TABLE program ALTER COLUMN \"type\" TYPE varchar(255);" );
    executeSql( "update program set \"type\"='WITH_REGISTRATION' where type='1' or type='2'" );
    executeSql( "update program set \"type\"='WITHOUT_REGISTRATION' where type='3'" );

    // Update userkeyjsonvalue and keyjsonvalue to set new encrypted column to false.
    executeSql( "UPDATE keyjsonvalue SET encrypted = false WHERE encrypted IS NULL" );
    executeSql( "UPDATE userkeyjsonvalue SET encrypted = false WHERE encrypted IS NULL" );

    // Set messages "ticket" properties to non-null values.
    executeSql( "UPDATE message SET internal = FALSE WHERE internal IS NULL" );
    executeSql( "UPDATE messageconversation SET priority = 'NONE' WHERE priority IS NULL" );
    executeSql( "UPDATE messageconversation SET status = 'NONE' WHERE status IS NULL" );

    updateMessageConversationMessageCount();

    // Set OrganisationUnitGroupSet includeSubhierarchyInAnalytics to false where IS NULL.
    executeSql( "UPDATE orgunitgroupset SET includesubhierarchyinanalytics = FALSE WHERE includesubhierarchyinanalytics IS NULL" );

    // Update programstageinstance set deleted = false where deleted = null.
    executeSql( "UPDATE programstageinstance SET deleted = false WHERE deleted IS NULL" );
    executeSql( "alter table programstageinstance alter column deleted set not null" );
    executeSql( "create index in_programstageinstance_deleted on programstageinstance(deleted)" );

    // Update trackedentityinstance set deleted = false where deleted = null.
    executeSql( "UPDATE trackedentityinstance SET deleted = false WHERE deleted IS NULL" );
    executeSql( "alter table trackedentityinstance alter column deleted set not null" );
    executeSql( "create index in_trackedentityinstance_deleted on trackedentityinstance(deleted)" );

    // Update programinstance set deleted = false where deleted = null.
    executeSql( "UPDATE programinstance SET deleted = false WHERE deleted IS NULL" );
    executeSql( "alter table programinstance alter column deleted set not null" );
    executeSql( "create index in_programinstance_deleted on programinstance(deleted)" );

    // Remove DataSet start and end date - replaced by DataInputPeriods.
    executeSql( "ALTER TABLE dataset drop column startdate" );
    executeSql( "ALTER TABLE dataset drop column enddate" );

    updateLegendSetAssociationAndDeleteOldAssociation();

    // Message Conversation Message Type.
    updateMessageConversationMessageTypes();

    executeSql( "UPDATE expression SET slidingWindow = FALSE WHERE slidingWindow IS NULL" );
    executeSql( "UPDATE validationResult set notificationsent = false WHERE notificationsent is null" );
    executeSql( "UPDATE trackedentityinstance SET featuretype = 'NONE' WHERE featuretype IS NULL " );

    updateTrackedEntityAttributePatternAndTextPattern();

    // 2FA fixes for 2.30.
    executeSql( "UPDATE users set twofa = false where twofa is null" );
    executeSql( "ALTER TABLE users alter column twofa set not null" );

    // Update trackedentityattribute set skipsynchronization = false where skipsynchronization = null.
    executeSql( "UPDATE trackedentityattribute SET skipsynchronization = false WHERE skipsynchronization IS NULL" );
    executeSql( "ALTER TABLE trackedentityattribute ALTER COLUMN skipsynchronization SET NOT NULL" );

    // alter/update lastsynchronized column in trackedentityinstance to: NOT NULL, DEFAULT to_timestamp(0).
    executeSql( "UPDATE trackedentityinstance SET lastsynchronized = to_timestamp(0) WHERE lastsynchronized IS NULL;" ); //Do not remove this line if some cleanup will ever happen
    executeSql( "ALTER TABLE trackedentityinstance ALTER COLUMN lastsynchronized SET NOT NULL" );
    executeSql( "ALTER TABLE trackedentityinstance ALTER COLUMN lastsynchronized SET DEFAULT to_timestamp(0)" );

    // alter/update lastsynchronized column in programstageinstance to: NOT NULL, DEFAULT to_timestamp(0).
    executeSql( "UPDATE programstageinstance SET lastsynchronized = to_timestamp(0) WHERE lastsynchronized IS NULL" ); //Do not remove this line if some cleanup will ever happen
    executeSql( "ALTER TABLE programstageinstance ALTER COLUMN lastsynchronized SET NOT NULL" );
    executeSql( "ALTER TABLE programstageinstance ALTER COLUMN lastsynchronized SET DEFAULT to_timestamp(0)" );

    // Update programstagedataelement set skipsynchronization = false where skipsynchronization = null.
    executeSql( "UPDATE programstagedataelement SET skipsynchronization = false WHERE skipsynchronization IS NULL" );
    executeSql( "ALTER TABLE programstagedataelement ALTER COLUMN skipsynchronization SET NOT NULL" );

    // Derive the new featuretype column from the legacy capturecoordinates flag.
    executeSql( "UPDATE programstage SET featuretype = 'POINT' WHERE capturecoordinates = true AND featuretype IS NULL" );
    executeSql( "UPDATE programstage SET featuretype = 'NONE' WHERE capturecoordinates = false AND featuretype IS NULL" );

    updateAndRemoveOldProgramStageInstanceCoordinates();

    // Remove createddate column from trackedentitycomment table.
    executeSql( "UPDATE trackedentitycomment SET created = createddate WHERE created IS NOT NULL;" );
    executeSql( "ALTER TABLE trackedentitycomment DROP COLUMN createddate;" );
}
/**
 * Migrates legacy latitude/longitude columns on programstageinstance into
 * the PostGIS geometry column, then drops the legacy columns.
 *
 * BUG FIX: the original concatenation was missing a space between
 * "...IS NOT NULL" and "AND geometry IS NULL", producing the invalid SQL
 * fragment "IS NOT NULLAND geometry". Because executeSql swallows errors,
 * the UPDATE silently failed while the coordinate columns were still
 * dropped, losing the coordinates.
 */
private void updateAndRemoveOldProgramStageInstanceCoordinates()
{
    executeSql( "UPDATE programstageinstance " +
        "SET geometry = ST_GeomFromText('POINT(' || longitude || ' ' || latitude || ')', 4326) " +
        "WHERE longitude IS NOT NULL " +
        "AND latitude IS NOT NULL " +
        "AND geometry IS NULL" );
    // Only drop the legacy columns after the migration attempt.
    executeSql( "ALTER TABLE programstageinstance DROP COLUMN latitude " );
    executeSql( "ALTER TABLE programstageinstance DROP COLUMN longitude " );
}
/**
 * Migrates generated tracked entity attribute patterns to the TextPattern
 * model: builds the textpattern jsonb document, rewrites the legacy '#...'
 * pattern to the new RANDOM(...) syntax, moves unexpired reserved values
 * into the reservedvalue table, and drops the legacy reserved-value table.
 * Statement order matters: the jsonb build keys off the OLD pattern
 * (textpattern IS NULL), while the pattern rewrite requires textpattern
 * to already be set.
 */
private void updateTrackedEntityAttributePatternAndTextPattern()
{
    // Create textpattern jsonb ('#+' matches patterns of one or more hashes).
    executeSql( "UPDATE trackedentityattribute SET textpattern = concat('{\"ownerUid\": \"', uid, '\",\"segments\": [{\"parameter\": \"', pattern, '\",\"method\": \"RANDOM\"}],\"ownerObject\": \"TRACKEDENTITYATTRIBUTE\"}')::jsonb WHERE pattern SIMILAR TO '#+' AND generated = true AND textpattern IS NULL" );
    // Update pattern to match new syntax.
    executeSql( "UPDATE trackedentityattribute SET pattern = concat('RANDOM(', pattern, ')') WHERE pattern SIMILAR TO '#+' AND generated = true AND textpattern IS NOT NULL" );
    // Move all reserved values into the new table; only unexpired,
    // unassigned (trackedentityinstanceid IS NULL) reservations are kept.
    executeSql( "INSERT INTO reservedvalue(owneruid, key, value, expires, ownerobject, reservedvalueid) " +
        "SELECT TEA.uid, TEA.pattern, TEARV.value, TEARV.expirydate, 'TRACKEDENTITYATTRIBUTE', nextval('hibernate_sequence') " +
        "FROM trackedentityattributereservedvalue TEARV, trackedentityattribute TEA " +
        "WHERE TEARV.trackedentityattributeid = TEA.trackedentityattributeid " +
        "AND TEARV.expirydate > NOW() " +
        "AND TEARV.trackedentityinstanceid IS NULL" );
    // Drop the old table.
    executeSql( "DROP TABLE trackedentityattributereservedvalue" );
}
/**
 * Back-fills messageconversation.messagetype for pre-existing rows.
 * The statements form a sequential sieve over rows where messagetype IS
 * NULL, so their order is significant; the final statement is the
 * catch-all before the column is made NOT NULL.
 */
private void updateMessageConversationMessageTypes()
{
    // Tickets have status != NONE.
    executeSql( "UPDATE messageconversation SET messagetype = 'TICKET' WHERE messagetype IS NULL AND status != 'NONE'" );
    // Validation results: existing ValidationResults always start with "Alerts as of%".
    executeSql( "UPDATE messageconversation SET messagetype = 'VALIDATION_RESULT' WHERE messagetype IS NULL AND ( subject LIKE 'Alerts as of%' OR subject LIKE 'DHIS alerts as of%' )" );
    // System messages always have no user "owner".
    executeSql( "UPDATE messageconversation SET messagetype = 'SYSTEM' WHERE messagetype IS NULL AND userid IS NULL" );
    // Direct messages are what is left.
    executeSql( "UPDATE messageconversation SET messagetype = 'PRIVATE' WHERE messagetype IS NULL" );
    executeSql( "ALTER TABLE messageconversation ALTER COLUMN messagetype set not null" );
}
/**
 * Moves each single-valued legendsetid foreign key into the corresponding
 * many-to-many association table (with sort_order 0), then drops the old
 * column.
 *
 * FIXES: the programindicator migration was duplicated verbatim (the
 * second copy could only fail after the first dropped the column, relying
 * on executeSql swallowing the error) — the duplicate is removed. The
 * copy-pasted comments that labelled every table "dataset" are corrected.
 */
private void updateLegendSetAssociationAndDeleteOldAssociation()
{
    // dataelement -> dataelementlegendsets
    executeSql( "INSERT INTO dataelementlegendsets (dataelementid, sort_order, legendsetid) SELECT dataelementid, 0, legendsetid FROM dataelement WHERE legendsetid IS NOT NULL" );
    executeSql( "ALTER TABLE dataelement DROP COLUMN legendsetid " );

    // dataset -> datasetlegendsets
    executeSql( "INSERT INTO datasetlegendsets (datasetid, sort_order, legendsetid) SELECT datasetid, 0, legendsetid FROM dataset WHERE legendsetid IS NOT NULL" );
    executeSql( "ALTER TABLE dataset DROP COLUMN legendsetid " );

    // indicator -> indicatorlegendsets
    executeSql( "INSERT INTO indicatorlegendsets (indicatorid, sort_order, legendsetid) SELECT indicatorid, 0, legendsetid FROM indicator WHERE legendsetid IS NOT NULL" );
    executeSql( "ALTER TABLE indicator DROP COLUMN legendsetid " );

    // programindicator -> programindicatorlegendsets
    executeSql( "INSERT INTO programindicatorlegendsets (programindicatorid, sort_order, legendsetid) SELECT programindicatorid, 0, legendsetid FROM programindicator WHERE legendsetid IS NOT NULL" );
    executeSql( "ALTER TABLE programindicator DROP COLUMN legendsetid " );

    // trackedentityattribute -> trackedentityattributelegendsets
    executeSql( "INSERT INTO trackedentityattributelegendsets (trackedentityattributeid, sort_order, legendsetid) SELECT trackedentityattributeid, 0, legendsetid FROM trackedentityattribute WHERE legendsetid IS NOT NULL" );
    executeSql( "ALTER TABLE trackedentityattribute DROP COLUMN legendsetid " );
}
/**
 * Recomputes messageconversation.messagecount for rows where it is NULL.
 * The (potentially expensive) correlated UPDATE is only run when at least
 * one such row exists.
 *
 * FIX: queryForInteger returns a boxed Integer; guard against null before
 * unboxing to avoid a startup NullPointerException if the query yields no
 * value (e.g. on a database where the table is absent or the query fails).
 */
private void updateMessageConversationMessageCount()
{
    Integer nullCounts = statementManager.getHolder().queryForInteger( "SELECT count(*) from messageconversation WHERE messagecount IS NULL" );

    if ( nullCounts != null && nullCounts > 0 )
    {
        executeSql( "update messageconversation MC SET messagecount = (SELECT count(MCM.messageconversationid) FROM messageconversation_messages MCM WHERE messageconversationid=MC.messageconversationid)" );
    }
}
/**
 * Renames the legacy completeduser column to completedby on both
 * programinstance and programstageinstance by copying the values and
 * dropping the old column.
 */
private void updateCompletedBy()
{
    executeSql( "update programinstance set completedby=completeduser where completedby is null" );
    executeSql( "update programstageinstance set completedby=completeduser where completedby is null" );
    executeSql( "alter table programinstance drop column completeduser" );
    executeSql( "alter table programstageinstance drop column completeduser" );
}
// -------------------------------------------------------------------------
// Supportive methods
// -------------------------------------------------------------------------
/**
 * Drops configuration columns that are no longer used (including the
 * historically misspelled "smptpassword"). Failures are logged at debug
 * level only, since the columns may already be gone.
 */
private void removeDeprecatedConfigurationColumns()
{
    try
    {
        executeSql( "ALTER TABLE configuration DROP COLUMN smptpassword" );
        executeSql( "ALTER TABLE configuration DROP COLUMN smtppassword" );
        executeSql( "ALTER TABLE configuration DROP COLUMN remoteserverurl" );
        executeSql( "ALTER TABLE configuration DROP COLUMN remoteserverusername" );
        executeSql( "ALTER TABLE configuration DROP COLUMN remotepassword" );
        executeSql( "ALTER TABLE configuration DROP COLUMN remoteserverpassword" );
    }
    catch ( Exception ex )
    {
        // Best-effort cleanup; executeSql already swallows per-statement errors.
        log.debug( ex );
    }
}
/**
 * Replaces the legacy "timestamp" columns with created/lastupdated:
 * copies the old value where available, falls back to now(), then drops
 * the legacy column. Statement order matters (copy before fallback
 * before drop).
 */
private void updateTimestamps()
{
    executeSql( "update datavalueaudit set created=timestamp where created is null" );
    executeSql( "update datavalueaudit set created=now() where created is null" );
    executeSql( "alter table datavalueaudit drop column timestamp" );

    executeSql( "update trackedentitydatavalue set created=timestamp where created is null" );
    executeSql( "update trackedentitydatavalue set lastupdated=timestamp where lastupdated is null" );
    executeSql( "update trackedentityattributevalue set created=now() where created is null" );
    executeSql( "update trackedentityattributevalue set lastupdated=now() where lastupdated is null" );
    executeSql( "alter table trackedentitydatavalue drop column timestamp" );
}
/**
 * Converts the numeric programinstance.status codes to the enum names
 * (0=ACTIVE, 1=COMPLETED, 2=CANCELLED); NULL defaults to ACTIVE.
 */
private void updateProgramStatus()
{
    executeSql( "alter table programinstance alter column status type varchar(50)" );
    executeSql( "update programinstance set status='ACTIVE' where status='0'" );
    executeSql( "update programinstance set status='COMPLETED' where status='1'" );
    executeSql( "update programinstance set status='CANCELLED' where status='2'" );
    executeSql( "update programinstance set status='ACTIVE' where status is null" );
}
/**
 * Upper-cases the legacy validationrule ruletype/importance values to the
 * enum names; empty/NULL values default to VALIDATION and MEDIUM.
 */
private void updateValidationRuleEnums()
{
    executeSql( "alter table validationrule alter column ruletype type varchar(50)" );
    executeSql( "alter table validationrule alter column importance type varchar(50)" );

    executeSql( "update validationrule set ruletype='VALIDATION' where ruletype='validation'" );
    executeSql( "update validationrule set ruletype='SURVEILLANCE' where ruletype='surveillance'" );
    executeSql( "update validationrule set ruletype='VALIDATION' where ruletype='' or ruletype is null" );

    executeSql( "update validationrule set importance='HIGH' where importance='high'" );
    executeSql( "update validationrule set importance='MEDIUM' where importance='medium'" );
    executeSql( "update validationrule set importance='LOW' where importance='low'" );
    executeSql( "update validationrule set importance='MEDIUM' where importance='' or importance is null" );
}
/**
 * Converts organisationunit.featuretype from CamelCase legacy values to
 * the UPPER_SNAKE_CASE enum names; NULL defaults to NONE.
 */
private void updateFeatureTypes()
{
    executeSql( "update organisationunit set featuretype='NONE' where featuretype='None'" );
    executeSql( "update organisationunit set featuretype='MULTI_POLYGON' where featuretype='MultiPolygon'" );
    executeSql( "update organisationunit set featuretype='POLYGON' where featuretype='Polygon'" );
    executeSql( "update organisationunit set featuretype='POINT' where featuretype='Point'" );
    executeSql( "update organisationunit set featuretype='SYMBOL' where featuretype='Symbol'" );
    executeSql( "update organisationunit set featuretype='NONE' where featuretype is null" );
}
/**
 * Converts dataelement.aggregationtype from lowercase legacy values to
 * the enum names; NULL defaults to SUM. Note the legacy 'average' maps to
 * AVERAGE_SUM_ORG_UNIT (not AVERAGE) — presumably matching the original
 * aggregation semantics of that value.
 */
private void updateAggregationTypes()
{
    executeSql( "alter table dataelement alter column aggregationtype type varchar(50)" );
    executeSql( "update dataelement set aggregationtype='SUM' where aggregationtype='sum'" );
    executeSql( "update dataelement set aggregationtype='AVERAGE' where aggregationtype='avg'" );
    executeSql( "update dataelement set aggregationtype='AVERAGE_SUM_ORG_UNIT' where aggregationtype='avg_sum_org_unit'" );
    executeSql( "update dataelement set aggregationtype='AVERAGE_SUM_ORG_UNIT' where aggregationtype='average'" );
    executeSql( "update dataelement set aggregationtype='COUNT' where aggregationtype='count'" );
    executeSql( "update dataelement set aggregationtype='STDDEV' where aggregationtype='stddev'" );
    executeSql( "update dataelement set aggregationtype='VARIANCE' where aggregationtype='variance'" );
    executeSql( "update dataelement set aggregationtype='MIN' where aggregationtype='min'" );
    executeSql( "update dataelement set aggregationtype='MAX' where aggregationtype='max'" );
    executeSql( "update dataelement set aggregationtype='NONE' where aggregationtype='none'" );
    executeSql( "update dataelement set aggregationtype='DEFAULT' where aggregationtype='default'" );
    executeSql( "update dataelement set aggregationtype='CUSTOM' where aggregationtype='custom'" );
    executeSql( "update dataelement set aggregationtype='SUM' where aggregationtype is null" );
}
/**
 * Collapses the legacy (valuetype, numbertype, texttype) triple on
 * dataelement into the single ValueType enum column, then applies the
 * same enum rename to trackedentityattribute, optionset and attribute.
 * Order matters: the numbertype/texttype-qualified updates must run
 * before those columns are dropped.
 */
private void updateValueTypes()
{
    executeSql( "alter table dataelement alter column valuetype type varchar(50)" );

    // dataelement: fold numbertype into valuetype, then drop it.
    executeSql( "update dataelement set valuetype='NUMBER' where valuetype='int' and numbertype='number'" );
    executeSql( "update dataelement set valuetype='INTEGER' where valuetype='int' and numbertype='int'" );
    executeSql( "update dataelement set valuetype='INTEGER_POSITIVE' where valuetype='int' and numbertype='posInt'" );
    executeSql( "update dataelement set valuetype='INTEGER_POSITIVE' where valuetype='int' and numbertype='positiveNumber'" );
    executeSql( "update dataelement set valuetype='INTEGER_NEGATIVE' where valuetype='int' and numbertype='negInt'" );
    executeSql( "update dataelement set valuetype='INTEGER_NEGATIVE' where valuetype='int' and numbertype='negativeNumber'" );
    executeSql( "update dataelement set valuetype='INTEGER_ZERO_OR_POSITIVE' where valuetype='int' and numbertype='zeroPositiveInt'" );
    executeSql( "update dataelement set valuetype='PERCENTAGE' where valuetype='int' and numbertype='percentage'" );
    executeSql( "update dataelement set valuetype='UNIT_INTERVAL' where valuetype='int' and numbertype='unitInterval'" );
    executeSql( "update dataelement set valuetype='NUMBER' where valuetype='int' and numbertype is null" );
    executeSql( "alter table dataelement drop column numbertype" );

    // dataelement: fold texttype into valuetype, then drop it.
    executeSql( "update dataelement set valuetype='TEXT' where valuetype='string' and texttype='text'" );
    executeSql( "update dataelement set valuetype='LONG_TEXT' where valuetype='string' and texttype='longText'" );
    executeSql( "update dataelement set valuetype='TEXT' where valuetype='string' and texttype is null" );
    executeSql( "alter table dataelement drop column texttype" );

    executeSql( "update dataelement set valuetype='DATE' where valuetype='date'" );
    executeSql( "update dataelement set valuetype='DATETIME' where valuetype='datetime'" );
    executeSql( "update dataelement set valuetype='BOOLEAN' where valuetype='bool'" );
    executeSql( "update dataelement set valuetype='TRUE_ONLY' where valuetype='trueOnly'" );
    executeSql( "update dataelement set valuetype='USERNAME' where valuetype='username'" );
    executeSql( "update dataelement set valuetype='NUMBER' where valuetype is null" );

    // trackedentityattribute: rename legacy values; NULL defaults to TEXT.
    executeSql( "update trackedentityattribute set valuetype='TEXT' where valuetype='string'" );
    executeSql( "update trackedentityattribute set valuetype='PHONE_NUMBER' where valuetype='phoneNumber'" );
    executeSql( "update trackedentityattribute set valuetype='EMAIL' where valuetype='email'" );
    executeSql( "update trackedentityattribute set valuetype='NUMBER' where valuetype='number'" );
    executeSql( "update trackedentityattribute set valuetype='NUMBER' where valuetype='int'" );
    executeSql( "update trackedentityattribute set valuetype='LETTER' where valuetype='letter'" );
    executeSql( "update trackedentityattribute set valuetype='BOOLEAN' where valuetype='bool'" );
    executeSql( "update trackedentityattribute set valuetype='TRUE_ONLY' where valuetype='trueOnly'" );
    executeSql( "update trackedentityattribute set valuetype='DATE' where valuetype='date'" );
    executeSql( "update trackedentityattribute set valuetype='TEXT' where valuetype='optionSet'" );
    executeSql( "update trackedentityattribute set valuetype='TEXT' where valuetype='OPTION_SET'" );
    executeSql( "update trackedentityattribute set valuetype='TRACKER_ASSOCIATE' where valuetype='trackerAssociate'" );
    executeSql( "update trackedentityattribute set valuetype='USERNAME' where valuetype='users'" );
    executeSql( "update trackedentityattribute set valuetype='TEXT' where valuetype is null" );

    executeSql( "update optionset set valuetype='TEXT' where valuetype is null" );

    // attribute: rename legacy values; NULL defaults to TEXT.
    executeSql( "update attribute set valuetype='TEXT' where valuetype='string'" );
    executeSql( "update attribute set valuetype='LONG_TEXT' where valuetype='text'" );
    executeSql( "update attribute set valuetype='BOOLEAN' where valuetype='bool'" );
    executeSql( "update attribute set valuetype='DATE' where valuetype='date'" );
    executeSql( "update attribute set valuetype='NUMBER' where valuetype='number'" );
    executeSql( "update attribute set valuetype='INTEGER' where valuetype='integer'" );
    executeSql( "update attribute set valuetype='INTEGER_POSITIVE' where valuetype='positive_integer'" );
    executeSql( "update attribute set valuetype='INTEGER_NEGATIVE' where valuetype='negative_integer'" );
    executeSql( "update attribute set valuetype='TEXT' where valuetype='option_set'" );
    executeSql( "update attribute set valuetype='TEXT' where valuetype is null" );
}
/**
 * One-time schema upgrade: copies rows from the legacy
 * {@code programstage_dataelements} join table into the
 * {@code programstagedataelement} entity table, then drops the old table.
 * No-op when the legacy table no longer exists.
 */
private void upgradeProgramStageDataElements()
{
    if ( tableExists( "programstage_dataelements" ) )
    {
        // DB-specific expression that generates the new primary key value.
        String autoIncr = statementBuilder.getAutoIncrementValue();
        String insertSql =
            "insert into programstagedataelement(programstagedataelementid,programstageid,dataelementid,compulsory,allowprovidedelsewhere," +
            "sort_order,displayinreports,programstagesectionid,allowfuturedate,section_sort_order) " + "select " + autoIncr +
            ",programstageid,dataelementid,compulsory,allowprovidedelsewhere,sort_order,displayinreports,programstagesectionid,allowfuturedate,section_sort_order from programstage_dataelements";
        executeSql( insertSql );
        // Legacy table is dropped only after the copy has been attempted.
        String dropSql = "drop table programstage_dataelements";
        executeSql( dropSql );
        log.info( "Upgraded program stage data elements" );
    }
}
/**
 * Back-fills the bi-week flags on existing relativeperiods rows with
 * 'f' (false) so the columns can be treated as non-null by the application.
 * No-op when the table does not exist.
 */
private void updateRelativePeriods()
{
    if ( tableExists( "relativeperiods" ) )
    {
        executeSql( "UPDATE relativeperiods SET thisbiweek='f' WHERE thisbiweek IS NULL" );
        executeSql( "UPDATE relativeperiods SET lastbiweek='f' WHERE lastbiweek IS NULL" );
        executeSql( "UPDATE relativeperiods SET last4biweeks='f' WHERE last4biweeks IS NULL" );
    }
}
/**
 * Executes a single update/DDL statement, treating failure as non-fatal.
 *
 * @param sql the SQL statement to run.
 * @return the number of affected rows, or -1 when execution failed.
 */
private int executeSql( String sql )
{
    try
    {
        return statementManager.getHolder().executeUpdate( sql );
    }
    catch ( Exception ex )
    {
        // Many upgrade statements are expected to fail on already-migrated
        // databases, so failures are only logged at debug level.
        log.debug( ex );
        return -1;
    }
}
/**
 * Tells whether the given table exists by probing it with a trivial query.
 *
 * @param table the table name to probe.
 * @return true when the probe query succeeds, false otherwise.
 */
private boolean tableExists( String table )
{
    try
    {
        // Any exception (typically "relation does not exist") is taken to
        // mean the table is absent.
        statementManager.getHolder().queryForInteger( "select 1 from " + table );
        return true;
    }
    catch ( Exception ex )
    {
        return false;
    }
}
}
|
Added startup method for adding SQL-function, generate_uid()
|
dhis-2/dhis-services/dhis-service-core/src/main/java/org/hisp/dhis/startup/InitTableAlteror.java
|
Added startup method for adding SQL-function, generate_uid()
|
|
Java
|
mit
|
406ffa172df14cb764ff69e7321b1822917dbbea
| 0
|
CS2103AUG2016-T09-C4/main
|
package seedu.unburden.model.task;
import java.util.Objects;
import seedu.unburden.commons.exceptions.IllegalValueException;
import seedu.unburden.commons.util.CollectionUtil;
import seedu.unburden.model.tag.UniqueTagList;
/**
* Represents a Task in the address book.
* Guarantees: details are present and not null, field values are validated.
*/
public class Task implements ReadOnlyTask {

    private Name name;          // task description; asserted non-null in all constructors
    private Date date;          // due date; set to Date("NIL") when not supplied
    private Time startTime;     // start time; Time("NIL") when not supplied
    private Time endTime;       // end time; Time("NIL") when not supplied
    private UniqueTagList tags; // internal tag list; defensively copied on both read and write

    /**
     * Every field must be present and not null.
     * NOTE(review): only name and tags are null-checked here; date/startTime/endTime
     * are assumed non-null by the caller — TODO confirm.
     */
    public Task(Name name,Date date, Time startTime, Time endTime, UniqueTagList tags) {
        assert !CollectionUtil.isAnyNull(name, tags);
        this.name = name;
        this.date = date;
        this.startTime = startTime;
        this.endTime = endTime;
        this.tags = new UniqueTagList(tags); // protect internal tags from changes in the arg list
    }

    /**
     * Copy constructor.
     */
    public Task(ReadOnlyTask source) {
        this(source.getName(), source.getDate(), source.getStartTime(), source.getEndTime(), source.getTags());
    }

    /**
     * Creates a task with no date or times; placeholders are the literal "NIL".
     *
     * @throws IllegalValueException presumably when "NIL" is rejected by the
     *         Date/Time constructors — TODO confirm those accept "NIL".
     */
    public Task(Name name, UniqueTagList tags) throws IllegalValueException {
        assert !CollectionUtil.isAnyNull(name, tags);
        this.name = name;
        this.date = new Date("NIL");
        this.startTime = new Time("NIL");
        this.endTime = new Time("NIL");
        this.tags = new UniqueTagList(tags);
    }

    /**
     * Creates a task with a date but no times ("NIL" placeholders).
     */
    public Task(Name name, Date date, UniqueTagList tags) throws IllegalValueException {
        assert!CollectionUtil.isAnyNull(name,date, tags);
        this.name = name;
        this.date = date;
        this.startTime = new Time("NIL");
        this.endTime = new Time("NIL");
        this.tags = new UniqueTagList(tags);
    }

    @Override
    public Name getName() {
        return name;
    }

    @Override
    public Date getDate() {
        return date;
    }

    @Override
    public Time getStartTime() {
        return startTime;
    }

    @Override
    public Time getEndTime() {
        return endTime;
    }

    @Override
    public UniqueTagList getTags() {
        // Defensive copy so callers cannot mutate the internal tag list.
        return new UniqueTagList(tags);
    }

    /**
     * Replaces this task's tags with the tags in the argument tag list.
     */
    public void setTags(UniqueTagList replacement) {
        tags.setTags(replacement);
    }

    @Override
    public boolean equals(Object other) {
        // Equality is delegated to isSameStateAs (defined outside this view).
        return other == this // short circuit if same object
                || (other instanceof ReadOnlyTask // instanceof handles nulls
                && this.isSameStateAs((ReadOnlyTask) other));
    }

    @Override
    public int hashCode() {
        // use this method for custom fields hashing instead of implementing your own
        // NOTE(review): hashes only name and tags; valid as long as isSameStateAs
        // compares at least those fields — TODO confirm.
        return Objects.hash(name, tags);
    }

    @Override
    public String toString() {
        // getAsText() is presumably a default method on ReadOnlyTask — TODO confirm.
        return getAsText();
    }
}
|
src/main/java/seedu/unburden/model/task/Task.java
|
package seedu.unburden.model.task;
import java.util.Objects;
import seedu.unburden.commons.exceptions.IllegalValueException;
import seedu.unburden.commons.util.CollectionUtil;
import seedu.unburden.model.tag.UniqueTagList;
/**
* Represents a Task in the address book.
* Guarantees: details are present and not null, field values are validated.
*/
public class Task implements ReadOnlyTask {

    private Name name;          // task description; asserted non-null in all constructors
    private Date date;          // due date; defaults to "00-00-0000" when not supplied
    private Time startTime;     // start time; defaults to "0000" when not supplied
    private Time endTime;       // end time; defaults to "0000" when not supplied
    private UniqueTagList tags; // internal tag list; defensively copied on both read and write

    /**
     * Every field must be present and not null.
     * NOTE(review): only name and tags are null-checked here; date/startTime/endTime
     * are assumed non-null by the caller — TODO confirm.
     */
    public Task(Name name,Date date, Time startTime, Time endTime, UniqueTagList tags) {
        assert !CollectionUtil.isAnyNull(name, tags);
        this.name = name;
        this.date = date;
        this.startTime = startTime;
        this.endTime = endTime;
        this.tags = new UniqueTagList(tags); // protect internal tags from changes in the arg list
    }

    /**
     * Copy constructor.
     */
    public Task(ReadOnlyTask source) {
        this(source.getName(), source.getDate(), source.getStartTime(), source.getEndTime(), source.getTags());
    }

    /**
     * Creates a task with sentinel date "00-00-0000" and times "0000".
     */
    public Task(Name name, UniqueTagList tags) throws IllegalValueException {
        assert !CollectionUtil.isAnyNull(name, tags);
        this.name = name;
        this.date = new Date("00-00-0000");
        this.startTime = new Time("0000");
        this.endTime = new Time("0000");
        this.tags = new UniqueTagList(tags);
    }

    /**
     * Creates a task with a date but sentinel times "0000".
     */
    public Task(Name name, Date date, UniqueTagList tags) throws IllegalValueException {
        assert!CollectionUtil.isAnyNull(name,date, tags);
        this.name = name;
        this.date = date;
        this.startTime = new Time("0000");
        this.endTime = new Time("0000");
        this.tags = new UniqueTagList(tags);
    }

    @Override
    public Name getName() {
        return name;
    }

    @Override
    public Date getDate() {
        return date;
    }

    @Override
    public Time getStartTime() {
        return startTime;
    }

    @Override
    public Time getEndTime() {
        return endTime;
    }

    @Override
    public UniqueTagList getTags() {
        // Defensive copy so callers cannot mutate the internal tag list.
        return new UniqueTagList(tags);
    }

    /**
     * Replaces this task's tags with the tags in the argument tag list.
     */
    public void setTags(UniqueTagList replacement) {
        tags.setTags(replacement);
    }

    @Override
    public boolean equals(Object other) {
        // Equality is delegated to isSameStateAs (defined outside this view).
        return other == this // short circuit if same object
                || (other instanceof ReadOnlyTask // instanceof handles nulls
                && this.isSameStateAs((ReadOnlyTask) other));
    }

    @Override
    public int hashCode() {
        // use this method for custom fields hashing instead of implementing your own
        // NOTE(review): hashes only name and tags; valid as long as isSameStateAs
        // compares at least those fields — TODO confirm.
        return Objects.hash(name, tags);
    }

    @Override
    public String toString() {
        // getAsText() is presumably a default method on ReadOnlyTask — TODO confirm.
        return getAsText();
    }
}
|
Edited the unentered fields to "NIL"
|
src/main/java/seedu/unburden/model/task/Task.java
|
Edited the unentered fields to "NIL"
|
|
Java
|
mit
|
aef2fdc1948c2ea744eea49eea74c4f136184984
| 0
|
sudip55/DemoSupport,sudip55/DemoSupport
|
/**
*
*/
package uk.co.jemos.podam.test.enums;
/**
* An enum to use in tests
*
* @author mtedone
*
*/
public enum ExternalRatePodamEnum {
EXTERNAL_COOL, EXTERNAL_ROCKS, EXTERNAL_SUPERCOOL
}
|
src/test/java/uk/co/jemos/podam/test/enums/ExternalRatePodamEnum.java
|
Added files via upload
|
src/test/java/uk/co/jemos/podam/test/enums/ExternalRatePodamEnum.java
|
Added files via upload
|
||
Java
|
mit
|
f98f2cb1c70846e82fecb5bb43f7f0fd2e0c64fa
| 0
|
CCI-MIT/XCoLab,CCI-MIT/XCoLab,CCI-MIT/XCoLab,CCI-MIT/XCoLab
|
package org.xcolab.view.util.entity.flash;
import java.io.Serializable;
import javax.servlet.http.HttpServletRequest;
public class AlertMessage implements Serializable {
private static final long serialVersionUID = 1L;
public static final AlertMessage CHANGES_SAVED = AlertMessage.success("Changes saved");
public static final AlertMessage NOT_SAVED = AlertMessage.danger("Changes NOT saved");
public static final AlertMessage CREATED = AlertMessage.success("Element created");
public static final AlertMessage DELETED = AlertMessage.success("Element deleted");
public static final AlertMessage ERROR = AlertMessage.danger("An error occurred");
private static final FlashMessageStore MESSAGE_STORE = new FlashMessageStore();
private final String message;
private final Type type;
private AlertMessage(String message, Type type) {
super();
this.message = message;
this.type = type;
}
public static AlertMessage danger(String message) {
return new AlertMessage(message, Type.DANGER);
}
public static AlertMessage warning(String message) {
return new AlertMessage(message, Type.WARNING);
}
public static AlertMessage info(String message) {
return new AlertMessage(message, Type.INFO);
}
public static AlertMessage success(String message) {
return new AlertMessage(message, Type.SUCCESS);
}
public static AlertMessage extract(HttpServletRequest request) {
return MESSAGE_STORE.pop(request, AlertMessage.class);
}
public void flash(HttpServletRequest request) {
MESSAGE_STORE.put(request, this);
}
public String getMessage() {
return message;
}
public Type getType() {
return type;
}
@Override
public String toString() {
return "AlertMessage [message=" + message + ", type=" + type + "]";
}
public enum Type {
DANGER("error"),
WARNING("warning"),
INFO("information"),
SUCCESS("success"),
ALERT("alert");
private final String notyType;
Type(String notyType) {
this.notyType = notyType;
}
public String getNotyType() {
return notyType;
}
}
}
|
view/src/main/java/org/xcolab/view/util/entity/flash/AlertMessage.java
|
package org.xcolab.view.util.entity.flash;
import javax.servlet.http.HttpServletRequest;
public class AlertMessage {
public static final AlertMessage CHANGES_SAVED = AlertMessage.success("Changes saved");
public static final AlertMessage NOT_SAVED = AlertMessage.danger("Changes NOT saved");
public static final AlertMessage CREATED = AlertMessage.success("Element created");
public static final AlertMessage DELETED = AlertMessage.success("Element deleted");
public static final AlertMessage ERROR = AlertMessage.danger("An error occurred");
private static final FlashMessageStore MESSAGE_STORE = new FlashMessageStore();
private final String message;
private final Type type;
private AlertMessage(String message, Type type) {
super();
this.message = message;
this.type = type;
}
public static AlertMessage danger(String message) {
return new AlertMessage(message, Type.DANGER);
}
public static AlertMessage warning(String message) {
return new AlertMessage(message, Type.WARNING);
}
public static AlertMessage info(String message) {
return new AlertMessage(message, Type.INFO);
}
public static AlertMessage success(String message) {
return new AlertMessage(message, Type.SUCCESS);
}
public static AlertMessage extract(HttpServletRequest request) {
return MESSAGE_STORE.pop(request, AlertMessage.class);
}
public void flash(HttpServletRequest request) {
MESSAGE_STORE.put(request, this);
}
public String getMessage() {
return message;
}
public Type getType() {
return type;
}
@Override
public String toString() {
return "AlertMessage [message=" + message + ", type=" + type + "]";
}
public enum Type {
DANGER("error"),
WARNING("warning"),
INFO("information"),
SUCCESS("success"),
ALERT("alert");
private final String notyType;
Type(String notyType) {
this.notyType = notyType;
}
public String getNotyType() {
return notyType;
}
}
}
|
[COLAB-1813] Fixed serialization of AlertMessage in redis session
|
view/src/main/java/org/xcolab/view/util/entity/flash/AlertMessage.java
|
[COLAB-1813] Fixed serialization of AlertMessage in redis session
|
|
Java
|
mit
|
e40bfd5d230520243083f312277eecf1a55a1c99
| 0
|
mikedayupay03/GetBetter,mikedayupay/GetBetter,mikedayupay03/GetBetter,mikedayupay/GetBetter
|
package com.dlsu.getbetter.getbetter.objects;
import java.util.ArrayList;
/**
* Created by mikedayupay on 25/02/2016.
*/
public class CaseRecord {
private int caseRecordId;
private String caseRecordComplaint;
private String caseRecordControlNumber;
private String caseRecordStatus;
private String caseRecordUpdatedOn;
private String caseRecordUpdatedBy;
private ArrayList<Attachment> caseRecordAttachments;
public CaseRecord(int caseRecordId, String caseRecordComplaint, String caseRecordControlNumber, String caseRecordStatus) {
this.caseRecordId = caseRecordId;
this.caseRecordComplaint = caseRecordComplaint;
this.caseRecordControlNumber = caseRecordControlNumber;
this.caseRecordStatus = caseRecordStatus;
}
public CaseRecord(int caseRecordId, String caseRecordComplaint, String caseRecordControlNumber) {
this.caseRecordId = caseRecordId;
this.caseRecordComplaint = caseRecordComplaint;
this.caseRecordControlNumber = caseRecordControlNumber;
}
public void setCaseRecordAttachments(ArrayList<Attachment> caseRecordAttachments) {
this.caseRecordAttachments = caseRecordAttachments;
}
public void setCaseRecordStatus(String caseRecordStatus) {
this.caseRecordStatus = caseRecordStatus;
}
public void setCaseRecordId(int caseRecordId) {
this.caseRecordId = caseRecordId;
}
public void setCaseRecordComplaint(String caseRecordComplaint) {
this.caseRecordComplaint = caseRecordComplaint;
}
public void setCaseRecordControlNumber(String caseRecordControlNumber) {
this.caseRecordControlNumber = caseRecordControlNumber;
}
public String getCaseRecordUpdatedOn() {
return caseRecordUpdatedOn;
}
public void setCaseRecordUpdatedOn(String caseRecordUpdatedOn) {
this.caseRecordUpdatedOn = caseRecordUpdatedOn;
}
public String getCaseRecordUpdatedBy() {
return caseRecordUpdatedBy;
}
public void setCaseRecordUpdatedBy(String caseRecordUpdatedBy) {
this.caseRecordUpdatedBy = caseRecordUpdatedBy;
}
public ArrayList<Attachment> getCaseRecordAttachments() {
return caseRecordAttachments;
}
public int getCaseRecordId() {
return caseRecordId;
}
public String getCaseRecordComplaint() {
return caseRecordComplaint;
}
public String getCaseRecordControlNumber() {
return caseRecordControlNumber;
}
public String getCaseRecordStatus() {
return caseRecordStatus;
}
}
|
app/src/main/java/com/dlsu/getbetter/getbetter/objects/CaseRecord.java
|
package com.dlsu.getbetter.getbetter.objects;
import java.util.ArrayList;
/**
* Created by mikedayupay on 25/02/2016.
*/
public class CaseRecord {
private int caseRecordId;
private String caseRecordComplaint;
private String caseRecordControlNumber;
private ArrayList<Attachment> caseRecordAttachments;
public CaseRecord(int caseRecordId, String caseRecordComplaint, String caseRecordControlNumber) {
this.caseRecordId = caseRecordId;
this.caseRecordComplaint = caseRecordComplaint;
this.caseRecordControlNumber = caseRecordControlNumber;
}
public CaseRecord(ArrayList<Attachment> caseRecordAttachments) {
this.caseRecordAttachments = caseRecordAttachments;
}
public ArrayList<Attachment> getCaseRecordAttachments() {
return caseRecordAttachments;
}
public int getCaseRecordId() {
return caseRecordId;
}
public String getCaseRecordComplaint() {
return caseRecordComplaint;
}
public String getCaseRecordControlNumber() {
return caseRecordControlNumber;
}
}
|
Add new fields and getter setter for object CaseRecord
Added fields:
String caseRecordStatus
String caseRecordUpdatedOn
String caseRecordUpdatedBy
|
app/src/main/java/com/dlsu/getbetter/getbetter/objects/CaseRecord.java
|
Add new fields and getter setter for object CaseRecord
|
|
Java
|
mit
|
05fe93b70d8550fa846cb32b515c78a5d22481fd
| 0
|
CS2103AUG2016-F11-C1/main,ChaseYaoCong/main,CS2103AUG2016-F11-C1/main
|
package seedu.todo.controllers;
import java.time.LocalDateTime;
import java.time.ZoneId;
import java.util.Arrays;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import com.joestelmach.natty.DateGroup;
import com.joestelmach.natty.Parser;
import seedu.todo.commons.exceptions.UnmatchedQuotesException;
import seedu.todo.commons.util.DateUtil;
import seedu.todo.commons.util.StringUtil;
import seedu.todo.controllers.concerns.Tokenizer;
import seedu.todo.controllers.concerns.Renderer;
import seedu.todo.models.Event;
import seedu.todo.models.Task;
import seedu.todo.models.TodoListDB;
/**
* Controller to list CalendarItems.
*
* @author Tiong Yaocong
*
*/
public class ListController implements Controller {
private static final String NAME = "List";
private static final String DESCRIPTION = "Lists all tasks and events.";
private static final String COMMAND_SYNTAX = "list [task/event] [complete/incomplete] [on date] or [from date to date]";
private static final String COMMAND_WORD = "list";
private static final String MESSAGE_LISTING_SUCCESS = "Listing a total of %s";
private static final String MESSAGE_LISTING_FAILURE = "No task or event found!";
private static CommandDefinition commandDefinition =
new CommandDefinition(NAME, DESCRIPTION, COMMAND_SYNTAX);
public static CommandDefinition getCommandDefinition() {
return commandDefinition;
}
@Override
public float inputConfidence(String input) {
return (input.toLowerCase().startsWith("list")) ? 1 : 0;
}
private static Map<String, String[]> getTokenDefinitions() {
Map<String, String[]> tokenDefinitions = new HashMap<String, String[]>();
tokenDefinitions.put("default", new String[] {"list"});
tokenDefinitions.put("eventType", new String[] { "event", "events", "task", "tasks"});
tokenDefinitions.put("status", new String[] { "complete" , "completed", "incomplete", "incompleted"});
tokenDefinitions.put("time", new String[] { "at", "by", "on", "time" });
tokenDefinitions.put("timeFrom", new String[] { "from" });
tokenDefinitions.put("timeTo", new String[] { "to", "before" });
return tokenDefinitions;
}
@Override
public void process(String input) {
Map<String, String[]> parsedResult;
try {
parsedResult = Tokenizer.tokenize(getTokenDefinitions(), input);
} catch (UnmatchedQuotesException e) {
System.out.println("Unmatched quote!");
return ;
}
boolean isExactCommand = parseExactListCommand(parsedResult);
// Task or event?
boolean listAll = parseListAllType(parsedResult);
boolean isTask = true; //default
//if listing all type , set isTask and isEvent true
if (!listAll) {
isTask = parseIsTask(parsedResult);
}
boolean listAllStatus = parseListAllStatus(parsedResult);
boolean isCompleted = false; //default
//if listing all status, isCompleted will be ignored, listing both complete and incomplete
if (!listAllStatus) {
isCompleted = !parseIsIncomplete(parsedResult);
}
String[] parsedDates = parseDates(parsedResult);
boolean isDateProvided = true;
LocalDateTime dateOn = null;
LocalDateTime dateFrom = null;
LocalDateTime dateTo = null;
if (parsedDates == null) {
isDateProvided = false;
} else {
String naturalOn = parsedDates[0];
String naturalFrom = parsedDates[1];
String naturalTo = parsedDates[2];
// Parse natural date using Natty.
dateOn = naturalOn == null ? null : parseNatural(naturalOn);
dateFrom = naturalFrom == null ? null : parseNatural(naturalFrom);
dateTo = naturalTo == null ? null : parseNatural(naturalTo);
//setting up view
}
setupView(isTask, listAll, isCompleted, listAllStatus, dateOn, dateFrom, dateTo, isDateProvided,
parsedDates, isExactCommand, input);
}
/**
* Setting up the view
*
* @param isTask
* true if CalendarItem should be a Task, false if Event
* @param isEvent
* true if CalendarItem should be a Event, false if Task
* @param listAll
* true if listing all type, isTask or isEvent are ignored
* @param isCompleted
* true if user request completed item
* @param listAllStatus
* true if user did not request any status, isCompleted is ignored
* @param dateOn
* Date if user specify for a certain date
* @param dateFrom
* Due date for Task or start date for Event
* @param dateTo
* End date for Event
*/
private void setupView(boolean isTask, boolean listAll, boolean isCompleted,
boolean listAllStatus, LocalDateTime dateOn, LocalDateTime dateFrom,
LocalDateTime dateTo, boolean isDateProvided, String[] parsedDates, boolean isExactCommand, String input) {
TodoListDB db = TodoListDB.getInstance();
List<Task> tasks = null;
List<Event> events = null;
// isTask and isEvent = true, list all type
if (listAll || isTask) { //task or event not specify
//no event or task keyword found
isTask = false;
tasks = setupTaskView(isCompleted, listAllStatus, dateOn, dateFrom, dateTo, isDateProvided,
isExactCommand, listAll, db);
} else {
events = setupEventView(isCompleted, listAllStatus, dateOn, dateFrom, dateTo, isDateProvided,
isExactCommand, listAll, db);
}
if (tasks == null && events == null) {
displayErrorMessage(input, listAll, listAllStatus, isCompleted, isTask, parsedDates);
return ; //display error message
}
// Update console message
int numTasks = 0;
int numEvents = 0;
if (tasks != null) {
numTasks = tasks.size();
}
if(events != null) {
numEvents = events.size();
}
String consoleMessage = "";
if (numTasks != 0 || numEvents != 0) {
consoleMessage = String.format(MESSAGE_LISTING_SUCCESS, formatDisplayMessage(numTasks, numEvents));
} else {
consoleMessage = MESSAGE_LISTING_FAILURE;
}
Renderer.renderSelected(db, consoleMessage, tasks, events);
}
/**
* display error message due to invalid clear command
*
* @param input
* based on user input
* @param parsedDate
* the date entered by the user
*/
private void displayErrorMessage(String input, boolean listAll, boolean listAllStatus, boolean isCompleted,
boolean isTask, String[] parsedDates) {
String consoleDisplayMessage = String.format("You have entered : %s.",input);
String commandLineMessage = COMMAND_WORD;
String commandLineCompleteSuggestMessage = "complete";
String commandLineIncompleteSuggestMessage = "incomplete";
String commandLineTaskSuggestMessage = "task";
String commandLineEventSuggestMessage = "event";
if (!listAll) {
if (isTask) {
commandLineMessage = String.format("%s %s", commandLineMessage, commandLineTaskSuggestMessage);
} else {
commandLineMessage = String.format("%s %s", commandLineMessage, commandLineEventSuggestMessage);
}
}
if (!listAllStatus) {
if (isCompleted) {
commandLineMessage = String.format("%s %s", commandLineMessage, commandLineCompleteSuggestMessage);
} else {
commandLineMessage = String.format("%s %s", commandLineMessage, commandLineIncompleteSuggestMessage);
}
}
if (parsedDates != null) {
if (parsedDates[0] != null) {
commandLineMessage = String.format("%s by <date>", commandLineMessage);
} else {
commandLineMessage = String.format("%s from <date> to <date>", commandLineMessage);
}
}
Renderer.renderDisambiguation(commandLineMessage, consoleDisplayMessage);
}
private String formatDisplayMessage (int numTasks, int numEvents) {
if (numTasks != 0 && numEvents != 0) {
return String.format("%s and %s.", formatTaskMessage(numTasks), formatEventMessage(numEvents));
} else if (numTasks != 0) {
return formatTaskMessage(numTasks);
} else {
return formatEventMessage(numEvents);
}
}
private String formatEventMessage (int numEvents) {
return String.format("%d %s", numEvents, StringUtil.pluralizer(numEvents, "event", "events"));
}
private String formatTaskMessage (int numTasks) {
return String.format("%d %s", numTasks, StringUtil.pluralizer(numTasks, "task", "tasks"));
}
private List<Event> setupEventView(boolean isCompleted, boolean listAllStatus, LocalDateTime dateOn,
LocalDateTime dateFrom, LocalDateTime dateTo, boolean isDateProvided,
boolean isExactCommand, boolean listAll, TodoListDB db) {
if (dateFrom == null && dateTo == null && dateOn == null) {
if (listAllStatus) { // not specify
if (isExactCommand && isDateProvided == false) {
if (listAll) {
return db.getAllCurrentEvents();
} else {
return db.getAllEvents();
}
} else {
return null;
}
} else if (isCompleted) {
return db.getEventByRange(null, LocalDateTime.now());
} else {
return db.getEventByRange(LocalDateTime.now(), null);
}
} else if (dateOn != null) { //by keyword found
return db.getEventByDate(dateOn);
} else {
return db.getEventByRange(dateFrom, dateTo);
}
}
private List<Task> setupTaskView(boolean isCompleted, boolean listAllStatus, LocalDateTime dateOn, LocalDateTime dateFrom,
LocalDateTime dateTo, boolean isDateProvided, boolean isExactCommand, boolean listAll, TodoListDB db) {
if (dateFrom == null && dateTo == null && dateOn == null) {
if (listAllStatus) { // not specify
if (isExactCommand && isDateProvided == false) {
if (listAll) {
return db.getIncompleteTasksAndTaskFromTodayDate();
} else {
return db.getAllTasks();
}
} else {
return null;
}
} else {
return db.getTaskByRangeWithStatus(dateFrom, dateTo, isCompleted, listAllStatus);
}
} else if (dateOn != null) { //by keyword found
return db.getTaskByDateWithStatus(dateOn, isCompleted, listAllStatus);
} else {
return db.getTaskByRangeWithStatus(dateFrom, dateTo, isCompleted, listAllStatus);
}
}
/**
* Parse a natural date into a LocalDateTime object.
*
* @param natural
* @return LocalDateTime object
*/
private LocalDateTime parseNatural(String natural) {
Parser parser = new Parser();
List<DateGroup> groups = parser.parse(natural);
Date date = null;
try {
date = groups.get(0).getDates().get(0);
} catch (IndexOutOfBoundsException e) {
System.out.println("Error!"); // TODO
return null;
}
LocalDateTime ldt = LocalDateTime.ofInstant(date.toInstant(), ZoneId.systemDefault());
return DateUtil.floorDate(ldt);
}
private boolean parseExactListCommand(Map<String, String[]> parsedResult) {
return parsedResult.get("default")[1] == null;
}
/**
* Extracts the intended CalendarItem type specify from parsedResult.
*
* @param parsedResult
* @return true if Task or event is not specify, false if either Task or Event specify
*/
private boolean parseListAllType (Map<String, String[]> parsedResult) {
return !(parsedResult.get("eventType") != null);
}
/**
* Extracts the intended status type specify from parsedResult.
*
* @param parsedResult
* @return true if Task or event is not specify, false if either Task or Event specify
*/
private boolean parseListAllStatus (Map<String, String[]> parsedResult) {
return !(parsedResult.get("status") != null);
}
/**
* Extracts the intended CalendarItem status from parsedResult.
*
* @param parsedResult
* @return true if incomplete, false if complete
*/
private boolean parseIsIncomplete (Map<String, String[]> parsedResult) {
return parsedResult.get("status")[0].contains("incomplete");
}
/**
* Extracts the intended CalendarItem status from parsedResult.
*
* @param parsedResult
* @return true if over or prior, false if not
*/
private boolean parseIsPriorEvents (Map<String, String[]> parsedResult) {
return parsedResult.get("status")[0].contains("prior") || parsedResult.get("status")[0].contains("over");
}
/**
* Extracts the intended CalendarItem status from parsedResult.
*
* @param parsedResult
* @return true if incomplete, false if complete
*/
private boolean parseIsOngoingEvents (Map<String, String[]> parsedResult) {
return parsedResult.get("status")[0].contains("ongoing") || parsedResult.get("status")[0].contains("schedule");
}
/**
* Extracts the intended CalendarItem type from parsedResult.
*
* @param parsedResult
* @return true if Task, false if Event
*/
private boolean parseIsTask (Map<String, String[]> parsedResult) {
return parsedResult.get("eventType")[0].contains("task");
}
/**
* Extracts the natural dates from parsedResult.
*
* @param parsedResult
* @return { naturalOn, naturalFrom, naturalTo } or null if no date provided
*/
private String[] parseDates(Map<String, String[]> parsedResult) {
String naturalFrom = null;
String naturalTo = null;
String naturalOn = null;
if (parsedResult.get("time") == null) {
if (parsedResult.get("timeFrom") != null) {
naturalFrom = parsedResult.get("timeFrom")[1];
}
if (parsedResult.get("timeTo") != null) {
naturalTo = parsedResult.get("timeTo")[1];
}
} else {
naturalOn = parsedResult.get("time")[1];
}
if (naturalFrom != null || naturalTo != null || naturalOn != null) {
return new String[] { naturalOn, naturalFrom, naturalTo };
} else {
return null;
}
}
}
|
src/main/java/seedu/todo/controllers/ListController.java
|
package seedu.todo.controllers;
import java.time.LocalDateTime;
import java.time.ZoneId;
import java.util.Arrays;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import com.joestelmach.natty.DateGroup;
import com.joestelmach.natty.Parser;
import seedu.todo.commons.exceptions.UnmatchedQuotesException;
import seedu.todo.commons.util.DateUtil;
import seedu.todo.commons.util.StringUtil;
import seedu.todo.controllers.concerns.Tokenizer;
import seedu.todo.controllers.concerns.Renderer;
import seedu.todo.models.Event;
import seedu.todo.models.Task;
import seedu.todo.models.TodoListDB;
/**
* Controller to list CalendarItems.
*
* @author Tiong Yaocong
*
*/
public class ListController implements Controller {
private static final String NAME = "List";
private static final String DESCRIPTION = "Lists all tasks and events.";
private static final String COMMAND_SYNTAX = "list [task/event] [complete/incomplete] [on date] or [from date to date]";
private static final String COMMAND_WORD = "list";
private static final String MESSAGE_LISTING_SUCCESS = "Listing a total of %s";
private static final String MESSAGE_LISTING_FAILURE = "No task or event found!";
private static CommandDefinition commandDefinition =
new CommandDefinition(NAME, DESCRIPTION, COMMAND_SYNTAX);
public static CommandDefinition getCommandDefinition() {
return commandDefinition;
}
@Override
public float inputConfidence(String input) {
return (input.toLowerCase().startsWith("list")) ? 1 : 0;
}
private static Map<String, String[]> getTokenDefinitions() {
Map<String, String[]> tokenDefinitions = new HashMap<String, String[]>();
tokenDefinitions.put("default", new String[] {"list"});
tokenDefinitions.put("eventType", new String[] { "event", "events", "task", "tasks"});
tokenDefinitions.put("status", new String[] { "complete" , "completed", "incomplete", "incompleted"});
tokenDefinitions.put("time", new String[] { "at", "by", "on", "time" });
tokenDefinitions.put("timeFrom", new String[] { "from" });
tokenDefinitions.put("timeTo", new String[] { "to", "before" });
return tokenDefinitions;
}
@Override
public void process(String input) {
Map<String, String[]> parsedResult;
try {
parsedResult = Tokenizer.tokenize(getTokenDefinitions(), input);
} catch (UnmatchedQuotesException e) {
System.out.println("Unmatched quote!");
return ;
}
boolean isExactCommand = parseExactListCommand(parsedResult);
// Task or event?
boolean listAll = parseListAllType(parsedResult);
boolean isTask = true; //default
//if listing all type , set isTask and isEvent true
if (!listAll) {
isTask = parseIsTask(parsedResult);
}
boolean listAllStatus = parseListAllStatus(parsedResult);
boolean isCompleted = false; //default
//if listing all status, isCompleted will be ignored, listing both complete and incomplete
if (!listAllStatus) {
isCompleted = !parseIsIncomplete(parsedResult);
}
String[] parsedDates = parseDates(parsedResult);
boolean isDateProvided = true;
LocalDateTime dateOn = null;
LocalDateTime dateFrom = null;
LocalDateTime dateTo = null;
if (parsedDates == null) {
isDateProvided = false;
} else {
String naturalOn = parsedDates[0];
String naturalFrom = parsedDates[1];
String naturalTo = parsedDates[2];
// Parse natural date using Natty.
dateOn = naturalOn == null ? null : parseNatural(naturalOn);
dateFrom = naturalFrom == null ? null : parseNatural(naturalFrom);
dateTo = naturalTo == null ? null : parseNatural(naturalTo);
//setting up view
}
setupView(isTask, listAll, isCompleted, listAllStatus, dateOn, dateFrom, dateTo, isDateProvided,
parsedDates, isExactCommand, input);
}
/**
* Setting up the view
*
* @param isTask
* true if CalendarItem should be a Task, false if Event
* @param isEvent
* true if CalendarItem should be a Event, false if Task
* @param listAll
* true if listing all type, isTask or isEvent are ignored
* @param isCompleted
* true if user request completed item
* @param listAllStatus
* true if user did not request any status, isCompleted is ignored
* @param dateOn
* Date if user specify for a certain date
* @param dateFrom
* Due date for Task or start date for Event
* @param dateTo
* End date for Event
*/
private void setupView(boolean isTask, boolean listAll, boolean isCompleted,
boolean listAllStatus, LocalDateTime dateOn, LocalDateTime dateFrom,
LocalDateTime dateTo, boolean isDateProvided, String[] parsedDates, boolean isExactCommand, String input) {
TodoListDB db = TodoListDB.getInstance();
List<Task> tasks = null;
List<Event> events = null;
// isTask and isEvent = true, list all type
if (listAll || isTask) { //task or event not specify
//no event or task keyword found
isTask = false;
tasks = setupTaskView(isCompleted, listAllStatus, dateOn, dateFrom, dateTo, isDateProvided,
isExactCommand, listAll, db);
} else {
events = setupEventView(isCompleted, listAllStatus, dateOn, dateFrom, dateTo, isDateProvided,
isExactCommand, listAll, db);
}
if (tasks == null && events == null) {
displayErrorMessage(input, listAll, listAllStatus, isCompleted, isTask, parsedDates);
return ; //display error message
}
// Update console message
int numTasks = 0;
int numEvents = 0;
if (tasks != null) {
numTasks = tasks.size();
}
if(events != null) {
numEvents = events.size();
}
String consoleMessage = "";
if (numTasks != 0 || numEvents != 0) {
consoleMessage = String.format(MESSAGE_LISTING_SUCCESS, formatDisplayMessage(numTasks, numEvents));
} else {
consoleMessage = MESSAGE_LISTING_FAILURE;
}
Renderer.renderSelected(db, consoleMessage, tasks, events);
}
/**
* display error message due to invalid clear command
*
* @param input
* based on user input
* @param parsedDate
* the date entered by the user
*/
private void displayErrorMessage(String input, boolean listAll, boolean listAllStatus, boolean isCompleted,
boolean isTask, String[] parsedDates) {
String consoleDisplayMessage = String.format("You have entered : %s.",input);
String commandLineMessage = COMMAND_WORD;
String commandLineCompleteSuggestMessage = "complete";
String commandLineIncompleteSuggestMessage = "incomplete";
String commandLineTaskSuggestMessage = "task";
String commandLineEventSuggestMessage = "event";
if (!listAll) {
if (isTask) {
commandLineMessage = String.format("%s %s", commandLineMessage, commandLineTaskSuggestMessage);
} else {
commandLineMessage = String.format("%s %s", commandLineMessage, commandLineEventSuggestMessage);
}
}
if (!listAllStatus) {
if (isCompleted) {
commandLineMessage = String.format("%s %s", commandLineMessage, commandLineCompleteSuggestMessage);
} else {
commandLineMessage = String.format("%s %s", commandLineMessage, commandLineIncompleteSuggestMessage);
}
}
if (parsedDates != null) {
if (parsedDates[0] != null) {
commandLineMessage = String.format("%s by <date>", commandLineMessage);
} else {
commandLineMessage = String.format("%s from <date> to <date>", commandLineMessage);
}
}
Renderer.renderDisambiguation(commandLineMessage, consoleDisplayMessage);
}
private String formatDisplayMessage (int numTasks, int numEvents) {
if (numTasks != 0 && numEvents != 0) {
return String.format("%s and %s.", formatTaskMessage(numTasks), formatEventMessage(numEvents));
} else if (numTasks != 0) {
return formatTaskMessage(numTasks);
} else {
return formatEventMessage(numEvents);
}
}
private String formatEventMessage (int numEvents) {
return String.format("%d %s", numEvents, StringUtil.pluralizer(numEvents, "event", "events"));
}
private String formatTaskMessage (int numTasks) {
return String.format("%d %s", numTasks, StringUtil.pluralizer(numTasks, "task", "tasks"));
}
private List<Event> setupEventView(boolean isCompleted, boolean listAllStatus, LocalDateTime dateOn,
LocalDateTime dateFrom, LocalDateTime dateTo, boolean isDateProvided,
boolean isExactCommand, boolean listAll, TodoListDB db) {
if (dateFrom == null && dateTo == null && dateOn == null) {
if (listAllStatus) { // not specify
if (isExactCommand && isDateProvided == false) {
if (listAll) {
return db.getAllCurrentEvents();
} else {
return db.getAllEvents();
}
} else {
return null;
}
} else if (isCompleted) {
return db.getEventByRange(null, LocalDateTime.now());
} else {
return db.getEventByRange(LocalDateTime.now(), null);
}
} else if (dateOn != null) { //by keyword found
return db.getEventByDate(dateOn);
} else {
return db.getEventByRange(dateFrom, dateTo);
}
}
private List<Task> setupTaskView(boolean isCompleted, boolean listAllStatus, LocalDateTime dateOn, LocalDateTime dateFrom,
LocalDateTime dateTo, boolean isDateProvided, boolean isExactCommand, boolean listAll, TodoListDB db) {
if (dateFrom == null && dateTo == null && dateOn == null) {
if (listAllStatus) { // not specify
if (isExactCommand && isDateProvided == false) {
if (listAll) {
return db.getIncompleteTasksAndTaskFromTodayDate();
} else {
return db.getAllTasks();
}
} else {
return null;
}
} else {
return db.getTaskByRangeWithStatus(dateFrom, dateTo, isCompleted, listAllStatus);
}
} else if (dateOn != null) { //by keyword found
return db.getTaskByDateWithStatus(dateOn, isCompleted, listAllStatus);
} else {
return db.getTaskByRangeWithStatus(dateFrom, dateTo, isCompleted, listAllStatus);
}
}
/**
* Parse a natural date into a LocalDateTime object.
*
* @param natural
* @return LocalDateTime object
*/
private LocalDateTime parseNatural(String natural) {
Parser parser = new Parser();
List<DateGroup> groups = parser.parse(natural);
Date date = null;
try {
date = groups.get(0).getDates().get(0);
} catch (IndexOutOfBoundsException e) {
System.out.println("Error!"); // TODO
return null;
}
LocalDateTime ldt = LocalDateTime.ofInstant(date.toInstant(), ZoneId.systemDefault());
return DateUtil.floorDate(ldt);
}
private boolean parseExactListCommand(Map<String, String[]> parsedResult) {
return parsedResult.get("default")[1] == null;
}
/**
* Extracts the intended CalendarItem type specify from parsedResult.
*
* @param parsedResult
* @return true if Task or event is not specify, false if either Task or Event specify
*/
private boolean parseListAllType (Map<String, String[]> parsedResult) {
return !(parsedResult.get("eventType") != null);
}
/**
* Extracts the intended status type specify from parsedResult.
*
* @param parsedResult
* @return true if Task or event is not specify, false if either Task or Event specify
*/
private boolean parseListAllStatus (Map<String, String[]> parsedResult) {
return !(parsedResult.get("status") != null);
}
/**
* Extracts the intended CalendarItem status from parsedResult.
*
* @param parsedResult
* @return true if incomplete, false if complete
*/
private boolean parseIsIncomplete (Map<String, String[]> parsedResult) {
return parsedResult.get("status")[0].contains("incomplete");
}
/**
* Extracts the intended CalendarItem type from parsedResult.
*
* @param parsedResult
* @return true if Task, false if Event
*/
private boolean parseIsTask (Map<String, String[]> parsedResult) {
return parsedResult.get("eventType")[0].contains("task");
}
/**
* Extracts the natural dates from parsedResult.
*
* @param parsedResult
* @return { naturalOn, naturalFrom, naturalTo } or null if no date provided
*/
private String[] parseDates(Map<String, String[]> parsedResult) {
String naturalFrom = null;
String naturalTo = null;
String naturalOn = null;
if (parsedResult.get("time") == null) {
if (parsedResult.get("timeFrom") != null) {
naturalFrom = parsedResult.get("timeFrom")[1];
}
if (parsedResult.get("timeTo") != null) {
naturalTo = parsedResult.get("timeTo")[1];
}
} else {
naturalOn = parsedResult.get("time")[1];
}
if (naturalFrom != null || naturalTo != null || naturalOn != null) {
return new String[] { naturalOn, naturalFrom, naturalTo };
} else {
return null;
}
}
}
|
new method to display over and ongoing events
|
src/main/java/seedu/todo/controllers/ListController.java
|
new method to display over and ongoing events
|
|
Java
|
mit
|
c520336b86c68e20e93814e875762b4518e4f45c
| 0
|
FAForever/faf-java-api,FAForever/faf-java-api,micheljung/faf-java-api,micheljung/faf-java-api,FAForever/faf-java-api
|
package com.faforever.api.map;
import com.faforever.api.config.FafApiProperties;
import com.faforever.api.content.ContentService;
import com.faforever.api.data.domain.Map;
import com.faforever.api.data.domain.MapVersion;
import com.faforever.api.data.domain.Player;
import com.faforever.api.error.ApiException;
import com.faforever.api.error.Error;
import com.faforever.api.error.ErrorCode;
import com.faforever.api.error.ProgrammingError;
import com.faforever.api.utils.FilePermissionUtil;
import com.faforever.commons.io.Unzipper;
import com.faforever.commons.io.Zipper;
import com.faforever.commons.lua.LuaLoader;
import com.faforever.commons.map.PreviewGenerator;
import lombok.Data;
import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;
import org.luaj.vm2.LuaValue;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.util.Assert;
import org.springframework.util.FileSystemUtils;
import javax.imageio.ImageIO;
import javax.inject.Inject;
import java.awt.image.BufferedImage;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Optional;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import java.util.zip.ZipInputStream;
import java.util.zip.ZipOutputStream;
import static com.github.nocatch.NoCatch.noCatch;
@Service
@Slf4j
public class MapService {
private static final String[] REQUIRED_FILES = new String[]{
".scmap",
"_save.lua",
"_scenario.lua",
"_script.lua"};
private static final Charset MAP_CHARSET = StandardCharsets.ISO_8859_1;
private static final String STUPID_MAP_FOLDER_PREFIX = "maps/";
private final FafApiProperties fafApiProperties;
private final MapRepository mapRepository;
private final ContentService contentService;
@Inject
public MapService(FafApiProperties fafApiProperties, MapRepository mapRepository, ContentService contentService) {
this.fafApiProperties = fafApiProperties;
this.mapRepository = mapRepository;
this.contentService = contentService;
}
@Transactional
@SneakyThrows
void uploadMap(byte[] mapData, String mapFilename, Player author, boolean isRanked) {
Assert.notNull(author, "'author' must not be null");
Assert.isTrue(mapData.length > 0, "'mapData' must not be empty");
MapUploadData progressData = new MapUploadData()
.setBaseDir(contentService.createTempDir())
.setUploadFileName(mapFilename)
.setAuthorEntity(author)
.setRanked(isRanked);
progressData.setUploadedFile(progressData.getBaseDir().resolve(mapFilename));
copyToTemporaryDirectory(mapData, progressData);
unzipFile(progressData);
postProcessZipFiles(progressData);
parseScenarioLua(progressData);
checkLua(progressData);
postProcessLuaFile(progressData);
updateMapEntities(progressData);
renameFolderNameAndCorrectPathInLuaFiles(progressData);
generatePreview(progressData);
zipMapData(progressData);
assert cleanup(progressData);
}
@SneakyThrows
private Path copyToTemporaryDirectory(byte[] mapData, MapUploadData progressData) {
return Files.write(progressData.getUploadedFile(), mapData);
}
@SneakyThrows
private void unzipFile(MapUploadData mapData) {
try (ZipInputStream zipInputStream = new ZipInputStream(new BufferedInputStream(
Files.newInputStream(mapData.getUploadedFile())))) {
Unzipper.from(zipInputStream).to(mapData.getBaseDir()).unzip();
}
}
@SneakyThrows
private void postProcessZipFiles(MapUploadData mapUploadData) {
Optional<Path> mapFolder;
try (Stream<Path> mapFolderStream = Files.list(mapUploadData.getBaseDir())) {
mapFolder = mapFolderStream
.filter(path -> Files.isDirectory(path))
.findFirst();
}
if (!mapFolder.isPresent()) {
throw new ApiException(new Error(ErrorCode.MAP_MISSING_MAP_FOLDER_INSIDE_ZIP));
}
try (Stream<Path> mapFolderStream = Files.list(mapUploadData.getBaseDir())) {
if (mapFolderStream.count() != 2) {
throw new ApiException(new Error(ErrorCode.MAP_INVALID_ZIP));
}
}
mapUploadData.setOriginalMapFolder(mapFolder.get());
mapUploadData.setUploadFolderName(mapUploadData.getOriginalMapFolder().getFileName().toString());
List<Path> filePaths = new ArrayList<>();
try (Stream<Path> mapFileStream = Files.list(mapUploadData.getOriginalMapFolder())) {
mapFileStream.forEach(filePaths::add);
Arrays.stream(REQUIRED_FILES)
.forEach(filePattern -> {
if (filePaths.stream()
.noneMatch(filePath -> filePath.toString().endsWith(filePattern))) {
throw new ApiException(new Error(ErrorCode.MAP_FILE_INSIDE_ZIP_MISSING, filePattern));
}
});
}
}
@SneakyThrows
private void parseScenarioLua(MapUploadData progressData) {
try (Stream<Path> mapFilesStream = Files.list(progressData.getOriginalMapFolder())) {
Path scenarioLuaPath = noCatch(() -> mapFilesStream)
.filter(myFile -> myFile.toString().endsWith("_scenario.lua"))
.findFirst()
.orElseThrow(() -> new ApiException(new Error(ErrorCode.MAP_SCENARIO_LUA_MISSING)));
LuaValue root = noCatch(() -> LuaLoader.loadFile(scenarioLuaPath), IllegalStateException.class);
progressData.setLuaRoot(root);
}
}
private void checkLua(MapUploadData progressData) {
List<Error> errors = new ArrayList<>();
LuaValue scenarioInfo = progressData.getLuaScenarioInfo();
if (scenarioInfo.get(ScenarioMapInfo.NAME) == LuaValue.NIL) {
errors.add(new Error(ErrorCode.MAP_NAME_MISSING));
}
if (scenarioInfo.get(ScenarioMapInfo.DESCRIPTION) == LuaValue.NIL) {
errors.add(new Error(ErrorCode.MAP_DESCRIPTION_MISSING));
}
if (invalidTeam(scenarioInfo)) {
errors.add(new Error(ErrorCode.MAP_FIRST_TEAM_FFA));
}
if (scenarioInfo.get(ScenarioMapInfo.TYPE) == LuaValue.NIL) {
errors.add(new Error(ErrorCode.MAP_TYPE_MISSING));
}
if (scenarioInfo.get(ScenarioMapInfo.SIZE) == LuaValue.NIL) {
errors.add(new Error(ErrorCode.MAP_SIZE_MISSING));
}
if (scenarioInfo.get(ScenarioMapInfo.MAP_VERSION) == LuaValue.NIL) {
errors.add(new Error(ErrorCode.MAP_VERSION_MISSING));
}
if (!errors.isEmpty()) {
throw new ApiException(errors.toArray(new Error[errors.size()]));
}
}
private boolean invalidTeam(LuaValue scenarioInfo) {
LuaValue scenario = scenarioInfo.get(ScenarioMapInfo.CONFIGURATIONS);
if (scenario == LuaValue.NIL) {
return true;
}
LuaValue standard = scenario.get(ScenarioMapInfo.CONFIGURATION_STANDARD);
if (standard == LuaValue.NIL) {
return true;
}
LuaValue teams = standard.get(ScenarioMapInfo.CONFIGURATION_STANDARD_TEAMS);
if (teams == LuaValue.NIL) {
return true;
}
LuaValue firstTeam = teams.get(1);
if (firstTeam == LuaValue.NIL) {
return true;
}
LuaValue teamName = firstTeam.get(ScenarioMapInfo.CONFIGURATION_STANDARD_TEAMS_NAME);
if (teamName == LuaValue.NIL) {
return true;
}
LuaValue armies = firstTeam.get(ScenarioMapInfo.CONFIGURATION_STANDARD_TEAMS_ARMIES);
return armies == LuaValue.NIL || !teamName.tojstring().equals("FFA");
}
private void postProcessLuaFile(MapUploadData progressData) {
LuaValue scenarioInfo = progressData.getLuaScenarioInfo();
Optional<Map> mapEntity = mapRepository.findOneByDisplayName(
scenarioInfo.get(ScenarioMapInfo.NAME).toString());
if (!mapEntity.isPresent()) {
return;
}
if (mapEntity.get().getAuthor().getId() != progressData.getAuthorEntity().getId()) {
throw new ApiException(new Error(ErrorCode.MAP_NOT_ORIGINAL_AUTHOR, mapEntity.get().getDisplayName()));
}
int newVersion = scenarioInfo.get(ScenarioMapInfo.MAP_VERSION).toint();
if (mapEntity.get().getVersions().stream()
.anyMatch(mapVersion -> mapVersion.getVersion() == newVersion)) {
throw new ApiException(new Error(ErrorCode.MAP_VERSION_EXISTS, mapEntity.get().getDisplayName(), newVersion));
}
progressData.setMapEntity(mapEntity.get());
}
private void updateMapEntities(MapUploadData progressData) {
LuaValue scenarioInfo = progressData.getLuaScenarioInfo();
Map map = progressData.getMapEntity();
if (map == null) {
map = new Map();
}
map.setDisplayName(scenarioInfo.get(ScenarioMapInfo.NAME).toString())
.setMapType(scenarioInfo.get(ScenarioMapInfo.TYPE).tojstring())
.setBattleType(scenarioInfo.get(ScenarioMapInfo.CONFIGURATIONS).get(ScenarioMapInfo.CONFIGURATION_STANDARD).get(ScenarioMapInfo.CONFIGURATION_STANDARD_TEAMS).get(1)
.get(ScenarioMapInfo.CONFIGURATION_STANDARD_TEAMS_NAME).tojstring())
.setAuthor(progressData.getAuthorEntity());
LuaValue size = scenarioInfo.get(ScenarioMapInfo.SIZE);
MapVersion version = new MapVersion()
.setDescription(scenarioInfo.get(ScenarioMapInfo.DESCRIPTION).tojstring().replaceAll("<LOC .*?>", ""))
.setWidth(size.get(1).toint())
.setHeight(size.get(2).toint())
.setHidden(false)
.setRanked(progressData.isRanked())
.setMaxPlayers(scenarioInfo.get(ScenarioMapInfo.CONFIGURATIONS).get(ScenarioMapInfo.CONFIGURATION_STANDARD).get(ScenarioMapInfo.CONFIGURATION_STANDARD_TEAMS).get(1)
.get(ScenarioMapInfo.CONFIGURATION_STANDARD_TEAMS_ARMIES).length())
.setVersion(scenarioInfo.get(ScenarioMapInfo.MAP_VERSION).toint());
map.getVersions().add(version);
version.setMap(map);
progressData.setMapEntity(map);
progressData.setMapVersionEntity(version);
version.setFilename(STUPID_MAP_FOLDER_PREFIX + progressData.getFinalZipName());
progressData.setFinalZipFile(
this.fafApiProperties.getMap().getTargetDirectory()
.resolve(progressData.getFinalZipName()));
if (Files.exists(progressData.getFinalZipFile())) {
throw new ApiException(new Error(ErrorCode.MAP_NAME_CONFLICT, progressData.getFinalZipName()));
}
// this triggers validation
mapRepository.save(map);
}
@SneakyThrows
private void renameFolderNameAndCorrectPathInLuaFiles(MapUploadData progressData) {
progressData.setNewMapFolder(progressData.getBaseDir().resolve(progressData.getNewFolderName()));
Files.move(progressData.getOriginalMapFolder(), progressData.getNewMapFolder());
updateLuaFiles(progressData);
}
@SneakyThrows
private void updateLuaFiles(MapUploadData mapData) {
String oldNameFolder = "/maps/" + mapData.getUploadFolderName();
String newNameFolder = "/maps/" + mapData.getNewFolderName();
try (Stream<Path> mapFileStream = Files.list(mapData.getNewMapFolder())) {
mapFileStream
.filter(path -> path.toString().toLowerCase().endsWith(".lua"))
.forEach(path -> noCatch(() -> {
List<String> lines = Files.readAllLines(path, MAP_CHARSET).stream()
.map(line -> line.replaceAll("(?i)" + oldNameFolder, newNameFolder))
.collect(Collectors.toList());
Files.write(path, lines, MAP_CHARSET);
}));
}
}
@SneakyThrows
private void generatePreview(MapUploadData mapData) {
String previewFilename = mapData.getNewFolderName() + ".png";
generateImage(
fafApiProperties.getMap().getDirectoryPreviewPathSmall().resolve(previewFilename),
mapData.getNewMapFolder(),
fafApiProperties.getMap().getPreviewSizeSmall());
generateImage(
fafApiProperties.getMap().getDirectoryPreviewPathLarge().resolve(previewFilename),
mapData.getNewMapFolder(),
fafApiProperties.getMap().getPreviewSizeLarge());
}
@SneakyThrows
private void zipMapData(MapUploadData progressData) {
cleanupBaseDir(progressData);
Path finalZipFile = progressData.getFinalZipFile();
Files.createDirectories(finalZipFile.getParent());
try (ZipOutputStream zipOutputStream = new ZipOutputStream(new BufferedOutputStream(
Files.newOutputStream(finalZipFile)))) {
Zipper.contentOf(progressData.getBaseDir()).to(zipOutputStream).zip();
}
// TODO if possible, this should be done using umask instead
FilePermissionUtil.setDefaultFilePermission(finalZipFile);
}
@SneakyThrows
private void cleanupBaseDir(MapUploadData progressData) {
Files.delete(progressData.getUploadedFile());
try (Stream<Path> stream = Files.list(progressData.getBaseDir())) {
if (stream.count() != 1) {
throw new ProgrammingError("Folder containing unknown data: " + progressData.getBaseDir());
}
}
}
@SneakyThrows
private void generateImage(Path target, Path baseDir, int size) {
BufferedImage image = PreviewGenerator.generatePreview(baseDir, size, size);
if (target.getNameCount() > 0) {
Files.createDirectories(target.getParent());
}
ImageIO.write(image, "png", target.toFile());
}
private boolean cleanup(MapUploadData mapData) {
return FileSystemUtils.deleteRecursively(mapData.getBaseDir().toFile());
}
@Data
private class MapUploadData {
private String uploadFileName;
private String uploadFolderName;
private String newFolderName;
private Path uploadedFile;
private Path baseDir;
private Path originalMapFolder;
private Path newMapFolder;
private Path finalZipFile;
private LuaValue luaRoot;
private Map mapEntity;
private MapVersion mapVersionEntity;
private Player authorEntity;
private boolean isRanked;
private LuaValue scenarioInfo;
private LuaValue getLuaScenarioInfo() {
if (getLuaRoot() == null) {
throw new IllegalStateException("*_scenario.lua parse result not available");
}
if (scenarioInfo == null) {
scenarioInfo = getLuaRoot().get("ScenarioInfo");
}
return scenarioInfo;
}
private String normalizeMapName(String mapName) {
return Paths.get(mapName.toLowerCase().replaceAll(" ", "_")).normalize().toString();
}
private String getNewFolderName() {
return generateNewMapNameWithVersion("");
}
private String generateNewMapNameWithVersion(String extension) {
return Paths.get(String.format("%s.v%04d%s",
normalizeMapName(mapEntity.getDisplayName()),
mapVersionEntity.getVersion(),
extension))
.normalize().toString();
}
private String getFinalZipName() {
return generateNewMapNameWithVersion(".zip");
}
}
private class ScenarioMapInfo {
private static final String CONFIGURATIONS = "Configurations";
private static final String NAME = "name";
private static final String DESCRIPTION = "description";
private static final String TYPE = "type";
private static final String SIZE = "size";
private static final String MAP_VERSION = "map_version";
private static final String CONFIGURATION_STANDARD = "standard";
private static final String CONFIGURATION_STANDARD_TEAMS = "teams";
private static final String CONFIGURATION_STANDARD_TEAMS_NAME = "name";
private static final String CONFIGURATION_STANDARD_TEAMS_ARMIES = "armies";
}
}
|
src/main/java/com/faforever/api/map/MapService.java
|
package com.faforever.api.map;
import com.faforever.api.config.FafApiProperties;
import com.faforever.api.content.ContentService;
import com.faforever.api.data.domain.Map;
import com.faforever.api.data.domain.MapVersion;
import com.faforever.api.data.domain.Player;
import com.faforever.api.error.ApiException;
import com.faforever.api.error.Error;
import com.faforever.api.error.ErrorCode;
import com.faforever.api.error.ProgrammingError;
import com.faforever.api.utils.FilePermissionUtil;
import com.faforever.commons.io.Unzipper;
import com.faforever.commons.io.Zipper;
import com.faforever.commons.lua.LuaLoader;
import com.faforever.commons.map.PreviewGenerator;
import lombok.Data;
import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;
import org.luaj.vm2.LuaValue;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.util.Assert;
import org.springframework.util.FileSystemUtils;
import javax.imageio.ImageIO;
import javax.inject.Inject;
import java.awt.image.BufferedImage;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Optional;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import java.util.zip.ZipInputStream;
import java.util.zip.ZipOutputStream;
import static com.github.nocatch.NoCatch.noCatch;
@Service
@Slf4j
public class MapService {
private static final String[] REQUIRED_FILES = new String[]{
".scmap",
"_save.lua",
"_scenario.lua",
"_script.lua"};
private static final Charset MAP_CHARSET = StandardCharsets.ISO_8859_1;
private final FafApiProperties fafApiProperties;
private final MapRepository mapRepository;
private final ContentService contentService;
@Inject
public MapService(FafApiProperties fafApiProperties, MapRepository mapRepository, ContentService contentService) {
this.fafApiProperties = fafApiProperties;
this.mapRepository = mapRepository;
this.contentService = contentService;
}
@Transactional
@SneakyThrows
void uploadMap(byte[] mapData, String mapFilename, Player author, boolean isRanked) {
Assert.notNull(author, "'author' must not be null");
Assert.isTrue(mapData.length > 0, "'mapData' must not be empty");
MapUploadData progressData = new MapUploadData()
.setBaseDir(contentService.createTempDir())
.setUploadFileName(mapFilename)
.setAuthorEntity(author)
.setRanked(isRanked);
progressData.setUploadedFile(progressData.getBaseDir().resolve(mapFilename));
copyToTemporaryDirectory(mapData, progressData);
unzipFile(progressData);
postProcessZipFiles(progressData);
parseScenarioLua(progressData);
checkLua(progressData);
postProcessLuaFile(progressData);
updateMapEntities(progressData);
renameFolderNameAndCorrectPathInLuaFiles(progressData);
generatePreview(progressData);
zipMapData(progressData);
assert cleanup(progressData);
}
@SneakyThrows
private Path copyToTemporaryDirectory(byte[] mapData, MapUploadData progressData) {
return Files.write(progressData.getUploadedFile(), mapData);
}
@SneakyThrows
private void unzipFile(MapUploadData mapData) {
try (ZipInputStream zipInputStream = new ZipInputStream(new BufferedInputStream(
Files.newInputStream(mapData.getUploadedFile())))) {
Unzipper.from(zipInputStream).to(mapData.getBaseDir()).unzip();
}
}
@SneakyThrows
private void postProcessZipFiles(MapUploadData mapUploadData) {
Optional<Path> mapFolder;
try (Stream<Path> mapFolderStream = Files.list(mapUploadData.getBaseDir())) {
mapFolder = mapFolderStream
.filter(path -> Files.isDirectory(path))
.findFirst();
}
if (!mapFolder.isPresent()) {
throw new ApiException(new Error(ErrorCode.MAP_MISSING_MAP_FOLDER_INSIDE_ZIP));
}
try (Stream<Path> mapFolderStream = Files.list(mapUploadData.getBaseDir())) {
if (mapFolderStream.count() != 2) {
throw new ApiException(new Error(ErrorCode.MAP_INVALID_ZIP));
}
}
mapUploadData.setOriginalMapFolder(mapFolder.get());
mapUploadData.setUploadFolderName(mapUploadData.getOriginalMapFolder().getFileName().toString());
List<Path> filePaths = new ArrayList<>();
try (Stream<Path> mapFileStream = Files.list(mapUploadData.getOriginalMapFolder())) {
mapFileStream.forEach(filePaths::add);
Arrays.stream(REQUIRED_FILES)
.forEach(filePattern -> {
if (filePaths.stream()
.noneMatch(filePath -> filePath.toString().endsWith(filePattern))) {
throw new ApiException(new Error(ErrorCode.MAP_FILE_INSIDE_ZIP_MISSING, filePattern));
}
});
}
}
@SneakyThrows
private void parseScenarioLua(MapUploadData progressData) {
try (Stream<Path> mapFilesStream = Files.list(progressData.getOriginalMapFolder())) {
Path scenarioLuaPath = noCatch(() -> mapFilesStream)
.filter(myFile -> myFile.toString().endsWith("_scenario.lua"))
.findFirst()
.orElseThrow(() -> new ApiException(new Error(ErrorCode.MAP_SCENARIO_LUA_MISSING)));
LuaValue root = noCatch(() -> LuaLoader.loadFile(scenarioLuaPath), IllegalStateException.class);
progressData.setLuaRoot(root);
}
}
private void checkLua(MapUploadData progressData) {
List<Error> errors = new ArrayList<>();
LuaValue scenarioInfo = progressData.getLuaScenarioInfo();
if (scenarioInfo.get(ScenarioMapInfo.NAME) == LuaValue.NIL) {
errors.add(new Error(ErrorCode.MAP_NAME_MISSING));
}
if (scenarioInfo.get(ScenarioMapInfo.DESCRIPTION) == LuaValue.NIL) {
errors.add(new Error(ErrorCode.MAP_DESCRIPTION_MISSING));
}
if (invalidTeam(scenarioInfo)) {
errors.add(new Error(ErrorCode.MAP_FIRST_TEAM_FFA));
}
if (scenarioInfo.get(ScenarioMapInfo.TYPE) == LuaValue.NIL) {
errors.add(new Error(ErrorCode.MAP_TYPE_MISSING));
}
if (scenarioInfo.get(ScenarioMapInfo.SIZE) == LuaValue.NIL) {
errors.add(new Error(ErrorCode.MAP_SIZE_MISSING));
}
if (scenarioInfo.get(ScenarioMapInfo.MAP_VERSION) == LuaValue.NIL) {
errors.add(new Error(ErrorCode.MAP_VERSION_MISSING));
}
if (!errors.isEmpty()) {
throw new ApiException(errors.toArray(new Error[errors.size()]));
}
}
private boolean invalidTeam(LuaValue scenarioInfo) {
LuaValue scenario = scenarioInfo.get(ScenarioMapInfo.CONFIGURATIONS);
if (scenario == LuaValue.NIL) {
return true;
}
LuaValue standard = scenario.get(ScenarioMapInfo.CONFIGURATION_STANDARD);
if (standard == LuaValue.NIL) {
return true;
}
LuaValue teams = standard.get(ScenarioMapInfo.CONFIGURATION_STANDARD_TEAMS);
if (teams == LuaValue.NIL) {
return true;
}
LuaValue firstTeam = teams.get(1);
if (firstTeam == LuaValue.NIL) {
return true;
}
LuaValue teamName = firstTeam.get(ScenarioMapInfo.CONFIGURATION_STANDARD_TEAMS_NAME);
if (teamName == LuaValue.NIL) {
return true;
}
LuaValue armies = firstTeam.get(ScenarioMapInfo.CONFIGURATION_STANDARD_TEAMS_ARMIES);
return armies == LuaValue.NIL || !teamName.tojstring().equals("FFA");
}
private void postProcessLuaFile(MapUploadData progressData) {
LuaValue scenarioInfo = progressData.getLuaScenarioInfo();
Optional<Map> mapEntity = mapRepository.findOneByDisplayName(
scenarioInfo.get(ScenarioMapInfo.NAME).toString());
if (!mapEntity.isPresent()) {
return;
}
if (mapEntity.get().getAuthor().getId() != progressData.getAuthorEntity().getId()) {
throw new ApiException(new Error(ErrorCode.MAP_NOT_ORIGINAL_AUTHOR, mapEntity.get().getDisplayName()));
}
int newVersion = scenarioInfo.get(ScenarioMapInfo.MAP_VERSION).toint();
if (mapEntity.get().getVersions().stream()
.anyMatch(mapVersion -> mapVersion.getVersion() == newVersion)) {
throw new ApiException(new Error(ErrorCode.MAP_VERSION_EXISTS, mapEntity.get().getDisplayName(), newVersion));
}
progressData.setMapEntity(mapEntity.get());
}
private void updateMapEntities(MapUploadData progressData) {
LuaValue scenarioInfo = progressData.getLuaScenarioInfo();
Map map = progressData.getMapEntity();
if (map == null) {
map = new Map();
}
map.setDisplayName(scenarioInfo.get(ScenarioMapInfo.NAME).toString())
.setMapType(scenarioInfo.get(ScenarioMapInfo.TYPE).tojstring())
.setBattleType(scenarioInfo.get(ScenarioMapInfo.CONFIGURATIONS).get(ScenarioMapInfo.CONFIGURATION_STANDARD).get(ScenarioMapInfo.CONFIGURATION_STANDARD_TEAMS).get(1)
.get(ScenarioMapInfo.CONFIGURATION_STANDARD_TEAMS_NAME).tojstring())
.setAuthor(progressData.getAuthorEntity());
LuaValue size = scenarioInfo.get(ScenarioMapInfo.SIZE);
MapVersion version = new MapVersion()
.setDescription(scenarioInfo.get(ScenarioMapInfo.DESCRIPTION).tojstring().replaceAll("<LOC .*?>", ""))
.setWidth(size.get(1).toint())
.setHeight(size.get(2).toint())
.setHidden(false)
.setRanked(progressData.isRanked())
.setMaxPlayers(scenarioInfo.get(ScenarioMapInfo.CONFIGURATIONS).get(ScenarioMapInfo.CONFIGURATION_STANDARD).get(ScenarioMapInfo.CONFIGURATION_STANDARD_TEAMS).get(1)
.get(ScenarioMapInfo.CONFIGURATION_STANDARD_TEAMS_ARMIES).length())
.setVersion(scenarioInfo.get(ScenarioMapInfo.MAP_VERSION).toint());
map.getVersions().add(version);
version.setMap(map);
progressData.setMapEntity(map);
progressData.setMapVersionEntity(version);
version.setFilename(progressData.getFinalZipName());
progressData.setFinalZipFile(
this.fafApiProperties.getMap().getTargetDirectory()
.resolve(progressData.getFinalZipName()));
if (Files.exists(progressData.getFinalZipFile())) {
throw new ApiException(new Error(ErrorCode.MAP_NAME_CONFLICT, progressData.getFinalZipName()));
}
// this triggers validation
mapRepository.save(map);
}
@SneakyThrows
private void renameFolderNameAndCorrectPathInLuaFiles(MapUploadData progressData) {
progressData.setNewMapFolder(progressData.getBaseDir().resolve(progressData.getNewFolderName()));
Files.move(progressData.getOriginalMapFolder(), progressData.getNewMapFolder());
updateLuaFiles(progressData);
}
@SneakyThrows
private void updateLuaFiles(MapUploadData mapData) {
String oldNameFolder = "/maps/" + mapData.getUploadFolderName();
String newNameFolder = "/maps/" + mapData.getNewFolderName();
try (Stream<Path> mapFileStream = Files.list(mapData.getNewMapFolder())) {
mapFileStream
.filter(path -> path.toString().toLowerCase().endsWith(".lua"))
.forEach(path -> noCatch(() -> {
List<String> lines = Files.readAllLines(path, MAP_CHARSET).stream()
.map(line -> line.replaceAll("(?i)" + oldNameFolder, newNameFolder))
.collect(Collectors.toList());
Files.write(path, lines, MAP_CHARSET);
}));
}
}
@SneakyThrows
private void generatePreview(MapUploadData mapData) {
String previewFilename = mapData.getNewFolderName() + ".png";
generateImage(
fafApiProperties.getMap().getDirectoryPreviewPathSmall().resolve(previewFilename),
mapData.getNewMapFolder(),
fafApiProperties.getMap().getPreviewSizeSmall());
generateImage(
fafApiProperties.getMap().getDirectoryPreviewPathLarge().resolve(previewFilename),
mapData.getNewMapFolder(),
fafApiProperties.getMap().getPreviewSizeLarge());
}
@SneakyThrows
private void zipMapData(MapUploadData progressData) {
cleanupBaseDir(progressData);
Path finalZipFile = progressData.getFinalZipFile();
Files.createDirectories(finalZipFile.getParent());
try (ZipOutputStream zipOutputStream = new ZipOutputStream(new BufferedOutputStream(
Files.newOutputStream(finalZipFile)))) {
Zipper.contentOf(progressData.getBaseDir()).to(zipOutputStream).zip();
}
// TODO if possible, this should be done using umask instead
FilePermissionUtil.setDefaultFilePermission(finalZipFile);
}
@SneakyThrows
private void cleanupBaseDir(MapUploadData progressData) {
Files.delete(progressData.getUploadedFile());
try (Stream<Path> stream = Files.list(progressData.getBaseDir())) {
if (stream.count() != 1) {
throw new ProgrammingError("Folder containing unknown data: " + progressData.getBaseDir());
}
}
}
@SneakyThrows
private void generateImage(Path target, Path baseDir, int size) {
BufferedImage image = PreviewGenerator.generatePreview(baseDir, size, size);
if (target.getNameCount() > 0) {
Files.createDirectories(target.getParent());
}
ImageIO.write(image, "png", target.toFile());
}
private boolean cleanup(MapUploadData mapData) {
return FileSystemUtils.deleteRecursively(mapData.getBaseDir().toFile());
}
@Data
private class MapUploadData {
private String uploadFileName;
private String uploadFolderName;
private String newFolderName;
private Path uploadedFile;
private Path baseDir;
private Path originalMapFolder;
private Path newMapFolder;
private Path finalZipFile;
private LuaValue luaRoot;
private Map mapEntity;
private MapVersion mapVersionEntity;
private Player authorEntity;
private boolean isRanked;
private LuaValue scenarioInfo;
private LuaValue getLuaScenarioInfo() {
if (getLuaRoot() == null) {
throw new IllegalStateException("*_scenario.lua parse result not available");
}
if (scenarioInfo == null) {
scenarioInfo = getLuaRoot().get("ScenarioInfo");
}
return scenarioInfo;
}
private String normalizeMapName(String mapName) {
return Paths.get(mapName.toLowerCase().replaceAll(" ", "_")).normalize().toString();
}
private String getNewFolderName() {
return generateNewMapNameWithVersion("");
}
private String generateNewMapNameWithVersion(String extension) {
return Paths.get(String.format("%s.v%04d%s",
normalizeMapName(mapEntity.getDisplayName()),
mapVersionEntity.getVersion(),
extension))
.normalize().toString();
}
private String getFinalZipName() {
return generateNewMapNameWithVersion(".zip");
}
}
private class ScenarioMapInfo {
private static final String CONFIGURATIONS = "Configurations";
private static final String NAME = "name";
private static final String DESCRIPTION = "description";
private static final String TYPE = "type";
private static final String SIZE = "size";
private static final String MAP_VERSION = "map_version";
private static final String CONFIGURATION_STANDARD = "standard";
private static final String CONFIGURATION_STANDARD_TEAMS = "teams";
private static final String CONFIGURATION_STANDARD_TEAMS_NAME = "name";
private static final String CONFIGURATION_STANDARD_TEAMS_ARMIES = "armies";
}
}
|
Add stupid map filename prefix
|
src/main/java/com/faforever/api/map/MapService.java
|
Add stupid map filename prefix
|
|
Java
|
mit
|
c9442281cdfc6437639919980e5c76ca0188d8ce
| 0
|
nallar/TickThreading
|
package me.nallar.tickthreading.minecraft;
import java.util.HashSet;
import java.util.Set;
import net.minecraft.entity.player.EntityPlayerMP;
import net.minecraft.profiler.Profiler;
import net.minecraft.server.MinecraftServer;
import net.minecraft.world.ChunkCoordIntPair;
import net.minecraft.world.WorldServer;
import net.minecraft.world.chunk.Chunk;
import net.minecraft.world.gen.ChunkProviderServer;
public class ChunkGarbageCollector {
public static final boolean enabled = supportsGC();
private static boolean supportsGC() {
try {
Class.forName("org.bukkit.craftbukkit.util.WatchdogThread");
} catch (ClassNotFoundException e) {
return true;
}
return false;
}
public static Profiler profiler = MinecraftServer.getServer().theProfiler;
public static void garbageCollect(WorldServer worldServer) {
if (!enabled) {
return;
}
profiler.startSection("chunkGC");
int viewDistance = MinecraftServer.getServer().getConfigurationManager().getViewDistance();
ChunkProviderServer chunkProvider = worldServer.theChunkProviderServer;
Set<Long> chunksToUnload = new HashSet<Long>();
for (Chunk chunk : chunkProvider.getLoadedChunks()) {
chunksToUnload.add(ChunkCoordIntPair.chunkXZ2Int(chunk.xPosition, chunk.zPosition));
}
for (Object player_ : worldServer.playerEntities) {
EntityPlayerMP player = (EntityPlayerMP) player_;
int cX = (int) player.managedPosX >> 4;
int cZ = (int) player.managedPosZ >> 4;
int minX = cX - viewDistance;
int maxX = cX + viewDistance;
int minZ = cZ - viewDistance;
int maxZ = cZ + viewDistance;
for (int x = minX; x <= maxX; x++) {
for (int z = minZ; z <= maxZ; z++) {
chunksToUnload.remove(ChunkCoordIntPair.chunkXZ2Int(x, z));
}
}
}
chunkProvider.getChunksToUnloadSet().addAll(chunksToUnload);
profiler.endSection();
}
}
|
src/common/me/nallar/tickthreading/minecraft/ChunkGarbageCollector.java
|
package me.nallar.tickthreading.minecraft;
import java.util.HashSet;
import java.util.Set;
import net.minecraft.entity.player.EntityPlayerMP;
import net.minecraft.profiler.Profiler;
import net.minecraft.server.MinecraftServer;
import net.minecraft.world.ChunkCoordIntPair;
import net.minecraft.world.WorldServer;
import net.minecraft.world.chunk.Chunk;
import net.minecraft.world.gen.ChunkProviderServer;
/**
 * Unloads chunks that are no longer within view distance of any player.
 * Every loaded chunk starts as an unload candidate; chunks inside any
 * player's view square are then struck from the candidate set, and the
 * remainder is queued on the chunk provider's unload set.
 */
public class ChunkGarbageCollector {
    public static Profiler profiler = MinecraftServer.getServer().theProfiler;

    public static void garbageCollect(WorldServer worldServer) {
        profiler.startSection("chunkGC");
        int viewDistance = MinecraftServer.getServer().getConfigurationManager().getViewDistance();
        ChunkProviderServer chunkProvider = worldServer.theChunkProviderServer;
        // Start by assuming every loaded chunk can be unloaded.
        Set<Long> unloadCandidates = new HashSet<Long>();
        for (Chunk loadedChunk : chunkProvider.getLoadedChunks()) {
            unloadCandidates.add(ChunkCoordIntPair.chunkXZ2Int(loadedChunk.xPosition, loadedChunk.zPosition));
        }
        // Strike every chunk inside any player's view square.
        for (Object entry : worldServer.playerEntities) {
            EntityPlayerMP player = (EntityPlayerMP) entry;
            // Convert the player's managed block position to chunk coordinates.
            int centerX = (int) player.managedPosX >> 4;
            int centerZ = (int) player.managedPosZ >> 4;
            for (int x = centerX - viewDistance; x <= centerX + viewDistance; x++) {
                for (int z = centerZ - viewDistance; z <= centerZ + viewDistance; z++) {
                    unloadCandidates.remove(ChunkCoordIntPair.chunkXZ2Int(x, z));
                }
            }
        }
        chunkProvider.getChunksToUnloadSet().addAll(unloadCandidates);
        profiler.endSection();
    }
}
|
Disable chunk GC under Spigot
Signed-off-by: Ross Allan <ca2c77e14df1e7ee673215c1ef658354e220f471@gmail.com>
|
src/common/me/nallar/tickthreading/minecraft/ChunkGarbageCollector.java
|
Disable chunk GC under Spigot
|
|
Java
|
mit
|
00837b329f7db5179800ee4eacca35fb078ed2fa
| 0
|
armandgray/SeeMe,armandgray/SeeMe,armandgray/SeeMe,armandgray/SeeMe
|
package com.armandgray.seeme.views;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.support.v4.content.LocalBroadcastManager;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.TextView;
import android.widget.Toast;
import com.armandgray.seeme.R;
import com.armandgray.seeme.models.User;
import com.armandgray.seeme.services.HttpService;
import com.armandgray.seeme.utils.RecyclerItemClickListener;
import com.armandgray.seeme.utils.UserRVAdapter;
import java.util.Arrays;
import java.util.List;
import static com.armandgray.seeme.MainActivity.ACTIVE_USER;
import static com.armandgray.seeme.utils.StringHelper.getBoldStringBuilder;
/**
* A simple {@link Fragment} subclass.
*/
/**
 * Fragment that displays nearby discoverable users in a RecyclerView, or a
 * placeholder message when no users are available.
 *
 * The user list arrives via a local broadcast from {@code HttpService}; the
 * receiver is registered in onResume and unregistered in onPause. The host
 * activity must implement {@link DiscoverCycleListener}.
 */
public class DiscoverFragment extends Fragment {

    private static final String TAG = "DISCOVER_FRAGMENT";
    private static final String NO_USERS_HEADER = "No Current Available Users";
    private static final String NO_USERS_CONTENT = "Users are discoverable through SeeMe Touch. On the main screen, press the touch button or set SeeMe Touch to auto.";

    private TextView tvNoUsers;
    private ImageView ivCycle;
    private LinearLayout usersContainer;
    private LinearLayout noUsersContainer;
    private RecyclerView rvUsers;
    // Most recently received user list; null/empty means "show placeholder".
    private User[] userArray;
    private DiscoverCycleListener discoverCycleListener;

    // Receives the user list pushed by HttpService and refreshes the UI.
    private BroadcastReceiver httpBroadcastReceiver = new BroadcastReceiver() {
        @Override
        public void onReceive(Context context, Intent intent) {
            Log.e("BroadcastReceiver: ", "http Broadcast Received");
            // (The redundant "userArray = null;" before this assignment was removed.)
            userArray = (User[]) intent.getParcelableArrayExtra(HttpService.HTTP_SERVICE_JSON_PAYLOAD);
            if (userArray != null && userArray.length != 0) {
                setupRvUsers(Arrays.asList(userArray));
            }
            toggleShowUsers();
        }
    };

    public DiscoverFragment() {}

    /**
     * Creates a new instance carrying the active user in its arguments bundle.
     *
     * @param activeUser the currently signed-in user
     * @return a configured DiscoverFragment
     */
    public static DiscoverFragment newInstance(User activeUser) {
        Bundle args = new Bundle();
        args.putParcelable(ACTIVE_USER, activeUser);
        DiscoverFragment fragment = new DiscoverFragment();
        fragment.setArguments(args);
        return fragment;
    }

    @Override
    public void onAttach(Context context) {
        super.onAttach(context);
        // The hosting activity must implement DiscoverCycleListener; fail fast
        // with a descriptive message otherwise.
        try {
            discoverCycleListener = (DiscoverCycleListener) context;
        } catch (ClassCastException e) {
            throw new ClassCastException(context.toString()
                    + " must implement DiscoverCycleListener");
        }
    }

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container,
                             Bundle savedInstanceState) {
        View rootView = inflater.inflate(R.layout.fragment_discover, container, false);
        assignFields(rootView);
        tvNoUsers.setText(getBoldStringBuilder(NO_USERS_HEADER, NO_USERS_CONTENT));
        toggleShowUsers();
        setupIvClickListener();
        setupDummyUsers();
        return rootView;
    }

    // Looks up all child views of the fragment layout.
    private void assignFields(View rootView) {
        rvUsers = (RecyclerView) rootView.findViewById(R.id.rvUsers);
        tvNoUsers = (TextView) rootView.findViewById(R.id.tvNoUsers);
        ivCycle = (ImageView) rootView.findViewById(R.id.ivCycle);
        noUsersContainer = (LinearLayout) rootView.findViewById(R.id.noUsersContainer);
        usersContainer = (LinearLayout) rootView.findViewById(R.id.usersContainer);
    }

    // Shows the user list when users exist, otherwise the placeholder panel.
    private void toggleShowUsers() {
        if (userArray == null || userArray.length == 0) {
            noUsersContainer.setVisibility(View.VISIBLE);
            usersContainer.setVisibility(View.INVISIBLE);
            return;
        }
        noUsersContainer.setVisibility(View.INVISIBLE);
        usersContainer.setVisibility(View.VISIBLE);
    }

    // Forwards taps on the cycle icon to the host activity.
    private void setupIvClickListener() {
        ivCycle.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                discoverCycleListener.onTouchCycle();
            }
        });
    }

    // Binds the given users to the RecyclerView with a vertical layout.
    private void setupRvUsers(List<User> list) {
        rvUsers.setLayoutManager(new LinearLayoutManager(getContext(), LinearLayoutManager.VERTICAL, false));
        rvUsers.setAdapter(new UserRVAdapter(getActivity(), list));
    }

    // Populates placeholder users for manual testing and wires item clicks.
    private void setupDummyUsers() {
        tvNoUsers.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                userArray = new User[5];
                userArray[0] = new User("Armand", "Gray", "Creator", "danimeza@gmail.com", "1234567890", true, "");
                userArray[1] = new User("Michael", "Mei", "Unemployed", "test@gmail.com", "1234567890", true, "");
                userArray[2] = new User("Dylan", "Goodman", "Contract Reader", "genius@gmail.com", "1234567890", true, "");
                userArray[3] = new User("Amazing", "Gray", "Creator", "amazing@gmail.com", "1234567890", true, "");
                userArray[4] = new User("Blue", "Gray", "Creator", "blue@gmail.com", "1234567890", true, "");
                setupRvUsers(Arrays.asList(userArray));
                toggleShowUsers();
            }
        });
        rvUsers.addOnItemTouchListener(new RecyclerItemClickListener(getContext(),
                new RecyclerItemClickListener.OnItemClickListener() {
                    @Override
                    public void onItemClick(View view, int position) {
                        // FIX: the original check (userArray.length >= position)
                        // allowed position == userArray.length, which would throw
                        // ArrayIndexOutOfBoundsException on the lookup below.
                        if (userArray != null && position < userArray.length) {
                            User user = userArray[position];
                            Toast.makeText(getContext(),
                                    "Request Sent to " + user.getFirstName() + " " + user.getLastName(),
                                    Toast.LENGTH_SHORT).show();
                        }
                    }
                }));
    }

    @Override
    public void onResume() {
        super.onResume();
        // Register for user-list broadcasts only while the fragment is visible.
        LocalBroadcastManager.getInstance(getActivity().getApplicationContext())
                .registerReceiver(httpBroadcastReceiver,
                        new IntentFilter(HttpService.HTTP_SERVICE_MESSAGE));
    }

    @Override
    public void onPause() {
        super.onPause();
        LocalBroadcastManager.getInstance(getActivity().getApplicationContext())
                .unregisterReceiver(httpBroadcastReceiver);
    }

    /** Callback implemented by the host activity to trigger a discovery cycle. */
    public interface DiscoverCycleListener {
        void onTouchCycle();
    }
}
|
SeeMe/app/src/main/java/com/armandgray/seeme/views/DiscoverFragment.java
|
package com.armandgray.seeme.views;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.support.v4.content.LocalBroadcastManager;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.TextView;
import android.widget.Toast;
import com.armandgray.seeme.R;
import com.armandgray.seeme.models.User;
import com.armandgray.seeme.services.HttpService;
import com.armandgray.seeme.utils.RecyclerItemClickListener;
import com.armandgray.seeme.utils.UserRVAdapter;
import java.util.Arrays;
import java.util.List;
import static com.armandgray.seeme.MainActivity.ACTIVE_USER;
import static com.armandgray.seeme.utils.StringHelper.getBoldStringBuilder;
/**
* A simple {@link Fragment} subclass.
*/
/**
 * Fragment that displays nearby discoverable users in a RecyclerView, or a
 * placeholder message when no users are available.
 *
 * The user list arrives via a local broadcast from {@code HttpService}; the
 * receiver is registered in onResume and unregistered in onPause. The host
 * activity must implement {@link DiscoverCycleListener}.
 */
public class DiscoverFragment extends Fragment {

    private static final String TAG = "DISCOVER_FRAGMENT";
    private static final String NO_USERS_HEADER = "No Current Available Users";
    private static final String NO_USERS_CONTENT = "Users are discoverable through SeeMe Touch. On the main screen, press the touch button or set SeeMe Touch to auto.";

    private TextView tvNoUsers;
    private ImageView ivCycle;
    private LinearLayout usersContainer;
    private LinearLayout noUsersContainer;
    private RecyclerView rvUsers;
    // Most recently received user list; null/empty means "show placeholder".
    private User[] userArray;
    private DiscoverCycleListener discoverCycleListener;

    // Receives the user list pushed by HttpService and refreshes the UI.
    private BroadcastReceiver httpBroadcastReceiver = new BroadcastReceiver() {
        @Override
        public void onReceive(Context context, Intent intent) {
            Log.e("BroadcastReceiver: ", "http Broadcast Received");
            userArray = null;
            userArray = (User[]) intent.getParcelableArrayExtra(HttpService.HTTP_SERVICE_JSON_PAYLOAD);
            if (userArray != null && userArray.length != 0) {
                setupRvUsers(Arrays.asList(userArray));
            }
            toggleShowUsers();
        }
    };

    public DiscoverFragment() {}

    /**
     * Creates a new instance carrying the active user in its arguments bundle.
     */
    public static DiscoverFragment newInstance(User activeUser) {
        Bundle args = new Bundle();
        args.putParcelable(ACTIVE_USER, activeUser);
        DiscoverFragment fragment = new DiscoverFragment();
        fragment.setArguments(args);
        return fragment;
    }

    @Override
    public void onAttach(Context context) {
        super.onAttach(context);
        // The hosting activity must implement DiscoverCycleListener; fail fast
        // with a descriptive message otherwise.
        try {
            discoverCycleListener = (DiscoverCycleListener) context;
        } catch (ClassCastException e) {
            throw new ClassCastException(context.toString()
                    + " must implement DiscoverCycleListener");
        }
    }

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container,
                             Bundle savedInstanceState) {
        View rootView = inflater.inflate(R.layout.fragment_discover, container, false);
        assignFields(rootView);
        tvNoUsers.setText(getBoldStringBuilder(NO_USERS_HEADER, NO_USERS_CONTENT));
        toggleShowUsers();
        setupIvClickListener();
        setupDummyUsers();
        return rootView;
    }

    // Looks up all child views of the fragment layout.
    private void assignFields(View rootView) {
        rvUsers = (RecyclerView) rootView.findViewById(R.id.rvUsers);
        tvNoUsers = (TextView) rootView.findViewById(R.id.tvNoUsers);
        ivCycle = (ImageView) rootView.findViewById(R.id.ivCycle);
        noUsersContainer = (LinearLayout) rootView.findViewById(R.id.noUsersContainer);
        usersContainer = (LinearLayout) rootView.findViewById(R.id.usersContainer);
    }

    // Shows the user list when users exist, otherwise the placeholder panel.
    private void toggleShowUsers() {
        if (userArray == null || userArray.length == 0) {
            noUsersContainer.setVisibility(View.VISIBLE);
            usersContainer.setVisibility(View.INVISIBLE);
            return;
        }
        noUsersContainer.setVisibility(View.INVISIBLE);
        usersContainer.setVisibility(View.VISIBLE);
    }

    // Forwards taps on the cycle icon to the host activity.
    private void setupIvClickListener() {
        ivCycle.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                discoverCycleListener.onTouchCycle();
            }
        });
    }

    // Binds the given users to the RecyclerView with a vertical layout.
    private void setupRvUsers(List<User> list) {
        rvUsers.setLayoutManager(new LinearLayoutManager(getContext(), LinearLayoutManager.VERTICAL, false));
        rvUsers.setAdapter(new UserRVAdapter(getActivity(), list));
    }

    // Populates placeholder users for manual testing and wires item clicks.
    private void setupDummyUsers() {
        tvNoUsers.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                userArray = new User[5];
                userArray[0] = new User("Armand", "Gray", "Creator", "danimeza@gmail.com", "1234567890", true, "");
                userArray[1] = new User("Michael", "Mei", "Unemployed", "test@gmail.com", "1234567890", true, "");
                userArray[2] = new User("Dylan", "Goodman", "Contract Reader", "genius@gmail.com", "1234567890", true, "");
                userArray[3] = new User("Amazing", "Gray", "Creator", "amazing@gmail.com", "1234567890", true, "");
                userArray[4] = new User("Blue", "Gray", "Creator", "blue@gmail.com", "1234567890", true, "");
                setupRvUsers(Arrays.asList(userArray));
                toggleShowUsers();
            }
        });
        rvUsers.addOnItemTouchListener(new RecyclerItemClickListener(getContext(),
                new RecyclerItemClickListener.OnItemClickListener() {
                    @Override
                    public void onItemClick(View view, int position) {
                        // NOTE(review): placeholder handler -- only shows a
                        // generic toast; item-specific behavior is TODO.
                        Toast.makeText(getContext(), "Click", Toast.LENGTH_SHORT).show();
                    }
                }));
    }

    @Override
    public void onResume() {
        super.onResume();
        // Register for user-list broadcasts only while the fragment is visible.
        LocalBroadcastManager.getInstance(getActivity().getApplicationContext())
                .registerReceiver(httpBroadcastReceiver,
                        new IntentFilter(HttpService.HTTP_SERVICE_MESSAGE));
    }

    @Override
    public void onPause() {
        super.onPause();
        LocalBroadcastManager.getInstance(getActivity().getApplicationContext())
                .unregisterReceiver(httpBroadcastReceiver);
    }

    /** Callback implemented by the host activity to trigger a discovery cycle. */
    public interface DiscoverCycleListener {
        void onTouchCycle();
    }
}
|
added code in ItemClickListener to get User and toast item specific message
|
SeeMe/app/src/main/java/com/armandgray/seeme/views/DiscoverFragment.java
|
added code in ItemClickListener to get User and toast item specific message
|
|
Java
|
cc0-1.0
|
ad2de9a3e343e07f5a88febcc91ea8d32e653455
| 0
|
MobileApps-Cascadia/hidenseek-phase2,MobileApps-Cascadia/hidenseek-phase2,MobileApps-Cascadia/hidenseek-phase2
|
package com.cascadia.hidenseek;
import com.cascadia.hidenseek.Player.Role;
import com.cascadia.hidenseek.Player.Status;
import com.cascadia.hidenseek.network.DeletePlayingRequest;
import com.cascadia.hidenseek.network.GetPlayerListRequest;
import com.cascadia.hidenseek.network.PutGpsRequest;
import com.google.android.gms.common.ConnectionResult;
import com.google.android.gms.common.GooglePlayServicesUtil;
import com.google.android.gms.maps.CameraUpdateFactory;
import com.google.android.gms.maps.GoogleMap;
import com.google.android.gms.maps.SupportMapFragment;
import com.google.android.gms.maps.model.LatLng;
import com.google.android.gms.maps.model.MarkerOptions;
import android.support.v4.app.FragmentActivity;
import android.support.v4.app.FragmentManager;
import android.annotation.TargetApi;
import android.app.AlertDialog;
import android.app.Dialog;
import android.app.DialogFragment;
import android.content.DialogInterface;
import android.content.Intent;
import android.location.Location;
import android.os.Bundle;
import android.os.Handler;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.widget.ImageButton;
import android.widget.Toast;
import android.os.Build;
/**
 * Active-match screen: shows every player's position on a Google map and
 * polls the server twice per second for player updates while this activity
 * is active. If the local player is marked Spotted, a confirmation dialog
 * is shown; confirming removes the player from the match.
 */
public class Active extends FragmentActivity {
    GoogleMap googleMap;
    Match match;
    Player player;
    // Gates the periodic polling loop; cleared in onStop.
    boolean isActive;
    Status pend;
    Role playerRole;
    // Last player examined in the update loop; captured for the dialog's
    // click handlers below.
    Player temp;
    // Used for periodic callback.
    private Handler h2 = new Handler();
    // Millisecond delay between callbacks
    private final int callbackDelay = 500;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_active);
        match = LoginManager.GetMatch();
        player = LoginManager.playerMe;
        isActive = true;
        if (match == null || player == null) {
            Dialog d = new Dialog(this);
            d.setTitle("Error: null match.");
            d.show();
            finish();
        }
        // Show user's position on map
        googleMap = ((SupportMapFragment) getSupportFragmentManager().findFragmentById(R.id.mapview)).getMap();
        googleMap.setMapType(GoogleMap.MAP_TYPE_NORMAL);
        googleMap.setMyLocationEnabled(true);
        googleMap.setOnMyLocationChangeListener(new GoogleMap.OnMyLocationChangeListener() {
            @Override
            public void onMyLocationChange(Location location) {
                LatLng point = new LatLng(location.getLatitude(), location.getLongitude());
                // Keep the player's server-side location in sync with the map.
                player.SetLocation(location);
                googleMap.animateCamera(CameraUpdateFactory.newLatLngZoom(point, 17));
            }
        });
        // User clicked Leave Match button
        ImageButton btnLeave = (ImageButton) findViewById(R.id.btnLeaveGame);
        btnLeave.setOnClickListener(new View.OnClickListener() {
            public void onClick(View v) {
                Intent intent = new Intent(Active.this, Home.class);
                startActivity(intent);
            }
        });
        Runnable callback = new Runnable() {
            // This function gets called twice per second until the app is stopped.
            @Override
            public void run() {
                // Do request and update values in match. No callback needed.
                GetPlayerListRequest gplRequest = new GetPlayerListRequest() {
                    @Override
                    protected void onException(Exception e) {}

                    @Override
                    protected void onComplete(Match match) {
                        googleMap.clear();
                        for (Player p : match.players) {
                            pend = p.GetStatus();
                            playerRole = p.GetRole();
                            temp = p;
                            if (pend == Status.Spotted) {
                                // Ask whether being spotted was correct; on "Yes",
                                // remove the player from the match.
                                // NOTE(review): this fires on every poll while the
                                // status stays Spotted, so duplicate dialogs can
                                // stack up -- consider a one-shot guard.
                                // FIX: use the Activity as the dialog context.
                                // Building an AlertDialog with getBaseContext()
                                // throws WindowManager.BadTokenException at show().
                                AlertDialog.Builder builder1 = new AlertDialog.Builder(Active.this);
                                builder1.setTitle("Found You");
                                builder1.setMessage("The Seeker has just marked that he found you is this correct?");
                                builder1.setCancelable(true);
                                builder1.setPositiveButton("Yes",
                                        new DialogInterface.OnClickListener() {
                                            public void onClick(DialogInterface dialog, int id) {
                                                DeletePlayingRequest dpr = new DeletePlayingRequest() {
                                                    @Override
                                                    protected void onException(Exception e) {
                                                        e.printStackTrace();
                                                    }
                                                };
                                                dpr.DoRequest(temp);
                                            }
                                        });
                                builder1.setNegativeButton("No",
                                        new DialogInterface.OnClickListener() {
                                            public void onClick(DialogInterface dialog, int id) {
                                                dialog.cancel();
                                            }
                                        });
                                AlertDialog alert11 = builder1.create();
                                alert11.show();
                            }
                            // Dont't add a marker for players with null locations or one for myself.
                            if (p.GetLocation() != null && p.GetId() != player.GetId()) {
                                googleMap.addMarker(
                                        new MarkerOptions()
                                                .position(new LatLng(p.GetLocation().getLatitude(),
                                                        p.GetLocation().getLongitude()))
                                                .title(p.GetName()));
                            }
                        }
                    }
                };
                gplRequest.DoRequest(match);
                // Do request. No callback needed. Player location set by
                // Google Maps' onMyLocationChange
                PutGpsRequest pgRequest = new PutGpsRequest() {
                    @Override
                    protected void onException(Exception e) {}
                };
                pgRequest.DoRequest(player);
                // Reschedule ourselves until onStop clears the flag.
                if (isActive) {
                    h2.postDelayed(this, callbackDelay);
                }
            }
        };
        callback.run(); // Begin periodic updating!
    }

    public void onPause() {
        super.onPause();
    }

    @Override
    public void onResume() {
        super.onResume();
        // Verify Google Play Services availability before using the map.
        final String TAG_ERROR_DIALOG_FRAGMENT = "errorDialog";
        int status = GooglePlayServicesUtil.isGooglePlayServicesAvailable(this);
        if (status == ConnectionResult.SUCCESS) {
            // no problems just work
        } else if (GooglePlayServicesUtil.isUserRecoverableError(status)) {
            ErrorDialogFragment.newInstance(status).show(getSupportFragmentManager(), TAG_ERROR_DIALOG_FRAGMENT);
        } else {
            Toast.makeText(this, "Google Maps V2 is not available!", Toast.LENGTH_LONG).show();
            finish();
        }
    }

    @Override
    protected void onDestroy() {
        super.onDestroy();
        // Remove this player from the match when the screen is torn down.
        DeletePlayingRequest dpRequest = new DeletePlayingRequest() {
            @Override
            protected void onException(Exception e) {
                e.printStackTrace();
            }
        };
        dpRequest.DoRequest(player);
    }

    /** Dialog fragment shown when Google Play Services needs user action. */
    @TargetApi(Build.VERSION_CODES.HONEYCOMB)
    public static class ErrorDialogFragment extends DialogFragment {
        static final String ARG_STATUS = "status";

        static ErrorDialogFragment newInstance(int status) {
            Bundle args = new Bundle();
            args.putInt(ARG_STATUS, status);
            ErrorDialogFragment result = new ErrorDialogFragment();
            result.setArguments(args);
            return (result);
        }

        // NOTE(review): empty stub -- the support-FragmentManager overload never
        // actually shows the dialog; confirm whether this is intentional.
        public void show(FragmentManager supportFragmentManager,
                         String TAG_ERROR_DIALOG_FRAGMENT) {
            // TODO Auto-generated method stub
        }

        @Override
        public Dialog onCreateDialog(Bundle savedInstanceState) {
            Bundle args = getArguments();
            return GooglePlayServicesUtil.getErrorDialog(args.getInt(ARG_STATUS),
                    getActivity(), 0);
        }

        @Override
        public void onDismiss(DialogInterface dlg) {
            if (getActivity() != null) {
                getActivity().finish();
            }
        }
    }

    @Override
    public void onStop() {
        super.onStop();
        // Stops the Handler loop from rescheduling itself.
        isActive = false;
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        // Inflate the menu; this adds items to the action bar if it is present.
        getMenuInflater().inflate(R.menu.players, menu);
        return true;
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        // Handle action bar item clicks here. The action bar will
        // automatically handle clicks on the Home/Up button, so long
        // as you specify a parent activity in AndroidManifest.xml.
        int id = item.getItemId();
        if (id == R.id.players_list) {
            Intent intent = new Intent(Active.this, CurrentPlayers.class);
            startActivity(intent);
            return true;
        }
        return super.onOptionsItemSelected(item);
    }
}
|
HideNSeek/src/com/cascadia/hidenseek/Active.java
|
package com.cascadia.hidenseek;
import com.cascadia.hidenseek.network.DeletePlayingRequest;
import com.cascadia.hidenseek.network.GetPlayerListRequest;
import com.cascadia.hidenseek.network.PutGpsRequest;
import com.google.android.gms.common.ConnectionResult;
import com.google.android.gms.common.GooglePlayServicesUtil;
import com.google.android.gms.maps.CameraUpdateFactory;
import com.google.android.gms.maps.GoogleMap;
import com.google.android.gms.maps.SupportMapFragment;
import com.google.android.gms.maps.model.LatLng;
import com.google.android.gms.maps.model.MarkerOptions;
import android.support.v4.app.FragmentActivity;
import android.support.v4.app.FragmentManager;
import android.annotation.TargetApi;
import android.app.Dialog;
import android.app.DialogFragment;
import android.content.DialogInterface;
import android.content.Intent;
import android.location.Location;
import android.os.Bundle;
import android.os.Handler;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.widget.ImageButton;
import android.widget.Toast;
import android.os.Build;
/**
 * Active-match screen: shows every player's position on a Google map and
 * polls the server twice per second for player updates while this activity
 * is active.
 */
public class Active extends FragmentActivity {
    GoogleMap googleMap;
    Match match;
    Player player;
    // Gates the periodic polling loop; cleared in onStop.
    boolean isActive;
    // Used for periodic callback.
    private Handler h2 = new Handler();
    // Millisecond delay between callbacks
    private final int callbackDelay = 500;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_active);
        match = LoginManager.GetMatch();
        player = LoginManager.playerMe;
        isActive = true;
        if (match == null || player == null) {
            Dialog d = new Dialog(this);
            d.setTitle("Error: null match.");
            d.show();
            finish();
        }
        // Show user's position on map
        googleMap = ((SupportMapFragment) getSupportFragmentManager().findFragmentById(R.id.mapview)).getMap();
        googleMap.setMapType(GoogleMap.MAP_TYPE_NORMAL);
        googleMap.setMyLocationEnabled(true);
        googleMap.setOnMyLocationChangeListener(new GoogleMap.OnMyLocationChangeListener() {
            @Override
            public void onMyLocationChange(Location location) {
                LatLng point = new LatLng(location.getLatitude(), location.getLongitude());
                // Keep the player's server-side location in sync with the map.
                player.SetLocation(location);
                googleMap.animateCamera(CameraUpdateFactory.newLatLngZoom(point, 17));
            }
        });
        // User clicked Leave Match button
        ImageButton btnLeave = (ImageButton) findViewById(R.id.btnLeaveGame);
        btnLeave.setOnClickListener(new View.OnClickListener() {
            public void onClick(View v) {
                Intent intent = new Intent(Active.this, Home.class);
                startActivity(intent);
            }
        });
        Runnable callback = new Runnable() {
            // This function gets called twice per second until the app is stopped.
            @Override
            public void run() {
                // Do request and update values in match. No callback needed.
                GetPlayerListRequest gplRequest = new GetPlayerListRequest() {
                    @Override
                    protected void onException(Exception e) {}

                    @Override
                    protected void onComplete(Match match) {
                        // Redraw all markers from scratch on each update.
                        googleMap.clear();
                        for (Player p : match.players) {
                            // Dont't add a marker for players with null locations or one for myself.
                            if (p.GetLocation() != null && p.GetId() != player.GetId()) {
                                googleMap.addMarker(
                                        new MarkerOptions()
                                                .position(new LatLng(p.GetLocation().getLatitude(),
                                                        p.GetLocation().getLongitude()))
                                                .title(p.GetName()));
                            }
                        }
                    }
                };
                gplRequest.DoRequest(match);
                // Do request. No callback needed. Player location set by
                // Google Maps' onMyLocationChange
                PutGpsRequest pgRequest = new PutGpsRequest() {
                    @Override
                    protected void onException(Exception e) {}
                };
                pgRequest.DoRequest(player);
                // Reschedule ourselves until onStop clears the flag.
                if (isActive) {
                    h2.postDelayed(this, callbackDelay);
                }
            }
        };
        callback.run(); // Begin periodic updating!
    }

    public void onPause() {
        super.onPause();
    }

    @Override
    public void onResume() {
        super.onResume();
        // Verify Google Play Services availability before using the map.
        final String TAG_ERROR_DIALOG_FRAGMENT = "errorDialog";
        int status = GooglePlayServicesUtil.isGooglePlayServicesAvailable(this);
        if (status == ConnectionResult.SUCCESS) {
            // no problems just work
        } else if (GooglePlayServicesUtil.isUserRecoverableError(status)) {
            ErrorDialogFragment.newInstance(status).show(getSupportFragmentManager(), TAG_ERROR_DIALOG_FRAGMENT);
        } else {
            Toast.makeText(this, "Google Maps V2 is not available!", Toast.LENGTH_LONG).show();
            finish();
        }
    }

    @Override
    protected void onDestroy() {
        super.onDestroy();
        // Remove this player from the match when the screen is torn down.
        DeletePlayingRequest dpRequest = new DeletePlayingRequest() {
            @Override
            protected void onException(Exception e) {
                e.printStackTrace();
            }
        };
        dpRequest.DoRequest(player);
    }

    /** Dialog fragment shown when Google Play Services needs user action. */
    @TargetApi(Build.VERSION_CODES.HONEYCOMB)
    public static class ErrorDialogFragment extends DialogFragment {
        static final String ARG_STATUS = "status";

        static ErrorDialogFragment newInstance(int status) {
            Bundle args = new Bundle();
            args.putInt(ARG_STATUS, status);
            ErrorDialogFragment result = new ErrorDialogFragment();
            result.setArguments(args);
            return (result);
        }

        // NOTE(review): empty stub -- the support-FragmentManager overload never
        // actually shows the dialog; confirm whether this is intentional.
        public void show(FragmentManager supportFragmentManager,
                         String TAG_ERROR_DIALOG_FRAGMENT) {
            // TODO Auto-generated method stub
        }

        @Override
        public Dialog onCreateDialog(Bundle savedInstanceState) {
            Bundle args = getArguments();
            return GooglePlayServicesUtil.getErrorDialog(args.getInt(ARG_STATUS),
                    getActivity(), 0);
        }

        @Override
        public void onDismiss(DialogInterface dlg) {
            if (getActivity() != null) {
                getActivity().finish();
            }
        }
    }

    @Override
    public void onStop() {
        super.onStop();
        // Stops the Handler loop from rescheduling itself.
        isActive = false;
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        // Inflate the menu; this adds items to the action bar if it is present.
        getMenuInflater().inflate(R.menu.players, menu);
        return true;
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        // Handle action bar item clicks here. The action bar will
        // automatically handle clicks on the Home/Up button, so long
        // as you specify a parent activity in AndroidManifest.xml.
        int id = item.getItemId();
        if (id == R.id.players_list) {
            Intent intent = new Intent(Active.this, CurrentPlayers.class);
            startActivity(intent);
            return true;
        }
        return super.onOptionsItemSelected(item);
    }
}
|
I believe the listener for being marked as spotted is finished. I have
the alert set up to send a delete request to mark him out of the game,
which should take him off the match. Next: test the put request.
|
HideNSeek/src/com/cascadia/hidenseek/Active.java
|
I believe the listener for being marked as spotted is finished. I have the alert set up to send a delete request to mark him out of the game, which should take him off the match. Next: test the put request.
|
|
Java
|
mpl-2.0
|
437f7ffbce8e16b08695ef173cd96570240dfb1e
| 0
|
WeaveTeam/WeaveJS,WeaveTeam/WeaveJS,WeaveTeam/WeaveJS,WeaveTeam/WeaveJS
|
/*
Weave (Web-based Analysis and Visualization Environment)
Copyright (C) 2008-2011 University of Massachusetts Lowell
This file is a part of Weave.
Weave is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License, Version 3,
as published by the Free Software Foundation.
Weave is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Weave. If not, see <http://www.gnu.org/licenses/>.
*/
package weave.servlets;
import java.io.IOException;
import java.io.InputStream;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Vector;
import java.util.zip.DeflaterOutputStream;
import java.util.zip.InflaterInputStream;
import javax.servlet.ServletException;
import javax.servlet.ServletOutputStream;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import weave.utils.CSVParser;
import weave.utils.ListUtils;
import com.thoughtworks.paranamer.BytecodeReadingParanamer;
import com.thoughtworks.paranamer.Paranamer;
import flex.messaging.MessageException;
import flex.messaging.io.SerializationContext;
import flex.messaging.io.amf.ASObject;
import flex.messaging.io.amf.Amf3Input;
import flex.messaging.io.amf.Amf3Output;
import flex.messaging.messages.ErrorMessage;
/**
* This class provides a servlet interface to a set of functions.
* The functions may be invoked using URL parameters via HTTP GET or AMF3-serialized objects via HTTP POST.
* Currently, the result of calling a function is given as an AMF3-serialized object.
*
* TODO: Provide optional JSON output.
*
* Not all objects will be supported automatically.
* GenericServlet supports basic AMF3-serialized objects such as String,Object,Array.
*
* The following mappings work:
* Flex Array -> Java Object[]
* Flex Object -> Java Map<String,Object>
* Flex String -> Java String
* Flex Boolean -> Java boolean
* Flex Number -> Java double
*
* The following Java parameter types are supported:
* Boolean,
* String, String[], String[][],
* Object, Object[],
* double[], double[][],
* Map<String,Object>, List
*
* TODO: Add support for more common parameter types.
*
* @author skota
* @author adufilie
*/
public class GenericServlet extends HttpServlet
{
private static final long serialVersionUID = 1L;
/**
* This is the name of the URL parameter corresponding to the method name.
* Subclasses can change this to something else if there is a conflict with another parameter.
*/
protected String METHOD_PARAM_NAME = "methodName";
private Map<String, ExposedMethod> methodMap = new HashMap<String, ExposedMethod>(); //Key: methodName
private Paranamer paranamer = new BytecodeReadingParanamer(); // this gets parameter names from Methods
/**
* This class contains a Method with its parameter names and class instance.
*/
private class ExposedMethod
{
    public ExposedMethod(Object instance, Method method, String[] paramNames)
    {
        this.instance = instance;
        this.method = method;
        this.paramNames = paramNames;
    }
    // The object the method is invoked on.
    public Object instance;
    // The reflected method to invoke.
    public Method method;
    // Parameter names recovered via Paranamer; null when unavailable.
    public String[] paramNames;
}
/**
* Default constructor.
* This initializes all public methods defined in a class extending GenericServlet.
*/
protected GenericServlet()
{
    super();
    // Expose the public methods declared on this servlet subclass itself.
    initLocalMethods();
}
/**
* @param serviceObjects The objects to invoke methods on.
*/
protected GenericServlet(Object ...serviceObjects)
{
    super();
    initLocalMethods();
    // Also expose the public methods of each supplied service object.
    for (Object serviceObject : serviceObjects)
        initAllMethods(serviceObject);
}
/**
* @param serviceObjects The objects to invoke methods on.
* @param methodParamName The name of the URL parameter specifying the method name.
*/
protected GenericServlet(String methodParamName, Object ...serviceObjects)
{
    super();
    initLocalMethods();
    // Override the URL parameter used to select the method to invoke.
    this.METHOD_PARAM_NAME = methodParamName;
    for (Object serviceObject : serviceObjects)
        initAllMethods(serviceObject);
}
/**
* This function will expose all the public methods declared on this servlet instance as servlet methods.
*/
protected void initLocalMethods()
{
    // Exposes the public methods declared on this servlet subclass itself.
    initAllMethods(this);
}
/**
* This function will expose all the declared public methods of a class as servlet methods,
* except methods that match those declared by GenericServlet or a superclass of GenericServlet.
* @param serviceObject The object containing public methods to be exposed by the servlet.
*/
protected void initAllMethods(Object serviceObject)
{
    // Methods inherited from (or matching) GenericServlet itself must not
    // be exposed -- only the service object's own declared public methods.
    Method[] baseMethods = GenericServlet.class.getMethods();
    Method[] candidates = serviceObject.getClass().getDeclaredMethods();
    for (int index = candidates.length - 1; index >= 0; index--)
    {
        Method candidate = candidates[index];
        boolean definedByBase = false;
        for (Method baseMethod : baseMethods)
        {
            boolean sameName = candidate.getName().equals(baseMethod.getName());
            if (sameName && Arrays.equals(candidate.getParameterTypes(), baseMethod.getParameterTypes()))
            {
                definedByBase = true;
                break;
            }
        }
        if (definedByBase)
            continue;
        initMethod(serviceObject, candidate);
    }
    // for debugging
    printExposedMethods();
}
/**
* @param serviceObject The instance of an object to use in the servlet.
* @param method The method to expose on serviceObject.
*/
synchronized protected void initMethod(Object serviceObject, Method method)
{
    // Non-public methods are never exposed.
    if (!Modifier.isPublic(method.getModifiers()))
        return;
    String methodName = method.getName();
    if (!methodMap.containsKey(methodName))
    {
        // Paranamer returns null when parameter names cannot be recovered.
        String[] parameterNames = paranamer.lookupParameterNames(method, false);
        methodMap.put(methodName, new ExposedMethod(serviceObject, method, parameterNames));
        return;
    }
    // Overloaded name: mark the entry as unsupported and report it.
    methodMap.put(methodName, null);
    System.err.println(String.format(
        "Method %s.%s will not be supported because there are multiple definitions.",
        this.getClass().getName(), methodName
    ));
}
/**
 * Prints the list of exposed servlet methods (and unsupported duplicate
 * names) to standard output, sorted by method name.  Debugging aid only.
 */
protected void printExposedMethods()
{
    // Use StringBuilder instead of repeated String concatenation in the loop.
    StringBuilder output = new StringBuilder();
    List<String> methodNames = new Vector<String>(methodMap.keySet());
    Collections.sort(methodNames);
    for (String methodName : methodNames)
    {
        ExposedMethod m = methodMap.get(methodName);
        if (m != null)
            output.append(String.format(
                "Exposed servlet method: %s.%s\n",
                m.instance.getClass().getName(),
                formatFunctionSignature(
                    m.method.getName(),
                    m.method.getParameterTypes(),
                    m.paramNames
                )
            ));
        else
            // BUGFIX: a trailing newline was missing here, gluing this entry
            // to the next line of output.
            output.append("Not exposed: ").append(methodName).append('\n');
    }
    System.out.print(output);
}
@SuppressWarnings("rawtypes")
protected void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException
{
    try
    {
        // Remember this thread's output stream so exposed methods can
        // retrieve it via getServletOutputStream().
        synchronized (servletOutputStreamMap)
        {
            servletOutputStreamMap.put(Thread.currentThread(), response.getOutputStream());
        }
        try
        {
            // POST bodies are deflate-compressed AMF3 objects carrying the
            // method name plus either an ordered Object[] or a name->value Map
            // of parameters ("methodParameters").
            Object obj = deseriaizeCompressedAmf3(request.getInputStream());
            String methodName = (String) ((ASObject)obj).get(METHOD_PARAM_NAME);
            Object methodParameters = ((ASObject)obj).get("methodParameters");
            if(methodParameters instanceof Object[])
                invokeMethod(methodName, (Object[])methodParameters, request, response);
            else
                invokeMethod(methodName, (Map)methodParameters, request, response);
        }
        catch (Exception e)
        {
            // Deserialization/dispatch failure: report it to the client as an AMF fault.
            e.printStackTrace();
            sendError(response, e.getMessage());
        }
    }
    finally
    {
        // Always clear the per-thread stream mapping, even on error.
        synchronized (servletOutputStreamMap)
        {
            servletOutputStreamMap.remove(Thread.currentThread());
        }
    }
}
@SuppressWarnings({ "unchecked", "rawtypes" })
protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException
{
    try
    {
        // Remember this thread's output stream so exposed methods can
        // retrieve it via getServletOutputStream().
        synchronized (servletOutputStreamMap)
        {
            servletOutputStreamMap.put(Thread.currentThread(), response.getOutputStream());
        }
        List<String> urlParamNames = Collections.list(request.getParameterNames());
        // Read Method Name from URL
        String methodName = request.getParameter(METHOD_PARAM_NAME);
        // Every URL parameter (including METHOD_PARAM_NAME, which is filtered
        // out later) is forwarded as a named method parameter.
        HashMap<String, String> params = new HashMap();
        for(String paramName: urlParamNames)
            params.put(paramName, request.getParameter(paramName));
        invokeMethod(methodName, params, request, response);
    }
    finally
    {
        // Always clear the per-thread stream mapping, even on error.
        synchronized (servletOutputStreamMap)
        {
            servletOutputStreamMap.remove(Thread.currentThread());
        }
    }
}
/**
 * This maps a thread to the corresponding ServletOutputStream for the doGet() or doPost() call that thread is handling.
 */
private Map<Thread,ServletOutputStream> servletOutputStreamMap = new HashMap<Thread,ServletOutputStream>();
/**
 * This function retrieves the ServletOutputStream associated with the current thread's doGet() or doPost() call.
 * In a public function with a void return type, you can use this ServletOutputStream for full control over the output.
 * @return The stream for the current thread's request, or null if the current
 *         thread is not inside a doGet()/doPost() call.
 */
protected ServletOutputStream getServletOutputStream()
{
    synchronized (servletOutputStreamMap)
    {
        return servletOutputStreamMap.get(Thread.currentThread());
    }
}
/**
 * Invokes an exposed method using named parameters.
 * Each entry in params is matched to a declared parameter name; unmatched
 * entries are gathered and passed to a single Map-typed parameter if the
 * method declares one, otherwise they are logged as unused.
 * @param methodName The name of the function to invoke.
 * @param params Maps parameter names to values; may be null.
 * @param request This parameter is the same from doPost() and doGet().
 * @param response This parameter is the same from doPost() and doGet().
 * @throws ServletException
 * @throws IOException
 */
@SuppressWarnings({ "unchecked", "rawtypes" })
private void invokeMethod(String methodName, Map params, HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException
{
    if(!methodMap.containsKey(methodName) || methodMap.get(methodName) == null)
    {
        sendError(response, String.format("Method \"%s\" not supported.", methodName));
        return;
    }
    ExposedMethod exposedMethod = methodMap.get(methodName);
    String[] argNames = exposedMethod.paramNames;
    Class[] argTypes = exposedMethod.method.getParameterTypes();
    // BUGFIX: indices returned by ListUtils.findString() are positions in
    // argNames, while the Map-parameter loop below indexes by argTypes.
    // Sizing the array by argTypes.length alone caused an
    // ArrayIndexOutOfBoundsException whenever the two lengths differ
    // (e.g. paranamer found no names and argNames is null), so size it to
    // accommodate both index spaces.
    int argCount = Math.max(argTypes.length, argNames == null ? 0 : argNames.length);
    Object[] argValues = new Object[argCount];
    Map extraParameters = null; // parameters that weren't mapped directly to method arguments
    // For each method parameter, get the corresponding url parameter value.
    if (argNames != null && params != null)
    {
        //TODO: check why params is null
        for (Object parameterName : params.keySet())
        {
            Object parameterValue = params.get(parameterName);
            int index = ListUtils.findString((String)parameterName, argNames);
            if (index >= 0)
            {
                argValues[index] = parameterValue;
            }
            else if (!parameterName.equals(METHOD_PARAM_NAME))
            {
                // Not a declared parameter name; save it for a possible Map argument.
                if (extraParameters == null)
                    extraParameters = new HashMap();
                extraParameters.put(parameterName, parameterValue);
            }
        }
    }
    // support for a function having a single Map<String,String> parameter
    // see if we can find a Map arg. If so, set it to extraParameters
    if (argTypes != null)
    {
        for (int i = 0; i < argTypes.length; i++)
        {
            if (argTypes[i] == Map.class)
            {
                // avoid passing a null Map to the function
                if (extraParameters == null)
                    extraParameters = new HashMap<String,String>();
                argValues[i] = extraParameters;
                extraParameters = null;
                break;
            }
        }
    }
    if (extraParameters != null)
    {
        System.out.println("Received servlet request: " + methodName + Arrays.asList(argValues));
        System.out.println("Unused parameters: "+extraParameters.entrySet());
    }
    invokeMethod(methodName, argValues, request, response);
}
/**
 * @param methodName The name of the function to invoke.
 * @param methodParameters A list of input parameters for the method. Values will be cast to the appropriate types if necessary.
 * @param request This parameter is the same from doPost() and doGet().
 * @param response This parameter is the same from doPost() and doGet().
 * @throws ServletException
 * @throws IOException
 */
@SuppressWarnings("rawtypes")
private void invokeMethod(String methodName, Object[] methodParameters, HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException
{
    // get method by name
    ExposedMethod exposedMethod = methodMap.get(methodName);
    if (exposedMethod == null)
    {
        sendError(response, "Unknown method: "+methodName);
        // BUGFIX: without this return, the lines below dereference the null
        // exposedMethod and throw a NullPointerException.
        return;
    }
    // cast input values to appropriate types if necessary
    Class[] expectedArgTypes = exposedMethod.method.getParameterTypes();
    if (expectedArgTypes.length == methodParameters.length)
    {
        for (int index = 0; index < methodParameters.length; index++)
            methodParameters[index] = castParameter(methodParameters[index], expectedArgTypes[index]);
    }
    // prepare to output the result of the method call
    ServletOutputStream servletOutputStream = response.getOutputStream();
    // Invoke the method on the object with the arguments
    try
    {
        Object result = exposedMethod.method.invoke(exposedMethod.instance, methodParameters);
        // void methods are expected to write their own output
        // (see getServletOutputStream()).
        if (exposedMethod.method.getReturnType() != void.class)
            seriaizeCompressedAmf3(result, servletOutputStream);
    }
    catch (InvocationTargetException e)
    {
        // Report the underlying cause thrown by the target method.
        e.getCause().printStackTrace();
        sendError(response, e.getCause().getMessage());
    }
    catch (IllegalArgumentException e)
    {
        // Argument count/type mismatch -- tell the client what was expected.
        String error = e.getMessage() + "\n" +
            "Expected: " + formatFunctionSignature(methodName, expectedArgTypes, exposedMethod.paramNames) + "\n" +
            "Received: " + formatFunctionSignature(methodName, methodParameters, null);
        System.out.println(error);
        sendError(response, error);
    }
    catch (Exception e)
    {
        System.out.println(methodName + (List)Arrays.asList(methodParameters));
        e.printStackTrace();
        sendError(response, e.getMessage());
    }
}
/**
 * Casts a single deserialized AMF3 value to the parameter type the target
 * method expects.  Supported conversions: String to boolean/Boolean,
 * CSV String to String[]/List, and Object[] to List, Object[][],
 * String[], String[][], double[], double[][].
 * Returns the value unchanged when no conversion applies.
 */
@SuppressWarnings({ "unchecked", "rawtypes" })
private Object castParameter(Object value, Class<?> expectedType)
{
    // if given value is a String, check if the function is expecting a different type
    if (value instanceof String)
    {
        if (expectedType == boolean.class || expectedType == Boolean.class)
            return ((String)(value)).equalsIgnoreCase("true");
        if (expectedType == String[].class)
        {
            String[][] table = CSVParser.defaultParser.parseCSV((String)value);
            // empty CSV input yields an empty array, not an AIOOBE
            return table.length == 0 ? new String[0] : table[0];
        }
        if (expectedType == List.class)
            return Arrays.asList(CSVParser.defaultParser.parseCSV((String)value)[0]);
    }
    else if (value != null)
    {
        if (value instanceof Boolean && expectedType == boolean.class)
            return (boolean)(Boolean)value;
        if (value.getClass() == Object[].class)
        {
            Object[] valueArray = (Object[])value;
            if (expectedType == List.class)
                return ListUtils.copyArrayToList(valueArray, new Vector());
            if (expectedType == Object[][].class)
            {
                Object[][] valueMatrix = new Object[valueArray.length][];
                for (int i = 0; i < valueArray.length; i++)
                    valueMatrix[i] = (Object[])valueArray[i];
                return valueMatrix;
            }
            if (expectedType == String[][].class)
            {
                String[][] valueMatrix = new String[valueArray.length][];
                for (int i = 0; i < valueArray.length; i++)
                {
                    // cast Objects to Strings
                    Object[] objectArray = (Object[])valueArray[i];
                    valueMatrix[i] = ListUtils.copyStringArray(objectArray, new String[objectArray.length]);
                }
                return valueMatrix;
            }
            if (expectedType == String[].class)
                return ListUtils.copyStringArray(valueArray, new String[valueArray.length]);
            if (expectedType == double[][].class)
            {
                double[][] valueMatrix = new double[valueArray.length][];
                for (int i = 0; i < valueArray.length; i++)
                {
                    // cast Objects to doubles
                    Object[] objectArray = (Object[])valueArray[i];
                    valueMatrix[i] = ListUtils.copyDoubleArray(objectArray, new double[objectArray.length]);
                }
                return valueMatrix;
            }
            if (expectedType == double[].class)
                return ListUtils.copyDoubleArray(valueArray, new double[valueArray.length]);
        }
    }
    return value;
}
/**
 * This function formats a Java function signature as a String.
 * @param methodName The name of the method.
 * @param paramValuesOrTypes A list of Class objects or arbitrary Objects (possibly null) to get the class names from.
 * @param paramNames The names of the parameters, may be null.
 * @return A readable Java function signature.
 */
private String formatFunctionSignature(String methodName, Object[] paramValuesOrTypes, String[] paramNames)
{
    // don't use paramNames if the length doesn't match the paramValuesOrTypes length.
    if (paramNames != null && paramNames.length != paramValuesOrTypes.length)
        paramNames = null;
    List<String> names = new Vector<String>(paramValuesOrTypes.length);
    for (int i = 0; i < paramValuesOrTypes.length; i++)
    {
        Object item = paramValuesOrTypes[i];
        String name;
        // BUGFIX: guard against null elements.  This method is called with raw
        // received parameter values (which may contain null) from the
        // IllegalArgumentException handler, and the old code unconditionally
        // dereferenced them via getClass(), throwing NullPointerException.
        if (item == null)
            name = "null";
        else if (item instanceof Class)
            name = ((Class<?>)item).getName();
        else
            name = item.getClass().getName();
        // decode output of Class.getName()
        while (name.charAt(0) == '[') // array type
        {
            name = name.substring(1) + "[]";
            // decode element type encoding
            String type = "";
            switch (name.charAt(0))
            {
                case 'Z': type = "boolean"; break;
                case 'B': type = "byte"; break;
                case 'C': type = "char"; break;
                case 'D': type = "double"; break;
                case 'F': type = "float"; break;
                case 'I': type = "int"; break;
                case 'J': type = "long"; break;
                case 'S': type = "short"; break;
                case 'L':
                    // remove ';'
                    name = name.replace(";", "");
                    break;
                default: continue;
            }
            // remove first char encoding
            name = type + name.substring(1);
        }
        // hide package names
        if (name.indexOf('.') >= 0)
            name = name.substring(name.lastIndexOf('.') + 1);
        if (paramNames != null)
            name += " " + paramNames[i];
        names.add(name);
    }
    String result = names.toString();
    return String.format("%s(%s)", methodName, result.substring(1, result.length() - 1));
}
/**
 * Serializes an ErrorMessage describing the given message to the response
 * stream so the Flex client receives a fault instead of a result.
 * The commented-out sendError() call below suggests the HTTP status is
 * intentionally left at 200 so the client still reads the AMF payload.
 */
private void sendError(HttpServletResponse response, String message) throws IOException
{
    //response.setHeader("Cache-Control", "no-cache");
    //response.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, message);
    System.out.println("Serializing ErrorMessage: "+message);
    ServletOutputStream servletOutputStream = response.getOutputStream();
    ErrorMessage errorMessage = new ErrorMessage(new MessageException(message));
    errorMessage.faultCode = "Error";
    seriaizeCompressedAmf3(errorMessage, servletOutputStream);
}
/**
 * Builds the SerializationContext used for all AMF3 (de)serialization in
 * this servlet: legacy-compatibility flags disabled, property errors
 * ignored rather than logged.
 * NOTE(review): SerializationContext.getSerializationContext() is presumably
 * per-thread state -- confirm against the BlazeDS docs before caching the
 * returned context across threads.
 */
protected static SerializationContext getSerializationContext()
{
    SerializationContext context = SerializationContext.getSerializationContext();
    // set serialization context properties
    context.enableSmallMessages = true;
    context.instantiateTypes = true;
    context.supportRemoteClass = true;
    context.legacyCollection = false;
    context.legacyMap = false;
    context.legacyXMLDocument = false;
    context.legacyXMLNamespaces = false;
    context.legacyThrowable = false;
    context.legacyBigNumbers = false;
    context.restoreReferences = false;
    context.logPropertyErrors = false;
    context.ignorePropertyErrors = true;
    return context;
}
// Serialize a Java Object to AMF3 ByteArray
// The AMF3 bytes are written through a DeflaterOutputStream, so the output is
// zlib-compressed.  Any exception is caught and printed rather than
// propagated, which leaves the response incomplete on failure.
// NOTE(review): the method name ("seriaize") is a typo, kept because callers
// (sendError(), invokeMethod(), subclasses) use this exact name.
protected void seriaizeCompressedAmf3(Object objToSerialize, ServletOutputStream servletOutputStream)
{
    try
    {
        SerializationContext context = getSerializationContext();
        DeflaterOutputStream deflaterOutputStream = new DeflaterOutputStream(servletOutputStream);
        Amf3Output amf3Output = new Amf3Output(context);
        amf3Output.setOutputStream(deflaterOutputStream); // compress
        amf3Output.writeObject(objToSerialize);
        amf3Output.flush();
        deflaterOutputStream.close(); // this is necessary to finish the compression
        //amf3Output.close(); //not closing output stream -- see http://viveklakhanpal.wordpress.com/2010/07/01/error-2032ioerror/
    }
    catch (Exception e)
    {
        e.printStackTrace();
    }
}
// De-serialize a ByteArray/AMF3/Flex object to a Java object
// The input is expected to be zlib-compressed AMF3 (the inverse of
// seriaizeCompressedAmf3()).
// NOTE(review): if readObject() throws, amf3Input is never closed; consider
// a try/finally if stream leakage becomes a concern.
protected Object deseriaizeCompressedAmf3(InputStream inputStream) throws ClassNotFoundException, IOException
{
    Object deSerializedObj = null;
    SerializationContext context = getSerializationContext();
    InflaterInputStream inflaterInputStream = new InflaterInputStream(inputStream);
    Amf3Input amf3Input = new Amf3Input(context);
    amf3Input.setInputStream(inflaterInputStream); // uncompress
    deSerializedObj = amf3Input.readObject();
    amf3Input.close();
    return deSerializedObj;
}
}
|
WeaveServices/src/weave/servlets/GenericServlet.java
|
/*
Weave (Web-based Analysis and Visualization Environment)
Copyright (C) 2008-2011 University of Massachusetts Lowell
This file is a part of Weave.
Weave is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License, Version 3,
as published by the Free Software Foundation.
Weave is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Weave. If not, see <http://www.gnu.org/licenses/>.
*/
package weave.servlets;
import java.io.IOException;
import java.io.InputStream;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Vector;
import java.util.zip.DeflaterOutputStream;
import java.util.zip.InflaterInputStream;
import javax.servlet.ServletException;
import javax.servlet.ServletOutputStream;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import weave.utils.CSVParser;
import weave.utils.ListUtils;
import com.thoughtworks.paranamer.BytecodeReadingParanamer;
import com.thoughtworks.paranamer.Paranamer;
import flex.messaging.MessageException;
import flex.messaging.io.SerializationContext;
import flex.messaging.io.amf.ASObject;
import flex.messaging.io.amf.Amf3Input;
import flex.messaging.io.amf.Amf3Output;
import flex.messaging.messages.ErrorMessage;
/**
* This class provides a servlet interface to a set of functions.
* The functions may be invoked using URL parameters via HTTP GET or AMF3-serialized objects via HTTP POST.
* Currently, the result of calling a function is given as an AMF3-serialized object.
*
* TODO: Provide optional JSON output.
*
* Not all objects will be supported automatically.
* GenericServlet supports basic AMF3-serialized objects such as String,Object,Array.
*
* The following mappings work:
* Flex Array -> Java Object[]
* Flex Object -> Java Map<String,Object>
* Flex String -> Java String
* Flex Boolean -> Java boolean
* Flex Number -> Java double
*
* The following Java parameter types are supported:
* Boolean,
* String, String[], String[][],
* Object, Object[],
* double[], double[][],
* Map<String,Object>, List
*
* TODO: Add support for more common parameter types.
*
* @author skota
* @author adufilie
*/
public class GenericServlet extends HttpServlet
{
private static final long serialVersionUID = 1L;
/**
* This is the name of the URL parameter corresponding to the method name.
* Subclasses can change this to something else if there is a conflict with another parameter.
*/
protected String METHOD_PARAM_NAME = "methodName";
private Map<String, ExposedMethod> methodMap = new HashMap<String, ExposedMethod>(); //Key: methodName
private Paranamer paranamer = new BytecodeReadingParanamer(); // this gets parameter names from Methods
/**
* This class contains a Method with its parameter names and class instance.
*/
private class ExposedMethod
{
public ExposedMethod(Object instance, Method method, String[] paramNames)
{
this.instance = instance;
this.method = method;
this.paramNames = paramNames;
}
public Object instance;
public Method method;
public String[] paramNames;
}
/**
* Default constructor.
* This initializes all public methods defined in a class extending GenericServlet.
*/
protected GenericServlet()
{
super();
initLocalMethods();
}
/**
* @param serviceObjects The objects to invoke methods on.
*/
protected GenericServlet(Object ...serviceObjects)
{
super();
initLocalMethods();
for (Object serviceObject : serviceObjects)
initAllMethods(serviceObject);
}
/**
* @param serviceObject The object to invoke methods on.
* @param methodParamName The name of the URL parameter specifying the method name.
*/
protected GenericServlet(String methodParamName, Object ...serviceObjects)
{
super();
initLocalMethods();
this.METHOD_PARAM_NAME = methodParamName;
for (Object serviceObject : serviceObjects)
initAllMethods(serviceObject);
}
/**
* This function will expose all the public methods of a class as servlet methods.
* @param serviceObject The object containing public methods to be exposed by the servlet.
*/
protected void initLocalMethods()
{
initAllMethods(this);
}
/**
* This function will expose all the declared public methods of a class as servlet methods,
* except methods that match those declared by GenericServlet or a superclass of GenericServlet.
* @param serviceObject The object containing public methods to be exposed by the servlet.
*/
protected void initAllMethods(Object serviceObject)
{
Method[] genericServletMethods = GenericServlet.class.getMethods();
Method[] declaredMethods = serviceObject.getClass().getDeclaredMethods();
for (int i = declaredMethods.length - 1; i >= 0; i--)
{
Method declaredMethod = declaredMethods[i];
boolean shouldIgnore = false;
for (Method genericServletMethod : genericServletMethods)
{
if (declaredMethod.getName().equals(genericServletMethod.getName()) &&
Arrays.equals(declaredMethod.getParameterTypes(), genericServletMethod.getParameterTypes()) )
{
shouldIgnore = true;
break;
}
}
if (!shouldIgnore)
initMethod(serviceObject, declaredMethod);
}
// for debugging
printExposedMethods();
}
/**
* @param serviceObject The instance of an object to use in the servlet.
* @param methodName The method to expose on serviceObject.
*/
synchronized protected void initMethod(Object serviceObject, Method method)
{
// only expose public methods
if (!Modifier.isPublic(method.getModifiers()))
return;
String methodName = method.getName();
if (methodMap.containsKey(methodName))
{
methodMap.put(methodName, null);
System.err.println(String.format(
"Method %s.%s will not be supported because there are multiple definitions.",
this.getClass().getName(), methodName
));
}
else
{
String[] paramNames = null;
paramNames = paranamer.lookupParameterNames(method, false); // returns null if not found
methodMap.put(methodName, new ExposedMethod(serviceObject, method, paramNames));
}
}
/**
 * Prints the list of exposed servlet methods (and unsupported duplicate
 * names) to standard output, sorted by method name.  Debugging aid only.
 */
protected void printExposedMethods()
{
    // Use StringBuilder instead of repeated String concatenation in the loop.
    StringBuilder output = new StringBuilder();
    List<String> methodNames = new Vector<String>(methodMap.keySet());
    Collections.sort(methodNames);
    for (String methodName : methodNames)
    {
        ExposedMethod m = methodMap.get(methodName);
        if (m != null)
            output.append(String.format(
                "Exposed servlet method: %s.%s\n",
                m.instance.getClass().getName(),
                formatFunctionSignature(
                    m.method.getName(),
                    m.method.getParameterTypes(),
                    m.paramNames
                )
            ));
        else
            // BUGFIX: a trailing newline was missing here, gluing this entry
            // to the next line of output.
            output.append("Not exposed: ").append(methodName).append('\n');
    }
    System.out.print(output);
}
@SuppressWarnings("rawtypes")
protected void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException
{
try
{
synchronized (servletOutputStreamMap)
{
servletOutputStreamMap.put(Thread.currentThread(), response.getOutputStream());
}
try
{
Object obj = deseriaizeCompressedAmf3(request.getInputStream());
String methodName = (String) ((ASObject)obj).get(METHOD_PARAM_NAME);
Object methodParameters = ((ASObject)obj).get("methodParameters");
if(methodParameters instanceof Object[])
invokeMethod(methodName, (Object[])methodParameters, request, response);
else
invokeMethod(methodName, (Map)methodParameters, request, response);
}
catch (Exception e)
{
e.printStackTrace();
sendError(response, e.getMessage());
}
}
finally
{
synchronized (servletOutputStreamMap)
{
servletOutputStreamMap.remove(Thread.currentThread());
}
}
}
@SuppressWarnings({ "unchecked", "rawtypes" })
protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException
{
try
{
synchronized (servletOutputStreamMap)
{
servletOutputStreamMap.put(Thread.currentThread(), response.getOutputStream());
}
List<String> urlParamNames = Collections.list(request.getParameterNames());
// Read Method Name from URL
String methodName = request.getParameter(METHOD_PARAM_NAME);
HashMap<String, String> params = new HashMap();
for(String paramName: urlParamNames)
params.put(paramName, request.getParameter(paramName));
invokeMethod(methodName, params, request, response);
}
finally
{
synchronized (servletOutputStreamMap)
{
servletOutputStreamMap.remove(Thread.currentThread());
}
}
}
/**
* This maps a thread to the corresponding ServletOutputStream for the doGet() or doPost() call that thread is handling.
*/
private Map<Thread,ServletOutputStream> servletOutputStreamMap = new HashMap<Thread,ServletOutputStream>();
/**
* This function retrieves the ServletOutputStream associated with the current thread's doGet() or doPost() call.
* In a public function with a void return type, you can use this ServletOutputStream for full control over the output.
*/
protected ServletOutputStream getServletOutputStream()
{
synchronized (servletOutputStreamMap)
{
return servletOutputStreamMap.get(Thread.currentThread());
}
}
@SuppressWarnings({ "unchecked", "rawtypes" })
private void invokeMethod(String methodName, Map params, HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException
{
if(!methodMap.containsKey(methodName) || methodMap.get(methodName) == null)
{
sendError(response, String.format("Method \"%s\" not supported.", methodName));
return;
}
ExposedMethod exposedMethod = methodMap.get(methodName);
String[] argNames = exposedMethod.paramNames;
Class[] argTypes = exposedMethod.method.getParameterTypes();
Object[] argValues = new Object[argNames == null ? 0 : argNames.length];
Map extraParameters = null; // parameters that weren't mapped directly to method arguments
// For each method parameter, get the corresponding url parameter value.
if (argNames != null && params != null)
{
//TODO: check why params is null
for (Object parameterName : params.keySet())
{
Object parameterValue = params.get(parameterName);
int index = ListUtils.findString((String)parameterName, argNames);
if (index >= 0)
{
argValues[index] = parameterValue;
}
else if (!parameterName.equals(METHOD_PARAM_NAME))
{
if (extraParameters == null)
extraParameters = new HashMap();
extraParameters.put(parameterName, parameterValue);
}
}
}
// support for a function having a single Map<String,String> parameter
// see if we can find a Map arg. If so, set it to extraParameters
if (argTypes != null)
{
for (int i = 0; i < argTypes.length; i++)
{
if (argTypes[i] == Map.class)
{
// avoid passing a null Map to the function
if (extraParameters == null)
extraParameters = new HashMap<String,String>();
argValues[i] = extraParameters;
extraParameters = null;
break;
}
}
}
if (extraParameters != null)
{
System.out.println("Received servlet request: " + methodName + Arrays.asList(argValues));
System.out.println("Unused parameters: "+extraParameters.entrySet());
}
invokeMethod(methodName, argValues, request, response);
}
/**
 * @param methodName The name of the function to invoke.
 * @param methodParameters A list of input parameters for the method. Values will be cast to the appropriate types if necessary.
 * @param request This parameter is the same from doPost() and doGet().
 * @param response This parameter is the same from doPost() and doGet().
 * @throws ServletException
 * @throws IOException
 */
@SuppressWarnings("rawtypes")
private void invokeMethod(String methodName, Object[] methodParameters, HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException
{
    // get method by name
    ExposedMethod exposedMethod = methodMap.get(methodName);
    if (exposedMethod == null)
    {
        sendError(response, "Unknown method: "+methodName);
        // BUGFIX: without this return, the lines below dereference the null
        // exposedMethod and throw a NullPointerException.
        return;
    }
    // cast input values to appropriate types if necessary
    Class[] expectedArgTypes = exposedMethod.method.getParameterTypes();
    if (expectedArgTypes.length == methodParameters.length)
    {
        for (int index = 0; index < methodParameters.length; index++)
            methodParameters[index] = castParameter(methodParameters[index], expectedArgTypes[index]);
    }
    // prepare to output the result of the method call
    ServletOutputStream servletOutputStream = response.getOutputStream();
    // Invoke the method on the object with the arguments
    try
    {
        Object result = exposedMethod.method.invoke(exposedMethod.instance, methodParameters);
        // void methods are expected to write their own output
        // (see getServletOutputStream()).
        if (exposedMethod.method.getReturnType() != void.class)
            seriaizeCompressedAmf3(result, servletOutputStream);
    }
    catch (InvocationTargetException e)
    {
        // Report the underlying cause thrown by the target method.
        e.getCause().printStackTrace();
        sendError(response, e.getCause().getMessage());
    }
    catch (IllegalArgumentException e)
    {
        // Argument count/type mismatch -- tell the client what was expected.
        String error = e.getMessage() + "\n" +
            "Expected: " + formatFunctionSignature(methodName, expectedArgTypes, exposedMethod.paramNames) + "\n" +
            "Received: " + formatFunctionSignature(methodName, methodParameters, null);
        System.out.println(error);
        sendError(response, error);
    }
    catch (Exception e)
    {
        System.out.println(methodName + (List)Arrays.asList(methodParameters));
        e.printStackTrace();
        sendError(response, e.getMessage());
    }
}
/**
 * Casts a single deserialized AMF3 value to the parameter type the target
 * method expects.  Supported conversions: String to boolean/Boolean,
 * CSV String to String[]/List, and Object[] to List, Object[][],
 * String[], String[][], double[], double[][].
 * Returns the value unchanged when no conversion applies.
 */
@SuppressWarnings({ "unchecked", "rawtypes" })
private Object castParameter(Object value, Class<?> expectedType)
{
    // if given value is a String, check if the function is expecting a different type
    if (value instanceof String)
    {
        if (expectedType == boolean.class || expectedType == Boolean.class)
            return ((String)(value)).equalsIgnoreCase("true");
        if (expectedType == String[].class)
        {
            String[][] table = CSVParser.defaultParser.parseCSV((String)value);
            // empty CSV input yields an empty array, not an AIOOBE
            return table.length == 0 ? new String[0] : table[0];
        }
        if (expectedType == List.class)
            return Arrays.asList(CSVParser.defaultParser.parseCSV((String)value)[0]);
    }
    else if (value != null)
    {
        if (value instanceof Boolean && expectedType == boolean.class)
            return (boolean)(Boolean)value;
        if (value.getClass() == Object[].class)
        {
            Object[] valueArray = (Object[])value;
            if (expectedType == List.class)
                return ListUtils.copyArrayToList(valueArray, new Vector());
            if (expectedType == Object[][].class)
            {
                Object[][] valueMatrix = new Object[valueArray.length][];
                for (int i = 0; i < valueArray.length; i++)
                    valueMatrix[i] = (Object[])valueArray[i];
                return valueMatrix;
            }
            if (expectedType == String[][].class)
            {
                String[][] valueMatrix = new String[valueArray.length][];
                for (int i = 0; i < valueArray.length; i++)
                {
                    // cast Objects to Strings
                    Object[] objectArray = (Object[])valueArray[i];
                    valueMatrix[i] = ListUtils.copyStringArray(objectArray, new String[objectArray.length]);
                }
                return valueMatrix;
            }
            if (expectedType == String[].class)
                return ListUtils.copyStringArray(valueArray, new String[valueArray.length]);
            if (expectedType == double[][].class)
            {
                double[][] valueMatrix = new double[valueArray.length][];
                for (int i = 0; i < valueArray.length; i++)
                {
                    // cast Objects to doubles
                    Object[] objectArray = (Object[])valueArray[i];
                    valueMatrix[i] = ListUtils.copyDoubleArray(objectArray, new double[objectArray.length]);
                }
                return valueMatrix;
            }
            if (expectedType == double[].class)
                return ListUtils.copyDoubleArray(valueArray, new double[valueArray.length]);
        }
    }
    return value;
}
/**
 * This function formats a Java function signature as a String.
 * @param methodName The name of the method.
 * @param paramValuesOrTypes A list of Class objects or arbitrary Objects (possibly null) to get the class names from.
 * @param paramNames The names of the parameters, may be null.
 * @return A readable Java function signature.
 */
private String formatFunctionSignature(String methodName, Object[] paramValuesOrTypes, String[] paramNames)
{
    // don't use paramNames if the length doesn't match the paramValuesOrTypes length.
    if (paramNames != null && paramNames.length != paramValuesOrTypes.length)
        paramNames = null;
    List<String> names = new Vector<String>(paramValuesOrTypes.length);
    for (int i = 0; i < paramValuesOrTypes.length; i++)
    {
        Object item = paramValuesOrTypes[i];
        String name;
        // BUGFIX: guard against null elements.  This method is called with raw
        // received parameter values (which may contain null) from the
        // IllegalArgumentException handler, and the old code unconditionally
        // dereferenced them via getClass(), throwing NullPointerException.
        if (item == null)
            name = "null";
        else if (item instanceof Class)
            name = ((Class<?>)item).getName();
        else
            name = item.getClass().getName();
        // decode output of Class.getName()
        while (name.charAt(0) == '[') // array type
        {
            name = name.substring(1) + "[]";
            // decode element type encoding
            String type = "";
            switch (name.charAt(0))
            {
                case 'Z': type = "boolean"; break;
                case 'B': type = "byte"; break;
                case 'C': type = "char"; break;
                case 'D': type = "double"; break;
                case 'F': type = "float"; break;
                case 'I': type = "int"; break;
                case 'J': type = "long"; break;
                case 'S': type = "short"; break;
                case 'L':
                    // remove ';'
                    name = name.replace(";", "");
                    break;
                default: continue;
            }
            // remove first char encoding
            name = type + name.substring(1);
        }
        // hide package names
        if (name.indexOf('.') >= 0)
            name = name.substring(name.lastIndexOf('.') + 1);
        if (paramNames != null)
            name += " " + paramNames[i];
        names.add(name);
    }
    String result = names.toString();
    return String.format("%s(%s)", methodName, result.substring(1, result.length() - 1));
}
private void sendError(HttpServletResponse response, String message) throws IOException
{
//response.setHeader("Cache-Control", "no-cache");
//response.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, message);
System.out.println("Serializing ErrorMessage: "+message);
ServletOutputStream servletOutputStream = response.getOutputStream();
ErrorMessage errorMessage = new ErrorMessage(new MessageException(message));
errorMessage.faultCode = "Error";
seriaizeCompressedAmf3(errorMessage, servletOutputStream);
}
protected static SerializationContext getSerializationContext()
{
SerializationContext context = SerializationContext.getSerializationContext();
// set serialization context properties
context.enableSmallMessages = true;
context.instantiateTypes = true;
context.supportRemoteClass = true;
context.legacyCollection = false;
context.legacyMap = false;
context.legacyXMLDocument = false;
context.legacyXMLNamespaces = false;
context.legacyThrowable = false;
context.legacyBigNumbers = false;
context.restoreReferences = false;
context.logPropertyErrors = false;
context.ignorePropertyErrors = true;
return context;
}
// Serialize a Java Object to AMF3 ByteArray
protected void seriaizeCompressedAmf3(Object objToSerialize, ServletOutputStream servletOutputStream)
{
try
{
SerializationContext context = getSerializationContext();
DeflaterOutputStream deflaterOutputStream = new DeflaterOutputStream(servletOutputStream);
Amf3Output amf3Output = new Amf3Output(context);
amf3Output.setOutputStream(deflaterOutputStream); // compress
amf3Output.writeObject(objToSerialize);
amf3Output.flush();
deflaterOutputStream.close(); // this is necessary to finish the compression
//amf3Output.close(); //not closing output stream -- see http://viveklakhanpal.wordpress.com/2010/07/01/error-2032ioerror/
}
catch (Exception e)
{
e.printStackTrace();
}
}
// De-serialize a ByteArray/AMF3/Flex object to a Java object
/**
 * Reads one deflate-compressed AMF3 object from the given input stream and returns
 * the deserialized Java object.
 *
 * @param inputStream source of the compressed AMF3 bytes
 * @return the decoded object graph
 * @throws ClassNotFoundException if a serialized class cannot be resolved
 * @throws IOException            if reading or inflating the stream fails
 */
protected Object deseriaizeCompressedAmf3(InputStream inputStream) throws ClassNotFoundException, IOException
{
	Amf3Input amf3Input = new Amf3Input(getSerializationContext());
	// wrap the raw stream so the AMF3 reader sees the inflated bytes
	amf3Input.setInputStream(new InflaterInputStream(inputStream));
	Object result = amf3Input.readObject();
	amf3Input.close();
	return result;
}
}
|
To avoid ArrayIndexOutOfBoundsException, no longer creating a zero-length argValues array.
Change-Id: Ib87e0bfbecca4fad8aa647068b47cacb263f0bd6
|
WeaveServices/src/weave/servlets/GenericServlet.java
|
To avoid ArrayIndexOutOfBoundsException, no longer creating zero-length argValues array.
|
|
Java
|
agpl-3.0
|
3bc617ae6706e3064f7125d9e1451822d130a655
| 0
|
ErmiasG/hopsworks,AlexHopsworks/hopsworks,AlexHopsworks/hopsworks,AlexHopsworks/hopsworks,ErmiasG/hopsworks,AlexHopsworks/hopsworks,ErmiasG/hopsworks,AlexHopsworks/hopsworks,ErmiasG/hopsworks,ErmiasG/hopsworks,ErmiasG/hopsworks,AlexHopsworks/hopsworks
|
/*
* Changes to this file committed after and not including commit-id: ccc0d2c5f9a5ac661e60e6eaf138de7889928b8b
* are released under the following license:
*
* This file is part of Hopsworks
* Copyright (C) 2018, Logical Clocks AB. All rights reserved
*
* Hopsworks is free software: you can redistribute it and/or modify it under the terms of
* the GNU Affero General Public License as published by the Free Software Foundation,
* either version 3 of the License, or (at your option) any later version.
*
* Hopsworks is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
* without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
* PURPOSE. See the GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License along with this program.
* If not, see <https://www.gnu.org/licenses/>.
*
* Changes to this file committed before and including commit-id: ccc0d2c5f9a5ac661e60e6eaf138de7889928b8b
* are released under the following license:
*
* Copyright (C) 2013 - 2018, Logical Clocks AB and RISE SICS AB. All rights reserved
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of this
* software and associated documentation files (the "Software"), to deal in the Software
* without restriction, including without limitation the rights to use, copy, modify, merge,
* publish, distribute, sublicense, and/or sell copies of the Software, and to permit
* persons to whom the Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all copies or
* substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING
* BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
* DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package io.hops.hopsworks.common.util;
import com.google.common.base.Splitter;
import com.google.common.base.Strings;
import io.hops.hopsworks.common.dao.user.UserFacade;
import io.hops.hopsworks.common.dataset.util.CompressionInfo;
import io.hops.hopsworks.common.dela.AddressJSON;
import io.hops.hopsworks.common.dela.DelaClientType;
import io.hops.hopsworks.common.hdfs.DistributedFileSystemOps;
import io.hops.hopsworks.common.provenance.core.Provenance;
import io.hops.hopsworks.common.provenance.core.dto.ProvTypeDTO;
import io.hops.hopsworks.exceptions.ProvenanceException;
import io.hops.hopsworks.persistence.entity.project.PaymentType;
import io.hops.hopsworks.persistence.entity.user.Users;
import io.hops.hopsworks.persistence.entity.util.Variables;
import io.hops.hopsworks.persistence.entity.util.VariablesVisibility;
import io.hops.hopsworks.restutils.RESTLogLevel;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import javax.ejb.ConcurrencyManagement;
import javax.ejb.ConcurrencyManagementType;
import javax.ejb.EJB;
import javax.ejb.Singleton;
import javax.ejb.Startup;
import javax.persistence.EntityManager;
import javax.persistence.NoResultException;
import javax.persistence.PersistenceContext;
import java.io.File;
import java.io.IOException;
import java.io.Serializable;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLClassLoader;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.nio.file.Paths;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.Set;
import java.util.StringTokenizer;
import java.util.concurrent.TimeUnit;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
@Singleton
@Startup
@ConcurrencyManagement(ConcurrencyManagementType.BEAN)
public class Settings implements Serializable {
private static final Logger LOGGER = Logger.getLogger(Settings.class.
getName());
@EJB
private UserFacade userFacade;
@EJB
private OSProcessExecutor osProcessExecutor;
@PersistenceContext(unitName = "kthfsPU")
private EntityManager em;
private static final Map<String, TimeUnit> TIME_SUFFIXES;
static {
TIME_SUFFIXES = new HashMap<>(5);
TIME_SUFFIXES.put("ms", TimeUnit.MILLISECONDS);
TIME_SUFFIXES.put("s", TimeUnit.SECONDS);
TIME_SUFFIXES.put("m", TimeUnit.MINUTES);
TIME_SUFFIXES.put("h", TimeUnit.HOURS);
TIME_SUFFIXES.put("d", TimeUnit.DAYS);
}
private static final Pattern TIME_CONF_PATTERN = Pattern.compile("([0-9]+)([a-z]+)?");
public static final String AGENT_EMAIL = "kagent@hops.io";
/**
* Global Variables taken from the DB
*/
private static final String VARIABLE_ADMIN_EMAIL = "admin_email";
private static final String VARIABLE_PYPI_REST_ENDPOINT = "pypi_rest_endpoint";
private static final String VARIABLE_PYPI_INDEXER_TIMER_INTERVAL = "pypi_indexer_timer_interval";
private static final String VARIABLE_PYPI_INDEXER_TIMER_ENABLED = "pypi_indexer_timer_enabled";
private static final String VARIABLE_PYPI_SIMPLE_ENDPOINT = "pypi_simple_endpoint";
private static final String VARIABLE_PYTHON_LIBRARY_UPDATES_MONITOR_INTERVAL =
"python_library_updates_monitor_interval";
private static final String VARIABLE_PYTHON_KERNEL = "python_kernel";
private static final String VARIABLE_HADOOP_VERSION = "hadoop_version";
private static final String VARIABLE_KIBANA_IP = "kibana_ip";
private static final String VARIABLE_LOCALHOST = "localhost";
private static final String VARIABLE_REQUESTS_VERIFY = "requests_verify";
private static final String VARIABLE_CLOUD= "cloud";
private static final String VARIABLE_OPENSEARCH_IP = "elastic_ip";
private static final String VARIABLE_OPENSEARCH_PORT = "elastic_port";
private static final String VARIABLE_OPENSEARCH_REST_PORT = "elastic_rest_port";
private static final String VARIABLE_OPENSEARCH_LOGS_INDEX_EXPIRATION = "elastic_logs_index_expiration";
private static final String VARIABLE_SPARK_USER = "spark_user";
private static final String VARIABLE_HDFS_SUPERUSER = "hdfs_user";
private static final String VARIABLE_HOPSWORKS_USER = "hopsworks_user";
private static final String VARIABLE_JUPYTER_GROUP = "jupyter_group";
private static final String VARIABLE_STAGING_DIR = "staging_dir";
private static final String VARIABLE_AIRFLOW_DIR = "airflow_dir";
private static final String VARIABLE_JUPYTER_DIR = "jupyter_dir";
private static final String VARIABLE_JUPYTER_WS_PING_INTERVAL = "jupyter_ws_ping_interval";
private static final String VARIABLE_SPARK_DIR = "spark_dir";
private static final String VARIABLE_FLINK_DIR = "flink_dir";
private static final String VARIABLE_FLINK_USER = "flink_user";
private static final String VARIABLE_HADOOP_DIR = "hadoop_dir";
private static final String VARIABLE_HOPSWORKS_DIR = "hopsworks_dir";
private static final String VARIABLE_SUDOERS_DIR = "sudoers_dir";
private static final String VARIABLE_YARN_DEFAULT_QUOTA = "yarn_default_quota";
private static final String VARIABLE_HDFS_DEFAULT_QUOTA = "hdfs_default_quota";
private static final String VARIABLE_PROJECT_PAYMENT_TYPE = "yarn_default_payment_type";
private static final String VARIABLE_HDFS_BASE_STORAGE_POLICY = "hdfs_base_storage_policy";
private static final String VARIABLE_HDFS_LOG_STORAGE_POLICY = "hdfs_log_storage_policy";
private static final String VARIABLE_MAX_NUM_PROJ_PER_USER
= "max_num_proj_per_user";
private static final String VARIABLE_RESERVED_PROJECT_NAMES = "reserved_project_names";
private static final String VARIABLE_HOPSWORKS_ENTERPRISE = "hopsworks_enterprise";
private static final String VARIABLE_SPARK_EXECUTOR_MIN_MEMORY = "spark_executor_min_memory";
// HIVE configuration variables
private static final String VARIABLE_HIVE_SUPERUSER = "hive_superuser";
private static final String VARIABLE_HIVE_WAREHOUSE = "hive_warehouse";
private static final String VARIABLE_HIVE_SCRATCHDIR = "hive_scratchdir";
private static final String VARIABLE_HIVE_SCRATCHDIR_DELAY = "hive_scratchdir_delay";
private static final String VARIABLE_HIVE_SCRATCHDIR_CLEANER_INTERVAL = "hive_scratchdir_cleaner_interval";
private static final String VARIABLE_HIVE_DEFAULT_QUOTA = "hive_default_quota";
private static final String VARIABLE_TWOFACTOR_AUTH = "twofactor_auth";
private static final String VARIABLE_TWOFACTOR_EXCLUD = "twofactor-excluded-groups";
private static final String VARIABLE_KAFKA_DIR = "kafka_dir";
private static final String VARIABLE_KAFKA_USER = "kafka_user";
private static final String VARIABLE_KAFKA_MAX_NUM_TOPICS = "kafka_max_num_topics";
private static final String VARIABLE_FILE_PREVIEW_IMAGE_SIZE
= "file_preview_image_size";
private static final String VARIABLE_FILE_PREVIEW_TXT_SIZE
= "file_preview_txt_size";
private static final String VARIABLE_HOPS_RPC_TLS = "hops_rpc_tls";
public static final String ERASURE_CODING_CONFIG = "erasure-coding-site.xml";
private static final String VARIABLE_KAFKA_NUM_PARTITIONS
= "kafka_num_partitions";
private static final String VARIABLE_KAFKA_NUM_REPLICAS = "kafka_num_replicas";
private static final String VARIABLE_HOPSWORKS_SSL_MASTER_PASSWORD = "hopsworks_master_password";
private static final String VARIABLE_ANACONDA_DIR = "anaconda_dir";
private static final String VARIABLE_ANACONDA_ENABLED = "anaconda_enabled";
private static final String VARIABLE_ANACONDA_DEFAULT_REPO = "conda_default_repo";
private static final String VARIABLE_DOWNLOAD_ALLOWED = "download_allowed";
private static final String VARIABLE_HOPSEXAMPLES_VERSION = "hopsexamples_version";
private static final String VARIABLE_KAGENT_USER = "kagent_user";
private static final String VARIABLE_KAGENT_LIVENESS_MONITOR_ENABLED = "kagent_liveness_monitor_enabled";
private static final String VARIABLE_KAGENT_LIVENESS_THRESHOLD = "kagent_liveness_threshold";
private static final String VARIABLE_RESOURCE_DIRS = "resources";
private static final String VARIABLE_CERTS_DIRS = "certs_dir";
private static final String VARIABLE_MAX_STATUS_POLL_RETRY = "max_status_poll_retry";
private static final String VARIABLE_CERT_MATER_DELAY = "cert_mater_delay";
private static final String VARIABLE_WHITELIST_USERS_LOGIN = "whitelist_users";
private static final String VARIABLE_VERIFICATION_PATH = "verification_endpoint";
private static final String VARIABLE_FIRST_TIME_LOGIN = "first_time_login";
private static final String VARIABLE_CERTIFICATE_USER_VALID_DAYS = "certificate_user_valid_days";
private static final String VARIABLE_SERVICE_DISCOVERY_DOMAIN = "service_discovery_domain";
private static final String VARIABLE_ZOOKEEPER_VERSION = "zookeeper_version";
private static final String VARIABLE_GRAFANA_VERSION = "grafana_version";
private static final String VARIABLE_LOGSTASH_VERSION = "logstash_version";
private static final String VARIABLE_KIBANA_VERSION = "kibana_version";
private static final String VARIABLE_FILEBEAT_VERSION = "filebeat_version";
private static final String VARIABLE_NDB_VERSION = "ndb_version";
private static final String VARIABLE_LIVY_VERSION = "livy_version";
private static final String VARIABLE_HIVE2_VERSION = "hive2_version";
private static final String VARIABLE_TEZ_VERSION = "tez_version";
private static final String VARIABLE_SPARK_VERSION = "spark_version";
private static final String VARIABLE_FLINK_VERSION = "flink_version";
private static final String VARIABLE_EPIPE_VERSION = "epipe_version";
private static final String VARIABLE_DELA_VERSION = "dela_version";
private static final String VARIABLE_KAFKA_VERSION = "kafka_version";
private static final String VARIABLE_OPENSEARCH_VERSION = "elastic_version";
private static final String VARIABLE_TENSORFLOW_VERSION = "tensorflow_version";
private static final String VARIABLE_KUBE_KSERVE_TENSORFLOW_VERSION = "kube_kserve_tensorflow_version";
private static final String VARIABLE_HOPSWORKS_VERSION = "hopsworks_version";
private final static String VARIABLE_LIVY_STARTUP_TIMEOUT = "livy_startup_timeout";
private final static String VARIABLE_USER_SEARCH = "enable_user_search";
private final static String VARIABLE_REJECT_REMOTE_USER_NO_GROUP = "reject_remote_user_no_group";
//Used by RESTException to include devMsg or not in response
private static final String VARIABLE_HOPSWORKS_REST_LOG_LEVEL = "hopsworks_rest_log_level";
/*
* -------------------- Serving ---------------
*/
private static final String VARIABLE_SERVING_MONITOR_INT = "serving_monitor_int";
private static final String VARIABLE_SERVING_CONNECTION_POOL_SIZE = "serving_connection_pool_size";
private static final String VARIABLE_SERVING_MAX_ROUTE_CONNECTIONS = "serving_max_route_connections";
/*
* -------------------- TensorBoard ---------------
*/
private static final String VARIABLE_TENSORBOARD_MAX_RELOAD_THREADS = "tensorboard_max_reload_threads";
/*
* -------------------- Kubernetes ---------------
*/
private static final String VARIABLE_KUBEMASTER_URL = "kube_master_url";
private static final String VARIABLE_KUBE_USER = "kube_user";
private static final String VARIABLE_KUBE_HOPSWORKS_USER = "kube_hopsworks_user";
private static final String VARIABLE_KUBE_CA_CERTFILE = "kube_ca_certfile";
private static final String VARIABLE_KUBE_CLIENT_KEYFILE = "kube_client_keyfile";
private static final String VARIABLE_KUBE_CLIENT_CERTFILE = "kube_client_certfile";
private static final String VARIABLE_KUBE_CLIENT_KEYPASS = "kube_client_keypass";
private static final String VARIABLE_KUBE_TRUSTSTORE_PATH = "kube_truststore_path";
private static final String VARIABLE_KUBE_TRUSTSTORE_KEY = "kube_truststore_key";
private static final String VARIABLE_KUBE_KEYSTORE_PATH = "kube_keystore_path";
private static final String VARIABLE_KUBE_KEYSTORE_KEY = "kube_keystore_key";
private static final String VARIABLE_KUBE_PULL_POLICY = "kube_img_pull_policy";
private static final String VARIABLE_KUBE_API_MAX_ATTEMPTS = "kube_api_max_attempts";
private static final String VARIABLE_KUBE_DOCKER_MAX_MEMORY_ALLOCATION = "kube_docker_max_memory_allocation";
private static final String VARIABLE_KUBE_DOCKER_MAX_GPUS_ALLOCATION = "kube_docker_max_gpus_allocation";
private static final String VARIABLE_KUBE_DOCKER_MAX_CORES_ALLOCATION = "kube_docker_max_cores_allocation";
private static final String VARIABLE_KUBE_INSTALLED = "kubernetes_installed";
private static final String VARIABLE_KUBE_KSERVE_INSTALLED = "kube_kserve_installed";
private static final String VARIABLE_KUBE_KNATIVE_DOMAIN_NAME = "kube_knative_domain_name";
private static final String VARIABLE_KUBE_SERVING_NODE_LABELS = "kube_serving_node_labels";
private static final String VARIABLE_KUBE_SERVING_NODE_TOLERATIONS = "kube_serving_node_tolerations";
private static final String VARIABLE_KUBE_SERVING_MAX_MEMORY_ALLOCATION = "kube_serving_max_memory_allocation";
private static final String VARIABLE_KUBE_SERVING_MAX_CORES_ALLOCATION = "kube_serving_max_cores_allocation";
private static final String VARIABLE_KUBE_SERVING_MAX_GPUS_ALLOCATION = "kube_serving_max_gpus_allocation";
private static final String VARIABLE_KUBE_SERVING_MAX_NUM_INSTANCES = "kube_serving_max_num_instances";
private static final String VARIABLE_KUBE_SERVING_MIN_NUM_INSTANCES = "kube_serving_min_num_instances";
private static final String VARIABLE_KUBE_TAINTED_NODES = "kube_tainted_nodes";
private static final String VARIABLE_KUBE_TAINTED_NODES_MONITOR_INTERVAL =
"kube_node_taints_monitor_interval";
/*
* -------------------- Jupyter ---------------
*/
private static final String VARIABLE_JUPYTER_HOST = "jupyter_host";
private static final String VARIABLE_JUPYTER_ORIGIN_SCHEME = "jupyter_origin_scheme";
// JWT Variables
private static final String VARIABLE_JWT_SIGNATURE_ALGORITHM = "jwt_signature_algorithm";
private static final String VARIABLE_JWT_LIFETIME_MS = "jwt_lifetime_ms";
private static final String VARIABLE_JWT_EXP_LEEWAY_SEC = "jwt_exp_leeway_sec";
private static final String VARIABLE_JWT_SIGNING_KEY_NAME = "jwt_signing_key_name";
private static final String VARIABLE_JWT_ISSUER_KEY = "jwt_issuer";
private static final String VARIABLE_SERVICE_MASTER_JWT = "service_master_jwt";
private static final String VARIABLE_SERVICE_JWT_LIFETIME_MS = "service_jwt_lifetime_ms";
private static final String VARIABLE_SERVICE_JWT_EXP_LEEWAY_SEC = "service_jwt_exp_leeway_sec";
private static final String VARIABLE_CONNECTION_KEEPALIVE_TIMEOUT = "keepalive_timeout";
/* -------------------- Featurestore --------------- */
private static final String VARIABLE_FEATURESTORE_DEFAULT_QUOTA = "featurestore_default_quota";
private static final String VARIABLE_FEATURESTORE_DEFAULT_STORAGE_FORMAT = "featurestore_default_storage_format";
private static final String VARIABLE_FEATURESTORE_JDBC_URL = "featurestore_jdbc_url";
private static final String VARIABLE_ONLINE_FEATURESTORE = "featurestore_online_enabled";
private static final String VARIABLE_FG_PREVIEW_LIMIT = "fg_preview_limit";
private static final String VARIABLE_ONLINE_FEATURESTORE_TS = "featurestore_online_tablespace";
private static final String VARIABLE_FS_JOB_ACTIVITY_TIME = "fs_job_activity_time";
private static final String VARIABLE_ONLINEFS_THREAD_NUMBER = "onlinefs_service_thread_number";
private static final String VARIABLE_HIVE_CONF_PATH = "hive_conf_path";
private static final String VARIABLE_FS_PY_JOB_UTIL_PATH = "fs_py_job_util";
private static final String VARIABLE_FS_JAVA_JOB_UTIL_PATH = "fs_java_job_util";
//OpenSearch Security
private static final String VARIABLE_OPENSEARCH_SECURITY_ENABLED = "elastic_opendistro_security_enabled";
private static final String VARIABLE_OPENSEARCH_HTTPS_ENABLED = "elastic_https_enabled";
private static final String VARIABLE_OPENSEARCH_ADMIN_USER = "elastic_admin_user";
private static final String VARIABLE_OPENSEARCH_SERVICE_LOG_USER = "kibana_service_log_viewer";
private static final String VARIABLE_OPENSEARCH_ADMIN_PASSWORD = "elastic_admin_password";
private static final String VARIABLE_KIBANA_HTTPS_ENABLED = "kibana_https_enabled";
private static final String VARIABLE_OPENSEARCH_JWT_ENABLED = "elastic_jwt_enabled";
private static final String VARIABLE_OPENSEARCH_JWT_URL_PARAMETER = "elastic_jwt_url_parameter";
private static final String VARIABLE_OPENSEARCH_JWT_EXP_MS = "elastic_jwt_exp_ms";
private static final String VARIABLE_KIBANA_MULTI_TENANCY_ENABLED = "kibana_multi_tenancy_enabled";
private static final String VARIABLE_CLIENT_PATH = "client_path";
//Cloud
private static final String VARIABLE_CLOUD_EVENTS_ENDPOINT=
"cloud_events_endpoint";
private static final String VARIABLE_CLOUD_EVENTS_ENDPOINT_API_KEY=
"cloud_events_endpoint_api_key";
/*-----------------------Yarn Docker------------------------*/
private final static String VARIABLE_YARN_RUNTIME = "yarn_runtime";
private final static String VARIABLE_DOCKER_MOUNTS = "docker_mounts";
private final static String VARIABLE_DOCKER_JOB_MOUNTS_LIST = "docker_job_mounts_list";
private final static String VARIABLE_DOCKER_JOB_MOUNT_ALLOWED = "docker_job_mounts_allowed";
private final static String VARIABLE_DOCKER_JOB_UID_STRICT = "docker_job_uid_strict";
private final static String VARIABLE_DOCKER_BASE_IMAGE_PYTHON_NAME = "docker_base_image_python_name";
private final static String VARIABLE_DOCKER_BASE_IMAGE_PYTHON_VERSION = "docker_base_image_python_version";
private final static String VARIABLE_YARN_APP_UID = "yarn_app_uid";
/*-----------------------Jobs - Executions-------------------*/
private final static String VARIABLE_EXECUTIONS_PER_JOB_LIMIT = "executions_per_job_limit";
private final static String VARIABLE_EXECUTIONS_CLEANER_BATCH_SIZE = "executions_cleaner_batch_size";
private final static String VARIABLE_EXECUTIONS_CLEANER_INTERVAL_MS = "executions_cleaner_interval_ms";
/*----------------------Yarn Nodemanager status------------*/
private static final String VARIABLE_CHECK_NODEMANAGERS_STATUS = "check_nodemanagers_status";
/*----------------------- Python ------------------------*/
private final static String VARIABLE_MAX_ENV_YML_BYTE_SIZE = "max_env_yml_byte_size";
//Git
private static final String VARIABLE_GIT_COMMAND_TIMEOUT_MINUTES_DEFAULT = "git_command_timeout_minutes";
/*
* ------------------ QUOTAS ------------------
*/
private static final String QUOTAS_PREFIX = "quotas";
private static final String QUOTAS_FEATUREGROUPS_PREFIX = String.format("%s_featuregroups", QUOTAS_PREFIX);
private static final String VARIABLE_QUOTAS_ONLINE_ENABLED_FEATUREGROUPS = String.format("%s_online_enabled",
QUOTAS_FEATUREGROUPS_PREFIX);
private static final String VARIABLE_QUOTAS_ONLINE_DISABLED_FEATUREGROUPS = String.format("%s_online_disabled",
QUOTAS_FEATUREGROUPS_PREFIX);
private static final String VARIABLE_QUOTAS_TRAINING_DATASETS = String.format("%s_training_datasets", QUOTAS_PREFIX);
private static final String QUOTAS_MODEL_DEPLOYMENTS_PREFIX = String.format("%s_model_deployments", QUOTAS_PREFIX);
private static final String VARIABLE_QUOTAS_RUNNING_MODEL_DEPLOYMENTS = String.format("%s_running",
QUOTAS_MODEL_DEPLOYMENTS_PREFIX);
private static final String VARIABLE_QUOTAS_TOTAL_MODEL_DEPLOYMENTS = String.format("%s_total",
QUOTAS_MODEL_DEPLOYMENTS_PREFIX);
private static final String VARIABLE_QUOTAS_MAX_PARALLEL_EXECUTIONS = String.format("%s_max_parallel_executions",
QUOTAS_PREFIX);
//Docker cgroups
private static final String VARIABLE_DOCKER_CGROUP_ENABLED = "docker_cgroup_enabled";
private static final String VARIABLE_DOCKER_CGROUP_HARD_LIMIT_MEMORY = "docker_cgroup_memory_limit_gb";
private static final String VARIABLE_DOCKER_CGROUP_SOFT_LIMIT_MEMORY = "docker_cgroup_soft_limit_memory_gb";
private static final String VARIABLE_DOCKER_CGROUP_CPU_QUOTA = "docker_cgroup_cpu_quota_percentage";
private static final String VARIABLE_DOCKER_CGROUP_CPU_PERIOD = "docker_cgroup_cpu_period";
private static final String VARIABLE_DOCKER_CGROUP_MONITOR_INTERVAL = "docker_cgroup_monitor_interval";
private static final String VARIABLE_PROMETHEUS_PORT = "prometheus_port";
private static final String VARIABLE_SKIP_NAMESPACE_CREATION =
"kube_skip_namespace_creation";
/**
 * Supported Kubernetes deployment flavors. Each constant carries the lowercase
 * string used to identify it in the corresponding configuration variable.
 */
public enum KubeType {
  Local("local"),
  EKS("eks"),
  AKS("aks");

  // configuration-string spelling of this flavor
  private final String name;

  KubeType(String name) {
    this.name = name;
  }

  /**
   * Maps a configuration string to its KubeType; any unrecognized value
   * falls back to {@link #Local}. Throws NullPointerException for null input,
   * matching the original behavior.
   */
  static KubeType fromString(String str) {
    for (KubeType type : values()) {
      if (str.equals(type.name)) {
        return type;
      }
    }
    return Local;
  }
}
private static final String VARIABLE_KUBE_TYPE = "kube_type";
private static final String VARIABLE_DOCKER_NAMESPACE = "docker_namespace";
private static final String VARIABLE_MANAGED_DOCKER_REGISTRY =
"managed_docker_registry";
// Simple delegate to setStrVar: returns the variable's non-empty value, else the default.
private String setVar(String varName, String defaultValue) {
return setStrVar(varName, defaultValue);
}
/**
 * Looks up a variable by name and returns its value when present and non-empty,
 * otherwise the supplied default.
 */
private String setStrVar(String varName, String defaultValue) {
  return findById(varName)
      .map(Variables::getValue)
      .filter(v -> !Strings.isNullOrEmpty(v))
      .orElse(defaultValue);
}
/**
 * Looks up a variable expected to hold a directory path. Returns its value only when
 * it is non-empty AND names an existing directory on the local filesystem; otherwise
 * the supplied default.
 */
private String setDirVar(String varName, String defaultValue) {
  return findById(varName)
      .map(Variables::getValue)
      .filter(v -> !Strings.isNullOrEmpty(v) && new File(v).isDirectory())
      .orElse(defaultValue);
}
/**
 * Looks up a variable expected to hold an IP address. Returns its value only when it
 * is non-empty and passes Ip.validIp; otherwise the supplied default.
 */
private String setIpVar(String varName, String defaultValue) {
  return findById(varName)
      .map(Variables::getValue)
      .filter(v -> !Strings.isNullOrEmpty(v) && Ip.validIp(v))
      .orElse(defaultValue);
}
/**
 * Looks up a boolean variable. A present, non-empty value is parsed with
 * Boolean.parseBoolean (any value other than "true", case-insensitive, yields false);
 * absent/empty values yield the supplied default.
 */
private Boolean setBoolVar(String varName, Boolean defaultValue) {
  return findById(varName)
      .map(Variables::getValue)
      .filter(v -> !Strings.isNullOrEmpty(v))
      .map(Boolean::parseBoolean)
      .orElse(defaultValue);
}
/**
 * Looks up an integer variable. Returns the parsed value when present, non-empty and
 * numeric; logs a warning and falls back to the default when the value is not a
 * valid integer.
 */
private Integer setIntVar(String varName, Integer defaultValue) {
  Optional<Variables> variable = findById(varName);
  if (!variable.isPresent()) {
    return defaultValue;
  }
  String value = variable.get().getValue();
  if (Strings.isNullOrEmpty(value)) {
    return defaultValue;
  }
  try {
    return Integer.parseInt(value);
  } catch (NumberFormatException ex) {
    LOGGER.log(Level.WARNING,
        "Error - not an integer! " + varName + " should be an integer. Value was " + value);
    return defaultValue;
  }
}
/**
 * Looks up a double variable. Returns the parsed value when present, non-empty and
 * numeric; logs a warning and falls back to the default when the value is not a
 * valid double.
 */
private Double setDoubleVar(String varName, Double defaultValue) {
  Optional<Variables> variable = findById(varName);
  if (!variable.isPresent()) {
    return defaultValue;
  }
  String value = variable.get().getValue();
  if (Strings.isNullOrEmpty(value)) {
    return defaultValue;
  }
  try {
    return Double.parseDouble(value);
  } catch (NumberFormatException ex) {
    LOGGER.log(Level.WARNING, "Error - not a double! " + varName + " should be a double. Value was " + value);
    return defaultValue;
  }
}
/**
 * Looks up a long variable. Returns the parsed value when present, non-empty and
 * numeric; logs a warning and falls back to the default otherwise.
 *
 * Fix: the warning previously said the variable "should be an integer" — corrected
 * to "should be a long" to match the actual expected type.
 *
 * @param varName      variable name to look up
 * @param defaultValue value returned when the variable is absent, empty or malformed
 * @return the parsed long, or {@code defaultValue}
 */
private long setLongVar(String varName, Long defaultValue) {
  Optional<Variables> variable = findById(varName);
  if (variable.isPresent()) {
    String value = variable.get().getValue();
    try {
      if (!Strings.isNullOrEmpty(value)) {
        return Long.parseLong(value);
      }
    } catch (NumberFormatException ex) {
      LOGGER.log(Level.WARNING, "Error - not a long! " + varName + " should be a long. Value was " + value);
    }
  }
  return defaultValue;
}
/**
 * Looks up a REST log-level variable. A present, non-empty value is converted with
 * RESTLogLevel.valueOf (which throws IllegalArgumentException for unknown names,
 * matching the original behavior); absent/empty values yield the supplied default.
 */
private RESTLogLevel setLogLevelVar(String varName, RESTLogLevel defaultValue) {
  return findById(varName)
      .map(Variables::getValue)
      .filter(v -> !Strings.isNullOrEmpty(v))
      .map(RESTLogLevel::valueOf)
      .orElse(defaultValue);
}
/**
 * Looks up a duration variable expressed as "<number><unit>" (e.g. "30s", "5m") and
 * returns it converted to milliseconds; absent/empty values yield the supplied
 * default (already in milliseconds).
 */
private long setMillisecondVar(String varName, Long defaultValue) {
  Optional<Variables> variable = findById(varName);
  if (!variable.isPresent()) {
    return defaultValue;
  }
  String value = variable.get().getValue();
  if (Strings.isNullOrEmpty(value)) {
    return defaultValue;
  }
  // parse the numeric part first, then the unit suffix, as in the original ordering
  long timeValue = getConfTimeValue(value);
  TimeUnit timeUnit = getConfTimeTimeUnit(value);
  return timeUnit.toMillis(timeValue);
}
/**
 * Looks up a payment-type variable. A present, non-empty value is converted with
 * PaymentType.valueOf (which throws IllegalArgumentException for unknown names,
 * matching the original behavior); absent/empty values yield the supplied default.
 */
private PaymentType setPaymentType(String varName, PaymentType defaultValue) {
  return findById(varName)
      .map(Variables::getValue)
      .filter(v -> !Strings.isNullOrEmpty(v))
      .map(PaymentType::valueOf)
      .orElse(defaultValue);
}
// Resolves the variable (or default), splits it on the separator, and returns the
// tokens lower-cased.
// Side effect: stores the raw resolved string in the RESERVED_PROJECT_NAMES_STR
// field (declared elsewhere in this class) before tokenizing.
private Set<String> setStringHashSetLowerCase(String varName, String defaultValue, String separator) {
RESERVED_PROJECT_NAMES_STR = setStrVar(varName, defaultValue);
return setStringHashSetLowerCase(RESERVED_PROJECT_NAMES_STR, separator, true);
}
/**
 * Splits {@code values} on any character in {@code separator} (StringTokenizer
 * semantics: each separator char is a delimiter) and returns the trimmed tokens,
 * lower-cased when {@code toLowerCase} is true.
 */
private Set<String> setStringHashSetLowerCase(String values, String separator, boolean toLowerCase) {
  StringTokenizer tokenizer = new StringTokenizer(values, separator);
  HashSet<String> tokens = new HashSet<>(tokenizer.countTokens());
  for (; tokenizer.hasMoreTokens(); ) {
    String token = tokenizer.nextToken().trim();
    tokens.add(toLowerCase ? token.toLowerCase() : token);
  }
  return tokens;
}
private boolean cached = false;
private void populateCache() {
if (!cached) {
ADMIN_EMAIL = setVar(VARIABLE_ADMIN_EMAIL, ADMIN_EMAIL);
LOCALHOST = setBoolVar(VARIABLE_LOCALHOST, LOCALHOST);
CLOUD = setStrVar(VARIABLE_CLOUD, CLOUD);
REQUESTS_VERIFY = setBoolVar(VARIABLE_REQUESTS_VERIFY, REQUESTS_VERIFY);
PYTHON_KERNEL = setBoolVar(VARIABLE_PYTHON_KERNEL, PYTHON_KERNEL);
TWOFACTOR_AUTH = setVar(VARIABLE_TWOFACTOR_AUTH, TWOFACTOR_AUTH);
TWOFACTOR_EXCLUDE = setVar(VARIABLE_TWOFACTOR_EXCLUD, TWOFACTOR_EXCLUDE);
HOPSWORKS_USER = setVar(VARIABLE_HOPSWORKS_USER, HOPSWORKS_USER);
JUPYTER_GROUP = setVar(VARIABLE_JUPYTER_GROUP, JUPYTER_GROUP);
JUPYTER_ORIGIN_SCHEME = setVar(VARIABLE_JUPYTER_ORIGIN_SCHEME, JUPYTER_ORIGIN_SCHEME);
HDFS_SUPERUSER = setVar(VARIABLE_HDFS_SUPERUSER, HDFS_SUPERUSER);
SPARK_USER = setVar(VARIABLE_SPARK_USER, SPARK_USER);
SPARK_DIR = setDirVar(VARIABLE_SPARK_DIR, SPARK_DIR);
FLINK_USER = setVar(VARIABLE_FLINK_USER, FLINK_USER);
FLINK_DIR = setDirVar(VARIABLE_FLINK_DIR, FLINK_DIR);
STAGING_DIR = setDirVar(VARIABLE_STAGING_DIR, STAGING_DIR);
HOPS_EXAMPLES_VERSION = setVar(VARIABLE_HOPSEXAMPLES_VERSION, HOPS_EXAMPLES_VERSION);
HIVE_SUPERUSER = setStrVar(VARIABLE_HIVE_SUPERUSER, HIVE_SUPERUSER);
HIVE_WAREHOUSE = setStrVar(VARIABLE_HIVE_WAREHOUSE, HIVE_WAREHOUSE);
HIVE_SCRATCHDIR = setStrVar(VARIABLE_HIVE_SCRATCHDIR, HIVE_SCRATCHDIR);
HIVE_SCRATCHDIR_DELAY = setStrVar(VARIABLE_HIVE_SCRATCHDIR_DELAY, HIVE_SCRATCHDIR_DELAY);
HIVE_SCRATCHDIR_CLEANER_INTERVAL = setStrVar(VARIABLE_HIVE_SCRATCHDIR_CLEANER_INTERVAL,
HIVE_SCRATCHDIR_CLEANER_INTERVAL);
HIVE_DB_DEFAULT_QUOTA = setLongVar(VARIABLE_HIVE_DEFAULT_QUOTA, HIVE_DB_DEFAULT_QUOTA);
HADOOP_VERSION = setVar(VARIABLE_HADOOP_VERSION, HADOOP_VERSION);
JUPYTER_DIR = setDirVar(VARIABLE_JUPYTER_DIR, JUPYTER_DIR);
JUPYTER_WS_PING_INTERVAL_MS = setMillisecondVar(VARIABLE_JUPYTER_WS_PING_INTERVAL, JUPYTER_WS_PING_INTERVAL_MS);
HADOOP_DIR = setDirVar(VARIABLE_HADOOP_DIR, HADOOP_DIR);
HOPSWORKS_INSTALL_DIR = setDirVar(VARIABLE_HOPSWORKS_DIR, HOPSWORKS_INSTALL_DIR);
CERTS_DIR = setDirVar(VARIABLE_CERTS_DIRS, CERTS_DIR);
SUDOERS_DIR = setDirVar(VARIABLE_SUDOERS_DIR, SUDOERS_DIR);
CERTIFICATE_USER_VALID_DAYS = setStrVar(VARIABLE_CERTIFICATE_USER_VALID_DAYS, CERTIFICATE_USER_VALID_DAYS);
SERVICE_DISCOVERY_DOMAIN = setStrVar(VARIABLE_SERVICE_DISCOVERY_DOMAIN, SERVICE_DISCOVERY_DOMAIN);
AIRFLOW_DIR = setDirVar(VARIABLE_AIRFLOW_DIR, AIRFLOW_DIR);
String openSearchIps = setStrVar(VARIABLE_OPENSEARCH_IP,
OpenSearchSettings.OPENSEARCH_IP_DEFAULT);
int openSearchPort = setIntVar(VARIABLE_OPENSEARCH_PORT, OpenSearchSettings.OPENSEARCH_PORT_DEFAULT);
int openSearchRestPort = setIntVar(VARIABLE_OPENSEARCH_REST_PORT,
OpenSearchSettings.OPENSEARCH_REST_PORT_DEFAULT);
boolean openSearchSecurityEnabled =
setBoolVar(VARIABLE_OPENSEARCH_SECURITY_ENABLED,
OpenSearchSettings.OPENSEARCH_SECURTIY_ENABLED_DEFAULT);
boolean openSearchHttpsEnabled = setBoolVar(VARIABLE_OPENSEARCH_HTTPS_ENABLED
, OpenSearchSettings.OPENSEARCH_HTTPS_ENABLED_DEFAULT);
String openSearchAdminUser = setStrVar(VARIABLE_OPENSEARCH_ADMIN_USER,
OpenSearchSettings.OPENSEARCH_ADMIN_USER_DEFAULT);
String openSearchServiceLogUser = setStrVar(VARIABLE_OPENSEARCH_SERVICE_LOG_USER,
OpenSearchSettings.OPENSEARCH_SERVICE_LOG_ROLE);
String openSearchAdminPassword = setStrVar(VARIABLE_OPENSEARCH_ADMIN_PASSWORD,
OpenSearchSettings.OPENSEARCH_ADMIN_PASSWORD_DEFAULT);
boolean openSearchJWTEnabled = setBoolVar(VARIABLE_OPENSEARCH_JWT_ENABLED
, OpenSearchSettings.OPENSEARCH_JWT_ENABLED_DEFAULT);
String openSearchJWTUrlParameter = setStrVar(VARIABLE_OPENSEARCH_JWT_URL_PARAMETER,
OpenSearchSettings.OPENSEARCH_JWT_URL_PARAMETER_DEFAULT);
long openSearchJWTEXPMS = setLongVar(VARIABLE_OPENSEARCH_JWT_EXP_MS,
OpenSearchSettings.OPENSEARCH_JWT_EXP_MS_DEFAULT);
OPENSEARCH_SETTINGS = new OpenSearchSettings(openSearchIps, openSearchPort,
openSearchRestPort, openSearchSecurityEnabled, openSearchHttpsEnabled,
openSearchAdminUser, openSearchAdminPassword, openSearchJWTEnabled,
openSearchJWTUrlParameter, openSearchJWTEXPMS, openSearchServiceLogUser);
OpenSearch_LOGS_INDEX_EXPIRATION = setLongVar(VARIABLE_OPENSEARCH_LOGS_INDEX_EXPIRATION,
OpenSearch_LOGS_INDEX_EXPIRATION);
KIBANA_IP = setIpVar(VARIABLE_KIBANA_IP, KIBANA_IP);
KAFKA_MAX_NUM_TOPICS = setIntVar(VARIABLE_KAFKA_MAX_NUM_TOPICS, KAFKA_MAX_NUM_TOPICS);
HOPSWORKS_DEFAULT_SSL_MASTER_PASSWORD = setVar(VARIABLE_HOPSWORKS_SSL_MASTER_PASSWORD,
HOPSWORKS_DEFAULT_SSL_MASTER_PASSWORD);
KAFKA_USER = setVar(VARIABLE_KAFKA_USER, KAFKA_USER);
KAFKA_DIR = setDirVar(VARIABLE_KAFKA_DIR, KAFKA_DIR);
KAFKA_DEFAULT_NUM_PARTITIONS = setIntVar(VARIABLE_KAFKA_NUM_PARTITIONS, KAFKA_DEFAULT_NUM_PARTITIONS);
KAFKA_DEFAULT_NUM_REPLICAS = setIntVar(VARIABLE_KAFKA_NUM_REPLICAS, KAFKA_DEFAULT_NUM_REPLICAS);
YARN_DEFAULT_QUOTA = setIntVar(VARIABLE_YARN_DEFAULT_QUOTA, YARN_DEFAULT_QUOTA);
DEFAULT_PAYMENT_TYPE = setPaymentType(VARIABLE_PROJECT_PAYMENT_TYPE, DEFAULT_PAYMENT_TYPE);
HDFS_DEFAULT_QUOTA_MBs = setLongVar(VARIABLE_HDFS_DEFAULT_QUOTA, HDFS_DEFAULT_QUOTA_MBs);
HDFS_BASE_STORAGE_POLICY = setHdfsStoragePolicy(VARIABLE_HDFS_BASE_STORAGE_POLICY, HDFS_BASE_STORAGE_POLICY);
HDFS_LOG_STORAGE_POLICY = setHdfsStoragePolicy(VARIABLE_HDFS_LOG_STORAGE_POLICY, HDFS_LOG_STORAGE_POLICY);
MAX_NUM_PROJ_PER_USER = setIntVar(VARIABLE_MAX_NUM_PROJ_PER_USER, MAX_NUM_PROJ_PER_USER);
CLUSTER_CERT = setVar(VARIABLE_CLUSTER_CERT, CLUSTER_CERT);
FILE_PREVIEW_IMAGE_SIZE = setIntVar(VARIABLE_FILE_PREVIEW_IMAGE_SIZE, 10000000);
FILE_PREVIEW_TXT_SIZE = setIntVar(VARIABLE_FILE_PREVIEW_TXT_SIZE, 100);
ANACONDA_DIR = setDirVar(VARIABLE_ANACONDA_DIR, ANACONDA_DIR);
ANACONDA_DEFAULT_REPO = setStrVar(VARIABLE_ANACONDA_DEFAULT_REPO, ANACONDA_DEFAULT_REPO);
ANACONDA_ENABLED = Boolean.parseBoolean(setStrVar(
VARIABLE_ANACONDA_ENABLED, ANACONDA_ENABLED.toString()));
KAGENT_USER = setStrVar(VARIABLE_KAGENT_USER, KAGENT_USER);
KAGENT_LIVENESS_MONITOR_ENABLED = setBoolVar(VARIABLE_KAGENT_LIVENESS_MONITOR_ENABLED,
KAGENT_LIVENESS_MONITOR_ENABLED);
KAGENT_LIVENESS_THRESHOLD = setStrVar(VARIABLE_KAGENT_LIVENESS_THRESHOLD, KAGENT_LIVENESS_THRESHOLD);
DOWNLOAD_ALLOWED = Boolean.parseBoolean(setStrVar(VARIABLE_DOWNLOAD_ALLOWED, DOWNLOAD_ALLOWED.toString()));
RESOURCE_DIRS = setStrVar(VARIABLE_RESOURCE_DIRS, RESOURCE_DIRS);
MAX_STATUS_POLL_RETRY = setIntVar(VARIABLE_MAX_STATUS_POLL_RETRY, MAX_STATUS_POLL_RETRY);
HOPS_RPC_TLS = setStrVar(VARIABLE_HOPS_RPC_TLS, HOPS_RPC_TLS);
CERTIFICATE_MATERIALIZER_DELAY = setStrVar(VARIABLE_CERT_MATER_DELAY,
CERTIFICATE_MATERIALIZER_DELAY);
WHITELIST_USERS_LOGIN = setStrVar(VARIABLE_WHITELIST_USERS_LOGIN,
WHITELIST_USERS_LOGIN);
FIRST_TIME_LOGIN = setStrVar(VARIABLE_FIRST_TIME_LOGIN, FIRST_TIME_LOGIN);
VERIFICATION_PATH = setStrVar(VARIABLE_VERIFICATION_PATH, VERIFICATION_PATH);
serviceKeyRotationEnabled = setBoolVar(SERVICE_KEY_ROTATION_ENABLED_KEY, serviceKeyRotationEnabled);
serviceKeyRotationInterval = setStrVar(SERVICE_KEY_ROTATION_INTERVAL_KEY, serviceKeyRotationInterval);
tensorBoardMaxLastAccessed = setIntVar(TENSORBOARD_MAX_LAST_ACCESSED, tensorBoardMaxLastAccessed);
sparkUILogsOffset = setIntVar(SPARK_UI_LOGS_OFFSET, sparkUILogsOffset);
jupyterShutdownTimerInterval = setStrVar(JUPYTER_SHUTDOWN_TIMER_INTERVAL, jupyterShutdownTimerInterval);
checkNodemanagersStatus = setBoolVar(VARIABLE_CHECK_NODEMANAGERS_STATUS, checkNodemanagersStatus);
populateDelaCache();
populateLDAPCache();
ZOOKEEPER_VERSION = setStrVar(VARIABLE_ZOOKEEPER_VERSION, ZOOKEEPER_VERSION);
GRAFANA_VERSION = setStrVar(VARIABLE_GRAFANA_VERSION, GRAFANA_VERSION);
LOGSTASH_VERSION = setStrVar(VARIABLE_LOGSTASH_VERSION, LOGSTASH_VERSION);
KIBANA_VERSION = setStrVar(VARIABLE_KIBANA_VERSION, KIBANA_VERSION);
FILEBEAT_VERSION = setStrVar(VARIABLE_FILEBEAT_VERSION, FILEBEAT_VERSION);
NDB_VERSION = setStrVar(VARIABLE_NDB_VERSION, NDB_VERSION);
LIVY_VERSION = setStrVar(VARIABLE_LIVY_VERSION, LIVY_VERSION);
HIVE2_VERSION = setStrVar(VARIABLE_HIVE2_VERSION, HIVE2_VERSION);
TEZ_VERSION = setStrVar(VARIABLE_TEZ_VERSION, TEZ_VERSION);
SPARK_VERSION = setStrVar(VARIABLE_SPARK_VERSION, SPARK_VERSION);
FLINK_VERSION = setStrVar(VARIABLE_FLINK_VERSION, FLINK_VERSION);
EPIPE_VERSION = setStrVar(VARIABLE_EPIPE_VERSION, EPIPE_VERSION);
DELA_VERSION = setStrVar(VARIABLE_DELA_VERSION, DELA_VERSION);
KAFKA_VERSION = setStrVar(VARIABLE_KAFKA_VERSION, KAFKA_VERSION);
OPENSEARCH_VERSION = setStrVar(VARIABLE_OPENSEARCH_VERSION, OPENSEARCH_VERSION);
TENSORFLOW_VERSION = setStrVar(VARIABLE_TENSORFLOW_VERSION, TENSORFLOW_VERSION);
KUBE_KSERVE_TENSORFLOW_VERSION = setStrVar(VARIABLE_KUBE_KSERVE_TENSORFLOW_VERSION,
KUBE_KSERVE_TENSORFLOW_VERSION);
HOPSWORKS_VERSION = setStrVar(VARIABLE_HOPSWORKS_VERSION, HOPSWORKS_VERSION);
HOPSWORKS_REST_LOG_LEVEL = setLogLevelVar(VARIABLE_HOPSWORKS_REST_LOG_LEVEL, HOPSWORKS_REST_LOG_LEVEL);
HOPSWORKS_PUBLIC_HOST = setStrVar(VARIABLE_HOPSWORKS_PUBLIC_HOST, HOPSWORKS_PUBLIC_HOST);
PYPI_REST_ENDPOINT = setStrVar(VARIABLE_PYPI_REST_ENDPOINT, PYPI_REST_ENDPOINT);
PYPI_SIMPLE_ENDPOINT = setStrVar(VARIABLE_PYPI_SIMPLE_ENDPOINT, PYPI_SIMPLE_ENDPOINT);
PYPI_INDEXER_TIMER_INTERVAL = setStrVar(VARIABLE_PYPI_INDEXER_TIMER_INTERVAL, PYPI_INDEXER_TIMER_INTERVAL);
PYTHON_LIBRARY_UPDATES_MONITOR_INTERVAL = setStrVar(VARIABLE_PYTHON_LIBRARY_UPDATES_MONITOR_INTERVAL,
PYTHON_LIBRARY_UPDATES_MONITOR_INTERVAL);
PYPI_INDEXER_TIMER_ENABLED = setBoolVar(VARIABLE_PYPI_INDEXER_TIMER_ENABLED, PYPI_INDEXER_TIMER_ENABLED);
IMMUTABLE_PYTHON_LIBRARY_NAMES = toSetFromCsv(
setStrVar(VARIABLE_IMMUTABLE_PYTHON_LIBRARY_NAMES, DEFAULT_IMMUTABLE_PYTHON_LIBRARY_NAMES),
",");
SERVING_MONITOR_INT = setStrVar(VARIABLE_SERVING_MONITOR_INT, SERVING_MONITOR_INT);
SERVING_CONNECTION_POOL_SIZE = setIntVar(VARIABLE_SERVING_CONNECTION_POOL_SIZE,
SERVING_CONNECTION_POOL_SIZE);
SERVING_MAX_ROUTE_CONNECTIONS = setIntVar(VARIABLE_SERVING_MAX_ROUTE_CONNECTIONS,
SERVING_MAX_ROUTE_CONNECTIONS);
TENSORBOARD_MAX_RELOAD_THREADS = setIntVar(VARIABLE_TENSORBOARD_MAX_RELOAD_THREADS,
TENSORBOARD_MAX_RELOAD_THREADS);
KUBE_USER = setStrVar(VARIABLE_KUBE_USER, KUBE_USER);
KUBE_HOPSWORKS_USER = setStrVar(VARIABLE_KUBE_HOPSWORKS_USER, KUBE_HOPSWORKS_USER);
KUBEMASTER_URL = setStrVar(VARIABLE_KUBEMASTER_URL, KUBEMASTER_URL);
KUBE_CA_CERTFILE = setStrVar(VARIABLE_KUBE_CA_CERTFILE, KUBE_CA_CERTFILE);
KUBE_CLIENT_KEYFILE = setStrVar(VARIABLE_KUBE_CLIENT_KEYFILE, KUBE_CLIENT_KEYFILE);
KUBE_CLIENT_CERTFILE = setStrVar(VARIABLE_KUBE_CLIENT_CERTFILE, KUBE_CLIENT_CERTFILE);
KUBE_CLIENT_KEYPASS = setStrVar(VARIABLE_KUBE_CLIENT_KEYPASS, KUBE_CLIENT_KEYPASS);
KUBE_TRUSTSTORE_PATH = setStrVar(VARIABLE_KUBE_TRUSTSTORE_PATH, KUBE_TRUSTSTORE_PATH);
KUBE_TRUSTSTORE_KEY = setStrVar(VARIABLE_KUBE_TRUSTSTORE_KEY, KUBE_TRUSTSTORE_KEY);
KUBE_KEYSTORE_PATH = setStrVar(VARIABLE_KUBE_KEYSTORE_PATH, KUBE_KEYSTORE_PATH);
KUBE_KEYSTORE_KEY = setStrVar(VARIABLE_KUBE_KEYSTORE_KEY, KUBE_KEYSTORE_KEY);
KUBE_PULL_POLICY = setStrVar(VARIABLE_KUBE_PULL_POLICY, KUBE_PULL_POLICY);
KUBE_API_MAX_ATTEMPTS = setIntVar(VARIABLE_KUBE_API_MAX_ATTEMPTS, KUBE_API_MAX_ATTEMPTS);
KUBE_DOCKER_MAX_MEMORY_ALLOCATION = setIntVar(VARIABLE_KUBE_DOCKER_MAX_MEMORY_ALLOCATION,
KUBE_DOCKER_MAX_MEMORY_ALLOCATION);
KUBE_DOCKER_MAX_CORES_ALLOCATION = setDoubleVar(VARIABLE_KUBE_DOCKER_MAX_CORES_ALLOCATION,
KUBE_DOCKER_MAX_CORES_ALLOCATION);
KUBE_DOCKER_MAX_GPUS_ALLOCATION = setIntVar(VARIABLE_KUBE_DOCKER_MAX_GPUS_ALLOCATION,
KUBE_DOCKER_MAX_GPUS_ALLOCATION);
KUBE_INSTALLED = setBoolVar(VARIABLE_KUBE_INSTALLED, KUBE_INSTALLED);
KUBE_KSERVE_INSTALLED = setBoolVar(VARIABLE_KUBE_KSERVE_INSTALLED, KUBE_KSERVE_INSTALLED);
KUBE_SERVING_NODE_LABELS = setStrVar(VARIABLE_KUBE_SERVING_NODE_LABELS, KUBE_SERVING_NODE_LABELS);
KUBE_SERVING_NODE_TOLERATIONS = setStrVar(VARIABLE_KUBE_SERVING_NODE_TOLERATIONS, KUBE_SERVING_NODE_TOLERATIONS);
KUBE_SERVING_MAX_MEMORY_ALLOCATION = setIntVar(VARIABLE_KUBE_SERVING_MAX_MEMORY_ALLOCATION,
KUBE_SERVING_MAX_MEMORY_ALLOCATION);
KUBE_SERVING_MAX_CORES_ALLOCATION = setDoubleVar(VARIABLE_KUBE_SERVING_MAX_CORES_ALLOCATION,
KUBE_SERVING_MAX_CORES_ALLOCATION);
KUBE_SERVING_MAX_GPUS_ALLOCATION = setIntVar(VARIABLE_KUBE_SERVING_MAX_GPUS_ALLOCATION,
KUBE_SERVING_MAX_GPUS_ALLOCATION);
KUBE_SERVING_MAX_NUM_INSTANCES = setIntVar(VARIABLE_KUBE_SERVING_MAX_NUM_INSTANCES,
KUBE_SERVING_MAX_NUM_INSTANCES);
KUBE_SERVING_MIN_NUM_INSTANCES = setIntVar(VARIABLE_KUBE_SERVING_MIN_NUM_INSTANCES,
KUBE_SERVING_MIN_NUM_INSTANCES);
KUBE_KNATIVE_DOMAIN_NAME = setStrVar(VARIABLE_KUBE_KNATIVE_DOMAIN_NAME, KUBE_KNATIVE_DOMAIN_NAME);
KUBE_TAINTED_NODES = setStrVar(VARIABLE_KUBE_TAINTED_NODES, KUBE_TAINTED_NODES);
KUBE_TAINTED_NODES_MONITOR_INTERVAL = setStrVar(VARIABLE_KUBE_TAINTED_NODES_MONITOR_INTERVAL,
KUBE_TAINTED_NODES_MONITOR_INTERVAL);
HOPSWORKS_ENTERPRISE = setBoolVar(VARIABLE_HOPSWORKS_ENTERPRISE, HOPSWORKS_ENTERPRISE);
JUPYTER_HOST = setStrVar(VARIABLE_JUPYTER_HOST, JUPYTER_HOST);
JWT_SIGNATURE_ALGORITHM = setStrVar(VARIABLE_JWT_SIGNATURE_ALGORITHM, JWT_SIGNATURE_ALGORITHM);
JWT_LIFETIME_MS = setLongVar(VARIABLE_JWT_LIFETIME_MS, JWT_LIFETIME_MS);
JWT_EXP_LEEWAY_SEC = setIntVar(VARIABLE_JWT_EXP_LEEWAY_SEC, JWT_EXP_LEEWAY_SEC);
JWT_SIGNING_KEY_NAME = setStrVar(VARIABLE_JWT_SIGNING_KEY_NAME, JWT_SIGNING_KEY_NAME);
JWT_ISSUER = setStrVar(VARIABLE_JWT_ISSUER_KEY, JWT_ISSUER);
SERVICE_JWT_LIFETIME_MS = setLongVar(VARIABLE_SERVICE_JWT_LIFETIME_MS, SERVICE_JWT_LIFETIME_MS);
SERVICE_JWT_EXP_LEEWAY_SEC = setIntVar(VARIABLE_SERVICE_JWT_EXP_LEEWAY_SEC, SERVICE_JWT_EXP_LEEWAY_SEC);
populateServiceJWTCache();
CONNECTION_KEEPALIVE_TIMEOUT = setIntVar(VARIABLE_CONNECTION_KEEPALIVE_TIMEOUT, CONNECTION_KEEPALIVE_TIMEOUT);
FEATURESTORE_DB_DEFAULT_QUOTA = setLongVar(VARIABLE_FEATURESTORE_DEFAULT_QUOTA, FEATURESTORE_DB_DEFAULT_QUOTA);
FEATURESTORE_DB_DEFAULT_STORAGE_FORMAT =
setStrVar(VARIABLE_FEATURESTORE_DEFAULT_STORAGE_FORMAT, FEATURESTORE_DB_DEFAULT_STORAGE_FORMAT);
FEATURESTORE_JDBC_URL = setStrVar(VARIABLE_FEATURESTORE_JDBC_URL, FEATURESTORE_JDBC_URL);
ONLINE_FEATURESTORE = setBoolVar(VARIABLE_ONLINE_FEATURESTORE, ONLINE_FEATURESTORE);
ONLINE_FEATURESTORE_TS = setStrVar(VARIABLE_ONLINE_FEATURESTORE_TS, ONLINE_FEATURESTORE_TS);
FS_JOB_ACTIVITY_TIME = setStrVar(VARIABLE_FS_JOB_ACTIVITY_TIME, FS_JOB_ACTIVITY_TIME);
ONLINEFS_THREAD_NUMBER = setIntVar(VARIABLE_ONLINEFS_THREAD_NUMBER, ONLINEFS_THREAD_NUMBER);
KIBANA_HTTPS_ENABELED = setBoolVar(VARIABLE_KIBANA_HTTPS_ENABLED,
KIBANA_HTTPS_ENABELED);
KIBANA_MULTI_TENANCY_ENABELED = setBoolVar(VARIABLE_KIBANA_MULTI_TENANCY_ENABLED,
KIBANA_MULTI_TENANCY_ENABELED);
RESERVED_PROJECT_NAMES =
setStringHashSetLowerCase(VARIABLE_RESERVED_PROJECT_NAMES, DEFAULT_RESERVED_PROJECT_NAMES, ",");
CLOUD_EVENTS_ENDPOINT = setStrVar(VARIABLE_CLOUD_EVENTS_ENDPOINT,
CLOUD_EVENTS_ENDPOINT);
CLOUD_EVENTS_ENDPOINT_API_KEY =
setStrVar(VARIABLE_CLOUD_EVENTS_ENDPOINT_API_KEY, CLOUD_EVENTS_ENDPOINT_API_KEY);
FG_PREVIEW_LIMIT = setIntVar(VARIABLE_FG_PREVIEW_LIMIT, FG_PREVIEW_LIMIT);
HIVE_CONF_PATH = setStrVar(VARIABLE_HIVE_CONF_PATH, HIVE_CONF_PATH);
FS_PY_JOB_UTIL_PATH = setStrVar(VARIABLE_FS_PY_JOB_UTIL_PATH, FS_PY_JOB_UTIL_PATH);
FS_JAVA_JOB_UTIL_PATH = setStrVar(VARIABLE_FS_JAVA_JOB_UTIL_PATH, FS_JAVA_JOB_UTIL_PATH);
YARN_RUNTIME = setStrVar(VARIABLE_YARN_RUNTIME, YARN_RUNTIME);
DOCKER_MOUNTS = setStrVar(VARIABLE_DOCKER_MOUNTS, DOCKER_MOUNTS);
DOCKER_JOB_MOUNTS_LIST = setStrVar(VARIABLE_DOCKER_JOB_MOUNTS_LIST, DOCKER_JOB_MOUNTS_LIST);
DOCKER_JOB_MOUNT_ALLOWED = setBoolVar(VARIABLE_DOCKER_JOB_MOUNT_ALLOWED, DOCKER_JOB_MOUNT_ALLOWED);
DOCKER_JOB_UID_STRICT = setBoolVar(VARIABLE_DOCKER_JOB_UID_STRICT, DOCKER_JOB_UID_STRICT);
DOCKER_BASE_IMAGE_PYTHON_NAME = setStrVar(VARIABLE_DOCKER_BASE_IMAGE_PYTHON_NAME, DOCKER_BASE_IMAGE_PYTHON_NAME);
DOCKER_BASE_IMAGE_PYTHON_VERSION = setStrVar(VARIABLE_DOCKER_BASE_IMAGE_PYTHON_VERSION,
DOCKER_BASE_IMAGE_PYTHON_VERSION);
// Job executions cleaner variables
EXECUTIONS_PER_JOB_LIMIT = setIntVar(VARIABLE_EXECUTIONS_PER_JOB_LIMIT, EXECUTIONS_PER_JOB_LIMIT);
EXECUTIONS_CLEANER_BATCH_SIZE = setIntVar(VARIABLE_EXECUTIONS_CLEANER_BATCH_SIZE, EXECUTIONS_CLEANER_BATCH_SIZE);
EXECUTIONS_CLEANER_INTERVAL_MS = setIntVar(VARIABLE_EXECUTIONS_CLEANER_INTERVAL_MS,
EXECUTIONS_CLEANER_INTERVAL_MS);
YARN_APP_UID = setLongVar(VARIABLE_YARN_APP_UID, YARN_APP_UID);
populateProvenanceCache();
CLIENT_PATH = setStrVar(VARIABLE_CLIENT_PATH, CLIENT_PATH);
KUBE_TYPE = KubeType.fromString(setStrVar(VARIABLE_KUBE_TYPE, KUBE_TYPE.name));
DOCKER_NAMESPACE = setStrVar(VARIABLE_DOCKER_NAMESPACE, DOCKER_NAMESPACE);
MANAGED_DOCKER_REGISTRY = setBoolVar(VARIABLE_MANAGED_DOCKER_REGISTRY,
MANAGED_DOCKER_REGISTRY);
MAX_ENV_YML_BYTE_SIZE = setIntVar(VARIABLE_MAX_ENV_YML_BYTE_SIZE, MAX_ENV_YML_BYTE_SIZE);
SPARK_EXECUTOR_MIN_MEMORY = setIntVar(VARIABLE_SPARK_EXECUTOR_MIN_MEMORY, SPARK_EXECUTOR_MIN_MEMORY);
LIVY_STARTUP_TIMEOUT = setIntVar(VARIABLE_LIVY_STARTUP_TIMEOUT, LIVY_STARTUP_TIMEOUT);
USER_SEARCH_ENABLED = setBoolVar(VARIABLE_USER_SEARCH, USER_SEARCH_ENABLED);
REJECT_REMOTE_USER_NO_GROUP = setBoolVar(VARIABLE_REJECT_REMOTE_USER_NO_GROUP, REJECT_REMOTE_USER_NO_GROUP);
//Git
GIT_MAX_COMMAND_TIMEOUT_MINUTES = setIntVar(VARIABLE_GIT_COMMAND_TIMEOUT_MINUTES_DEFAULT,
GIT_MAX_COMMAND_TIMEOUT_MINUTES);
DOCKER_CGROUP_ENABLED = setBoolVar(VARIABLE_DOCKER_CGROUP_ENABLED, DOCKER_CGROUP_ENABLED);
DOCKER_CGROUP_MEMORY_LIMIT = setStrVar(VARIABLE_DOCKER_CGROUP_HARD_LIMIT_MEMORY,
DOCKER_CGROUP_MEMORY_LIMIT);
DOCKER_CGROUP_MEMORY_SOFT_LIMIT = setStrVar(VARIABLE_DOCKER_CGROUP_SOFT_LIMIT_MEMORY,
DOCKER_CGROUP_MEMORY_SOFT_LIMIT);
DOCKER_CGROUP_CPU_QUOTA = setDoubleVar(VARIABLE_DOCKER_CGROUP_CPU_QUOTA, DOCKER_CGROUP_CPU_QUOTA);
DOCKER_CGROUP_CPU_PERIOD = setIntVar(VARIABLE_DOCKER_CGROUP_CPU_PERIOD, DOCKER_CGROUP_CPU_PERIOD);
DOCKER_CGROUP_MONITOR_INTERVAL = setStrVar(VARIABLE_DOCKER_CGROUP_MONITOR_INTERVAL,
DOCKER_CGROUP_MONITOR_INTERVAL);
PROMETHEUS_PORT = setIntVar(VARIABLE_PROMETHEUS_PORT, PROMETHEUS_PORT);
SKIP_NAMESPACE_CREATION = setBoolVar(VARIABLE_SKIP_NAMESPACE_CREATION,
SKIP_NAMESPACE_CREATION);
QUOTAS_ONLINE_ENABLED_FEATUREGROUPS = setLongVar(VARIABLE_QUOTAS_ONLINE_ENABLED_FEATUREGROUPS,
QUOTAS_ONLINE_ENABLED_FEATUREGROUPS);
QUOTAS_ONLINE_DISABLED_FEATUREGROUPS = setLongVar(VARIABLE_QUOTAS_ONLINE_DISABLED_FEATUREGROUPS,
QUOTAS_ONLINE_DISABLED_FEATUREGROUPS);
QUOTAS_TRAINING_DATASETS = setLongVar(VARIABLE_QUOTAS_TRAINING_DATASETS, QUOTAS_TRAINING_DATASETS);
QUOTAS_RUNNING_MODEL_DEPLOYMENTS = setLongVar(VARIABLE_QUOTAS_RUNNING_MODEL_DEPLOYMENTS,
QUOTAS_RUNNING_MODEL_DEPLOYMENTS);
QUOTAS_TOTAL_MODEL_DEPLOYMENTS = setLongVar(VARIABLE_QUOTAS_TOTAL_MODEL_DEPLOYMENTS,
QUOTAS_TOTAL_MODEL_DEPLOYMENTS);
QUOTAS_MAX_PARALLEL_EXECUTIONS = setLongVar(VARIABLE_QUOTAS_MAX_PARALLEL_EXECUTIONS,
QUOTAS_MAX_PARALLEL_EXECUTIONS);
QUOTAS_MAX_PARALLEL_EXECUTIONS = setLongVar(VARIABLE_QUOTAS_MAX_PARALLEL_EXECUTIONS,
QUOTAS_MAX_PARALLEL_EXECUTIONS);
cached = true;
}
}
/**
 * Lazily populates the variables cache: a no-op when the cache is already
 * loaded, otherwise triggers a full load via populateCache().
 */
private void checkCache() {
  if (cached) {
    return;
  }
  populateCache();
}
/**
 * Discards the cached variables and immediately reloads them all from the
 * database. Unlike invalidateCache(), the reload happens eagerly here.
 */
public synchronized void refreshCache() {
  cached = false;
  populateCache();
}
/**
 * Persists a single variable and refreshes the cache so the new value is
 * visible to subsequent reads.
 *
 * @param variableName name (id) of the variable to update
 * @param variableValue new value to store
 * @param visibility visibility level of the variable
 */
public synchronized void updateVariable(String variableName, String variableValue, VariablesVisibility visibility) {
  updateVariableInternal(variableName, variableValue, visibility);
  refreshCache();
}
/**
 * Persists a batch of variables and refreshes the cache once at the end,
 * rather than once per variable.
 *
 * @param variablesToUpdate variables to write back to the database
 */
public synchronized void updateVariables(List<Variables> variablesToUpdate) {
  for (Variables variable : variablesToUpdate) {
    updateVariableInternal(variable.getId(), variable.getValue(), variable.getVisibility());
  }
  refreshCache();
}
/**
 * Invalidates the cache of variables. The next read of any variable after
 * invalidateCache() will trigger a reload of all variables from the database
 * (the reload is lazy, in contrast to refreshCache()).
 */
public synchronized void invalidateCache() {
  cached = false;
}
/**
 * ******************************************************************
 */
// Installation directory of the Glassfish application server. This is a
// compile-time constant, not read from the variables table.
private static final String GLASSFISH_DIR = "/srv/hops/glassfish";

public synchronized String getGlassfishDir() {
  return GLASSFISH_DIR;
}
// Two-factor authentication mode; presumably overridden from the variables
// table during populateCache() — the assignment is not in this chunk, confirm.
private String TWOFACTOR_AUTH = "false";
// Semicolon-separated list of account types excluded from two-factor auth.
private String TWOFACTOR_EXCLUDE = "AGENT;CLUSTER_AGENT";

/**
 * @return the raw two-factor auth mode string (see TwoFactorMode for values)
 */
public synchronized String getTwoFactorAuth() {
  checkCache();
  return TWOFACTOR_AUTH;
}

/**
 * @return semicolon-separated account types exempt from two-factor auth
 */
public synchronized String getTwoFactorExclude() {
  checkCache();
  return TWOFACTOR_EXCLUDE;
}
/**
 * Cluster-wide two-factor authentication modes. The {@code name} of each
 * constant is the value stored in the variables table.
 */
public enum TwoFactorMode {
  MANDATORY("mandatory", "User can not disable two factor auth."),
  OPTIONAL("true", "Users can choose to disable two factor auth.");

  private final String name;
  private final String description;

  TwoFactorMode(String modeName, String modeDescription) {
    this.name = modeName;
    this.description = modeDescription;
  }

  /** @return the string value that represents this mode in configuration. */
  public String getName() {
    return name;
  }

  /** @return a human-readable description of this mode. */
  public String getDescription() {
    return description;
  }
}
// Whether Hops RPC TLS is enabled; stored as a string in the variables table.
private String HOPS_RPC_TLS = "false";

/**
 * @return true iff the hops_rpc_tls variable equals "true" (case-insensitive)
 */
public synchronized boolean getHopsRpcTls() {
  checkCache();
  // Boolean.parseBoolean is a case-insensitive "true" check; unlike
  // toLowerCase().equals("true") it does not depend on the default locale.
  return Boolean.parseBoolean(HOPS_RPC_TLS);
}
//Spark executor minimum memory
// NOTE(review): unit is presumably MB like other memory settings — confirm.
public synchronized int getSparkExecutorMinMemory() {
  checkCache();
  return SPARK_EXECUTOR_MIN_MEMORY;
}
/**
 * Default Directory locations
 */
public static final String PRIVATE_DIRS = "/private_dirs/";
public static final String TENSORBOARD_DIRS = "/tensorboard/";
// Spark installation dir; overridable via the variables table (see populateCache).
private String SPARK_DIR = "/srv/hops/spark";
public static final String SPARK_EXAMPLES_DIR = "/examples/jars";
public static final String CONVERSION_DIR = "/ipython_conversions/";
// Spark configuration property keys, as passed to spark-submit / SparkConf.
public static final String SPARK_NUMBER_EXECUTORS_ENV
    = "spark.executor.instances";
public static final String SPARK_DYNAMIC_ALLOC_ENV
    = "spark.dynamicAllocation.enabled";
public static final String SPARK_DYNAMIC_ALLOC_MIN_EXECS_ENV
    = "spark.dynamicAllocation.minExecutors";
public static final String SPARK_DYNAMIC_ALLOC_MAX_EXECS_ENV
    = "spark.dynamicAllocation.maxExecutors";
public static final String SPARK_DYNAMIC_ALLOC_INIT_EXECS_ENV
    = "spark.dynamicAllocation.initialExecutors";
public static final String SPARK_SHUFFLE_SERVICE
    = "spark.shuffle.service.enabled";
public static final String SPARK_SUBMIT_DEPLOYMODE = "spark.submit.deployMode";
public static final String SPARK_DRIVER_MEMORY_ENV = "spark.driver.memory";
public static final String SPARK_DRIVER_CORES_ENV = "spark.driver.cores";
public static final String SPARK_DRIVER_EXTRACLASSPATH = "spark.driver.extraClassPath";
public static final String SPARK_EXECUTOR_MEMORY_ENV = "spark.executor.memory";
public static final String SPARK_EXECUTOR_CORES_ENV = "spark.executor.cores";
public static final String SPARK_EXECUTOR_GPU_AMOUNT = "spark.executor.resource.gpu.amount";
public static final String SPARK_TASK_RESOURCE_GPU_AMOUNT = "spark.task.resource.gpu.amount";
public static final String SPARK_EXECUTOR_RESOURCE_GPU_DISCOVERY_SCRIPT =
    "spark.executor.resource.gpu.discoveryScript";
public static final String SPARK_EXECUTOR_EXTRACLASSPATH = "spark.executor.extraClassPath";
public static final String SPARK_DRIVER_STAGINGDIR_ENV = "spark.yarn.stagingDir";
public static final String SPARK_JAVA_LIBRARY_PROP = "java.library.path";
public static final String SPARK_EXECUTOR_EXTRA_JAVA_OPTS = "spark.executor.extraJavaOptions";
public static final String SPARK_DRIVER_EXTRA_JAVA_OPTIONS="spark.driver.extraJavaOptions";
public static final String SPARK_YARN_DIST_PYFILES = "spark.yarn.dist.pyFiles";
public static final String SPARK_YARN_DIST_FILES = "spark.yarn.dist.files";
public static final String SPARK_YARN_DIST_ARCHIVES = "spark.yarn.dist.archives";
public static final String SPARK_YARN_JARS = "spark.yarn.jars";
//Blacklisting properties
public static final String SPARK_BLACKLIST_ENABLED = "spark.blacklist.enabled";
public static final String SPARK_BLACKLIST_MAX_TASK_ATTEMPTS_PER_EXECUTOR =
    "spark.blacklist.task.maxTaskAttemptsPerExecutor";
public static final String SPARK_BLACKLIST_MAX_TASK_ATTEMPTS_PER_NODE =
    "spark.blacklist.task.maxTaskAttemptsPerNode";
public static final String SPARK_BLACKLIST_STAGE_MAX_FAILED_TASKS_PER_EXECUTOR =
    "spark.blacklist.stage.maxFailedTasksPerExecutor";
// NOTE(review): constant name says "FAILED_TASKS_PER_NODE" but the property is
// "maxFailedExecutorsPerNode" (same for the APPLICATION variant below) —
// verify which is intended before renaming either side.
public static final String SPARK_BLACKLIST_STAGE_MAX_FAILED_TASKS_PER_NODE =
    "spark.blacklist.stage.maxFailedExecutorsPerNode";
public static final String SPARK_BLACKLIST_APPLICATION_MAX_FAILED_TASKS_PER_EXECUTOR =
    "spark.blacklist.application.maxFailedTasksPerExecutor";
public static final String SPARK_BLACKLIST_APPLICATION_MAX_FAILED_TASKS_PER_NODE =
    "spark.blacklist.application.maxFailedExecutorsPerNode";
public static final String SPARK_BLACKLIST_KILL_BLACKLISTED_EXECUTORS =
    "spark.blacklist.killBlacklistedExecutors";
public static final String SPARK_TASK_MAX_FAILURES = "spark.task.maxFailures";
//PySpark properties
public static final String SPARK_APP_NAME_ENV = "spark.app.name";
public static final String SPARK_YARN_IS_PYTHON_ENV = "spark.yarn.isPython";
public static final String SPARK_PYSPARK_PYTHON = "PYSPARK_PYTHON";
public static final String SPARK_PYSPARK_PYTHON_OPTION = "spark.pyspark.python";
//Spark log4j and metrics properties
public static final String JOB_LOG4J_CONFIG = "log4j.configuration";
public static final String JOB_LOG4J_PROPERTIES = "log4j.properties";
//If the value of this property changes, it must be changed in spark-chef log4j.properties as well
public static final String LOGSTASH_JOB_INFO = "hopsworks.logstash.job.info";
// YARN distributed-cache bookkeeping properties used by Spark.
public static final String SPARK_CACHE_FILENAMES
    = "spark.yarn.cache.filenames";
public static final String SPARK_CACHE_SIZES = "spark.yarn.cache.sizes";
public static final String SPARK_CACHE_TIMESTAMPS
    = "spark.yarn.cache.timestamps";
public static final String SPARK_CACHE_VISIBILITIES
    = "spark.yarn.cache.visibilities";
public static final String SPARK_CACHE_TYPES = "spark.yarn.cache.types";
//PYSPARK constants
public static final String SPARK_PY_MAINCLASS
    = "org.apache.spark.deploy.PythonRunner";
public static final long PYTHON_JOB_KUBE_WAITING_TIMEOUT_MS = 60000;
// Prefixes for forwarding environment variables to the YARN AM / executors.
public static final String SPARK_YARN_APPMASTER_ENV = "spark.yarn.appMasterEnv.";
public static final String SPARK_EXECUTOR_ENV = "spark.executorEnv.";
public static final String SPARK_YARN_APPMASTER_SPARK_USER = SPARK_YARN_APPMASTER_ENV + "SPARK_USER";
public static final String SPARK_YARN_APPMASTER_YARN_MODE = SPARK_YARN_APPMASTER_ENV + "SPARK_YARN_MODE";
public static final String SPARK_YARN_APPMASTER_YARN_STAGING_DIR = SPARK_YARN_APPMASTER_ENV
    + "SPARK_YARN_STAGING_DIR";
public static final String SPARK_YARN_APPMASTER_CUDA_DEVICES = SPARK_YARN_APPMASTER_ENV + "CUDA_VISIBLE_DEVICES";
public static final String SPARK_YARN_APPMASTER_HIP_DEVICES = SPARK_YARN_APPMASTER_ENV + "HIP_VISIBLE_DEVICES";
public static final String SPARK_YARN_APPMASTER_ENV_EXECUTOR_GPUS = SPARK_YARN_APPMASTER_ENV + "EXECUTOR_GPUS";
public static final String SPARK_YARN_APPMASTER_LIBHDFS_OPTS = SPARK_YARN_APPMASTER_ENV + "LIBHDFS_OPTS";
public static final String SPARK_YARN_APPMASTER_IS_DRIVER = SPARK_YARN_APPMASTER_ENV + "IS_HOPS_DRIVER";
public static final String SPARK_EXECUTOR_SPARK_USER = SPARK_EXECUTOR_ENV + "SPARK_USER";
public static final String SPARK_EXECUTOR_ENV_EXECUTOR_GPUS = SPARK_EXECUTOR_ENV + "EXECUTOR_GPUS";
public static final String SPARK_EXECUTOR_LIBHDFS_OPTS = SPARK_EXECUTOR_ENV + "LIBHDFS_OPTS";
//docker
public static final String SPARK_YARN_APPMASTER_CONTAINER_RUNTIME = SPARK_YARN_APPMASTER_ENV
    + "YARN_CONTAINER_RUNTIME_TYPE";
public static final String SPARK_YARN_APPMASTER_DOCKER_IMAGE = SPARK_YARN_APPMASTER_ENV
    + "YARN_CONTAINER_RUNTIME_DOCKER_IMAGE";
public static final String SPARK_YARN_APPMASTER_DOCKER_MOUNTS = SPARK_YARN_APPMASTER_ENV
    + "YARN_CONTAINER_RUNTIME_DOCKER_MOUNTS";
public static final String SPARK_EXECUTOR_CONTAINER_RUNTIME = SPARK_EXECUTOR_ENV + "YARN_CONTAINER_RUNTIME_TYPE";
public static final String SPARK_EXECUTOR_DOCKER_IMAGE = SPARK_EXECUTOR_ENV + "YARN_CONTAINER_RUNTIME_DOCKER_IMAGE";
public static final String SPARK_EXECUTOR_DOCKER_MOUNTS = SPARK_EXECUTOR_ENV + "YARN_CONTAINER_RUNTIME_DOCKER_MOUNTS";
public static final String SPARK_HADOOP_FS_PERMISSIONS_UMASK = "spark.hadoop.fs.permissions.umask-mode";
public static final String YARN_PROPERTIES_DYNAMIC_PROPERTIES_STRING = "dynamicPropertiesString";
public static final String YARN_DYNAMIC_PROPERTIES_SEPARATOR = "@@"; // this has to be a regex for String.split()
//nccl
public static final String NCCL_SOCKET_NTHREADS = "NCCL_SOCKET_NTHREADS";
public static final String NCCL_NSOCKS_PERTHREAD = "NCCL_NSOCKS_PERTHREAD";
//Hive config
public static final String HIVE_SITE = "hive-site.xml";
/**
 * @return the Spark installation directory (SPARK_DIR variable)
 */
public synchronized String getSparkDir() {
  checkCache();
  return SPARK_DIR;
}
/**
 * @return the Spark configuration directory inside the Spark installation
 */
public synchronized String getSparkConfDir() {
  // Use the declared relative-dir constant instead of a second hard-coded
  // "conf" literal, so the two cannot drift apart.
  return getSparkDir() + "/" + SPARK_CONF_RELATIVE_DIR;
}
// "/tmp" by default
// Staging directory used for assembling job artifacts before submission.
private String STAGING_DIR = "/srv/hops/domains/domain1/staging";

public synchronized String getStagingDir() {
  checkCache();
  return STAGING_DIR;
}
private final String FLINK_CONF_DIR = "conf";
// Remember to change this in docker-images as well
private String FLINK_DIR = "/srv/hops/flink";

public synchronized String getFlinkDir() {
  checkCache();
  return FLINK_DIR;
}

// Returns the Flink conf directory path, including a trailing File.separator.
public String getFlinkConfDir() {
  String flinkDir = getFlinkDir();
  return flinkDir + File.separator + FLINK_CONF_DIR + File.separator;
}
private final String FLINK_LIB_DIR = "lib";

// Returns the Flink lib directory path, including a trailing File.separator.
public String getFlinkLibDir() {
  String flinkDir = getFlinkDir();
  return flinkDir + File.separator + FLINK_LIB_DIR + File.separator;
}
private final String FLINK_CONF_FILE = "flink-conf.yaml";

/**
 * @return the absolute path of the Flink configuration file
 */
public String getFlinkConfFile() {
  // getFlinkConfDir() already ends with File.separator; appending another
  // produced a doubled separator (".../conf//flink-conf.yaml").
  return getFlinkConfDir() + FLINK_CONF_FILE;
}
// Airflow installation directory, overridable via the variables table.
private String AIRFLOW_DIR = "/srv/hops/airflow";

public synchronized String getAirflowDir() {
  checkCache();
  return AIRFLOW_DIR;
}
private String HADOOP_DIR = "/srv/hops/hadoop";

// This returns the unversioned base installation directory for hops-hadoop
// For example, "/srv/hops/hadoop" - it does not return "/srv/hops/hadoop-2.8.2"
public synchronized String getHadoopSymbolicLinkDir() {
  checkCache();
  return HADOOP_DIR;
}

// Versioned installation directory, e.g. "/srv/hops/hadoop-2.8.2".
public synchronized String getHadoopVersionedDir() {
  checkCache();
  return HADOOP_DIR + "-" + getHadoopVersion();
}
// --- Hive / Anaconda settings (defaults below, overridable via variables table) ---

// OS/service user that Hive runs as.
private String HIVE_SUPERUSER = "hive";

public synchronized String getHiveSuperUser() {
  checkCache();
  return HIVE_SUPERUSER;
}

// Default conda channel used when creating environments.
private String ANACONDA_DEFAULT_REPO = "defaults";

public synchronized String getCondaDefaultRepo() {
  checkCache();
  return ANACONDA_DEFAULT_REPO;
}

// HDFS path of the Hive warehouse.
private String HIVE_WAREHOUSE = "/apps/hive/warehouse";

public synchronized String getHiveWarehouse() {
  checkCache();
  return HIVE_WAREHOUSE;
}

// Hive scratch (temporary) directory.
private String HIVE_SCRATCHDIR = "/tmp/hive";

public synchronized String getHiveScratchdir() {
  checkCache();
  return HIVE_SCRATCHDIR;
}

// Age after which scratch dirs are eligible for cleanup (duration string, e.g. "7d").
private String HIVE_SCRATCHDIR_DELAY = "7d";

public synchronized String getHiveScratchdirDelay() {
  checkCache();
  return HIVE_SCRATCHDIR_DELAY;
}

// How often the scratch-dir cleaner runs (duration string, e.g. "24h").
private String HIVE_SCRATCHDIR_CLEANER_INTERVAL = "24h";

public synchronized String getHiveScratchdirCleanerInterval() {
  checkCache();
  return HIVE_SCRATCHDIR_CLEANER_INTERVAL;
}

// Default quota for Hive databases; -1 means unlimited — TODO confirm semantics.
private long HIVE_DB_DEFAULT_QUOTA = -1;

public synchronized long getHiveDbDefaultQuota() {
  checkCache();
  return HIVE_DB_DEFAULT_QUOTA;
}
// Directory holding the cluster certificates.
private String CERTS_DIR = "/srv/hops/certs-dir";

public synchronized String getCertsDir() {
  checkCache();
  return CERTS_DIR;
}

// Path of the file containing the encryption master password, under the certs dir.
public synchronized String getHopsworksMasterEncPasswordFile() {
  checkCache();
  return getCertsDir() + File.separator + "encryption_master_password";
}

// Glassfish domain directory that Hopsworks is deployed in.
private String HOPSWORKS_INSTALL_DIR = "/srv/hops/domains/domain1";

public synchronized String getHopsworksDomainDir() {
  checkCache();
  return HOPSWORKS_INSTALL_DIR;
}

// Directory containing the sudo helper scripts.
private String SUDOERS_DIR = "/srv/hops/sbin";

public synchronized String getSudoersDir() {
  checkCache();
  return SUDOERS_DIR;
}

// OS user that the Hopsworks application server runs as.
private String HOPSWORKS_USER = "glassfish";

public synchronized String getHopsworksUser() {
  checkCache();
  return HOPSWORKS_USER;
}
// --- Service users and project quota defaults ---

private String HDFS_SUPERUSER = "hdfs";

public synchronized String getHdfsSuperUser() {
  checkCache();
  return HDFS_SUPERUSER;
}

private String SPARK_USER = "spark";

public synchronized String getSparkUser() {
  checkCache();
  return SPARK_USER;
}

private String FLINK_USER = "flink";

public synchronized String getFlinkUser() {
  checkCache();
  return FLINK_USER;
}

// Default YARN quota for new projects.
private Integer YARN_DEFAULT_QUOTA = 60000;

public synchronized Integer getYarnDefaultQuota() {
  checkCache();
  return YARN_DEFAULT_QUOTA;
}

// Payment type assigned to new projects.
private PaymentType DEFAULT_PAYMENT_TYPE = PaymentType.NOLIMIT;

public synchronized PaymentType getDefaultPaymentType() {
  checkCache();
  return DEFAULT_PAYMENT_TYPE;
}

// Default HDFS quota in MB; -1 presumably means unlimited — TODO confirm.
private long HDFS_DEFAULT_QUOTA_MBs = -1;

public synchronized long getHdfsDefaultQuotaInMBs() {
  checkCache();
  return HDFS_DEFAULT_QUOTA_MBs;
}
// Set the DIR_ROOT (/Projects) to have DB storage policy, i.e. - small files stored on db
private DistributedFileSystemOps.StoragePolicy HDFS_BASE_STORAGE_POLICY
    = DistributedFileSystemOps.StoragePolicy.SMALL_FILES;
// To not fill the SSDs with Logs files that nobody access frequently
// We set the StoragePolicy for the LOGS dir to be DEFAULT
private DistributedFileSystemOps.StoragePolicy HDFS_LOG_STORAGE_POLICY
    = DistributedFileSystemOps.StoragePolicy.DEFAULT;
/**
 * Resolves an HDFS storage policy from the variables table, falling back to
 * the given default when the variable is missing, empty, or not a valid
 * policy name.
 *
 * @param policyName id of the variable holding the policy name
 * @param defaultPolicy policy to use when no valid value is stored
 * @return the resolved storage policy
 */
private DistributedFileSystemOps.StoragePolicy setHdfsStoragePolicy(String policyName,
    DistributedFileSystemOps.StoragePolicy defaultPolicy) {
  Optional<Variables> stored = findById(policyName);
  if (!stored.isPresent()) {
    return defaultPolicy;
  }
  String storedPolicy = stored.get().getValue();
  if (Strings.isNullOrEmpty(storedPolicy)) {
    return defaultPolicy;
  }
  try {
    return DistributedFileSystemOps.StoragePolicy.fromPolicy(storedPolicy);
  } catch (IllegalArgumentException ex) {
    // Keep the cluster running on the default rather than failing startup.
    LOGGER.warning("Error - not a valid storage policy! Value was:" + storedPolicy);
    return defaultPolicy;
  }
}
// Storage policy for the projects root dir (see field comment above).
public synchronized DistributedFileSystemOps.StoragePolicy getHdfsBaseStoragePolicy() {
  checkCache();
  return HDFS_BASE_STORAGE_POLICY;
}

// Storage policy for the logs dir (see field comment above).
public synchronized DistributedFileSystemOps.StoragePolicy getHdfsLogStoragePolicy() {
  checkCache();
  return HDFS_LOG_STORAGE_POLICY;
}
private String AIRFLOW_WEB_UI_IP = "127.0.0.1";
private int AIRFLOW_WEB_UI_PORT = 12358;

/**
 * @return "ip:port/hopsworks-api/airflow" address of the Airflow web UI
 */
public synchronized String getAirflowWebUIAddress() {
  checkCache();
  return AIRFLOW_WEB_UI_IP + ":" + AIRFLOW_WEB_UI_PORT + "/hopsworks-api/airflow";
}
// Maximum number of projects a single user may own.
private Integer MAX_NUM_PROJ_PER_USER = 5;

public synchronized Integer getMaxNumProjPerUser() {
  checkCache();
  return MAX_NUM_PROJ_PER_USER;
}

// Hadoop version; also used to build the versioned install dir and jar names.
private String HADOOP_VERSION = "2.8.2";

public synchronized String getHadoopVersion() {
  checkCache();
  return HADOOP_VERSION;
}
//Hadoop locations
public synchronized String getHadoopConfDir() {
  return hadoopConfDir(getHadoopSymbolicLinkDir());
}

// Joins a hadoop install dir with the conf subdirectory ("etc/hadoop").
private String hadoopConfDir(String hadoopDir) {
  return hadoopDir + "/" + HADOOP_CONF_RELATIVE_DIR;
}

public String getHadoopConfDir(String hadoopDir) {
  return hadoopConfDir(hadoopDir);
}

// YARN shares the Hadoop conf directory.
public synchronized String getYarnConfDir() {
  return getHadoopConfDir();
}

public String getYarnConfDir(String hadoopDir) {
  return hadoopConfDir(hadoopDir);
}

// Path of the hops-leader-election jar for the installed Hadoop version.
public String getHopsLeaderElectionJarPath() {
  return getHadoopSymbolicLinkDir() + "/share/hadoop/hdfs/lib/hops-leader-election-" + getHadoopVersion() + ".jar";
}
//Default configuration file names
public static final String DEFAULT_YARN_CONFFILE_NAME = "yarn-site.xml";
public static final String DEFAULT_HADOOP_CONFFILE_NAME = "core-site.xml";
private static final String DEFAULT_HDFS_CONFFILE_NAME = "hdfs-site.xml";
public static final String DEFAULT_SPARK_CONFFILE_NAME = "spark-defaults.conf";
//Environment variable keys
//TODO: Check if ENV_KEY_YARN_CONF_DIR should be replaced with ENV_KEY_YARN_CONF
// NOTE(review): the value "hdfs" looks unlikely to be a real env var name for a
// YARN conf dir — confirm against the deployment scripts before relying on it.
private static final String ENV_KEY_YARN_CONF_DIR = "hdfs";
public static final String ENV_KEY_HADOOP_CONF_DIR = "HADOOP_CONF_DIR";
public static final String ENV_KEY_YARN_CONF = "YARN_CONF_DIR";
public static final String ENV_KEY_SPARK_CONF_DIR = "SPARK_CONF_DIR";
//YARN constants
public static final int YARN_DEFAULT_APP_MASTER_MEMORY = 2048;
public static final String HADOOP_COMMON_HOME_KEY = "HADOOP_COMMON_HOME";
public static final String HADOOP_HOME_KEY = "HADOOP_HOME";
public static final String HADOOP_HDFS_HOME_KEY = "HADOOP_HDFS_HOME";
public static final String HADOOP_YARN_HOME_KEY = "HADOOP_YARN_HOME";
public static final String HADOOP_CONF_DIR_KEY = "HADOOP_CONF_DIR";
private static final String HADOOP_CONF_RELATIVE_DIR = "etc/hadoop";
public static final String SPARK_CONF_RELATIVE_DIR = "conf";
public static final String YARN_CONF_RELATIVE_DIR = HADOOP_CONF_RELATIVE_DIR;
//Spark constants
// Subdirectory where Spark libraries will be placed.
public static final String SPARK_LOCALIZED_LIB_DIR = "__spark_libs__";
public static final String SPARK_LOCALIZED_CONF_DIR = "__spark_conf__";
public static final String SPARK_LOCALIZED_PYTHON_DIR = "__pyfiles__";
public static final String SPARK_LOCRSC_APP_JAR = "__app__.jar";
// Well-known dataset names created for projects.
public static final String HOPS_EXPERIMENTS_DATASET = "Experiments";
public static final String HOPS_MODELS_DATASET = "Models";
public static final String HOPS_MODELS_SCHEMA = "model_schema.json";
public static final String HOPS_MODELS_INPUT_EXAMPLE = "input_example.json";
public static final String HOPS_TOUR_DATASET = "TestJob";
public static final String HOPS_DL_TOUR_DATASET = "TourData";
public static final String HOPS_TOUR_DATASET_JUPYTER = "Jupyter";
// Distribution-defined classpath to add to processes
public static final String SPARK_AM_MAIN
= "org.apache.spark.deploy.yarn.ApplicationMaster";
public static final String SPARK_CONFIG_FILE = "conf/spark-defaults.conf";
public static final String SPARK_BLACKLISTED_PROPS
= "conf/spark-blacklisted-properties.txt";
public static final int SPARK_MIN_EXECS = 1;
public static final int SPARK_MAX_EXECS = 2;
public static final String SPARK_HADOOP_FS_PERMISSIONS_UMASK_DEFAULT = "0007";
// Spark executor min memory
private int SPARK_EXECUTOR_MIN_MEMORY = 1024;
//Flink constants
public static final String FLINK_LOCRSC_FLINK_JAR = "flink.jar";
public static final int FLINK_APP_MASTER_MEMORY = 768;
public static final String HOPS_DEEP_LEARNING_TOUR_DATA = "tensorflow_demo/data";
public static final String HOPS_DEEP_LEARNING_TOUR_NOTEBOOKS = "tensorflow_demo/notebooks";
public static final String FLINK_AM_MAIN = "org.apache.flink.yarn.ApplicationMaster";
public static final String FLINK_ENV_JAVA_OPTS = "env.java.opts";
public static final String FLINK_ENV_JAVA_OPTS_JOBMANAGER = "env.java.opts.jobmanager";
public static final String FLINK_ENV_JAVA_OPTS_TASKMANAGER = "env.java.opts.taskmanager";
public static final String FLINK_STATE_CHECKPOINTS_DIR = "state.checkpoints.dir";
public static final String FLINK_WEB_UPLOAD_DIR = "web.upload.dir";
//Featurestore constants
public static final String HOPS_FEATURESTORE_TOUR_DATA = "featurestore_demo";
public static final String HOPS_FEATURESTORE_TOUR_JOB_CLASS = "io.hops.examples.featurestore_tour.Main";
public static final String HOPS_FEATURESTORE_TOUR_JOB_NAME = "featurestore_tour_job";
public static final String HOPS_FEATURESTORE_TOUR_JOB_INPUT_PARAM = "--input ";
public static final String HSFS_UTIL_MAIN_CLASS = "com.logicalclocks.utils.MainClass";
//Serving constants
public static final String INFERENCE_SCHEMANAME = "inferenceschema";
public static final int INFERENCE_SCHEMAVERSION = 3;
//Kafka constants
public static final String PROJECT_COMPATIBILITY_SUBJECT = "projectcompatibility";
// Schema-registry subjects that users must not create/modify directly.
public static final Set<String> KAFKA_SUBJECT_BLACKLIST =
new HashSet<>(Arrays.asList(INFERENCE_SCHEMANAME, PROJECT_COMPATIBILITY_SUBJECT));
// Local filesystem path of the Flink jar.
public synchronized String getLocalFlinkJarPath() {
  return getFlinkDir() + "/flink.jar";
}
// HDFS path of the Flink jar for the configured Flink user.
public synchronized String getFlinkJarPath() {
  return hdfsFlinkJarPath(getFlinkUser());
}
private String hdfsFlinkJarPath(String flinkUser) {
  return "hdfs:///user/" + flinkUser + "/flink.jar";
}
// Classpath glob covering all Flink libraries.
public synchronized String getFlinkDefaultClasspath() {
  return flinkDefaultClasspath(getFlinkDir());
}
private String flinkDefaultClasspath(String flinkDir) {
  return flinkDir + "/lib/*";
}
public String getFlinkDefaultClasspath(String flinkDir) {
  return flinkDefaultClasspath(flinkDir);
}
// HDFS path of the log4j config used by Spark jobs.
public String getSparkLog4JPath() {
  return "hdfs:///user/" + getSparkUser() + "/log4j.properties";
}
// Classpath glob covering all Spark libraries.
public synchronized String getSparkDefaultClasspath() {
  return sparkDefaultClasspath(getSparkDir());
}
private String sparkDefaultClasspath(String sparkDir) {
  return sparkDir + "/lib/*";
}
// Env var that may carry the pre-expanded Hadoop classpath glob.
// (Identifier fixed from the original "GLASSPATH" typo; the env var VALUE is unchanged.)
private static final String HADOOP_CLASSPATH_GLOB_ENV_VAR_KEY = "HADOOP_GLOB";
private volatile String HADOOP_CLASSPATH_GLOB = null;
/**
 * Returns the expanded Hadoop classpath ("glob"), resolving it lazily with
 * double-checked locking. Taken from the HADOOP_GLOB environment variable when
 * set; otherwise discovered once by running {@code hadoop classpath --glob}.
 *
 * @return the Hadoop classpath glob
 * @throws IOException if the discovery subprocess exits with a non-zero code
 */
public String getHadoopClasspathGlob() throws IOException {
  if (HADOOP_CLASSPATH_GLOB == null) {
    synchronized (Settings.class) {
      if (HADOOP_CLASSPATH_GLOB == null) {
        String classpathGlob = System.getenv(HADOOP_CLASSPATH_GLOB_ENV_VAR_KEY);
        if (classpathGlob == null) {
          LOGGER.log(Level.WARNING, HADOOP_CLASSPATH_GLOB_ENV_VAR_KEY + " environment variable is not set. "
              + "Launching a subprocess to discover it");
          String bin = Paths.get(getHadoopSymbolicLinkDir(), "bin", "hadoop").toString();
          ProcessDescriptor processDescriptor = new ProcessDescriptor.Builder()
              .addCommand(bin)
              .addCommand("classpath")
              .addCommand("--glob")
              .build();
          ProcessResult result = osProcessExecutor.execute(processDescriptor);
          if (result.getExitCode() != 0) {
            throw new IOException("Could not get Hadoop classpath, exit code " + result.getExitCode()
                + " Error: " + result.getStderr());
          }
          // NOTE(review): stdout may carry a trailing newline — confirm callers tolerate it.
          HADOOP_CLASSPATH_GLOB = result.getStdout();
        } else {
          HADOOP_CLASSPATH_GLOB = classpathGlob;
        }
      }
    }
  }
  return HADOOP_CLASSPATH_GLOB;
}
/**
 * Static final fields are allowed in session beans:
 * http://stackoverflow.com/questions/9141673/static-variables-restriction-in-session-beans
 */
//Directory names in HDFS
public static final String DIR_ROOT = "Projects";
public static final String DIR_META_TEMPLATES = Path.SEPARATOR + "user" + Path.SEPARATOR + "metadata"
+ Path.SEPARATOR + "uploads" + Path.SEPARATOR;
public static final String PROJECT_STAGING_DIR = "Resources";
// Any word added to reserved words in DEFAULT_RESERVED_PROJECT_NAMES and DEFAULT_RESERVED_HIVE_NAMES should
// also be added in the documentation in:
// https://hopsworks.readthedocs.io/en/<hopsworksDocVersion>/user_guide/hopsworks/newProject.html
private final static String DEFAULT_RESERVED_PROJECT_NAMES = "hops-system,hopsworks,information_schema,airflow," +
"glassfish_timers,grafana,hops,metastore,mysql,ndbinfo,performance_schema,sqoop,sys," +
"base,python37,python38,filebeat";
//Hive reserved words can be found at:
//https://cwiki.apache.org/confluence/display/hive/LanguageManual+DDL#LanguageManualDDL-Keywords,Non-
//reservedKeywordsandReservedKeywords
private final static String DEFAULT_RESERVED_HIVE_NAMES = "ALL, ALTER, AND, ARRAY, AS, AUTHORIZATION, BETWEEN, " +
"BIGINT, BINARY, BOOLEAN, BOTH, BY, CASE, CAST, CHAR, COLUMN, CONF, CREATE, CROSS, CUBE, CURRENT, CURRENT_DATE, " +
"CURRENT_TIMESTAMP, CURSOR, DATABASE, DATE, DECIMAL, DELETE, DESCRIBE, DISTINCT, DOUBLE, DROP, ELSE, END, " +
"EXCHANGE, EXISTS, EXTENDED, EXTERNAL, FALSE, FETCH, FLOAT, FOLLOWING, FOR, FROM, FULL, FUNCTION, GRANT, GROUP, " +
"GROUPING, HAVING, IF, IMPORT, IN, INNER, INSERT, INT, INTERSECT, INTERVAL, INTO, IS, JOIN, LATERAL, LEFT, LESS, " +
"LIKE, LOCAL, MACRO, MAP, MORE, NONE, NOT, NULL, OF, ON, OR, ORDER, OUT, OUTER, OVER, PARTIALSCAN, PARTITION, " +
"PERCENT, PRECEDING, PRESERVE, PROCEDURE, RANGE, READS, REDUCE, REVOKE, RIGHT, ROLLUP, ROW, ROWS, SELECT, SET, " +
"SMALLINT, TABLE, TABLESAMPLE, THEN, TIMESTAMP, TO, TRANSFORM, TRIGGER, TRUE, TRUNCATE, UNBOUNDED, UNION, " +
"UNIQUEJOIN, UPDATE, USER, USING, UTC_TMESTAMP, VALUES, VARCHAR, WHEN, WHERE, WINDOW, WITH, COMMIT, ONLY, " +
"REGEXP, RLIKE, ROLLBACK, START, CACHE, CONSTRAINT, FOREIGN, PRIMARY, REFERENCES, DAYOFWEEK, EXTRACT, FLOOR, " +
"INTEGER, PRECISION, VIEWS, TIME, NUMERIC, SYNC";
// Cached reserved-name set/string; presumably populated from the variables
// cache (checkCache) — TODO confirm where RESERVED_PROJECT_NAMES_STR is set.
private Set<String> RESERVED_PROJECT_NAMES;
private String RESERVED_PROJECT_NAMES_STR;
/**
 * Returns the set of project names users may not use, i.e. the configured
 * reserved names plus all Hive reserved keywords.
 *
 * @return a defensive copy; mutations by callers do not affect the cached set
 */
public synchronized Set<String> getReservedProjectNames() {
  checkCache();
  RESERVED_PROJECT_NAMES = RESERVED_PROJECT_NAMES != null ? RESERVED_PROJECT_NAMES : new HashSet<>();
  RESERVED_PROJECT_NAMES.addAll(getReservedHiveNames());
  // Return a copy instead of the internal cache so callers cannot corrupt it.
  return new HashSet<>(RESERVED_PROJECT_NAMES);
}
// Hive reserved keywords, lower-cased and split on commas.
public synchronized Set<String> getReservedHiveNames() {
  return setStringHashSetLowerCase(DEFAULT_RESERVED_HIVE_NAMES, ",", true);
}
// Comma-separated, lower-cased list of all reserved words (configured + Hive).
// NOTE(review): RESERVED_PROJECT_NAMES_STR is assumed non-null after
// checkCache(); if it were null this would emit the literal "null" — confirm.
public synchronized String getProjectNameReservedWords() {
  checkCache();
  return (RESERVED_PROJECT_NAMES_STR + ", " + DEFAULT_RESERVED_HIVE_NAMES).toLowerCase();
}
//Only for unit test
public synchronized String getProjectNameReservedWordsTest() {
  return (DEFAULT_RESERVED_PROJECT_NAMES + ", " + DEFAULT_RESERVED_HIVE_NAMES).toLowerCase();
}
// OpenSearch
// All OpenSearch configuration is delegated to OPENSEARCH_SETTINGS, which is
// refreshed by checkCache() before each read.
OpenSearchSettings OPENSEARCH_SETTINGS;
public synchronized List<String> getOpenSearchIps(){
  checkCache();
  return OPENSEARCH_SETTINGS.getOpenSearchIps();
}
public synchronized int getOpenSearchPort() {
  checkCache();
  return OPENSEARCH_SETTINGS.getOpenSearchPort();
}
public synchronized int getOpenSearchRESTPort() {
  checkCache();
  return OPENSEARCH_SETTINGS.getOpenSearchRESTPort();
}
public synchronized String getOpenSearchEndpoint() {
  checkCache();
  return OPENSEARCH_SETTINGS.getOpenSearchEndpoint();
}
public synchronized String getOpenSearchRESTEndpoint() {
  checkCache();
  return OPENSEARCH_SETTINGS.getOpenSearchRESTEndpoint();
}
public synchronized boolean isOpenSearchSecurityEnabled() {
  checkCache();
  return OPENSEARCH_SETTINGS.isOpenSearchSecurityEnabled();
}
public synchronized boolean isOpenSearchHTTPSEnabled() {
  checkCache();
  return OPENSEARCH_SETTINGS.isHttpsEnabled();
}
public synchronized String getOpenSearchAdminUser() {
  checkCache();
  return OPENSEARCH_SETTINGS.getAdminUser();
}
public synchronized String getOpenSearchServiceLogUser() {
  checkCache();
  return OPENSEARCH_SETTINGS.getServiceLogUser();
}
public synchronized String getOpenSearchAdminPassword() {
  checkCache();
  return OPENSEARCH_SETTINGS.getAdminPassword();
}
public synchronized boolean isOpenSearchJWTEnabled() {
  checkCache();
  return OPENSEARCH_SETTINGS.isOpenSearchJWTEnabled();
}
public synchronized String getOpenSearchJwtUrlParameter() {
  checkCache();
  return OPENSEARCH_SETTINGS.getOpenSearchJWTURLParameter();
}
public synchronized long getOpenSearchJwtExpMs() {
  checkCache();
  return OPENSEARCH_SETTINGS.getOpenSearchJWTExpMs();
}
public synchronized Integer getOpenSearchDefaultScrollPageSize() {
  checkCache();
  return OPENSEARCH_SETTINGS.getDefaultScrollPageSize();
}
public synchronized Integer getOpenSearchMaxScrollPageSize() {
  checkCache();
  return OPENSEARCH_SETTINGS.getMaxScrollPageSize();
}
// Retention for OpenSearch log indices: 7 days in milliseconds.
private long OpenSearch_LOGS_INDEX_EXPIRATION = 7 * 24 * 60 * 60 * 1000;
public synchronized long getOpenSearchLogsIndexExpiration() {
  checkCache();
  return OpenSearch_LOGS_INDEX_EXPIRATION;
}
// 7 days in seconds.
private static final int JOB_LOGS_EXPIRATION = 604800;
/**
 * TTL for job logs in opensearch, in seconds.
 *
 * @return
 */
public int getJobLogsExpiration() {
  return JOB_LOGS_EXPIRATION;
}
// Maximum number of log bytes shown in the UI per job.
private static final long JOB_LOGS_DISPLAY_SIZE = 1000000;
public long getJobLogsDisplaySize() {
  return JOB_LOGS_DISPLAY_SIZE;
}
// Name of the OpenSearch document field holding the job id.
private static final String JOB_LOGS_ID_FIELD = "jobid";
public String getJobLogsIdField() {
  return JOB_LOGS_ID_FIELD;
}
// CertificateMaterializer service. Delay for deleting crypto material from
// the local filesystem. The lower the value the more frequent we reach DB
// for materialization
// Suffix, defaults to minutes if omitted:
// ms: milliseconds
// s: seconds
// m: minutes (default)
// h: hours
// d: days
private String CERTIFICATE_MATERIALIZER_DELAY = "1m";
public synchronized String getCertificateMaterializerDelay() {
  checkCache();
  return CERTIFICATE_MATERIALIZER_DELAY;
}
// Validity period (in days) of user certificates.
private String CERTIFICATE_USER_VALID_DAYS = "12";
public synchronized String getCertificateUserValidDays() {
  checkCache();
  return CERTIFICATE_USER_VALID_DAYS;
}
// DNS domain used for Consul-based service discovery.
private String SERVICE_DISCOVERY_DOMAIN = "consul";
public synchronized String getServiceDiscoveryDomain() {
  checkCache();
  return SERVICE_DISCOVERY_DOMAIN;
}
// Kibana
public static final String KIBANA_INDEX_PREFIX = ".kibana";
private String KIBANA_IP = "10.0.2.15";
private static final int KIBANA_PORT = 5601;
// Base URI (scheme://host:port) of the Kibana service.
// NOTE(review): KIBANA_HTTPS_ENABELED (sic) is declared elsewhere in this
// class; the misspelled identifier cannot be fixed from this method alone.
public synchronized String getKibanaUri() {
  checkCache();
  return (KIBANA_HTTPS_ENABELED ? "https" : "http") + "://" + KIBANA_IP +
      ":" + KIBANA_PORT;
}
// Relative path of the Kibana discover app behind the Hopsworks proxy.
public String getKibanaAppUri() {
  return "/hopsworks-api/kibana/app/discover?";
}
// Same as above with the JWT appended as a query parameter.
// NOTE(review): reads OPENSEARCH_SETTINGS without checkCache()/synchronized,
// unlike sibling getters — confirm this is intentional.
public String getKibanaAppUri(String jwtToken) {
  return getKibanaAppUri() + OPENSEARCH_SETTINGS.getOpenSearchJWTURLParameter() + "=" + jwtToken + "&";
}
/*
 * Comma-separated list of user emails that should not be persisted in the
 * userlogins table for auditing.
 * kagent -> agent@hops.io
 */
private String WHITELIST_USERS_LOGIN = "agent@hops.io";
public synchronized String getWhitelistUsersLogin() {
  checkCache();
  return WHITELIST_USERS_LOGIN;
}
// Jupyter
// Installation directory of the Jupyter service.
private String JUPYTER_DIR = "/srv/hops/jupyter";
public synchronized String getJupyterDir() {
  checkCache();
  return JUPYTER_DIR;
}
// OS group owning Jupyter material.
private String JUPYTER_GROUP = "jupyter";
public synchronized String getJupyterGroup() {
  checkCache();
  return JUPYTER_GROUP;
}
// Scheme expected in the Origin header of Jupyter requests.
private String JUPYTER_ORIGIN_SCHEME = "https";
public synchronized String getJupyterOriginScheme() {
  checkCache();
  return JUPYTER_ORIGIN_SCHEME;
}
// WebSocket keep-alive ping interval, in milliseconds.
private long JUPYTER_WS_PING_INTERVAL_MS = 10000L;
public synchronized long getJupyterWSPingInterval() {
  checkCache();
  return JUPYTER_WS_PING_INTERVAL_MS;
}
// Port the Prometheus server listens on.
private Integer PROMETHEUS_PORT = 9089;
public synchronized Integer getPrometheusPort() {
  checkCache();
  return PROMETHEUS_PORT;
}
//Git
// Installation directory of the Git service.
private String GIT_DIR = "/srv/hops/git";
public synchronized String getGitDir() {
  checkCache();
  return GIT_DIR;
}
// Maximum runtime allowed for a git command, in minutes.
private Integer GIT_MAX_COMMAND_TIMEOUT_MINUTES = 60;
/**
 * JWT expiration for git operations: the configured command timeout in ms.
 *
 * @return expiration in milliseconds
 */
public synchronized long getGitJwtExpMs() {
  checkCache();
  // Multiply in long arithmetic: the original int expression could overflow
  // for large configured timeouts (> ~35791 minutes).
  return GIT_MAX_COMMAND_TIMEOUT_MINUTES * 60L * 1000L;
}
// Docker image (name:tag) used to run git commands.
private String GIT_IMAGE_NAME = "git:0.2.0";
// NOTE(review): unlike sibling getters this does not call checkCache(), so a
// DB override would not be picked up — confirm whether that is intentional.
public synchronized String getGitImageName() {
  return GIT_IMAGE_NAME;
}
// Docker cgroup resource limits applied to user containers.
private boolean DOCKER_CGROUP_ENABLED = false;
public synchronized boolean isDockerCgroupEnabled() {
  checkCache();
  return DOCKER_CGROUP_ENABLED;
}
// Hard memory limit per container.
private String DOCKER_CGROUP_MEMORY_LIMIT = "6GB";
public synchronized String getDockerCgroupMemoryLimit() {
  checkCache();
  return DOCKER_CGROUP_MEMORY_LIMIT;
}
// Soft memory limit per container.
private String DOCKER_CGROUP_MEMORY_SOFT_LIMIT = "2GB";
public synchronized String getDockerCgroupSoftLimit() {
  checkCache();
  return DOCKER_CGROUP_MEMORY_SOFT_LIMIT;
}
// CPU quota as a percentage of the cpu period.
private Double DOCKER_CGROUP_CPU_QUOTA = 100.0;
public synchronized Double getDockerCgroupCpuQuota() {
  checkCache();
  return DOCKER_CGROUP_CPU_QUOTA;
}
// CPU scheduling period in microseconds.
private Integer DOCKER_CGROUP_CPU_PERIOD = 100000;
public synchronized Integer getDockerCgroupCpuPeriod() {
  checkCache();
  return DOCKER_CGROUP_CPU_PERIOD;
}
// How often the cgroup monitor runs (duration string, e.g. "10m").
private String DOCKER_CGROUP_MONITOR_INTERVAL = "10m";
public synchronized String getDockerCgroupIntervalMonitor() {
  checkCache();
  return DOCKER_CGROUP_MONITOR_INTERVAL;
}
// Interval at which idle Jupyter servers are shut down.
private static final String JUPYTER_SHUTDOWN_TIMER_INTERVAL = "jupyter_shutdown_timer_interval";
private String jupyterShutdownTimerInterval = "30m";
public synchronized String getJupyterShutdownTimerInterval() {
  checkCache();
  return jupyterShutdownTimerInterval;
}
// OS user running the Kafka brokers.
private String KAFKA_USER = "kafka";
public synchronized String getKafkaUser() {
  checkCache();
  return KAFKA_USER;
}
// Installation directory of Kafka.
private String KAFKA_DIR = "/srv/kafka";
public synchronized String getKafkaDir() {
  checkCache();
  return KAFKA_DIR;
}
// Installation directory of Anaconda.
private String ANACONDA_DIR = "/srv/hops/anaconda";
public synchronized String getAnacondaDir() {
  checkCache();
  return ANACONDA_DIR;
}
// Name of the shared conda environment used by all projects.
private String condaEnvName = "theenv";
/**
 * Constructs the path to the project environment in Anaconda
 *
 * @return conda dir
 */
public String getAnacondaProjectDir() {
  return getAnacondaDir() + File.separator + "envs" + File.separator + condaEnvName;
}
//TODO(Theofilos): Used by Flink. Will be removed as part of refactoring *YarnRunnerBuilders.
public String getCurrentCondaEnvironment() {
  return condaEnvName;
}
// Whether the Anaconda/python-environment service is enabled.
private Boolean ANACONDA_ENABLED = true;
public synchronized Boolean isAnacondaEnabled() {
  checkCache();
  return ANACONDA_ENABLED;
}
// Whether users may download files from datasets.
private Boolean DOWNLOAD_ALLOWED = true;
public synchronized Boolean isDownloadAllowed() {
  checkCache();
  return DOWNLOAD_ALLOWED;
}
/**
 * kagent liveness monitor settings
 */
private String KAGENT_USER = "kagent";
public synchronized String getKagentUser() {
  checkCache();
  return KAGENT_USER;
}
private boolean KAGENT_LIVENESS_MONITOR_ENABLED = false;
public synchronized boolean isKagentLivenessMonitorEnabled() {
  checkCache();
  return KAGENT_LIVENESS_MONITOR_ENABLED;
}
// Duration string after which a silent kagent is considered dead.
private String KAGENT_LIVENESS_THRESHOLD = "10s";
public synchronized String getKagentLivenessThreshold() {
  checkCache();
  return KAGENT_LIVENESS_THRESHOLD;
}
// Verbosity of REST error responses (PROD hides internals).
private RESTLogLevel HOPSWORKS_REST_LOG_LEVEL = RESTLogLevel.PROD;
public synchronized RESTLogLevel getHopsworksRESTLogLevel() {
  checkCache();
  return HOPSWORKS_REST_LOG_LEVEL;
}
// "0" until the first login has happened; flipped afterwards.
private String FIRST_TIME_LOGIN = "0";
public synchronized String getFirstTimeLogin() {
  checkCache();
  return FIRST_TIME_LOGIN;
}
// Email address of the built-in admin account.
private String ADMIN_EMAIL = "admin@hopsworks.ai";
public synchronized String getAdminEmail() {
  checkCache();
  return ADMIN_EMAIL;
}
/**
 * Reports whether the built-in admin account still uses the shipped default
 * password. Returns {@code false} when the admin user does not exist.
 *
 * @return true iff the admin user exists and its stored password hash differs
 *         from the shipped default
 */
public synchronized boolean isDefaultAdminPasswordChanged() {
  Users user = userFacade.findByEmail(ADMIN_EMAIL);
  if (user == null) {
    return false;
  }
  // Hash of the default admin password shipped with the installer.
  final String defaultAdminPwdHash = "12fa520ec8f65d3a6feacfa97a705e622e1fea95b80b521ec016e43874dfed5a";
  return !defaultAdminPwdHash.equals(user.getPassword());
}
// Master password protecting generated SSL keystores.
private String HOPSWORKS_DEFAULT_SSL_MASTER_PASSWORD = "adminpw";
public synchronized String getHopsworksMasterPasswordSsl() {
  checkCache();
  return HOPSWORKS_DEFAULT_SSL_MASTER_PASSWORD;
}
// Defaults used when creating Kafka topics.
private Integer KAFKA_DEFAULT_NUM_PARTITIONS = 2;
private Integer KAFKA_DEFAULT_NUM_REPLICAS = 1;
public synchronized Integer getKafkaDefaultNumPartitions() {
  checkCache();
  return KAFKA_DEFAULT_NUM_PARTITIONS;
}
public synchronized Integer getKafkaDefaultNumReplicas() {
  checkCache();
  return KAFKA_DEFAULT_NUM_REPLICAS;
}
// Shared secret identifying this cluster.
private String CLUSTER_CERT = "asdasxasx8as6dx8a7sx7asdta8dtasxa8";
public synchronized String getCLUSTER_CERT() {
  checkCache();
  return CLUSTER_CERT;
}
// HOPSWORKS-3158
// Publicly reachable hostname of this Hopsworks instance.
private String HOPSWORKS_PUBLIC_HOST = "";
/**
 * Returns the public host of this Hopsworks instance.
 * Synchronized like every other checkCache()-guarded getter in this class;
 * the original omitted it, allowing an unsynchronized (possibly stale) read.
 */
public synchronized String getHopsworksPublicHost() {
  checkCache();
  return HOPSWORKS_PUBLIC_HOST;
}
// Hopsworks
public static final Charset ENCODING = StandardCharsets.UTF_8;
public static final String HOPS_USERS_HOMEDIR = "/home/";
public static final String HOPS_USERNAME_SEPARATOR = "__";
public static final String UNZIP_FILES_SCRIPTNAME = "unzip-hdfs-files.sh";
public static final int USERNAME_LEN = 8;
public static final int MAX_USERNAME_SUFFIX = 99;
public static final int MAX_RETRIES = 500;
// OpenSearch metadata index/field names.
public static final String META_NAME_FIELD = "name";
public static final String META_USAGE_TIME = "usage_time";
public static final String META_DESCRIPTION_FIELD = "description";
public static final String META_INDEX = "projects";
public static final String META_PROJECT_ID_FIELD = "project_id";
public static final String META_DATASET_ID_FIELD = "dataset_id";
public static final String META_DOC_TYPE_FIELD = "doc_type";
public static final String DOC_TYPE_PROJECT = "proj";
public static final String DOC_TYPE_DATASET = "ds";
public static final String DOC_TYPE_INODE = "inode";
public static final String META_ID = "_id";
public static final String META_DATA_NESTED_FIELD = "xattr";
public static final String META_NOTEBOOK_JUPYTER_CONFIG_XATTR_NAME = "jupyter_configuration";
public static final String META_DATA_FIELDS = META_DATA_NESTED_FIELD + ".*";
//Filename conventions
public static final String FILENAME_DISALLOWED_CHARS = " /\\?*:|'\"<>%()&;#öäåÖÅÄàáéèâîïüÜ@${}[]+~^$`";
public static final String SUBDIR_DISALLOWED_CHARS = "/\\?*:|'\"<>%()&;#öäåÖÅÄàáéèâîïüÜ@${}[]+~^$`";
public static final String SHARED_FILE_SEPARATOR = "::";
public static final String DOUBLE_UNDERSCORE = "__";
// Authentication Constants
// POSIX compliant username length
public static final int USERNAME_LENGTH = 8;
// Starting user id from 1000 to create a POSIX compliant username: meb1000
public static final int STARTING_USER = 1000;
public static final int PASSWORD_MIN_LENGTH = 6;
public static final int DEFAULT_SECURITY_ANSWER_LEN = 16;
public static final String DEFAULT_ROLE = "HOPS_USER";
// Maximum number of account validation attempts (name suggests; confirm usage)
public static final int ACCOUNT_VALIDATION_TRIES = 5;
// Issuer of the QrCode
public static final String ISSUER = "hops.io";
// Used to indicate that a python version is unknown
public static final String UNKNOWN_LIBRARY_VERSION = "UNKNOWN";
public static final String PROJECT_PYTHON_DIR = PROJECT_STAGING_DIR + "/.python";
public static final String ENVIRONMENT_FILE = "environment.yml";
public static final String PROJECT_PYTHON_ENVIRONMENT_FILE = PROJECT_PYTHON_DIR + "/" + ENVIRONMENT_FILE;
// Online-status flags: 1 when the user is logged in, 0 otherwise
public static final int IS_ONLINE = 1;
public static final int IS_OFFLINE = 0;
public static final int ALLOWED_FALSE_LOGINS = 5;
public static final int ALLOWED_AGENT_FALSE_LOGINS = 20;
// Prefix used when generating Hopsworks usernames (e.g. meb1000)
public static final String USERNAME_PREFIX = "meb";
public static final String KEYSTORE_SUFFIX = "__kstore.jks";
public static final String TRUSTSTORE_SUFFIX = "__tstore.jks";
public static final String CERT_PASS_SUFFIX = "__cert.key";
public static final String K_CERTIFICATE = "k_certificate";
public static final String T_CERTIFICATE = "t_certificate";
// NOTE(review): same value as T_CERTIFICATE — confirm this is intentional.
public static final String DOMAIN_CA_TRUSTSTORE = "t_certificate";
//Glassfish truststore, used by hopsutil to initialize https connection to Hopsworks
public static final String CRYPTO_MATERIAL_PASSWORD = "material_passwd";
//Used by HopsUtil
public static final String HOPSWORKS_PROJECTID_PROPERTY = "hopsworks.projectid";
public static final String HOPSWORKS_PROJECTNAME_PROPERTY = "hopsworks.projectname";
public static final String HOPSWORKS_PROJECTUSER_PROPERTY = "hopsworks.projectuser";
public static final String HOPSWORKS_JOBNAME_PROPERTY = "hopsworks.job.name";
public static final String HOPSWORKS_JOBTYPE_PROPERTY = "hopsworks.job.type";
public static final String HOPSWORKS_APPID_PROPERTY = "hopsworks.job.appid";
public static final String KAFKA_BROKERADDR_PROPERTY = "hopsworks.kafka.brokeraddress";
public static final String KAFKA_JOB_TOPICS_PROPERTY = "hopsworks.kafka.job.topics";
public static final String SERVER_TRUSTSTORE_PROPERTY = "server.truststore";
public static final String KAFKA_CONSUMER_GROUPS = "hopsworks.kafka.consumergroups";
public static final String HOPSWORKS_REST_ENDPOINT_PROPERTY = "hopsworks.restendpoint";
public static final String HOPSUTIL_INSECURE_PROPERTY = "hopsutil.insecure";
public static final String HOPSWORKS_OPENSEARCH_ENDPOINT_PROPERTY = "hopsworks.opensearch.endpoint";
public static final String HOPSWORKS_DOMAIN_CA_TRUSTSTORE_PROPERTY = "hopsworks.domain.truststore";
// File-preview limits (image bytes / text lines / text bytes).
private int FILE_PREVIEW_IMAGE_SIZE = 10000000;
private int FILE_PREVIEW_TXT_SIZE = 100;
public static final int FILE_PREVIEW_TXT_SIZE_BYTES = 1024 * 384;
// Markdown template for auto-generated dataset READMEs (%s = name, description).
public static final String README_TEMPLATE = "*This is an auto-generated README.md"
+ " file for your Dataset!*\n"
+ "To replace it, go into your DataSet and edit the README.md file.\n"
+ "\n" + "*%s* DataSet\n" + "===\n" + "\n"
+ "## %s";
public static final String FILE_PREVIEW_TEXT_TYPE = "text";
public static final String FILE_PREVIEW_HTML_TYPE = "html";
public static final String FILE_PREVIEW_IMAGE_TYPE = "image";
public static final String FILE_PREVIEW_MODE_TAIL = "tail";
//OpenSearch
// log index pattern
public static final String OPENSEARCH_LOGS_INDEX = "logs";
public static final String OPENSEARCH_PYPI_LIBRARIES_INDEX_PATTERN_PREFIX = "pypi_libraries_";
public static final String OPENSEARCH_LOGS_INDEX_PATTERN = "_" + Settings.OPENSEARCH_LOGS_INDEX + "-*";
public static final String OPENSEARCH_SERVING_INDEX = "serving";
public static final String OPENSEARCH_SERVICES_INDEX = ".services";
public static final String OPENSEARCH_LOG_INDEX_REGEX = ".*_" + OPENSEARCH_LOGS_INDEX + "-\\d{4}.\\d{2}.\\d{2}";
public static final String OPENSEARCH_SERVING_INDEX_REGEX =
".*_" + OPENSEARCH_SERVING_INDEX + "-\\d{4}.\\d{2}.\\d{2}";
public static final String OPENSEARCH_SERVICES_INDEX_REGEX = OPENSEARCH_SERVICES_INDEX + "-\\d{4}.\\d{2}.\\d{2}";
public static final String OPENSEARCH_PYPI_LIBRARIES_INDEX_REGEX =
OPENSEARCH_PYPI_LIBRARIES_INDEX_PATTERN_PREFIX + "*";
//Other OpenSearch indexes
public static final String OPENSEARCH_INDEX_APP_PROVENANCE = "app_provenance";
//OpenSearch aliases
public static final String OPENSEARCH_PYPI_LIBRARIES_ALIAS = "pypi_libraries";
// Local directory for transient (short-lived) certificate material.
public String getHopsworksTmpCertDir() {
  return Paths.get(getCertsDir(), "transient").toString();
}
// HDFS staging directory for Kafka certificates.
public String getHdfsTmpCertDir() {
  return "/user/" + getHdfsSuperUser() + "/" + "kafkacerts";
}
// Local fallback directory for user certificates.
public String getHopsworksTrueTempCertDir() {
  return "/tmp/usercerts/";
}
//Dataset request subject
public static final String MESSAGE_DS_REQ_SUBJECT = "Dataset access request.";
// QUOTA
public static final float DEFAULT_YARN_MULTIPLICATOR = 1.0f;
/**
 * Returns the maximum image size in bytes that can be previewed in the browser.
 *
 * @return maximum image size in bytes
 */
public synchronized int getFilePreviewImageSize() {
  checkCache();
  return FILE_PREVIEW_IMAGE_SIZE;
}
/**
 * Returns the maximum number of lines of the file that can be previewed in the browser.
 *
 * @return maximum number of preview lines
 */
public synchronized int getFilePreviewTxtSize() {
  checkCache();
  return FILE_PREVIEW_TXT_SIZE;
}
//Project creation: default datasets
/**
 * Datasets created by default for every new project.
 * (Redundant {@code static} on the nested enum and redundant {@code private}
 * on the enum constructor removed — enums have both implicitly.)
 */
public enum BaseDataset {
  LOGS("Logs",
      "Contains the logs for jobs that have been run through the Hopsworks platform."),
  RESOURCES("Resources",
      "Contains resources used by jobs, for example, jar files.");

  // Display name of the dataset directory.
  private final String name;
  // Human-readable description stored with the dataset.
  private final String description;

  BaseDataset(String name, String description) {
    this.name = name;
    this.description = description;
  }

  public String getName() {
    return name;
  }

  public String getDescription() {
    return description;
  }
}
/**
 * Datasets backing optional project services (Jupyter, serving, feature store...).
 * (Redundant {@code static} on the nested enum and redundant {@code private}
 * on the enum constructor removed — enums have both implicitly.)
 */
public enum ServiceDataset {
  JUPYTER("Jupyter", "Contains Jupyter notebooks."),
  SERVING("Models", "Contains models to be used for serving."),
  EXPERIMENTS("Experiments", "Contains experiments from using the hops python api"),
  TRAININGDATASETS("Training_Datasets", "Contains curated training datasets created from the feature store"),
  STATISTICS("Statistics", "Contains the statistics for feature groups and training datasets"),
  DATAVALIDATION("DataValidation", "Contains rules and results for Features validation"),
  INGESTION("Ingestion", "Temporary dataset to store feature data ready for ingestion");

  // Display name of the dataset directory.
  private final String name;
  // Human-readable description stored with the dataset.
  private final String description;

  ServiceDataset(String name, String description) {
    this.name = name;
    this.description = description;
  }

  public String getName() {
    return name;
  }

  public String getDescription() {
    return description;
  }
}
// File tracking PIDs of running Jupyter notebook servers.
public static final String JUPYTER_PIDS = "/tmp/jupyterNotebookServer.pids";
// Semicolon-separated staging directories cleaned between job runs.
private String RESOURCE_DIRS = ".sparkStaging;spark-warehouse";
public synchronized String getResourceDirs() {
  checkCache();
  return RESOURCE_DIRS;
}
// How recently a feature-store job must have reported activity to count as alive.
private String FS_JOB_ACTIVITY_TIME = "5m";
public synchronized String getFsJobActivityTime() {
  checkCache();
  return FS_JOB_ACTIVITY_TIME;
}
// No-arg constructor required by the EJB container.
public Settings() {
}
/**
 * Get the variable value with the given name.
 *
 * @param id name of the variable
 * @return the variable with the given id, or empty if no such variable exists
 */
public Optional<Variables> findById(String id) {
  try {
    return Optional.of(em.createNamedQuery("Variables.findById", Variables.class)
        .setParameter("id", id)
        .getSingleResult());
  } catch (NoResultException e) {
    // Absent variable is an expected case, not an error.
    return Optional.empty();
  }
}
/**
 * Get all variables from the database.
 *
 * @return List with all the variables
 */
public List<Variables> getAllVariables() {
  return em.createNamedQuery("Variables.findAll", Variables.class).getResultList();
}
/**
 * Update a variable in the database.
 *
 * @param variableName name
 * @param variableValue value
 * @param visibility who may read the variable
 * @throws NoResultException if the variable does not exist
 */
private void updateVariableInternal(String variableName, String variableValue, VariablesVisibility visibility) {
  Variables var = findById(variableName)
      .orElseThrow(() -> new NoResultException("Variable <" + variableName + "> does not exist in the database"));
  // Only touch the row when something actually changed.
  if (!var.getValue().equals(variableValue) || !var.getVisibility().equals(visibility)) {
    var.setValue(variableValue);
    var.setVisibility(visibility);
    em.persist(var);
  }
}
// Detaches the entity from the persistence context (changes no longer tracked).
public void detach(Variables variable) {
  em.detach(variable);
}
// Lazily-built Hadoop configuration.
// NOTE(review): lazy init is not synchronized; concurrent first calls could
// build the configuration twice — confirm callers are single-threaded here.
Configuration conf;
/**
 * Lazily builds and returns the Hadoop/YARN {@link Configuration}, locating
 * yarn-site.xml, core-site.xml and hdfs-site.xml from environment variables
 * (falling back to the installation's default conf dirs).
 *
 * @return the (cached) Configuration
 * @throws IllegalStateException if any of the three conf files is missing
 */
public Configuration getConfiguration() throws IllegalStateException {
  if (conf == null) {
    String hadoopDir = getHadoopSymbolicLinkDir();
    //Get the path to the Yarn configuration file from environment variables
    String yarnConfDir = System.getenv(Settings.ENV_KEY_YARN_CONF_DIR);
    //If not found in environment variables: warn and use default,
    if (yarnConfDir == null) {
      yarnConfDir = getYarnConfDir(hadoopDir);
    }
    Path confPath = new Path(yarnConfDir);
    File confFile = new File(confPath + File.separator
        + Settings.DEFAULT_YARN_CONFFILE_NAME);
    if (!confFile.exists()) {
      throw new IllegalStateException("No Yarn conf file");
    }
    //Also add the hadoop config
    String hadoopConfDir = System.getenv(Settings.ENV_KEY_HADOOP_CONF_DIR);
    //If not found in environment variables: warn and use default
    if (hadoopConfDir == null) {
      hadoopConfDir = hadoopDir + "/" + Settings.HADOOP_CONF_RELATIVE_DIR;
    }
    confPath = new Path(hadoopConfDir);
    File hadoopConf = new File(confPath + "/"
        + Settings.DEFAULT_HADOOP_CONFFILE_NAME);
    if (!hadoopConf.exists()) {
      throw new IllegalStateException("No Hadoop conf file");
    }
    File hdfsConf = new File(confPath + "/"
        + Settings.DEFAULT_HDFS_CONFFILE_NAME);
    if (!hdfsConf.exists()) {
      throw new IllegalStateException("No HDFS conf file");
    }
    //Set the Configuration object for the returned YarnClient
    conf = new Configuration();
    conf.addResource(new Path(confFile.getAbsolutePath()));
    conf.addResource(new Path(hadoopConf.getAbsolutePath()));
    conf.addResource(new Path(hdfsConf.getAbsolutePath()));
    // Make the conf dirs visible on the configuration's classloader as well.
    addPathToConfig(conf, confFile);
    addPathToConfig(conf, hadoopConf);
    setDefaultConfValues(conf);
  }
  return conf;
}
/**
 * Chains a new classloader onto the configuration so resources located next
 * to the given config file can be resolved by Hadoop.
 *
 * @param conf configuration whose classloader is extended
 * @param path config file (or directory) to expose on the classpath
 * @throws RuntimeException if the path cannot be converted to a URL
 */
private void addPathToConfig(Configuration conf, File path) {
  // chain-in a new classloader
  URL fileUrl = null;
  try {
    // File.toURL() is deprecated and does not escape special characters;
    // toURI().toURL() produces a correctly-encoded URL.
    fileUrl = path.toURI().toURL();
  } catch (MalformedURLException e) {
    throw new RuntimeException("Erroneous config file path", e);
  }
  URL[] urls = {fileUrl};
  ClassLoader cl = new URLClassLoader(urls, conf.getClassLoader());
  conf.setClassLoader(cl);
}
// Fills in the standard filesystem implementations when the loaded conf
// files did not configure them.
private void setDefaultConfValues(Configuration conf) {
  if (conf.get("fs.hdfs.impl", null) == null) {
    conf.set("fs.hdfs.impl", "org.apache.hadoop.hdfs.DistributedFileSystem");
  }
  if (conf.get("fs.file.impl", null) == null) {
    conf.set("fs.file.impl", "org.apache.hadoop.fs.LocalFileSystem");
  }
}
// Maximum number of Kafka topics allowed per project.
private int KAFKA_MAX_NUM_TOPICS = 10;
public synchronized int getKafkaMaxNumTopics() {
  checkCache();
  return KAFKA_MAX_NUM_TOPICS;
}
// Maximum retries when polling a service for status.
private int MAX_STATUS_POLL_RETRY = 5;
public synchronized int getMaxStatusPollRetry() {
  checkCache();
  return MAX_STATUS_POLL_RETRY;
}
/**
 * Returns aggregated log dir path for an application with the the given appId.
 *
 * @param hdfsUser user
 * @param appId appId
 * @return path, or null when YARN log aggregation is disabled
 */
public String getAggregatedLogPath(String hdfsUser, String appId) {
  // Go through getConfiguration(): the raw `conf` field is null until the
  // configuration has been lazily built, which would NPE here.
  Configuration configuration = getConfiguration();
  boolean logPathsAreAggregated = configuration.getBoolean(
      YarnConfiguration.LOG_AGGREGATION_ENABLED,
      YarnConfiguration.DEFAULT_LOG_AGGREGATION_ENABLED);
  String aggregatedLogPath = null;
  if (logPathsAreAggregated) {
    String[] nmRemoteLogDirs = configuration.getStrings(
        YarnConfiguration.NM_REMOTE_APP_LOG_DIR,
        YarnConfiguration.DEFAULT_NM_REMOTE_APP_LOG_DIR);
    String[] nmRemoteLogDirSuffix = configuration.getStrings(
        YarnConfiguration.NM_REMOTE_APP_LOG_DIR_SUFFIX,
        YarnConfiguration.DEFAULT_NM_REMOTE_APP_LOG_DIR_SUFFIX);
    aggregatedLogPath = nmRemoteLogDirs[0] + File.separator + hdfsUser
        + File.separator + nmRemoteLogDirSuffix[0] + File.separator
        + appId;
  }
  return aggregatedLogPath;
}
// For performance reasons, we have an in-memory cache of files being unzipped
// Lazily remove them from the cache, when we check the FS and they aren't there.
private Set<CompressionInfo> zippingFiles = new HashSet<>();
public synchronized void addZippingState(CompressionInfo compressionInfo) {
zippingFiles.add(compressionInfo);
}
private Set<CompressionInfo> unzippingFiles = new HashSet<>();
public synchronized void addUnzippingState(CompressionInfo compressionInfo) {
unzippingFiles.add(compressionInfo);
}
/**
 * Returns the state of an in-flight zip or unzip operation for the given HDFS
 * path, as recorded by the operation's on-disk FSM file.
 *
 * <p>State values observed here: "NONE" when no operation is tracked (or its
 * staging dir has vanished), "NOT_FOUND" when the FSM file cannot be read or
 * is empty, otherwise the trimmed content of the FSM file (e.g. "FAILED",
 * "SUCCESS"). Terminal states cause the cache entry and FSM file to be
 * removed.
 *
 * @param hdfsPath the HDFS path whose compression state is queried
 * @return the current (un)zip state string
 */
public synchronized String getZipState(String hdfsPath) {
boolean zipOperation = false;
boolean unzipOperation = false;
// Look the path up in both in-memory caches; at most one should match.
CompressionInfo zipInfo = zippingFiles.stream()
.filter(zinfo -> zinfo.getHdfsPath().toString().equals(hdfsPath))
.findAny()
.orElse(null);
CompressionInfo unzipInfo = unzippingFiles.stream()
.filter(uzinfo -> uzinfo.getHdfsPath().toString().equals(hdfsPath))
.findAny()
.orElse(null);
String compressionDir = null;
String fsmPath = null;
if(zipInfo != null) {
compressionDir = getStagingDir() + File.separator + zipInfo.getStagingDirectory();
fsmPath = compressionDir + "/fsm.txt";
zipOperation = true;
} else if(unzipInfo != null) {
compressionDir = getStagingDir() + File.separator + unzipInfo.getStagingDirectory();
fsmPath = compressionDir + "/fsm.txt";
unzipOperation = true;
} else {
// Path is tracked by neither cache: no operation is in progress.
return "NONE";
}
String state = "NOT_FOUND";
try {
// The FSM file holds the operation's current state as plain text.
state = new String(java.nio.file.Files.readAllBytes(Paths.get(fsmPath)));
state = state.trim();
} catch (IOException ex) {
if (!java.nio.file.Files.exists(Paths.get(compressionDir))) {
state = "NONE";
// lazily remove the cache entry, probably because it has finished zipping/unzipping
if (zipOperation) {
zippingFiles.remove(zipInfo);
} else if (unzipOperation) {
unzippingFiles.remove(unzipInfo);
}
}
}
// If a terminal state has been reached, remove the entry and the FSM file.
if (state.isEmpty() || state.compareTo("FAILED") == 0 || state.compareTo("SUCCESS") == 0) {
try {
if (zipOperation) {
zippingFiles.remove(zipInfo);
} else if (unzipOperation) {
unzippingFiles.remove(unzipInfo);
}
java.nio.file.Files.deleteIfExists(Paths.get(fsmPath));
} catch (IOException ex) {
Logger.getLogger(Settings.class.getName()).log(Level.SEVERE, null, ex);
}
}
// An empty FSM file means the state was not yet written.
if (state.isEmpty()) {
state = "NOT_FOUND";
}
return state;
}
private boolean PYTHON_KERNEL = true;
public synchronized boolean isPythonKernelEnabled() {
checkCache();
return PYTHON_KERNEL;
}
private String PYPI_REST_ENDPOINT = "https://pypi.org/pypi/{package}/json";
public synchronized String getPyPiRESTEndpoint() {
checkCache();
return PYPI_REST_ENDPOINT;
}
private String PYPI_INDEXER_TIMER_INTERVAL = "1d";
public synchronized String getPyPiIndexerTimerInterval() {
checkCache();
return PYPI_INDEXER_TIMER_INTERVAL;
}
private String PYPI_SIMPLE_ENDPOINT = "https://pypi.org/simple/";
public synchronized String getPyPiSimpleEndpoint() {
checkCache();
return PYPI_SIMPLE_ENDPOINT;
}
private boolean PYPI_INDEXER_TIMER_ENABLED = true;
public synchronized boolean isPyPiIndexerTimerEnabled() {
checkCache();
return PYPI_INDEXER_TIMER_ENABLED;
}
private String PYTHON_LIBRARY_UPDATES_MONITOR_INTERVAL = "1d";
public synchronized String getPythonLibraryUpdatesMonitorInterval() {
checkCache();
return PYTHON_LIBRARY_UPDATES_MONITOR_INTERVAL;
}
private String HOPS_EXAMPLES_VERSION = "0.3.0";
public synchronized String getHopsExamplesSparkFilename() {
checkCache();
return "hops-examples-spark-" + HOPS_EXAMPLES_VERSION + ".jar";
}
private String VERIFICATION_PATH = "/hopsworks-admin/security/validate_account.xhtml";
public synchronized String getEmailVerificationEndpoint() {
checkCache();
return VERIFICATION_PATH;
}
//Dela START
private static final String VARIABLE_HOPSSITE_BASE_URI = "hops_site_endpoint";
private static final String VARIABLE_HOPSSITE_BASE_URI_HOST = "hops_site_host";
private static final String VARIABLE_CLUSTER_CERT = "hopsworks_certificate";
private static final String VARIABLE_DELA_ENABLED = "dela_enabled";
private static final String VARIABLE_DELA_CLIENT_TYPE = "dela_client_type";
private static final String VARIABLE_HOPSSITE_HEARTBEAT_INTERVAL = "hopssite_heartbeat_interval";
private static final String VARIABLE_DELA_CLUSTER_ID = "cluster_id";
private static final String VARIABLE_DELA_CLUSTER_IP = "dela_cluster_ip";
private static final String VARIABLE_DELA_CLUSTER_HTTP_PORT = "dela_cluster_http_port";
private static final String VARIABLE_DELA_PUBLIC_HOPSWORKS_PORT = "dela_hopsworks_public_port";
private static final String VARIABLE_PUBLIC_HTTPS_PORT = "public_https_port";
private static final String VARIABLE_DELA_SEARCH_ENDPOINT = "dela_search_endpoint";
private static final String VARIABLE_DELA_TRANSFER_ENDPOINT = "dela_transfer_endpoint";
private static final String VARIABLE_HOPSWORKS_PUBLIC_HOST = "hopsworks_public_host";
public static final Level DELA_DEBUG = Level.INFO;
private String HOPSSITE_HOST = "hops.site";
private String HOPSSITE = "http://hops.site:5081/hops-site/api";
private Boolean DELA_ENABLED = false; // set to false if not found in variables table
private DelaClientType DELA_CLIENT_TYPE = DelaClientType.FULL_CLIENT;
private long HOPSSITE_HEARTBEAT_RETRY = 10 * 1000l; //10s
private long HOPSSITE_HEARTBEAT_INTERVAL = 10 * 60 * 1000l;//10min
private String DELA_TRANSFER_IP = "localhost";
private String DELA_TRANSFER_HTTP_PORT = "42000";
private String DELA_PUBLIC_HOPSWORK_PORT = "8080";
private String PUBLIC_HTTPS_PORT = "8181";
//set on registration after Dela is contacted to detect public port
private String DELA_SEARCH_ENDPOINT = "";
private String DELA_TRANSFER_ENDPOINT = "";
//set on cluster registration
private String DELA_CLUSTER_ID = null;
//
private AddressJSON DELA_PUBLIC_ENDPOINT = null;
//
public static final String MANIFEST_FILE = "manifest.json";
public static final String README_FILE = "README.md";
/**
 * Loads all Dela-related settings from the variables table into the in-memory
 * cache, keeping the current values as defaults when a variable is absent.
 */
private void populateDelaCache() {
DELA_ENABLED = setBoolVar(VARIABLE_DELA_ENABLED, DELA_ENABLED);
DELA_CLIENT_TYPE = DelaClientType.from(setVar(VARIABLE_DELA_CLIENT_TYPE, DELA_CLIENT_TYPE.type));
HOPSSITE_CLUSTER_NAME = setVar(VARIABLE_HOPSSITE_CLUSTER_NAME, HOPSSITE_CLUSTER_NAME);
HOPSSITE_CLUSTER_PSWD = setVar(VARIABLE_HOPSSITE_CLUSTER_PSWD, HOPSSITE_CLUSTER_PSWD);
HOPSSITE_CLUSTER_PSWD_AUX = setVar(VARIABLE_HOPSSITE_CLUSTER_PSWD_AUX, HOPSSITE_CLUSTER_PSWD_AUX);
HOPSSITE_HOST = setVar(VARIABLE_HOPSSITE_BASE_URI_HOST, HOPSSITE_HOST);
HOPSSITE = setVar(VARIABLE_HOPSSITE_BASE_URI, HOPSSITE);
HOPSSITE_HEARTBEAT_INTERVAL = setLongVar(VARIABLE_HOPSSITE_HEARTBEAT_INTERVAL, HOPSSITE_HEARTBEAT_INTERVAL);
DELA_TRANSFER_IP = setStrVar(VARIABLE_DELA_CLUSTER_IP, DELA_TRANSFER_IP);
DELA_TRANSFER_HTTP_PORT = setStrVar(VARIABLE_DELA_CLUSTER_HTTP_PORT, DELA_TRANSFER_HTTP_PORT);
DELA_SEARCH_ENDPOINT = setStrVar(VARIABLE_DELA_SEARCH_ENDPOINT, DELA_SEARCH_ENDPOINT);
DELA_TRANSFER_ENDPOINT = setStrVar(VARIABLE_DELA_TRANSFER_ENDPOINT, DELA_TRANSFER_ENDPOINT);
DELA_PUBLIC_HOPSWORK_PORT = setStrVar(VARIABLE_DELA_PUBLIC_HOPSWORKS_PORT, DELA_PUBLIC_HOPSWORK_PORT);
PUBLIC_HTTPS_PORT = setStrVar(VARIABLE_PUBLIC_HTTPS_PORT, PUBLIC_HTTPS_PORT);
DELA_CLUSTER_ID = setStrVar(VARIABLE_DELA_CLUSTER_ID, DELA_CLUSTER_ID);
}
/**
 * Loads the service master JWT and the per-slot renewal tokens from the
 * variables table into the in-memory cache. Missing renewal tokens default
 * to the empty string.
 */
private void populateServiceJWTCache() {
  SERVICE_MASTER_JWT = setStrVar(VARIABLE_SERVICE_MASTER_JWT, SERVICE_MASTER_JWT);
  RENEW_TOKENS = new String[NUM_OF_SERVICE_RENEW_TOKENS];
  for (int idx = 0; idx < NUM_OF_SERVICE_RENEW_TOKENS; idx++) {
    // Each renewal token lives under its own templated variable name.
    RENEW_TOKENS[idx] = setStrVar(String.format(SERVICE_RENEW_TOKEN_VARIABLE_TEMPLATE, idx), "");
  }
}
public synchronized Boolean isDelaEnabled() {
checkCache();
return DELA_ENABLED;
}
public synchronized DelaClientType getDelaClientType() {
return DELA_CLIENT_TYPE;
}
public synchronized String getHOPSSITE_HOST() {
checkCache();
return HOPSSITE_HOST;
}
public synchronized String getHOPSSITE() {
checkCache();
return HOPSSITE;
}
public synchronized long getHOPSSITE_HEARTBEAT_RETRY() {
checkCache();
return HOPSSITE_HEARTBEAT_RETRY;
}
public synchronized long getHOPSSITE_HEARTBEAT_INTERVAL() {
checkCache();
return HOPSSITE_HEARTBEAT_INTERVAL;
}
public synchronized String getDELA_TRANSFER_IP() {
checkCache();
return DELA_TRANSFER_IP;
}
public synchronized String getDELA_TRANSFER_HTTP_PORT() {
checkCache();
return DELA_TRANSFER_HTTP_PORT;
}
public synchronized String getDELA_TRANSFER_HTTP_ENDPOINT() {
checkCache();
return "http://" + DELA_TRANSFER_IP + ":" + DELA_TRANSFER_HTTP_PORT + "/";
}
public synchronized String getDELA_HOPSWORKS_PORT() {
checkCache();
return DELA_PUBLIC_HOPSWORK_PORT;
}
public synchronized String getPUBLIC_HTTPS_PORT() {
checkCache();
return PUBLIC_HTTPS_PORT;
}
public synchronized AddressJSON getDELA_PUBLIC_ENDPOINT() {
return DELA_PUBLIC_ENDPOINT;
}
/**
 * Returns the Dela search endpoint, falling back to a direct variables-table
 * lookup when the cached value is absent.
 *
 * @return the Dela search endpoint, or {@code null} if unset
 */
public synchronized String getDELA_SEARCH_ENDPOINT() {
  checkCache();
  if (DELA_SEARCH_ENDPOINT != null) {
    return DELA_SEARCH_ENDPOINT;
  }
  // The lookup key must be the variable NAME; the previous code passed the
  // cached value itself, so the fallback could never find the variable.
  return setStrVar(VARIABLE_DELA_SEARCH_ENDPOINT, null);
}
/**
 * Returns the Dela transfer endpoint, falling back to a direct
 * variables-table lookup when the cached value is absent.
 *
 * @return the Dela transfer endpoint, or {@code null} if unset
 */
public synchronized String getDELA_TRANSFER_ENDPOINT() {
  checkCache();
  if (DELA_TRANSFER_ENDPOINT != null) {
    return DELA_TRANSFER_ENDPOINT;
  }
  // The lookup key must be the variable NAME; the previous code passed the
  // cached value itself, so the fallback could never find the variable.
  return setStrVar(VARIABLE_DELA_TRANSFER_ENDPOINT, null);
}
/**
 * Records the publicly reachable Dela endpoint and derives + persists the
 * search and transfer endpoints from it.
 *
 * <p>The search endpoint is the HTTPS REST API at the endpoint's IP; the
 * transfer endpoint is "ip:port/id". Each derived value is inserted into the
 * variables table on first write and merged on subsequent writes, then
 * cached in memory.
 *
 * @param endpoint the detected public address of this cluster's Dela node
 */
public synchronized void setDELA_PUBLIC_ENDPOINT(AddressJSON endpoint) {
DELA_PUBLIC_ENDPOINT = endpoint;
String delaSearchEndpoint = "https://" + endpoint.getIp() + ":"
+ getPUBLIC_HTTPS_PORT() + "/hopsworks-api/api";
String delaTransferEndpoint = endpoint.getIp() + ":" + endpoint.getPort() + "/" + endpoint.getId();
// persist on first write, merge afterwards
if (getDELA_SEARCH_ENDPOINT() == null) {
em.persist(new Variables(VARIABLE_DELA_SEARCH_ENDPOINT, delaSearchEndpoint));
} else {
em.merge(new Variables(VARIABLE_DELA_SEARCH_ENDPOINT, delaSearchEndpoint));
}
DELA_SEARCH_ENDPOINT = delaSearchEndpoint;
if (getDELA_TRANSFER_ENDPOINT() == null) {
em.persist(new Variables(VARIABLE_DELA_TRANSFER_ENDPOINT, delaTransferEndpoint));
} else {
em.merge(new Variables(VARIABLE_DELA_TRANSFER_ENDPOINT, delaTransferEndpoint));
}
DELA_TRANSFER_ENDPOINT = delaTransferEndpoint;
}
/**
 * Persists the Dela cluster id in the variables table (insert on first
 * write, merge afterwards) and caches it in memory.
 *
 * @param id the cluster id assigned on registration
 */
public synchronized void setDELA_CLUSTER_ID(String id) {
  Variables row = new Variables(VARIABLE_DELA_CLUSTER_ID, id);
  if (getDELA_CLUSTER_ID() == null) {
    em.persist(row);
  } else {
    em.merge(row);
  }
  DELA_CLUSTER_ID = id;
}
public synchronized String getDELA_CLUSTER_ID() {
checkCache();
if (DELA_CLUSTER_ID != null) {
return DELA_CLUSTER_ID;
}
return setStrVar(VARIABLE_DELA_CLUSTER_ID, null);
}
public synchronized String getDELA_DOMAIN() {
if (DELA_PUBLIC_ENDPOINT != null) {
return DELA_PUBLIC_ENDPOINT.getIp();
}
return null;
}
//************************************************CERTIFICATES********************************************************
private static final String HOPS_SITE_CA_DIR = "hops-site-certs";
private final static String HOPS_SITE_CERTFILE = "/pub.pem";
private final static String HOPS_SITE_CA_CERTFILE = "/ca_pub.pem";
private final static String HOPS_SITE_INTERMEDIATE_CERTFILE = "/intermediate_ca_pub.pem";
private final static String HOPS_SITE_KEY_STORE = "/keystores/keystore.jks";
private final static String HOPS_SITE_TRUST_STORE = "/keystores/truststore.jks";
private static final String VARIABLE_HOPSSITE_CLUSTER_NAME = "hops_site_cluster_name";
private static final String VARIABLE_HOPSSITE_CLUSTER_PSWD = "hops_site_cluster_pswd";
private static final String VARIABLE_HOPSSITE_CLUSTER_PSWD_AUX = "hops_site_cluster_pswd_aux";
private String HOPSSITE_CLUSTER_NAME = null;
private String HOPSSITE_CLUSTER_PSWD = null;
private String HOPSSITE_CLUSTER_PSWD_AUX = "1234";
public synchronized Optional<String> getHopsSiteClusterName() {
checkCache();
return Optional.ofNullable(HOPSSITE_CLUSTER_NAME);
}
/**
 * Persists the hops.site cluster name (insert on first write, merge
 * afterwards) and caches it in memory.
 *
 * @param clusterName the cluster name registered with hops.site
 */
public synchronized void setHopsSiteClusterName(String clusterName) {
  Variables row = new Variables(VARIABLE_HOPSSITE_CLUSTER_NAME, clusterName);
  if (getHopsSiteClusterName().isPresent()) {
    em.merge(row);
  } else {
    em.persist(row);
  }
  HOPSSITE_CLUSTER_NAME = clusterName;
}
/**
 * Deletes the hops.site cluster name from the variables table and clears the
 * in-memory cache, if it is currently set.
 */
public synchronized void deleteHopsSiteClusterName() {
  if (getHopsSiteClusterName().isPresent()) {
    Optional<Variables> v = findById(VARIABLE_HOPSSITE_CLUSTER_NAME);
    if (v.isPresent()) {
      // Remove the entity itself, not the Optional wrapper: passing the
      // Optional to EntityManager.remove() throws IllegalArgumentException
      // because it is not a managed entity.
      em.remove(v.get());
      HOPSSITE_CLUSTER_NAME = null;
    }
  }
}
public synchronized String getHopsSiteClusterPswdAux() {
checkCache();
return HOPSSITE_CLUSTER_PSWD_AUX;
}
public synchronized Optional<String> getHopsSiteClusterPswd() {
checkCache();
return Optional.ofNullable(HOPSSITE_CLUSTER_PSWD);
}
/**
 * Persists the hops.site cluster password (insert on first write, merge
 * afterwards) and caches it in memory.
 *
 * @param pswd the cluster password for hops.site
 */
public synchronized void setHopsSiteClusterPswd(String pswd) {
  Variables row = new Variables(VARIABLE_HOPSSITE_CLUSTER_PSWD, pswd);
  if (getHopsSiteClusterPswd().isPresent()) {
    em.merge(row);
  } else {
    em.persist(row);
  }
  HOPSSITE_CLUSTER_PSWD = pswd;
}
public synchronized String getHopsSiteCaDir() {
return getCertsDir() + File.separator + HOPS_SITE_CA_DIR;
}
public synchronized String getHopsSiteCaScript() {
return getSudoersDir() + File.separator + "ca-keystore.sh";
}
public synchronized String getHopsSiteCert() {
return getHopsSiteCaDir() + HOPS_SITE_CERTFILE;
}
public synchronized String getHopsSiteCaCert() {
return getHopsSiteCaDir() + HOPS_SITE_CA_CERTFILE;
}
public synchronized String getHopsSiteIntermediateCert() {
return getHopsSiteCaDir() + HOPS_SITE_INTERMEDIATE_CERTFILE;
}
public synchronized String getHopsSiteKeyStorePath() {
return getHopsSiteCaDir() + HOPS_SITE_KEY_STORE;
}
public synchronized String getHopsSiteTrustStorePath() {
return getHopsSiteCaDir() + HOPS_SITE_TRUST_STORE;
}
//Dela END
//************************************************ZOOKEEPER********************************************************
public static final int ZOOKEEPER_SESSION_TIMEOUT_MS = 30 * 1000;//30 seconds
public static final int ZOOKEEPER_CONNECTION_TIMEOUT_MS = 30 * 1000;// 30 seconds
//Zookeeper END
//************************************************KAFKA********************************************************
public static final String KAFKA_ACL_WILDCARD = "*";
//Kafka END
//-------------------------Remote auth [OAuth2, KRB, LDAP]----------------------------
private static final String VARIABLE_KRB_AUTH = "kerberos_auth";
private static final String VARIABLE_LDAP_AUTH = "ldap_auth";
private static final String VARIABLE_LDAP_GROUP_MAPPING = "ldap_group_mapping";
private static final String VARIABLE_LDAP_USER_ID = "ldap_user_id";
private static final String VARIABLE_LDAP_USER_GIVEN_NAME = "ldap_user_givenName";
private static final String VARIABLE_LDAP_USER_SURNAME = "ldap_user_surname";
private static final String VARIABLE_LDAP_USER_EMAIL = "ldap_user_email";
private static final String VARIABLE_LDAP_USER_SEARCH_FILTER = "ldap_user_search_filter";
private static final String VARIABLE_LDAP_GROUP_SEARCH_FILTER = "ldap_group_search_filter";
private static final String VARIABLE_LDAP_KRB_USER_SEARCH_FILTER = "ldap_krb_search_filter";
private static final String VARIABLE_LDAP_ATTR_BINARY = "ldap_attr_binary";
private static final String VARIABLE_LDAP_GROUP_TARGET = "ldap_group_target";
private static final String VARIABLE_LDAP_DYNAMIC_GROUP_TARGET = "ldap_dyn_group_target";
private static final String VARIABLE_LDAP_USERDN = "ldap_user_dn";
private static final String VARIABLE_LDAP_GROUPDN = "ldap_group_dn";
private static final String VARIABLE_LDAP_ACCOUNT_STATUS = "ldap_account_status";
private static final String VARIABLE_LDAP_GROUPS_SEARCH_FILTER = "ldap_groups_search_filter";
private static final String VARIABLE_LDAP_GROUP_MEMBERS_SEARCH_FILTER = "ldap_group_members_filter";
private static final String VARIABLE_LDAP_GROUPS_TARGET = "ldap_groups_target";
private static final String VARIABLE_OAUTH_ENABLED = "oauth_enabled";
private static final String VARIABLE_OAUTH_REDIRECT_URI = "oauth_redirect_uri";
private static final String VARIABLE_OAUTH_LOGOUT_REDIRECT_URI = "oauth_logout_redirect_uri";
private static final String VARIABLE_OAUTH_ACCOUNT_STATUS = "oauth_account_status";
private static final String VARIABLE_OAUTH_GROUP_MAPPING = "oauth_group_mapping";
private static final String VARIABLE_REMOTE_AUTH_NEED_CONSENT = "remote_auth_need_consent";
private static final String VARIABLE_DISABLE_PASSWORD_LOGIN = "disable_password_login";
private static final String VARIABLE_DISABLE_REGISTRATION = "disable_registration";
private static final String VARIABLE_DISABLE_REGISTRATION_UI = "disable_registration_ui";
private static final String VARIABLE_LDAP_GROUP_MAPPING_SYNC_INTERVAL = "ldap_group_mapping_sync_interval";
private static final String VARIABLE_VALIDATE_REMOTE_USER_EMAIL_VERIFIED = "validate_email_verified";
private static final String VARIABLE_MANAGED_CLOUD_REDIRECT_URI = "managed_cloud_redirect_uri";
private static final String VARIABLE_MANAGED_CLOUD_PROVIDER_NAME = "managed_cloud_provider_name";
private String KRB_AUTH = "false";
private String LDAP_AUTH = "false";
private boolean IS_KRB_ENABLED = false;
private boolean IS_LDAP_ENABLED = false;
private String LDAP_GROUP_MAPPING = "";
private String LDAP_USER_ID = "uid"; //login name
private String LDAP_USER_GIVEN_NAME = "givenName";
private String LDAP_USER_SURNAME = "sn";
private String LDAP_USER_EMAIL = "mail";
private String LDAP_USER_SEARCH_FILTER = "uid=%s";
private String LDAP_GROUP_SEARCH_FILTER = "member=%d";
private String LDAP_KRB_USER_SEARCH_FILTER = "krbPrincipalName=%s";
private String LDAP_ATTR_BINARY = "java.naming.ldap.attributes.binary";
private String LDAP_GROUP_TARGET = "cn";
private String LDAP_DYNAMIC_GROUP_TARGET = "memberOf";
private String LDAP_USER_DN_DEFAULT = "";
private String LDAP_GROUP_DN_DEFAULT = "";
private String LDAP_USER_DN = LDAP_USER_DN_DEFAULT;
private String LDAP_GROUP_DN = LDAP_GROUP_DN_DEFAULT;
private String LDAP_GROUPS_TARGET = "distinguishedName";
private String LDAP_GROUPS_SEARCH_FILTER = "(&(objectCategory=group)(cn=%c))";
private String LDAP_GROUP_MEMBERS_SEARCH_FILTER = "(&(objectCategory=user)(memberOf=%d))";
private int LDAP_ACCOUNT_STATUS = 1;
private String OAUTH_ENABLED = "false";
private boolean IS_OAUTH_ENABLED = false;
private String OAUTH_GROUP_MAPPING = "";
private String OAUTH_REDIRECT_URI_PATH = "hopsworks/callback";
private String OAUTH_LOGOUT_REDIRECT_URI_PATH = "hopsworks/";
private String OAUTH_REDIRECT_URI = OAUTH_REDIRECT_URI_PATH;
private String OAUTH_LOGOUT_REDIRECT_URI = OAUTH_LOGOUT_REDIRECT_URI_PATH;
private int OAUTH_ACCOUNT_STATUS = 1;
private long LDAP_GROUP_MAPPING_SYNC_INTERVAL = 0;
private boolean REMOTE_AUTH_NEED_CONSENT = true;
private boolean DISABLE_PASSWORD_LOGIN = false;
private boolean DISABLE_REGISTRATION = false;
private boolean VALIDATE_REMOTE_USER_EMAIL_VERIFIED = false;
private String MANAGED_CLOUD_REDIRECT_URI = "";
private String MANAGED_CLOUD_PROVIDER_NAME = "hopsworks.ai";
private void populateLDAPCache() {
KRB_AUTH = setVar(VARIABLE_KRB_AUTH, KRB_AUTH);
LDAP_AUTH = setVar(VARIABLE_LDAP_AUTH, LDAP_AUTH);
LDAP_GROUP_MAPPING = setVar(VARIABLE_LDAP_GROUP_MAPPING, LDAP_GROUP_MAPPING);
LDAP_USER_ID = setVar(VARIABLE_LDAP_USER_ID, LDAP_USER_ID);
LDAP_USER_GIVEN_NAME = setVar(VARIABLE_LDAP_USER_GIVEN_NAME, LDAP_USER_GIVEN_NAME);
LDAP_USER_SURNAME = setVar(VARIABLE_LDAP_USER_SURNAME, LDAP_USER_SURNAME);
LDAP_USER_EMAIL = setVar(VARIABLE_LDAP_USER_EMAIL, LDAP_USER_EMAIL);
LDAP_ACCOUNT_STATUS = setIntVar(VARIABLE_LDAP_ACCOUNT_STATUS, LDAP_ACCOUNT_STATUS);
LDAP_USER_SEARCH_FILTER = setVar(VARIABLE_LDAP_USER_SEARCH_FILTER, LDAP_USER_SEARCH_FILTER);
LDAP_GROUP_SEARCH_FILTER = setVar(VARIABLE_LDAP_GROUP_SEARCH_FILTER, LDAP_GROUP_SEARCH_FILTER);
LDAP_KRB_USER_SEARCH_FILTER = setVar(VARIABLE_LDAP_KRB_USER_SEARCH_FILTER, LDAP_KRB_USER_SEARCH_FILTER);
LDAP_ATTR_BINARY = setVar(VARIABLE_LDAP_ATTR_BINARY, LDAP_ATTR_BINARY);
LDAP_GROUP_TARGET = setVar(VARIABLE_LDAP_GROUP_TARGET, LDAP_GROUP_TARGET);
LDAP_DYNAMIC_GROUP_TARGET = setVar(VARIABLE_LDAP_DYNAMIC_GROUP_TARGET, LDAP_DYNAMIC_GROUP_TARGET);
LDAP_USER_DN = setStrVar(VARIABLE_LDAP_USERDN, LDAP_USER_DN_DEFAULT);
LDAP_GROUP_DN = setStrVar(VARIABLE_LDAP_GROUPDN, LDAP_GROUP_DN_DEFAULT);
LDAP_GROUPS_TARGET = setVar(VARIABLE_LDAP_GROUPS_TARGET, LDAP_GROUPS_TARGET);
LDAP_GROUPS_SEARCH_FILTER = setStrVar(VARIABLE_LDAP_GROUPS_SEARCH_FILTER, LDAP_GROUPS_SEARCH_FILTER);
LDAP_GROUP_MEMBERS_SEARCH_FILTER =
setStrVar(VARIABLE_LDAP_GROUP_MEMBERS_SEARCH_FILTER, LDAP_GROUP_MEMBERS_SEARCH_FILTER);
IS_KRB_ENABLED = setBoolVar(VARIABLE_KRB_AUTH, IS_KRB_ENABLED);
IS_LDAP_ENABLED = setBoolVar(VARIABLE_LDAP_AUTH, IS_LDAP_ENABLED);
OAUTH_ENABLED = setStrVar(VARIABLE_OAUTH_ENABLED, OAUTH_ENABLED);
IS_OAUTH_ENABLED = setBoolVar(VARIABLE_OAUTH_ENABLED, IS_OAUTH_ENABLED);
OAUTH_REDIRECT_URI = setStrVar(VARIABLE_OAUTH_REDIRECT_URI, OAUTH_REDIRECT_URI);
OAUTH_LOGOUT_REDIRECT_URI = setStrVar(VARIABLE_OAUTH_LOGOUT_REDIRECT_URI, OAUTH_LOGOUT_REDIRECT_URI);
OAUTH_ACCOUNT_STATUS = setIntVar(VARIABLE_OAUTH_ACCOUNT_STATUS, OAUTH_ACCOUNT_STATUS);
OAUTH_GROUP_MAPPING = setStrVar(VARIABLE_OAUTH_GROUP_MAPPING, OAUTH_GROUP_MAPPING);
REMOTE_AUTH_NEED_CONSENT = setBoolVar(VARIABLE_REMOTE_AUTH_NEED_CONSENT, REMOTE_AUTH_NEED_CONSENT);
DISABLE_PASSWORD_LOGIN = setBoolVar(VARIABLE_DISABLE_PASSWORD_LOGIN, DISABLE_PASSWORD_LOGIN);
DISABLE_REGISTRATION = setBoolVar(VARIABLE_DISABLE_REGISTRATION, DISABLE_REGISTRATION);
DISABLE_REGISTRATION_UI = setBoolVar(VARIABLE_DISABLE_REGISTRATION_UI, DISABLE_REGISTRATION_UI);
LDAP_GROUP_MAPPING_SYNC_INTERVAL = setLongVar(VARIABLE_LDAP_GROUP_MAPPING_SYNC_INTERVAL,
LDAP_GROUP_MAPPING_SYNC_INTERVAL);
VALIDATE_REMOTE_USER_EMAIL_VERIFIED =
setBoolVar(VARIABLE_VALIDATE_REMOTE_USER_EMAIL_VERIFIED, VALIDATE_REMOTE_USER_EMAIL_VERIFIED);
MANAGED_CLOUD_REDIRECT_URI = setStrVar(VARIABLE_MANAGED_CLOUD_REDIRECT_URI, MANAGED_CLOUD_REDIRECT_URI);
MANAGED_CLOUD_PROVIDER_NAME = setStrVar(VARIABLE_MANAGED_CLOUD_PROVIDER_NAME, MANAGED_CLOUD_PROVIDER_NAME);
}
public synchronized String getKRBAuthStatus() {
checkCache();
return KRB_AUTH;
}
public synchronized String getLDAPAuthStatus() {
checkCache();
return LDAP_AUTH;
}
public synchronized boolean isKrbEnabled() {
checkCache();
return IS_KRB_ENABLED;
}
public synchronized boolean isLdapEnabled() {
checkCache();
return IS_LDAP_ENABLED;
}
public synchronized String getLdapGroupMapping() {
checkCache();
return LDAP_GROUP_MAPPING;
}
public synchronized String getLdapUserId() {
checkCache();
return LDAP_USER_ID;
}
public synchronized String getLdapUserGivenName() {
checkCache();
return LDAP_USER_GIVEN_NAME;
}
public synchronized String getLdapUserSurname() {
checkCache();
return LDAP_USER_SURNAME;
}
public synchronized String getLdapUserMail() {
checkCache();
return LDAP_USER_EMAIL;
}
public synchronized String getLdapUserSearchFilter() {
checkCache();
return LDAP_USER_SEARCH_FILTER;
}
public synchronized String getLdapGroupSearchFilter() {
checkCache();
return LDAP_GROUP_SEARCH_FILTER;
}
public synchronized String getKrbUserSearchFilter() {
checkCache();
return LDAP_KRB_USER_SEARCH_FILTER;
}
public synchronized String getLdapAttrBinary() {
checkCache();
return LDAP_ATTR_BINARY;
}
public synchronized String getLdapGroupTarget() {
checkCache();
return LDAP_GROUP_TARGET;
}
public synchronized String getLdapDynGroupTarget() {
checkCache();
return LDAP_DYNAMIC_GROUP_TARGET;
}
public synchronized String getLdapUserDN() {
checkCache();
return LDAP_USER_DN;
}
public synchronized String getLdapGroupDN() {
checkCache();
return LDAP_GROUP_DN;
}
public synchronized int getLdapAccountStatus() {
checkCache();
return LDAP_ACCOUNT_STATUS;
}
public synchronized String getLdapGroupsTarget() {
checkCache();
return LDAP_GROUPS_TARGET;
}
public synchronized String getLdapGroupsSearchFilter() {
checkCache();
return LDAP_GROUPS_SEARCH_FILTER;
}
public synchronized String getLdapGroupMembersFilter() {
checkCache();
return LDAP_GROUP_MEMBERS_SEARCH_FILTER;
}
public synchronized String getOAuthEnabled() {
checkCache();
return OAUTH_ENABLED;
}
public synchronized boolean isOAuthEnabled() {
checkCache();
return IS_OAUTH_ENABLED;
}
public synchronized String getOAuthGroupMapping() {
checkCache();
return OAUTH_GROUP_MAPPING;
}
public void updateOAuthGroupMapping(String mapping) {
updateVariableInternal(VARIABLE_OAUTH_GROUP_MAPPING, mapping, VariablesVisibility.ADMIN);
}
public synchronized String getOauthRedirectUri(String providerName) {
return getOauthRedirectUri(providerName, false);
}
/*
 * When using OAuth via a managed cloud (hopsworks.ai) we must first redirect
 * to the managed-cloud URI, which in turn redirects back to Hopsworks.
 */
public synchronized String getOauthRedirectUri(String providerName, boolean skipManagedCloud) {
  checkCache();
  // Use the managed-cloud URI only when it is configured, not explicitly
  // skipped, and the provider is the managed-cloud provider itself.
  boolean useManagedCloud = !skipManagedCloud
      && !MANAGED_CLOUD_REDIRECT_URI.isEmpty()
      && Objects.equals(MANAGED_CLOUD_PROVIDER_NAME, providerName);
  return useManagedCloud ? MANAGED_CLOUD_REDIRECT_URI : OAUTH_REDIRECT_URI;
}
public synchronized String getManagedCloudRedirectUri() {
checkCache();
return MANAGED_CLOUD_REDIRECT_URI;
}
public synchronized String getManagedCloudProviderName() {
checkCache();
return MANAGED_CLOUD_PROVIDER_NAME;
}
public void updateOauthRedirectUri(String uri) {
updateVariableInternal(VARIABLE_OAUTH_REDIRECT_URI, uri + OAUTH_REDIRECT_URI_PATH,
VariablesVisibility.ADMIN);
}
public synchronized String getOauthLogoutRedirectUri() {
checkCache();
return OAUTH_LOGOUT_REDIRECT_URI;
}
public void addPathAndupdateOauthLogoutRedirectUri(String uri) {
updateOauthLogoutRedirectUri(uri + OAUTH_LOGOUT_REDIRECT_URI_PATH);
}
public void updateOauthLogoutRedirectUri(String uri) {
updateVariableInternal(VARIABLE_OAUTH_LOGOUT_REDIRECT_URI, uri,
VariablesVisibility.ADMIN);
}
public void updateManagedCloudRedirectUri(String uri) {
updateVariableInternal(VARIABLE_MANAGED_CLOUD_REDIRECT_URI, uri , VariablesVisibility.ADMIN);
}
public synchronized int getOAuthAccountStatus() {
checkCache();
return OAUTH_ACCOUNT_STATUS;
}
public void updateOAuthAccountStatus(Integer val) {
updateVariableInternal(VARIABLE_OAUTH_ACCOUNT_STATUS, val.toString(), VariablesVisibility.ADMIN);
}
public synchronized boolean shouldValidateEmailVerified() {
checkCache();
return VALIDATE_REMOTE_USER_EMAIL_VERIFIED;
}
public synchronized boolean remoteAuthNeedConsent() {
checkCache();
return REMOTE_AUTH_NEED_CONSENT;
}
public void updateRemoteAuthNeedConsent(boolean needConsent) {
updateVariableInternal(VARIABLE_REMOTE_AUTH_NEED_CONSENT, Boolean.toString(needConsent), VariablesVisibility.ADMIN);
}
public synchronized String getVarLdapAccountStatus() {
return VARIABLE_LDAP_ACCOUNT_STATUS;
}
public synchronized String getVarLdapGroupMapping() {
return VARIABLE_LDAP_GROUP_MAPPING;
}
public synchronized String getVarLdapUserId() {
return VARIABLE_LDAP_USER_ID;
}
public synchronized String getVarLdapUserGivenName() {
return VARIABLE_LDAP_USER_GIVEN_NAME;
}
public synchronized String getVarLdapUserSurname() {
return VARIABLE_LDAP_USER_SURNAME;
}
public synchronized String getVarLdapUserMail() {
return VARIABLE_LDAP_USER_EMAIL;
}
public synchronized String getVarLdapUserSearchFilter() {
return VARIABLE_LDAP_USER_SEARCH_FILTER;
}
public synchronized String getVarLdapGroupSearchFilter() {
return VARIABLE_LDAP_GROUP_SEARCH_FILTER;
}
public synchronized String getVarKrbUserSearchFilter() {
return VARIABLE_LDAP_KRB_USER_SEARCH_FILTER;
}
public synchronized String getVarLdapAttrBinary() {
return VARIABLE_LDAP_ATTR_BINARY;
}
public synchronized String getVarLdapGroupTarget() {
return VARIABLE_LDAP_GROUP_TARGET;
}
public synchronized String getVarLdapDynGroupTarget() {
return VARIABLE_LDAP_DYNAMIC_GROUP_TARGET;
}
public synchronized String getVarLdapUserDN() {
return VARIABLE_LDAP_USERDN;
}
public synchronized String getVarLdapGroupDN() {
return VARIABLE_LDAP_GROUPDN;
}
public synchronized boolean isPasswordLoginDisabled() {
checkCache();
return DISABLE_PASSWORD_LOGIN;
}
public synchronized boolean isRegistrationDisabled() {
checkCache();
return DISABLE_REGISTRATION;
}
public void updateRegistrationDisabled(boolean disable) {
updateVariableInternal(VARIABLE_DISABLE_REGISTRATION, Boolean.toString(disable), VariablesVisibility.ADMIN);
}
// Special flag to disable only the registration UI but not the backend.
// Used in managed cloud when user management is MANAGED by hopsworks.ai;
// the variable value is set during instance initialization by ec2-init.
private boolean DISABLE_REGISTRATION_UI = false;
/**
 * Returns whether the registration UI should be hidden. True when either
 * registration as a whole is disabled or only the UI flag is set.
 */
public synchronized boolean isRegistrationUIDisabled() {
checkCache();
return isRegistrationDisabled() || DISABLE_REGISTRATION_UI;
}
public synchronized long ldapGroupMappingSyncInterval() {
checkCache();
return LDAP_GROUP_MAPPING_SYNC_INTERVAL;
}
//----------------------------END remote user------------------------------------
// Service key rotation enabled
private static final String SERVICE_KEY_ROTATION_ENABLED_KEY = "service_key_rotation_enabled";
private boolean serviceKeyRotationEnabled = false;
public synchronized boolean isServiceKeyRotationEnabled() {
checkCache();
return serviceKeyRotationEnabled;
}
// Service key rotation interval
private static final String SERVICE_KEY_ROTATION_INTERVAL_KEY = "service_key_rotation_interval";
private String serviceKeyRotationInterval = "3d";
public synchronized String getServiceKeyRotationInterval() {
checkCache();
return serviceKeyRotationInterval;
}
// TensorBoard kill rotation interval in milliseconds (should be lower than the TensorBoardKillTimer)
private static final String TENSORBOARD_MAX_LAST_ACCESSED = "tensorboard_max_last_accessed";
private int tensorBoardMaxLastAccessed = 1140000;
public synchronized int getTensorBoardMaxLastAccessed() {
checkCache();
return tensorBoardMaxLastAccessed;
}
// TensorBoard kill rotation interval in milliseconds
private static final String SPARK_UI_LOGS_OFFSET = "spark_ui_logs_offset";
private int sparkUILogsOffset = 512000;
public synchronized int getSparkUILogsOffset() {
checkCache();
return sparkUILogsOffset;
}
/**
 * Parses the numeric magnitude out of a time configuration value such as
 * "3d" or "30s".
 *
 * @param configurationTime the raw configuration string
 * @return the numeric part of the configured time
 * @throws IllegalArgumentException if the string does not match the time pattern
 */
public Long getConfTimeValue(String configurationTime) {
  Matcher matcher = TIME_CONF_PATTERN.matcher(configurationTime.toLowerCase());
  if (matcher.matches()) {
    return Long.parseLong(matcher.group(1));
  }
  throw new IllegalArgumentException("Invalid time in configuration: " + configurationTime);
}
/**
 * Parses the time-unit suffix of a time configuration value such as "3d".
 * A value without a suffix defaults to minutes.
 *
 * @param configurationTime the raw configuration string
 * @return the TimeUnit encoded by the suffix, or MINUTES if none is given
 * @throws IllegalArgumentException if the string or its suffix is invalid
 */
public TimeUnit getConfTimeTimeUnit(String configurationTime) {
  Matcher matcher = TIME_CONF_PATTERN.matcher(configurationTime.toLowerCase());
  if (!matcher.matches()) {
    throw new IllegalArgumentException("Invalid time in configuration: " + configurationTime);
  }
  String suffix = matcher.group(2);
  if (suffix == null) {
    return TimeUnit.MINUTES;
  }
  TimeUnit unit = TIME_SUFFIXES.get(suffix.toLowerCase());
  if (unit == null) {
    throw new IllegalArgumentException("Invalid time suffix in configuration: " + configurationTime);
  }
  return unit;
}
/**
 * Splits a separator-delimited string into a set of trimmed tokens.
 *
 * @param csv the delimited input string
 * @param separator the token separator
 * @return the set of trimmed tokens
 */
private Set<String> toSetFromCsv(String csv, String separator) {
  Set<String> tokens = new HashSet<>();
  tokens.addAll(Splitter.on(separator).trimResults().splitToList(csv));
  return tokens;
}
// Libraries that should not be uninstallable
private Set<String> IMMUTABLE_PYTHON_LIBRARY_NAMES;
private static final String VARIABLE_IMMUTABLE_PYTHON_LIBRARY_NAMES = "preinstalled_python_lib_names";
private static final String DEFAULT_IMMUTABLE_PYTHON_LIBRARY_NAMES = "pydoop, pyspark, jupyterlab, sparkmagic, " +
"hdfscontents, pyjks, hops-apache-beam, pyopenssl";
public synchronized Set<String> getImmutablePythonLibraryNames() {
checkCache();
return IMMUTABLE_PYTHON_LIBRARY_NAMES;
}
private String HOPSWORKS_VERSION;
public synchronized String getHopsworksVersion() {
checkCache();
return HOPSWORKS_VERSION;
}
private String KUBE_KSERVE_TENSORFLOW_VERSION;
public synchronized String getKServeTensorflowVersion() {
checkCache();
return KUBE_KSERVE_TENSORFLOW_VERSION;
}
private String TENSORFLOW_VERSION;
public synchronized String getTensorflowVersion() {
checkCache();
return TENSORFLOW_VERSION;
}
private String OPENSEARCH_VERSION;
public synchronized String getOpenSearchVersion() {
checkCache();
return OPENSEARCH_VERSION;
}
private String KAFKA_VERSION;
public synchronized String getKafkaVersion() {
checkCache();
return KAFKA_VERSION;
}
private String DELA_VERSION;
public synchronized String getDelaVersion() {
checkCache();
return DELA_VERSION;
}
private String EPIPE_VERSION;
public synchronized String getEpipeVersion() {
checkCache();
return EPIPE_VERSION;
}
private String FLINK_VERSION;
public synchronized String getFlinkVersion() {
checkCache();
return FLINK_VERSION;
}
private String SPARK_VERSION;
public synchronized String getSparkVersion() {
checkCache();
return SPARK_VERSION;
}
private String TEZ_VERSION;
public synchronized String getTezVersion() {
checkCache();
return TEZ_VERSION;
}
private String HIVE2_VERSION;
public synchronized String getHive2Version() {
checkCache();
return HIVE2_VERSION;
}
private String LIVY_VERSION;
public synchronized String getLivyVersion() {
checkCache();
return LIVY_VERSION;
}
private String NDB_VERSION;
public synchronized String getNdbVersion() {
checkCache();
return NDB_VERSION;
}
private String FILEBEAT_VERSION;
public synchronized String getFilebeatVersion() {
checkCache();
return FILEBEAT_VERSION;
}
private String KIBANA_VERSION;
public synchronized String getKibanaVersion() {
checkCache();
return KIBANA_VERSION;
}
private String LOGSTASH_VERSION;
public synchronized String getLogstashVersion() {
checkCache();
return LOGSTASH_VERSION;
}
private String GRAFANA_VERSION;
public synchronized String getGrafanaVersion() {
checkCache();
return GRAFANA_VERSION;
}
private String ZOOKEEPER_VERSION;
public synchronized String getZookeeperVersion() {
checkCache();
return ZOOKEEPER_VERSION;
}
// -------------------------------- Kubernetes ----------------------------------------------//
private String KUBE_USER = "kubernetes";
public synchronized String getKubeUser() {
checkCache();
return KUBE_USER;
}
private String KUBE_HOPSWORKS_USER = "hopsworks";
public synchronized String getKubeHopsworksUser() {
checkCache();
return KUBE_HOPSWORKS_USER;
}
private String KUBEMASTER_URL = "https://192.168.68.102:6443";
public synchronized String getKubeMasterUrl() {
checkCache();
return KUBEMASTER_URL;
}
private String KUBE_CA_CERTFILE = "/srv/hops/certs-dir/certs/ca.cert.pem";
public synchronized String getKubeCaCertfile() {
checkCache();
return KUBE_CA_CERTFILE;
}
private String KUBE_CLIENT_KEYFILE = "/srv/hops/certs-dir/kube/hopsworks/hopsworks.key.pem";
public synchronized String getKubeClientKeyfile() {
checkCache();
return KUBE_CLIENT_KEYFILE;
}
private String KUBE_CLIENT_CERTFILE = "/srv/hops/certs-dir/kube/hopsworks/hopsworks.cert.pem";
public synchronized String getKubeClientCertfile() {
checkCache();
return KUBE_CLIENT_CERTFILE;
}
private String KUBE_CLIENT_KEYPASS = "adminpw";
public synchronized String getKubeClientKeypass() {
checkCache();
return KUBE_CLIENT_KEYPASS;
}
private String KUBE_TRUSTSTORE_PATH = "/srv/hops/certs-dir/kube/hopsworks/hopsworks__tstore.jks";
public synchronized String getKubeTruststorePath() {
checkCache();
return KUBE_TRUSTSTORE_PATH;
}
private String KUBE_TRUSTSTORE_KEY = "adminpw";
public synchronized String getKubeTruststoreKey() {
checkCache();
return KUBE_TRUSTSTORE_KEY;
}
private String KUBE_KEYSTORE_PATH = "/srv/hops/certs-dir/kube/hopsworks/hopsworks__kstore.jks";
public synchronized String getKubeKeystorePath() {
checkCache();
return KUBE_KEYSTORE_PATH;
}
private String KUBE_KEYSTORE_KEY = "adminpw";
public synchronized String getKubeKeystoreKey() {
checkCache();
return KUBE_KEYSTORE_KEY;
}
private String KUBE_PULL_POLICY = "Always";
public synchronized String getKubeImagePullPolicy() {
checkCache();
return KUBE_PULL_POLICY;
}
private Integer KUBE_API_MAX_ATTEMPTS = 12;
public synchronized Integer getKubeAPIMaxAttempts() {
checkCache();
return KUBE_API_MAX_ATTEMPTS;
}
private Boolean ONLINE_FEATURESTORE = false;
public synchronized Boolean isOnlineFeaturestore() {
checkCache();
return ONLINE_FEATURESTORE;
}
private String ONLINE_FEATURESTORE_TS = "";
public synchronized String getOnlineFeatureStoreTableSpace() {
checkCache();
return ONLINE_FEATURESTORE_TS;
}
private Integer ONLINEFS_THREAD_NUMBER = 10;
public synchronized Integer getOnlineFsThreadNumber() {
checkCache();
return ONLINEFS_THREAD_NUMBER;
}
private Integer KUBE_DOCKER_MAX_MEMORY_ALLOCATION = 8192;
public synchronized Integer getKubeDockerMaxMemoryAllocation() {
checkCache();
return KUBE_DOCKER_MAX_MEMORY_ALLOCATION;
}
private Double KUBE_DOCKER_MAX_CORES_ALLOCATION = 4.0;
public synchronized Double getKubeDockerMaxCoresAllocation() {
checkCache();
return KUBE_DOCKER_MAX_CORES_ALLOCATION;
}
private Integer KUBE_DOCKER_MAX_GPUS_ALLOCATION = 1;
public synchronized Integer getKubeDockerMaxGpusAllocation() {
checkCache();
return KUBE_DOCKER_MAX_GPUS_ALLOCATION;
}
private Boolean KUBE_INSTALLED = false;
public synchronized Boolean getKubeInstalled() {
checkCache();
return KUBE_INSTALLED;
}
private Boolean KUBE_KSERVE_INSTALLED = false;
public synchronized Boolean getKubeKServeInstalled() {
checkCache();
return KUBE_KSERVE_INSTALLED;
}
private String KUBE_SERVING_NODE_LABELS = "";
public synchronized String getKubeServingNodeLabels() {
checkCache();
return KUBE_SERVING_NODE_LABELS;
}
private String KUBE_SERVING_NODE_TOLERATIONS = "";
public synchronized String getKubeServingNodeTolerations() {
checkCache();
return KUBE_SERVING_NODE_TOLERATIONS;
}
private Integer KUBE_SERVING_MAX_MEMORY_ALLOCATION = -1; // no upper limit
public synchronized Integer getKubeServingMaxMemoryAllocation() {
checkCache();
return KUBE_SERVING_MAX_MEMORY_ALLOCATION;
}
private Double KUBE_SERVING_MAX_CORES_ALLOCATION = -1.0; // no upper limit
public synchronized Double getKubeServingMaxCoresAllocation() {
checkCache();
return KUBE_SERVING_MAX_CORES_ALLOCATION;
}
private Integer KUBE_SERVING_MAX_GPUS_ALLOCATION = -1; // no upper limit
public synchronized Integer getKubeServingMaxGpusAllocation() {
checkCache();
return KUBE_SERVING_MAX_GPUS_ALLOCATION;
}
// Maximum number of instances. Possible values >=-1 where -1 means no limit.
private Integer KUBE_SERVING_MAX_NUM_INSTANCES = -1;
public synchronized Integer getKubeServingMaxNumInstances() {
checkCache();
return KUBE_SERVING_MAX_NUM_INSTANCES;
}
// Minimum number of instances. Possible values: >=-1 where -1 means no limit and 0 enforces scale-to-zero
// capabilities when available
private Integer KUBE_SERVING_MIN_NUM_INSTANCES = -1;
public synchronized Integer getKubeServingMinNumInstances() {
checkCache();
return KUBE_SERVING_MIN_NUM_INSTANCES;
}
private String KUBE_KNATIVE_DOMAIN_NAME = "";
public synchronized String getKubeKnativeDomainName() {
checkCache();
return KUBE_KNATIVE_DOMAIN_NAME;
}
//comma seperated list of tainted nodes
private String KUBE_TAINTED_NODES = "";
public synchronized String getKubeTaintedNodes() {
checkCache();
return KUBE_TAINTED_NODES;
}
private String KUBE_TAINTED_NODES_MONITOR_INTERVAL = "30m";
public synchronized String getKubeTaintedMonitorInterval() {
checkCache();
return KUBE_TAINTED_NODES_MONITOR_INTERVAL;
}
private Boolean HOPSWORKS_ENTERPRISE = false;
public synchronized Boolean getHopsworksEnterprise() {
checkCache();
return HOPSWORKS_ENTERPRISE;
}
private String SERVING_MONITOR_INT = "30s";
public synchronized String getServingMonitorInt() {
checkCache();
return SERVING_MONITOR_INT;
}
private int SERVING_CONNECTION_POOL_SIZE = 40;
public synchronized int getServingConnectionPoolSize() {
checkCache();
return SERVING_CONNECTION_POOL_SIZE;
}
private int SERVING_MAX_ROUTE_CONNECTIONS = 10;
public synchronized int getServingMaxRouteConnections() {
checkCache();
return SERVING_MAX_ROUTE_CONNECTIONS;
}
private int TENSORBOARD_MAX_RELOAD_THREADS = 1;
public synchronized int getTensorBoardMaxReloadThreads() {
checkCache();
return TENSORBOARD_MAX_RELOAD_THREADS;
}
private String JUPYTER_HOST = "localhost";
public synchronized String getJupyterHost() {
checkCache();
return JUPYTER_HOST;
}
//These dependencies were collected by installing jupyterlab in a new environment
public static List<String> JUPYTER_DEPENDENCIES = Arrays.asList("urllib3", "chardet", "idna", "requests", "attrs",
"zipp", "importlib-metadata", "pyrsistent", "six", "jsonschema", "prometheus-client", "pycparser",
"cffi", "argon2-cffi", "pyzmq", "ipython-genutils", "decorator", "traitlets", "jupyter-core", "Send2Trash",
"tornado", "pygments", "pickleshare", "wcwidth", "prompt-toolkit", "backcall", "ptyprocess", "pexpect",
"parso", "jedi", "ipython", "python-dateutil", "jupyter-client", "ipykernel", "terminado", "MarkupSafe",
"jinja2", "mistune", "defusedxml", "jupyterlab-pygments", "pandocfilters", "entrypoints", "pyparsing",
"packaging", "webencodings", "bleach", "testpath", "nbformat", "nest-asyncio", "async-generator",
"nbclient", "nbconvert", "notebook", "json5", "jupyterlab-server", "jupyterlab", "sparkmagic");
private String JWT_SIGNATURE_ALGORITHM = "HS512";
private String JWT_SIGNING_KEY_NAME = "apiKey";
private String JWT_ISSUER = "hopsworks@logicalclocks.com";
private long JWT_LIFETIME_MS = 1800000l;
private int JWT_EXP_LEEWAY_SEC = 900;
private long SERVICE_JWT_LIFETIME_MS = 86400000l;
private int SERVICE_JWT_EXP_LEEWAY_SEC = 43200;
public synchronized String getJWTSignatureAlg() {
checkCache();
return JWT_SIGNATURE_ALGORITHM;
}
public synchronized long getJWTLifetimeMs() {
checkCache();
return JWT_LIFETIME_MS;
}
public synchronized int getJWTExpLeewaySec() {
checkCache();
return JWT_EXP_LEEWAY_SEC;
}
public synchronized long getJWTLifetimeMsPlusLeeway() {
checkCache();
return JWT_LIFETIME_MS + (JWT_EXP_LEEWAY_SEC * 1000L);
}
public synchronized long getServiceJWTLifetimeMS() {
checkCache();
return SERVICE_JWT_LIFETIME_MS;
}
public synchronized int getServiceJWTExpLeewaySec() {
checkCache();
return SERVICE_JWT_EXP_LEEWAY_SEC;
}
public synchronized String getJWTSigningKeyName() {
checkCache();
return JWT_SIGNING_KEY_NAME;
}
public synchronized String getJWTIssuer() {
checkCache();
return JWT_ISSUER;
}
private String SERVICE_MASTER_JWT = "";
public synchronized String getServiceMasterJWT() {
checkCache();
return SERVICE_MASTER_JWT;
}
public synchronized void setServiceMasterJWT(String JWT) {
updateVariableInternal(VARIABLE_SERVICE_MASTER_JWT, JWT, VariablesVisibility.ADMIN);
em.flush();
SERVICE_MASTER_JWT = JWT;
}
private final int NUM_OF_SERVICE_RENEW_TOKENS = 5;
private final static String SERVICE_RENEW_TOKEN_VARIABLE_TEMPLATE = "service_renew_token_%d";
private String[] RENEW_TOKENS = new String[0];
public synchronized String[] getServiceRenewJWTs() {
checkCache();
return RENEW_TOKENS;
}
public synchronized void setServiceRenewJWTs(String[] renewTokens) {
for (int i = 0; i < renewTokens.length; i++) {
String variableKey = String.format(SERVICE_RENEW_TOKEN_VARIABLE_TEMPLATE, i);
updateVariableInternal(variableKey, renewTokens[i], VariablesVisibility.ADMIN);
}
RENEW_TOKENS = renewTokens;
}
private int CONNECTION_KEEPALIVE_TIMEOUT = 30;
public synchronized int getConnectionKeepAliveTimeout() {
checkCache();
return CONNECTION_KEEPALIVE_TIMEOUT;
}
private int MAGGY_CLEANUP_INTERVAL = 24 * 60 * 1000;
public synchronized int getMaggyCleanupInterval() {
checkCache();
return MAGGY_CLEANUP_INTERVAL;
}
private String HIVE_CONF_PATH = "/srv/hops/apache-hive/conf/hive-site.xml";
public synchronized String getHiveConfPath() {
checkCache();
return HIVE_CONF_PATH;
}
private String FS_PY_JOB_UTIL_PATH = "hdfs:///user/spark/hsfs_util-2.1.0-SNAPSHOT.py";
public synchronized String getFSPyJobUtilPath() {
checkCache();
return FS_PY_JOB_UTIL_PATH;
}
private String FS_JAVA_JOB_UTIL_PATH = "hdfs:///user/spark/hsfs-utils-2.1.0-SNAPSHOT.jar";
public synchronized String getFSJavaJobUtilPath() {
checkCache();
return FS_JAVA_JOB_UTIL_PATH;
}
private long FEATURESTORE_DB_DEFAULT_QUOTA = -1;
public synchronized long getFeaturestoreDbDefaultQuota() {
checkCache();
return FEATURESTORE_DB_DEFAULT_QUOTA;
}
private String FEATURESTORE_DB_DEFAULT_STORAGE_FORMAT = "ORC";
public synchronized String getFeaturestoreDbDefaultStorageFormat() {
checkCache();
return FEATURESTORE_DB_DEFAULT_STORAGE_FORMAT;
}
private Boolean LOCALHOST = false;
public synchronized Boolean isLocalHost() {
checkCache();
return LOCALHOST;
}
private String CLOUD = "";
public synchronized String getCloudProvider() {
checkCache();
return CLOUD;
}
public Boolean isCloud() {
return !getCloudProvider().isEmpty();
}
public synchronized CLOUD_TYPES getCloudType() {
checkCache();
if (CLOUD.isEmpty()) {
return CLOUD_TYPES.NONE;
}
return CLOUD_TYPES.fromString(CLOUD);
}
public static enum CLOUD_TYPES {
NONE,
AWS,
GCP,
AZURE;
public static CLOUD_TYPES fromString(String type) {
return CLOUD_TYPES.valueOf(type.toUpperCase());
}
}
public Boolean isHopsUtilInsecure() {
return isCloud() || isLocalHost();
}
private String FEATURESTORE_JDBC_URL = "jdbc:mysql://onlinefs.mysql.service.consul:3306/";
public synchronized String getFeaturestoreJdbcUrl() {
checkCache();
return FEATURESTORE_JDBC_URL;
}
private Boolean REQUESTS_VERIFY = false;
  /**
   * Whether hops-util-py should verify HTTPS certificates when talking to Hopsworks.
   * Accepted configuration values are "true" and "false".
   */
public synchronized Boolean getRequestsVerify() {
checkCache();
return REQUESTS_VERIFY;
}
private Boolean KIBANA_HTTPS_ENABELED = false;
public synchronized Boolean isKibanaHTTPSEnabled() {
checkCache();
return KIBANA_HTTPS_ENABELED;
}
private Boolean KIBANA_MULTI_TENANCY_ENABELED = false;
public synchronized Boolean isKibanaMultiTenancyEnabled() {
checkCache();
return KIBANA_MULTI_TENANCY_ENABELED;
}
public static final int OPENSEARCH_KIBANA_NO_CONNECTIONS = 5;
//-------------------------------- PROVENANCE ----------------------------------------------//
private static final String VARIABLE_PROVENANCE_TYPE = "provenance_type"; //disabled/meta/min/full
private static final String VARIABLE_PROVENANCE_ARCHIVE_SIZE = "provenance_archive_size";
private static final String VARIABLE_PROVENANCE_GRAPH_MAX_SIZE = "provenance_graph_max_size";
private static final String VARIABLE_PROVENANCE_ARCHIVE_DELAY = "provenance_archive_delay";
private static final String VARIABLE_PROVENANCE_CLEANUP_SIZE = "provenance_cleanup_size";
private static final String VARIABLE_PROVENANCE_CLEANER_PERIOD = "provenance_cleaner_period";
public static final String PROV_FILE_INDEX_SUFFIX = "__file_prov";
private Provenance.Type PROVENANCE_TYPE = Provenance.Type.MIN;
private String PROVENANCE_TYPE_S = PROVENANCE_TYPE.name();
private Integer PROVENANCE_CLEANUP_SIZE = 5;
private Integer PROVENANCE_ARCHIVE_SIZE = 100;
private Integer PROVENANCE_GRAPH_MAX_SIZE = 10000;
private Long PROVENANCE_CLEANER_PERIOD = 3600L; //1h in s
private Long PROVENANCE_ARCHIVE_DELAY = 0l;
private Integer PROVENANCE_OPENSEARCH_ARCHIVAL_PAGE_SIZE = 50;
public static final Integer PROVENANCE_OPENSEARCH_PAGE_DEFAULT_SIZE = 1000;
public String getProvFileIndex(Long projectIId) {
return projectIId.toString() + Settings.PROV_FILE_INDEX_SUFFIX;
}
  // Loads the provenance-related settings from the variables table into the cached
  // fields, keeping the compiled-in defaults for any variable that is absent.
  private void populateProvenanceCache() {
    PROVENANCE_TYPE_S = setStrVar(VARIABLE_PROVENANCE_TYPE, PROVENANCE_TYPE_S);
    try {
      PROVENANCE_TYPE = ProvTypeDTO.provTypeFromString(PROVENANCE_TYPE_S);
    } catch(ProvenanceException e) {
      // Unparsable value in the database: warn and fall back to MIN rather than failing startup.
      LOGGER.log(Level.WARNING, "unknown prov type:" + PROVENANCE_TYPE_S + ", using default");
      PROVENANCE_TYPE = Provenance.Type.MIN;
      PROVENANCE_TYPE_S = PROVENANCE_TYPE.name();
    }
    PROVENANCE_ARCHIVE_SIZE = setIntVar(VARIABLE_PROVENANCE_ARCHIVE_SIZE, PROVENANCE_ARCHIVE_SIZE);
    PROVENANCE_GRAPH_MAX_SIZE = setIntVar(VARIABLE_PROVENANCE_GRAPH_MAX_SIZE, PROVENANCE_GRAPH_MAX_SIZE);
    PROVENANCE_ARCHIVE_DELAY = setLongVar(VARIABLE_PROVENANCE_ARCHIVE_DELAY, PROVENANCE_ARCHIVE_DELAY);
    PROVENANCE_CLEANUP_SIZE = setIntVar(VARIABLE_PROVENANCE_CLEANUP_SIZE, PROVENANCE_CLEANUP_SIZE);
    PROVENANCE_CLEANER_PERIOD = setLongVar(VARIABLE_PROVENANCE_CLEANER_PERIOD, PROVENANCE_CLEANER_PERIOD);
  }
public synchronized Provenance.Type getProvType() {
checkCache();
return PROVENANCE_TYPE;
}
public synchronized Integer getProvArchiveSize() {
checkCache();
return PROVENANCE_ARCHIVE_SIZE;
}
public synchronized void setProvArchiveSize(Integer size) {
if(!PROVENANCE_ARCHIVE_SIZE.equals(size)) {
em.merge(new Variables(VARIABLE_PROVENANCE_ARCHIVE_SIZE, size.toString()));
PROVENANCE_ARCHIVE_SIZE = size;
}
}
public synchronized Integer getProvenanceGraphMaxSize() {
checkCache();
return PROVENANCE_GRAPH_MAX_SIZE;
}
public synchronized void setProvenanceGraphMaxSize(Integer size) {
if(!PROVENANCE_GRAPH_MAX_SIZE.equals(size)) {
em.merge(new Variables(VARIABLE_PROVENANCE_GRAPH_MAX_SIZE, size.toString()));
PROVENANCE_GRAPH_MAX_SIZE = size;
}
}
public synchronized Long getProvArchiveDelay() {
checkCache();
return PROVENANCE_ARCHIVE_DELAY;
}
public synchronized void setProvArchiveDelay(Long delay) {
if(!PROVENANCE_ARCHIVE_DELAY.equals(delay)) {
em.merge(new Variables(VARIABLE_PROVENANCE_ARCHIVE_DELAY, delay.toString()));
PROVENANCE_ARCHIVE_DELAY = delay;
}
}
public synchronized Integer getProvCleanupSize() {
checkCache();
return PROVENANCE_CLEANUP_SIZE;
}
public synchronized Integer getProvOpenSearchArchivalPageSize() {
checkCache();
return PROVENANCE_OPENSEARCH_ARCHIVAL_PAGE_SIZE;
}
public synchronized Long getProvCleanerPeriod() {
checkCache();
return PROVENANCE_CLEANER_PERIOD;
}
public synchronized void setProvCleanerPeriod(Long period) {
if(!PROVENANCE_CLEANER_PERIOD.equals(period)) {
em.merge(new Variables(VARIABLE_PROVENANCE_CLEANER_PERIOD, period.toString()));
PROVENANCE_CLEANER_PERIOD = period;
}
}
//------------------------------ END PROVENANCE --------------------------------------------//
private String CLIENT_PATH = "/srv/hops/client.tar.gz";
public synchronized String getClientPath() {
checkCache();
return CLIENT_PATH;
}
// CLOUD
private String CLOUD_EVENTS_ENDPOINT = "";
public synchronized String getCloudEventsEndPoint() {
checkCache();
return CLOUD_EVENTS_ENDPOINT;
}
private String CLOUD_EVENTS_ENDPOINT_API_KEY = "";
public synchronized String getCloudEventsEndPointAPIKey() {
checkCache();
return CLOUD_EVENTS_ENDPOINT_API_KEY;
}
private int FG_PREVIEW_LIMIT = 100;
public synchronized int getFGPreviewLimit() {
checkCache();
return FG_PREVIEW_LIMIT;
}
public static final String FEATURESTORE_INDEX = "featurestore";
public static final String FEATURESTORE_PROJECT_ID_FIELD = "project_id";
//-----------------------------YARN DOCKER-------------------------------------------------//
private static String YARN_RUNTIME = "docker";
public synchronized String getYarnRuntime(){
checkCache();
return YARN_RUNTIME;
}
//----------------------------YARN NODEMANAGER--------------------------------------------//
private boolean checkNodemanagersStatus = false;
public synchronized boolean isCheckingForNodemanagerStatusEnabled() {
checkCache();
return checkNodemanagersStatus;
}
private static String DOCKER_MOUNTS =
"/srv/hops/hadoop/etc/hadoop,/srv/hops/spark,/srv/hops/flink";
public synchronized String getDockerMounts() {
checkCache();
String result = "";
for(String mountPoint: DOCKER_MOUNTS.split(",")){
result += mountPoint + ":" + mountPoint + ":ro,";
}
return result.substring(0, result.length() - 1);
}
private String DOCKER_BASE_IMAGE_PYTHON_NAME = "python38";
public synchronized String getBaseDockerImagePythonName() {
checkCache();
if(isManagedDockerRegistry()){
return DOCKER_BASE_NON_PYTHON_IMAGE + ":" + DOCKER_BASE_IMAGE_PYTHON_NAME +
"_" + HOPSWORKS_VERSION;
}else{
return DOCKER_BASE_IMAGE_PYTHON_NAME + ":" + HOPSWORKS_VERSION;
}
}
private String DOCKER_BASE_IMAGE_PYTHON_VERSION = "3.7";
public synchronized String getDockerBaseImagePythonVersion() {
checkCache();
return DOCKER_BASE_IMAGE_PYTHON_VERSION;
}
private final static String DOCKER_BASE_NON_PYTHON_IMAGE = "base";
public synchronized String getBaseNonPythonDockerImage() {
return DOCKER_BASE_NON_PYTHON_IMAGE + ":" + HOPSWORKS_VERSION;
}
private long YARN_APP_UID = 1235L;
public long getYarnAppUID() {
checkCache();
return YARN_APP_UID;
}
//-----------------------------END YARN DOCKER-------------------------------------------------//
private KubeType KUBE_TYPE = KubeType.Local;
public synchronized KubeType getKubeType() {
checkCache();
return KUBE_TYPE;
}
private String DOCKER_NAMESPACE = "";
public synchronized String getDockerNamespace(){
checkCache();
return DOCKER_NAMESPACE;
}
private Boolean MANAGED_DOCKER_REGISTRY = false;
public synchronized Boolean isManagedDockerRegistry(){
checkCache();
return MANAGED_DOCKER_REGISTRY && isCloud();
}
public synchronized String getBaseNonPythonDockerImageWithNoTag(){
checkCache();
return DOCKER_BASE_NON_PYTHON_IMAGE;
}
private String DOCKER_JOB_MOUNTS_LIST;
public synchronized List<String> getDockerMountsList(){
checkCache();
return Arrays.asList(DOCKER_JOB_MOUNTS_LIST.split(","));
}
private Boolean DOCKER_JOB_MOUNT_ALLOWED = true;
public synchronized Boolean isDockerJobMountAllowed(){
checkCache();
return DOCKER_JOB_MOUNT_ALLOWED;
}
private Boolean DOCKER_JOB_UID_STRICT = true;
public synchronized Boolean isDockerJobUidStrict(){
checkCache();
return DOCKER_JOB_UID_STRICT;
}
private int EXECUTIONS_PER_JOB_LIMIT = 10000;
public synchronized int getExecutionsPerJobLimit(){
checkCache();
return EXECUTIONS_PER_JOB_LIMIT;
}
private int EXECUTIONS_CLEANER_BATCH_SIZE = 1000;
public synchronized int getExecutionsCleanerBatchSize(){
checkCache();
return EXECUTIONS_CLEANER_BATCH_SIZE;
}
private int EXECUTIONS_CLEANER_INTERVAL_MS = 600000;
public synchronized int getExecutionsCleanerInterval(){
checkCache();
return EXECUTIONS_CLEANER_INTERVAL_MS;
}
private int MAX_ENV_YML_BYTE_SIZE = 20000;
public synchronized int getMaxEnvYmlByteSize() {
checkCache();
return MAX_ENV_YML_BYTE_SIZE;
}
private int LIVY_STARTUP_TIMEOUT = 240;
public synchronized int getLivyStartupTimeout() {
checkCache();
return LIVY_STARTUP_TIMEOUT;
}
private boolean USER_SEARCH_ENABLED = true;
public synchronized boolean isUserSearchEnabled() {
checkCache();
return USER_SEARCH_ENABLED;
}
/*
* When a user try to connect for the first time with OAuth or LDAP
* do not create the user if it does not bellong to any group.
* This is to avoid having users that belong to no group poluting the users table
*/
private boolean REJECT_REMOTE_USER_NO_GROUP = false;
public synchronized boolean getRejectRemoteNoGroup() {
checkCache();
return REJECT_REMOTE_USER_NO_GROUP;
}
public void updateRejectRemoteNoGroup(boolean reject) {
updateVariableInternal(VARIABLE_REJECT_REMOTE_USER_NO_GROUP, Boolean.toString(reject), VariablesVisibility.ADMIN);
}
private boolean SKIP_NAMESPACE_CREATION = false;
public synchronized boolean shouldSkipNamespaceCreation() {
checkCache();
return SKIP_NAMESPACE_CREATION;
}
private long QUOTAS_ONLINE_ENABLED_FEATUREGROUPS = -1L;
public synchronized long getQuotasOnlineEnabledFeaturegroups() {
checkCache();
return QUOTAS_ONLINE_ENABLED_FEATUREGROUPS;
}
private long QUOTAS_ONLINE_DISABLED_FEATUREGROUPS = -1L;
public synchronized long getQuotasOnlineDisabledFeaturegroups() {
checkCache();
return QUOTAS_ONLINE_DISABLED_FEATUREGROUPS;
}
private long QUOTAS_TRAINING_DATASETS = -1L;
public synchronized long getQuotasTrainingDatasets() {
checkCache();
return QUOTAS_TRAINING_DATASETS;
}
private long QUOTAS_RUNNING_MODEL_DEPLOYMENTS = -1L;
public synchronized long getQuotasRunningModelDeployments() {
checkCache();
return QUOTAS_RUNNING_MODEL_DEPLOYMENTS;
}
private long QUOTAS_TOTAL_MODEL_DEPLOYMENTS = -1L;
public synchronized long getQuotasTotalModelDeployments() {
checkCache();
return QUOTAS_TOTAL_MODEL_DEPLOYMENTS;
}
private long QUOTAS_MAX_PARALLEL_EXECUTIONS = -1L;
public synchronized long getQuotasMaxParallelExecutions() {
checkCache();
return QUOTAS_MAX_PARALLEL_EXECUTIONS;
}
}
|
hopsworks-common/src/main/java/io/hops/hopsworks/common/util/Settings.java
|
/*
* Changes to this file committed after and not including commit-id: ccc0d2c5f9a5ac661e60e6eaf138de7889928b8b
* are released under the following license:
*
* This file is part of Hopsworks
* Copyright (C) 2018, Logical Clocks AB. All rights reserved
*
* Hopsworks is free software: you can redistribute it and/or modify it under the terms of
* the GNU Affero General Public License as published by the Free Software Foundation,
* either version 3 of the License, or (at your option) any later version.
*
* Hopsworks is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
* without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
* PURPOSE. See the GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License along with this program.
* If not, see <https://www.gnu.org/licenses/>.
*
* Changes to this file committed before and including commit-id: ccc0d2c5f9a5ac661e60e6eaf138de7889928b8b
* are released under the following license:
*
* Copyright (C) 2013 - 2018, Logical Clocks AB and RISE SICS AB. All rights reserved
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of this
* software and associated documentation files (the "Software"), to deal in the Software
* without restriction, including without limitation the rights to use, copy, modify, merge,
* publish, distribute, sublicense, and/or sell copies of the Software, and to permit
* persons to whom the Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all copies or
* substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING
* BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
* DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package io.hops.hopsworks.common.util;
import com.google.common.base.Splitter;
import com.google.common.base.Strings;
import io.hops.hopsworks.common.dao.user.UserFacade;
import io.hops.hopsworks.common.dataset.util.CompressionInfo;
import io.hops.hopsworks.common.dela.AddressJSON;
import io.hops.hopsworks.common.dela.DelaClientType;
import io.hops.hopsworks.common.hdfs.DistributedFileSystemOps;
import io.hops.hopsworks.common.provenance.core.Provenance;
import io.hops.hopsworks.common.provenance.core.dto.ProvTypeDTO;
import io.hops.hopsworks.exceptions.ProvenanceException;
import io.hops.hopsworks.persistence.entity.project.PaymentType;
import io.hops.hopsworks.persistence.entity.user.Users;
import io.hops.hopsworks.persistence.entity.util.Variables;
import io.hops.hopsworks.persistence.entity.util.VariablesVisibility;
import io.hops.hopsworks.restutils.RESTLogLevel;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import javax.ejb.ConcurrencyManagement;
import javax.ejb.ConcurrencyManagementType;
import javax.ejb.EJB;
import javax.ejb.Singleton;
import javax.ejb.Startup;
import javax.persistence.EntityManager;
import javax.persistence.NoResultException;
import javax.persistence.PersistenceContext;
import java.io.File;
import java.io.IOException;
import java.io.Serializable;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLClassLoader;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.nio.file.Paths;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.Set;
import java.util.StringTokenizer;
import java.util.concurrent.TimeUnit;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
@Singleton
@Startup
@ConcurrencyManagement(ConcurrencyManagementType.BEAN)
public class Settings implements Serializable {
private static final Logger LOGGER = Logger.getLogger(Settings.class.
getName());
@EJB
private UserFacade userFacade;
@EJB
private OSProcessExecutor osProcessExecutor;
@PersistenceContext(unitName = "kthfsPU")
private EntityManager em;
private static final Map<String, TimeUnit> TIME_SUFFIXES;
static {
TIME_SUFFIXES = new HashMap<>(5);
TIME_SUFFIXES.put("ms", TimeUnit.MILLISECONDS);
TIME_SUFFIXES.put("s", TimeUnit.SECONDS);
TIME_SUFFIXES.put("m", TimeUnit.MINUTES);
TIME_SUFFIXES.put("h", TimeUnit.HOURS);
TIME_SUFFIXES.put("d", TimeUnit.DAYS);
}
private static final Pattern TIME_CONF_PATTERN = Pattern.compile("([0-9]+)([a-z]+)?");
public static final String AGENT_EMAIL = "kagent@hops.io";
/**
* Global Variables taken from the DB
*/
private static final String VARIABLE_ADMIN_EMAIL = "admin_email";
private static final String VARIABLE_PYPI_REST_ENDPOINT = "pypi_rest_endpoint";
private static final String VARIABLE_PYPI_INDEXER_TIMER_INTERVAL = "pypi_indexer_timer_interval";
private static final String VARIABLE_PYPI_INDEXER_TIMER_ENABLED = "pypi_indexer_timer_enabled";
private static final String VARIABLE_PYPI_SIMPLE_ENDPOINT = "pypi_simple_endpoint";
private static final String VARIABLE_PYTHON_LIBRARY_UPDATES_MONITOR_INTERVAL =
"python_library_updates_monitor_interval";
private static final String VARIABLE_PYTHON_KERNEL = "python_kernel";
private static final String VARIABLE_HADOOP_VERSION = "hadoop_version";
private static final String VARIABLE_KIBANA_IP = "kibana_ip";
private static final String VARIABLE_LOCALHOST = "localhost";
private static final String VARIABLE_REQUESTS_VERIFY = "requests_verify";
private static final String VARIABLE_CLOUD= "cloud";
private static final String VARIABLE_OPENSEARCH_IP = "elastic_ip";
// Row keys of the "variables" database table. Each VARIABLE_* constant is the
// id of one configuration row; populateCache() reads them through the
// set*Var(...) helpers below (which call findById). The literal strings are a
// contract with the installer/database — never rename the values.
private static final String VARIABLE_OPENSEARCH_PORT = "elastic_port";
private static final String VARIABLE_OPENSEARCH_REST_PORT = "elastic_rest_port";
private static final String VARIABLE_OPENSEARCH_LOGS_INDEX_EXPIRATION = "elastic_logs_index_expiration";
private static final String VARIABLE_SPARK_USER = "spark_user";
private static final String VARIABLE_HDFS_SUPERUSER = "hdfs_user";
private static final String VARIABLE_HOPSWORKS_USER = "hopsworks_user";
private static final String VARIABLE_JUPYTER_GROUP = "jupyter_group";
private static final String VARIABLE_STAGING_DIR = "staging_dir";
private static final String VARIABLE_AIRFLOW_DIR = "airflow_dir";
private static final String VARIABLE_JUPYTER_DIR = "jupyter_dir";
private static final String VARIABLE_JUPYTER_WS_PING_INTERVAL = "jupyter_ws_ping_interval";
private static final String VARIABLE_SPARK_DIR = "spark_dir";
private static final String VARIABLE_FLINK_DIR = "flink_dir";
private static final String VARIABLE_FLINK_USER = "flink_user";
private static final String VARIABLE_HADOOP_DIR = "hadoop_dir";
private static final String VARIABLE_HOPSWORKS_DIR = "hopsworks_dir";
private static final String VARIABLE_SUDOERS_DIR = "sudoers_dir";
private static final String VARIABLE_YARN_DEFAULT_QUOTA = "yarn_default_quota";
private static final String VARIABLE_HDFS_DEFAULT_QUOTA = "hdfs_default_quota";
private static final String VARIABLE_PROJECT_PAYMENT_TYPE = "yarn_default_payment_type";
private static final String VARIABLE_HDFS_BASE_STORAGE_POLICY = "hdfs_base_storage_policy";
private static final String VARIABLE_HDFS_LOG_STORAGE_POLICY = "hdfs_log_storage_policy";
private static final String VARIABLE_MAX_NUM_PROJ_PER_USER
    = "max_num_proj_per_user";
private static final String VARIABLE_RESERVED_PROJECT_NAMES = "reserved_project_names";
private static final String VARIABLE_HOPSWORKS_ENTERPRISE = "hopsworks_enterprise";
private static final String VARIABLE_SPARK_EXECUTOR_MIN_MEMORY = "spark_executor_min_memory";
// HIVE configuration variables
private static final String VARIABLE_HIVE_SUPERUSER = "hive_superuser";
private static final String VARIABLE_HIVE_WAREHOUSE = "hive_warehouse";
private static final String VARIABLE_HIVE_SCRATCHDIR = "hive_scratchdir";
private static final String VARIABLE_HIVE_SCRATCHDIR_DELAY = "hive_scratchdir_delay";
private static final String VARIABLE_HIVE_SCRATCHDIR_CLEANER_INTERVAL = "hive_scratchdir_cleaner_interval";
private static final String VARIABLE_HIVE_DEFAULT_QUOTA = "hive_default_quota";
private static final String VARIABLE_TWOFACTOR_AUTH = "twofactor_auth";
// NOTE(review): hyphenated key, unlike the snake_case keys around it; the DB
// row uses this exact string, so it cannot be "fixed" here.
private static final String VARIABLE_TWOFACTOR_EXCLUD = "twofactor-excluded-groups";
private static final String VARIABLE_KAFKA_DIR = "kafka_dir";
private static final String VARIABLE_KAFKA_USER = "kafka_user";
private static final String VARIABLE_KAFKA_MAX_NUM_TOPICS = "kafka_max_num_topics";
private static final String VARIABLE_FILE_PREVIEW_IMAGE_SIZE
    = "file_preview_image_size";
private static final String VARIABLE_FILE_PREVIEW_TXT_SIZE
    = "file_preview_txt_size";
private static final String VARIABLE_HOPS_RPC_TLS = "hops_rpc_tls";
// NOTE(review): public file-name constant, not a variables-table key — confirm usages.
public static final String ERASURE_CODING_CONFIG = "erasure-coding-site.xml";
private static final String VARIABLE_KAFKA_NUM_PARTITIONS
    = "kafka_num_partitions";
private static final String VARIABLE_KAFKA_NUM_REPLICAS = "kafka_num_replicas";
private static final String VARIABLE_HOPSWORKS_SSL_MASTER_PASSWORD = "hopsworks_master_password";
private static final String VARIABLE_ANACONDA_DIR = "anaconda_dir";
private static final String VARIABLE_ANACONDA_ENABLED = "anaconda_enabled";
private static final String VARIABLE_ANACONDA_DEFAULT_REPO = "conda_default_repo";
private static final String VARIABLE_DOWNLOAD_ALLOWED = "download_allowed";
private static final String VARIABLE_HOPSEXAMPLES_VERSION = "hopsexamples_version";
private static final String VARIABLE_KAGENT_USER = "kagent_user";
private static final String VARIABLE_KAGENT_LIVENESS_MONITOR_ENABLED = "kagent_liveness_monitor_enabled";
private static final String VARIABLE_KAGENT_LIVENESS_THRESHOLD = "kagent_liveness_threshold";
private static final String VARIABLE_RESOURCE_DIRS = "resources";
private static final String VARIABLE_CERTS_DIRS = "certs_dir";
private static final String VARIABLE_MAX_STATUS_POLL_RETRY = "max_status_poll_retry";
private static final String VARIABLE_CERT_MATER_DELAY = "cert_mater_delay";
private static final String VARIABLE_WHITELIST_USERS_LOGIN = "whitelist_users";
private static final String VARIABLE_VERIFICATION_PATH = "verification_endpoint";
private static final String VARIABLE_FIRST_TIME_LOGIN = "first_time_login";
private static final String VARIABLE_CERTIFICATE_USER_VALID_DAYS = "certificate_user_valid_days";
private static final String VARIABLE_SERVICE_DISCOVERY_DOMAIN = "service_discovery_domain";
// Per-service version strings.
private static final String VARIABLE_ZOOKEEPER_VERSION = "zookeeper_version";
private static final String VARIABLE_GRAFANA_VERSION = "grafana_version";
private static final String VARIABLE_LOGSTASH_VERSION = "logstash_version";
private static final String VARIABLE_KIBANA_VERSION = "kibana_version";
private static final String VARIABLE_FILEBEAT_VERSION = "filebeat_version";
private static final String VARIABLE_NDB_VERSION = "ndb_version";
private static final String VARIABLE_LIVY_VERSION = "livy_version";
private static final String VARIABLE_HIVE2_VERSION = "hive2_version";
private static final String VARIABLE_TEZ_VERSION = "tez_version";
private static final String VARIABLE_SPARK_VERSION = "spark_version";
private static final String VARIABLE_FLINK_VERSION = "flink_version";
private static final String VARIABLE_EPIPE_VERSION = "epipe_version";
private static final String VARIABLE_DELA_VERSION = "dela_version";
private static final String VARIABLE_KAFKA_VERSION = "kafka_version";
private static final String VARIABLE_OPENSEARCH_VERSION = "elastic_version";
private static final String VARIABLE_TENSORFLOW_VERSION = "tensorflow_version";
private static final String VARIABLE_KUBE_KSERVE_TENSORFLOW_VERSION = "kube_kserve_tensorflow_version";
private static final String VARIABLE_HOPSWORKS_VERSION = "hopsworks_version";
private final static String VARIABLE_LIVY_STARTUP_TIMEOUT = "livy_startup_timeout";
private final static String VARIABLE_USER_SEARCH = "enable_user_search";
private final static String VARIABLE_REJECT_REMOTE_USER_NO_GROUP = "reject_remote_user_no_group";
//Used by RESTException to include devMsg or not in response
private static final String VARIABLE_HOPSWORKS_REST_LOG_LEVEL = "hopsworks_rest_log_level";
// Variables-table row keys, grouped per subsystem (read in populateCache()).
/*
 * -------------------- Serving ---------------
 */
private static final String VARIABLE_SERVING_MONITOR_INT = "serving_monitor_int";
private static final String VARIABLE_SERVING_CONNECTION_POOL_SIZE = "serving_connection_pool_size";
private static final String VARIABLE_SERVING_MAX_ROUTE_CONNECTIONS = "serving_max_route_connections";
/*
 * -------------------- TensorBoard ---------------
 */
private static final String VARIABLE_TENSORBOARD_MAX_RELOAD_THREADS = "tensorboard_max_reload_threads";
/*
 * -------------------- Kubernetes ---------------
 */
private static final String VARIABLE_KUBEMASTER_URL = "kube_master_url";
private static final String VARIABLE_KUBE_USER = "kube_user";
private static final String VARIABLE_KUBE_HOPSWORKS_USER = "kube_hopsworks_user";
private static final String VARIABLE_KUBE_CA_CERTFILE = "kube_ca_certfile";
private static final String VARIABLE_KUBE_CLIENT_KEYFILE = "kube_client_keyfile";
private static final String VARIABLE_KUBE_CLIENT_CERTFILE = "kube_client_certfile";
private static final String VARIABLE_KUBE_CLIENT_KEYPASS = "kube_client_keypass";
private static final String VARIABLE_KUBE_TRUSTSTORE_PATH = "kube_truststore_path";
private static final String VARIABLE_KUBE_TRUSTSTORE_KEY = "kube_truststore_key";
private static final String VARIABLE_KUBE_KEYSTORE_PATH = "kube_keystore_path";
private static final String VARIABLE_KUBE_KEYSTORE_KEY = "kube_keystore_key";
private static final String VARIABLE_KUBE_PULL_POLICY = "kube_img_pull_policy";
private static final String VARIABLE_KUBE_API_MAX_ATTEMPTS = "kube_api_max_attempts";
private static final String VARIABLE_KUBE_DOCKER_MAX_MEMORY_ALLOCATION = "kube_docker_max_memory_allocation";
private static final String VARIABLE_KUBE_DOCKER_MAX_GPUS_ALLOCATION = "kube_docker_max_gpus_allocation";
private static final String VARIABLE_KUBE_DOCKER_MAX_CORES_ALLOCATION = "kube_docker_max_cores_allocation";
private static final String VARIABLE_KUBE_INSTALLED = "kubernetes_installed";
private static final String VARIABLE_KUBE_KSERVE_INSTALLED = "kube_kserve_installed";
private static final String VARIABLE_KUBE_KNATIVE_DOMAIN_NAME = "kube_knative_domain_name";
private static final String VARIABLE_KUBE_SERVING_NODE_LABELS = "kube_serving_node_labels";
private static final String VARIABLE_KUBE_SERVING_NODE_TOLERATIONS = "kube_serving_node_tolerations";
private static final String VARIABLE_KUBE_SERVING_MAX_MEMORY_ALLOCATION = "kube_serving_max_memory_allocation";
private static final String VARIABLE_KUBE_SERVING_MAX_CORES_ALLOCATION = "kube_serving_max_cores_allocation";
private static final String VARIABLE_KUBE_SERVING_MAX_GPUS_ALLOCATION = "kube_serving_max_gpus_allocation";
private static final String VARIABLE_KUBE_SERVING_MAX_NUM_INSTANCES = "kube_serving_max_num_instances";
private static final String VARIABLE_KUBE_SERVING_MIN_NUM_INSTANCES = "kube_serving_min_num_instances";
private static final String VARIABLE_KUBE_TAINTED_NODES = "kube_tainted_nodes";
// NOTE(review): constant name says "tainted nodes", row key says "node taints"
// — intentional historical mismatch; the row key is the contract.
private static final String VARIABLE_KUBE_TAINTED_NODES_MONITOR_INTERVAL =
    "kube_node_taints_monitor_interval";
/*
 * -------------------- Jupyter ---------------
 */
private static final String VARIABLE_JUPYTER_HOST = "jupyter_host";
private static final String VARIABLE_JUPYTER_ORIGIN_SCHEME = "jupyter_origin_scheme";
// JWT Variables
private static final String VARIABLE_JWT_SIGNATURE_ALGORITHM = "jwt_signature_algorithm";
private static final String VARIABLE_JWT_LIFETIME_MS = "jwt_lifetime_ms";
private static final String VARIABLE_JWT_EXP_LEEWAY_SEC = "jwt_exp_leeway_sec";
private static final String VARIABLE_JWT_SIGNING_KEY_NAME = "jwt_signing_key_name";
private static final String VARIABLE_JWT_ISSUER_KEY = "jwt_issuer";
private static final String VARIABLE_SERVICE_MASTER_JWT = "service_master_jwt";
private static final String VARIABLE_SERVICE_JWT_LIFETIME_MS = "service_jwt_lifetime_ms";
private static final String VARIABLE_SERVICE_JWT_EXP_LEEWAY_SEC = "service_jwt_exp_leeway_sec";
private static final String VARIABLE_CONNECTION_KEEPALIVE_TIMEOUT = "keepalive_timeout";
/* -------------------- Featurestore --------------- */
private static final String VARIABLE_FEATURESTORE_DEFAULT_QUOTA = "featurestore_default_quota";
private static final String VARIABLE_FEATURESTORE_DEFAULT_STORAGE_FORMAT = "featurestore_default_storage_format";
private static final String VARIABLE_FEATURESTORE_JDBC_URL = "featurestore_jdbc_url";
private static final String VARIABLE_ONLINE_FEATURESTORE = "featurestore_online_enabled";
private static final String VARIABLE_FG_PREVIEW_LIMIT = "fg_preview_limit";
private static final String VARIABLE_ONLINE_FEATURESTORE_TS = "featurestore_online_tablespace";
private static final String VARIABLE_FS_JOB_ACTIVITY_TIME = "fs_job_activity_time";
private static final String VARIABLE_ONLINEFS_THREAD_NUMBER = "onlinefs_service_thread_number";
private static final String VARIABLE_HIVE_CONF_PATH = "hive_conf_path";
private static final String VARIABLE_FS_PY_JOB_UTIL_PATH = "fs_py_job_util";
private static final String VARIABLE_FS_JAVA_JOB_UTIL_PATH = "fs_java_job_util";
//OpenSearch Security
private static final String VARIABLE_OPENSEARCH_SECURITY_ENABLED = "elastic_opendistro_security_enabled";
private static final String VARIABLE_OPENSEARCH_HTTPS_ENABLED = "elastic_https_enabled";
private static final String VARIABLE_OPENSEARCH_ADMIN_USER = "elastic_admin_user";
private static final String VARIABLE_OPENSEARCH_SERVICE_LOG_USER = "kibana_service_log_viewer";
private static final String VARIABLE_OPENSEARCH_ADMIN_PASSWORD = "elastic_admin_password";
private static final String VARIABLE_KIBANA_HTTPS_ENABLED = "kibana_https_enabled";
private static final String VARIABLE_OPENSEARCH_JWT_ENABLED = "elastic_jwt_enabled";
private static final String VARIABLE_OPENSEARCH_JWT_URL_PARAMETER = "elastic_jwt_url_parameter";
private static final String VARIABLE_OPENSEARCH_JWT_EXP_MS = "elastic_jwt_exp_ms";
private static final String VARIABLE_KIBANA_MULTI_TENANCY_ENABLED = "kibana_multi_tenancy_enabled";
private static final String VARIABLE_CLIENT_PATH = "client_path";
//Cloud
private static final String VARIABLE_CLOUD_EVENTS_ENDPOINT=
    "cloud_events_endpoint";
private static final String VARIABLE_CLOUD_EVENTS_ENDPOINT_API_KEY=
    "cloud_events_endpoint_api_key";
/*-----------------------Yarn Docker------------------------*/
private final static String VARIABLE_YARN_RUNTIME = "yarn_runtime";
private final static String VARIABLE_DOCKER_MOUNTS = "docker_mounts";
private final static String VARIABLE_DOCKER_JOB_MOUNTS_LIST = "docker_job_mounts_list";
private final static String VARIABLE_DOCKER_JOB_MOUNT_ALLOWED = "docker_job_mounts_allowed";
private final static String VARIABLE_DOCKER_JOB_UID_STRICT = "docker_job_uid_strict";
private final static String VARIABLE_DOCKER_BASE_IMAGE_PYTHON_NAME = "docker_base_image_python_name";
private final static String VARIABLE_DOCKER_BASE_IMAGE_PYTHON_VERSION = "docker_base_image_python_version";
private final static String VARIABLE_YARN_APP_UID = "yarn_app_uid";
/*-----------------------Jobs - Executions-------------------*/
private final static String VARIABLE_EXECUTIONS_PER_JOB_LIMIT = "executions_per_job_limit";
private final static String VARIABLE_EXECUTIONS_CLEANER_BATCH_SIZE = "executions_cleaner_batch_size";
private final static String VARIABLE_EXECUTIONS_CLEANER_INTERVAL_MS = "executions_cleaner_interval_ms";
/*----------------------Yarn Nodemanager status------------*/
private static final String VARIABLE_CHECK_NODEMANAGERS_STATUS = "check_nodemanagers_status";
/*----------------------- Python ------------------------*/
private final static String VARIABLE_MAX_ENV_YML_BYTE_SIZE = "max_env_yml_byte_size";
//Git
private static final String VARIABLE_GIT_COMMAND_TIMEOUT_MINUTES_DEFAULT = "git_command_timeout_minutes";
/*
 * ------------------ QUOTAS ------------------
 */
// Quota row keys are composed from the shared prefixes below, e.g.
// "quotas_featuregroups_online_enabled".
private static final String QUOTAS_PREFIX = "quotas";
private static final String QUOTAS_FEATUREGROUPS_PREFIX = String.format("%s_featuregroups", QUOTAS_PREFIX);
private static final String VARIABLE_QUOTAS_ONLINE_ENABLED_FEATUREGROUPS = String.format("%s_online_enabled",
    QUOTAS_FEATUREGROUPS_PREFIX);
private static final String VARIABLE_QUOTAS_ONLINE_DISABLED_FEATUREGROUPS = String.format("%s_online_disabled",
    QUOTAS_FEATUREGROUPS_PREFIX);
private static final String VARIABLE_QUOTAS_TRAINING_DATASETS = String.format("%s_training_datasets", QUOTAS_PREFIX);
private static final String QUOTAS_MODEL_DEPLOYMENTS_PREFIX = String.format("%s_model_deployments", QUOTAS_PREFIX);
private static final String VARIABLE_QUOTAS_RUNNING_MODEL_DEPLOYMENTS = String.format("%s_running",
    QUOTAS_MODEL_DEPLOYMENTS_PREFIX);
private static final String VARIABLE_QUOTAS_TOTAL_MODEL_DEPLOYMENTS = String.format("%s_total",
    QUOTAS_MODEL_DEPLOYMENTS_PREFIX);
private static final String VARIABLE_QUOTAS_MAX_PARALLEL_EXECUTIONS = String.format("%s_max_parallel_executions",
    QUOTAS_PREFIX);
//Docker cgroups
private static final String VARIABLE_DOCKER_CGROUP_ENABLED = "docker_cgroup_enabled";
private static final String VARIABLE_DOCKER_CGROUP_HARD_LIMIT_MEMORY = "docker_cgroup_memory_limit_gb";
private static final String VARIABLE_DOCKER_CGROUP_SOFT_LIMIT_MEMORY = "docker_cgroup_soft_limit_memory_gb";
private static final String VARIABLE_DOCKER_CGROUP_CPU_QUOTA = "docker_cgroup_cpu_quota_percentage";
private static final String VARIABLE_DOCKER_CGROUP_CPU_PERIOD = "docker_cgroup_cpu_period";
private static final String VARIABLE_DOCKER_CGROUP_MONITOR_INTERVAL = "docker_cgroup_monitor_interval";
private static final String VARIABLE_PROMETHEUS_PORT = "prometheus_port";
private static final String VARIABLE_SKIP_NAMESPACE_CREATION =
    "kube_skip_namespace_creation";
/**
 * Flavour of the Kubernetes deployment Hopsworks talks to. The string form
 * ({@code "local"}, {@code "eks"}, {@code "aks"}) is what is stored in the
 * {@code kube_type} variables row (see {@code VARIABLE_KUBE_TYPE}).
 */
public enum KubeType {
  Local("local"),
  EKS("eks"),
  AKS("aks");

  // Wire/DB representation of this constant; immutable by design.
  private final String name;

  KubeType(String name) {
    this.name = name;
  }

  /**
   * Resolves a stored string to its constant.
   *
   * @param str value read from the variables table (may be null)
   * @return the matching constant, or {@code Local} for unknown/null input
   *         (preserves the historical fallback; the original if/else chain
   *         additionally threw NPE on null, which is now handled gracefully)
   */
  static KubeType fromString(String str) {
    for (KubeType type : values()) {
      if (type.name.equals(str)) {
        return type;
      }
    }
    return Local;
  }
}
// Kubernetes/Docker registry row keys; VARIABLE_KUBE_TYPE maps onto the
// KubeType enum via KubeType.fromString in populateCache().
private static final String VARIABLE_KUBE_TYPE = "kube_type";
private static final String VARIABLE_DOCKER_NAMESPACE = "docker_namespace";
private static final String VARIABLE_MANAGED_DOCKER_REGISTRY =
    "managed_docker_registry";
// Legacy alias of setStrVar(...): kept so older call sites in populateCache()
// keep compiling; behaves identically.
private String setVar(String varName, String defaultValue) {
  return setStrVar(varName, defaultValue);
}
/**
 * Reads the string value of the variables row {@code varName}.
 *
 * @return the stored value, or {@code defaultValue} when the row is missing
 *         or its value is null/empty
 */
private String setStrVar(String varName, String defaultValue) {
  return findById(varName)
      .map(Variables::getValue)
      .filter(stored -> !Strings.isNullOrEmpty(stored))
      .orElse(defaultValue);
}
/**
 * Reads a directory path from the variables row {@code varName}.
 *
 * @return the stored path, but only if it is non-empty and exists as a
 *         directory on the local filesystem; otherwise {@code defaultValue}
 */
private String setDirVar(String varName, String defaultValue) {
  return findById(varName)
      .map(Variables::getValue)
      .filter(path -> !Strings.isNullOrEmpty(path) && new File(path).isDirectory())
      .orElse(defaultValue);
}
/**
 * Reads an IP address from the variables row {@code varName}.
 *
 * @return the stored value when non-empty and accepted by {@code Ip.validIp};
 *         otherwise {@code defaultValue}
 */
private String setIpVar(String varName, String defaultValue) {
  return findById(varName)
      .map(Variables::getValue)
      .filter(addr -> !Strings.isNullOrEmpty(addr) && Ip.validIp(addr))
      .orElse(defaultValue);
}
/**
 * Reads a boolean from the variables row {@code varName}.
 *
 * @return {@code Boolean.parseBoolean} of the stored value when non-empty
 *         (i.e. anything other than "true" yields false), else
 *         {@code defaultValue}
 */
private Boolean setBoolVar(String varName, Boolean defaultValue) {
  return findById(varName)
      .map(Variables::getValue)
      .filter(stored -> !Strings.isNullOrEmpty(stored))
      .map(Boolean::parseBoolean)
      .orElse(defaultValue);
}
/**
 * Reads an integer from the variables row {@code varName}.
 *
 * @return the parsed value; {@code defaultValue} when the row is missing,
 *         empty, or not parseable (a warning is logged in that last case)
 */
private Integer setIntVar(String varName, Integer defaultValue) {
  Optional<Variables> row = findById(varName);
  if (!row.isPresent()) {
    return defaultValue;
  }
  String value = row.get().getValue();
  if (Strings.isNullOrEmpty(value)) {
    return defaultValue;
  }
  try {
    return Integer.parseInt(value);
  } catch (NumberFormatException ex) {
    LOGGER.log(Level.WARNING,
        "Error - not an integer! " + varName + " should be an integer. Value was " + value);
    return defaultValue;
  }
}
/**
 * Reads a double from the variables row {@code varName}.
 *
 * @return the parsed value; {@code defaultValue} when the row is missing,
 *         empty, or not parseable (a warning is logged in that last case)
 */
private Double setDoubleVar(String varName, Double defaultValue) {
  Optional<Variables> row = findById(varName);
  if (!row.isPresent()) {
    return defaultValue;
  }
  String value = row.get().getValue();
  if (Strings.isNullOrEmpty(value)) {
    return defaultValue;
  }
  try {
    return Double.parseDouble(value);
  } catch (NumberFormatException ex) {
    LOGGER.log(Level.WARNING, "Error - not a double! " + varName + " should be a double. Value was " + value);
    return defaultValue;
  }
}
/**
 * Reads a long from the variables row {@code varName}.
 *
 * @param varName name of the variables row to look up
 * @param defaultValue returned when the row is missing, empty, or malformed.
 *        NOTE(review): the primitive return unboxes this — a null default
 *        would NPE here; all visible callers pass non-null. Confirm before
 *        passing null.
 * @return the parsed value, or {@code defaultValue}
 */
private long setLongVar(String varName, Long defaultValue) {
  Optional<Variables> variable = findById(varName);
  if (variable.isPresent()) {
    String value = variable.get().getValue();
    try {
      if (!Strings.isNullOrEmpty(value)) {
        return Long.parseLong(value);
      }
    } catch (NumberFormatException ex) {
      // Fixed message: this parses a long, not an integer.
      LOGGER.log(Level.WARNING, "Error - not a long! " + varName + " should be a long. Value was " + value);
    }
  }
  return defaultValue;
}
/**
 * Reads a {@link RESTLogLevel} from the variables row {@code varName}.
 *
 * @return the parsed level, or {@code defaultValue} when the row is missing
 *         or empty. Like the original, an unknown level name propagates the
 *         IllegalArgumentException from {@code valueOf}.
 */
private RESTLogLevel setLogLevelVar(String varName, RESTLogLevel defaultValue) {
  return findById(varName)
      .map(Variables::getValue)
      .filter(stored -> !Strings.isNullOrEmpty(stored))
      .map(RESTLogLevel::valueOf)
      .orElse(defaultValue);
}
/**
 * Reads a duration (e.g. "30s", "5m" — parsed by the getConfTime* helpers)
 * from the variables row {@code varName} and converts it to milliseconds.
 *
 * @return the duration in ms, or {@code defaultValue} when the row is missing
 *         or empty (unboxed — callers pass non-null defaults)
 */
private long setMillisecondVar(String varName, Long defaultValue) {
  Optional<Variables> row = findById(varName);
  if (row.isPresent()) {
    String raw = row.get().getValue();
    if (!Strings.isNullOrEmpty(raw)) {
      long amount = getConfTimeValue(raw);
      TimeUnit unit = getConfTimeTimeUnit(raw);
      return unit.toMillis(amount);
    }
  }
  return defaultValue;
}
/**
 * Reads a {@link PaymentType} from the variables row {@code varName}.
 *
 * @return the parsed type, or {@code defaultValue} when the row is missing or
 *         empty. Like the original, an unknown name propagates the
 *         IllegalArgumentException from {@code valueOf}.
 */
private PaymentType setPaymentType(String varName, PaymentType defaultValue) {
  return findById(varName)
      .map(Variables::getValue)
      .filter(stored -> !Strings.isNullOrEmpty(stored))
      .map(PaymentType::valueOf)
      .orElse(defaultValue);
}
// Reads the variables row `varName`, remembers the raw (pre-tokenized) string,
// and returns the lowercased token set.
// NOTE(review): despite the generic name, this overload has a side effect —
// it assigns the RESERVED_PROJECT_NAMES_STR field, so it is effectively
// specific to the reserved-project-names setting. Do not reuse it for other
// variables without confirming that coupling is acceptable.
private Set<String> setStringHashSetLowerCase(String varName, String defaultValue, String separator) {
  RESERVED_PROJECT_NAMES_STR = setStrVar(varName, defaultValue);
  return setStringHashSetLowerCase(RESERVED_PROJECT_NAMES_STR, separator, true);
}
/**
 * Splits {@code values} on the delimiter characters in {@code separator} and
 * returns the trimmed (optionally lowercased) tokens as a set.
 *
 * <p>StringTokenizer is kept intentionally: each character of
 * {@code separator} is a delimiter and empty tokens are skipped — semantics
 * {@code String.split} would not preserve.
 */
private Set<String> setStringHashSetLowerCase(String values, String separator, boolean toLowerCase) {
  StringTokenizer st = new StringTokenizer(values, separator);
  Set<String> result = new HashSet<>(st.countTokens());
  while (st.hasMoreTokens()) {
    String token = st.nextToken().trim();
    result.add(toLowerCase ? token.toLowerCase() : token);
  }
  return result;
}
private boolean cached = false;
private void populateCache() {
if (!cached) {
ADMIN_EMAIL = setVar(VARIABLE_ADMIN_EMAIL, ADMIN_EMAIL);
LOCALHOST = setBoolVar(VARIABLE_LOCALHOST, LOCALHOST);
CLOUD = setStrVar(VARIABLE_CLOUD, CLOUD);
REQUESTS_VERIFY = setBoolVar(VARIABLE_REQUESTS_VERIFY, REQUESTS_VERIFY);
PYTHON_KERNEL = setBoolVar(VARIABLE_PYTHON_KERNEL, PYTHON_KERNEL);
TWOFACTOR_AUTH = setVar(VARIABLE_TWOFACTOR_AUTH, TWOFACTOR_AUTH);
TWOFACTOR_EXCLUDE = setVar(VARIABLE_TWOFACTOR_EXCLUD, TWOFACTOR_EXCLUDE);
HOPSWORKS_USER = setVar(VARIABLE_HOPSWORKS_USER, HOPSWORKS_USER);
JUPYTER_GROUP = setVar(VARIABLE_JUPYTER_GROUP, JUPYTER_GROUP);
JUPYTER_ORIGIN_SCHEME = setVar(VARIABLE_JUPYTER_ORIGIN_SCHEME, JUPYTER_ORIGIN_SCHEME);
HDFS_SUPERUSER = setVar(VARIABLE_HDFS_SUPERUSER, HDFS_SUPERUSER);
SPARK_USER = setVar(VARIABLE_SPARK_USER, SPARK_USER);
SPARK_DIR = setDirVar(VARIABLE_SPARK_DIR, SPARK_DIR);
FLINK_USER = setVar(VARIABLE_FLINK_USER, FLINK_USER);
FLINK_DIR = setDirVar(VARIABLE_FLINK_DIR, FLINK_DIR);
STAGING_DIR = setDirVar(VARIABLE_STAGING_DIR, STAGING_DIR);
HOPS_EXAMPLES_VERSION = setVar(VARIABLE_HOPSEXAMPLES_VERSION, HOPS_EXAMPLES_VERSION);
HIVE_SUPERUSER = setStrVar(VARIABLE_HIVE_SUPERUSER, HIVE_SUPERUSER);
HIVE_WAREHOUSE = setStrVar(VARIABLE_HIVE_WAREHOUSE, HIVE_WAREHOUSE);
HIVE_SCRATCHDIR = setStrVar(VARIABLE_HIVE_SCRATCHDIR, HIVE_SCRATCHDIR);
HIVE_SCRATCHDIR_DELAY = setStrVar(VARIABLE_HIVE_SCRATCHDIR_DELAY, HIVE_SCRATCHDIR_DELAY);
HIVE_SCRATCHDIR_CLEANER_INTERVAL = setStrVar(VARIABLE_HIVE_SCRATCHDIR_CLEANER_INTERVAL,
HIVE_SCRATCHDIR_CLEANER_INTERVAL);
HIVE_DB_DEFAULT_QUOTA = setLongVar(VARIABLE_HIVE_DEFAULT_QUOTA, HIVE_DB_DEFAULT_QUOTA);
HADOOP_VERSION = setVar(VARIABLE_HADOOP_VERSION, HADOOP_VERSION);
JUPYTER_DIR = setDirVar(VARIABLE_JUPYTER_DIR, JUPYTER_DIR);
JUPYTER_WS_PING_INTERVAL_MS = setMillisecondVar(VARIABLE_JUPYTER_WS_PING_INTERVAL, JUPYTER_WS_PING_INTERVAL_MS);
HADOOP_DIR = setDirVar(VARIABLE_HADOOP_DIR, HADOOP_DIR);
HOPSWORKS_INSTALL_DIR = setDirVar(VARIABLE_HOPSWORKS_DIR, HOPSWORKS_INSTALL_DIR);
CERTS_DIR = setDirVar(VARIABLE_CERTS_DIRS, CERTS_DIR);
SUDOERS_DIR = setDirVar(VARIABLE_SUDOERS_DIR, SUDOERS_DIR);
CERTIFICATE_USER_VALID_DAYS = setStrVar(VARIABLE_CERTIFICATE_USER_VALID_DAYS, CERTIFICATE_USER_VALID_DAYS);
SERVICE_DISCOVERY_DOMAIN = setStrVar(VARIABLE_SERVICE_DISCOVERY_DOMAIN, SERVICE_DISCOVERY_DOMAIN);
AIRFLOW_DIR = setDirVar(VARIABLE_AIRFLOW_DIR, AIRFLOW_DIR);
String openSearchIps = setStrVar(VARIABLE_OPENSEARCH_IP,
OpenSearchSettings.OPENSEARCH_IP_DEFAULT);
int openSearchPort = setIntVar(VARIABLE_OPENSEARCH_PORT, OpenSearchSettings.OPENSEARCH_PORT_DEFAULT);
int openSearchRestPort = setIntVar(VARIABLE_OPENSEARCH_REST_PORT,
OpenSearchSettings.OPENSEARCH_REST_PORT_DEFAULT);
boolean openSearchSecurityEnabled =
setBoolVar(VARIABLE_OPENSEARCH_SECURITY_ENABLED,
OpenSearchSettings.OPENSEARCH_SECURTIY_ENABLED_DEFAULT);
boolean openSearchHttpsEnabled = setBoolVar(VARIABLE_OPENSEARCH_HTTPS_ENABLED
, OpenSearchSettings.OPENSEARCH_HTTPS_ENABLED_DEFAULT);
String openSearchAdminUser = setStrVar(VARIABLE_OPENSEARCH_ADMIN_USER,
OpenSearchSettings.OPENSEARCH_ADMIN_USER_DEFAULT);
String openSearchServiceLogUser = setStrVar(VARIABLE_OPENSEARCH_SERVICE_LOG_USER,
OpenSearchSettings.OPENSEARCH_SERVICE_LOG_ROLE);
String openSearchAdminPassword = setStrVar(VARIABLE_OPENSEARCH_ADMIN_PASSWORD,
OpenSearchSettings.OPENSEARCH_ADMIN_PASSWORD_DEFAULT);
boolean openSearchJWTEnabled = setBoolVar(VARIABLE_OPENSEARCH_JWT_ENABLED
, OpenSearchSettings.OPENSEARCH_JWT_ENABLED_DEFAULT);
String openSearchJWTUrlParameter = setStrVar(VARIABLE_OPENSEARCH_JWT_URL_PARAMETER,
OpenSearchSettings.OPENSEARCH_JWT_URL_PARAMETER_DEFAULT);
long openSearchJWTEXPMS = setLongVar(VARIABLE_OPENSEARCH_JWT_EXP_MS,
OpenSearchSettings.OPENSEARCH_JWT_EXP_MS_DEFAULT);
OPENSEARCH_SETTINGS = new OpenSearchSettings(openSearchIps, openSearchPort,
openSearchRestPort, openSearchSecurityEnabled, openSearchHttpsEnabled,
openSearchAdminUser, openSearchAdminPassword, openSearchJWTEnabled,
openSearchJWTUrlParameter, openSearchJWTEXPMS, openSearchServiceLogUser);
OpenSearch_LOGS_INDEX_EXPIRATION = setLongVar(VARIABLE_OPENSEARCH_LOGS_INDEX_EXPIRATION,
OpenSearch_LOGS_INDEX_EXPIRATION);
KIBANA_IP = setIpVar(VARIABLE_KIBANA_IP, KIBANA_IP);
KAFKA_MAX_NUM_TOPICS = setIntVar(VARIABLE_KAFKA_MAX_NUM_TOPICS, KAFKA_MAX_NUM_TOPICS);
HOPSWORKS_DEFAULT_SSL_MASTER_PASSWORD = setVar(VARIABLE_HOPSWORKS_SSL_MASTER_PASSWORD,
HOPSWORKS_DEFAULT_SSL_MASTER_PASSWORD);
KAFKA_USER = setVar(VARIABLE_KAFKA_USER, KAFKA_USER);
KAFKA_DIR = setDirVar(VARIABLE_KAFKA_DIR, KAFKA_DIR);
KAFKA_DEFAULT_NUM_PARTITIONS = setIntVar(VARIABLE_KAFKA_NUM_PARTITIONS, KAFKA_DEFAULT_NUM_PARTITIONS);
KAFKA_DEFAULT_NUM_REPLICAS = setIntVar(VARIABLE_KAFKA_NUM_REPLICAS, KAFKA_DEFAULT_NUM_REPLICAS);
YARN_DEFAULT_QUOTA = setIntVar(VARIABLE_YARN_DEFAULT_QUOTA, YARN_DEFAULT_QUOTA);
DEFAULT_PAYMENT_TYPE = setPaymentType(VARIABLE_PROJECT_PAYMENT_TYPE, DEFAULT_PAYMENT_TYPE);
HDFS_DEFAULT_QUOTA_MBs = setLongVar(VARIABLE_HDFS_DEFAULT_QUOTA, HDFS_DEFAULT_QUOTA_MBs);
HDFS_BASE_STORAGE_POLICY = setHdfsStoragePolicy(VARIABLE_HDFS_BASE_STORAGE_POLICY, HDFS_BASE_STORAGE_POLICY);
HDFS_LOG_STORAGE_POLICY = setHdfsStoragePolicy(VARIABLE_HDFS_LOG_STORAGE_POLICY, HDFS_LOG_STORAGE_POLICY);
MAX_NUM_PROJ_PER_USER = setIntVar(VARIABLE_MAX_NUM_PROJ_PER_USER, MAX_NUM_PROJ_PER_USER);
CLUSTER_CERT = setVar(VARIABLE_CLUSTER_CERT, CLUSTER_CERT);
FILE_PREVIEW_IMAGE_SIZE = setIntVar(VARIABLE_FILE_PREVIEW_IMAGE_SIZE, 10000000);
FILE_PREVIEW_TXT_SIZE = setIntVar(VARIABLE_FILE_PREVIEW_TXT_SIZE, 100);
ANACONDA_DIR = setDirVar(VARIABLE_ANACONDA_DIR, ANACONDA_DIR);
ANACONDA_DEFAULT_REPO = setStrVar(VARIABLE_ANACONDA_DEFAULT_REPO, ANACONDA_DEFAULT_REPO);
ANACONDA_ENABLED = Boolean.parseBoolean(setStrVar(
VARIABLE_ANACONDA_ENABLED, ANACONDA_ENABLED.toString()));
KAGENT_USER = setStrVar(VARIABLE_KAGENT_USER, KAGENT_USER);
KAGENT_LIVENESS_MONITOR_ENABLED = setBoolVar(VARIABLE_KAGENT_LIVENESS_MONITOR_ENABLED,
KAGENT_LIVENESS_MONITOR_ENABLED);
KAGENT_LIVENESS_THRESHOLD = setStrVar(VARIABLE_KAGENT_LIVENESS_THRESHOLD, KAGENT_LIVENESS_THRESHOLD);
DOWNLOAD_ALLOWED = Boolean.parseBoolean(setStrVar(VARIABLE_DOWNLOAD_ALLOWED, DOWNLOAD_ALLOWED.toString()));
RESOURCE_DIRS = setStrVar(VARIABLE_RESOURCE_DIRS, RESOURCE_DIRS);
MAX_STATUS_POLL_RETRY = setIntVar(VARIABLE_MAX_STATUS_POLL_RETRY, MAX_STATUS_POLL_RETRY);
HOPS_RPC_TLS = setStrVar(VARIABLE_HOPS_RPC_TLS, HOPS_RPC_TLS);
CERTIFICATE_MATERIALIZER_DELAY = setStrVar(VARIABLE_CERT_MATER_DELAY,
CERTIFICATE_MATERIALIZER_DELAY);
WHITELIST_USERS_LOGIN = setStrVar(VARIABLE_WHITELIST_USERS_LOGIN,
WHITELIST_USERS_LOGIN);
FIRST_TIME_LOGIN = setStrVar(VARIABLE_FIRST_TIME_LOGIN, FIRST_TIME_LOGIN);
VERIFICATION_PATH = setStrVar(VARIABLE_VERIFICATION_PATH, VERIFICATION_PATH);
serviceKeyRotationEnabled = setBoolVar(SERVICE_KEY_ROTATION_ENABLED_KEY, serviceKeyRotationEnabled);
serviceKeyRotationInterval = setStrVar(SERVICE_KEY_ROTATION_INTERVAL_KEY, serviceKeyRotationInterval);
tensorBoardMaxLastAccessed = setIntVar(TENSORBOARD_MAX_LAST_ACCESSED, tensorBoardMaxLastAccessed);
sparkUILogsOffset = setIntVar(SPARK_UI_LOGS_OFFSET, sparkUILogsOffset);
jupyterShutdownTimerInterval = setStrVar(JUPYTER_SHUTDOWN_TIMER_INTERVAL, jupyterShutdownTimerInterval);
checkNodemanagersStatus = setBoolVar(VARIABLE_CHECK_NODEMANAGERS_STATUS, checkNodemanagersStatus);
populateDelaCache();
populateLDAPCache();
ZOOKEEPER_VERSION = setStrVar(VARIABLE_ZOOKEEPER_VERSION, ZOOKEEPER_VERSION);
GRAFANA_VERSION = setStrVar(VARIABLE_GRAFANA_VERSION, GRAFANA_VERSION);
LOGSTASH_VERSION = setStrVar(VARIABLE_LOGSTASH_VERSION, LOGSTASH_VERSION);
KIBANA_VERSION = setStrVar(VARIABLE_KIBANA_VERSION, KIBANA_VERSION);
FILEBEAT_VERSION = setStrVar(VARIABLE_FILEBEAT_VERSION, FILEBEAT_VERSION);
NDB_VERSION = setStrVar(VARIABLE_NDB_VERSION, NDB_VERSION);
LIVY_VERSION = setStrVar(VARIABLE_LIVY_VERSION, LIVY_VERSION);
HIVE2_VERSION = setStrVar(VARIABLE_HIVE2_VERSION, HIVE2_VERSION);
TEZ_VERSION = setStrVar(VARIABLE_TEZ_VERSION, TEZ_VERSION);
SPARK_VERSION = setStrVar(VARIABLE_SPARK_VERSION, SPARK_VERSION);
FLINK_VERSION = setStrVar(VARIABLE_FLINK_VERSION, FLINK_VERSION);
EPIPE_VERSION = setStrVar(VARIABLE_EPIPE_VERSION, EPIPE_VERSION);
DELA_VERSION = setStrVar(VARIABLE_DELA_VERSION, DELA_VERSION);
KAFKA_VERSION = setStrVar(VARIABLE_KAFKA_VERSION, KAFKA_VERSION);
OPENSEARCH_VERSION = setStrVar(VARIABLE_OPENSEARCH_VERSION, OPENSEARCH_VERSION);
TENSORFLOW_VERSION = setStrVar(VARIABLE_TENSORFLOW_VERSION, TENSORFLOW_VERSION);
KUBE_KSERVE_TENSORFLOW_VERSION = setStrVar(VARIABLE_KUBE_KSERVE_TENSORFLOW_VERSION,
KUBE_KSERVE_TENSORFLOW_VERSION);
HOPSWORKS_VERSION = setStrVar(VARIABLE_HOPSWORKS_VERSION, HOPSWORKS_VERSION);
HOPSWORKS_REST_LOG_LEVEL = setLogLevelVar(VARIABLE_HOPSWORKS_REST_LOG_LEVEL, HOPSWORKS_REST_LOG_LEVEL);
HOPSWORKS_PUBLIC_HOST = setStrVar(VARIABLE_HOPSWORKS_PUBLIC_HOST, HOPSWORKS_PUBLIC_HOST);
PYPI_REST_ENDPOINT = setStrVar(VARIABLE_PYPI_REST_ENDPOINT, PYPI_REST_ENDPOINT);
PYPI_SIMPLE_ENDPOINT = setStrVar(VARIABLE_PYPI_SIMPLE_ENDPOINT, PYPI_SIMPLE_ENDPOINT);
PYPI_INDEXER_TIMER_INTERVAL = setStrVar(VARIABLE_PYPI_INDEXER_TIMER_INTERVAL, PYPI_INDEXER_TIMER_INTERVAL);
PYTHON_LIBRARY_UPDATES_MONITOR_INTERVAL = setStrVar(VARIABLE_PYTHON_LIBRARY_UPDATES_MONITOR_INTERVAL,
PYTHON_LIBRARY_UPDATES_MONITOR_INTERVAL);
PYPI_INDEXER_TIMER_ENABLED = setBoolVar(VARIABLE_PYPI_INDEXER_TIMER_ENABLED, PYPI_INDEXER_TIMER_ENABLED);
IMMUTABLE_PYTHON_LIBRARY_NAMES = toSetFromCsv(
setStrVar(VARIABLE_IMMUTABLE_PYTHON_LIBRARY_NAMES, DEFAULT_IMMUTABLE_PYTHON_LIBRARY_NAMES),
",");
SERVING_MONITOR_INT = setStrVar(VARIABLE_SERVING_MONITOR_INT, SERVING_MONITOR_INT);
SERVING_CONNECTION_POOL_SIZE = setIntVar(VARIABLE_SERVING_CONNECTION_POOL_SIZE,
SERVING_CONNECTION_POOL_SIZE);
SERVING_MAX_ROUTE_CONNECTIONS = setIntVar(VARIABLE_SERVING_MAX_ROUTE_CONNECTIONS,
SERVING_MAX_ROUTE_CONNECTIONS);
TENSORBOARD_MAX_RELOAD_THREADS = setIntVar(VARIABLE_TENSORBOARD_MAX_RELOAD_THREADS,
TENSORBOARD_MAX_RELOAD_THREADS);
KUBE_USER = setStrVar(VARIABLE_KUBE_USER, KUBE_USER);
KUBE_HOPSWORKS_USER = setStrVar(VARIABLE_KUBE_HOPSWORKS_USER, KUBE_HOPSWORKS_USER);
KUBEMASTER_URL = setStrVar(VARIABLE_KUBEMASTER_URL, KUBEMASTER_URL);
KUBE_CA_CERTFILE = setStrVar(VARIABLE_KUBE_CA_CERTFILE, KUBE_CA_CERTFILE);
KUBE_CLIENT_KEYFILE = setStrVar(VARIABLE_KUBE_CLIENT_KEYFILE, KUBE_CLIENT_KEYFILE);
KUBE_CLIENT_CERTFILE = setStrVar(VARIABLE_KUBE_CLIENT_CERTFILE, KUBE_CLIENT_CERTFILE);
KUBE_CLIENT_KEYPASS = setStrVar(VARIABLE_KUBE_CLIENT_KEYPASS, KUBE_CLIENT_KEYPASS);
KUBE_TRUSTSTORE_PATH = setStrVar(VARIABLE_KUBE_TRUSTSTORE_PATH, KUBE_TRUSTSTORE_PATH);
KUBE_TRUSTSTORE_KEY = setStrVar(VARIABLE_KUBE_TRUSTSTORE_KEY, KUBE_TRUSTSTORE_KEY);
KUBE_KEYSTORE_PATH = setStrVar(VARIABLE_KUBE_KEYSTORE_PATH, KUBE_KEYSTORE_PATH);
KUBE_KEYSTORE_KEY = setStrVar(VARIABLE_KUBE_KEYSTORE_KEY, KUBE_KEYSTORE_KEY);
KUBE_PULL_POLICY = setStrVar(VARIABLE_KUBE_PULL_POLICY, KUBE_PULL_POLICY);
KUBE_API_MAX_ATTEMPTS = setIntVar(VARIABLE_KUBE_API_MAX_ATTEMPTS, KUBE_API_MAX_ATTEMPTS);
KUBE_DOCKER_MAX_MEMORY_ALLOCATION = setIntVar(VARIABLE_KUBE_DOCKER_MAX_MEMORY_ALLOCATION,
KUBE_DOCKER_MAX_MEMORY_ALLOCATION);
KUBE_DOCKER_MAX_CORES_ALLOCATION = setDoubleVar(VARIABLE_KUBE_DOCKER_MAX_CORES_ALLOCATION,
KUBE_DOCKER_MAX_CORES_ALLOCATION);
KUBE_DOCKER_MAX_GPUS_ALLOCATION = setIntVar(VARIABLE_KUBE_DOCKER_MAX_GPUS_ALLOCATION,
KUBE_DOCKER_MAX_GPUS_ALLOCATION);
KUBE_INSTALLED = setBoolVar(VARIABLE_KUBE_INSTALLED, KUBE_INSTALLED);
KUBE_KSERVE_INSTALLED = setBoolVar(VARIABLE_KUBE_KSERVE_INSTALLED, KUBE_KSERVE_INSTALLED);
KUBE_SERVING_NODE_LABELS = setStrVar(VARIABLE_KUBE_SERVING_NODE_LABELS, KUBE_SERVING_NODE_LABELS);
KUBE_SERVING_NODE_TOLERATIONS = setStrVar(VARIABLE_KUBE_SERVING_NODE_TOLERATIONS, KUBE_SERVING_NODE_TOLERATIONS);
KUBE_SERVING_MAX_MEMORY_ALLOCATION = setIntVar(VARIABLE_KUBE_SERVING_MAX_MEMORY_ALLOCATION,
KUBE_SERVING_MAX_MEMORY_ALLOCATION);
KUBE_SERVING_MAX_CORES_ALLOCATION = setDoubleVar(VARIABLE_KUBE_SERVING_MAX_CORES_ALLOCATION,
KUBE_SERVING_MAX_CORES_ALLOCATION);
KUBE_SERVING_MAX_GPUS_ALLOCATION = setIntVar(VARIABLE_KUBE_SERVING_MAX_GPUS_ALLOCATION,
KUBE_SERVING_MAX_GPUS_ALLOCATION);
KUBE_SERVING_MAX_NUM_INSTANCES = setIntVar(VARIABLE_KUBE_SERVING_MAX_NUM_INSTANCES,
KUBE_SERVING_MAX_NUM_INSTANCES);
KUBE_SERVING_MIN_NUM_INSTANCES = setIntVar(VARIABLE_KUBE_SERVING_MIN_NUM_INSTANCES,
KUBE_SERVING_MIN_NUM_INSTANCES);
KUBE_KNATIVE_DOMAIN_NAME = setStrVar(VARIABLE_KUBE_KNATIVE_DOMAIN_NAME, KUBE_KNATIVE_DOMAIN_NAME);
KUBE_TAINTED_NODES = setStrVar(VARIABLE_KUBE_TAINTED_NODES, KUBE_TAINTED_NODES);
KUBE_TAINTED_NODES_MONITOR_INTERVAL = setStrVar(VARIABLE_KUBE_TAINTED_NODES_MONITOR_INTERVAL,
KUBE_TAINTED_NODES_MONITOR_INTERVAL);
HOPSWORKS_ENTERPRISE = setBoolVar(VARIABLE_HOPSWORKS_ENTERPRISE, HOPSWORKS_ENTERPRISE);
JUPYTER_HOST = setStrVar(VARIABLE_JUPYTER_HOST, JUPYTER_HOST);
JWT_SIGNATURE_ALGORITHM = setStrVar(VARIABLE_JWT_SIGNATURE_ALGORITHM, JWT_SIGNATURE_ALGORITHM);
JWT_LIFETIME_MS = setLongVar(VARIABLE_JWT_LIFETIME_MS, JWT_LIFETIME_MS);
JWT_EXP_LEEWAY_SEC = setIntVar(VARIABLE_JWT_EXP_LEEWAY_SEC, JWT_EXP_LEEWAY_SEC);
JWT_SIGNING_KEY_NAME = setStrVar(VARIABLE_JWT_SIGNING_KEY_NAME, JWT_SIGNING_KEY_NAME);
JWT_ISSUER = setStrVar(VARIABLE_JWT_ISSUER_KEY, JWT_ISSUER);
SERVICE_JWT_LIFETIME_MS = setLongVar(VARIABLE_SERVICE_JWT_LIFETIME_MS, SERVICE_JWT_LIFETIME_MS);
SERVICE_JWT_EXP_LEEWAY_SEC = setIntVar(VARIABLE_SERVICE_JWT_EXP_LEEWAY_SEC, SERVICE_JWT_EXP_LEEWAY_SEC);
populateServiceJWTCache();
CONNECTION_KEEPALIVE_TIMEOUT = setIntVar(VARIABLE_CONNECTION_KEEPALIVE_TIMEOUT, CONNECTION_KEEPALIVE_TIMEOUT);
FEATURESTORE_DB_DEFAULT_QUOTA = setLongVar(VARIABLE_FEATURESTORE_DEFAULT_QUOTA, FEATURESTORE_DB_DEFAULT_QUOTA);
FEATURESTORE_DB_DEFAULT_STORAGE_FORMAT =
setStrVar(VARIABLE_FEATURESTORE_DEFAULT_STORAGE_FORMAT, FEATURESTORE_DB_DEFAULT_STORAGE_FORMAT);
FEATURESTORE_JDBC_URL = setStrVar(VARIABLE_FEATURESTORE_JDBC_URL, FEATURESTORE_JDBC_URL);
ONLINE_FEATURESTORE = setBoolVar(VARIABLE_ONLINE_FEATURESTORE, ONLINE_FEATURESTORE);
ONLINE_FEATURESTORE_TS = setStrVar(VARIABLE_ONLINE_FEATURESTORE_TS, ONLINE_FEATURESTORE_TS);
FS_JOB_ACTIVITY_TIME = setStrVar(VARIABLE_FS_JOB_ACTIVITY_TIME, FS_JOB_ACTIVITY_TIME);
ONLINEFS_THREAD_NUMBER = setIntVar(VARIABLE_ONLINEFS_THREAD_NUMBER, ONLINEFS_THREAD_NUMBER);
KIBANA_HTTPS_ENABELED = setBoolVar(VARIABLE_KIBANA_HTTPS_ENABLED,
KIBANA_HTTPS_ENABELED);
KIBANA_MULTI_TENANCY_ENABELED = setBoolVar(VARIABLE_KIBANA_MULTI_TENANCY_ENABLED,
KIBANA_MULTI_TENANCY_ENABELED);
RESERVED_PROJECT_NAMES =
setStringHashSetLowerCase(VARIABLE_RESERVED_PROJECT_NAMES, DEFAULT_RESERVED_PROJECT_NAMES, ",");
CLOUD_EVENTS_ENDPOINT = setStrVar(VARIABLE_CLOUD_EVENTS_ENDPOINT,
CLOUD_EVENTS_ENDPOINT);
CLOUD_EVENTS_ENDPOINT_API_KEY =
setStrVar(VARIABLE_CLOUD_EVENTS_ENDPOINT_API_KEY, CLOUD_EVENTS_ENDPOINT_API_KEY);
FG_PREVIEW_LIMIT = setIntVar(VARIABLE_FG_PREVIEW_LIMIT, FG_PREVIEW_LIMIT);
HIVE_CONF_PATH = setStrVar(VARIABLE_HIVE_CONF_PATH, HIVE_CONF_PATH);
FS_PY_JOB_UTIL_PATH = setStrVar(VARIABLE_FS_PY_JOB_UTIL_PATH, FS_PY_JOB_UTIL_PATH);
FS_JAVA_JOB_UTIL_PATH = setStrVar(VARIABLE_FS_JAVA_JOB_UTIL_PATH, FS_JAVA_JOB_UTIL_PATH);
YARN_RUNTIME = setStrVar(VARIABLE_YARN_RUNTIME, YARN_RUNTIME);
DOCKER_MOUNTS = setStrVar(VARIABLE_DOCKER_MOUNTS, DOCKER_MOUNTS);
DOCKER_JOB_MOUNTS_LIST = setStrVar(VARIABLE_DOCKER_JOB_MOUNTS_LIST, DOCKER_JOB_MOUNTS_LIST);
DOCKER_JOB_MOUNT_ALLOWED = setBoolVar(VARIABLE_DOCKER_JOB_MOUNT_ALLOWED, DOCKER_JOB_MOUNT_ALLOWED);
DOCKER_JOB_UID_STRICT = setBoolVar(VARIABLE_DOCKER_JOB_UID_STRICT, DOCKER_JOB_UID_STRICT);
DOCKER_BASE_IMAGE_PYTHON_NAME = setStrVar(VARIABLE_DOCKER_BASE_IMAGE_PYTHON_NAME, DOCKER_BASE_IMAGE_PYTHON_NAME);
DOCKER_BASE_IMAGE_PYTHON_VERSION = setStrVar(VARIABLE_DOCKER_BASE_IMAGE_PYTHON_VERSION,
DOCKER_BASE_IMAGE_PYTHON_VERSION);
// Job executions cleaner variables
EXECUTIONS_PER_JOB_LIMIT = setIntVar(VARIABLE_EXECUTIONS_PER_JOB_LIMIT, EXECUTIONS_PER_JOB_LIMIT);
EXECUTIONS_CLEANER_BATCH_SIZE = setIntVar(VARIABLE_EXECUTIONS_CLEANER_BATCH_SIZE, EXECUTIONS_CLEANER_BATCH_SIZE);
EXECUTIONS_CLEANER_INTERVAL_MS = setIntVar(VARIABLE_EXECUTIONS_CLEANER_INTERVAL_MS,
EXECUTIONS_CLEANER_INTERVAL_MS);
YARN_APP_UID = setLongVar(VARIABLE_YARN_APP_UID, YARN_APP_UID);
populateProvenanceCache();
CLIENT_PATH = setStrVar(VARIABLE_CLIENT_PATH, CLIENT_PATH);
KUBE_TYPE = KubeType.fromString(setStrVar(VARIABLE_KUBE_TYPE, KUBE_TYPE.name));
DOCKER_NAMESPACE = setStrVar(VARIABLE_DOCKER_NAMESPACE, DOCKER_NAMESPACE);
MANAGED_DOCKER_REGISTRY = setBoolVar(VARIABLE_MANAGED_DOCKER_REGISTRY,
MANAGED_DOCKER_REGISTRY);
MAX_ENV_YML_BYTE_SIZE = setIntVar(VARIABLE_MAX_ENV_YML_BYTE_SIZE, MAX_ENV_YML_BYTE_SIZE);
SPARK_EXECUTOR_MIN_MEMORY = setIntVar(VARIABLE_SPARK_EXECUTOR_MIN_MEMORY, SPARK_EXECUTOR_MIN_MEMORY);
LIVY_STARTUP_TIMEOUT = setIntVar(VARIABLE_LIVY_STARTUP_TIMEOUT, LIVY_STARTUP_TIMEOUT);
USER_SEARCH_ENABLED = setBoolVar(VARIABLE_USER_SEARCH, USER_SEARCH_ENABLED);
REJECT_REMOTE_USER_NO_GROUP = setBoolVar(VARIABLE_REJECT_REMOTE_USER_NO_GROUP, REJECT_REMOTE_USER_NO_GROUP);
//Git
GIT_MAX_COMMAND_TIMEOUT_MINUTES = setIntVar(VARIABLE_GIT_COMMAND_TIMEOUT_MINUTES_DEFAULT,
GIT_MAX_COMMAND_TIMEOUT_MINUTES);
DOCKER_CGROUP_ENABLED = setBoolVar(VARIABLE_DOCKER_CGROUP_ENABLED, DOCKER_CGROUP_ENABLED);
DOCKER_CGROUP_MEMORY_LIMIT = setStrVar(VARIABLE_DOCKER_CGROUP_HARD_LIMIT_MEMORY,
DOCKER_CGROUP_MEMORY_LIMIT);
DOCKER_CGROUP_MEMORY_SOFT_LIMIT = setStrVar(VARIABLE_DOCKER_CGROUP_SOFT_LIMIT_MEMORY,
DOCKER_CGROUP_MEMORY_SOFT_LIMIT);
DOCKER_CGROUP_CPU_QUOTA = setDoubleVar(VARIABLE_DOCKER_CGROUP_CPU_QUOTA, DOCKER_CGROUP_CPU_QUOTA);
DOCKER_CGROUP_CPU_PERIOD = setIntVar(VARIABLE_DOCKER_CGROUP_CPU_PERIOD, DOCKER_CGROUP_CPU_PERIOD);
DOCKER_CGROUP_MONITOR_INTERVAL = setStrVar(VARIABLE_DOCKER_CGROUP_MONITOR_INTERVAL,
DOCKER_CGROUP_MONITOR_INTERVAL);
PROMETHEUS_PORT = setIntVar(VARIABLE_PROMETHEUS_PORT, PROMETHEUS_PORT);
SKIP_NAMESPACE_CREATION = setBoolVar(VARIABLE_SKIP_NAMESPACE_CREATION,
SKIP_NAMESPACE_CREATION);
QUOTAS_ONLINE_ENABLED_FEATUREGROUPS = setLongVar(VARIABLE_QUOTAS_ONLINE_ENABLED_FEATUREGROUPS,
QUOTAS_ONLINE_ENABLED_FEATUREGROUPS);
QUOTAS_ONLINE_DISABLED_FEATUREGROUPS = setLongVar(VARIABLE_QUOTAS_ONLINE_DISABLED_FEATUREGROUPS,
QUOTAS_ONLINE_DISABLED_FEATUREGROUPS);
QUOTAS_TRAINING_DATASETS = setLongVar(VARIABLE_QUOTAS_TRAINING_DATASETS, QUOTAS_TRAINING_DATASETS);
QUOTAS_RUNNING_MODEL_DEPLOYMENTS = setLongVar(VARIABLE_QUOTAS_RUNNING_MODEL_DEPLOYMENTS,
QUOTAS_RUNNING_MODEL_DEPLOYMENTS);
QUOTAS_TOTAL_MODEL_DEPLOYMENTS = setLongVar(VARIABLE_QUOTAS_TOTAL_MODEL_DEPLOYMENTS,
QUOTAS_TOTAL_MODEL_DEPLOYMENTS);
QUOTAS_MAX_PARALLEL_EXECUTIONS = setLongVar(VARIABLE_QUOTAS_MAX_PARALLEL_EXECUTIONS,
QUOTAS_MAX_PARALLEL_EXECUTIONS);
QUOTAS_MAX_PARALLEL_EXECUTIONS = setLongVar(VARIABLE_QUOTAS_MAX_PARALLEL_EXECUTIONS,
QUOTAS_MAX_PARALLEL_EXECUTIONS);
cached = true;
}
}
// Lazily build the settings cache: a no-op when it is already populated.
private void checkCache() {
    if (cached) {
        return;
    }
    populateCache();
}
// Drop the cached values and immediately reload every variable from the database.
public synchronized void refreshCache() {
cached = false;
populateCache();
}
// Persist a single variable and refresh the cache so subsequent reads observe the new value.
public synchronized void updateVariable(String variableName, String variableValue, VariablesVisibility visibility) {
updateVariableInternal(variableName, variableValue, visibility);
refreshCache();
}
/**
 * Persists a batch of variables, then refreshes the cache once at the end.
 *
 * @param variablesToUpdate variables whose values should be written to the database
 */
public synchronized void updateVariables(List<Variables> variablesToUpdate) {
    for (Variables variable : variablesToUpdate) {
        updateVariableInternal(variable.getId(), variable.getValue(), variable.getVisibility());
    }
    refreshCache();
}
/**
* This method will invalidate the cache of variables. The next call to read a variable after invalidateCache() will
* trigger a read of all variables from the database.
*/
public synchronized void invalidateCache() {
// populateCache() runs lazily through checkCache() on the next variable read.
cached = false;
}
/**
* ******************************************************************
*/
// Glassfish installation directory; a compile-time constant, not read from the variables table.
private static final String GLASSFISH_DIR = "/srv/hops/glassfish";
public synchronized String getGlassfishDir() {
return GLASSFISH_DIR;
}
// Two-factor authentication settings, refreshed from the variables table via checkCache().
private String TWOFACTOR_AUTH = "false";
// Semicolon-separated list of user groups excluded from two-factor auth.
private String TWOFACTOR_EXCLUDE = "AGENT;CLUSTER_AGENT";
public synchronized String getTwoFactorAuth() {
checkCache();
return TWOFACTOR_AUTH;
}
public synchronized String getTwoFactorExclude() {
checkCache();
return TWOFACTOR_EXCLUDE;
}
// Modes for two-factor auth enforcement; the name doubles as the stored variable value.
public enum TwoFactorMode {
MANDATORY("mandatory", "User can not disable two factor auth."),
OPTIONAL("true", "Users can choose to disable two factor auth.");
private final String name;
private final String description;
TwoFactorMode(String name, String description) {
this.name = name;
this.description = description;
}
public String getName() {
return name;
}
public String getDescription() {
return description;
}
}
private String HOPS_RPC_TLS = "false";
/**
 * @return true when Hops RPC TLS is enabled (the cached variable equals "true",
 *         case-insensitively)
 */
public synchronized boolean getHopsRpcTls() {
    checkCache();
    // equalsIgnoreCase avoids the default-locale pitfall of toLowerCase():
    // e.g. in a Turkish locale "TRUE".toLowerCase() yields a dotless 'ı',
    // which would never match "true".
    return "true".equalsIgnoreCase(HOPS_RPC_TLS);
}
//Spark executor minimum memory
public synchronized int getSparkExecutorMinMemory() {
checkCache();
return SPARK_EXECUTOR_MIN_MEMORY;
}
/**
* Default Directory locations
*/
public static final String PRIVATE_DIRS = "/private_dirs/";
public static final String TENSORBOARD_DIRS = "/tensorboard/";
// Local Spark installation directory (exposed via getSparkDir() below).
private String SPARK_DIR = "/srv/hops/spark";
public static final String SPARK_EXAMPLES_DIR = "/examples/jars";
public static final String CONVERSION_DIR = "/ipython_conversions/";
// Spark configuration property keys (passed to spark-submit / spark-defaults).
public static final String SPARK_NUMBER_EXECUTORS_ENV
= "spark.executor.instances";
public static final String SPARK_DYNAMIC_ALLOC_ENV
= "spark.dynamicAllocation.enabled";
public static final String SPARK_DYNAMIC_ALLOC_MIN_EXECS_ENV
= "spark.dynamicAllocation.minExecutors";
public static final String SPARK_DYNAMIC_ALLOC_MAX_EXECS_ENV
= "spark.dynamicAllocation.maxExecutors";
public static final String SPARK_DYNAMIC_ALLOC_INIT_EXECS_ENV
= "spark.dynamicAllocation.initialExecutors";
public static final String SPARK_SHUFFLE_SERVICE
= "spark.shuffle.service.enabled";
public static final String SPARK_SUBMIT_DEPLOYMODE = "spark.submit.deployMode";
public static final String SPARK_DRIVER_MEMORY_ENV = "spark.driver.memory";
public static final String SPARK_DRIVER_CORES_ENV = "spark.driver.cores";
public static final String SPARK_DRIVER_EXTRACLASSPATH = "spark.driver.extraClassPath";
public static final String SPARK_EXECUTOR_MEMORY_ENV = "spark.executor.memory";
public static final String SPARK_EXECUTOR_CORES_ENV = "spark.executor.cores";
public static final String SPARK_EXECUTOR_GPU_AMOUNT = "spark.executor.resource.gpu.amount";
public static final String SPARK_TASK_RESOURCE_GPU_AMOUNT = "spark.task.resource.gpu.amount";
public static final String SPARK_EXECUTOR_RESOURCE_GPU_DISCOVERY_SCRIPT =
"spark.executor.resource.gpu.discoveryScript";
public static final String SPARK_EXECUTOR_EXTRACLASSPATH = "spark.executor.extraClassPath";
public static final String SPARK_DRIVER_STAGINGDIR_ENV = "spark.yarn.stagingDir";
public static final String SPARK_JAVA_LIBRARY_PROP = "java.library.path";
public static final String SPARK_EXECUTOR_EXTRA_JAVA_OPTS = "spark.executor.extraJavaOptions";
public static final String SPARK_DRIVER_EXTRA_JAVA_OPTIONS="spark.driver.extraJavaOptions";
public static final String SPARK_YARN_DIST_PYFILES = "spark.yarn.dist.pyFiles";
public static final String SPARK_YARN_DIST_FILES = "spark.yarn.dist.files";
public static final String SPARK_YARN_DIST_ARCHIVES = "spark.yarn.dist.archives";
public static final String SPARK_YARN_JARS = "spark.yarn.jars";
//Blacklisting properties
public static final String SPARK_BLACKLIST_ENABLED = "spark.blacklist.enabled";
public static final String SPARK_BLACKLIST_MAX_TASK_ATTEMPTS_PER_EXECUTOR =
"spark.blacklist.task.maxTaskAttemptsPerExecutor";
public static final String SPARK_BLACKLIST_MAX_TASK_ATTEMPTS_PER_NODE =
"spark.blacklist.task.maxTaskAttemptsPerNode";
public static final String SPARK_BLACKLIST_STAGE_MAX_FAILED_TASKS_PER_EXECUTOR =
"spark.blacklist.stage.maxFailedTasksPerExecutor";
public static final String SPARK_BLACKLIST_STAGE_MAX_FAILED_TASKS_PER_NODE =
"spark.blacklist.stage.maxFailedExecutorsPerNode";
public static final String SPARK_BLACKLIST_APPLICATION_MAX_FAILED_TASKS_PER_EXECUTOR =
"spark.blacklist.application.maxFailedTasksPerExecutor";
public static final String SPARK_BLACKLIST_APPLICATION_MAX_FAILED_TASKS_PER_NODE =
"spark.blacklist.application.maxFailedExecutorsPerNode";
public static final String SPARK_BLACKLIST_KILL_BLACKLISTED_EXECUTORS =
"spark.blacklist.killBlacklistedExecutors";
public static final String SPARK_TASK_MAX_FAILURES = "spark.task.maxFailures";
//PySpark properties
public static final String SPARK_APP_NAME_ENV = "spark.app.name";
public static final String SPARK_YARN_IS_PYTHON_ENV = "spark.yarn.isPython";
public static final String SPARK_PYSPARK_PYTHON = "PYSPARK_PYTHON";
public static final String SPARK_PYSPARK_PYTHON_OPTION = "spark.pyspark.python";
//Spark log4j and metrics properties
public static final String JOB_LOG4J_CONFIG = "log4j.configuration";
public static final String JOB_LOG4J_PROPERTIES = "log4j.properties";
//If the value of this property changes, it must be changed in spark-chef log4j.properties as well
public static final String LOGSTASH_JOB_INFO = "hopsworks.logstash.job.info";
public static final String SPARK_CACHE_FILENAMES
= "spark.yarn.cache.filenames";
public static final String SPARK_CACHE_SIZES = "spark.yarn.cache.sizes";
public static final String SPARK_CACHE_TIMESTAMPS
= "spark.yarn.cache.timestamps";
public static final String SPARK_CACHE_VISIBILITIES
= "spark.yarn.cache.visibilities";
public static final String SPARK_CACHE_TYPES = "spark.yarn.cache.types";
//PYSPARK constants
public static final String SPARK_PY_MAINCLASS
= "org.apache.spark.deploy.PythonRunner";
public static final long PYTHON_JOB_KUBE_WAITING_TIMEOUT_MS = 60000;
// Prefixes for propagating environment variables to the Spark AM and executors.
public static final String SPARK_YARN_APPMASTER_ENV = "spark.yarn.appMasterEnv.";
public static final String SPARK_EXECUTOR_ENV = "spark.executorEnv.";
public static final String SPARK_YARN_APPMASTER_SPARK_USER = SPARK_YARN_APPMASTER_ENV + "SPARK_USER";
public static final String SPARK_YARN_APPMASTER_YARN_MODE = SPARK_YARN_APPMASTER_ENV + "SPARK_YARN_MODE";
public static final String SPARK_YARN_APPMASTER_YARN_STAGING_DIR = SPARK_YARN_APPMASTER_ENV
+ "SPARK_YARN_STAGING_DIR";
public static final String SPARK_YARN_APPMASTER_CUDA_DEVICES = SPARK_YARN_APPMASTER_ENV + "CUDA_VISIBLE_DEVICES";
public static final String SPARK_YARN_APPMASTER_HIP_DEVICES = SPARK_YARN_APPMASTER_ENV + "HIP_VISIBLE_DEVICES";
public static final String SPARK_YARN_APPMASTER_ENV_EXECUTOR_GPUS = SPARK_YARN_APPMASTER_ENV + "EXECUTOR_GPUS";
public static final String SPARK_YARN_APPMASTER_LIBHDFS_OPTS = SPARK_YARN_APPMASTER_ENV + "LIBHDFS_OPTS";
public static final String SPARK_YARN_APPMASTER_IS_DRIVER = SPARK_YARN_APPMASTER_ENV + "IS_HOPS_DRIVER";
public static final String SPARK_EXECUTOR_SPARK_USER = SPARK_EXECUTOR_ENV + "SPARK_USER";
public static final String SPARK_EXECUTOR_ENV_EXECUTOR_GPUS = SPARK_EXECUTOR_ENV + "EXECUTOR_GPUS";
public static final String SPARK_EXECUTOR_LIBHDFS_OPTS = SPARK_EXECUTOR_ENV + "LIBHDFS_OPTS";
//docker
public static final String SPARK_YARN_APPMASTER_CONTAINER_RUNTIME = SPARK_YARN_APPMASTER_ENV
+ "YARN_CONTAINER_RUNTIME_TYPE";
public static final String SPARK_YARN_APPMASTER_DOCKER_IMAGE = SPARK_YARN_APPMASTER_ENV
+ "YARN_CONTAINER_RUNTIME_DOCKER_IMAGE";
public static final String SPARK_YARN_APPMASTER_DOCKER_MOUNTS = SPARK_YARN_APPMASTER_ENV
+ "YARN_CONTAINER_RUNTIME_DOCKER_MOUNTS";
public static final String SPARK_EXECUTOR_CONTAINER_RUNTIME = SPARK_EXECUTOR_ENV + "YARN_CONTAINER_RUNTIME_TYPE";
public static final String SPARK_EXECUTOR_DOCKER_IMAGE = SPARK_EXECUTOR_ENV + "YARN_CONTAINER_RUNTIME_DOCKER_IMAGE";
public static final String SPARK_EXECUTOR_DOCKER_MOUNTS = SPARK_EXECUTOR_ENV + "YARN_CONTAINER_RUNTIME_DOCKER_MOUNTS";
public static final String SPARK_HADOOP_FS_PERMISSIONS_UMASK = "spark.hadoop.fs.permissions.umask-mode";
public static final String YARN_PROPERTIES_DYNAMIC_PROPERTIES_STRING = "dynamicPropertiesString";
public static final String YARN_DYNAMIC_PROPERTIES_SEPARATOR = "@@"; // this has to be a regex for String.split()
//nccl
public static final String NCCL_SOCKET_NTHREADS = "NCCL_SOCKET_NTHREADS";
public static final String NCCL_NSOCKS_PERTHREAD = "NCCL_NSOCKS_PERTHREAD";
//Hive config
public static final String HIVE_SITE = "hive-site.xml";
// Local Spark installation directory.
public synchronized String getSparkDir() {
checkCache();
return SPARK_DIR;
}
public synchronized String getSparkConfDir() {
return getSparkDir() + "/conf";
}
// "/tmp" by default
private String STAGING_DIR = "/srv/hops/domains/domain1/staging";
public synchronized String getStagingDir() {
checkCache();
return STAGING_DIR;
}
private final String FLINK_CONF_DIR = "conf";
// Remember to change this in docker-images as well
private String FLINK_DIR = "/srv/hops/flink";
public synchronized String getFlinkDir() {
checkCache();
return FLINK_DIR;
}
// NOTE: the returned path ends with a trailing File.separator.
public String getFlinkConfDir() {
String flinkDir = getFlinkDir();
return flinkDir + File.separator + FLINK_CONF_DIR + File.separator;
}
private final String FLINK_LIB_DIR = "lib";
// NOTE: the returned path ends with a trailing File.separator.
public String getFlinkLibDir() {
String flinkDir = getFlinkDir();
return flinkDir + File.separator + FLINK_LIB_DIR + File.separator;
}
private final String FLINK_CONF_FILE = "flink-conf.yaml";
/**
 * @return absolute path of flink-conf.yaml inside the Flink conf directory
 */
public String getFlinkConfFile() {
    // getFlinkConfDir() already ends with File.separator; appending another
    // separator (as the old code did) produced a doubled separator in the path.
    return getFlinkConfDir() + FLINK_CONF_FILE;
}
// --- Installation directories, service users, and default quotas.
// Each private field holds a default that checkCache() may overwrite from the variables table.
private String AIRFLOW_DIR = "/srv/hops/airflow";
public synchronized String getAirflowDir() {
checkCache();
return AIRFLOW_DIR;
}
private String HADOOP_DIR = "/srv/hops/hadoop";
// This returns the unversioned base installation directory for hops-hadoop
// For example, "/srv/hops/hadoop" - it does not return "/srv/hops/hadoop-2.8.2"
public synchronized String getHadoopSymbolicLinkDir() {
checkCache();
return HADOOP_DIR;
}
public synchronized String getHadoopVersionedDir() {
checkCache();
return HADOOP_DIR + "-" + getHadoopVersion();
}
private String HIVE_SUPERUSER = "hive";
public synchronized String getHiveSuperUser() {
checkCache();
return HIVE_SUPERUSER;
}
private String ANACONDA_DEFAULT_REPO = "defaults";
public synchronized String getCondaDefaultRepo() {
checkCache();
return ANACONDA_DEFAULT_REPO;
}
private String HIVE_WAREHOUSE = "/apps/hive/warehouse";
public synchronized String getHiveWarehouse() {
checkCache();
return HIVE_WAREHOUSE;
}
private String HIVE_SCRATCHDIR = "/tmp/hive";
public synchronized String getHiveScratchdir() {
checkCache();
return HIVE_SCRATCHDIR;
}
// Duration strings with a unit suffix (e.g. "7d", "24h").
private String HIVE_SCRATCHDIR_DELAY = "7d";
public synchronized String getHiveScratchdirDelay() {
checkCache();
return HIVE_SCRATCHDIR_DELAY;
}
private String HIVE_SCRATCHDIR_CLEANER_INTERVAL = "24h";
public synchronized String getHiveScratchdirCleanerInterval() {
checkCache();
return HIVE_SCRATCHDIR_CLEANER_INTERVAL;
}
// -1 means no quota.
private long HIVE_DB_DEFAULT_QUOTA = -1;
public synchronized long getHiveDbDefaultQuota() {
checkCache();
return HIVE_DB_DEFAULT_QUOTA;
}
private String CERTS_DIR = "/srv/hops/certs-dir";
public synchronized String getCertsDir() {
checkCache();
return CERTS_DIR;
}
public synchronized String getHopsworksMasterEncPasswordFile() {
checkCache();
return getCertsDir() + File.separator + "encryption_master_password";
}
private String HOPSWORKS_INSTALL_DIR = "/srv/hops/domains/domain1";
public synchronized String getHopsworksDomainDir() {
checkCache();
return HOPSWORKS_INSTALL_DIR;
}
private String SUDOERS_DIR = "/srv/hops/sbin";
public synchronized String getSudoersDir() {
checkCache();
return SUDOERS_DIR;
}
private String HOPSWORKS_USER = "glassfish";
public synchronized String getHopsworksUser() {
checkCache();
return HOPSWORKS_USER;
}
private String HDFS_SUPERUSER = "hdfs";
public synchronized String getHdfsSuperUser() {
checkCache();
return HDFS_SUPERUSER;
}
private String SPARK_USER = "spark";
public synchronized String getSparkUser() {
checkCache();
return SPARK_USER;
}
private String FLINK_USER = "flink";
public synchronized String getFlinkUser() {
checkCache();
return FLINK_USER;
}
private Integer YARN_DEFAULT_QUOTA = 60000;
public synchronized Integer getYarnDefaultQuota() {
checkCache();
return YARN_DEFAULT_QUOTA;
}
private PaymentType DEFAULT_PAYMENT_TYPE = PaymentType.NOLIMIT;
public synchronized PaymentType getDefaultPaymentType() {
checkCache();
return DEFAULT_PAYMENT_TYPE;
}
// -1 means no quota.
private long HDFS_DEFAULT_QUOTA_MBs = -1;
public synchronized long getHdfsDefaultQuotaInMBs() {
checkCache();
return HDFS_DEFAULT_QUOTA_MBs;
}
// Set the DIR_ROOT (/Projects) to have DB storage policy, i.e. - small files stored on db
private DistributedFileSystemOps.StoragePolicy HDFS_BASE_STORAGE_POLICY
= DistributedFileSystemOps.StoragePolicy.SMALL_FILES;
// To not fill the SSDs with Logs files that nobody access frequently
// We set the StoragePolicy for the LOGS dir to be DEFAULT
private DistributedFileSystemOps.StoragePolicy HDFS_LOG_STORAGE_POLICY
= DistributedFileSystemOps.StoragePolicy.DEFAULT;
/**
 * Resolves a storage policy from the variables table, falling back to the given
 * default when the variable is missing, empty, or not a valid policy name.
 */
private DistributedFileSystemOps.StoragePolicy setHdfsStoragePolicy(String policyName,
    DistributedFileSystemOps.StoragePolicy defaultPolicy) {
    Optional<Variables> stored = findById(policyName);
    if (!stored.isPresent()) {
        return defaultPolicy;
    }
    String storedValue = stored.get().getValue();
    if (Strings.isNullOrEmpty(storedValue)) {
        return defaultPolicy;
    }
    try {
        return DistributedFileSystemOps.StoragePolicy.fromPolicy(storedValue);
    } catch (IllegalArgumentException ex) {
        LOGGER.warning("Error - not a valid storage policy! Value was:" + storedValue);
        return defaultPolicy;
    }
}
public synchronized DistributedFileSystemOps.StoragePolicy getHdfsBaseStoragePolicy() {
checkCache();
return HDFS_BASE_STORAGE_POLICY;
}
public synchronized DistributedFileSystemOps.StoragePolicy getHdfsLogStoragePolicy() {
checkCache();
return HDFS_LOG_STORAGE_POLICY;
}
private String AIRFLOW_WEB_UI_IP = "127.0.0.1";
private int AIRFLOW_WEB_UI_PORT = 12358;
// host:port plus the reverse-proxy path to the Airflow UI.
public synchronized String getAirflowWebUIAddress() {
checkCache();
return AIRFLOW_WEB_UI_IP + ":" + AIRFLOW_WEB_UI_PORT + "/hopsworks-api/airflow";
}
private Integer MAX_NUM_PROJ_PER_USER = 5;
public synchronized Integer getMaxNumProjPerUser() {
checkCache();
return MAX_NUM_PROJ_PER_USER;
}
private String HADOOP_VERSION = "2.8.2";
public synchronized String getHadoopVersion() {
checkCache();
return HADOOP_VERSION;
}
//Hadoop locations
public synchronized String getHadoopConfDir() {
return hadoopConfDir(getHadoopSymbolicLinkDir());
}
private String hadoopConfDir(String hadoopDir) {
return hadoopDir + "/" + HADOOP_CONF_RELATIVE_DIR;
}
public String getHadoopConfDir(String hadoopDir) {
return hadoopConfDir(hadoopDir);
}
// YARN shares the Hadoop conf directory.
public synchronized String getYarnConfDir() {
return getHadoopConfDir();
}
public String getYarnConfDir(String hadoopDir) {
return hadoopConfDir(hadoopDir);
}
public String getHopsLeaderElectionJarPath() {
return getHadoopSymbolicLinkDir() + "/share/hadoop/hdfs/lib/hops-leader-election-" + getHadoopVersion() + ".jar";
}
//Default configuration file names
public static final String DEFAULT_YARN_CONFFILE_NAME = "yarn-site.xml";
public static final String DEFAULT_HADOOP_CONFFILE_NAME = "core-site.xml";
private static final String DEFAULT_HDFS_CONFFILE_NAME = "hdfs-site.xml";
public static final String DEFAULT_SPARK_CONFFILE_NAME = "spark-defaults.conf";
//Environment variable keys
//TODO: Check if ENV_KEY_YARN_CONF_DIR should be replaced with ENV_KEY_YARN_CONF
private static final String ENV_KEY_YARN_CONF_DIR = "hdfs";
public static final String ENV_KEY_HADOOP_CONF_DIR = "HADOOP_CONF_DIR";
public static final String ENV_KEY_YARN_CONF = "YARN_CONF_DIR";
public static final String ENV_KEY_SPARK_CONF_DIR = "SPARK_CONF_DIR";
//YARN constants
public static final int YARN_DEFAULT_APP_MASTER_MEMORY = 2048;
public static final String HADOOP_COMMON_HOME_KEY = "HADOOP_COMMON_HOME";
public static final String HADOOP_HOME_KEY = "HADOOP_HOME";
public static final String HADOOP_HDFS_HOME_KEY = "HADOOP_HDFS_HOME";
public static final String HADOOP_YARN_HOME_KEY = "HADOOP_YARN_HOME";
public static final String HADOOP_CONF_DIR_KEY = "HADOOP_CONF_DIR";
private static final String HADOOP_CONF_RELATIVE_DIR = "etc/hadoop";
public static final String SPARK_CONF_RELATIVE_DIR = "conf";
public static final String YARN_CONF_RELATIVE_DIR = HADOOP_CONF_RELATIVE_DIR;
//Spark constants
// Subdirectory where Spark libraries will be placed.
public static final String SPARK_LOCALIZED_LIB_DIR = "__spark_libs__";
public static final String SPARK_LOCALIZED_CONF_DIR = "__spark_conf__";
public static final String SPARK_LOCALIZED_PYTHON_DIR = "__pyfiles__";
public static final String SPARK_LOCRSC_APP_JAR = "__app__.jar";
// Well-known dataset names created inside projects.
public static final String HOPS_EXPERIMENTS_DATASET = "Experiments";
public static final String HOPS_MODELS_DATASET = "Models";
public static final String HOPS_MODELS_SCHEMA = "model_schema.json";
public static final String HOPS_MODELS_INPUT_EXAMPLE = "input_example.json";
public static final String HOPS_TOUR_DATASET = "TestJob";
public static final String HOPS_DL_TOUR_DATASET = "TourData";
public static final String HOPS_TOUR_DATASET_JUPYTER = "Jupyter";
// Distribution-defined classpath to add to processes
public static final String SPARK_AM_MAIN
= "org.apache.spark.deploy.yarn.ApplicationMaster";
public static final String SPARK_CONFIG_FILE = "conf/spark-defaults.conf";
public static final String SPARK_BLACKLISTED_PROPS
= "conf/spark-blacklisted-properties.txt";
public static final int SPARK_MIN_EXECS = 1;
public static final int SPARK_MAX_EXECS = 2;
public static final String SPARK_HADOOP_FS_PERMISSIONS_UMASK_DEFAULT = "0007";
// Spark executor min memory
private int SPARK_EXECUTOR_MIN_MEMORY = 1024;
//Flink constants
public static final String FLINK_LOCRSC_FLINK_JAR = "flink.jar";
public static final int FLINK_APP_MASTER_MEMORY = 768;
public static final String HOPS_DEEP_LEARNING_TOUR_DATA = "tensorflow_demo/data";
public static final String HOPS_DEEP_LEARNING_TOUR_NOTEBOOKS = "tensorflow_demo/notebooks";
public static final String FLINK_AM_MAIN = "org.apache.flink.yarn.ApplicationMaster";
public static final String FLINK_ENV_JAVA_OPTS = "env.java.opts";
public static final String FLINK_ENV_JAVA_OPTS_JOBMANAGER = "env.java.opts.jobmanager";
public static final String FLINK_ENV_JAVA_OPTS_TASKMANAGER = "env.java.opts.taskmanager";
public static final String FLINK_STATE_CHECKPOINTS_DIR = "state.checkpoints.dir";
public static final String FLINK_WEB_UPLOAD_DIR = "web.upload.dir";
//Featurestore constants
public static final String HOPS_FEATURESTORE_TOUR_DATA = "featurestore_demo";
public static final String HOPS_FEATURESTORE_TOUR_JOB_CLASS = "io.hops.examples.featurestore_tour.Main";
public static final String HOPS_FEATURESTORE_TOUR_JOB_NAME = "featurestore_tour_job";
public static final String HOPS_FEATURESTORE_TOUR_JOB_INPUT_PARAM = "--input ";
public static final String HSFS_UTIL_MAIN_CLASS = "com.logicalclocks.utils.MainClass";
//Serving constants
public static final String INFERENCE_SCHEMANAME = "inferenceschema";
public static final int INFERENCE_SCHEMAVERSION = 3;
//Kafka constants
public static final String PROJECT_COMPATIBILITY_SUBJECT = "projectcompatibility";
// Kafka schema subjects that users must not create themselves.
public static final Set<String> KAFKA_SUBJECT_BLACKLIST =
new HashSet<>(Arrays.asList(INFERENCE_SCHEMANAME, PROJECT_COMPATIBILITY_SUBJECT));
// Path of the Flink fat jar on the local filesystem.
public synchronized String getLocalFlinkJarPath() {
    return String.format("%s/flink.jar", getFlinkDir());
}
// HDFS location of the Flink jar for the configured Flink user.
public synchronized String getFlinkJarPath() {
    return hdfsFlinkJarPath(getFlinkUser());
}
private String hdfsFlinkJarPath(String user) {
    return String.format("hdfs:///user/%s/flink.jar", user);
}
// Classpath glob covering every jar under the Flink lib directory.
public synchronized String getFlinkDefaultClasspath() {
    return flinkDefaultClasspath(getFlinkDir());
}
private String flinkDefaultClasspath(String baseDir) {
    return String.format("%s/lib/*", baseDir);
}
public String getFlinkDefaultClasspath(String flinkDir) {
    return flinkDefaultClasspath(flinkDir);
}
// HDFS location of the Spark log4j configuration file.
public String getSparkLog4JPath() {
    return String.format("hdfs:///user/%s/log4j.properties", getSparkUser());
}
// Classpath glob covering every jar under the Spark lib directory.
public synchronized String getSparkDefaultClasspath() {
    return sparkDefaultClasspath(getSparkDir());
}
private String sparkDefaultClasspath(String baseDir) {
    return String.format("%s/lib/*", baseDir);
}
private static final String HADOOP_GLASSPATH_GLOB_ENV_VAR_KEY = "HADOOP_GLOB";
// Lazily computed; volatile so the double-checked locking below is safe.
private volatile String HADOOP_CLASSPATH_GLOB = null;
/**
* Returns the Hadoop classpath glob, reading it from the HADOOP_GLOB environment
* variable or, failing that, by running `hadoop classpath --glob` once and caching
* the result for the lifetime of the bean.
*
* @throws IOException if the hadoop subprocess exits with a non-zero code
*/
public String getHadoopClasspathGlob() throws IOException {
if (HADOOP_CLASSPATH_GLOB == null) {
// Double-checked locking: only the first caller pays for the subprocess.
synchronized (Settings.class) {
if (HADOOP_CLASSPATH_GLOB == null) {
String classpathGlob = System.getenv(HADOOP_GLASSPATH_GLOB_ENV_VAR_KEY);
if (classpathGlob == null) {
LOGGER.log(Level.WARNING, HADOOP_GLASSPATH_GLOB_ENV_VAR_KEY + " environment variable is not set. "
+ "Launching a subprocess to discover it");
String bin = Paths.get(getHadoopSymbolicLinkDir(), "bin", "hadoop").toString();
ProcessDescriptor processDescriptor = new ProcessDescriptor.Builder()
.addCommand(bin)
.addCommand("classpath")
.addCommand("--glob")
.build();
ProcessResult result = osProcessExecutor.execute(processDescriptor);
if (result.getExitCode() != 0) {
throw new IOException("Could not get Hadoop classpath, exit code " + result.getExitCode()
+ " Error: " + result.getStderr());
}
HADOOP_CLASSPATH_GLOB = result.getStdout();
} else {
HADOOP_CLASSPATH_GLOB = classpathGlob;
}
}
}
}
return HADOOP_CLASSPATH_GLOB;
}
/**
* Static final fields are allowed in session beans:
* http://stackoverflow.com/questions/9141673/static-variables-restriction-in-session-beans
*/
//Directory names in HDFS
public static final String DIR_ROOT = "Projects";
public static final String DIR_META_TEMPLATES = Path.SEPARATOR + "user" + Path.SEPARATOR + "metadata"
+ Path.SEPARATOR + "uploads" + Path.SEPARATOR;
public static final String PROJECT_STAGING_DIR = "Resources";
// Any word added to reserved words in DEFAULT_RESERVED_PROJECT_NAMES and DEFAULT_RESERVED_HIVE_NAMES should
// also be added in the documentation in:
// https://hopsworks.readthedocs.io/en/<hopsworksDocVersion>/user_guide/hopsworks/newProject.html
private final static String DEFAULT_RESERVED_PROJECT_NAMES = "hops-system,hopsworks,information_schema,airflow," +
"glassfish_timers,grafana,hops,metastore,mysql,ndbinfo,performance_schema,sqoop,sys," +
"base,python37,python38,filebeat";
//Hive reserved words can be found at:
//https://cwiki.apache.org/confluence/display/hive/LanguageManual+DDL#LanguageManualDDL-Keywords,Non-
//reservedKeywordsandReservedKeywords
private final static String DEFAULT_RESERVED_HIVE_NAMES = "ALL, ALTER, AND, ARRAY, AS, AUTHORIZATION, BETWEEN, " +
"BIGINT, BINARY, BOOLEAN, BOTH, BY, CASE, CAST, CHAR, COLUMN, CONF, CREATE, CROSS, CUBE, CURRENT, CURRENT_DATE, " +
"CURRENT_TIMESTAMP, CURSOR, DATABASE, DATE, DECIMAL, DELETE, DESCRIBE, DISTINCT, DOUBLE, DROP, ELSE, END, " +
"EXCHANGE, EXISTS, EXTENDED, EXTERNAL, FALSE, FETCH, FLOAT, FOLLOWING, FOR, FROM, FULL, FUNCTION, GRANT, GROUP, " +
"GROUPING, HAVING, IF, IMPORT, IN, INNER, INSERT, INT, INTERSECT, INTERVAL, INTO, IS, JOIN, LATERAL, LEFT, LESS, " +
"LIKE, LOCAL, MACRO, MAP, MORE, NONE, NOT, NULL, OF, ON, OR, ORDER, OUT, OUTER, OVER, PARTIALSCAN, PARTITION, " +
"PERCENT, PRECEDING, PRESERVE, PROCEDURE, RANGE, READS, REDUCE, REVOKE, RIGHT, ROLLUP, ROW, ROWS, SELECT, SET, " +
"SMALLINT, TABLE, TABLESAMPLE, THEN, TIMESTAMP, TO, TRANSFORM, TRIGGER, TRUE, TRUNCATE, UNBOUNDED, UNION, " +
"UNIQUEJOIN, UPDATE, USER, USING, UTC_TMESTAMP, VALUES, VARCHAR, WHEN, WHERE, WINDOW, WITH, COMMIT, ONLY, " +
"REGEXP, RLIKE, ROLLBACK, START, CACHE, CONSTRAINT, FOREIGN, PRIMARY, REFERENCES, DAYOFWEEK, EXTRACT, FLOOR, " +
"INTEGER, PRECISION, VIEWS, TIME, NUMERIC, SYNC";
private Set<String> RESERVED_PROJECT_NAMES;
private String RESERVED_PROJECT_NAMES_STR;
/**
 * Returns the names that may not be used as project names: the configured
 * reserved names plus all Hive reserved keywords (lower-cased).
 * <p>
 * The previous implementation merged the Hive keywords into the internal
 * {@code RESERVED_PROJECT_NAMES} set and returned that internal reference,
 * letting callers mutate cached state; a defensive copy is returned instead.
 */
public synchronized Set<String> getReservedProjectNames() {
    checkCache();
    Set<String> reserved = RESERVED_PROJECT_NAMES == null
        ? new HashSet<>() : new HashSet<>(RESERVED_PROJECT_NAMES);
    reserved.addAll(getReservedHiveNames());
    return reserved;
}
// Hive reserved keywords, lower-cased (parsed from DEFAULT_RESERVED_HIVE_NAMES).
public synchronized Set<String> getReservedHiveNames() {
return setStringHashSetLowerCase(DEFAULT_RESERVED_HIVE_NAMES, ",", true);
}
// NOTE(review): RESERVED_PROJECT_NAMES_STR is not assigned anywhere in this chunk;
// presumably populateCache() sets it alongside RESERVED_PROJECT_NAMES — verify it is
// non-null before this getter is used, otherwise the result starts with "null, ...".
public synchronized String getProjectNameReservedWords() {
checkCache();
return (RESERVED_PROJECT_NAMES_STR + ", " + DEFAULT_RESERVED_HIVE_NAMES).toLowerCase();
}
//Only for unit test
public synchronized String getProjectNameReservedWordsTest() {
return (DEFAULT_RESERVED_PROJECT_NAMES + ", " + DEFAULT_RESERVED_HIVE_NAMES).toLowerCase();
}
// OpenSearch
// All OpenSearch configuration is delegated to this settings holder, populated by checkCache().
OpenSearchSettings OPENSEARCH_SETTINGS;
public synchronized List<String> getOpenSearchIps(){
checkCache();
return OPENSEARCH_SETTINGS.getOpenSearchIps();
}
public synchronized int getOpenSearchPort() {
checkCache();
return OPENSEARCH_SETTINGS.getOpenSearchPort();
}
public synchronized int getOpenSearchRESTPort() {
checkCache();
return OPENSEARCH_SETTINGS.getOpenSearchRESTPort();
}
public synchronized String getOpenSearchEndpoint() {
checkCache();
return OPENSEARCH_SETTINGS.getOpenSearchEndpoint();
}
public synchronized String getOpenSearchRESTEndpoint() {
checkCache();
return OPENSEARCH_SETTINGS.getOpenSearchRESTEndpoint();
}
public synchronized boolean isOpenSearchSecurityEnabled() {
checkCache();
return OPENSEARCH_SETTINGS.isOpenSearchSecurityEnabled();
}
public synchronized boolean isOpenSearchHTTPSEnabled() {
checkCache();
return OPENSEARCH_SETTINGS.isHttpsEnabled();
}
public synchronized String getOpenSearchAdminUser() {
checkCache();
return OPENSEARCH_SETTINGS.getAdminUser();
}
public synchronized String getOpenSearchServiceLogUser() {
checkCache();
return OPENSEARCH_SETTINGS.getServiceLogUser();
}
public synchronized String getOpenSearchAdminPassword() {
checkCache();
return OPENSEARCH_SETTINGS.getAdminPassword();
}
public synchronized boolean isOpenSearchJWTEnabled() {
checkCache();
return OPENSEARCH_SETTINGS.isOpenSearchJWTEnabled();
}
public synchronized String getOpenSearchJwtUrlParameter() {
checkCache();
return OPENSEARCH_SETTINGS.getOpenSearchJWTURLParameter();
}
public synchronized long getOpenSearchJwtExpMs() {
checkCache();
return OPENSEARCH_SETTINGS.getOpenSearchJWTExpMs();
}
public synchronized Integer getOpenSearchDefaultScrollPageSize() {
checkCache();
return OPENSEARCH_SETTINGS.getDefaultScrollPageSize();
}
public synchronized Integer getOpenSearchMaxScrollPageSize() {
checkCache();
return OPENSEARCH_SETTINGS.getMaxScrollPageSize();
}
// 7 days, in milliseconds.
private long OpenSearch_LOGS_INDEX_EXPIRATION = 7 * 24 * 60 * 60 * 1000;
public synchronized long getOpenSearchLogsIndexExpiration() {
checkCache();
return OpenSearch_LOGS_INDEX_EXPIRATION;
}
// 7 days, in seconds.
private static final int JOB_LOGS_EXPIRATION = 604800;
/**
* TTL for job logs in opensearch, in seconds.
*
* @return
*/
public int getJobLogsExpiration() {
return JOB_LOGS_EXPIRATION;
}
// Maximum number of log characters shown in the UI per job.
private static final long JOB_LOGS_DISPLAY_SIZE = 1000000;
public long getJobLogsDisplaySize() {
return JOB_LOGS_DISPLAY_SIZE;
}
// Name of the OpenSearch document field holding the job id.
private static final String JOB_LOGS_ID_FIELD = "jobid";
public String getJobLogsIdField() {
return JOB_LOGS_ID_FIELD;
}
// CertificateMaterializer service. Delay for deleting crypto material from
// the local filesystem. The lower the value the more frequent we reach DB
// for materialization
// Suffix, defaults to minutes if omitted:
// ms: milliseconds
// s: seconds
// m: minutes (default)
// h: hours
// d: days
private String CERTIFICATE_MATERIALIZER_DELAY = "1m";
public synchronized String getCertificateMaterializerDelay() {
checkCache();
return CERTIFICATE_MATERIALIZER_DELAY;
}
// Validity of user certificates, in days (kept as a string in the variables table).
private String CERTIFICATE_USER_VALID_DAYS = "12";
public synchronized String getCertificateUserValidDays() {
checkCache();
return CERTIFICATE_USER_VALID_DAYS;
}
// DNS domain used for Consul-based service discovery.
private String SERVICE_DISCOVERY_DOMAIN = "consul";
public synchronized String getServiceDiscoveryDomain() {
checkCache();
return SERVICE_DISCOVERY_DOMAIN;
}
// Kibana
public static final String KIBANA_INDEX_PREFIX = ".kibana";
private String KIBANA_IP = "10.0.2.15";
private static final int KIBANA_PORT = 5601;
// Scheme depends on the KIBANA_HTTPS_ENABELED flag (populated in populateCache()).
public synchronized String getKibanaUri() {
checkCache();
return (KIBANA_HTTPS_ENABELED ? "https" : "http") + "://" + KIBANA_IP +
":" + KIBANA_PORT;
}
public String getKibanaAppUri() {
return "/hopsworks-api/kibana/app/discover?";
}
// Same URI with the OpenSearch JWT appended as a query parameter.
public String getKibanaAppUri(String jwtToken) {
return getKibanaAppUri() + OPENSEARCH_SETTINGS.getOpenSearchJWTURLParameter() + "=" + jwtToken + "&";
}
/*
 * Comma-separated list of user emails that should not be persisted in the
 * userlogins table for auditing.
 * kagent -> agent@hops.io
 */
private String WHITELIST_USERS_LOGIN = "agent@hops.io";

public synchronized String getWhitelistUsersLogin() {
  checkCache();
  return WHITELIST_USERS_LOGIN;
}

// Jupyter
private String JUPYTER_DIR = "/srv/hops/jupyter";

/** Installation/working directory of the Jupyter service on the host. */
public synchronized String getJupyterDir() {
  checkCache();
  return JUPYTER_DIR;
}

// OS group owning Jupyter files.
private String JUPYTER_GROUP = "jupyter";

public synchronized String getJupyterGroup() {
  checkCache();
  return JUPYTER_GROUP;
}

// Scheme expected in the Origin header of Jupyter requests.
private String JUPYTER_ORIGIN_SCHEME = "https";

public synchronized String getJupyterOriginScheme() {
  checkCache();
  return JUPYTER_ORIGIN_SCHEME;
}

// WebSocket keep-alive ping interval towards Jupyter, in milliseconds.
private long JUPYTER_WS_PING_INTERVAL_MS = 10000L;

public synchronized long getJupyterWSPingInterval() {
  checkCache();
  return JUPYTER_WS_PING_INTERVAL_MS;
}
// Port the Prometheus server listens on.
private Integer PROMETHEUS_PORT = 9089;

public synchronized Integer getPrometheusPort() {
  checkCache();
  return PROMETHEUS_PORT;
}

//Git
private String GIT_DIR = "/srv/hops/git";

/** Base directory of the Git service on the host. */
public synchronized String getGitDir() {
  checkCache();
  return GIT_DIR;
}

// Maximum runtime of a git command, in minutes; also drives the git JWT validity.
private Integer GIT_MAX_COMMAND_TIMEOUT_MINUTES = 60;

/** Validity of the git-command JWT, in milliseconds (GIT_MAX_COMMAND_TIMEOUT_MINUTES converted). */
public synchronized long getGitJwtExpMs() {
  checkCache();
  return GIT_MAX_COMMAND_TIMEOUT_MINUTES * 60 * 1000;
}

// Container image used to run git commands.
private String GIT_IMAGE_NAME = "git:0.1.0";

// NOTE(review): unlike the other getters this one never calls checkCache(), so a
// changed variable would not be reloaded — confirm this is intentional.
public synchronized String getGitImageName() {
  return GIT_IMAGE_NAME;
}
// Docker cgroup resource limits applied to project containers.
private boolean DOCKER_CGROUP_ENABLED = false;

/** Whether cgroup limits are enforced on Docker containers. */
public synchronized boolean isDockerCgroupEnabled() {
  checkCache();
  return DOCKER_CGROUP_ENABLED;
}

// Hard memory limit (size string, e.g. "6GB").
private String DOCKER_CGROUP_MEMORY_LIMIT = "6GB";

public synchronized String getDockerCgroupMemoryLimit() {
  checkCache();
  return DOCKER_CGROUP_MEMORY_LIMIT;
}

// Soft memory limit (size string).
private String DOCKER_CGROUP_MEMORY_SOFT_LIMIT = "2GB";

public synchronized String getDockerCgroupSoftLimit() {
  checkCache();
  return DOCKER_CGROUP_MEMORY_SOFT_LIMIT;
}

// CPU quota as a percentage of DOCKER_CGROUP_CPU_PERIOD.
private Double DOCKER_CGROUP_CPU_QUOTA = 100.0;

public synchronized Double getDockerCgroupCpuQuota() {
  checkCache();
  return DOCKER_CGROUP_CPU_QUOTA;
}

// CPU scheduling period in microseconds (cgroup cpu.cfs_period_us convention).
private Integer DOCKER_CGROUP_CPU_PERIOD = 100000;

public synchronized Integer getDockerCgroupCpuPeriod() {
  checkCache();
  return DOCKER_CGROUP_CPU_PERIOD;
}

// How often the cgroup monitor runs (duration string).
private String DOCKER_CGROUP_MONITOR_INTERVAL = "10m";

public synchronized String getDockerCgroupIntervalMonitor() {
  checkCache();
  return DOCKER_CGROUP_MONITOR_INTERVAL;
}
// Interval of the timer that shuts down idle Jupyter servers.
// NOTE(review): original comment said "Service key rotation interval" — it looked
// copy-pasted and unrelated to the constant below; confirm against the consumers.
private static final String JUPYTER_SHUTDOWN_TIMER_INTERVAL = "jupyter_shutdown_timer_interval";
private String jupyterShutdownTimerInterval = "30m";

public synchronized String getJupyterShutdownTimerInterval() {
  checkCache();
  return jupyterShutdownTimerInterval;
}

// OS user running the Kafka service.
private String KAFKA_USER = "kafka";

public synchronized String getKafkaUser() {
  checkCache();
  return KAFKA_USER;
}

// Kafka installation directory on the host.
private String KAFKA_DIR = "/srv/kafka";

public synchronized String getKafkaDir() {
  checkCache();
  return KAFKA_DIR;
}

// Anaconda installation directory on the host.
private String ANACONDA_DIR = "/srv/hops/anaconda";

public synchronized String getAnacondaDir() {
  checkCache();
  return ANACONDA_DIR;
}
// Name of the single conda environment used for project python envs.
private String condaEnvName = "theenv";

/**
 * Constructs the path to the project environment in Anaconda
 *
 * @return conda dir
 */
public String getAnacondaProjectDir() {
  return getAnacondaDir() + File.separator + "envs" + File.separator + condaEnvName;
}

//TODO(Theofilos): Used by Flink. Will be removed as part of refactoring *YarnRunnerBuilders.
public String getCurrentCondaEnvironment() {
  return condaEnvName;
}

// Feature flag: whether Anaconda-backed python environments are enabled.
private Boolean ANACONDA_ENABLED = true;

public synchronized Boolean isAnacondaEnabled() {
  checkCache();
  return ANACONDA_ENABLED;
}

// Feature flag: whether users may download files from the platform.
private Boolean DOWNLOAD_ALLOWED = true;

public synchronized Boolean isDownloadAllowed() {
  checkCache();
  return DOWNLOAD_ALLOWED;
}
/**
 * kagent liveness monitor settings
 */
private String KAGENT_USER = "kagent";

/** OS user the kagent service runs as. */
public synchronized String getKagentUser() {
  checkCache();
  return KAGENT_USER;
}

private boolean KAGENT_LIVENESS_MONITOR_ENABLED = false;

/** Whether the kagent liveness monitor is active. */
public synchronized boolean isKagentLivenessMonitorEnabled() {
  checkCache();
  return KAGENT_LIVENESS_MONITOR_ENABLED;
}

// Threshold (duration string) after which a silent kagent is considered dead.
private String KAGENT_LIVENESS_THRESHOLD = "10s";

public synchronized String getKagentLivenessThreshold() {
  checkCache();
  return KAGENT_LIVENESS_THRESHOLD;
}

// Verbosity of REST request/response audit logging.
private RESTLogLevel HOPSWORKS_REST_LOG_LEVEL = RESTLogLevel.PROD;

public synchronized RESTLogLevel getHopsworksRESTLogLevel() {
  checkCache();
  return HOPSWORKS_REST_LOG_LEVEL;
}
// "0"/"1" flag: whether this is the first login on a fresh installation.
private String FIRST_TIME_LOGIN = "0";

public synchronized String getFirstTimeLogin() {
  checkCache();
  return FIRST_TIME_LOGIN;
}

// Email of the built-in administrator account.
private String ADMIN_EMAIL = "admin@hopsworks.ai";

public synchronized String getAdminEmail() {
  checkCache();
  return ADMIN_EMAIL;
}

/**
 * Returns true when the built-in admin account exists and its stored password
 * differs from the shipped default; false when the admin user does not exist.
 */
public synchronized boolean isDefaultAdminPasswordChanged() {
  Users user = userFacade.findByEmail(ADMIN_EMAIL);
  if (user != null) {
    // Presumably the hash of the shipped default password (64 hex chars,
    // looks like SHA-256) — confirm against the password hashing scheme.
    String DEFAULT_ADMIN_PWD = "12fa520ec8f65d3a6feacfa97a705e622e1fea95b80b521ec016e43874dfed5a";
    return !DEFAULT_ADMIN_PWD.equals(user.getPassword());
  }
  return false;
}
// Master password protecting the platform SSL keystores.
private String HOPSWORKS_DEFAULT_SSL_MASTER_PASSWORD = "adminpw";

public synchronized String getHopsworksMasterPasswordSsl() {
  checkCache();
  return HOPSWORKS_DEFAULT_SSL_MASTER_PASSWORD;
}

// Defaults applied when creating new Kafka topics.
private Integer KAFKA_DEFAULT_NUM_PARTITIONS = 2;
private Integer KAFKA_DEFAULT_NUM_REPLICAS = 1;

public synchronized Integer getKafkaDefaultNumPartitions() {
  checkCache();
  return KAFKA_DEFAULT_NUM_PARTITIONS;
}

public synchronized Integer getKafkaDefaultNumReplicas() {
  checkCache();
  return KAFKA_DEFAULT_NUM_REPLICAS;
}

// Opaque identifier of this cluster's certificate.
private String CLUSTER_CERT = "asdasxasx8as6dx8a7sx7asdta8dtasxa8";

public synchronized String getCLUSTER_CERT() {
  checkCache();
  return CLUSTER_CERT;
}
// HOPSWORKS-3158
private String HOPSWORKS_PUBLIC_HOST = "";

/**
 * Publicly reachable host name of this Hopsworks installation.
 * <p>
 * Declared {@code synchronized} for consistency with every other cached getter:
 * {@code checkCache()} may repopulate the cached fields, and all other readers
 * guard that with the instance monitor — this one previously did not.
 */
public synchronized String getHopsworksPublicHost() {
  checkCache();
  return HOPSWORKS_PUBLIC_HOST;
}
// Hopsworks
public static final Charset ENCODING = StandardCharsets.UTF_8;
public static final String HOPS_USERS_HOMEDIR = "/home/";
public static final String HOPS_USERNAME_SEPARATOR = "__";
public static final String UNZIP_FILES_SCRIPTNAME = "unzip-hdfs-files.sh";
public static final int USERNAME_LEN = 8;
public static final int MAX_USERNAME_SUFFIX = 99;
public static final int MAX_RETRIES = 500;

// OpenSearch metadata document field names and doc types.
public static final String META_NAME_FIELD = "name";
public static final String META_USAGE_TIME = "usage_time";
public static final String META_DESCRIPTION_FIELD = "description";
public static final String META_INDEX = "projects";
public static final String META_PROJECT_ID_FIELD = "project_id";
public static final String META_DATASET_ID_FIELD = "dataset_id";
public static final String META_DOC_TYPE_FIELD = "doc_type";
public static final String DOC_TYPE_PROJECT = "proj";
public static final String DOC_TYPE_DATASET = "ds";
public static final String DOC_TYPE_INODE = "inode";
public static final String META_ID = "_id";
public static final String META_DATA_NESTED_FIELD = "xattr";
public static final String META_NOTEBOOK_JUPYTER_CONFIG_XATTR_NAME = "jupyter_configuration";
public static final String META_DATA_FIELDS = META_DATA_NESTED_FIELD + ".*";

// Filename conventions
public static final String FILENAME_DISALLOWED_CHARS = " /\\?*:|'\"<>%()&;#öäåÖÅÄàáéèâîïüÜ@${}[]+~^$`";
public static final String SUBDIR_DISALLOWED_CHARS = "/\\?*:|'\"<>%()&;#öäåÖÅÄàáéèâîïüÜ@${}[]+~^$`";
public static final String SHARED_FILE_SEPARATOR = "::";
public static final String DOUBLE_UNDERSCORE = "__";

// Authentication Constants
// POSIX-compliant username length
public static final int USERNAME_LENGTH = 8;
// Starting user id from 1000 to create a POSIX-compliant username: meb1000
public static final int STARTING_USER = 1000;
public static final int PASSWORD_MIN_LENGTH = 6;
public static final int DEFAULT_SECURITY_ANSWER_LEN = 16;
public static final String DEFAULT_ROLE = "HOPS_USER";
// NOTE(review): the original comment here duplicated the "username length" note
// above and did not describe this constant; presumably the number of account
// validation attempts allowed — confirm at call sites.
public static final int ACCOUNT_VALIDATION_TRIES = 5;
// Issuer of the QrCode
public static final String ISSUER = "hops.io";
// Used to indicate that a python version is unknown
public static final String UNKNOWN_LIBRARY_VERSION = "UNKNOWN";
public static final String PROJECT_PYTHON_DIR = PROJECT_STAGING_DIR + "/.python";
public static final String ENVIRONMENT_FILE = "environment.yml";
public static final String PROJECT_PYTHON_ENVIRONMENT_FILE = PROJECT_PYTHON_DIR + "/" + ENVIRONMENT_FILE;
// 1 when the user is logged in, 0 otherwise
public static final int IS_ONLINE = 1;
public static final int IS_OFFLINE = 0;
public static final int ALLOWED_FALSE_LOGINS = 5;
public static final int ALLOWED_AGENT_FALSE_LOGINS = 20;
// Hopsworks username prefix
public static final String USERNAME_PREFIX = "meb";
public static final String KEYSTORE_SUFFIX = "__kstore.jks";
public static final String TRUSTSTORE_SUFFIX = "__tstore.jks";
public static final String CERT_PASS_SUFFIX = "__cert.key";
public static final String K_CERTIFICATE = "k_certificate";
public static final String T_CERTIFICATE = "t_certificate";
public static final String DOMAIN_CA_TRUSTSTORE = "t_certificate";
// Glassfish truststore, used by hopsutil to initialize https connection to Hopsworks
// NOTE(review): this comment sat above CRYPTO_MATERIAL_PASSWORD in the original;
// it may belong to DOMAIN_CA_TRUSTSTORE above — confirm.
public static final String CRYPTO_MATERIAL_PASSWORD = "material_passwd";
// Used by HopsUtil
public static final String HOPSWORKS_PROJECTID_PROPERTY = "hopsworks.projectid";
public static final String HOPSWORKS_PROJECTNAME_PROPERTY = "hopsworks.projectname";
public static final String HOPSWORKS_PROJECTUSER_PROPERTY = "hopsworks.projectuser";
public static final String HOPSWORKS_JOBNAME_PROPERTY = "hopsworks.job.name";
public static final String HOPSWORKS_JOBTYPE_PROPERTY = "hopsworks.job.type";
public static final String HOPSWORKS_APPID_PROPERTY = "hopsworks.job.appid";
public static final String KAFKA_BROKERADDR_PROPERTY = "hopsworks.kafka.brokeraddress";
public static final String KAFKA_JOB_TOPICS_PROPERTY = "hopsworks.kafka.job.topics";
public static final String SERVER_TRUSTSTORE_PROPERTY = "server.truststore";
public static final String KAFKA_CONSUMER_GROUPS = "hopsworks.kafka.consumergroups";
public static final String HOPSWORKS_REST_ENDPOINT_PROPERTY = "hopsworks.restendpoint";
public static final String HOPSUTIL_INSECURE_PROPERTY = "hopsutil.insecure";
public static final String HOPSWORKS_OPENSEARCH_ENDPOINT_PROPERTY = "hopsworks.opensearch.endpoint";
public static final String HOPSWORKS_DOMAIN_CA_TRUSTSTORE_PROPERTY = "hopsworks.domain.truststore";

// File preview limits: image size in bytes, text preview in lines.
private int FILE_PREVIEW_IMAGE_SIZE = 10000000;
private int FILE_PREVIEW_TXT_SIZE = 100;
public static final int FILE_PREVIEW_TXT_SIZE_BYTES = 1024 * 384;

// %s placeholders: dataset name, dataset description.
public static final String README_TEMPLATE = "*This is an auto-generated README.md"
    + " file for your Dataset!*\n"
    + "To replace it, go into your DataSet and edit the README.md file.\n"
    + "\n" + "*%s* DataSet\n" + "===\n" + "\n"
    + "## %s";

public static final String FILE_PREVIEW_TEXT_TYPE = "text";
public static final String FILE_PREVIEW_HTML_TYPE = "html";
public static final String FILE_PREVIEW_IMAGE_TYPE = "image";
public static final String FILE_PREVIEW_MODE_TAIL = "tail";

//OpenSearch
// log index pattern
public static final String OPENSEARCH_LOGS_INDEX = "logs";
public static final String OPENSEARCH_PYPI_LIBRARIES_INDEX_PATTERN_PREFIX = "pypi_libraries_";
public static final String OPENSEARCH_LOGS_INDEX_PATTERN = "_" + Settings.OPENSEARCH_LOGS_INDEX + "-*";
public static final String OPENSEARCH_SERVING_INDEX = "serving";
public static final String OPENSEARCH_SERVICES_INDEX = ".services";
public static final String OPENSEARCH_LOG_INDEX_REGEX = ".*_" + OPENSEARCH_LOGS_INDEX + "-\\d{4}.\\d{2}.\\d{2}";
public static final String OPENSEARCH_SERVING_INDEX_REGEX =
    ".*_" + OPENSEARCH_SERVING_INDEX + "-\\d{4}.\\d{2}.\\d{2}";
public static final String OPENSEARCH_SERVICES_INDEX_REGEX = OPENSEARCH_SERVICES_INDEX + "-\\d{4}.\\d{2}.\\d{2}";
public static final String OPENSEARCH_PYPI_LIBRARIES_INDEX_REGEX =
    OPENSEARCH_PYPI_LIBRARIES_INDEX_PATTERN_PREFIX + "*";
// Other OpenSearch indexes
public static final String OPENSEARCH_INDEX_APP_PROVENANCE = "app_provenance";
// OpenSearch aliases
public static final String OPENSEARCH_PYPI_LIBRARIES_ALIAS = "pypi_libraries";
/** Directory under the certs dir holding transient (in-flight) certificates. */
public String getHopsworksTmpCertDir() {
  return Paths.get(getCertsDir(), "transient").toString();
}

/** HDFS directory under the superuser's home where Kafka certificates are staged. */
public String getHdfsTmpCertDir() {
  return "/user/" + getHdfsSuperUser() + "/" + "kafkacerts";
}

/** Local temporary directory for user certificates. */
public String getHopsworksTrueTempCertDir() {
  return "/tmp/usercerts/";
}

// Dataset request subject
public static final String MESSAGE_DS_REQ_SUBJECT = "Dataset access request.";

// QUOTA
public static final float DEFAULT_YARN_MULTIPLICATOR = 1.0f;

/**
 * Returns the maximum image size in bytes that can be previewed in the browser.
 *
 * @return maximum image size in bytes
 */
public synchronized int getFilePreviewImageSize() {
  checkCache();
  return FILE_PREVIEW_IMAGE_SIZE;
}

/**
 * Returns the maximum number of lines of the file that can be previewed in the browser.
 *
 * @return maximum number of preview lines
 */
public synchronized int getFilePreviewTxtSize() {
  checkCache();
  return FILE_PREVIEW_TXT_SIZE;
}
// Project creation: default datasets
/** Datasets that every new project is provisioned with. */
public static enum BaseDataset {
  LOGS("Logs",
      "Contains the logs for jobs that have been run through the Hopsworks platform."),
  RESOURCES("Resources",
      "Contains resources used by jobs, for example, jar files.");

  /** Display name of the dataset. */
  private final String name;
  /** Human-readable description shown to users. */
  private final String description;

  private BaseDataset(String datasetName, String datasetDescription) {
    this.name = datasetName;
    this.description = datasetDescription;
  }

  /** @return the dataset's display name */
  public String getName() {
    return name;
  }

  /** @return the dataset's description */
  public String getDescription() {
    return description;
  }
}
/** Datasets created on demand when a service is enabled for a project. */
public static enum ServiceDataset {
  JUPYTER("Jupyter", "Contains Jupyter notebooks."),
  SERVING("Models", "Contains models to be used for serving."),
  EXPERIMENTS("Experiments", "Contains experiments from using the hops python api"),
  TRAININGDATASETS("Training_Datasets", "Contains curated training datasets created from the feature store"),
  STATISTICS("Statistics", "Contains the statistics for feature groups and training datasets"),
  DATAVALIDATION("DataValidation", "Contains rules and results for Features validation"),
  INGESTION("Ingestion", "Temporary dataset to store feature data ready for ingestion");

  // Display name of the dataset.
  private final String name;
  // Human-readable description shown to users.
  private final String description;

  private ServiceDataset(String name, String description) {
    this.name = name;
    this.description = description;
  }

  /** @return the dataset's display name */
  public String getName() {
    return name;
  }

  /** @return the dataset's description */
  public String getDescription() {
    return description;
  }
}
// File recording PIDs of running Jupyter notebook servers.
public static final String JUPYTER_PIDS = "/tmp/jupyterNotebookServer.pids";

// Semicolon-separated list of per-project resource directories.
private String RESOURCE_DIRS = ".sparkStaging;spark-warehouse";

public synchronized String getResourceDirs() {
  checkCache();
  return RESOURCE_DIRS;
}

// Activity window (duration string) used by feature-store job monitoring.
private String FS_JOB_ACTIVITY_TIME = "5m";

public synchronized String getFsJobActivityTime() {
  checkCache();
  return FS_JOB_ACTIVITY_TIME;
}

// No-arg constructor — presumably required by the EJB/CDI container; confirm.
public Settings() {
}
/**
 * Get the variable value with the given name.
 *
 * @param id variable name (primary key of the variables table)
 * @return the variable with the given name, or {@link Optional#empty()} if no such variable exists
 */
public Optional<Variables> findById(String id) {
  try {
    return Optional.of(em.createNamedQuery("Variables.findById", Variables.class)
        .setParameter("id", id)
        .getSingleResult());
  } catch (NoResultException e) {
    // No row for this name — map the JPA exception to an empty Optional.
    return Optional.empty();
  }
}
/**
 * Get all variables from the database.
 *
 * @return List with all the variables
 */
public List<Variables> getAllVariables() {
  return em.createNamedQuery("Variables.findAll", Variables.class).getResultList();
}

/**
 * Update a variable in the database.
 *
 * Persists only when the value or visibility actually changed, to avoid
 * needless writes.
 *
 * @param variableName name
 * @param variableValue value
 * @param visibility new visibility of the variable
 * @throws NoResultException if the variable does not exist
 */
private void updateVariableInternal(String variableName, String variableValue, VariablesVisibility visibility) {
  Variables var = findById(variableName)
      .orElseThrow(() -> new NoResultException("Variable <" + variableName + "> does not exist in the database"));
  if (!var.getValue().equals(variableValue) || !var.getVisibility().equals(visibility)) {
    var.setValue(variableValue);
    var.setVisibility(visibility);
    em.persist(var);
  }
}

/** Detaches the given variable from the persistence context. */
public void detach(Variables variable) {
  em.detach(variable);
}
// Lazily built Hadoop/Yarn configuration; cached after the first call.
Configuration conf;

/**
 * Builds (once) and returns the combined Yarn + Hadoop + HDFS configuration,
 * locating the config files via environment variables with fall-back defaults.
 *
 * @return the cached Configuration
 * @throws IllegalStateException if any of the yarn/core/hdfs site files is missing
 */
public Configuration getConfiguration() throws IllegalStateException {
  if (conf == null) {
    String hadoopDir = getHadoopSymbolicLinkDir();
    //Get the path to the Yarn configuration file from environment variables
    String yarnConfDir = System.getenv(Settings.ENV_KEY_YARN_CONF_DIR);
    //If not found in environment variables: use default
    // NOTE(review): the original comment said "warn and use default" but no
    // warning is logged — confirm whether logging was intended.
    if (yarnConfDir == null) {
      yarnConfDir = getYarnConfDir(hadoopDir);
    }
    Path confPath = new Path(yarnConfDir);
    File confFile = new File(confPath + File.separator
        + Settings.DEFAULT_YARN_CONFFILE_NAME);
    if (!confFile.exists()) {
      throw new IllegalStateException("No Yarn conf file");
    }
    //Also add the hadoop config
    String hadoopConfDir = System.getenv(Settings.ENV_KEY_HADOOP_CONF_DIR);
    //If not found in environment variables: use default
    if (hadoopConfDir == null) {
      hadoopConfDir = hadoopDir + "/" + Settings.HADOOP_CONF_RELATIVE_DIR;
    }
    confPath = new Path(hadoopConfDir);
    File hadoopConf = new File(confPath + "/"
        + Settings.DEFAULT_HADOOP_CONFFILE_NAME);
    if (!hadoopConf.exists()) {
      throw new IllegalStateException("No Hadoop conf file");
    }
    File hdfsConf = new File(confPath + "/"
        + Settings.DEFAULT_HDFS_CONFFILE_NAME);
    if (!hdfsConf.exists()) {
      throw new IllegalStateException("No HDFS conf file");
    }
    //Set the Configuration object for the returned YarnClient
    conf = new Configuration();
    conf.addResource(new Path(confFile.getAbsolutePath()));
    conf.addResource(new Path(hadoopConf.getAbsolutePath()));
    conf.addResource(new Path(hdfsConf.getAbsolutePath()));
    addPathToConfig(conf, confFile);
    addPathToConfig(conf, hadoopConf);
    setDefaultConfValues(conf);
  }
  return conf;
}
/**
 * Chains a new classloader onto the Configuration so that resources next to
 * {@code path} are resolvable through the configuration's classloader.
 *
 * @param conf configuration whose classloader is extended
 * @param path config file to expose on the classpath
 * @throws RuntimeException if the file path cannot be converted to a URL
 */
private void addPathToConfig(Configuration conf, File path) {
  // chain-in a new classloader
  URL fileUrl = null;
  try {
    // File.toURL() is deprecated and does not escape characters that are
    // illegal in URLs (e.g. spaces); toURI().toURL() converts correctly.
    fileUrl = path.toURI().toURL();
  } catch (MalformedURLException e) {
    throw new RuntimeException("Erroneous config file path", e);
  }
  URL[] urls = {fileUrl};
  ClassLoader cl = new URLClassLoader(urls, conf.getClassLoader());
  conf.setClassLoader(cl);
}
/** Fills in the default filesystem implementations when the loaded config omits them. */
private void setDefaultConfValues(Configuration conf) {
  setIfUnset(conf, "fs.hdfs.impl", "org.apache.hadoop.hdfs.DistributedFileSystem");
  setIfUnset(conf, "fs.file.impl", "org.apache.hadoop.fs.LocalFileSystem");
}

/** Sets {@code key} to {@code value} only when the configuration has no value for it. */
private static void setIfUnset(Configuration conf, String key, String value) {
  if (conf.get(key, null) == null) {
    conf.set(key, value);
  }
}
// Maximum number of Kafka topics a project may create.
private int KAFKA_MAX_NUM_TOPICS = 10;

public synchronized int getKafkaMaxNumTopics() {
  checkCache();
  return KAFKA_MAX_NUM_TOPICS;
}

// Maximum retries when polling a service for status.
private int MAX_STATUS_POLL_RETRY = 5;

public synchronized int getMaxStatusPollRetry() {
  checkCache();
  return MAX_STATUS_POLL_RETRY;
}
/**
 * Returns aggregated log dir path for an application with the the given appId.
 *
 * NOTE(review): this reads the {@code conf} field directly; if
 * {@code getConfiguration()} has never been called, {@code conf} is null and
 * this throws NPE — confirm callers always initialize the configuration first.
 *
 * @param hdfsUser user
 * @param appId appId
 * @return path, or null when log aggregation is disabled
 */
public String getAggregatedLogPath(String hdfsUser, String appId) {
  boolean logPathsAreAggregated = conf.getBoolean(
      YarnConfiguration.LOG_AGGREGATION_ENABLED,
      YarnConfiguration.DEFAULT_LOG_AGGREGATION_ENABLED);
  String aggregatedLogPath = null;
  if (logPathsAreAggregated) {
    // Only the first configured remote log dir / suffix is used.
    String[] nmRemoteLogDirs = conf.getStrings(
        YarnConfiguration.NM_REMOTE_APP_LOG_DIR,
        YarnConfiguration.DEFAULT_NM_REMOTE_APP_LOG_DIR);
    String[] nmRemoteLogDirSuffix = conf.getStrings(
        YarnConfiguration.NM_REMOTE_APP_LOG_DIR_SUFFIX,
        YarnConfiguration.DEFAULT_NM_REMOTE_APP_LOG_DIR_SUFFIX);
    aggregatedLogPath = nmRemoteLogDirs[0] + File.separator + hdfsUser
        + File.separator + nmRemoteLogDirSuffix[0] + File.separator
        + appId;
  }
  return aggregatedLogPath;
}
// For performance reasons, we have an in-memory cache of files being unzipped
// Lazily remove them from the cache, when we check the FS and they aren't there.
private Set<CompressionInfo> zippingFiles = new HashSet<>();

/** Registers an in-progress zip operation (guarded by the instance monitor). */
public synchronized void addZippingState(CompressionInfo compressionInfo) {
  zippingFiles.add(compressionInfo);
}

private Set<CompressionInfo> unzippingFiles = new HashSet<>();

/** Registers an in-progress unzip operation (guarded by the instance monitor). */
public synchronized void addUnzippingState(CompressionInfo compressionInfo) {
  unzippingFiles.add(compressionInfo);
}
/**
 * Returns the state of a tracked zip/unzip operation on the given HDFS path,
 * read from the fsm.txt file in the operation's staging directory.
 * Returns "NONE" when no operation is tracked (or its staging dir is gone),
 * "NOT_FOUND" when the state file is unreadable or empty, otherwise the
 * trimmed contents of fsm.txt (e.g. "FAILED" / "SUCCESS"). Terminal states
 * also remove the cache entry and delete the state file.
 */
public synchronized String getZipState(String hdfsPath) {
  boolean zipOperation = false;
  boolean unzipOperation = false;
  // Look the path up in both caches; at most one of these is non-null.
  CompressionInfo zipInfo = zippingFiles.stream()
      .filter(zinfo -> zinfo.getHdfsPath().toString().equals(hdfsPath))
      .findAny()
      .orElse(null);
  CompressionInfo unzipInfo = unzippingFiles.stream()
      .filter(uzinfo -> uzinfo.getHdfsPath().toString().equals(hdfsPath))
      .findAny()
      .orElse(null);
  String compressionDir = null;
  String fsmPath = null;
  if (zipInfo != null) {
    compressionDir = getStagingDir() + File.separator + zipInfo.getStagingDirectory();
    fsmPath = compressionDir + "/fsm.txt";
    zipOperation = true;
  } else if (unzipInfo != null) {
    compressionDir = getStagingDir() + File.separator + unzipInfo.getStagingDirectory();
    fsmPath = compressionDir + "/fsm.txt";
    unzipOperation = true;
  } else {
    // Nothing tracked for this path.
    return "NONE";
  }
  String state = "NOT_FOUND";
  try {
    state = new String(java.nio.file.Files.readAllBytes(Paths.get(fsmPath)));
    state = state.trim();
  } catch (IOException ex) {
    if (!java.nio.file.Files.exists(Paths.get(compressionDir))) {
      state = "NONE";
      // lazily remove the file, probably because it has finished zipping/unzipping
      if (zipOperation) {
        zippingFiles.remove(zipInfo);
      } else if (unzipOperation) {
        unzippingFiles.remove(unzipInfo);
      }
    }
  }
  // If a terminal state has been reached, remove the entry and the file.
  if (state.isEmpty() || state.compareTo("FAILED") == 0 || state.compareTo("SUCCESS") == 0) {
    try {
      if (zipOperation) {
        zippingFiles.remove(zipInfo);
      } else if (unzipOperation) {
        unzippingFiles.remove(unzipInfo);
      }
      java.nio.file.Files.deleteIfExists(Paths.get(fsmPath));
    } catch (IOException ex) {
      Logger.getLogger(Settings.class.getName()).log(Level.SEVERE, null, ex);
    }
  }
  if (state.isEmpty()) {
    state = "NOT_FOUND";
  }
  return state;
}
// Whether the Python kernel is offered in Jupyter.
private boolean PYTHON_KERNEL = true;

public synchronized boolean isPythonKernelEnabled() {
  checkCache();
  return PYTHON_KERNEL;
}

// PyPI JSON API URL template; {package} is substituted with the package name.
private String PYPI_REST_ENDPOINT = "https://pypi.org/pypi/{package}/json";

public synchronized String getPyPiRESTEndpoint() {
  checkCache();
  return PYPI_REST_ENDPOINT;
}

// How often the PyPI library indexer runs (duration string).
private String PYPI_INDEXER_TIMER_INTERVAL = "1d";

public synchronized String getPyPiIndexerTimerInterval() {
  checkCache();
  return PYPI_INDEXER_TIMER_INTERVAL;
}

// PyPI "simple" index endpoint used for package listing.
private String PYPI_SIMPLE_ENDPOINT = "https://pypi.org/simple/";

public synchronized String getPyPiSimpleEndpoint() {
  checkCache();
  return PYPI_SIMPLE_ENDPOINT;
}

private boolean PYPI_INDEXER_TIMER_ENABLED = true;

public synchronized boolean isPyPiIndexerTimerEnabled() {
  checkCache();
  return PYPI_INDEXER_TIMER_ENABLED;
}

// How often installed python libraries are checked for updates (duration string).
private String PYTHON_LIBRARY_UPDATES_MONITOR_INTERVAL = "1d";

public synchronized String getPythonLibraryUpdatesMonitorInterval() {
  checkCache();
  return PYTHON_LIBRARY_UPDATES_MONITOR_INTERVAL;
}

// Version of the bundled hops-examples artifacts.
private String HOPS_EXAMPLES_VERSION = "0.3.0";

/** Jar file name of the bundled Spark examples for the configured version. */
public synchronized String getHopsExamplesSparkFilename() {
  checkCache();
  return "hops-examples-spark-" + HOPS_EXAMPLES_VERSION + ".jar";
}

// Relative path of the account e-mail verification page.
private String VERIFICATION_PATH = "/hopsworks-admin/security/validate_account.xhtml";

public synchronized String getEmailVerificationEndpoint() {
  checkCache();
  return VERIFICATION_PATH;
}
//Dela START
// Variable names (keys in the variables table) for the Dela / hops.site settings.
private static final String VARIABLE_HOPSSITE_BASE_URI = "hops_site_endpoint";
private static final String VARIABLE_HOPSSITE_BASE_URI_HOST = "hops_site_host";
private static final String VARIABLE_CLUSTER_CERT = "hopsworks_certificate";
private static final String VARIABLE_DELA_ENABLED = "dela_enabled";
private static final String VARIABLE_DELA_CLIENT_TYPE = "dela_client_type";
private static final String VARIABLE_HOPSSITE_HEARTBEAT_INTERVAL = "hopssite_heartbeat_interval";
private static final String VARIABLE_DELA_CLUSTER_ID = "cluster_id";
private static final String VARIABLE_DELA_CLUSTER_IP = "dela_cluster_ip";
private static final String VARIABLE_DELA_CLUSTER_HTTP_PORT = "dela_cluster_http_port";
private static final String VARIABLE_DELA_PUBLIC_HOPSWORKS_PORT = "dela_hopsworks_public_port";
private static final String VARIABLE_PUBLIC_HTTPS_PORT = "public_https_port";
private static final String VARIABLE_DELA_SEARCH_ENDPOINT = "dela_search_endpoint";
private static final String VARIABLE_DELA_TRANSFER_ENDPOINT = "dela_transfer_endpoint";
private static final String VARIABLE_HOPSWORKS_PUBLIC_HOST = "hopsworks_public_host";

// Log level used for Dela debugging output.
public static final Level DELA_DEBUG = Level.INFO;

// Cached values with their defaults (overridden from the variables table).
private String HOPSSITE_HOST = "hops.site";
private String HOPSSITE = "http://hops.site:5081/hops-site/api";
private Boolean DELA_ENABLED = false; // set to false if not found in variables table
private DelaClientType DELA_CLIENT_TYPE = DelaClientType.FULL_CLIENT;
private long HOPSSITE_HEARTBEAT_RETRY = 10 * 1000l; //10s
private long HOPSSITE_HEARTBEAT_INTERVAL = 10 * 60 * 1000l;//10min
private String DELA_TRANSFER_IP = "localhost";
private String DELA_TRANSFER_HTTP_PORT = "42000";
private String DELA_PUBLIC_HOPSWORK_PORT = "8080";
private String PUBLIC_HTTPS_PORT = "8181";
//set on registration after Dela is contacted to detect public port
private String DELA_SEARCH_ENDPOINT = "";
private String DELA_TRANSFER_ENDPOINT = "";
//set on cluster registration
private String DELA_CLUSTER_ID = null;
// Public address of this Dela endpoint; set via setDELA_PUBLIC_ENDPOINT.
private AddressJSON DELA_PUBLIC_ENDPOINT = null;
// Well-known file names inside a Dela dataset.
public static final String MANIFEST_FILE = "manifest.json";
public static final String README_FILE = "README.md";
/**
 * Refreshes every Dela/hops.site cached field from the variables table,
 * keeping the current value as the default when the variable is absent.
 * NOTE(review): some fields use setVar and others setStrVar — both helpers are
 * defined elsewhere in this class; confirm they are equivalent for Strings.
 */
private void populateDelaCache() {
  DELA_ENABLED = setBoolVar(VARIABLE_DELA_ENABLED, DELA_ENABLED);
  DELA_CLIENT_TYPE = DelaClientType.from(setVar(VARIABLE_DELA_CLIENT_TYPE, DELA_CLIENT_TYPE.type));
  HOPSSITE_CLUSTER_NAME = setVar(VARIABLE_HOPSSITE_CLUSTER_NAME, HOPSSITE_CLUSTER_NAME);
  HOPSSITE_CLUSTER_PSWD = setVar(VARIABLE_HOPSSITE_CLUSTER_PSWD, HOPSSITE_CLUSTER_PSWD);
  HOPSSITE_CLUSTER_PSWD_AUX = setVar(VARIABLE_HOPSSITE_CLUSTER_PSWD_AUX, HOPSSITE_CLUSTER_PSWD_AUX);
  HOPSSITE_HOST = setVar(VARIABLE_HOPSSITE_BASE_URI_HOST, HOPSSITE_HOST);
  HOPSSITE = setVar(VARIABLE_HOPSSITE_BASE_URI, HOPSSITE);
  HOPSSITE_HEARTBEAT_INTERVAL = setLongVar(VARIABLE_HOPSSITE_HEARTBEAT_INTERVAL, HOPSSITE_HEARTBEAT_INTERVAL);
  DELA_TRANSFER_IP = setStrVar(VARIABLE_DELA_CLUSTER_IP, DELA_TRANSFER_IP);
  DELA_TRANSFER_HTTP_PORT = setStrVar(VARIABLE_DELA_CLUSTER_HTTP_PORT, DELA_TRANSFER_HTTP_PORT);
  DELA_SEARCH_ENDPOINT = setStrVar(VARIABLE_DELA_SEARCH_ENDPOINT, DELA_SEARCH_ENDPOINT);
  DELA_TRANSFER_ENDPOINT = setStrVar(VARIABLE_DELA_TRANSFER_ENDPOINT, DELA_TRANSFER_ENDPOINT);
  DELA_PUBLIC_HOPSWORK_PORT = setStrVar(VARIABLE_DELA_PUBLIC_HOPSWORKS_PORT, DELA_PUBLIC_HOPSWORK_PORT);
  PUBLIC_HTTPS_PORT = setStrVar(VARIABLE_PUBLIC_HTTPS_PORT, PUBLIC_HTTPS_PORT);
  DELA_CLUSTER_ID = setStrVar(VARIABLE_DELA_CLUSTER_ID, DELA_CLUSTER_ID);
}
/**
 * Refreshes the service master JWT and its renewal tokens from the variables
 * table; missing renewal-token variables are cached as empty strings.
 */
private void populateServiceJWTCache() {
  SERVICE_MASTER_JWT = setStrVar(VARIABLE_SERVICE_MASTER_JWT, SERVICE_MASTER_JWT);
  RENEW_TOKENS = new String[NUM_OF_SERVICE_RENEW_TOKENS];
  for (int i = 0; i < NUM_OF_SERVICE_RENEW_TOKENS; i++) {
    // Variable names follow the indexed template, one per renewal token slot.
    String variableKey = String.format(SERVICE_RENEW_TOKEN_VARIABLE_TEMPLATE, i);
    String token = setStrVar(variableKey, "");
    RENEW_TOKENS[i] = token;
  }
}
/** Whether the Dela service is enabled for this cluster. */
public synchronized Boolean isDelaEnabled() {
  checkCache();
  return DELA_ENABLED;
}

// NOTE(review): does not call checkCache() unlike the surrounding getters —
// confirm the client type is intentionally fixed after startup.
public synchronized DelaClientType getDelaClientType() {
  return DELA_CLIENT_TYPE;
}

/** Host name of the hops.site service. */
public synchronized String getHOPSSITE_HOST() {
  checkCache();
  return HOPSSITE_HOST;
}

/** Base URI of the hops.site REST API. */
public synchronized String getHOPSSITE() {
  checkCache();
  return HOPSSITE;
}

/** Retry delay for hops.site heartbeats, in milliseconds. */
public synchronized long getHOPSSITE_HEARTBEAT_RETRY() {
  checkCache();
  return HOPSSITE_HEARTBEAT_RETRY;
}

/** Interval between hops.site heartbeats, in milliseconds. */
public synchronized long getHOPSSITE_HEARTBEAT_INTERVAL() {
  checkCache();
  return HOPSSITE_HEARTBEAT_INTERVAL;
}

/** IP of the Dela transfer service. */
public synchronized String getDELA_TRANSFER_IP() {
  checkCache();
  return DELA_TRANSFER_IP;
}

/** HTTP port of the Dela transfer service. */
public synchronized String getDELA_TRANSFER_HTTP_PORT() {
  checkCache();
  return DELA_TRANSFER_HTTP_PORT;
}

/** Full HTTP endpoint of the Dela transfer service, with trailing slash. */
public synchronized String getDELA_TRANSFER_HTTP_ENDPOINT() {
  checkCache();
  return "http://" + DELA_TRANSFER_IP + ":" + DELA_TRANSFER_HTTP_PORT + "/";
}

/** Public Hopsworks port advertised for Dela. */
public synchronized String getDELA_HOPSWORKS_PORT() {
  checkCache();
  return DELA_PUBLIC_HOPSWORK_PORT;
}

/** Public HTTPS port of this Hopsworks installation. */
public synchronized String getPUBLIC_HTTPS_PORT() {
  checkCache();
  return PUBLIC_HTTPS_PORT;
}
// In-memory only (set via setDELA_PUBLIC_ENDPOINT); not backed by the variables
// table, hence no checkCache() call.
public synchronized AddressJSON getDELA_PUBLIC_ENDPOINT() {
  return DELA_PUBLIC_ENDPOINT;
}
/**
 * Returns the cached Dela search endpoint, falling back to a lookup of the
 * {@code dela_search_endpoint} variable when the cache holds null.
 */
public synchronized String getDELA_SEARCH_ENDPOINT() {
  checkCache();
  if (DELA_SEARCH_ENDPOINT != null) {
    return DELA_SEARCH_ENDPOINT;
  }
  // Bug fix: look the variable up by its NAME; the previous code passed the
  // cached value itself (DELA_SEARCH_ENDPOINT, null here) as the lookup key —
  // compare the correct pattern in getDELA_CLUSTER_ID().
  return setStrVar(VARIABLE_DELA_SEARCH_ENDPOINT, null);
}
/**
 * Returns the cached Dela transfer endpoint, falling back to a lookup of the
 * {@code dela_transfer_endpoint} variable when the cache holds null.
 */
public synchronized String getDELA_TRANSFER_ENDPOINT() {
  checkCache();
  if (DELA_TRANSFER_ENDPOINT != null) {
    return DELA_TRANSFER_ENDPOINT;
  }
  // Bug fix: look the variable up by its NAME; the previous code passed the
  // cached value itself (DELA_TRANSFER_ENDPOINT, null here) as the lookup key —
  // compare the correct pattern in getDELA_CLUSTER_ID().
  return setStrVar(VARIABLE_DELA_TRANSFER_ENDPOINT, null);
}
/**
 * Stores the public Dela endpoint and derives + persists the search and
 * transfer endpoint variables (insert when absent, update otherwise).
 */
public synchronized void setDELA_PUBLIC_ENDPOINT(AddressJSON endpoint) {
  DELA_PUBLIC_ENDPOINT = endpoint;
  String delaSearchEndpoint = "https://" + endpoint.getIp() + ":"
      + getPUBLIC_HTTPS_PORT() + "/hopsworks-api/api";
  String delaTransferEndpoint = endpoint.getIp() + ":" + endpoint.getPort() + "/" + endpoint.getId();
  if (getDELA_SEARCH_ENDPOINT() == null) {
    em.persist(new Variables(VARIABLE_DELA_SEARCH_ENDPOINT, delaSearchEndpoint));
  } else {
    em.merge(new Variables(VARIABLE_DELA_SEARCH_ENDPOINT, delaSearchEndpoint));
  }
  DELA_SEARCH_ENDPOINT = delaSearchEndpoint;
  if (getDELA_TRANSFER_ENDPOINT() == null) {
    em.persist(new Variables(VARIABLE_DELA_TRANSFER_ENDPOINT, delaTransferEndpoint));
  } else {
    em.merge(new Variables(VARIABLE_DELA_TRANSFER_ENDPOINT, delaTransferEndpoint));
  }
  DELA_TRANSFER_ENDPOINT = delaTransferEndpoint;
}

/** Persists the Dela cluster id (insert when absent, update otherwise) and caches it. */
public synchronized void setDELA_CLUSTER_ID(String id) {
  if (getDELA_CLUSTER_ID() == null) {
    em.persist(new Variables(VARIABLE_DELA_CLUSTER_ID, id));
  } else {
    em.merge(new Variables(VARIABLE_DELA_CLUSTER_ID, id));
  }
  DELA_CLUSTER_ID = id;
}
/**
 * Returns the cached Dela cluster id, falling back to a variables-table
 * lookup when the cache holds null.
 */
public synchronized String getDELA_CLUSTER_ID() {
  checkCache();
  if (DELA_CLUSTER_ID != null) {
    return DELA_CLUSTER_ID;
  }
  return setStrVar(VARIABLE_DELA_CLUSTER_ID, null);
}

/** IP of the public Dela endpoint, or null when not yet registered. */
public synchronized String getDELA_DOMAIN() {
  if (DELA_PUBLIC_ENDPOINT != null) {
    return DELA_PUBLIC_ENDPOINT.getIp();
  }
  return null;
}
//************************************************CERTIFICATES********************************************************
// Layout of the hops-site certificate material under the certs dir.
private static final String HOPS_SITE_CA_DIR = "hops-site-certs";
private final static String HOPS_SITE_CERTFILE = "/pub.pem";
private final static String HOPS_SITE_CA_CERTFILE = "/ca_pub.pem";
private final static String HOPS_SITE_INTERMEDIATE_CERTFILE = "/intermediate_ca_pub.pem";
private final static String HOPS_SITE_KEY_STORE = "/keystores/keystore.jks";
private final static String HOPS_SITE_TRUST_STORE = "/keystores/truststore.jks";
// Variable names for the hops.site cluster registration credentials.
private static final String VARIABLE_HOPSSITE_CLUSTER_NAME = "hops_site_cluster_name";
private static final String VARIABLE_HOPSSITE_CLUSTER_PSWD = "hops_site_cluster_pswd";
private static final String VARIABLE_HOPSSITE_CLUSTER_PSWD_AUX = "hops_site_cluster_pswd_aux";
private String HOPSSITE_CLUSTER_NAME = null;
private String HOPSSITE_CLUSTER_PSWD = null;
private String HOPSSITE_CLUSTER_PSWD_AUX = "1234";
/** Name under which this cluster is registered at hops.site, if any. */
public synchronized Optional<String> getHopsSiteClusterName() {
  checkCache();
  return Optional.ofNullable(HOPSSITE_CLUSTER_NAME);
}

/** Persists the hops.site cluster name (update when present, insert otherwise) and caches it. */
public synchronized void setHopsSiteClusterName(String clusterName) {
  if (getHopsSiteClusterName().isPresent()) {
    em.merge(new Variables(VARIABLE_HOPSSITE_CLUSTER_NAME, clusterName));
  } else {
    em.persist(new Variables(VARIABLE_HOPSSITE_CLUSTER_NAME, clusterName));
  }
  HOPSSITE_CLUSTER_NAME = clusterName;
}
/**
 * Deletes the hops.site cluster-name variable from the database (if present)
 * and clears the cached value.
 */
public synchronized void deleteHopsSiteClusterName() {
  if (getHopsSiteClusterName().isPresent()) {
    Optional<Variables> v = findById(VARIABLE_HOPSSITE_CLUSTER_NAME);
    if (v.isPresent()) {
      // Bug fix: em.remove must be handed the managed entity, not the Optional
      // wrapper — passing the Optional makes JPA throw IllegalArgumentException
      // and the row was never deleted.
      em.remove(v.get());
      HOPSSITE_CLUSTER_NAME = null;
    }
  }
}
public synchronized String getHopsSiteClusterPswdAux() {
checkCache();
return HOPSSITE_CLUSTER_PSWD_AUX;
}
public synchronized Optional<String> getHopsSiteClusterPswd() {
checkCache();
return Optional.ofNullable(HOPSSITE_CLUSTER_PSWD);
}
public synchronized void setHopsSiteClusterPswd(String pswd) {
if (getHopsSiteClusterPswd().isPresent()) {
em.merge(new Variables(VARIABLE_HOPSSITE_CLUSTER_PSWD, pswd));
} else {
em.persist(new Variables(VARIABLE_HOPSSITE_CLUSTER_PSWD, pswd));
}
HOPSSITE_CLUSTER_PSWD = pswd;
}
// Directory holding the hops-site CA certificates, under the general certs dir.
public synchronized String getHopsSiteCaDir() {
return getCertsDir() + File.separator + HOPS_SITE_CA_DIR;
}
// Path of the sudo script used to manage the CA keystore.
public synchronized String getHopsSiteCaScript() {
return getSudoersDir() + File.separator + "ca-keystore.sh";
}
// Public certificate of this cluster for hops-site.
public synchronized String getHopsSiteCert() {
return getHopsSiteCaDir() + HOPS_SITE_CERTFILE;
}
// Root CA public certificate.
public synchronized String getHopsSiteCaCert() {
return getHopsSiteCaDir() + HOPS_SITE_CA_CERTFILE;
}
// Intermediate CA public certificate.
public synchronized String getHopsSiteIntermediateCert() {
return getHopsSiteCaDir() + HOPS_SITE_INTERMEDIATE_CERTFILE;
}
// JKS keystore used for hops-site TLS.
public synchronized String getHopsSiteKeyStorePath() {
return getHopsSiteCaDir() + HOPS_SITE_KEY_STORE;
}
// JKS truststore used for hops-site TLS.
public synchronized String getHopsSiteTrustStorePath() {
return getHopsSiteCaDir() + HOPS_SITE_TRUST_STORE;
}
//Dela END
//************************************************ZOOKEEPER********************************************************
public static final int ZOOKEEPER_SESSION_TIMEOUT_MS = 30 * 1000;//30 seconds
public static final int ZOOKEEPER_CONNECTION_TIMEOUT_MS = 30 * 1000;// 30 seconds
//Zookeeper END
//************************************************KAFKA********************************************************
public static final String KAFKA_ACL_WILDCARD = "*";
//Kafka END
//-------------------------Remote auth [OAuth2, KRB, LDAP]----------------------------
private static final String VARIABLE_KRB_AUTH = "kerberos_auth";
private static final String VARIABLE_LDAP_AUTH = "ldap_auth";
private static final String VARIABLE_LDAP_GROUP_MAPPING = "ldap_group_mapping";
private static final String VARIABLE_LDAP_USER_ID = "ldap_user_id";
private static final String VARIABLE_LDAP_USER_GIVEN_NAME = "ldap_user_givenName";
private static final String VARIABLE_LDAP_USER_SURNAME = "ldap_user_surname";
private static final String VARIABLE_LDAP_USER_EMAIL = "ldap_user_email";
private static final String VARIABLE_LDAP_USER_SEARCH_FILTER = "ldap_user_search_filter";
private static final String VARIABLE_LDAP_GROUP_SEARCH_FILTER = "ldap_group_search_filter";
private static final String VARIABLE_LDAP_KRB_USER_SEARCH_FILTER = "ldap_krb_search_filter";
private static final String VARIABLE_LDAP_ATTR_BINARY = "ldap_attr_binary";
private static final String VARIABLE_LDAP_GROUP_TARGET = "ldap_group_target";
private static final String VARIABLE_LDAP_DYNAMIC_GROUP_TARGET = "ldap_dyn_group_target";
private static final String VARIABLE_LDAP_USERDN = "ldap_user_dn";
private static final String VARIABLE_LDAP_GROUPDN = "ldap_group_dn";
private static final String VARIABLE_LDAP_ACCOUNT_STATUS = "ldap_account_status";
private static final String VARIABLE_LDAP_GROUPS_SEARCH_FILTER = "ldap_groups_search_filter";
private static final String VARIABLE_LDAP_GROUP_MEMBERS_SEARCH_FILTER = "ldap_group_members_filter";
private static final String VARIABLE_LDAP_GROUPS_TARGET = "ldap_groups_target";
private static final String VARIABLE_OAUTH_ENABLED = "oauth_enabled";
private static final String VARIABLE_OAUTH_REDIRECT_URI = "oauth_redirect_uri";
private static final String VARIABLE_OAUTH_LOGOUT_REDIRECT_URI = "oauth_logout_redirect_uri";
private static final String VARIABLE_OAUTH_ACCOUNT_STATUS = "oauth_account_status";
private static final String VARIABLE_OAUTH_GROUP_MAPPING = "oauth_group_mapping";
private static final String VARIABLE_REMOTE_AUTH_NEED_CONSENT = "remote_auth_need_consent";
private static final String VARIABLE_DISABLE_PASSWORD_LOGIN = "disable_password_login";
private static final String VARIABLE_DISABLE_REGISTRATION = "disable_registration";
private static final String VARIABLE_DISABLE_REGISTRATION_UI = "disable_registration_ui";
private static final String VARIABLE_LDAP_GROUP_MAPPING_SYNC_INTERVAL = "ldap_group_mapping_sync_interval";
private static final String VARIABLE_VALIDATE_REMOTE_USER_EMAIL_VERIFIED = "validate_email_verified";
private static final String VARIABLE_MANAGED_CLOUD_REDIRECT_URI = "managed_cloud_redirect_uri";
private static final String VARIABLE_MANAGED_CLOUD_PROVIDER_NAME = "managed_cloud_provider_name";
private String KRB_AUTH = "false";
private String LDAP_AUTH = "false";
private boolean IS_KRB_ENABLED = false;
private boolean IS_LDAP_ENABLED = false;
private String LDAP_GROUP_MAPPING = "";
private String LDAP_USER_ID = "uid"; //login name
private String LDAP_USER_GIVEN_NAME = "givenName";
private String LDAP_USER_SURNAME = "sn";
private String LDAP_USER_EMAIL = "mail";
private String LDAP_USER_SEARCH_FILTER = "uid=%s";
private String LDAP_GROUP_SEARCH_FILTER = "member=%d";
private String LDAP_KRB_USER_SEARCH_FILTER = "krbPrincipalName=%s";
private String LDAP_ATTR_BINARY = "java.naming.ldap.attributes.binary";
private String LDAP_GROUP_TARGET = "cn";
private String LDAP_DYNAMIC_GROUP_TARGET = "memberOf";
private String LDAP_USER_DN_DEFAULT = "";
private String LDAP_GROUP_DN_DEFAULT = "";
private String LDAP_USER_DN = LDAP_USER_DN_DEFAULT;
private String LDAP_GROUP_DN = LDAP_GROUP_DN_DEFAULT;
private String LDAP_GROUPS_TARGET = "distinguishedName";
private String LDAP_GROUPS_SEARCH_FILTER = "(&(objectCategory=group)(cn=%c))";
private String LDAP_GROUP_MEMBERS_SEARCH_FILTER = "(&(objectCategory=user)(memberOf=%d))";
private int LDAP_ACCOUNT_STATUS = 1;
private String OAUTH_ENABLED = "false";
private boolean IS_OAUTH_ENABLED = false;
private String OAUTH_GROUP_MAPPING = "";
private String OAUTH_REDIRECT_URI_PATH = "hopsworks/callback";
private String OAUTH_LOGOUT_REDIRECT_URI_PATH = "hopsworks/";
private String OAUTH_REDIRECT_URI = OAUTH_REDIRECT_URI_PATH;
private String OAUTH_LOGOUT_REDIRECT_URI = OAUTH_LOGOUT_REDIRECT_URI_PATH;
private int OAUTH_ACCOUNT_STATUS = 1;
private long LDAP_GROUP_MAPPING_SYNC_INTERVAL = 0;
private boolean REMOTE_AUTH_NEED_CONSENT = true;
private boolean DISABLE_PASSWORD_LOGIN = false;
private boolean DISABLE_REGISTRATION = false;
private boolean VALIDATE_REMOTE_USER_EMAIL_VERIFIED = false;
private String MANAGED_CLOUD_REDIRECT_URI = "";
private String MANAGED_CLOUD_PROVIDER_NAME = "hopsworks.ai";
/**
 * Loads all remote-auth related variables (Kerberos, LDAP, OAuth, managed
 * cloud, registration flags) from the variables table into the in-memory
 * cache. Each setter falls back to the current field value when the variable
 * is not present in the table, so the field initializers act as defaults.
 */
private void populateLDAPCache() {
// Kerberos / LDAP string flags and attribute names.
KRB_AUTH = setVar(VARIABLE_KRB_AUTH, KRB_AUTH);
LDAP_AUTH = setVar(VARIABLE_LDAP_AUTH, LDAP_AUTH);
LDAP_GROUP_MAPPING = setVar(VARIABLE_LDAP_GROUP_MAPPING, LDAP_GROUP_MAPPING);
LDAP_USER_ID = setVar(VARIABLE_LDAP_USER_ID, LDAP_USER_ID);
LDAP_USER_GIVEN_NAME = setVar(VARIABLE_LDAP_USER_GIVEN_NAME, LDAP_USER_GIVEN_NAME);
LDAP_USER_SURNAME = setVar(VARIABLE_LDAP_USER_SURNAME, LDAP_USER_SURNAME);
LDAP_USER_EMAIL = setVar(VARIABLE_LDAP_USER_EMAIL, LDAP_USER_EMAIL);
LDAP_ACCOUNT_STATUS = setIntVar(VARIABLE_LDAP_ACCOUNT_STATUS, LDAP_ACCOUNT_STATUS);
LDAP_USER_SEARCH_FILTER = setVar(VARIABLE_LDAP_USER_SEARCH_FILTER, LDAP_USER_SEARCH_FILTER);
LDAP_GROUP_SEARCH_FILTER = setVar(VARIABLE_LDAP_GROUP_SEARCH_FILTER, LDAP_GROUP_SEARCH_FILTER);
LDAP_KRB_USER_SEARCH_FILTER = setVar(VARIABLE_LDAP_KRB_USER_SEARCH_FILTER, LDAP_KRB_USER_SEARCH_FILTER);
LDAP_ATTR_BINARY = setVar(VARIABLE_LDAP_ATTR_BINARY, LDAP_ATTR_BINARY);
LDAP_GROUP_TARGET = setVar(VARIABLE_LDAP_GROUP_TARGET, LDAP_GROUP_TARGET);
LDAP_DYNAMIC_GROUP_TARGET = setVar(VARIABLE_LDAP_DYNAMIC_GROUP_TARGET, LDAP_DYNAMIC_GROUP_TARGET);
LDAP_USER_DN = setStrVar(VARIABLE_LDAP_USERDN, LDAP_USER_DN_DEFAULT);
LDAP_GROUP_DN = setStrVar(VARIABLE_LDAP_GROUPDN, LDAP_GROUP_DN_DEFAULT);
LDAP_GROUPS_TARGET = setVar(VARIABLE_LDAP_GROUPS_TARGET, LDAP_GROUPS_TARGET);
LDAP_GROUPS_SEARCH_FILTER = setStrVar(VARIABLE_LDAP_GROUPS_SEARCH_FILTER, LDAP_GROUPS_SEARCH_FILTER);
LDAP_GROUP_MEMBERS_SEARCH_FILTER =
setStrVar(VARIABLE_LDAP_GROUP_MEMBERS_SEARCH_FILTER, LDAP_GROUP_MEMBERS_SEARCH_FILTER);
// Boolean variants of the same auth flags (stored under the same keys).
IS_KRB_ENABLED = setBoolVar(VARIABLE_KRB_AUTH, IS_KRB_ENABLED);
IS_LDAP_ENABLED = setBoolVar(VARIABLE_LDAP_AUTH, IS_LDAP_ENABLED);
// OAuth configuration.
OAUTH_ENABLED = setStrVar(VARIABLE_OAUTH_ENABLED, OAUTH_ENABLED);
IS_OAUTH_ENABLED = setBoolVar(VARIABLE_OAUTH_ENABLED, IS_OAUTH_ENABLED);
OAUTH_REDIRECT_URI = setStrVar(VARIABLE_OAUTH_REDIRECT_URI, OAUTH_REDIRECT_URI);
OAUTH_LOGOUT_REDIRECT_URI = setStrVar(VARIABLE_OAUTH_LOGOUT_REDIRECT_URI, OAUTH_LOGOUT_REDIRECT_URI);
OAUTH_ACCOUNT_STATUS = setIntVar(VARIABLE_OAUTH_ACCOUNT_STATUS, OAUTH_ACCOUNT_STATUS);
OAUTH_GROUP_MAPPING = setStrVar(VARIABLE_OAUTH_GROUP_MAPPING, OAUTH_GROUP_MAPPING);
// Registration / consent / sync behavior.
REMOTE_AUTH_NEED_CONSENT = setBoolVar(VARIABLE_REMOTE_AUTH_NEED_CONSENT, REMOTE_AUTH_NEED_CONSENT);
DISABLE_PASSWORD_LOGIN = setBoolVar(VARIABLE_DISABLE_PASSWORD_LOGIN, DISABLE_PASSWORD_LOGIN);
DISABLE_REGISTRATION = setBoolVar(VARIABLE_DISABLE_REGISTRATION, DISABLE_REGISTRATION);
DISABLE_REGISTRATION_UI = setBoolVar(VARIABLE_DISABLE_REGISTRATION_UI, DISABLE_REGISTRATION_UI);
LDAP_GROUP_MAPPING_SYNC_INTERVAL = setLongVar(VARIABLE_LDAP_GROUP_MAPPING_SYNC_INTERVAL,
LDAP_GROUP_MAPPING_SYNC_INTERVAL);
VALIDATE_REMOTE_USER_EMAIL_VERIFIED =
setBoolVar(VARIABLE_VALIDATE_REMOTE_USER_EMAIL_VERIFIED, VALIDATE_REMOTE_USER_EMAIL_VERIFIED);
// Managed cloud (hopsworks.ai) OAuth redirect configuration.
MANAGED_CLOUD_REDIRECT_URI = setStrVar(VARIABLE_MANAGED_CLOUD_REDIRECT_URI, MANAGED_CLOUD_REDIRECT_URI);
MANAGED_CLOUD_PROVIDER_NAME = setStrVar(VARIABLE_MANAGED_CLOUD_PROVIDER_NAME, MANAGED_CLOUD_PROVIDER_NAME);
}
public synchronized String getKRBAuthStatus() {
checkCache();
return KRB_AUTH;
}
public synchronized String getLDAPAuthStatus() {
checkCache();
return LDAP_AUTH;
}
public synchronized boolean isKrbEnabled() {
checkCache();
return IS_KRB_ENABLED;
}
public synchronized boolean isLdapEnabled() {
checkCache();
return IS_LDAP_ENABLED;
}
public synchronized String getLdapGroupMapping() {
checkCache();
return LDAP_GROUP_MAPPING;
}
public synchronized String getLdapUserId() {
checkCache();
return LDAP_USER_ID;
}
public synchronized String getLdapUserGivenName() {
checkCache();
return LDAP_USER_GIVEN_NAME;
}
public synchronized String getLdapUserSurname() {
checkCache();
return LDAP_USER_SURNAME;
}
public synchronized String getLdapUserMail() {
checkCache();
return LDAP_USER_EMAIL;
}
public synchronized String getLdapUserSearchFilter() {
checkCache();
return LDAP_USER_SEARCH_FILTER;
}
public synchronized String getLdapGroupSearchFilter() {
checkCache();
return LDAP_GROUP_SEARCH_FILTER;
}
public synchronized String getKrbUserSearchFilter() {
checkCache();
return LDAP_KRB_USER_SEARCH_FILTER;
}
public synchronized String getLdapAttrBinary() {
checkCache();
return LDAP_ATTR_BINARY;
}
public synchronized String getLdapGroupTarget() {
checkCache();
return LDAP_GROUP_TARGET;
}
public synchronized String getLdapDynGroupTarget() {
checkCache();
return LDAP_DYNAMIC_GROUP_TARGET;
}
public synchronized String getLdapUserDN() {
checkCache();
return LDAP_USER_DN;
}
public synchronized String getLdapGroupDN() {
checkCache();
return LDAP_GROUP_DN;
}
public synchronized int getLdapAccountStatus() {
checkCache();
return LDAP_ACCOUNT_STATUS;
}
public synchronized String getLdapGroupsTarget() {
checkCache();
return LDAP_GROUPS_TARGET;
}
public synchronized String getLdapGroupsSearchFilter() {
checkCache();
return LDAP_GROUPS_SEARCH_FILTER;
}
public synchronized String getLdapGroupMembersFilter() {
checkCache();
return LDAP_GROUP_MEMBERS_SEARCH_FILTER;
}
public synchronized String getOAuthEnabled() {
checkCache();
return OAUTH_ENABLED;
}
public synchronized boolean isOAuthEnabled() {
checkCache();
return IS_OAUTH_ENABLED;
}
public synchronized String getOAuthGroupMapping() {
checkCache();
return OAUTH_GROUP_MAPPING;
}
public void updateOAuthGroupMapping(String mapping) {
updateVariableInternal(VARIABLE_OAUTH_GROUP_MAPPING, mapping, VariablesVisibility.ADMIN);
}
public synchronized String getOauthRedirectUri(String providerName) {
return getOauthRedirectUri(providerName, false);
}
/*
 * When using OAuth for hopsworks.ai we need to first redirect to hopsworks.ai,
 * which then redirects to Hopsworks.
 */
/**
 * Resolves the OAuth redirect URI for the given provider. The managed-cloud
 * (hopsworks.ai) redirect URI is used only when one is configured, the caller
 * did not ask to bypass it, and the provider is the managed-cloud provider;
 * otherwise the plain Hopsworks redirect URI is returned.
 */
public synchronized String getOauthRedirectUri(String providerName, boolean skipManagedCloud) {
checkCache();
boolean useManagedCloud = !MANAGED_CLOUD_REDIRECT_URI.isEmpty()
&& !skipManagedCloud
&& Objects.equals(MANAGED_CLOUD_PROVIDER_NAME, providerName);
return useManagedCloud ? MANAGED_CLOUD_REDIRECT_URI : OAUTH_REDIRECT_URI;
}
public synchronized String getManagedCloudRedirectUri() {
checkCache();
return MANAGED_CLOUD_REDIRECT_URI;
}
public synchronized String getManagedCloudProviderName() {
checkCache();
return MANAGED_CLOUD_PROVIDER_NAME;
}
public void updateOauthRedirectUri(String uri) {
updateVariableInternal(VARIABLE_OAUTH_REDIRECT_URI, uri + OAUTH_REDIRECT_URI_PATH,
VariablesVisibility.ADMIN);
}
public synchronized String getOauthLogoutRedirectUri() {
checkCache();
return OAUTH_LOGOUT_REDIRECT_URI;
}
public void addPathAndupdateOauthLogoutRedirectUri(String uri) {
updateOauthLogoutRedirectUri(uri + OAUTH_LOGOUT_REDIRECT_URI_PATH);
}
public void updateOauthLogoutRedirectUri(String uri) {
updateVariableInternal(VARIABLE_OAUTH_LOGOUT_REDIRECT_URI, uri,
VariablesVisibility.ADMIN);
}
public void updateManagedCloudRedirectUri(String uri) {
updateVariableInternal(VARIABLE_MANAGED_CLOUD_REDIRECT_URI, uri , VariablesVisibility.ADMIN);
}
public synchronized int getOAuthAccountStatus() {
checkCache();
return OAUTH_ACCOUNT_STATUS;
}
public void updateOAuthAccountStatus(Integer val) {
updateVariableInternal(VARIABLE_OAUTH_ACCOUNT_STATUS, val.toString(), VariablesVisibility.ADMIN);
}
public synchronized boolean shouldValidateEmailVerified() {
checkCache();
return VALIDATE_REMOTE_USER_EMAIL_VERIFIED;
}
public synchronized boolean remoteAuthNeedConsent() {
checkCache();
return REMOTE_AUTH_NEED_CONSENT;
}
public void updateRemoteAuthNeedConsent(boolean needConsent) {
updateVariableInternal(VARIABLE_REMOTE_AUTH_NEED_CONSENT, Boolean.toString(needConsent), VariablesVisibility.ADMIN);
}
public synchronized String getVarLdapAccountStatus() {
return VARIABLE_LDAP_ACCOUNT_STATUS;
}
public synchronized String getVarLdapGroupMapping() {
return VARIABLE_LDAP_GROUP_MAPPING;
}
public synchronized String getVarLdapUserId() {
return VARIABLE_LDAP_USER_ID;
}
public synchronized String getVarLdapUserGivenName() {
return VARIABLE_LDAP_USER_GIVEN_NAME;
}
public synchronized String getVarLdapUserSurname() {
return VARIABLE_LDAP_USER_SURNAME;
}
public synchronized String getVarLdapUserMail() {
return VARIABLE_LDAP_USER_EMAIL;
}
public synchronized String getVarLdapUserSearchFilter() {
return VARIABLE_LDAP_USER_SEARCH_FILTER;
}
public synchronized String getVarLdapGroupSearchFilter() {
return VARIABLE_LDAP_GROUP_SEARCH_FILTER;
}
public synchronized String getVarKrbUserSearchFilter() {
return VARIABLE_LDAP_KRB_USER_SEARCH_FILTER;
}
public synchronized String getVarLdapAttrBinary() {
return VARIABLE_LDAP_ATTR_BINARY;
}
public synchronized String getVarLdapGroupTarget() {
return VARIABLE_LDAP_GROUP_TARGET;
}
public synchronized String getVarLdapDynGroupTarget() {
return VARIABLE_LDAP_DYNAMIC_GROUP_TARGET;
}
public synchronized String getVarLdapUserDN() {
return VARIABLE_LDAP_USERDN;
}
public synchronized String getVarLdapGroupDN() {
return VARIABLE_LDAP_GROUPDN;
}
public synchronized boolean isPasswordLoginDisabled() {
checkCache();
return DISABLE_PASSWORD_LOGIN;
}
public synchronized boolean isRegistrationDisabled() {
checkCache();
return DISABLE_REGISTRATION;
}
public void updateRegistrationDisabled(boolean disable) {
updateVariableInternal(VARIABLE_DISABLE_REGISTRATION, Boolean.toString(disable), VariablesVisibility.ADMIN);
}
// Special flag to disable only registration UI but not the backend
// It is used in managed cloud when user management is MANAGED by hopsworks.ai
// Variable value is set during instance initialization by ec2-init
private boolean DISABLE_REGISTRATION_UI = false;
public synchronized boolean isRegistrationUIDisabled() {
checkCache();
return isRegistrationDisabled() || DISABLE_REGISTRATION_UI;
}
public synchronized long ldapGroupMappingSyncInterval() {
checkCache();
return LDAP_GROUP_MAPPING_SYNC_INTERVAL;
}
//----------------------------END remote user------------------------------------
// Service key rotation enabled
private static final String SERVICE_KEY_ROTATION_ENABLED_KEY = "service_key_rotation_enabled";
private boolean serviceKeyRotationEnabled = false;
public synchronized boolean isServiceKeyRotationEnabled() {
checkCache();
return serviceKeyRotationEnabled;
}
// Service key rotation interval
private static final String SERVICE_KEY_ROTATION_INTERVAL_KEY = "service_key_rotation_interval";
private String serviceKeyRotationInterval = "3d";
public synchronized String getServiceKeyRotationInterval() {
checkCache();
return serviceKeyRotationInterval;
}
// TensorBoard kill rotation interval in milliseconds (should be lower than the TensorBoardKillTimer)
private static final String TENSORBOARD_MAX_LAST_ACCESSED = "tensorboard_max_last_accessed";
private int tensorBoardMaxLastAccessed = 1140000;
public synchronized int getTensorBoardMaxLastAccessed() {
checkCache();
return tensorBoardMaxLastAccessed;
}
// Offset (in bytes) used when serving Spark UI logs — the previous comment was
// copy-pasted from the TensorBoard setting above.
private static final String SPARK_UI_LOGS_OFFSET = "spark_ui_logs_offset";
private int sparkUILogsOffset = 512000;
public synchronized int getSparkUILogsOffset() {
checkCache();
return sparkUILogsOffset;
}
/**
 * Parses the numeric part of a time configuration value such as "30m" or "2h".
 *
 * @param configurationTime value matching TIME_CONF_PATTERN (number + optional suffix)
 * @return the numeric component
 * @throws IllegalArgumentException if the value does not match the pattern
 */
public Long getConfTimeValue(String configurationTime) {
Matcher matcher = TIME_CONF_PATTERN.matcher(configurationTime.toLowerCase());
if (!matcher.matches()) {
throw new IllegalArgumentException("Invalid time in configuration: " + configurationTime);
}
return Long.parseLong(matcher.group(1));
}
/**
 * Parses the time-unit suffix of a configuration value such as "30m" or "2h".
 * When no suffix is present, MINUTES is assumed.
 *
 * @throws IllegalArgumentException if the value does not match the pattern or
 *         the suffix is not a key in TIME_SUFFIXES
 */
public TimeUnit getConfTimeTimeUnit(String configurationTime) {
Matcher matcher = TIME_CONF_PATTERN.matcher(configurationTime.toLowerCase());
if (!matcher.matches()) {
throw new IllegalArgumentException("Invalid time in configuration: " + configurationTime);
}
String timeUnitStr = matcher.group(2);
if (null != timeUnitStr && !TIME_SUFFIXES.containsKey(timeUnitStr.toLowerCase())) {
throw new IllegalArgumentException("Invalid time suffix in configuration: " + configurationTime);
}
return timeUnitStr == null ? TimeUnit.MINUTES : TIME_SUFFIXES.get(timeUnitStr.toLowerCase());
}
/**
 * Splits a separator-delimited string, trims each token, and returns the
 * distinct values as a set.
 */
private Set<String> toSetFromCsv(String csv, String separator) {
Set<String> values = new HashSet<>();
for (String token : Splitter.on(separator).trimResults().split(csv)) {
values.add(token);
}
return values;
}
// Libraries that should not be uninstallable
private Set<String> IMMUTABLE_PYTHON_LIBRARY_NAMES;
private static final String VARIABLE_IMMUTABLE_PYTHON_LIBRARY_NAMES = "preinstalled_python_lib_names";
private static final String DEFAULT_IMMUTABLE_PYTHON_LIBRARY_NAMES = "pydoop, pyspark, jupyterlab, sparkmagic, " +
"hdfscontents, pyjks, hops-apache-beam, pyopenssl";
public synchronized Set<String> getImmutablePythonLibraryNames() {
checkCache();
return IMMUTABLE_PYTHON_LIBRARY_NAMES;
}
private String HOPSWORKS_VERSION;
public synchronized String getHopsworksVersion() {
checkCache();
return HOPSWORKS_VERSION;
}
private String KUBE_KSERVE_TENSORFLOW_VERSION;
public synchronized String getKServeTensorflowVersion() {
checkCache();
return KUBE_KSERVE_TENSORFLOW_VERSION;
}
private String TENSORFLOW_VERSION;
public synchronized String getTensorflowVersion() {
checkCache();
return TENSORFLOW_VERSION;
}
private String OPENSEARCH_VERSION;
public synchronized String getOpenSearchVersion() {
checkCache();
return OPENSEARCH_VERSION;
}
private String KAFKA_VERSION;
public synchronized String getKafkaVersion() {
checkCache();
return KAFKA_VERSION;
}
private String DELA_VERSION;
public synchronized String getDelaVersion() {
checkCache();
return DELA_VERSION;
}
private String EPIPE_VERSION;
public synchronized String getEpipeVersion() {
checkCache();
return EPIPE_VERSION;
}
private String FLINK_VERSION;
public synchronized String getFlinkVersion() {
checkCache();
return FLINK_VERSION;
}
private String SPARK_VERSION;
public synchronized String getSparkVersion() {
checkCache();
return SPARK_VERSION;
}
private String TEZ_VERSION;
public synchronized String getTezVersion() {
checkCache();
return TEZ_VERSION;
}
private String HIVE2_VERSION;
public synchronized String getHive2Version() {
checkCache();
return HIVE2_VERSION;
}
private String LIVY_VERSION;
public synchronized String getLivyVersion() {
checkCache();
return LIVY_VERSION;
}
private String NDB_VERSION;
public synchronized String getNdbVersion() {
checkCache();
return NDB_VERSION;
}
private String FILEBEAT_VERSION;
public synchronized String getFilebeatVersion() {
checkCache();
return FILEBEAT_VERSION;
}
private String KIBANA_VERSION;
public synchronized String getKibanaVersion() {
checkCache();
return KIBANA_VERSION;
}
private String LOGSTASH_VERSION;
public synchronized String getLogstashVersion() {
checkCache();
return LOGSTASH_VERSION;
}
private String GRAFANA_VERSION;
public synchronized String getGrafanaVersion() {
checkCache();
return GRAFANA_VERSION;
}
private String ZOOKEEPER_VERSION;
public synchronized String getZookeeperVersion() {
checkCache();
return ZOOKEEPER_VERSION;
}
// -------------------------------- Kubernetes ----------------------------------------------//
private String KUBE_USER = "kubernetes";
public synchronized String getKubeUser() {
checkCache();
return KUBE_USER;
}
private String KUBE_HOPSWORKS_USER = "hopsworks";
public synchronized String getKubeHopsworksUser() {
checkCache();
return KUBE_HOPSWORKS_USER;
}
private String KUBEMASTER_URL = "https://192.168.68.102:6443";
public synchronized String getKubeMasterUrl() {
checkCache();
return KUBEMASTER_URL;
}
private String KUBE_CA_CERTFILE = "/srv/hops/certs-dir/certs/ca.cert.pem";
public synchronized String getKubeCaCertfile() {
checkCache();
return KUBE_CA_CERTFILE;
}
private String KUBE_CLIENT_KEYFILE = "/srv/hops/certs-dir/kube/hopsworks/hopsworks.key.pem";
public synchronized String getKubeClientKeyfile() {
checkCache();
return KUBE_CLIENT_KEYFILE;
}
private String KUBE_CLIENT_CERTFILE = "/srv/hops/certs-dir/kube/hopsworks/hopsworks.cert.pem";
public synchronized String getKubeClientCertfile() {
checkCache();
return KUBE_CLIENT_CERTFILE;
}
private String KUBE_CLIENT_KEYPASS = "adminpw";
public synchronized String getKubeClientKeypass() {
checkCache();
return KUBE_CLIENT_KEYPASS;
}
private String KUBE_TRUSTSTORE_PATH = "/srv/hops/certs-dir/kube/hopsworks/hopsworks__tstore.jks";
public synchronized String getKubeTruststorePath() {
checkCache();
return KUBE_TRUSTSTORE_PATH;
}
private String KUBE_TRUSTSTORE_KEY = "adminpw";
public synchronized String getKubeTruststoreKey() {
checkCache();
return KUBE_TRUSTSTORE_KEY;
}
private String KUBE_KEYSTORE_PATH = "/srv/hops/certs-dir/kube/hopsworks/hopsworks__kstore.jks";
public synchronized String getKubeKeystorePath() {
checkCache();
return KUBE_KEYSTORE_PATH;
}
private String KUBE_KEYSTORE_KEY = "adminpw";
public synchronized String getKubeKeystoreKey() {
checkCache();
return KUBE_KEYSTORE_KEY;
}
private String KUBE_PULL_POLICY = "Always";
public synchronized String getKubeImagePullPolicy() {
checkCache();
return KUBE_PULL_POLICY;
}
private Integer KUBE_API_MAX_ATTEMPTS = 12;
public synchronized Integer getKubeAPIMaxAttempts() {
checkCache();
return KUBE_API_MAX_ATTEMPTS;
}
private Boolean ONLINE_FEATURESTORE = false;
public synchronized Boolean isOnlineFeaturestore() {
checkCache();
return ONLINE_FEATURESTORE;
}
private String ONLINE_FEATURESTORE_TS = "";
public synchronized String getOnlineFeatureStoreTableSpace() {
checkCache();
return ONLINE_FEATURESTORE_TS;
}
private Integer ONLINEFS_THREAD_NUMBER = 10;
public synchronized Integer getOnlineFsThreadNumber() {
checkCache();
return ONLINEFS_THREAD_NUMBER;
}
private Integer KUBE_DOCKER_MAX_MEMORY_ALLOCATION = 8192;
public synchronized Integer getKubeDockerMaxMemoryAllocation() {
checkCache();
return KUBE_DOCKER_MAX_MEMORY_ALLOCATION;
}
private Double KUBE_DOCKER_MAX_CORES_ALLOCATION = 4.0;
public synchronized Double getKubeDockerMaxCoresAllocation() {
checkCache();
return KUBE_DOCKER_MAX_CORES_ALLOCATION;
}
private Integer KUBE_DOCKER_MAX_GPUS_ALLOCATION = 1;
public synchronized Integer getKubeDockerMaxGpusAllocation() {
checkCache();
return KUBE_DOCKER_MAX_GPUS_ALLOCATION;
}
private Boolean KUBE_INSTALLED = false;
public synchronized Boolean getKubeInstalled() {
checkCache();
return KUBE_INSTALLED;
}
private Boolean KUBE_KSERVE_INSTALLED = false;
public synchronized Boolean getKubeKServeInstalled() {
checkCache();
return KUBE_KSERVE_INSTALLED;
}
private String KUBE_SERVING_NODE_LABELS = "";
public synchronized String getKubeServingNodeLabels() {
checkCache();
return KUBE_SERVING_NODE_LABELS;
}
private String KUBE_SERVING_NODE_TOLERATIONS = "";
public synchronized String getKubeServingNodeTolerations() {
checkCache();
return KUBE_SERVING_NODE_TOLERATIONS;
}
private Integer KUBE_SERVING_MAX_MEMORY_ALLOCATION = -1; // no upper limit
public synchronized Integer getKubeServingMaxMemoryAllocation() {
checkCache();
return KUBE_SERVING_MAX_MEMORY_ALLOCATION;
}
private Double KUBE_SERVING_MAX_CORES_ALLOCATION = -1.0; // no upper limit
public synchronized Double getKubeServingMaxCoresAllocation() {
checkCache();
return KUBE_SERVING_MAX_CORES_ALLOCATION;
}
private Integer KUBE_SERVING_MAX_GPUS_ALLOCATION = -1; // no upper limit
public synchronized Integer getKubeServingMaxGpusAllocation() {
checkCache();
return KUBE_SERVING_MAX_GPUS_ALLOCATION;
}
// Maximum number of instances. Possible values >=-1 where -1 means no limit.
private Integer KUBE_SERVING_MAX_NUM_INSTANCES = -1;
public synchronized Integer getKubeServingMaxNumInstances() {
checkCache();
return KUBE_SERVING_MAX_NUM_INSTANCES;
}
// Minimum number of instances. Possible values: >=-1 where -1 means no limit and 0 enforces scale-to-zero
// capabilities when available
private Integer KUBE_SERVING_MIN_NUM_INSTANCES = -1;
public synchronized Integer getKubeServingMinNumInstances() {
checkCache();
return KUBE_SERVING_MIN_NUM_INSTANCES;
}
private String KUBE_KNATIVE_DOMAIN_NAME = "";
public synchronized String getKubeKnativeDomainName() {
checkCache();
return KUBE_KNATIVE_DOMAIN_NAME;
}
// Comma-separated list of tainted nodes
private String KUBE_TAINTED_NODES = "";
public synchronized String getKubeTaintedNodes() {
checkCache();
return KUBE_TAINTED_NODES;
}
private String KUBE_TAINTED_NODES_MONITOR_INTERVAL = "30m";
public synchronized String getKubeTaintedMonitorInterval() {
checkCache();
return KUBE_TAINTED_NODES_MONITOR_INTERVAL;
}
private Boolean HOPSWORKS_ENTERPRISE = false;
public synchronized Boolean getHopsworksEnterprise() {
checkCache();
return HOPSWORKS_ENTERPRISE;
}
private String SERVING_MONITOR_INT = "30s";
public synchronized String getServingMonitorInt() {
checkCache();
return SERVING_MONITOR_INT;
}
private int SERVING_CONNECTION_POOL_SIZE = 40;
public synchronized int getServingConnectionPoolSize() {
checkCache();
return SERVING_CONNECTION_POOL_SIZE;
}
private int SERVING_MAX_ROUTE_CONNECTIONS = 10;
public synchronized int getServingMaxRouteConnections() {
checkCache();
return SERVING_MAX_ROUTE_CONNECTIONS;
}
private int TENSORBOARD_MAX_RELOAD_THREADS = 1;
public synchronized int getTensorBoardMaxReloadThreads() {
checkCache();
return TENSORBOARD_MAX_RELOAD_THREADS;
}
private String JUPYTER_HOST = "localhost";
public synchronized String getJupyterHost() {
checkCache();
return JUPYTER_HOST;
}
//These dependencies were collected by installing jupyterlab in a new environment
public static List<String> JUPYTER_DEPENDENCIES = Arrays.asList("urllib3", "chardet", "idna", "requests", "attrs",
"zipp", "importlib-metadata", "pyrsistent", "six", "jsonschema", "prometheus-client", "pycparser",
"cffi", "argon2-cffi", "pyzmq", "ipython-genutils", "decorator", "traitlets", "jupyter-core", "Send2Trash",
"tornado", "pygments", "pickleshare", "wcwidth", "prompt-toolkit", "backcall", "ptyprocess", "pexpect",
"parso", "jedi", "ipython", "python-dateutil", "jupyter-client", "ipykernel", "terminado", "MarkupSafe",
"jinja2", "mistune", "defusedxml", "jupyterlab-pygments", "pandocfilters", "entrypoints", "pyparsing",
"packaging", "webencodings", "bleach", "testpath", "nbformat", "nest-asyncio", "async-generator",
"nbclient", "nbconvert", "notebook", "json5", "jupyterlab-server", "jupyterlab", "sparkmagic");
private String JWT_SIGNATURE_ALGORITHM = "HS512";
private String JWT_SIGNING_KEY_NAME = "apiKey";
private String JWT_ISSUER = "hopsworks@logicalclocks.com";
private long JWT_LIFETIME_MS = 1800000l;
private int JWT_EXP_LEEWAY_SEC = 900;
private long SERVICE_JWT_LIFETIME_MS = 86400000l;
private int SERVICE_JWT_EXP_LEEWAY_SEC = 43200;
public synchronized String getJWTSignatureAlg() {
checkCache();
return JWT_SIGNATURE_ALGORITHM;
}
public synchronized long getJWTLifetimeMs() {
checkCache();
return JWT_LIFETIME_MS;
}
public synchronized int getJWTExpLeewaySec() {
checkCache();
return JWT_EXP_LEEWAY_SEC;
}
/**
 * JWT lifetime plus the expiration leeway, in milliseconds. The leeway is
 * stored in seconds, hence the conversion; the 1000L literal keeps the
 * multiplication in long arithmetic to avoid int overflow.
 */
public synchronized long getJWTLifetimeMsPlusLeeway() {
checkCache();
return JWT_LIFETIME_MS + (JWT_EXP_LEEWAY_SEC * 1000L);
}
public synchronized long getServiceJWTLifetimeMS() {
checkCache();
return SERVICE_JWT_LIFETIME_MS;
}
public synchronized int getServiceJWTExpLeewaySec() {
checkCache();
return SERVICE_JWT_EXP_LEEWAY_SEC;
}
public synchronized String getJWTSigningKeyName() {
checkCache();
return JWT_SIGNING_KEY_NAME;
}
public synchronized String getJWTIssuer() {
checkCache();
return JWT_ISSUER;
}
private String SERVICE_MASTER_JWT = "";
public synchronized String getServiceMasterJWT() {
checkCache();
return SERVICE_MASTER_JWT;
}
public synchronized void setServiceMasterJWT(String JWT) {
updateVariableInternal(VARIABLE_SERVICE_MASTER_JWT, JWT, VariablesVisibility.ADMIN);
em.flush();
SERVICE_MASTER_JWT = JWT;
}
private final int NUM_OF_SERVICE_RENEW_TOKENS = 5;
private final static String SERVICE_RENEW_TOKEN_VARIABLE_TEMPLATE = "service_renew_token_%d";
private String[] RENEW_TOKENS = new String[0];
public synchronized String[] getServiceRenewJWTs() {
checkCache();
return RENEW_TOKENS;
}
/**
 * Persists the service-JWT renew tokens (one variable per token, keyed by
 * index) and caches them in memory.
 *
 * @param renewTokens the renew tokens to store; not retained by reference
 */
public synchronized void setServiceRenewJWTs(String[] renewTokens) {
for (int i = 0; i < renewTokens.length; i++) {
String variableKey = String.format(SERVICE_RENEW_TOKEN_VARIABLE_TEMPLATE, i);
updateVariableInternal(variableKey, renewTokens[i], VariablesVisibility.ADMIN);
}
// Defensive copy: the original stored the caller's array reference, so any
// later external mutation would silently change the cached tokens without
// updating the persisted variables.
RENEW_TOKENS = renewTokens.clone();
}
private int CONNECTION_KEEPALIVE_TIMEOUT = 30;
public synchronized int getConnectionKeepAliveTimeout() {
checkCache();
return CONNECTION_KEEPALIVE_TIMEOUT;
}
private int MAGGY_CLEANUP_INTERVAL = 24 * 60 * 1000;
public synchronized int getMaggyCleanupInterval() {
checkCache();
return MAGGY_CLEANUP_INTERVAL;
}
private String HIVE_CONF_PATH = "/srv/hops/apache-hive/conf/hive-site.xml";
public synchronized String getHiveConfPath() {
checkCache();
return HIVE_CONF_PATH;
}
private String FS_PY_JOB_UTIL_PATH = "hdfs:///user/spark/hsfs_util-2.1.0-SNAPSHOT.py";
public synchronized String getFSPyJobUtilPath() {
checkCache();
return FS_PY_JOB_UTIL_PATH;
}
private String FS_JAVA_JOB_UTIL_PATH = "hdfs:///user/spark/hsfs-utils-2.1.0-SNAPSHOT.jar";
public synchronized String getFSJavaJobUtilPath() {
checkCache();
return FS_JAVA_JOB_UTIL_PATH;
}
private long FEATURESTORE_DB_DEFAULT_QUOTA = -1;
public synchronized long getFeaturestoreDbDefaultQuota() {
checkCache();
return FEATURESTORE_DB_DEFAULT_QUOTA;
}
private String FEATURESTORE_DB_DEFAULT_STORAGE_FORMAT = "ORC";
public synchronized String getFeaturestoreDbDefaultStorageFormat() {
checkCache();
return FEATURESTORE_DB_DEFAULT_STORAGE_FORMAT;
}
private Boolean LOCALHOST = false;
public synchronized Boolean isLocalHost() {
checkCache();
return LOCALHOST;
}
private String CLOUD = "";
public synchronized String getCloudProvider() {
checkCache();
return CLOUD;
}
public Boolean isCloud() {
return !getCloudProvider().isEmpty();
}
public synchronized CLOUD_TYPES getCloudType() {
checkCache();
if (CLOUD.isEmpty()) {
return CLOUD_TYPES.NONE;
}
return CLOUD_TYPES.fromString(CLOUD);
}
/**
 * Supported cloud providers; {@code NONE} means Hopsworks is not running on a
 * cloud. (The redundant {@code static} modifier was dropped — nested enums are
 * implicitly static.)
 */
public enum CLOUD_TYPES {
NONE,
AWS,
GCP,
AZURE;

/**
 * Case-insensitive lookup of a cloud type from its string form.
 *
 * @throws IllegalArgumentException if {@code type} does not name a known type
 * @throws NullPointerException if {@code type} is null
 */
public static CLOUD_TYPES fromString(String type) {
// Default-locale toUpperCase is safe here: no constant name contains a
// locale-sensitive character such as 'i'.
return CLOUD_TYPES.valueOf(type.toUpperCase());
}
}
/**
 * Whether hops-util may skip TLS verification: true on cloud deployments and
 * on single-host (localhost) installations.
 */
public Boolean isHopsUtilInsecure() {
if (isCloud()) {
return true;
}
return isLocalHost();
}
private String FEATURESTORE_JDBC_URL = "jdbc:mysql://onlinefs.mysql.service.consul:3306/";
public synchronized String getFeaturestoreJdbcUrl() {
checkCache();
return FEATURESTORE_JDBC_URL;
}
private Boolean REQUESTS_VERIFY = false;
/**
* Whether to verify HTTP requests in hops-util-py. Accepted values are "true", "false"
*
*/
public synchronized Boolean getRequestsVerify() {
checkCache();
return REQUESTS_VERIFY;
}
private Boolean KIBANA_HTTPS_ENABELED = false;
public synchronized Boolean isKibanaHTTPSEnabled() {
checkCache();
return KIBANA_HTTPS_ENABELED;
}
private Boolean KIBANA_MULTI_TENANCY_ENABELED = false;
public synchronized Boolean isKibanaMultiTenancyEnabled() {
checkCache();
return KIBANA_MULTI_TENANCY_ENABELED;
}
public static final int OPENSEARCH_KIBANA_NO_CONNECTIONS = 5;
//-------------------------------- PROVENANCE ----------------------------------------------//
private static final String VARIABLE_PROVENANCE_TYPE = "provenance_type"; //disabled/meta/min/full
private static final String VARIABLE_PROVENANCE_ARCHIVE_SIZE = "provenance_archive_size";
private static final String VARIABLE_PROVENANCE_GRAPH_MAX_SIZE = "provenance_graph_max_size";
private static final String VARIABLE_PROVENANCE_ARCHIVE_DELAY = "provenance_archive_delay";
private static final String VARIABLE_PROVENANCE_CLEANUP_SIZE = "provenance_cleanup_size";
private static final String VARIABLE_PROVENANCE_CLEANER_PERIOD = "provenance_cleaner_period";
public static final String PROV_FILE_INDEX_SUFFIX = "__file_prov";
private Provenance.Type PROVENANCE_TYPE = Provenance.Type.MIN;
private String PROVENANCE_TYPE_S = PROVENANCE_TYPE.name();
private Integer PROVENANCE_CLEANUP_SIZE = 5;
private Integer PROVENANCE_ARCHIVE_SIZE = 100;
private Integer PROVENANCE_GRAPH_MAX_SIZE = 10000;
private Long PROVENANCE_CLEANER_PERIOD = 3600L; //1h in s
private Long PROVENANCE_ARCHIVE_DELAY = 0l;
private Integer PROVENANCE_OPENSEARCH_ARCHIVAL_PAGE_SIZE = 50;
public static final Integer PROVENANCE_OPENSEARCH_PAGE_DEFAULT_SIZE = 1000;
public String getProvFileIndex(Long projectIId) {
return projectIId.toString() + Settings.PROV_FILE_INDEX_SUFFIX;
}
private void populateProvenanceCache() {
PROVENANCE_TYPE_S = setStrVar(VARIABLE_PROVENANCE_TYPE, PROVENANCE_TYPE_S);
try {
PROVENANCE_TYPE = ProvTypeDTO.provTypeFromString(PROVENANCE_TYPE_S);
} catch(ProvenanceException e) {
LOGGER.log(Level.WARNING, "unknown prov type:" + PROVENANCE_TYPE_S + ", using default");
PROVENANCE_TYPE = Provenance.Type.MIN;
PROVENANCE_TYPE_S = PROVENANCE_TYPE.name();
}
PROVENANCE_ARCHIVE_SIZE = setIntVar(VARIABLE_PROVENANCE_ARCHIVE_SIZE, PROVENANCE_ARCHIVE_SIZE);
PROVENANCE_GRAPH_MAX_SIZE = setIntVar(VARIABLE_PROVENANCE_GRAPH_MAX_SIZE, PROVENANCE_GRAPH_MAX_SIZE);
PROVENANCE_ARCHIVE_DELAY = setLongVar(VARIABLE_PROVENANCE_ARCHIVE_DELAY, PROVENANCE_ARCHIVE_DELAY);
PROVENANCE_CLEANUP_SIZE = setIntVar(VARIABLE_PROVENANCE_CLEANUP_SIZE, PROVENANCE_CLEANUP_SIZE);
PROVENANCE_CLEANER_PERIOD = setLongVar(VARIABLE_PROVENANCE_CLEANER_PERIOD, PROVENANCE_CLEANER_PERIOD);
}
// Effective provenance mode (disabled/meta/min/full) after cache refresh.
public synchronized Provenance.Type getProvType() {
checkCache();
return PROVENANCE_TYPE;
}
public synchronized Integer getProvArchiveSize() {
checkCache();
return PROVENANCE_ARCHIVE_SIZE;
}
// Persists the new value via the entity manager and updates the cached field,
// but only when the value actually changed (avoids redundant DB writes).
// NOTE(review): unlike the getters, the setters do not call checkCache() first.
public synchronized void setProvArchiveSize(Integer size) {
if(!PROVENANCE_ARCHIVE_SIZE.equals(size)) {
em.merge(new Variables(VARIABLE_PROVENANCE_ARCHIVE_SIZE, size.toString()));
PROVENANCE_ARCHIVE_SIZE = size;
}
}
public synchronized Integer getProvenanceGraphMaxSize() {
checkCache();
return PROVENANCE_GRAPH_MAX_SIZE;
}
// Same persist-then-cache pattern as setProvArchiveSize.
public synchronized void setProvenanceGraphMaxSize(Integer size) {
if(!PROVENANCE_GRAPH_MAX_SIZE.equals(size)) {
em.merge(new Variables(VARIABLE_PROVENANCE_GRAPH_MAX_SIZE, size.toString()));
PROVENANCE_GRAPH_MAX_SIZE = size;
}
}
public synchronized Long getProvArchiveDelay() {
checkCache();
return PROVENANCE_ARCHIVE_DELAY;
}
public synchronized void setProvArchiveDelay(Long delay) {
if(!PROVENANCE_ARCHIVE_DELAY.equals(delay)) {
em.merge(new Variables(VARIABLE_PROVENANCE_ARCHIVE_DELAY, delay.toString()));
PROVENANCE_ARCHIVE_DELAY = delay;
}
}
public synchronized Integer getProvCleanupSize() {
checkCache();
return PROVENANCE_CLEANUP_SIZE;
}
// Page size used when archiving provenance entries from OpenSearch.
public synchronized Integer getProvOpenSearchArchivalPageSize() {
checkCache();
return PROVENANCE_OPENSEARCH_ARCHIVAL_PAGE_SIZE;
}
// Cleaner period in seconds (default 3600 = 1h, see field initializer).
public synchronized Long getProvCleanerPeriod() {
checkCache();
return PROVENANCE_CLEANER_PERIOD;
}
public synchronized void setProvCleanerPeriod(Long period) {
if(!PROVENANCE_CLEANER_PERIOD.equals(period)) {
em.merge(new Variables(VARIABLE_PROVENANCE_CLEANER_PERIOD, period.toString()));
PROVENANCE_CLEANER_PERIOD = period;
}
}
//------------------------------ END PROVENANCE --------------------------------------------//
// Local path to the packaged client bundle served to users.
private String CLIENT_PATH = "/srv/hops/client.tar.gz";
public synchronized String getClientPath() {
checkCache();
return CLIENT_PATH;
}
// CLOUD
// Endpoint that cloud deployments post events to; empty when not configured.
private String CLOUD_EVENTS_ENDPOINT = "";
public synchronized String getCloudEventsEndPoint() {
checkCache();
return CLOUD_EVENTS_ENDPOINT;
}
// API key used to authenticate against the cloud events endpoint.
private String CLOUD_EVENTS_ENDPOINT_API_KEY = "";
public synchronized String getCloudEventsEndPointAPIKey() {
checkCache();
return CLOUD_EVENTS_ENDPOINT_API_KEY;
}
// Maximum number of rows returned when previewing a feature group.
private int FG_PREVIEW_LIMIT = 100;
public synchronized int getFGPreviewLimit() {
checkCache();
return FG_PREVIEW_LIMIT;
}
public static final String FEATURESTORE_INDEX = "featurestore";
public static final String FEATURESTORE_PROJECT_ID_FIELD = "project_id";
//-----------------------------YARN DOCKER-------------------------------------------------//
// Container runtime used by YARN node managers.
private static String YARN_RUNTIME = "docker";
public synchronized String getYarnRuntime(){
checkCache();
return YARN_RUNTIME;
}
//----------------------------YARN NODEMANAGER--------------------------------------------//
// Whether the health of node managers should be actively monitored.
private boolean checkNodemanagersStatus = false;
public synchronized boolean isCheckingForNodemanagerStatusEnabled() {
checkCache();
return checkNodemanagersStatus;
}
// Comma-separated host paths that are bind-mounted read-only into YARN Docker containers.
private static String DOCKER_MOUNTS =
"/srv/hops/hadoop/etc/hadoop,/srv/hops/spark,/srv/hops/flink";
/**
 * Builds the Docker bind-mount specification from {@code DOCKER_MOUNTS}: each host path
 * becomes a read-only self-mount entry {@code path:path:ro}, joined with commas.
 *
 * @return e.g. {@code "/srv/hops/spark:/srv/hops/spark:ro,..."}; empty string when no
 *         mount points are configured
 */
public synchronized String getDockerMounts() {
checkCache();
// StringBuilder avoids the O(n^2) string concatenation of the previous loop, and
// joining up front means no trailing-comma substring() that would throw
// StringIndexOutOfBoundsException on an empty mount list.
StringBuilder result = new StringBuilder();
for (String mountPoint : DOCKER_MOUNTS.split(",")) {
if (result.length() > 0) {
result.append(',');
}
result.append(mountPoint).append(':').append(mountPoint).append(":ro");
}
return result.toString();
}
// Name of the base Python Docker image (without registry or tag).
private String DOCKER_BASE_IMAGE_PYTHON_NAME = "python38";
// With a managed registry the Python image is a tag of the shared "base" repository
// ("base:python38_<version>"); otherwise it is its own repository ("python38:<version>").
public synchronized String getBaseDockerImagePythonName() {
checkCache();
if(isManagedDockerRegistry()){
return DOCKER_BASE_NON_PYTHON_IMAGE + ":" + DOCKER_BASE_IMAGE_PYTHON_NAME +
"_" + HOPSWORKS_VERSION;
}else{
return DOCKER_BASE_IMAGE_PYTHON_NAME + ":" + HOPSWORKS_VERSION;
}
}
// Python interpreter version shipped in the base image. NOTE(review): default "3.7"
// does not match the "python38" image name above — confirm which is authoritative.
private String DOCKER_BASE_IMAGE_PYTHON_VERSION = "3.7";
public synchronized String getDockerBaseImagePythonVersion() {
checkCache();
return DOCKER_BASE_IMAGE_PYTHON_VERSION;
}
// Repository name of the non-Python base image; the tag is the Hopsworks version.
private final static String DOCKER_BASE_NON_PYTHON_IMAGE = "base";
public synchronized String getBaseNonPythonDockerImage() {
return DOCKER_BASE_NON_PYTHON_IMAGE + ":" + HOPSWORKS_VERSION;
}
// UID that YARN application containers run as.
// NOTE(review): unlike the sibling getters this one is not synchronized — confirm intended.
private long YARN_APP_UID = 1235L;
public long getYarnAppUID() {
checkCache();
return YARN_APP_UID;
}
//-----------------------------END YARN DOCKER-------------------------------------------------//
// Kubernetes deployment flavor for this installation.
private KubeType KUBE_TYPE = KubeType.Local;
public synchronized KubeType getKubeType() {
checkCache();
return KUBE_TYPE;
}
// Namespace/prefix used for images in the Docker registry; empty when unset.
private String DOCKER_NAMESPACE = "";
public synchronized String getDockerNamespace(){
checkCache();
return DOCKER_NAMESPACE;
}
// A managed registry only takes effect on cloud installations (see the && isCloud()).
private Boolean MANAGED_DOCKER_REGISTRY = false;
public synchronized Boolean isManagedDockerRegistry(){
checkCache();
return MANAGED_DOCKER_REGISTRY && isCloud();
}
public synchronized String getBaseNonPythonDockerImageWithNoTag(){
checkCache();
return DOCKER_BASE_NON_PYTHON_IMAGE;
}
// Comma-separated list of host paths users may mount into Docker jobs.
// NOTE(review): no default value — if the variable is absent this stays null and
// getDockerMountsList() would throw a NullPointerException; confirm it is always set.
private String DOCKER_JOB_MOUNTS_LIST;
public synchronized List<String> getDockerMountsList(){
checkCache();
return Arrays.asList(DOCKER_JOB_MOUNTS_LIST.split(","));
}
// Whether users are allowed to request extra mounts for Docker jobs at all.
private Boolean DOCKER_JOB_MOUNT_ALLOWED = true;
public synchronized Boolean isDockerJobMountAllowed(){
checkCache();
return DOCKER_JOB_MOUNT_ALLOWED;
}
// Whether Docker jobs must run strictly under the configured UID.
private Boolean DOCKER_JOB_UID_STRICT = true;
public synchronized Boolean isDockerJobUidStrict(){
checkCache();
return DOCKER_JOB_UID_STRICT;
}
// Maximum number of executions retained/allowed per job.
private int EXECUTIONS_PER_JOB_LIMIT = 10000;
public synchronized int getExecutionsPerJobLimit(){
checkCache();
return EXECUTIONS_PER_JOB_LIMIT;
}
// How many executions the cleaner removes per batch.
private int EXECUTIONS_CLEANER_BATCH_SIZE = 1000;
public synchronized int getExecutionsCleanerBatchSize(){
checkCache();
return EXECUTIONS_CLEANER_BATCH_SIZE;
}
// Interval between cleaner runs, in milliseconds (600000 = 10 minutes).
private int EXECUTIONS_CLEANER_INTERVAL_MS = 600000;
public synchronized int getExecutionsCleanerInterval(){
checkCache();
return EXECUTIONS_CLEANER_INTERVAL_MS;
}
// Upper bound on the size of an uploaded environment YAML, in bytes.
private int MAX_ENV_YML_BYTE_SIZE = 20000;
public synchronized int getMaxEnvYmlByteSize() {
checkCache();
return MAX_ENV_YML_BYTE_SIZE;
}
// How long to wait for a Livy session to start; unit not visible here —
// presumably seconds (TODO confirm).
private int LIVY_STARTUP_TIMEOUT = 240;
public synchronized int getLivyStartupTimeout() {
checkCache();
return LIVY_STARTUP_TIMEOUT;
}
// Whether users can be found via search.
private boolean USER_SEARCH_ENABLED = true;
public synchronized boolean isUserSearchEnabled() {
checkCache();
return USER_SEARCH_ENABLED;
}
/*
* When a user tries to connect for the first time with OAuth or LDAP,
* do not create the user if it does not belong to any group.
* This avoids users that belong to no group polluting the users table.
*/
private boolean REJECT_REMOTE_USER_NO_GROUP = false;
public synchronized boolean getRejectRemoteNoGroup() {
checkCache();
return REJECT_REMOTE_USER_NO_GROUP;
}
// Persists the flag as an admin-visible variable; the cached field is refreshed
// on the next checkCache() rather than being updated here.
public void updateRejectRemoteNoGroup(boolean reject) {
updateVariableInternal(VARIABLE_REJECT_REMOTE_USER_NO_GROUP, Boolean.toString(reject), VariablesVisibility.ADMIN);
}
// Whether Kubernetes namespace creation should be skipped (e.g. pre-provisioned clusters).
private boolean SKIP_NAMESPACE_CREATION = false;
public synchronized boolean shouldSkipNamespaceCreation() {
checkCache();
return SKIP_NAMESPACE_CREATION;
}
// Project quotas. For all fields below, -1 presumably means "no limit" (TODO confirm
// against the quota enforcement code).
private long QUOTAS_ONLINE_ENABLED_FEATUREGROUPS = -1L;
public synchronized long getQuotasOnlineEnabledFeaturegroups() {
checkCache();
return QUOTAS_ONLINE_ENABLED_FEATUREGROUPS;
}
private long QUOTAS_ONLINE_DISABLED_FEATUREGROUPS = -1L;
public synchronized long getQuotasOnlineDisabledFeaturegroups() {
checkCache();
return QUOTAS_ONLINE_DISABLED_FEATUREGROUPS;
}
private long QUOTAS_TRAINING_DATASETS = -1L;
public synchronized long getQuotasTrainingDatasets() {
checkCache();
return QUOTAS_TRAINING_DATASETS;
}
private long QUOTAS_RUNNING_MODEL_DEPLOYMENTS = -1L;
public synchronized long getQuotasRunningModelDeployments() {
checkCache();
return QUOTAS_RUNNING_MODEL_DEPLOYMENTS;
}
private long QUOTAS_TOTAL_MODEL_DEPLOYMENTS = -1L;
public synchronized long getQuotasTotalModelDeployments() {
checkCache();
return QUOTAS_TOTAL_MODEL_DEPLOYMENTS;
}
private long QUOTAS_MAX_PARALLEL_EXECUTIONS = -1L;
public synchronized long getQuotasMaxParallelExecutions() {
checkCache();
return QUOTAS_MAX_PARALLEL_EXECUTIONS;
}
}
|
hops-git_0.2.0 (#1196)
|
hopsworks-common/src/main/java/io/hops/hopsworks/common/util/Settings.java
|
hops-git_0.2.0 (#1196)
|
|
Java
|
agpl-3.0
|
ffbfe957578aac041c54830f6635e6751ad9573d
| 0
|
lp0/cursus-core
|
/*
cursus - Race series management program
Copyright 2011, 2014 Simon Arlott
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package eu.lp0.cursus.db;
public enum DatabaseVersion {
	_0_0_1;

	/**
	 * Packs the version encoded in this constant's name into a single long using the
	 * 24/24/16-bit layout: major in bits 40-63, minor in bits 16-39, patch in bits 0-15.
	 *
	 * @return the packed version number
	 */
	public long asLong() {
		String[] ver = name().split("_"); //$NON-NLS-1$
		return (Long.valueOf(ver[1]) << 40) | (Long.valueOf(ver[2]) << 16) | Long.valueOf(ver[3]);
	}

	/** @return the newest version constant (declaration order is version order). */
	public static DatabaseVersion getLatest() {
		return values()[values().length - 1];
	}

	/**
	 * Formats a packed version long back into "major.minor.patch".
	 *
	 * @param ver a value produced by {@link #asLong()}
	 * @return the dotted version string
	 */
	public static String parseLong(long ver) {
		// Bug fix: the minor field occupies 24 bits (shifts 40/16 in asLong), but the
		// old mask 0xFFFF kept only 16 of them, corrupting any minor version >= 65536.
		return String.format("%d.%d.%d", (ver >> 40), (ver >> 16) & 0xFFFFFF, ver & 0xFFFF); //$NON-NLS-1$
	}

	@Override
	public String toString() {
		return parseLong(asLong());
	}
}
|
src/main/java/eu/lp0/cursus/db/DatabaseVersion.java
|
/*
cursus - Race series management program
Copyright 2011, 2014 Simon Arlott
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package eu.lp0.cursus.db;
public enum DatabaseVersion {
_0_0_1;
public long asLong() {
String[] ver = name().split("_"); //$NON-NLS-1$
return (Long.valueOf(ver[1]) << 32) | (Long.valueOf(ver[2]) << 16) | Long.valueOf(ver[3]);
}
public static DatabaseVersion getLatest() {
return values()[values().length - 1];
}
public static String parseLong(long ver) {
return String.format("%d.%d.%d", (ver >> 32), (ver >> 16) & 0xFFFF, ver & 0xFFFF); //$NON-NLS-1$
}
@Override
public String toString() {
return parseLong(asLong());
}
}
|
split db version up into 24/24/16 bits instead of 32/16/16 bits
|
src/main/java/eu/lp0/cursus/db/DatabaseVersion.java
|
split db version up into 24/24/16 bits instead of 32/16/16 bits
|
|
Java
|
agpl-3.0
|
548b51e92c5141e37a88b5d02b2b71f63849f001
| 0
|
aihua/opennms,aihua/opennms,aihua/opennms,aihua/opennms,aihua/opennms,aihua/opennms,aihua/opennms,aihua/opennms,aihua/opennms
|
/*******************************************************************************
* This file is part of OpenNMS(R).
*
* Copyright (C) 2011-2015 The OpenNMS Group, Inc.
* OpenNMS(R) is Copyright (C) 1999-2015 The OpenNMS Group, Inc.
*
* OpenNMS(R) is a registered trademark of The OpenNMS Group, Inc.
*
* OpenNMS(R) is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published
* by the Free Software Foundation, either version 3 of the License,
* or (at your option) any later version.
*
* OpenNMS(R) is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with OpenNMS(R). If not, see:
* http://www.gnu.org/licenses/
*
* For more information contact:
* OpenNMS(R) Licensing <license@opennms.org>
* http://www.opennms.org/
* http://www.opennms.com/
*******************************************************************************/
package org.opennms.smoketest;
import java.util.concurrent.TimeUnit;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.openqa.selenium.By;
import org.openqa.selenium.Keys;
import org.openqa.selenium.NoSuchElementException;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.WebElement;
import org.openqa.selenium.support.ui.ExpectedCondition;
import org.openqa.selenium.support.ui.ExpectedConditions;
/**
* The Test Class for the New Provisioning UI using AngularJS.
* <p>This test will leave the current OpenNMS installation as it was before running,
* to avoid issues related to the execution order of the smoke tests.</p>
*
* @author <a href="mailto:agalue@opennms.org">Alejandro Galue</a>
*/
public class ProvisioningNewUIIT extends OpenNMSSeleniumTestCase {
// Fixture values used throughout the test: one node with one interface,
// one service, and one surveillance category.
private static final String NODE_LABEL = "localNode";
private static final String NODE_FOREIGNID = "localNode";
private static final String NODE_IPADDR = "127.0.0.1";
private static final String NODE_SERVICE = "HTTP-8980";
private static final String NODE_CATEGORY = "Test";
/**
* Sets up the test.
* <p>Removes any requisition left over from a previous run, then opens the
* provisioning page.</p>
*
* @throws Exception the exception
*/
@Before
public void setUp() throws Exception {
deleteTestRequisition();
provisioningPage();
}
/**
* Tears down the test.
* <p>Be 100% sure that there are no left-overs on the testing OpenNMS installation.</p>
* <p>NOTE(review): the cleanup call below is commented out, so the requisition created
* by the test is intentionally left in place (setUp() of the next run deletes it).</p>
*
* @throws Exception the exception
*/
@After
public void tearDown() throws Exception {
//deleteTestRequisition();
}
/**
* Test requisition UI.
* <p>End-to-end walk through the AngularJS provisioning UI: create a requisition,
* define its foreign source (detector + policy), add a node with an interface,
* service, asset and category, synchronize, and finally verify the node via the
* ReST API and the web UI.</p>
*
* @throws Exception the exception
*/
@Test
public void testRequisitionUI() throws Exception {
setImplicitWait(2, TimeUnit.SECONDS);
// Add a new requisition
clickId("add-requisition", false);
wait.until(ExpectedConditions.visibilityOfElementLocated(By.cssSelector("form.bootbox-form > input.bootbox-input")))
;
enterText(By.cssSelector("form.bootbox-form > input.bootbox-input"), REQUISITION_NAME);
findElementByXpath("//div/button[text()='OK']").click();
wait.until(ExpectedConditions.visibilityOfElementLocated(By.xpath("//td[text()='" + REQUISITION_NAME + "']")));
// Edit the foreign source
final String editForeignSourceButton = "button.btn[uib-tooltip='Edit detectors and policies of the "+REQUISITION_NAME+" Requisition']";
wait.until(ExpectedConditions.elementToBeClickable(By.cssSelector(editForeignSourceButton))).click();
wait.until(ExpectedConditions.visibilityOfElementLocated(By.cssSelector("ul.nav-tabs > li > a.nav-link")));
wait.until(ExpectedConditions.visibilityOfElementLocated(By.xpath("//h4[text()='Foreign Source Definition for Requisition " + REQUISITION_NAME + "']")));
// Add a detector
wait.until(ExpectedConditions.visibilityOfElementLocated(By.id("add-detector")));
clickId("add-detector", false);
wait.until(ExpectedConditions.visibilityOfElementLocated(By.cssSelector("form[name='detectorForm']")));
enterText(By.xpath("//form[@name='detectorForm']//input[@ng-model='detector.name']"), NODE_SERVICE);
enterText(By.xpath("//form[@name='detectorForm']//input[@ng-model='detector.class']"), "HTTP");
findElementByXpath("//form[@name='detectorForm']//ul[contains(@class, 'dropdown-menu')]/li/a/strong[text()='HTTP']").click();
waitForDropdownClose();
// Add a parameter to the detector
clickId("add-detector-parameter", false);
wait.until(ExpectedConditions.visibilityOfElementLocated(By.cssSelector("input[name='paramName']")))
;
enterText(By.cssSelector("input[name='paramName']"), "po");
wait.until(ExpectedConditions.visibilityOfElementLocated(By.xpath("//*[@title='port']"))).click();
enterText(By.cssSelector("input[name='paramValue']"), "8980");
//enterText(By.cssSelector("input[name='paramValue']"), Keys.ENTER);
wait.until(ExpectedConditions.visibilityOfElementLocated(By.id("save-detector"))).click();
waitForModalClose();
enterText(By.cssSelector("input[placeholder='Search/Filter Detectors'][ng-model='filters.detector']"), "HTTP-8980");
wait.until(ExpectedConditions.visibilityOfElementLocated(By.xpath("//td[text()='"+NODE_SERVICE+"']")));
// Add a policy to the detector
findElementByCss("#tab-policies .ng-binding").click();
clickId("add-policy", false);
findElementByCss("form[name='policyForm']");
enterText(By.cssSelector("input#name"), "No IPs");
enterText(By.cssSelector("input#clazz"), "Match IP Interface");
enterText(By.cssSelector("input#clazz"), Keys.ENTER);
enterText(By.xpath("(//input[@name='paramValue'])[1]"), "DO_NOT_PERSIST");
enterText(By.xpath("(//input[@name='paramValue'])[1]"), Keys.ENTER);
enterText(By.xpath("(//input[@name='paramValue'])[2]"), "NO_PARAMETERS");
enterText(By.xpath("(//input[@name='paramValue'])[2]"), Keys.ENTER);
clickId("save-policy", false);
waitForModalClose();
wait.until(ExpectedConditions.visibilityOfElementLocated(By.xpath("//td[text()='No IPs']")));
// Save foreign source definition
clickId("save-foreign-source", false);
wait.until(ExpectedConditions.not(ExpectedConditions.visibilityOfElementLocated(By.id("save-foreign-source"))));
// Go to the Requisition page
clickId("go-back", false);
wait.until(ExpectedConditions.visibilityOfElementLocated(By.xpath("//h4[text()='Requisition " + REQUISITION_NAME + " (0 defined, 0 deployed)']")));
// Add node to a requisition
clickId("add-node", false);
wait.until(ExpectedConditions.visibilityOfElementLocated(By.id("nodeLabel"))).clear();
findElementById("nodeLabel").sendKeys(NODE_LABEL);
wait.until(ExpectedConditions.visibilityOfElementLocated(By.id("foreignId"))).clear();
findElementById("foreignId").sendKeys(NODE_FOREIGNID);
saveNode();
// Add an IP Interface
clickId("tab-interfaces", false);
findElementById("add-interface").click();
wait.until(ExpectedConditions.visibilityOfElementLocated(By.cssSelector("form[name='intfForm']")));
final By ipaddrBy = By.cssSelector("input#ipAddress");
wait.until(ExpectedConditions.visibilityOfElementLocated(ipaddrBy));
enterText(ipaddrBy, NODE_IPADDR);
// Add a service to the IP Interface
findElementById("add-service").click();
final By xpath = By.cssSelector("input[name='serviceName']");
wait.until(ExpectedConditions.visibilityOfElementLocated(xpath));
// Brief pause — presumably lets the typeahead widget finish rendering; TODO confirm.
Thread.sleep(100);
enterText(xpath, "HTTP-89");
findElementByXpath("//a[@title='HTTP-8980']/strong").click();
// Save the IP interface
clickId("save-interface", false);
wait.until(ExpectedConditions.visibilityOfElementLocated(By.xpath("//td[contains(@class,'ng-binding') and text()='" + NODE_IPADDR + "']")));
// Add an asset to the node
clickId("tab-assets", false);
clickId("add-asset", false);
findElementByCss("form[name='assetForm']");
enterText(By.id("asset-name"), "countr");
findElementByXpath("//a[@title='country']/strong").click();
enterText(By.id("asset-value"), "USA");
clickId("save-asset", false);
waitForModalClose();
// Add a category to the node
clickId("tab-categories", false);
clickId("add-category", false);
Thread.sleep(100);
enterText(By.cssSelector("input[name='categoryName']"), NODE_CATEGORY);
findElementByXpath("//a[@title='"+NODE_CATEGORY+"']/strong").click();
saveNode();
// Go to the requisition page
clickId("go-back", false);
wait.until(ExpectedConditions.visibilityOfElementLocated(By.xpath("//td[contains(@class,'ng-binding') and text()='" + NODE_LABEL + "']")));
wait.until(ExpectedConditions.visibilityOfElementLocated(By.xpath("//td[contains(@class,'ng-binding') and text()='" + NODE_FOREIGNID + "']")));
wait.until(ExpectedConditions.visibilityOfElementLocated(By.xpath("//p[contains(@class,'ng-binding') and text()='" + NODE_IPADDR + " (P)']")));
// Synchronize the requisition
clickId("synchronize", false);
wait.until(ExpectedConditions.visibilityOfElementLocated(By.cssSelector(".modal-dialog button.btn.btn-success")));
WebElement modal = findModal();
modal.findElement(By.xpath("//div/button[text()='Yes']")).click();
waitForModalClose();
// Poll until the requisition reports one defined and one deployed node,
// refreshing the stats on every check.
wait.until(new ExpectedCondition<Boolean>() {
@Override
public Boolean apply(final WebDriver input) {
final boolean ret = (getNodesInRequisition(REQUISITION_NAME) == 1 && getNodesInDatabase(REQUISITION_NAME) == 1);
try {
clickId("refresh", false);
clickId("refreshDeployedStats", false);
} catch (final InterruptedException e) {
}
return ret;
}
});
wait.until(ExpectedConditions.visibilityOfElementLocated(By.xpath("//h4[text()='Requisition " + REQUISITION_NAME + " (1 defined, 1 deployed)']")));
// Go to the requisitions page
clickId("go-back", false);
// Wait until the node has been added to the database, using the ReST API
m_driver.get(BASE_URL + "opennms/rest/nodes/" + REQUISITION_NAME + ":" + NODE_FOREIGNID + "/ipinterfaces/" + NODE_IPADDR + "/services/ICMP");
m_driver.manage().timeouts().implicitlyWait(2000, TimeUnit.MILLISECONDS);
try {
// Retry loop: the ICMP service may take a while to appear; exceptions from
// findElement are deliberately swallowed so the page is simply refreshed again.
for (int i=0; i<30; i++) {
try {
final WebElement e = m_driver.findElement(By.xpath("//service/serviceType/name[text()='ICMP']"));
if (e != null) {
break;
}
} catch (Exception e) {}
m_driver.navigate().refresh();
}
} finally {
m_driver.manage().timeouts().implicitlyWait(LOAD_TIMEOUT, TimeUnit.MILLISECONDS);
}
// Open the nodes list page
m_driver.get(BASE_URL + "opennms/");
clickMenuItem("Info", "Nodes", "element/nodeList.htm");
try {
// Disable implicitlyWait
m_driver.manage().timeouts().implicitlyWait(0, TimeUnit.MILLISECONDS);
// If this is the only node on the system, we'll be sent directly to its node details page.
findElementByXpath("//h3[text()='Availability']");
} catch (NoSuchElementException e) {
// If there are multiple nodes, we will be on the node list page, click through to the node
findElementByLink(NODE_LABEL).click();
} finally {
// Restore the implicitlyWait timeout
m_driver.manage().timeouts().implicitlyWait(LOAD_TIMEOUT, TimeUnit.MILLISECONDS);
}
wait.until(ExpectedConditions.elementToBeClickable(By.linkText("ICMP")));
findElementByXpath("//a[contains(@href, 'element/interface.jsp') and text()='" + NODE_IPADDR + "']");
findElementByLink("HTTP-8980");
}
// Clicks the node's save button and waits for it to disappear (save completed).
protected void saveNode() throws InterruptedException {
clickId("save-node", false);
wait.until(ExpectedConditions.not(ExpectedConditions.visibilityOfElementLocated(By.id("save-node"))));
}
// Waits for any open typeahead dropdown inside a modal to close.
protected void waitForDropdownClose() {
waitForClose(By.cssSelector(".modal-dialog ul.dropdown-menu"));
}
// Waits for the current modal dialog to close.
protected void waitForModalClose() {
System.err.println("waitForModalClose()");
waitForClose(By.cssSelector(".modal-dialog"));
}
// Returns the currently visible modal dialog element.
protected WebElement findModal() {
final String xpath = "//div[contains(@class, 'modal-dialog')]";
return wait.until(ExpectedConditions.visibilityOfElementLocated(By.xpath(xpath)));
}
}
|
smoke-test/src/test/java/org/opennms/smoketest/ProvisioningNewUIIT.java
|
/*******************************************************************************
* This file is part of OpenNMS(R).
*
* Copyright (C) 2011-2015 The OpenNMS Group, Inc.
* OpenNMS(R) is Copyright (C) 1999-2015 The OpenNMS Group, Inc.
*
* OpenNMS(R) is a registered trademark of The OpenNMS Group, Inc.
*
* OpenNMS(R) is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published
* by the Free Software Foundation, either version 3 of the License,
* or (at your option) any later version.
*
* OpenNMS(R) is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with OpenNMS(R). If not, see:
* http://www.gnu.org/licenses/
*
* For more information contact:
* OpenNMS(R) Licensing <license@opennms.org>
* http://www.opennms.org/
* http://www.opennms.com/
*******************************************************************************/
package org.opennms.smoketest;
import java.util.concurrent.TimeUnit;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.openqa.selenium.By;
import org.openqa.selenium.Keys;
import org.openqa.selenium.NoSuchElementException;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.WebElement;
import org.openqa.selenium.support.ui.ExpectedCondition;
import org.openqa.selenium.support.ui.ExpectedConditions;
/**
* The Test Class for the New Provisioning UI using AngularJS.
* <p>This test will leave the current OpenNMS installation as it was before running,
* to avoid issues related to the execution order of the smoke tests.</p>
*
* @author <a href="mailto:agalue@opennms.org">Alejandro Galue</a>
*/
public class ProvisioningNewUIIT extends OpenNMSSeleniumTestCase {
private static final String NODE_LABEL = "localNode";
private static final String NODE_FOREIGNID = "localNode";
private static final String NODE_IPADDR = "127.0.0.1";
private static final String NODE_SERVICE = "HTTP-8980";
private static final String NODE_CATEGORY = "Test";
/**
* Sets up the test.
*
* @throws Exception the exception
*/
@Before
public void setUp() throws Exception {
deleteTestRequisition();
provisioningPage();
}
/**
* Tears down the test.
* <p>Be 100% sure that there are no left-overs on the testing OpenNMS installation.</p>
*
* @throws Exception the exception
*/
@After
public void tearDown() throws Exception {
//deleteTestRequisition();
}
/**
* Test requisition UI.
*
* @throws Exception the exception
*/
@Test
public void testRequisitionUI() throws Exception {
setImplicitWait(2, TimeUnit.SECONDS);
// Add a new requisition
clickId("add-requisition", false);
wait.until(ExpectedConditions.visibilityOfElementLocated(By.cssSelector("form.bootbox-form > input.bootbox-input")));
enterText(By.cssSelector("form.bootbox-form > input.bootbox-input"), REQUISITION_NAME);
findElementByXpath("//div/button[text()='OK']").click();
wait.until(ExpectedConditions.visibilityOfElementLocated(By.xpath("//td[text()='" + REQUISITION_NAME + "']")));
// Edit the foreign source
final String editForeignSourceButton = "button.btn[uib-tooltip='Edit detectors and policies of the "+REQUISITION_NAME+" Requisition']";
wait.until(ExpectedConditions.elementToBeClickable(By.cssSelector(editForeignSourceButton))).click();
wait.until(ExpectedConditions.visibilityOfElementLocated(By.cssSelector("ul.nav-tabs > li > a.nav-link")));
wait.until(ExpectedConditions.visibilityOfElementLocated(By.xpath("//h4[text()='Foreign Source Definition for Requisition " + REQUISITION_NAME + "']")));
// Add a detector
wait.until(ExpectedConditions.visibilityOfElementLocated(By.id("add-detector")));
clickId("add-detector", false);
wait.until(ExpectedConditions.visibilityOfElementLocated(By.cssSelector("form[name='detectorForm']")));
enterText(By.xpath("//form[@name='detectorForm']//input[@ng-model='detector.name']"), NODE_SERVICE);
enterText(By.xpath("//form[@name='detectorForm']//input[@ng-model='detector.class']"), "HTTP");
findElementByXpath("//form[@name='detectorForm']//ul[contains(@class, 'dropdown-menu')]/li/a/strong[text()='HTTP']").click();
waitForDropdownClose();
// Add a parameter to the detector
clickId("add-detector-parameter", false);
wait.until(ExpectedConditions.visibilityOfElementLocated(By.cssSelector("input[name='paramName']")));
enterText(By.cssSelector("input[name='paramName']"), "po");
wait.until(ExpectedConditions.visibilityOfElementLocated(By.xpath("//*[@title='port']"))).click();
enterText(By.cssSelector("input[name='paramValue']"), "8980");
//enterText(By.cssSelector("input[name='paramValue']"), Keys.ENTER);
wait.until(ExpectedConditions.visibilityOfElementLocated(By.id("save-detector"))).click();
waitForModalClose();
enterText(By.cssSelector("input[placeholder='Search/Filter Detectors'][ng-model='filters.detector']"), "HTTP-8980");
wait.until(ExpectedConditions.visibilityOfElementLocated(By.xpath("//td[text()='"+NODE_SERVICE+"']")));
// Add a policy to the detector
findElementByCss("#tab-policies .ng-binding").click();
clickId("add-policy", false);
findElementByCss("form[name='policyForm']");
enterText(By.cssSelector("input#name"), "No IPs");
enterText(By.cssSelector("input#clazz"), "Match IP Interface");
enterText(By.cssSelector("input#clazz"), Keys.ENTER);
enterText(By.xpath("(//input[@name='paramValue'])[1]"), "DO_NOT_PERSIST");
enterText(By.xpath("(//input[@name='paramValue'])[1]"), Keys.ENTER);
enterText(By.xpath("(//input[@name='paramValue'])[2]"), "NO_PARAMETERS");
enterText(By.xpath("(//input[@name='paramValue'])[2]"), Keys.ENTER);
clickId("save-policy", false);
waitForModalClose();
wait.until(ExpectedConditions.visibilityOfElementLocated(By.xpath("//td[text()='No IPs']")));
// Save foreign source definition
clickId("save-foreign-source", false);
wait.until(ExpectedConditions.not(ExpectedConditions.visibilityOfElementLocated(By.id("save-foreign-source"))));
// Go to the Requisition page
clickId("go-back", false);
wait.until(ExpectedConditions.visibilityOfElementLocated(By.xpath("//h4[text()='Requisition " + REQUISITION_NAME + " (0 defined, 0 deployed)']")));
// Add node to a requisition
clickId("add-node", false);
wait.until(ExpectedConditions.visibilityOfElementLocated(By.id("nodeLabel"))).clear();
findElementById("nodeLabel").sendKeys(NODE_LABEL);
wait.until(ExpectedConditions.visibilityOfElementLocated(By.id("foreignId"))).clear();
findElementById("foreignId").sendKeys(NODE_FOREIGNID);
saveNode();
// Add an IP Interface
clickId("tab-interfaces", false);
findElementById("add-interface").click();
wait.until(ExpectedConditions.visibilityOfElementLocated(By.cssSelector("form[name='intfForm']")));
final By ipaddrBy = By.cssSelector("input#ipAddress");
wait.until(ExpectedConditions.visibilityOfElementLocated(ipaddrBy));
enterText(ipaddrBy, NODE_IPADDR);
// Add a service to the IP Interface
findElementById("add-service").click();
final By xpath = By.cssSelector("input[name='serviceName']");
wait.until(ExpectedConditions.visibilityOfElementLocated(xpath));
Thread.sleep(100);
enterText(xpath, "HTTP-89");
findElementByXpath("//a[@title='HTTP-8980']/strong").click();
// Save the IP interface
clickId("save-interface", false);
wait.until(ExpectedConditions.visibilityOfElementLocated(By.xpath("//td[contains(@class,'ng-binding') and text()='" + NODE_IPADDR + "']")));
// Add an asset to the node
clickId("tab-assets", false);
clickId("add-asset", false);
findElementByCss("form[name='assetForm']");
enterText(By.id("asset-name"), "countr");
findElementByXpath("//a[@title='country']/strong").click();
enterText(By.id("asset-value"), "USA");
clickId("save-asset", false);
waitForModalClose();
// Add a category to the node
clickId("tab-categories", false);
clickId("add-category", false);
Thread.sleep(100);
enterText(By.cssSelector("input[name='categoryName']"), NODE_CATEGORY);
findElementByXpath("//a[@title='"+NODE_CATEGORY+"']/strong").click();
saveNode();
// Go to the requisition page
clickId("go-back", false);
wait.until(ExpectedConditions.visibilityOfElementLocated(By.xpath("//td[contains(@class,'ng-binding') and text()='" + NODE_LABEL + "']")));
wait.until(ExpectedConditions.visibilityOfElementLocated(By.xpath("//td[contains(@class,'ng-binding') and text()='" + NODE_FOREIGNID + "']")));
wait.until(ExpectedConditions.visibilityOfElementLocated(By.xpath("//p[contains(@class,'ng-binding') and text()='" + NODE_IPADDR + " (P)']")));
// Synchronize the requisition
clickId("synchronize", false);
wait.until(ExpectedConditions.visibilityOfElementLocated(By.cssSelector(".modal-dialog button.btn.btn-success")));
WebElement modal = findModal();
modal.findElement(By.xpath("//div/button[text()='Yes']")).click();
waitForModalClose();
wait.until(new ExpectedCondition<Boolean>() {
@Override
public Boolean apply(final WebDriver input) {
final boolean ret = (getNodesInRequisition(REQUISITION_NAME) == 1 && getNodesInDatabase(REQUISITION_NAME) == 1);
try {
clickId("refresh", false);
clickId("refreshDeployedStats", false);
} catch (final InterruptedException e) {
}
return ret;
}
});
wait.until(ExpectedConditions.visibilityOfElementLocated(By.xpath("//h4[text()='Requisition " + REQUISITION_NAME + " (1 defined, 1 deployed)']")));
// Go to the requisitions page
clickId("go-back", false);
// Wait until the node has been added to the database, using the ReST API
m_driver.get(BASE_URL + "opennms/rest/nodes/" + REQUISITION_NAME + ":" + NODE_FOREIGNID + "/ipinterfaces/" + NODE_IPADDR + "/services/ICMP");
m_driver.manage().timeouts().implicitlyWait(2000, TimeUnit.MILLISECONDS);
try {
for (int i=0; i<30; i++) {
try {
final WebElement e = m_driver.findElement(By.xpath("//service/serviceType/name[text()='ICMP']"));
if (e != null) {
break;
}
} catch (Exception e) {}
m_driver.navigate().refresh();
}
} finally {
m_driver.manage().timeouts().implicitlyWait(LOAD_TIMEOUT, TimeUnit.MILLISECONDS);
}
// Open the nodes list page
m_driver.get(BASE_URL + "opennms/");
clickMenuItem("Info", "Nodes", "element/nodeList.htm");
try {
// Disable implicitlyWait
m_driver.manage().timeouts().implicitlyWait(0, TimeUnit.MILLISECONDS);
// If this is the only node on the system, we'll be sent directly to its node details page.
findElementByXpath("//h3[text()='Availability']");
} catch (NoSuchElementException e) {
// If there are multiple nodes, we will be on the node list page, click through to the node
findElementByLink(NODE_LABEL).click();
} finally {
// Restore the implicitlyWait timeout
m_driver.manage().timeouts().implicitlyWait(LOAD_TIMEOUT, TimeUnit.MILLISECONDS);
}
wait.until(ExpectedConditions.elementToBeClickable(By.linkText("ICMP")));
findElementByXpath("//a[contains(@href, 'element/interface.jsp') and text()='" + NODE_IPADDR + "']");
findElementByLink("HTTP-8980");
}
    /**
     * Clicks the "Save Node" button in the requisition node editor and waits
     * until the button is no longer visible, which signals that the node form
     * has been submitted and the editor view has been dismissed.
     *
     * @throws InterruptedException if interrupted while waiting
     *         (presumably clickId sleeps or blocks internally — confirm against its definition)
     */
    protected void saveNode() throws InterruptedException {
        clickId("save-node", false);
        // Wait for the save button to disappear rather than for a success marker;
        // the button's absence is the observable signal that the form was accepted.
        wait.until(ExpectedConditions.not(ExpectedConditions.visibilityOfElementLocated(By.id("save-node"))));
    }
}
|
move these back here
|
smoke-test/src/test/java/org/opennms/smoketest/ProvisioningNewUIIT.java
|
move these back here
|
|
Java
|
lgpl-2.1
|
c4020aa1c011b078b92661a1e8b3336f4023dac6
| 0
|
bansalayush/checkstyle,gallandarakhneorg/checkstyle,HubSpot/checkstyle,checkstyle/checkstyle,universsky/checkstyle,romani/checkstyle,ivanov-alex/checkstyle,Godin/checkstyle,FeodorFitsner/checkstyle,baratali/checkstyle,llocc/checkstyle,llocc/checkstyle,sharang108/checkstyle,mkordas/checkstyle,WonderCsabo/checkstyle,jochenvdv/checkstyle,another-dave/checkstyle,MEZk/checkstyle,ilanKeshet/checkstyle,jasonchaffee/checkstyle,AkshitaKukreja30/checkstyle,romani/checkstyle,vboerchers/checkstyle,jonmbake/checkstyle,mkordas/checkstyle,checkstyle/checkstyle,cs1331/checkstyle,rnveach/checkstyle,romani/checkstyle,jonmbake/checkstyle,sharang108/checkstyle,cs1331/checkstyle,AkshitaKukreja30/checkstyle,sabaka/checkstyle,another-dave/checkstyle,jonmbake/checkstyle,jdoyle65/checkstyle,bansalayush/checkstyle,beckerhd/checkstyle,vboerchers/checkstyle,autermann/checkstyle,jasonchaffee/checkstyle,beckerhd/checkstyle,jasonchaffee/checkstyle,vboerchers/checkstyle,pietern/checkstyle,StetsiukRoman/checkstyle,Bhavik3/checkstyle,sabaka/checkstyle,liscju/checkstyle,pietern/checkstyle,rnveach/checkstyle,philwebb/checkstyle,checkstyle/checkstyle,gallandarakhneorg/checkstyle,rnveach/checkstyle,attatrol/checkstyle,ivanov-alex/checkstyle,llocc/checkstyle,cs1331/checkstyle,WonderCsabo/checkstyle,zofuthan/checkstyle-1,romani/checkstyle,liscju/checkstyle,FeodorFitsner/checkstyle,pbaranchikov/checkstyle,pietern/checkstyle,nikhilgupta23/checkstyle,sirdis/checkstyle,rnveach/checkstyle,FeodorFitsner/checkstyle,ilanKeshet/checkstyle,izishared/checkstyle,izishared/checkstyle,nikhilgupta23/checkstyle,izishared/checkstyle,bansalayush/checkstyle,ilanKeshet/checkstyle,philwebb/checkstyle,romani/checkstyle,rmswimkktt/checkstyle,nikhilgupta23/checkstyle,AkshitaKukreja30/checkstyle,sirdis/checkstyle,Andrew0701/checkstyle,HubSpot/checkstyle,Bhavik3/checkstyle,mkordas/checkstyle,attatrol/checkstyle,Bhavik3/checkstyle,designreuse/checkstyle,another-dave/checkstyle,zofuthan/checkstyle-1,checkstyle/checkstyle,liscju/chec
kstyle,naver/checkstyle,philwebb/checkstyle,WilliamRen/checkstyle,jochenvdv/checkstyle,ivanov-alex/checkstyle,rmswimkktt/checkstyle,beckerhd/checkstyle,pbaranchikov/checkstyle,naver/checkstyle,WilliamRen/checkstyle,baratali/checkstyle,sirdis/checkstyle,autermann/checkstyle,universsky/checkstyle,sharang108/checkstyle,jdoyle65/checkstyle,naver/checkstyle,MEZk/checkstyle,gallandarakhneorg/checkstyle,checkstyle/checkstyle,rnveach/checkstyle,baratali/checkstyle,Godin/checkstyle,MEZk/checkstyle,designreuse/checkstyle,autermann/checkstyle,universsky/checkstyle,StetsiukRoman/checkstyle,jochenvdv/checkstyle,zofuthan/checkstyle-1,WilliamRen/checkstyle,designreuse/checkstyle,rmswimkktt/checkstyle,HubSpot/checkstyle,StetsiukRoman/checkstyle,Andrew0701/checkstyle,rnveach/checkstyle,sabaka/checkstyle,attatrol/checkstyle,checkstyle/checkstyle,romani/checkstyle,Godin/checkstyle,WonderCsabo/checkstyle
|
////////////////////////////////////////////////////////////////////////////////
// checkstyle: Checks Java source code for adherence to a set of rules.
// Copyright (C) 2001-2009 Oliver Burn
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
// License as published by the Free Software Foundation; either
// version 2.1 of the License, or (at your option) any later version.
//
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public
// License along with this library; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
////////////////////////////////////////////////////////////////////////////////
package com.puppycrawl.tools.checkstyle.api;
import com.google.common.collect.ImmutableMap;
import java.util.Map;
/**
 * This enum defines the various Javadoc tags and their properties.
*
* <p>
* This class was modeled after documentation located at
* <a href="http://java.sun.com/j2se/1.5.0/docs/tooldocs/windows/javadoc.html">
* javadoc</a>
*
* and
*
* <a href="http://java.sun.com/j2se/javadoc/writingdoccomments/index.html">
* how to write</a>.
* </p>
*
* <p>
* Some of this documentation was a little incomplete (ex: valid placement of
* code, value, and literal tags).
* </p>
*
* <p>
* Whenever an inconsistency was found the author's judgment was used.
* </p>
*
* <p>
* For now, the number of required/optional tag arguments are not included
* because some Javadoc tags have very complex rules for determining this
* (ex: {@code {@value}} tag).
* </p>
*
* <p>
* Also, the {@link #isValidOn(DetailAST) isValidOn} method does not consider
* classes defined in a local code block (method, init block, etc.).
 * </p>
*
* @author Travis Schneeberger
*/
public enum JavadocTagInfo
{
    /**
     * {@code @author}.
     */
    AUTHOR("@author", "author", Type.BLOCK, true, true)
    {
        /** {@inheritDoc} */
        @Override
        public boolean isValidOn(final DetailAST aAst)
        {
            final int type = aAst.getType();
            // @author is only valid on top-level documented entities, not members.
            return type == TokenTypes.PACKAGE_DEF
                || type == TokenTypes.CLASS_DEF
                || type == TokenTypes.INTERFACE_DEF
                || type == TokenTypes.ENUM_DEF
                || type == TokenTypes.ANNOTATION_DEF;
        }
    },

    /**
     * {@code {@code}}.
     */
    CODE("{@code}", "code", Type.INLINE, true, true)
    {
        /** {@inheritDoc} */
        @Override
        public boolean isValidOn(final DetailAST aAst)
        {
            final int type = aAst.getType();
            // Valid everywhere Javadoc may appear except local variables,
            // which cannot carry Javadoc at all.
            return type == TokenTypes.PACKAGE_DEF
                || type == TokenTypes.CLASS_DEF
                || type == TokenTypes.INTERFACE_DEF
                || type == TokenTypes.ENUM_DEF
                || type == TokenTypes.ANNOTATION_DEF
                || type == TokenTypes.METHOD_DEF
                || type == TokenTypes.CTOR_DEF
                || (type == TokenTypes.VARIABLE_DEF
                && !ScopeUtils.isLocalVariableDef(aAst));
        }
    },

    /**
     * {@code {@docRoot}}.
     */
    DOC_ROOT("{@docRoot}", "docRoot", Type.INLINE, true, true)
    {
        /** {@inheritDoc} */
        @Override
        public boolean isValidOn(final DetailAST aAst)
        {
            final int type = aAst.getType();
            return type == TokenTypes.PACKAGE_DEF
                || type == TokenTypes.CLASS_DEF
                || type == TokenTypes.INTERFACE_DEF
                || type == TokenTypes.ENUM_DEF
                || type == TokenTypes.ANNOTATION_DEF
                || type == TokenTypes.METHOD_DEF || type == TokenTypes.CTOR_DEF
                || (type == TokenTypes.VARIABLE_DEF
                && !ScopeUtils.isLocalVariableDef(aAst));
        }
    },

    /**
     * {@code @deprecated}.
     */
    DEPRECATED("@deprecated", "deprecated", Type.BLOCK, false, false)
    {
        /** {@inheritDoc} */
        @Override
        public boolean isValidOn(final DetailAST aAst)
        {
            final int type = aAst.getType();
            // Unlike most block tags, @deprecated is also valid on enum
            // constants and annotation fields, but not on packages.
            return type == TokenTypes.CLASS_DEF
                || type == TokenTypes.INTERFACE_DEF
                || type == TokenTypes.ENUM_DEF
                || type == TokenTypes.ANNOTATION_DEF
                || type == TokenTypes.METHOD_DEF
                || type == TokenTypes.CTOR_DEF
                || type == TokenTypes.ENUM_CONSTANT_DEF
                || type == TokenTypes.ANNOTATION_FIELD_DEF
                || (type == TokenTypes.VARIABLE_DEF
                && !ScopeUtils.isLocalVariableDef(aAst));
        }
    },

    /**
     * {@code @exception}.
     */
    EXCEPTION("@exception", "exception", Type.BLOCK, false, false)
    {
        /** {@inheritDoc} */
        @Override
        public boolean isValidOn(final DetailAST aAst)
        {
            final int type = aAst.getType();
            return type == TokenTypes.METHOD_DEF || type == TokenTypes.CTOR_DEF;
        }
    },

    /**
     * {@code {@inheritDoc}}.
     */
    INHERIT_DOC("{@inheritDoc}", "inheritDoc", Type.INLINE, false, false)
    {
        /** {@inheritDoc} */
        @Override
        public boolean isValidOn(final DetailAST aAst)
        {
            final int type = aAst.getType();
            // Only instance methods visible to subclasses can inherit
            // documentation: static and private methods cannot override,
            // so {@inheritDoc} is meaningless on them.
            return type == TokenTypes.METHOD_DEF
                && !aAst.branchContains(TokenTypes.LITERAL_STATIC)
                && ScopeUtils.getScopeFromMods(aAst
                    .findFirstToken(TokenTypes.MODIFIERS)) != Scope.PRIVATE;
        }
    },

    /**
     * {@code {@link}}.
     */
    LINK("{@link}", "link", Type.INLINE, true, true)
    {
        /** {@inheritDoc} */
        @Override
        public boolean isValidOn(final DetailAST aAst)
        {
            final int type = aAst.getType();
            return type == TokenTypes.PACKAGE_DEF
                || type == TokenTypes.CLASS_DEF
                || type == TokenTypes.INTERFACE_DEF
                || type == TokenTypes.ENUM_DEF
                || type == TokenTypes.ANNOTATION_DEF
                || type == TokenTypes.METHOD_DEF || type == TokenTypes.CTOR_DEF
                || (type == TokenTypes.VARIABLE_DEF
                && !ScopeUtils.isLocalVariableDef(aAst));
        }
    },

    /**
     * {@code {@linkplain}}.
     */
    LINKPLAIN("{@linkplain}", "linkplain", Type.INLINE, true, true)
    {
        /** {@inheritDoc} */
        @Override
        public boolean isValidOn(final DetailAST aAst)
        {
            final int type = aAst.getType();
            return type == TokenTypes.PACKAGE_DEF
                || type == TokenTypes.CLASS_DEF
                || type == TokenTypes.INTERFACE_DEF
                || type == TokenTypes.ENUM_DEF
                || type == TokenTypes.ANNOTATION_DEF
                || type == TokenTypes.METHOD_DEF || type == TokenTypes.CTOR_DEF
                || (type == TokenTypes.VARIABLE_DEF
                && !ScopeUtils.isLocalVariableDef(aAst));
        }
    },

    /**
     * {@code {@literal}}.
     */
    LITERAL("{@literal}", "literal", Type.INLINE, true, true)
    {
        /** {@inheritDoc} */
        @Override
        public boolean isValidOn(final DetailAST aAst)
        {
            final int type = aAst.getType();
            return type == TokenTypes.PACKAGE_DEF
                || type == TokenTypes.CLASS_DEF
                || type == TokenTypes.INTERFACE_DEF
                || type == TokenTypes.ENUM_DEF
                || type == TokenTypes.ANNOTATION_DEF
                || type == TokenTypes.METHOD_DEF || type == TokenTypes.CTOR_DEF
                || (type == TokenTypes.VARIABLE_DEF
                && !ScopeUtils.isLocalVariableDef(aAst));
        }
    },

    /**
     * {@code @param}.
     */
    PARAM("@param", "param", Type.BLOCK, false, false)
    {
        /** {@inheritDoc} */
        @Override
        public boolean isValidOn(final DetailAST aAst)
        {
            final int type = aAst.getType();
            // Classes and interfaces are included because @param also
            // documents generic type parameters.
            return type == TokenTypes.CLASS_DEF
                || type == TokenTypes.INTERFACE_DEF
                || type == TokenTypes.METHOD_DEF
                || type == TokenTypes.CTOR_DEF;
        }
    },

    /**
     * {@code @return}.
     */
    RETURN("@return", "return", Type.BLOCK, false, false)
    {
        /** {@inheritDoc} */
        @Override
        public boolean isValidOn(final DetailAST aAst)
        {
            final int type = aAst.getType();
            final DetailAST returnType = aAst.findFirstToken(TokenTypes.TYPE);
            // @return only makes sense on a method that returns a value;
            // the void check is short-circuited behind the METHOD_DEF test,
            // so returnType is never dereferenced for non-method ASTs.
            return type == TokenTypes.METHOD_DEF
                && returnType.getFirstChild().getType()
                != TokenTypes.LITERAL_VOID;
        }
    },

    /**
     * {@code @see}.
     */
    SEE("@see", "see", Type.BLOCK, true, true)
    {
        /** {@inheritDoc} */
        @Override
        public boolean isValidOn(final DetailAST aAst)
        {
            final int type = aAst.getType();
            return type == TokenTypes.PACKAGE_DEF
                || type == TokenTypes.CLASS_DEF
                || type == TokenTypes.INTERFACE_DEF
                || type == TokenTypes.ENUM_DEF
                || type == TokenTypes.ANNOTATION_DEF
                || type == TokenTypes.METHOD_DEF
                || type == TokenTypes.CTOR_DEF
                || (type == TokenTypes.VARIABLE_DEF
                && !ScopeUtils.isLocalVariableDef(aAst));
        }
    },

    /**
     * {@code @serial}.
     */
    SERIAL("@serial", "serial", Type.BLOCK, true, false)
    {
        /** {@inheritDoc} */
        @Override
        public boolean isValidOn(final DetailAST aAst)
        {
            final int type = aAst.getType();
            // @serial documents serializable fields only.
            return type == TokenTypes.VARIABLE_DEF
                && !ScopeUtils.isLocalVariableDef(aAst);
        }
    },

    /**
     * {@code @serialData}.
     */
    SERIAL_DATA("@serialData", "serialData", Type.BLOCK, false, false)
    {
        /** {@inheritDoc} */
        @Override
        public boolean isValidOn(final DetailAST aAst)
        {
            final int type = aAst.getType();
            final DetailAST methodNameAst = aAst
                .findFirstToken(TokenTypes.IDENT);
            final String methodName = methodNameAst.getText();
            // @serialData is only valid on the special serialization
            // callback methods defined by java.io.Serializable/Externalizable.
            return type == TokenTypes.METHOD_DEF
                && ("writeObject".equals(methodName)
                    || "readObject".equals(methodName)
                    || "writeExternal".equals(methodName)
                    || "readExternal".equals(methodName)
                    || "writeReplace".equals(methodName)
                    || "readResolve"
                    .equals(methodName));
        }
    },

    /**
     * {@code @serialField}.
     */
    SERIAL_FIELD("@serialField", "serialField", Type.BLOCK, false, false)
    {
        /** {@inheritDoc} */
        @Override
        public boolean isValidOn(final DetailAST aAst)
        {
            final int type = aAst.getType();
            final DetailAST varType = aAst.findFirstToken(TokenTypes.TYPE);
            // Only valid on the serialPersistentFields-style array field
            // declared as ObjectStreamField[].
            return type == TokenTypes.VARIABLE_DEF
                && varType.getType() == TokenTypes.ARRAY_DECLARATOR
                && "ObjectStreamField"
                    .equals(varType.getFirstChild().getText());
        }
    },

    /**
     * {@code @since}.
     */
    SINCE("@since", "since", Type.BLOCK, true, true)
    {
        /** {@inheritDoc} */
        @Override
        public boolean isValidOn(final DetailAST aAst)
        {
            final int type = aAst.getType();
            return type == TokenTypes.PACKAGE_DEF
                || type == TokenTypes.CLASS_DEF
                || type == TokenTypes.INTERFACE_DEF
                || type == TokenTypes.ENUM_DEF
                || type == TokenTypes.ANNOTATION_DEF
                || type == TokenTypes.METHOD_DEF
                || type == TokenTypes.CTOR_DEF
                || (type == TokenTypes.VARIABLE_DEF
                && !ScopeUtils.isLocalVariableDef(aAst));
        }
    },

    /**
     * {@code @throws}.
     */
    THROWS("@throws", "throws", Type.BLOCK, false, false)
    {
        /** {@inheritDoc} */
        @Override
        public boolean isValidOn(final DetailAST aAst)
        {
            final int type = aAst.getType();
            return type == TokenTypes.METHOD_DEF
                || type == TokenTypes.CTOR_DEF;
        }
    },

    /**
     * {@code {@value}}.
     */
    VALUE("{@value}", "value", Type.INLINE, true, true)
    {
        /** {@inheritDoc} */
        @Override
        public boolean isValidOn(final DetailAST aAst)
        {
            final int type = aAst.getType();
            return type == TokenTypes.PACKAGE_DEF
                || type == TokenTypes.CLASS_DEF
                || type == TokenTypes.INTERFACE_DEF
                || type == TokenTypes.ENUM_DEF
                || type == TokenTypes.ANNOTATION_DEF
                || type == TokenTypes.METHOD_DEF
                || type == TokenTypes.CTOR_DEF
                || (type == TokenTypes.VARIABLE_DEF
                && !ScopeUtils.isLocalVariableDef(aAst));
        }
    },

    /**
     * {@code @version}.
     */
    VERSION("@version", "version", Type.BLOCK, true, true)
    {
        /** {@inheritDoc} */
        @Override
        public boolean isValidOn(final DetailAST aAst)
        {
            final int type = aAst.getType();
            return type == TokenTypes.PACKAGE_DEF
                || type == TokenTypes.CLASS_DEF
                || type == TokenTypes.INTERFACE_DEF
                || type == TokenTypes.ENUM_DEF
                || type == TokenTypes.ANNOTATION_DEF;
        }
    };

    /** Immutable map from tag text (e.g. "@author") to its enum constant. */
    private static final Map<String, JavadocTagInfo> TEXT_TO_TAG;

    /** Immutable map from tag name (e.g. "author") to its enum constant. */
    private static final Map<String, JavadocTagInfo> NAME_TO_TAG;

    // Build both lookup maps once from the enum's values; the maps back
    // fromText(String) and fromName(String).
    static
    {
        final ImmutableMap.Builder<String, JavadocTagInfo> textToTagBuilder =
            new ImmutableMap.Builder<String, JavadocTagInfo>();

        final ImmutableMap.Builder<String, JavadocTagInfo> nameToTagBuilder =
            new ImmutableMap.Builder<String, JavadocTagInfo>();

        for (final JavadocTagInfo tag : JavadocTagInfo.values()) {
            textToTagBuilder.put(tag.getText(), tag);
            nameToTagBuilder.put(tag.getName(), tag);
        }

        TEXT_TO_TAG = textToTagBuilder.build();
        NAME_TO_TAG = nameToTagBuilder.build();
    }

    /** The tag text, e.g. "@author" or "{@code}". */
    private final String mText;
    /** The bare tag name without decoration, e.g. "author" or "code". */
    private final String mName;
    /** Whether the tag is a block tag or an inline tag. */
    private final Type mType;
    /** Whether the tag is valid in a package.html Javadoc file. */
    private final boolean mValidInPackageHtml;
    /** Whether the tag is valid in an overview.html Javadoc file. */
    private final boolean mValidInOverviewHtml;

    /**
     * Sets the various properties of a Javadoc tag.
     *
     * @param aText the tag text
     * @param aName the tag name
     * @param aType the type of tag
     * @param aValidInPackageHtml whether the tag is valid
     * in package.html file
     * @param aValidInOverviewHtml whether the tag is valid
     * in overview.html file
     */
    private JavadocTagInfo(final String aText, final String aName,
        final Type aType, final boolean aValidInPackageHtml,
        final boolean aValidInOverviewHtml)
    {
        this.mText = aText;
        this.mName = aName;
        this.mType = aType;
        this.mValidInPackageHtml = aValidInPackageHtml;
        this.mValidInOverviewHtml = aValidInOverviewHtml;
    }

    /**
     * Checks if a particular Javadoc tag is valid within a Javadoc block of a
     * given AST.
     *
     * <p>
     * For example: Given a call to
     * <code>JavadocTag.RETURN{@link #isValidOn(DetailAST)}</code>.
     * </p>
     *
     * <p>
     * If passing in a DetailAST representing a non-void METHOD_DEF
     * <code> true </code> would be returned. If passing in a DetailAST
     * representing a CLASS_DEF <code> false </code> would be returned because
     * CLASS_DEF's cannot return a value.
     * </p>
     *
     * @param aAST the AST representing a type that can be Javadoc'd
     * @return true if tag is valid.
     */
    public abstract boolean isValidOn(DetailAST aAST);

    /**
     * Checks if tag is valid in a package.html Javadoc file.
     *
     * @return true if tag is valid.
     */
    public boolean isValidInPackageHtml()
    {
        return this.mValidInPackageHtml;
    }

    /**
     * Checks if tag is valid in a overview.html Javadoc file.
     *
     * @return true if tag is valid.
     */
    public boolean isValidInOverviewHtml()
    {
        return this.mValidInOverviewHtml;
    }

    /**
     * Gets the tag text.
     * @return the tag text
     */
    public String getText()
    {
        return this.mText;
    }

    /**
     * Gets the tag name.
     * @return the tag name
     */
    public String getName()
    {
        return this.mName;
    }

    /**
     * Gets the Tag type defined by {@link JavadocTagInfo.Type Type}.
     * @return the Tag type
     */
    public Type getType()
    {
        return this.mType;
    }

    /**
     * returns a JavadocTag from the tag text.
     * @param aText String representing the tag text
     * @return Returns a JavadocTag type from a String representing the tag
     * @throws NullPointerException if the text is null
     * @throws IllegalArgumentException if the text is not a valid tag
     */
    public static JavadocTagInfo fromText(final String aText)
    {
        if (aText == null) {
            throw new NullPointerException("the text is null");
        }

        final JavadocTagInfo tag = TEXT_TO_TAG.get(aText);

        if (tag == null) {
            throw new IllegalArgumentException("the text [" + aText
                + "] is not a valid Javadoc tag text");
        }

        return tag;
    }

    /**
     * returns a JavadocTag from the tag name.
     * @param aName String name of the tag
     * @return Returns a JavadocTag type from a String representing the tag
     * @throws NullPointerException if the text is null
     * @throws IllegalArgumentException if the text is not a valid tag
     */
    public static JavadocTagInfo fromName(final String aName)
    {
        if (aName == null) {
            throw new NullPointerException("the name is null");
        }

        final JavadocTagInfo tag = NAME_TO_TAG.get(aName);

        if (tag == null) {
            throw new IllegalArgumentException("the name [" + aName
                + "] is not a valid Javadoc tag name");
        }

        return tag;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public String toString()
    {
        return "text [" + this.mText + "] name [" + this.mName
            + "] type [" + this.mType
            + "] validInPackageHtml [" + this.mValidInPackageHtml
            + "] validInOverviewHtml [" + this.mValidInOverviewHtml + "]";
    }

    /**
     * The Javadoc Type.
     *
     * For example a {@code @param} tag is a block tag while a
     * {@code {@link}} tag is a inline tag.
     *
     * @author Travis Schneeberger
     */
    public enum Type
    {
        /** block type. **/
        BLOCK,

        /** inline type. **/
        INLINE;
    }
}
|
src/checkstyle/com/puppycrawl/tools/checkstyle/api/JavadocTagInfo.java
|
////////////////////////////////////////////////////////////////////////////////
// checkstyle: Checks Java source code for adherence to a set of rules.
// Copyright (C) 2001-2009 Oliver Burn
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
// License as published by the Free Software Foundation; either
// version 2.1 of the License, or (at your option) any later version.
//
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public
// License along with this library; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
////////////////////////////////////////////////////////////////////////////////
package com.puppycrawl.tools.checkstyle.api;
import com.google.common.collect.ImmutableMap;
import java.util.Map;
/**
 * This enum defines the various Javadoc tags and their properties.
*
* <p>
* This class was modeled after documentation located at
* <a href="http://java.sun.com/j2se/1.5.0/docs/tooldocs/windows/javadoc.html">
* javadoc</a>
*
* and
*
* <a href="http://java.sun.com/j2se/javadoc/writingdoccomments/index.html">
* how to write</a>.
* </p>
*
* <p>
* Some of this documentation was a little incomplete (ex: valid placement of
* code, value, and literal tags).
* </p>
*
* <p>
* Whenever an inconsistency was found the author's judgment was used.
* </p>
*
* <p>
* For now, the number of required/optional tag arguments are not included
* because some Javadoc tags have very complex rules for determining this
 * (ex: <pre>{@value}</pre> tag).
* </p>
*
* <p>
* Also, the {@link #isValidOn(DetailAST) isValidOn} method does not consider
* classes defined in a local code block (method, init block, etc.).
 * </p>
*
* @author Travis Schneeberger
*/
public enum JavadocTagInfo
{
/**
* <pre>
* @author
* </pre>.
*/
AUTHOR("@author", "author", Type.BLOCK, true, true)
{
/** {@inheritDoc} */
@Override
public boolean isValidOn(final DetailAST aAst)
{
final int type = aAst.getType();
return type == TokenTypes.PACKAGE_DEF
|| type == TokenTypes.CLASS_DEF
|| type == TokenTypes.INTERFACE_DEF
|| type == TokenTypes.ENUM_DEF
|| type == TokenTypes.ANNOTATION_DEF;
}
},
/**
* <pre>
* @code}
* </pre>.
*/
CODE("{@code}", "code", Type.INLINE, true, true)
{
/** {@inheritDoc} */
@Override
public boolean isValidOn(final DetailAST aAst)
{
final int type = aAst.getType();
return type == TokenTypes.PACKAGE_DEF
|| type == TokenTypes.CLASS_DEF
|| type == TokenTypes.INTERFACE_DEF
|| type == TokenTypes.ENUM_DEF
|| type == TokenTypes.ANNOTATION_DEF
|| type == TokenTypes.METHOD_DEF
|| type == TokenTypes.CTOR_DEF
|| (type == TokenTypes.VARIABLE_DEF
&& !ScopeUtils.isLocalVariableDef(aAst));
}
},
/**
* <pre>
* @docRoot}
* </pre>.
*/
DOC_ROOT("{@docRoot}", "docRoot", Type.INLINE, true, true)
{
/** {@inheritDoc} */
@Override
public boolean isValidOn(final DetailAST aAst)
{
final int type = aAst.getType();
return type == TokenTypes.PACKAGE_DEF
|| type == TokenTypes.CLASS_DEF
|| type == TokenTypes.INTERFACE_DEF
|| type == TokenTypes.ENUM_DEF
|| type == TokenTypes.ANNOTATION_DEF
|| type == TokenTypes.METHOD_DEF || type == TokenTypes.CTOR_DEF
|| (type == TokenTypes.VARIABLE_DEF
&& !ScopeUtils.isLocalVariableDef(aAst));
}
},
/**
* <pre>
* @deprecated
* </pre>.
*/
DEPRECATED("@deprecated", "deprecated", Type.BLOCK, false, false)
{
/** {@inheritDoc} */
@Override
public boolean isValidOn(final DetailAST aAst)
{
final int type = aAst.getType();
return type == TokenTypes.CLASS_DEF
|| type == TokenTypes.INTERFACE_DEF
|| type == TokenTypes.ENUM_DEF
|| type == TokenTypes.ANNOTATION_DEF
|| type == TokenTypes.METHOD_DEF
|| type == TokenTypes.CTOR_DEF
|| type == TokenTypes.ENUM_CONSTANT_DEF
|| type == TokenTypes.ANNOTATION_FIELD_DEF
|| (type == TokenTypes.VARIABLE_DEF
&& !ScopeUtils.isLocalVariableDef(aAst));
}
},
/**
* <pre>
* @exception
* </pre>.
*/
EXCEPTION("@exception", "exception", Type.BLOCK, false, false)
{
/** {@inheritDoc} */
@Override
public boolean isValidOn(final DetailAST aAst)
{
final int type = aAst.getType();
return type == TokenTypes.METHOD_DEF || type == TokenTypes.CTOR_DEF;
}
},
/**
* <pre>
* @inheritDoc}
* </pre>.
*/
INHERIT_DOC("{@inheritDoc}", "inheritDoc", Type.INLINE, false, false)
{
/** {@inheritDoc} */
@Override
public boolean isValidOn(final DetailAST aAst)
{
final int type = aAst.getType();
return type == TokenTypes.METHOD_DEF
&& !aAst.branchContains(TokenTypes.LITERAL_STATIC)
&& ScopeUtils.getScopeFromMods(aAst
.findFirstToken(TokenTypes.MODIFIERS)) != Scope.PRIVATE;
}
},
/**
* <pre>
* @link}
* </pre>.
*/
LINK("{@link}", "link", Type.INLINE, true, true)
{
/** {@inheritDoc} */
@Override
public boolean isValidOn(final DetailAST aAst)
{
final int type = aAst.getType();
return type == TokenTypes.PACKAGE_DEF
|| type == TokenTypes.CLASS_DEF
|| type == TokenTypes.INTERFACE_DEF
|| type == TokenTypes.ENUM_DEF
|| type == TokenTypes.ANNOTATION_DEF
|| type == TokenTypes.METHOD_DEF || type == TokenTypes.CTOR_DEF
|| (type == TokenTypes.VARIABLE_DEF
&& !ScopeUtils.isLocalVariableDef(aAst));
}
},
/**
* <pre>
* @linkplain}
* </pre>.
*/
LINKPLAIN("{@linkplain}", "linkplain", Type.INLINE, true, true)
{
/** {@inheritDoc} */
@Override
public boolean isValidOn(final DetailAST aAst)
{
final int type = aAst.getType();
return type == TokenTypes.PACKAGE_DEF
|| type == TokenTypes.CLASS_DEF
|| type == TokenTypes.INTERFACE_DEF
|| type == TokenTypes.ENUM_DEF
|| type == TokenTypes.ANNOTATION_DEF
|| type == TokenTypes.METHOD_DEF || type == TokenTypes.CTOR_DEF
|| (type == TokenTypes.VARIABLE_DEF
&& !ScopeUtils.isLocalVariableDef(aAst));
}
},
/**
* <pre>
* @literal}
* </pre>.
*/
LITERAL("{@literal}", "literal", Type.INLINE, true, true)
{
/** {@inheritDoc} */
@Override
public boolean isValidOn(final DetailAST aAst)
{
final int type = aAst.getType();
return type == TokenTypes.PACKAGE_DEF
|| type == TokenTypes.CLASS_DEF
|| type == TokenTypes.INTERFACE_DEF
|| type == TokenTypes.ENUM_DEF
|| type == TokenTypes.ANNOTATION_DEF
|| type == TokenTypes.METHOD_DEF || type == TokenTypes.CTOR_DEF
|| (type == TokenTypes.VARIABLE_DEF
&& !ScopeUtils.isLocalVariableDef(aAst));
}
},
/**
* <pre>
* @param
* </pre>.
*/
PARAM("@param", "param", Type.BLOCK, false, false)
{
/** {@inheritDoc} */
@Override
public boolean isValidOn(final DetailAST aAst)
{
final int type = aAst.getType();
return type == TokenTypes.CLASS_DEF
|| type == TokenTypes.INTERFACE_DEF
|| type == TokenTypes.METHOD_DEF
|| type == TokenTypes.CTOR_DEF;
}
},
/**
* <pre>
* @return
* </pre>.
*/
RETURN("@return", "return", Type.BLOCK, false, false)
{
/** {@inheritDoc} */
@Override
public boolean isValidOn(final DetailAST aAst)
{
final int type = aAst.getType();
final DetailAST returnType = aAst.findFirstToken(TokenTypes.TYPE);
return type == TokenTypes.METHOD_DEF
&& returnType.getFirstChild().getType()
!= TokenTypes.LITERAL_VOID;
}
},
/**
* <pre>
* @see
* </pre>.
*/
SEE("@see", "see", Type.BLOCK, true, true)
{
/** {@inheritDoc} */
@Override
public boolean isValidOn(final DetailAST aAst)
{
final int type = aAst.getType();
return type == TokenTypes.PACKAGE_DEF
|| type == TokenTypes.CLASS_DEF
|| type == TokenTypes.INTERFACE_DEF
|| type == TokenTypes.ENUM_DEF
|| type == TokenTypes.ANNOTATION_DEF
|| type == TokenTypes.METHOD_DEF
|| type == TokenTypes.CTOR_DEF
|| (type == TokenTypes.VARIABLE_DEF
&& !ScopeUtils.isLocalVariableDef(aAst));
}
},
/**
* <pre>
* @serial
* </pre>.
*/
SERIAL("@serial", "serial", Type.BLOCK, true, false)
{
/** {@inheritDoc} */
@Override
public boolean isValidOn(final DetailAST aAst)
{
final int type = aAst.getType();
return type == TokenTypes.VARIABLE_DEF
&& !ScopeUtils.isLocalVariableDef(aAst);
}
},
/**
* <pre>
* @serialData
* </pre>.
*/
SERIAL_DATA("@serialData", "serialData", Type.BLOCK, false, false)
{
/** {@inheritDoc} */
@Override
public boolean isValidOn(final DetailAST aAst)
{
final int type = aAst.getType();
final DetailAST methodNameAst = aAst
.findFirstToken(TokenTypes.IDENT);
final String methodName = methodNameAst.getText();
return type == TokenTypes.METHOD_DEF
&& ("writeObject".equals(methodName)
|| "readObject".equals(methodName)
|| "writeExternal".equals(methodName)
|| "readExternal".equals(methodName)
|| "writeReplace".equals(methodName)
|| "readResolve"
.equals(methodName));
}
},
/**
* <pre>
* @serialField
* </pre>.
*/
SERIAL_FIELD("@serialField", "serialField", Type.BLOCK, false, false)
{
/** {@inheritDoc} */
@Override
public boolean isValidOn(final DetailAST aAst)
{
final int type = aAst.getType();
final DetailAST varType = aAst.findFirstToken(TokenTypes.TYPE);
return type == TokenTypes.VARIABLE_DEF
&& varType.getType() == TokenTypes.ARRAY_DECLARATOR
&& "ObjectStreamField"
.equals(varType.getFirstChild().getText());
}
},
/**
 * The {@code @since} block tag.
 */
SINCE("@since", "since", Type.BLOCK, true, true)
{
    /** {@inheritDoc} */
    @Override
    public boolean isValidOn(final DetailAST aAst)
    {
        final int type = aAst.getType();
        // Valid on packages, all type definitions, methods, constructors
        // and non-local variable (field) definitions.
        return type == TokenTypes.PACKAGE_DEF
            || type == TokenTypes.CLASS_DEF
            || type == TokenTypes.INTERFACE_DEF
            || type == TokenTypes.ENUM_DEF
            || type == TokenTypes.ANNOTATION_DEF
            || type == TokenTypes.METHOD_DEF
            || type == TokenTypes.CTOR_DEF
            || (type == TokenTypes.VARIABLE_DEF
                && !ScopeUtils.isLocalVariableDef(aAst));
    }
},
/**
 * The {@code @throws} block tag.
 */
THROWS("@throws", "throws", Type.BLOCK, false, false)
{
    /** {@inheritDoc} */
    @Override
    public boolean isValidOn(final DetailAST aAst)
    {
        final int type = aAst.getType();
        // Only methods and constructors can declare thrown exceptions.
        return type == TokenTypes.METHOD_DEF
            || type == TokenTypes.CTOR_DEF;
    }
},
/**
 * The {@code @value} inline tag.
 */
VALUE("{@value}", "value", Type.INLINE, true, true)
{
    /** {@inheritDoc} */
    @Override
    public boolean isValidOn(final DetailAST aAst)
    {
        final int type = aAst.getType();
        // Valid on packages, all type definitions, methods, constructors
        // and non-local variable (field) definitions.
        return type == TokenTypes.PACKAGE_DEF
            || type == TokenTypes.CLASS_DEF
            || type == TokenTypes.INTERFACE_DEF
            || type == TokenTypes.ENUM_DEF
            || type == TokenTypes.ANNOTATION_DEF
            || type == TokenTypes.METHOD_DEF
            || type == TokenTypes.CTOR_DEF
            || (type == TokenTypes.VARIABLE_DEF
                && !ScopeUtils.isLocalVariableDef(aAst));
    }
},
/**
 * The {@code @version} block tag.
 */
VERSION("@version", "version", Type.BLOCK, true, true)
{
    /** {@inheritDoc} */
    @Override
    public boolean isValidOn(final DetailAST aAst)
    {
        final int type = aAst.getType();
        // Valid on packages and all type definitions only.
        return type == TokenTypes.PACKAGE_DEF
            || type == TokenTypes.CLASS_DEF
            || type == TokenTypes.INTERFACE_DEF
            || type == TokenTypes.ENUM_DEF
            || type == TokenTypes.ANNOTATION_DEF;
    }
};
/** Immutable mapping from tag text (e.g. "@see") to its enum constant. */
private static final Map<String, JavadocTagInfo> TEXT_TO_TAG;
/** Immutable mapping from tag name (e.g. "see") to its enum constant. */
private static final Map<String, JavadocTagInfo> NAME_TO_TAG;
static
{
    final ImmutableMap.Builder<String, JavadocTagInfo> textToTagBuilder =
        new ImmutableMap.Builder<String, JavadocTagInfo>();
    final ImmutableMap.Builder<String, JavadocTagInfo> nameToTagBuilder =
        new ImmutableMap.Builder<String, JavadocTagInfo>();
    // Index every constant by both its text and its name so that the
    // fromText/fromName factory methods can do constant-time lookups.
    for (final JavadocTagInfo tag : JavadocTagInfo.values()) {
        textToTagBuilder.put(tag.getText(), tag);
        nameToTagBuilder.put(tag.getName(), tag);
    }
    TEXT_TO_TAG = textToTagBuilder.build();
    NAME_TO_TAG = nameToTagBuilder.build();
}
/** The literal tag text, including punctuation (e.g. "@see"). */
private final String mText;
/** The bare tag name without punctuation (e.g. "see"). */
private final String mName;
/** Whether the tag is a block tag or an inline tag. */
private final Type mType;
/** Whether the tag may appear in a package.html file. */
private final boolean mValidInPackageHtml;
/** Whether the tag may appear in an overview.html file. */
private final boolean mValidInOverviewHtml;
/**
 * Sets the various properties of a Javadoc tag.
 *
 * @param aText the tag text, including punctuation
 * @param aName the bare tag name
 * @param aType the type (block or inline) of the tag
 * @param aValidInPackageHtml whether the tag is valid
 * in a package.html file
 * @param aValidInOverviewHtml whether the tag is valid
 * in an overview.html file
 */
private JavadocTagInfo(final String aText, final String aName,
    final Type aType, final boolean aValidInPackageHtml,
    final boolean aValidInOverviewHtml)
{
    this.mText = aText;
    this.mName = aName;
    this.mType = aType;
    this.mValidInPackageHtml = aValidInPackageHtml;
    this.mValidInOverviewHtml = aValidInOverviewHtml;
}
/**
 * Checks if a particular Javadoc tag is valid within a Javadoc block of a
 * given AST.
 *
 * <p>
 * For example: given a call to
 * <code>JavadocTag.RETURN.{@link #isValidOn(DetailAST)}</code>,
 * if passing in a DetailAST representing a non-void METHOD_DEF
 * <code>true</code> would be returned. If passing in a DetailAST
 * representing a CLASS_DEF <code>false</code> would be returned because
 * CLASS_DEFs cannot return a value.
 * </p>
 *
 * @param aAST the AST representing a construct that can be Javadoc'd
 * @return true if the tag is valid on that construct.
 */
public abstract boolean isValidOn(DetailAST aAST);
/**
 * Reports whether this tag may appear in a package.html Javadoc file.
 *
 * @return true if the tag is valid there.
 */
public boolean isValidInPackageHtml()
{
    return mValidInPackageHtml;
}
/**
 * Reports whether this tag may appear in an overview.html Javadoc file.
 *
 * @return true if the tag is valid there.
 */
public boolean isValidInOverviewHtml()
{
    return mValidInOverviewHtml;
}
/** @return the literal tag text, including punctuation. */
public String getText()
{
    return mText;
}
/** @return the bare tag name, without punctuation. */
public String getName()
{
    return mName;
}
/** @return the tag type as defined by {@link JavadocTagInfo.Type Type}. */
public Type getType()
{
    return mType;
}
/**
 * Looks up the {@link JavadocTagInfo} constant matching a tag's text.
 *
 * @param aText the tag text, including punctuation (e.g. "@see")
 * @return the matching constant
 * @throws NullPointerException if aText is null
 * @throws IllegalArgumentException if aText is not a known tag text
 */
public static JavadocTagInfo fromText(final String aText)
{
    if (aText == null) {
        throw new NullPointerException("the text is null");
    }
    final JavadocTagInfo found = TEXT_TO_TAG.get(aText);
    if (found == null) {
        throw new IllegalArgumentException("the text [" + aText
            + "] is not a valid Javadoc tag text");
    }
    return found;
}
/**
 * Looks up the {@link JavadocTagInfo} constant matching a tag's name.
 *
 * @param aName the bare tag name (e.g. "see")
 * @return the matching constant
 * @throws NullPointerException if aName is null
 * @throws IllegalArgumentException if aName is not a known tag name
 */
public static JavadocTagInfo fromName(final String aName)
{
    if (aName == null) {
        throw new NullPointerException("the name is null");
    }
    final JavadocTagInfo found = NAME_TO_TAG.get(aName);
    if (found == null) {
        throw new IllegalArgumentException("the name [" + aName
            + "] is not a valid Javadoc tag name");
    }
    return found;
}
/**
 * {@inheritDoc}
 */
@Override
public String toString()
{
    // Same output as simple concatenation, built piecewise.
    final StringBuilder sb = new StringBuilder();
    sb.append("text [").append(mText);
    sb.append("] name [").append(mName);
    sb.append("] type [").append(mType);
    sb.append("] validInPackageHtml [").append(mValidInPackageHtml);
    sb.append("] validInOverviewHtml [").append(mValidInOverviewHtml);
    sb.append("]");
    return sb.toString();
}
/**
 * The position a Javadoc tag may occupy.
 *
 * <p>For example the {@code @param} tag is a block tag while the
 * {@code @link} tag is an inline tag.</p>
 *
 * @author Travis Schneeberger
 */
public enum Type
{
    /** A stand-alone block tag, e.g. {@code @param}. */
    BLOCK,
    /** A tag embedded inline in description text, e.g. {@code @link}. */
    INLINE;
}
}
|
patch #2782630 - fixing javadoc errors that were corrected in patch provided by Martin von Gagern
|
src/checkstyle/com/puppycrawl/tools/checkstyle/api/JavadocTagInfo.java
|
patch #2782630 - fixing javadoc errors that were corrected in patch provided by Martin von Gagern
|
|
Java
|
lgpl-2.1
|
116d1921a3fba98f9df10f70ffde8d5c423b98bb
| 0
|
KengoTODA/spotbugs,spotbugs/spotbugs,spotbugs/spotbugs,spotbugs/spotbugs,sewe/spotbugs,johnscancella/spotbugs,spotbugs/spotbugs,johnscancella/spotbugs,sewe/spotbugs,sewe/spotbugs,johnscancella/spotbugs,johnscancella/spotbugs,KengoTODA/spotbugs,KengoTODA/spotbugs,spotbugs/spotbugs,sewe/spotbugs,KengoTODA/spotbugs
|
/*
* FindBugs Eclipse Plug-in.
* Copyright (C) 2005, University of Maryland
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
package de.tobject.findbugs.io;
import java.io.BufferedOutputStream;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import org.eclipse.core.resources.IFile;
import org.eclipse.core.runtime.CoreException;
import org.eclipse.core.runtime.IProgressMonitor;
import org.eclipse.core.runtime.IStatus;
import de.tobject.findbugs.FindbugsPlugin;
/**
 * Input/output helper methods.
 *
 * @author David Hovemeyer
 */
public abstract class IO {
    /**
     * Write the contents of a file in the Eclipse workspace.
     *
     * @param file the workspace file to write to
     * @param output the FileOutput object responsible for generating the data
     * @param monitor a progress monitor (or null if none)
     * @throws CoreException if generating or storing the data fails
     */
    public static void writeFile(IFile file, final FileOutput output,
            IProgressMonitor monitor) throws CoreException {
        try {
            // Generate all the data in memory first, then hand it to the
            // workspace API as a stream.
            ByteArrayOutputStream generated = new ByteArrayOutputStream();
            output.writeFile(generated);
            ByteArrayInputStream source = new ByteArrayInputStream(generated.toByteArray());
            if (file.exists()) {
                file.setContents(source, true, false, monitor);
            } else {
                file.create(source, true, monitor);
            }
        } catch (IOException e) {
            IStatus status = FindbugsPlugin.createErrorStatus("Exception while "
                    + output.getTaskDescription(), e);
            throw new CoreException(status);
        }
    }

    /**
     * Write the contents of a java.io.File.
     *
     * @param file the file to write to
     * @param output the FileOutput object responsible for generating the data
     * @param monitor a progress monitor (or null if none)
     * @throws CoreException if generating or storing the data fails
     */
    public static void writeFile(final File file, final FileOutput output,
            final IProgressMonitor monitor) throws CoreException {
        FileOutputStream rawOut = null;
        try {
            rawOut = new FileOutputStream(file);
            BufferedOutputStream bufferedOut = new BufferedOutputStream(rawOut);
            if (monitor != null) {
                monitor.subTask("writing data to " + file.getName());
            }
            output.writeFile(bufferedOut);
            // Flush the buffer so everything reaches rawOut before close.
            bufferedOut.flush();
        } catch (IOException e) {
            IStatus status = FindbugsPlugin.createErrorStatus("Exception while "
                    + output.getTaskDescription(), e);
            throw new CoreException(status);
        } finally {
            try {
                if (rawOut != null) {
                    rawOut.close();
                }
            } catch (IOException ignored) {
                // best-effort close; nothing useful to do on failure
            }
        }
    }
}
|
eclipsePlugin/src/de/tobject/findbugs/io/IO.java
|
/*
* FindBugs Eclipse Plug-in.
* Copyright (C) 2005, University of Maryland
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
package de.tobject.findbugs.io;
import java.io.BufferedOutputStream;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import org.eclipse.core.resources.IFile;
import org.eclipse.core.resources.IResource;
import org.eclipse.core.runtime.CoreException;
import org.eclipse.core.runtime.IProgressMonitor;
import org.eclipse.core.runtime.IStatus;
import de.tobject.findbugs.FindbugsPlugin;
/**
 * Input/output helper methods.
 *
 * @author David Hovemeyer
 */
public abstract class IO {
    /**
     * Write the contents of a file in the Eclipse workspace.
     *
     * @param file the file to write to
     * @param output the FileOutput object responsible for generating the data
     * @param monitor a progress monitor (or null if none)
     * @throws CoreException if the data cannot be generated or stored
     */
    public static void writeFile(IFile file, final FileOutput output,
            IProgressMonitor monitor) throws CoreException {
        try {
            // Generate the data fully in memory, then stream it into the
            // workspace resource.
            ByteArrayOutputStream bos = new ByteArrayOutputStream();
            output.writeFile(bos);
            ByteArrayInputStream bis = new ByteArrayInputStream(bos.toByteArray());
            if (!file.exists()) {
                file.create(bis, true, monitor);
            } else {
                file.setContents(bis, true, false, monitor);
            }
            // NOTE(review): create/setContents normally keep the resource in
            // sync already; confirm whether this refresh is actually needed.
            file.refreshLocal(IResource.DEPTH_INFINITE, monitor);
        } catch (IOException e) {
            IStatus status = FindbugsPlugin.createErrorStatus("Exception while "
                    + output.getTaskDescription(), e);
            throw new CoreException(status);
        }
    }
    /**
     * Write the contents of a java.io.File.
     *
     * @param file the file to write to
     * @param output the FileOutput object responsible for generating the data
     * @param monitor a progress monitor (or null if none)
     * @throws CoreException if the data cannot be generated or stored
     */
    public static void writeFile(final File file, final FileOutput output,
            final IProgressMonitor monitor) throws CoreException {
        FileOutputStream fout = null;
        try {
            fout = new FileOutputStream(file);
            BufferedOutputStream bout = new BufferedOutputStream(fout);
            if (monitor != null) {
                monitor.subTask("writing data to " + file.getName());
            }
            output.writeFile(bout);
            // Flush the buffer so all bytes reach fout before it is closed.
            bout.flush();
        } catch (IOException e) {
            IStatus status = FindbugsPlugin.createErrorStatus("Exception while "
                    + output.getTaskDescription(), e);
            throw new CoreException(status);
        } finally {
            try {
                if (fout != null) {
                    fout.close();
                }
            } catch (IOException e) {
                // ignore failures while closing; the write already succeeded
                // or the original exception is being propagated
            }
        }
    }
}
|
Removed unneeded refresh
git-svn-id: e7d6bde23f017c9ff4efd468d79d66def666766b@10767 eae3c2d3-9b19-0410-a86e-396b6ccb6ab3
|
eclipsePlugin/src/de/tobject/findbugs/io/IO.java
|
Removed unneeded refresh
|
|
Java
|
apache-2.0
|
75d9d24345ff0e7325b98628c1a6ecbcb757fcd5
| 0
|
alibaba/nacos,alibaba/nacos,alibaba/nacos,alibaba/nacos
|
/*
* Copyright 1999-2018 Alibaba Group Holding Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.alibaba.nacos.naming.misc;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.TypeReference;
import com.alibaba.fastjson.parser.ParserConfig;
import com.alibaba.fastjson.serializer.SerializeConfig;
import com.alibaba.fastjson.serializer.SerializerFeature;
import com.alibaba.nacos.api.naming.pojo.AbstractHealthChecker;
import com.alibaba.nacos.naming.core.Domain;
import com.alibaba.nacos.naming.exception.NacosException;
import com.alibaba.nacos.naming.healthcheck.JsonAdapter;
import org.apache.commons.lang3.StringUtils;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.*;
/**
 * Naming-module wide constants and small utility helpers.
 *
 * @author nacos
 */
public class UtilsAndCommons {

    public static final String NACOS_SERVER_CONTEXT = "/nacos";

    public static final String NACOS_SERVER_VERSION = "/v1";

    public static final String DEFAULT_NACOS_NAMING_CONTEXT = NACOS_SERVER_VERSION + "/ns";

    public static final String NACOS_NAMING_CONTEXT = DEFAULT_NACOS_NAMING_CONTEXT;

    public static final String NACOS_NAMING_CATALOG_CONTEXT = "/catalog";

    public static final String NACOS_NAMING_INSTANCE_CONTEXT = "/instance";

    public static final String NACOS_NAMING_RAFT_CONTEXT = "/raft";

    public static final String NACOS_SERVER_HEADER = "Nacos-Server";

    public static final String NACOS_VERSION = "1.0";

    public static final String SUPER_TOKEN = "xy";

    public static final String DOMAINS_DATA_ID = "com.alibaba.nacos.naming.domains.meta";

    public static final String IPADDRESS_DATA_ID_PRE = "com.alibaba.nacos.naming.iplist.";

    public static final String NODE_TAG_IP_PRE = "com.alibaba.nacos.naming.tag.iplist.";

    public static final String TAG_DOMAINS_DATA_ID = "com.alibaba.nacos.naming.domains.tag.meta";

    public static final String CIDR_REGEX = "[0-9]{1,3}\\.[0-9]{1,3}\\.[0-9]{1,3}\\.[0-9]{1,3}/[0-9]+";

    public static final String UNKNOWN_SITE = "unknown";

    public static final String UNKNOWN_HOST = "unknown";

    public static final String DEFAULT_CLUSTER_NAME = "DEFAULT";

    public static final int RAFT_PUBLISH_TIMEOUT = 5000;

    public static final String RAFT_DOM_PRE = "meta";

    public static final String RAFT_IPLIST_PRE = "iplist.";

    public static final String RAFT_TAG_DOM_PRE = "tag.meta";

    public static final String RAFT_TAG_IPLIST_PRE = "tag.iplist.";

    public static final String SERVER_VERSION = NACOS_SERVER_HEADER + ":" + NACOS_VERSION;

    public static final String SELF_SERVICE_CLUSTER_ENV = "naming_self_service_cluster_ips";

    public static final String CACHE_KEY_SPLITER = "@@@@";

    public static final String LOCAL_HOST_IP = "127.0.0.1";

    public static final String CLUSTER_CONF_IP_SPLITER = ":";

    public static final int MAX_PUBLISH_WAIT_TIME_MILLIS = 5000;

    public static final String VERSION_STRING_SYNTAX = "[0-9]+\\.[0-9]+\\.[0-9]+";

    public static final String API_UPDATE_SWITCH = "/api/updateSwitch";

    public static final String API_SET_ALL_WEIGHTS = "/api/setWeight4AllIPs";

    public static final String API_DOM_SERVE_STATUS = "/api/domServeStatus";

    public static final String API_IP_FOR_DOM = "/api/ip4Dom";

    public static final String API_DOM = "/api/dom";

    /** System property that switches on persisting the instance list. */
    public static final String INSTANCE_LIST_PERSISTED_PROPERTY_KEY = "nacos.instanceListPersisted";

    public static final boolean INSTANCE_LIST_PERSISTED = Boolean.getBoolean(INSTANCE_LIST_PERSISTED_PROPERTY_KEY);

    public static final ScheduledExecutorService SERVER_STATUS_EXECUTOR;

    public static final ScheduledExecutorService DOMAIN_SYNCHRONIZATION_EXECUTOR;

    public static final ScheduledExecutorService DOMAIN_UPDATE_EXECUTOR;

    public static final ScheduledExecutorService INIT_CONFIG_EXECUTOR;

    public static final Executor RAFT_PUBLISH_EXECUTOR;

    static {
        // custom serializer and deserializer for fast-json
        SerializeConfig.getGlobalInstance()
            .put(AbstractHealthChecker.class, JsonAdapter.getInstance());
        ParserConfig.getGlobalInstance()
            .putDeserializer(AbstractHealthChecker.class, JsonAdapter.getInstance());

        // write null values, otherwise will cause compatibility issues
        JSON.DEFAULT_GENERATE_FEATURE |= SerializerFeature.WriteNullStringAsEmpty.getMask();
        JSON.DEFAULT_GENERATE_FEATURE |= SerializerFeature.WriteNullListAsEmpty.getMask();
        JSON.DEFAULT_GENERATE_FEATURE |= SerializerFeature.WriteNullBooleanAsFalse.getMask();
        JSON.DEFAULT_GENERATE_FEATURE |= SerializerFeature.WriteMapNullValue.getMask();
        JSON.DEFAULT_GENERATE_FEATURE |= SerializerFeature.WriteNullNumberAsZero.getMask();

        // All worker pools use single-threaded, daemon, named threads so they
        // never block JVM shutdown and are easy to spot in thread dumps.
        DOMAIN_SYNCHRONIZATION_EXECUTOR = new ScheduledThreadPoolExecutor(1, new ThreadFactory() {
            @Override
            public Thread newThread(Runnable task) {
                Thread worker = new Thread(task);
                worker.setName("nacos.naming.domains.worker");
                worker.setDaemon(true);
                return worker;
            }
        });

        DOMAIN_UPDATE_EXECUTOR = new ScheduledThreadPoolExecutor(1, new ThreadFactory() {
            @Override
            public Thread newThread(Runnable task) {
                Thread worker = new Thread(task);
                worker.setName("nacos.naming.domains.update.processor");
                worker.setDaemon(true);
                return worker;
            }
        });

        INIT_CONFIG_EXECUTOR = new ScheduledThreadPoolExecutor(1, new ThreadFactory() {
            @Override
            public Thread newThread(Runnable task) {
                Thread worker = new Thread(task);
                worker.setName("nacos.naming.init.config.worker");
                worker.setDaemon(true);
                return worker;
            }
        });

        SERVER_STATUS_EXECUTOR = new ScheduledThreadPoolExecutor(1, new ThreadFactory() {
            @Override
            public Thread newThread(Runnable task) {
                Thread worker = new Thread(task);
                worker.setName("nacos.naming.status.worker");
                worker.setDaemon(true);
                return worker;
            }
        });

        RAFT_PUBLISH_EXECUTOR = Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors(), new ThreadFactory() {
            @Override
            public Thread newThread(Runnable task) {
                Thread worker = new Thread(task);
                worker.setName("nacos.naming.raft.publisher");
                worker.setDaemon(true);
                return worker;
            }
        });
    }

    /**
     * Concatenates the messages of a throwable and all of its causes.
     *
     * @param e the root throwable
     * @return all non-empty cause messages, each prefixed with "caused: "
     */
    public static String getAllExceptionMsg(Throwable e) {
        StringBuilder messages = new StringBuilder();
        for (Throwable cause = e; cause != null && !StringUtils.isEmpty(cause.getMessage()); cause = cause.getCause()) {
            messages.append("caused: ").append(cause.getMessage()).append(";");
        }
        return messages.toString();
    }

    /**
     * @param dom the domain
     * @return the storage key of the domain's IP list
     */
    public static String getIPListStoreKey(Domain dom) {
        return IPADDRESS_DATA_ID_PRE + dom.getName();
    }

    /**
     * @param dom the domain
     * @return the storage key of the domain's metadata
     */
    public static String getDomStoreKey(Domain dom) {
        return DOMAINS_DATA_ID + "." + dom.getName();
    }

    /**
     * Parses instance metadata, first as a JSON object and, failing that, as
     * the legacy "k1=v1,k2=v2" format.
     *
     * @param metadata the raw metadata string (may be blank)
     * @return the parsed key/value pairs; empty when the input is blank
     * @throws NacosException if a legacy-format entry is not a single k=v pair
     */
    public static Map<String, String> parseMetadata(String metadata) throws NacosException {
        Map<String, String> result = new HashMap<>(16);

        if (StringUtils.isBlank(metadata)) {
            return result;
        }

        try {
            result = JSON.parseObject(metadata, new TypeReference<Map<String, String>>() {
            });
        } catch (Exception e) {
            // Not JSON — fall back to the comma-separated k=v format.
            for (String entry : metadata.split(",")) {
                String[] pair = entry.split("=");
                if (pair.length != 2) {
                    throw new NacosException(NacosException.INVALID_PARAM, "metadata format incorrect:" + metadata);
                }
                result.put(pair[0], pair[1]);
            }
        }

        return result;
    }
}
|
naming/src/main/java/com/alibaba/nacos/naming/misc/UtilsAndCommons.java
|
/*
* Copyright 1999-2018 Alibaba Group Holding Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.alibaba.nacos.naming.misc;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.TypeReference;
import com.alibaba.fastjson.parser.ParserConfig;
import com.alibaba.fastjson.serializer.SerializeConfig;
import com.alibaba.fastjson.serializer.SerializerFeature;
import com.alibaba.nacos.api.naming.pojo.AbstractHealthChecker;
import com.alibaba.nacos.naming.core.Domain;
import com.alibaba.nacos.naming.exception.NacosException;
import com.alibaba.nacos.naming.healthcheck.JsonAdapter;
import org.apache.commons.lang3.StringUtils;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.*;
/**
 * Naming-module wide constants and small utility helpers.
 *
 * @author nacos
 */
public class UtilsAndCommons {
    public static final String NACOS_SERVER_CONTEXT = "/nacos";
    public static final String NACOS_SERVER_VERSION = "/v1";
    public static final String DEFAULT_NACOS_NAMING_CONTEXT = NACOS_SERVER_VERSION + "/ns";
    public static final String NACOS_NAMING_CONTEXT = DEFAULT_NACOS_NAMING_CONTEXT;
    public static final String NACOS_NAMING_CATALOG_CONTEXT = "/catalog";
    public static final String NACOS_NAMING_INSTANCE_CONTEXT = "/instance";
    public static final String NACOS_NAMING_RAFT_CONTEXT = "/raft";
    public static final String NACOS_SERVER_HEADER = "Nacos-Server";
    public static final String NACOS_VERSION = "1.0";
    public static final String SUPER_TOKEN = "xy";
    public static final String DOMAINS_DATA_ID = "com.alibaba.nacos.naming.domains.meta";
    public static final String IPADDRESS_DATA_ID_PRE = "com.alibaba.nacos.naming.iplist.";
    static public final String NODE_TAG_IP_PRE = "com.alibaba.nacos.naming.tag.iplist.";
    public static final String TAG_DOMAINS_DATA_ID = "com.alibaba.nacos.naming.domains.tag.meta";
    static public final String CIDR_REGEX = "[0-9]{1,3}\\.[0-9]{1,3}\\.[0-9]{1,3}\\.[0-9]{1,3}/[0-9]+";
    static public final String UNKNOWN_SITE = "unknown";
    static public final String UNKNOWN_HOST = "unknown";
    public static final String DEFAULT_CLUSTER_NAME = "DEFAULT";
    public static final int RAFT_PUBLISH_TIMEOUT = 5000;
    static public final String RAFT_DOM_PRE = "meta";
    static public final String RAFT_IPLIST_PRE = "iplist.";
    static public final String RAFT_TAG_DOM_PRE = "tag.meta";
    static public final String RAFT_TAG_IPLIST_PRE = "tag.iplist.";
    public static final String SERVER_VERSION = NACOS_SERVER_HEADER + ":" + NACOS_VERSION;
    public static final String SELF_SERVICE_CLUSTER_ENV = "naming_self_service_cluster_ips";
    public static final String CACHE_KEY_SPLITER = "@@@@";
    public static final String LOCAL_HOST_IP = "127.0.0.1";
    public static final String CLUSTER_CONF_IP_SPLITER = ":";
    public static final int MAX_PUBLISH_WAIT_TIME_MILLIS = 5000;
    public static final String VERSION_STRING_SYNTAX = "[0-9]+\\.[0-9]+\\.[0-9]+";
    public static final String API_UPDATE_SWITCH = "/api/updateSwitch";
    public static final String API_SET_ALL_WEIGHTS = "/api/setWeight4AllIPs";
    public static final String API_DOM_SERVE_STATUS = "/api/domServeStatus";
    public static final String API_IP_FOR_DOM = "/api/ip4Dom";
    public static final String API_DOM = "/api/dom";
    /** System property that switches on persisting the instance list. */
    public static final String INSTANCE_LIST_PERSISTED_PROPERTY_KEY = "NacosNamingInstanceListPersisted";
    public static final boolean INSTANCE_LIST_PERSISTED = Boolean.getBoolean(INSTANCE_LIST_PERSISTED_PROPERTY_KEY);
    public static final ScheduledExecutorService SERVER_STATUS_EXECUTOR;
    public static final ScheduledExecutorService DOMAIN_SYNCHRONIZATION_EXECUTOR;
    public static final ScheduledExecutorService DOMAIN_UPDATE_EXECUTOR;
    public static final ScheduledExecutorService INIT_CONFIG_EXECUTOR;
    public static final Executor RAFT_PUBLISH_EXECUTOR;
    static {
        // custom serializer and deserializer for fast-json
        SerializeConfig.getGlobalInstance()
            .put(AbstractHealthChecker.class, JsonAdapter.getInstance());
        ParserConfig.getGlobalInstance()
            .putDeserializer(AbstractHealthChecker.class, JsonAdapter.getInstance());
        // write null values, otherwise will cause compatibility issues
        JSON.DEFAULT_GENERATE_FEATURE |= SerializerFeature.WriteNullStringAsEmpty.getMask();
        JSON.DEFAULT_GENERATE_FEATURE |= SerializerFeature.WriteNullListAsEmpty.getMask();
        JSON.DEFAULT_GENERATE_FEATURE |= SerializerFeature.WriteNullBooleanAsFalse.getMask();
        JSON.DEFAULT_GENERATE_FEATURE |= SerializerFeature.WriteMapNullValue.getMask();
        JSON.DEFAULT_GENERATE_FEATURE |= SerializerFeature.WriteNullNumberAsZero.getMask();
        // Each pool uses a single named daemon thread so it never blocks
        // JVM shutdown and is easy to identify in thread dumps.
        DOMAIN_SYNCHRONIZATION_EXECUTOR
            = new ScheduledThreadPoolExecutor(1, new ThreadFactory() {
            @Override
            public Thread newThread(Runnable r) {
                Thread t = new Thread(r);
                t.setName("nacos.naming.domains.worker");
                t.setDaemon(true);
                return t;
            }
        });
        DOMAIN_UPDATE_EXECUTOR
            = new ScheduledThreadPoolExecutor(1, new ThreadFactory() {
            @Override
            public Thread newThread(Runnable r) {
                Thread t = new Thread(r);
                t.setName("nacos.naming.domains.update.processor");
                t.setDaemon(true);
                return t;
            }
        });
        INIT_CONFIG_EXECUTOR
            = new ScheduledThreadPoolExecutor(1, new ThreadFactory() {
            @Override
            public Thread newThread(Runnable r) {
                Thread t = new Thread(r);
                t.setName("nacos.naming.init.config.worker");
                t.setDaemon(true);
                return t;
            }
        });
        SERVER_STATUS_EXECUTOR
            = new ScheduledThreadPoolExecutor(1, new ThreadFactory() {
            @Override
            public Thread newThread(Runnable r) {
                Thread t = new Thread(r);
                t.setName("nacos.naming.status.worker");
                t.setDaemon(true);
                return t;
            }
        });
        RAFT_PUBLISH_EXECUTOR
            = Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors(), new ThreadFactory() {
            @Override
            public Thread newThread(Runnable r) {
                Thread t = new Thread(r);
                t.setName("nacos.naming.raft.publisher");
                t.setDaemon(true);
                return t;
            }
        });
    }
    /**
     * Concatenates the messages of a throwable and all of its causes.
     *
     * @param e the root throwable
     * @return all non-empty cause messages, each prefixed with "caused: "
     */
    public static String getAllExceptionMsg(Throwable e) {
        Throwable cause = e;
        StringBuilder strBuilder = new StringBuilder();
        while (cause != null && !StringUtils.isEmpty(cause.getMessage())) {
            strBuilder.append("caused: ").append(cause.getMessage()).append(";");
            cause = cause.getCause();
        }
        return strBuilder.toString();
    }
    /**
     * @param dom the domain
     * @return the storage key of the domain's IP list
     */
    public static String getIPListStoreKey(Domain dom) {
        return UtilsAndCommons.IPADDRESS_DATA_ID_PRE + dom.getName();
    }
    /**
     * @param dom the domain
     * @return the storage key of the domain's metadata
     */
    public static String getDomStoreKey(Domain dom) {
        return UtilsAndCommons.DOMAINS_DATA_ID + "." + dom.getName();
    }
    /**
     * Parses instance metadata, first as a JSON object and, failing that,
     * as the legacy "k1=v1,k2=v2" format.
     *
     * @param metadata the raw metadata string (may be blank)
     * @return the parsed key/value pairs; empty when the input is blank
     * @throws NacosException if a legacy-format entry is not a single k=v pair
     */
    public static Map<String, String> parseMetadata(String metadata) throws NacosException {
        Map<String, String> metadataMap = new HashMap<>(16);
        if (StringUtils.isBlank(metadata)) {
            return metadataMap;
        }
        try {
            metadataMap = JSON.parseObject(metadata, new TypeReference<Map<String, String>>(){});
        } catch (Exception e) {
            // Not JSON — fall back to the comma-separated k=v format.
            String[] datas = metadata.split(",");
            if (datas.length > 0) {
                for (String data : datas) {
                    String[] kv = data.split("=");
                    if (kv.length != 2) {
                        throw new NacosException(NacosException.INVALID_PARAM, "metadata format incorrect:" + metadata);
                    }
                    metadataMap.put(kv[0], kv[1]);
                }
            }
        }
        return metadataMap;
    }
}
|
#369 Update property name
|
naming/src/main/java/com/alibaba/nacos/naming/misc/UtilsAndCommons.java
|
#369 Update property name
|
|
Java
|
apache-2.0
|
412adecb0fa6745a4bfa39205f7d21cf32164f4c
| 0
|
Tycheo/coffeemud,bozimmerman/CoffeeMud,oriontribunal/CoffeeMud,bozimmerman/CoffeeMud,oriontribunal/CoffeeMud,sfunk1x/CoffeeMud,oriontribunal/CoffeeMud,sfunk1x/CoffeeMud,oriontribunal/CoffeeMud,MaxRau/CoffeeMud,Tycheo/coffeemud,bozimmerman/CoffeeMud,Tycheo/coffeemud,MaxRau/CoffeeMud,MaxRau/CoffeeMud,sfunk1x/CoffeeMud,MaxRau/CoffeeMud,bozimmerman/CoffeeMud,sfunk1x/CoffeeMud,Tycheo/coffeemud
|
package com.planet_ink.coffee_mud.core.collections;
import java.util.Arrays;
import java.util.Collection;
import java.util.Comparator;
import java.util.Enumeration;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
import java.util.TreeSet;
import java.util.Vector;
/*
Copyright 2012-2013 Bo Zimmerman
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
public class PrioritizingLimitedMap<T extends Comparable<T>, K> implements Map<T, K>
{
/** Soft cap on the number of entries kept in the map. */
protected int itemLimit;
/** Entries untouched for longer than this (ms) become eligible for eviction. */
protected final long touchAgeLimitMillis;
/** Entries older than this (ms) are evicted regardless of touches. */
protected final long maxAgeLimitMillis;
/** Touch count at which the item limit expands to accommodate hot entries. */
protected int threshHoldToExpand;
/**
 * A map entry that also participates in a doubly linked list ordered by
 * access priority (head = most fetched).  {@code first} is the key and
 * {@code second} is the value, inherited from {@code Pair}.
 */
private class LinkedEntry<V,W> extends Pair<V,W>
{
    /** Next (lower-priority) entry, or null at the tail. */
    public volatile LinkedEntry<V,W> next=null;
    /** Previous (higher-priority) entry, or null at the head. */
    public volatile LinkedEntry<V,W> prev=null;
    /** Number of times this entry has been fetched. */
    public volatile int priority=0;
    /** Positional index, swapped as entries bubble toward the head. */
    public volatile int index=0;
    /** Timestamp (ms) of the most recent fetch. */
    public volatile long lastTouch=System.currentTimeMillis();
    /** Timestamp (ms) at which this entry was created. */
    public final long birthDate=System.currentTimeMillis();
    public LinkedEntry(V frst, W scnd)
    {
        super(frst, scnd);
    }
}
/** Highest-priority entry (start of the linked list), or null when empty. */
protected volatile LinkedEntry<T,K> head=null;
/** Lowest-priority entry (end of the linked list), or null when empty. */
protected volatile LinkedEntry<T,K> tail=null;
/** Key-ordered backing store; each value doubles as a linked-list node. */
protected final TreeMap<T,LinkedEntry<T,K>> map=new TreeMap<T,LinkedEntry<T,K>>();
/**
 * Constructs a "limit" tree-based map.
 * All the parameters here are somewhat fuzzy.  The itemLimit will be
 * ignored if lots of youngsters come flooding in.  If the itemLimit is
 * exceeded by more than 2* the touch/max age, those ages are multiplied
 * to clean out older and older entries so the itemLimit is at least
 * approached.  The threshold to expand is the only hard limit.
 *
 * @param itemLimit the number of items to try to limit this map to
 *  (values below 1 are clamped to 1)
 * @param touchAgeLimitMillis the age of last-touching that makes an item
 *  too old to keep (clamped to Integer.MAX_VALUE)
 * @param maxAgeLimitMillis the longest any entry may live regardless of
 *  touching (clamped to Integer.MAX_VALUE)
 * @param threshHoldToExpand the number of touches on any given item
 *  before the limit expands to accommodate
 */
public PrioritizingLimitedMap(int itemLimit, long touchAgeLimitMillis, long maxAgeLimitMillis, int threshHoldToExpand)
{
    this.itemLimit = Math.max(1, itemLimit);
    this.touchAgeLimitMillis = Math.min(touchAgeLimitMillis, Integer.MAX_VALUE);
    this.maxAgeLimitMillis = Math.min(maxAgeLimitMillis, Integer.MAX_VALUE);
    this.threshHoldToExpand = threshHoldToExpand;
}
/**
 * Constructs a "limit" tree-based map with no touch-count threshold,
 * i.e. the item limit never expands.
 * All the parameters here are somewhat fuzzy. The itemLimit will be ignored if lots of youngsters
 * come flooding in. If the itemLimit is exceeded by more than 2* the touch/max age, then those
 * ages will be similarly multiplied to clean out older and older entries in order to at least
 * approach the itemLimit standards.
 * @param itemLimit the number of items to try to limit this map to
 * @param touchAgeLimitMillis the age of last-touching that makes an item too old to keep
 * @param maxAgeLimitMillis the longest amount of time any entry is allowed to live, regardless of touching
 */
public PrioritizingLimitedMap(int itemLimit, long touchAgeLimitMillis, long maxAgeLimitMillis)
{
    this(itemLimit,touchAgeLimitMillis,maxAgeLimitMillis,Integer.MAX_VALUE);
}
/**
 * Returns the value mapped to the given key, or null if absent.
 * A hit counts as a "touch": it bumps the entry's priority (moving it
 * toward the head of the priority list) and then runs a trimming pass.
 * @param key the key to look up
 * @return the mapped value, or null if the key is not present
 */
@Override
public K get(Object key)
{
LinkedEntry<T,K> p=map.get(key);
if(p!=null)
{
markFoundAgain(p);
trimDeadwood(1);
return p.second;
}
return null;
}
/**
 * Empties the map and resets the priority linked list.
 * head/tail are cleared before the backing map so unsynchronized
 * readers of the list see it emptied first.
 */
@Override
public synchronized void clear()
{
head=null;
tail=null;
map.clear();
}
/** Delegates to the backing TreeMap; does not count as a "touch". */
@Override
public boolean containsKey(Object arg0) { return map.containsKey(arg0); }
/**
 * Returns an enumeration of the keys in priority order (highest first).
 * The enumeration walks the live linked list starting from the current
 * head; it is not a snapshot, so concurrent modification may affect it.
 * @return keys ordered from highest to lowest priority
 */
public Enumeration<T> prioritizedKeys()
{
	return new Enumeration<T>()
	{
		// cursor into the live linked list, starting at the head
		private LinkedEntry<T,K> cursor = head;

		@Override
		public boolean hasMoreElements()
		{
			return cursor != null;
		}

		@Override
		public T nextElement()
		{
			if(cursor == null)
				return null;
			final T key = cursor.first;
			cursor = cursor.next;
			return key;
		}
	};
}
/**
 * Returns true if this map maps at least one key to the given value.
 * Bug fix: this previously compared the stored KEY (entry.first) by
 * reference identity; it now compares the stored VALUE (entry.second)
 * using null-safe equality, per the java.util.Map contract.
 * @param arg0 the value to search for (may be null)
 * @return true if some entry's value equals arg0
 */
@Override
public synchronized boolean containsValue(Object arg0) {
	for(LinkedEntry<T,K> p : map.values())
		if((arg0 == null) ? (p.second == null) : arg0.equals(p.second))
			return true;
	return false;
}
/**
 * Returns a snapshot set of the current key/value pairs.
 * NOTE(review): the snapshot is a TreeSet of Pair objects, so Pair must
 * be Comparable or the TreeSet will throw -- confirm against Pair.
 * @return a copied, sorted set of entries
 */
@Override
public synchronized Set<java.util.Map.Entry<T, K>> entrySet() {
	final Set<java.util.Map.Entry<T, K>> snapshot = new TreeSet<java.util.Map.Entry<T, K>>();
	for(final Map.Entry<T, LinkedEntry<T,K>> e : map.entrySet())
		snapshot.add(new Pair<T,K>(e.getKey(), e.getValue().second));
	return snapshot;
}
/** @return true if no entries are present (delegates to the backing TreeMap) */
@Override
public boolean isEmpty() { return map.isEmpty(); }
/** @return the live key set of the backing TreeMap (not a snapshot) */
@Override
public Set<T> keySet() { return map.keySet(); }
/**
 * Records another "touch" on the given entry: bumps its priority and
 * last-touch time, then bubbles the entry toward the head of the linked
 * list past any predecessor with a strictly lower priority, swapping
 * index values as it goes so the positional indices stay consistent.
 * @param p the entry that was just looked up or re-put
 */
private void markFoundAgain(LinkedEntry<T,K> p)
{
p.priority++;
p.lastTouch=System.currentTimeMillis();
LinkedEntry<T,K> pp=p.prev;
while((pp!=null) && (p.priority > pp.priority))
{
// swap p with its predecessor pp: exchange indices, then relink the four neighbors
LinkedEntry<T,K> pn=p.next;
int ppIndex=pp.index;
pp.index=p.index;
p.index=ppIndex;
p.prev=pp.prev;
p.next=pp;
if(pp.prev==null)
head=p;
else
pp.prev.next=p;
pp.prev=p;
if(pn != null)
pn.prev=pp;
else
tail=pp;
pp.next=pn;
pp=p.prev;
}
}
/**
 * Removes expired low-priority entries from the tail end while the map
 * exceeds itemLimit. The multiplier widens the age windows on recursive
 * passes so a badly over-full map sheds progressively older entries.
 * Entries touched more than threshHoldToExpand times are not removed;
 * instead they cause itemLimit to grow by one per pass.
 * @param multiplier age-window multiplier; 1 for a normal pass
 */
private void trimDeadwood(int multiplier)
{
if(map.size() > itemLimit)
{
LinkedEntry<T,K> prev=tail;
final long touchTimeout=System.currentTimeMillis()-(touchAgeLimitMillis*multiplier);
final long maxAgeTimeout=System.currentTimeMillis()-(maxAgeLimitMillis*multiplier);
int expands=0;
int counter=0;
// walk backward from the tail over non-positive-index (never re-touched) entries
while((prev != null)&&(prev != head)&&(prev.index <=0)&&(map.size() > itemLimit))
{
if(counter++>map.size())
break; // failsafe to broken links
final LinkedEntry<T,K> pprev=prev.prev;
if((prev.lastTouch<touchTimeout)||(prev.birthDate<maxAgeTimeout))
remove(prev.first);
else
if(prev.priority > this.threshHoldToExpand)
expands=1; // dont want to count the same ones every time through
prev=pprev;
}
itemLimit+=expands;
// recurse with wider timeouts if still grossly over the limit
if((map.size() > itemLimit) && (((multiplier+1)*itemLimit)<map.size()))
trimDeadwood(multiplier+1); // addition to better enforce the item limit w/o being crazy
}
}
/**
 * Adds or updates a mapping. New entries are appended at the tail with a
 * decreasing index (the first entry starts at itemLimit); re-put entries
 * have their value replaced and are "touched", raising their priority.
 * A trimming pass runs after every put.
 * @param arg0 the key
 * @param arg1 the value to associate
 * @return the value passed in (not the previous mapping)
 */
@Override
public synchronized K put(T arg0, K arg1) {
LinkedEntry<T,K> p=map.get(arg0);
if(p == null)
{
p=new LinkedEntry<T,K>(arg0,arg1);
map.put(arg0,p);
if(tail == null)
{
head=p;
tail=p;
p.index=itemLimit;
}
else
{
p.index=tail.index-1;
tail.next=p;
p.prev=tail;
tail=p;
}
}
else
{
if(p.second!=arg1)
p.second=arg1;
markFoundAgain(p);
}
trimDeadwood(1);
return arg1;
}
/**
 * Copies every mapping from the given map into this one via put(),
 * so each copied entry gets normal priority/trim bookkeeping.
 * @param arg0 the source map whose mappings are copied
 */
@Override
public synchronized void putAll(Map<? extends T, ? extends K> arg0) {
	for(final Map.Entry<? extends T, ? extends K> entry : arg0.entrySet())
		put(entry.getKey(), entry.getValue());
}
/**
 * Removes a mapping and unlinks its entry from the priority list,
 * incrementing the index of each entry that followed the removed one.
 * NOTE(review): the index-fixing loop stops before the tail entry, so
 * the tail's index is not incremented -- confirm this is intentional.
 * @param arg0 the key to remove
 * @return the removed value, or null if the key was absent
 */
@Override
public synchronized K remove(Object arg0) {
final LinkedEntry<T,K> p=map.get(arg0);
if(p == null) return null;
map.remove(arg0);
LinkedEntry<T,K> pn=p.next;
while((pn != null)&&(tail != pn))
{
pn.index++;
pn=pn.next;
}
if(head == p) head=p.next;
if(tail == p) tail=p.prev;
if(p.prev != null)
p.prev.next=p.next;
if(p.next!=null)
p.next.prev=p.prev;
return p.second;
}
/** @return the number of entries currently in the map */
@Override
public int size() {
return map.size();
}
/**
 * Returns a snapshot copy of all values, in key order.
 * @return a new collection containing every current value
 */
@Override
public synchronized Collection<K> values() {
	final Collection<K> snapshot = new Vector<K>(map.size());
	for(final LinkedEntry<T,K> entry : map.values())
		snapshot.add(entry.second);
	return snapshot;
}
}
|
com/planet_ink/coffee_mud/core/collections/PrioritizingLimitedMap.java
|
package com.planet_ink.coffee_mud.core.collections;
import java.util.Arrays;
import java.util.Collection;
import java.util.Comparator;
import java.util.Enumeration;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
import java.util.TreeSet;
import java.util.Vector;
/*
Copyright 2012-2013 Bo Zimmerman
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
/**
 * A map that tries to limit itself to a maximum number of entries by
 * tracking, per entry, a touch-based priority and age, and trimming the
 * lowest-priority/oldest entries from the tail of an internal linked
 * list once the limit is exceeded. Frequently-touched ("hot") entries
 * may expand the item limit instead of being trimmed.
 * @param <T> the key type (must be comparable; keys are kept in a TreeMap)
 * @param <K> the value type
 */
public class PrioritizingLimitedMap<T extends Comparable<T>, K> implements Map<T, K>
{
	/** Soft cap on the number of entries; may grow when hot entries exceed threshHoldToExpand. */
	protected int itemLimit;
	/** Entries untouched for longer than this are trim candidates. */
	protected final long touchAgeLimitMillis;
	/** Entries older than this are trim candidates regardless of touching. */
	protected final long maxAgeLimitMillis;
	/** Number of touches on a single entry that causes itemLimit to expand. */
	protected int threshHoldToExpand;

	/** A map entry that is also a node in the priority-ordered doubly-linked list. */
	private class LinkedEntry<V,W> extends Pair<V,W>
	{
		public volatile LinkedEntry<V,W> next=null;
		public volatile LinkedEntry<V,W> prev=null;
		public volatile int priority=0;
		public volatile int index=0;
		public volatile long lastTouch=System.currentTimeMillis();
		public final long birthDate=System.currentTimeMillis();
		public LinkedEntry(V frst, W scnd)
		{
			super(frst, scnd);
		}
	}

	// head = highest-priority entry, tail = first trim candidate
	protected volatile LinkedEntry<T,K> head=null;
	protected volatile LinkedEntry<T,K> tail=null;
	// backing sorted map from key to its linked-list entry
	protected final TreeMap<T,LinkedEntry<T,K>> map=new TreeMap<T,LinkedEntry<T,K>>();

	/**
	 * Constructs a "limit" tree-based map.
	 * @param itemLimit the number of items to try to limit this map to (values &lt;= 0 become 1)
	 * @param touchAgeLimitMillis the age of last-touching that makes an item too old to keep
	 * @param maxAgeLimitMillis the longest any entry may live, regardless of touching
	 * @param threshHoldToExpand number of touches on one item before the limit expands
	 */
	public PrioritizingLimitedMap(int itemLimit, long touchAgeLimitMillis, long maxAgeLimitMillis, int threshHoldToExpand)
	{
		if(itemLimit<=0) itemLimit=1;
		this.itemLimit=itemLimit;
		// clamp the age limits so extreme values cannot cause arithmetic surprises
		this.touchAgeLimitMillis=(touchAgeLimitMillis > Integer.MAX_VALUE) ? Integer.MAX_VALUE : touchAgeLimitMillis;
		this.maxAgeLimitMillis=(maxAgeLimitMillis > Integer.MAX_VALUE) ? Integer.MAX_VALUE : maxAgeLimitMillis;
		this.threshHoldToExpand=threshHoldToExpand;
	}

	/**
	 * Constructs a map with no expansion threshold (the limit never grows).
	 * @param itemLimit the number of items to try to limit this map to
	 * @param touchAgeLimitMillis the age of last-touching that makes an item too old to keep
	 * @param maxAgeLimitMillis the longest any entry may live, regardless of touching
	 */
	public PrioritizingLimitedMap(int itemLimit, long touchAgeLimitMillis, long maxAgeLimitMillis)
	{
		this(itemLimit,touchAgeLimitMillis,maxAgeLimitMillis,Integer.MAX_VALUE);
	}

	/**
	 * Returns the value mapped to the given key, or null if absent.
	 * A hit "touches" the entry (raising its priority) and triggers a trim pass.
	 */
	@Override
	public K get(Object key)
	{
		LinkedEntry<T,K> p=map.get(key);
		if(p!=null)
		{
			markFoundAgain(p);
			trimDeadwood();
			return p.second;
		}
		return null;
	}

	/** Empties the map and resets the priority linked list. */
	@Override
	public synchronized void clear()
	{
		head=null;
		tail=null;
		map.clear();
	}

	/** Delegates to the backing TreeMap; does not count as a "touch". */
	@Override
	public boolean containsKey(Object arg0) { return map.containsKey(arg0); }

	/**
	 * Returns an enumeration of the keys in priority order (highest first).
	 * Walks the live linked list; not a snapshot.
	 */
	public Enumeration<T> prioritizedKeys()
	{
		return new Enumeration<T>()
		{
			private LinkedEntry<T,K> ptr=head;
			@Override
			public boolean hasMoreElements()
			{
				return ptr!=null;
			}
			@Override
			public T nextElement()
			{
				if(ptr!=null)
				{
					T elem=ptr.first;
					ptr=ptr.next;
					return elem;
				}
				return null;
			}
		};
	}

	/**
	 * Returns true if this map maps at least one key to the given value.
	 * Bug fix: this previously compared the stored KEY (entry.first) by
	 * reference identity; it now compares the stored VALUE (entry.second)
	 * using null-safe equality, per the java.util.Map contract.
	 */
	@Override
	public synchronized boolean containsValue(Object arg0) {
		for(LinkedEntry<T,K> p : map.values())
			if((arg0 == null) ? (p.second == null) : arg0.equals(p.second))
				return true;
		return false;
	}

	/**
	 * Returns a snapshot set of the current key/value pairs.
	 * NOTE(review): the snapshot is a TreeSet of Pair objects, so Pair
	 * must be Comparable or the TreeSet will throw -- confirm.
	 */
	@Override
	public synchronized Set<java.util.Map.Entry<T, K>> entrySet() {
		final Set<java.util.Map.Entry<T, K>> c= new TreeSet<java.util.Map.Entry<T, K>>();
		for(T t : map.keySet())
			c.add(new Pair<T,K>(t,map.get(t).second));
		return c;
	}

	/** @return true if no entries are present */
	@Override
	public boolean isEmpty() { return map.isEmpty(); }

	/** @return the live key set of the backing TreeMap (not a snapshot) */
	@Override
	public Set<T> keySet() { return map.keySet(); }

	/**
	 * Records another "touch": bumps priority and last-touch time, then
	 * bubbles the entry toward the head past lower-priority predecessors,
	 * swapping index values to keep positional indices consistent.
	 */
	private void markFoundAgain(LinkedEntry<T,K> p)
	{
		p.priority++;
		p.lastTouch=System.currentTimeMillis();
		LinkedEntry<T,K> pp=p.prev;
		while((pp!=null) && (p.priority > pp.priority))
		{
			// swap p with predecessor pp: exchange indices, relink neighbors
			LinkedEntry<T,K> pn=p.next;
			int ppIndex=pp.index;
			pp.index=p.index;
			p.index=ppIndex;
			p.prev=pp.prev;
			p.next=pp;
			if(pp.prev==null)
				head=p;
			else
				pp.prev.next=p;
			pp.prev=p;
			if(pn != null)
				pn.prev=pp;
			else
				tail=pp;
			pp.next=pn;
			pp=p.prev;
		}
	}

	/**
	 * Removes expired low-priority entries from the tail end while the map
	 * exceeds itemLimit; hot entries grow the limit instead of being removed.
	 */
	private void trimDeadwood()
	{
		if(map.size() > itemLimit)
		{
			LinkedEntry<T,K> prev=tail;
			final long touchTimeout=System.currentTimeMillis()-touchAgeLimitMillis;
			final long maxAgeTimeout=System.currentTimeMillis()-maxAgeLimitMillis;
			int expands=0;
			while((prev != null)&&(prev != head)&&(prev.index <=0)&&(map.size() > itemLimit))
			{
				final LinkedEntry<T,K> pprev=prev.prev;
				if((prev.lastTouch<touchTimeout)||(prev.birthDate<maxAgeTimeout))
					remove(prev.first);
				else
				if(prev.priority > this.threshHoldToExpand)
					expands=1; // dont want to count the same ones every time through
				prev=pprev;
			}
			itemLimit+=expands;
		}
	}

	/**
	 * Adds or updates a mapping. New entries join at the tail with a
	 * decreasing index; re-put entries are re-touched. Trims afterward.
	 * @return the value passed in (not the previous mapping)
	 */
	@Override
	public synchronized K put(T arg0, K arg1) {
		LinkedEntry<T,K> p=map.get(arg0);
		if(p == null)
		{
			p=new LinkedEntry<T,K>(arg0,arg1);
			map.put(arg0,p);
			if(tail == null)
			{
				head=p;
				tail=p;
				p.index=itemLimit;
			}
			else
			{
				p.index=tail.index-1;
				tail.next=p;
				p.prev=tail;
				tail=p;
			}
		}
		else
		{
			if(p.second!=arg1)
				p.second=arg1;
			markFoundAgain(p);
		}
		trimDeadwood();
		return arg1;
	}

	/** Copies every mapping from the given map into this one via put(). */
	@Override
	public synchronized void putAll(Map<? extends T, ? extends K> arg0) {
		for(T t : arg0.keySet())
			put(t,arg0.get(t));
	}

	/**
	 * Removes a mapping and unlinks its entry, incrementing the index of
	 * each following entry.
	 * NOTE(review): the index-fix loop stops before the tail entry, so the
	 * tail's index is not incremented -- confirm this is intentional.
	 */
	@Override
	public synchronized K remove(Object arg0) {
		final LinkedEntry<T,K> p=map.get(arg0);
		if(p == null) return null;
		map.remove(arg0);
		LinkedEntry<T,K> pn=p.next;
		while((pn != null)&&(tail != pn))
		{
			pn.index++;
			pn=pn.next;
		}
		if(head == p) head=p.next;
		if(tail == p) tail=p.prev;
		if(p.prev != null)
			p.prev.next=p.next;
		if(p.next!=null)
			p.next.prev=p.prev;
		return p.second;
	}

	/** @return the number of entries currently in the map */
	@Override
	public int size() {
		return map.size();
	}

	/** @return a snapshot copy of all values, in key order */
	@Override
	public synchronized Collection<K> values() {
		final Collection<K> c= new Vector<K>(map.size());
		for(T t : map.keySet())
			c.add(map.get(t).second);
		return c;
	}
}
|
git-svn-id: svn://192.168.1.10/public/CoffeeMud@10969 0d6f1817-ed0e-0410-87c9-987e46238f29
|
com/planet_ink/coffee_mud/core/collections/PrioritizingLimitedMap.java
| ||
Java
|
apache-2.0
|
78c623d41ccd5ae296d8ab38dd27f9e1b828a6ba
| 0
|
achikin/testrail-jenkins-plugin,achikin/testrail-jenkins-plugin
|
/**
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package testrail.testrail;
import org.apache.commons.httpclient.HttpClient;
import org.apache.commons.httpclient.HttpMethod;
import org.apache.commons.httpclient.UsernamePasswordCredentials;
import org.apache.commons.httpclient.auth.AuthScope;
import org.apache.commons.httpclient.methods.GetMethod;
import org.apache.commons.httpclient.methods.PostMethod;
import org.apache.commons.httpclient.methods.StringRequestEntity;
import testrail.testrail.JunitResults.Testcase;
import testrail.testrail.TestRailObjects.*;
import org.apache.commons.lang.StringUtils;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import javax.xml.ws.http.HTTPException;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.lang.InterruptedException;
import java.util.List;
import static testrail.testrail.Utils.*;
/**
* Created by Drew on 3/19/14.
*/
/**
 * Thin HTTP client for the TestRail REST API (v2).
 * Wraps Apache Commons HttpClient with preemptive basic-auth credentials
 * and transparently retries any request the server throttles with
 * HTTP 429 after a 60-second pause.
 * Created by Drew on 3/19/14.
 */
public class TestRailClient {
	private String host;
	private String user;
	private String password;

	public void setHost(String host) { this.host = host; }
	public void setUser(String user) { this.user = user; }
	public void setPassword(String password) {this.password = password; }
	public String getHost() { return this.host; }
	public String getUser() { return this.user; }
	public String getPassword() { return this.password; }

	/**
	 * @param host base URL of the TestRail server
	 * @param user login name
	 * @param password password or API key
	 */
	public TestRailClient(String host, String user, String password) {
		this.host = host;
		this.user = user;
		this.password = password;
	}

	/** Builds an HttpClient pre-configured with preemptive basic auth and a JSON content type. */
	private HttpClient setUpHttpClient(HttpMethod method) {
		HttpClient httpclient = new HttpClient();
		httpclient.getParams().setAuthenticationPreemptive(true);
		httpclient.getState().setCredentials(
				AuthScope.ANY,
				new UsernamePasswordCredentials(this.user, this.password)
		);
		method.setDoAuthentication(true);
		method.addRequestHeader("Content-Type", "application/json");
		return httpclient;
	}

	/** GET with automatic retry while the server answers 429 (rate limited). */
	private TestRailResponse httpGet(String path) throws IOException {
		TestRailResponse response;
		do {
			response = httpGetInt(path);
			if (response.getStatus() == 429) {
				try {
					Thread.sleep(60000); // back off before retrying a throttled request
				} catch (InterruptedException e) {
					// NOTE(review): consider Thread.currentThread().interrupt() here
					log(e.toString());
				}
			}
		} while (response.getStatus() == 429);
		return response;
	}

	/** Executes a single GET without retry handling; always releases the connection. */
	private TestRailResponse httpGetInt(String path) throws IOException {
		TestRailResponse result;
		GetMethod get = new GetMethod(host + "/" + path);
		HttpClient httpclient = setUpHttpClient(get);
		try {
			// primitive int avoids accidental Integer autoboxing
			int status = httpclient.executeMethod(get);
			String body = new String(get.getResponseBody(), get.getResponseCharSet());
			result = new TestRailResponse(status, body);
		} finally {
			get.releaseConnection();
		}
		return result;
	}

	/** POST with automatic retry while the server answers 429 (rate limited). */
	private TestRailResponse httpPost(String path, String payload)
			throws UnsupportedEncodingException, IOException, HTTPException {
		TestRailResponse response;
		do {
			response = httpPostInt(path, payload);
			if (response.getStatus() == 429) {
				try {
					Thread.sleep(60000); // back off before retrying a throttled request
				} catch (InterruptedException e) {
					// NOTE(review): consider Thread.currentThread().interrupt() here
					log(e.toString());
				}
			}
		} while (response.getStatus() == 429);
		return response;
	}

	/** Executes a single JSON POST without retry handling; always releases the connection. */
	private TestRailResponse httpPostInt(String path, String payload)
			throws UnsupportedEncodingException, IOException, HTTPException {
		TestRailResponse result;
		PostMethod post = new PostMethod(host + "/" + path);
		HttpClient httpclient = setUpHttpClient(post);
		try {
			StringRequestEntity requestEntity = new StringRequestEntity(
					payload,
					"application/json",
					"UTF-8"
			);
			post.setRequestEntity(requestEntity);
			int status = httpclient.executeMethod(post);
			String body = new String(post.getResponseBody(), post.getResponseCharSet());
			result = new TestRailResponse(status, body);
		} finally {
			post.releaseConnection();
		}
		return result;
	}

	/**
	 * Checks whether the configured host answers at all.
	 * @return true if an HTTP round trip succeeds; false on unknown host
	 */
	public boolean serverReachable() throws IOException {
		boolean result = false;
		HttpClient httpclient = new HttpClient();
		GetMethod get = new GetMethod(host);
		try {
			httpclient.executeMethod(get);
			result = true;
		} catch (java.net.UnknownHostException e) {
			// nop - we default to result == false
		} finally {
			get.releaseConnection();
		}
		return result;
	}

	/** @return true if the configured credentials are accepted by the API */
	public boolean authenticationWorks() throws IOException {
		TestRailResponse response = httpGet("/index.php?/api/v2/get_projects");
		return (200 == response.getStatus());
	}

	/** Fetches all projects visible to the configured user. */
	public Project[] getProjects() throws IOException, ElementNotFoundException {
		String body = httpGet("/index.php?/api/v2/get_projects").getBody();
		JSONArray json = new JSONArray(body);
		Project[] projects = new Project[json.length()];
		for (int i = 0; i < json.length(); i++) {
			JSONObject o = json.getJSONObject(i);
			Project p = new Project();
			p.setName(o.getString("name"));
			p.setId(o.getInt("id"));
			projects[i] = p;
		}
		return projects;
	}

	/**
	 * Looks up a project's numeric id by its exact name.
	 * @throws ElementNotFoundException if no project has that name
	 */
	public int getProjectId(String projectName) throws IOException, ElementNotFoundException {
		Project[] projects = getProjects();
		for(int i = 0; i < projects.length; i++) {
			if (projects[i].getName().equals(projectName)) {
				return projects[i].getId();
			}
		}
		throw new ElementNotFoundException(projectName);
	}

	/** Fetches all suites of a project; returns an empty array if the response is not a JSON array. */
	public Suite[] getSuits(int projectId) throws IOException, ElementNotFoundException {
		String body = httpGet("/index.php?/api/v2/get_suites/" + projectId).getBody();
		JSONArray json;
		try {
			json = new JSONArray(body);
		} catch (JSONException e) {
			return new Suite[0];
		}
		Suite[] suites = new Suite[json.length()];
		for (int i = 0; i < json.length(); i++) {
			JSONObject o = json.getJSONObject(i);
			Suite s = new Suite();
			s.setName(o.getString("name"));
			s.setId(o.getInt("id"));
			suites[i] = s;
		}
		return suites;
	}

	/** @return the relative API path for listing the cases of a suite */
	public String getCasesString(int projectId, int suiteId) {
		String result = "index.php?/api/v2/get_cases/" + projectId + "&suite_id=" + suiteId;
		return result;
	}

	/** Fetches all test cases of a suite. */
	public Case[] getCases(int projectId, int suiteId) throws IOException, ElementNotFoundException {
		// "/#{project_id}&suite_id=#{suite_id}#{section_string}"
		String body = httpGet("index.php?/api/v2/get_cases/" + projectId + "&suite_id=" + suiteId).getBody();
		JSONArray json = new JSONArray(body);
		Case[] cases = new Case[json.length()];
		for (int i = 0; i < json.length(); i++) {
			JSONObject o = json.getJSONObject(i);
			cases[i] = createCaseFromJson(o);
		}
		return cases;
	}

	/** Fetches all sections of a suite. */
	public Section[] getSections(int projectId, int suiteId) throws IOException, ElementNotFoundException {
		String body = httpGet("index.php?/api/v2/get_sections/" + projectId + "&suite_id=" + suiteId).getBody();
		JSONArray json = new JSONArray(body);
		Section[] sects = new Section[json.length()];
		for (int i = 0; i < json.length(); i++) {
			JSONObject o = json.getJSONObject(i);
			sects[i] = createSectionFromJSON(o);
		}
		return sects;
	}

	/** Maps one JSON object to a Section; a missing parent_id becomes the literal string "null". */
	private Section createSectionFromJSON(JSONObject o) {
		Section s = new Section();
		s.setName(o.getString("name"));
		s.setId(o.getInt("id"));
		if (!o.isNull("parent_id")) {
			s.setParentId(String.valueOf(o.getInt("parent_id")));
		} else {
			s.setParentId("null");
		}
		s.setSuiteId(o.getInt("suite_id"));
		return s;
	}

	/**
	 * Creates a new section under the given parent and returns the created Section.
	 * (An unused local Section was removed here.)
	 */
	public Section addSection(String sectionName, int projectId, int suiteId, String parentId) throws IOException, ElementNotFoundException {
		String payload = new JSONObject().put("name", sectionName).put("suite_id", suiteId).put("parent_id", parentId).toString();
		String body = httpPost("index.php?/api/v2/add_section/" + projectId , payload).getBody();
		JSONObject o = new JSONObject(body);
		return createSectionFromJSON(o);
	}

	/** Maps one JSON object to a Case, including the optional "refs" field. */
	private Case createCaseFromJson(JSONObject o) {
		Case s = new Case();
		s.setTitle(o.getString("title"));
		s.setId(o.getInt("id"));
		s.setSectionId(o.getInt("section_id"));
		s.setRefs(o.optString("refs"));
		return s;
	}

	/** Creates a new test case in the given section, carrying over refs when present. */
	public Case addCase(Testcase caseToAdd, int sectionId) throws IOException {
		JSONObject payload = new JSONObject().put("title", caseToAdd.getName());
		if (!StringUtils.isEmpty(caseToAdd.getRefs())) {
			payload.put("refs", caseToAdd.getRefs());
		}
		String body = httpPost("index.php?/api/v2/add_case/" + sectionId, payload.toString()).getBody();
		Case c = createCaseFromJson(new JSONObject(body));
		return c;
	}

	/** Submits all results of a run in a single add_results_for_cases call. */
	public TestRailResponse addResultsForCases(int runId, Results results) throws IOException {
		JSONArray a = new JSONArray();
		for (int i = 0; i < results.getResults().size(); i++) {
			JSONObject o = new JSONObject();
			Result r = results.getResults().get(i);
			o.put("case_id", r.getCaseId()).put("status_id", r.getStatusId()).put("comment", r.getComment()).put("elapsed", r.getElapsedTimeString());
			a.put(o);
		}
		String payload = new JSONObject().put("results", a).toString();
		log(payload);
		TestRailResponse response = httpPost("index.php?/api/v2/add_results_for_cases/" + runId, payload);
		return response;
	}

	/**
	 * Creates a new test run for a suite.
	 * @return the id of the created run
	 */
	public int addRun(int projectId, int suiteId, String milestoneID, String description)
			throws IOException {
		String payload = new JSONObject().put("suite_id", suiteId).put("description", description).put("milestone_id", milestoneID).toString();
		String body = httpPost("index.php?/api/v2/add_run/" + projectId, payload).getBody();
		return new JSONObject(body).getInt("id");
	}

	/** Fetches all milestones of a project; returns an empty array if the response is not a JSON array. */
	public Milestone[] getMilestones(int projectId) throws IOException, ElementNotFoundException {
		String body = httpGet("index.php?/api/v2/get_milestones/" + projectId).getBody();
		JSONArray json;
		try {
			json = new JSONArray(body);
		} catch (JSONException e) {
			return new Milestone[0];
		}
		Milestone[] suites = new Milestone[json.length()];
		for (int i = 0; i < json.length(); i++) {
			JSONObject o = json.getJSONObject(i);
			Milestone s = new Milestone();
			s.setName(o.getString("name"));
			s.setId(String.valueOf(o.getInt("id")));
			suites[i] = s;
		}
		return suites;
	}

	/**
	 * Looks up a milestone's id by its exact name.
	 * @throws ElementNotFoundException if no milestone has that name
	 */
	public String getMilestoneID(String milestoneName, int projectId) throws IOException, ElementNotFoundException {
		for (Milestone mstone: getMilestones(projectId)) {
			if (mstone.getName().equals(milestoneName)) {
				return mstone.getId();
			}
		}
		throw new ElementNotFoundException("Milestone id not found.");
	}

	/**
	 * Closes (archives) a test run.
	 * @return true if the server answered 200
	 */
	public boolean closeRun(int runId)
			throws IOException {
		String payload = "";
		int status = httpPost("index.php?/api/v2/close_run/" + runId, payload).getStatus();
		return (200 == status);
	}
}
|
src/main/java/testrail/testrail/TestRailClient.java
|
/**
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package testrail.testrail;
import org.apache.commons.httpclient.HttpClient;
import org.apache.commons.httpclient.HttpMethod;
import org.apache.commons.httpclient.UsernamePasswordCredentials;
import org.apache.commons.httpclient.auth.AuthScope;
import org.apache.commons.httpclient.methods.GetMethod;
import org.apache.commons.httpclient.methods.PostMethod;
import org.apache.commons.httpclient.methods.StringRequestEntity;
import testrail.testrail.JunitResults.Testcase;
import testrail.testrail.TestRailObjects.*;
import org.apache.commons.lang.StringUtils;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import javax.xml.ws.http.HTTPException;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.lang.InterruptedException;
import java.util.List;
import static testrail.testrail.Utils.*;
/**
* Created by Drew on 3/19/14.
*/
/**
 * Thin HTTP client for the TestRail REST API (v2).
 * Wraps Apache Commons HttpClient with preemptive basic-auth credentials
 * and transparently retries any request the server throttles with
 * HTTP 429 after a 60-second pause.
 * Created by Drew on 3/19/14.
 */
public class TestRailClient {
	private String host;
	private String user;
	private String password;

	public void setHost(String host) { this.host = host; }
	public void setUser(String user) { this.user = user; }
	public void setPassword(String password) {this.password = password; }
	public String getHost() { return this.host; }
	public String getUser() { return this.user; }
	public String getPassword() { return this.password; }

	/**
	 * @param host base URL of the TestRail server
	 * @param user login name
	 * @param password password or API key
	 */
	public TestRailClient(String host, String user, String password) {
		this.host = host;
		this.user = user;
		this.password = password;
	}

	/** Builds an HttpClient pre-configured with preemptive basic auth and a JSON content type. */
	private HttpClient setUpHttpClient(HttpMethod method) {
		HttpClient httpclient = new HttpClient();
		httpclient.getParams().setAuthenticationPreemptive(true);
		httpclient.getState().setCredentials(
				AuthScope.ANY,
				new UsernamePasswordCredentials(this.user, this.password)
		);
		method.setDoAuthentication(true);
		method.addRequestHeader("Content-Type", "application/json");
		return httpclient;
	}

	/** GET with automatic retry while the server answers 429 (rate limited). */
	private TestRailResponse httpGet(String path) throws IOException {
		TestRailResponse response;
		do {
			response = httpGetInt(path);
			if (response.getStatus() == 429) {
				try {
					Thread.sleep(60000); // back off before retrying a throttled request
				} catch (InterruptedException e) {
					// NOTE(review): consider Thread.currentThread().interrupt() here
					log(e.toString());
				}
			}
		} while (response.getStatus() == 429);
		return response;
	}

	/** Executes a single GET without retry handling; always releases the connection. */
	private TestRailResponse httpGetInt(String path) throws IOException {
		TestRailResponse result;
		GetMethod get = new GetMethod(host + "/" + path);
		HttpClient httpclient = setUpHttpClient(get);
		try {
			// primitive int avoids accidental Integer autoboxing
			int status = httpclient.executeMethod(get);
			String body = new String(get.getResponseBody(), get.getResponseCharSet());
			result = new TestRailResponse(status, body);
		} finally {
			get.releaseConnection();
		}
		return result;
	}

	/** POST with automatic retry while the server answers 429 (rate limited). */
	private TestRailResponse httpPost(String path, String payload)
			throws UnsupportedEncodingException, IOException, HTTPException {
		TestRailResponse response;
		do {
			response = httpPostInt(path, payload);
			if (response.getStatus() == 429) {
				try {
					Thread.sleep(60000); // back off before retrying a throttled request
				} catch (InterruptedException e) {
					// NOTE(review): consider Thread.currentThread().interrupt() here
					log(e.toString());
				}
			}
		} while (response.getStatus() == 429);
		return response;
	}

	/** Executes a single JSON POST without retry handling; always releases the connection. */
	private TestRailResponse httpPostInt(String path, String payload)
			throws UnsupportedEncodingException, IOException, HTTPException {
		TestRailResponse result;
		PostMethod post = new PostMethod(host + "/" + path);
		HttpClient httpclient = setUpHttpClient(post);
		try {
			StringRequestEntity requestEntity = new StringRequestEntity(
					payload,
					"application/json",
					"UTF-8"
			);
			post.setRequestEntity(requestEntity);
			int status = httpclient.executeMethod(post);
			String body = new String(post.getResponseBody(), post.getResponseCharSet());
			result = new TestRailResponse(status, body);
		} finally {
			post.releaseConnection();
		}
		return result;
	}

	/**
	 * Checks whether the configured host answers at all.
	 * @return true if an HTTP round trip succeeds; false on unknown host
	 */
	public boolean serverReachable() throws IOException {
		boolean result = false;
		HttpClient httpclient = new HttpClient();
		GetMethod get = new GetMethod(host);
		try {
			httpclient.executeMethod(get);
			result = true;
		} catch (java.net.UnknownHostException e) {
			// nop - we default to result == false
		} finally {
			get.releaseConnection();
		}
		return result;
	}

	/** @return true if the configured credentials are accepted by the API */
	public boolean authenticationWorks() throws IOException {
		TestRailResponse response = httpGet("/index.php?/api/v2/get_projects");
		return (200 == response.getStatus());
	}

	/** Fetches all projects visible to the configured user. */
	public Project[] getProjects() throws IOException, ElementNotFoundException {
		String body = httpGet("/index.php?/api/v2/get_projects").getBody();
		JSONArray json = new JSONArray(body);
		Project[] projects = new Project[json.length()];
		for (int i = 0; i < json.length(); i++) {
			JSONObject o = json.getJSONObject(i);
			Project p = new Project();
			p.setName(o.getString("name"));
			p.setId(o.getInt("id"));
			projects[i] = p;
		}
		return projects;
	}

	/**
	 * Looks up a project's numeric id by its exact name.
	 * @throws ElementNotFoundException if no project has that name
	 */
	public int getProjectId(String projectName) throws IOException, ElementNotFoundException {
		Project[] projects = getProjects();
		for(int i = 0; i < projects.length; i++) {
			if (projects[i].getName().equals(projectName)) {
				return projects[i].getId();
			}
		}
		throw new ElementNotFoundException(projectName);
	}

	/** Fetches all suites of a project; returns an empty array if the response is not a JSON array. */
	public Suite[] getSuits(int projectId) throws IOException, ElementNotFoundException {
		String body = httpGet("/index.php?/api/v2/get_suites/" + projectId).getBody();
		JSONArray json;
		try {
			json = new JSONArray(body);
		} catch (JSONException e) {
			return new Suite[0];
		}
		Suite[] suites = new Suite[json.length()];
		for (int i = 0; i < json.length(); i++) {
			JSONObject o = json.getJSONObject(i);
			Suite s = new Suite();
			s.setName(o.getString("name"));
			s.setId(o.getInt("id"));
			suites[i] = s;
		}
		return suites;
	}

	/** @return the relative API path for listing the cases of a suite */
	public String getCasesString(int projectId, int suiteId) {
		String result = "index.php?/api/v2/get_cases/" + projectId + "&suite_id=" + suiteId;
		return result;
	}

	/** Fetches all test cases of a suite. */
	public Case[] getCases(int projectId, int suiteId) throws IOException, ElementNotFoundException {
		// "/#{project_id}&suite_id=#{suite_id}#{section_string}"
		String body = httpGet("index.php?/api/v2/get_cases/" + projectId + "&suite_id=" + suiteId).getBody();
		JSONArray json = new JSONArray(body);
		Case[] cases = new Case[json.length()];
		for (int i = 0; i < json.length(); i++) {
			JSONObject o = json.getJSONObject(i);
			cases[i] = createCaseFromJson(o);
		}
		return cases;
	}

	/** Fetches all sections of a suite. */
	public Section[] getSections(int projectId, int suiteId) throws IOException, ElementNotFoundException {
		String body = httpGet("index.php?/api/v2/get_sections/" + projectId + "&suite_id=" + suiteId).getBody();
		JSONArray json = new JSONArray(body);
		Section[] sects = new Section[json.length()];
		for (int i = 0; i < json.length(); i++) {
			JSONObject o = json.getJSONObject(i);
			sects[i] = createSectionFromJSON(o);
		}
		return sects;
	}

	/** Maps one JSON object to a Section; a missing parent_id becomes the literal string "null". */
	private Section createSectionFromJSON(JSONObject o) {
		Section s = new Section();
		s.setName(o.getString("name"));
		s.setId(o.getInt("id"));
		if (!o.isNull("parent_id")) {
			s.setParentId(String.valueOf(o.getInt("parent_id")));
		} else {
			s.setParentId("null");
		}
		s.setSuiteId(o.getInt("suite_id"));
		return s;
	}

	/**
	 * Creates a new section under the given parent and returns the created Section.
	 * (An unused local Section was removed here.)
	 */
	public Section addSection(String sectionName, int projectId, int suiteId, String parentId) throws IOException, ElementNotFoundException {
		String payload = new JSONObject().put("name", sectionName).put("suite_id", suiteId).put("parent_id", parentId).toString();
		String body = httpPost("index.php?/api/v2/add_section/" + projectId , payload).getBody();
		JSONObject o = new JSONObject(body);
		return createSectionFromJSON(o);
	}

	/**
	 * Maps one JSON object to a Case.
	 * NOTE(review): unlike addCase (which writes refs), this does not read
	 * the "refs" field from the JSON -- confirm whether Case carries a refs
	 * property and populate it via o.optString("refs") if so.
	 */
	private Case createCaseFromJson(JSONObject o) {
		Case s = new Case();
		s.setTitle(o.getString("title"));
		s.setId(o.getInt("id"));
		s.setSectionId(o.getInt("section_id"));
		return s;
	}

	/** Creates a new test case in the given section, carrying over refs when present. */
	public Case addCase(Testcase caseToAdd, int sectionId) throws IOException {
		JSONObject payload = new JSONObject().put("title", caseToAdd.getName());
		if (!StringUtils.isEmpty(caseToAdd.getRefs())) {
			payload.put("refs", caseToAdd.getRefs());
		}
		String body = httpPost("index.php?/api/v2/add_case/" + sectionId, payload.toString()).getBody();
		Case c = createCaseFromJson(new JSONObject(body));
		return c;
	}

	/** Submits all results of a run in a single add_results_for_cases call. */
	public TestRailResponse addResultsForCases(int runId, Results results) throws IOException {
		JSONArray a = new JSONArray();
		for (int i = 0; i < results.getResults().size(); i++) {
			JSONObject o = new JSONObject();
			Result r = results.getResults().get(i);
			o.put("case_id", r.getCaseId()).put("status_id", r.getStatusId()).put("comment", r.getComment()).put("elapsed", r.getElapsedTimeString());
			a.put(o);
		}
		String payload = new JSONObject().put("results", a).toString();
		log(payload);
		TestRailResponse response = httpPost("index.php?/api/v2/add_results_for_cases/" + runId, payload);
		return response;
	}

	/**
	 * Creates a new test run for a suite.
	 * @return the id of the created run
	 */
	public int addRun(int projectId, int suiteId, String milestoneID, String description)
			throws IOException {
		String payload = new JSONObject().put("suite_id", suiteId).put("description", description).put("milestone_id", milestoneID).toString();
		String body = httpPost("index.php?/api/v2/add_run/" + projectId, payload).getBody();
		return new JSONObject(body).getInt("id");
	}

	/** Fetches all milestones of a project; returns an empty array if the response is not a JSON array. */
	public Milestone[] getMilestones(int projectId) throws IOException, ElementNotFoundException {
		String body = httpGet("index.php?/api/v2/get_milestones/" + projectId).getBody();
		JSONArray json;
		try {
			json = new JSONArray(body);
		} catch (JSONException e) {
			return new Milestone[0];
		}
		Milestone[] suites = new Milestone[json.length()];
		for (int i = 0; i < json.length(); i++) {
			JSONObject o = json.getJSONObject(i);
			Milestone s = new Milestone();
			s.setName(o.getString("name"));
			s.setId(String.valueOf(o.getInt("id")));
			suites[i] = s;
		}
		return suites;
	}

	/**
	 * Looks up a milestone's id by its exact name.
	 * @throws ElementNotFoundException if no milestone has that name
	 */
	public String getMilestoneID(String milestoneName, int projectId) throws IOException, ElementNotFoundException {
		for (Milestone mstone: getMilestones(projectId)) {
			if (mstone.getName().equals(milestoneName)) {
				return mstone.getId();
			}
		}
		throw new ElementNotFoundException("Milestone id not found.");
	}

	/**
	 * Closes (archives) a test run.
	 * @return true if the server answered 200
	 */
	public boolean closeRun(int runId)
			throws IOException {
		String payload = "";
		int status = httpPost("index.php?/api/v2/close_run/" + runId, payload).getStatus();
		return (200 == status);
	}
}
|
added support for refs field - reading refs from Testrail
|
src/main/java/testrail/testrail/TestRailClient.java
|
added support for refs field - reading refs from Testrail
|
|
Java
|
apache-2.0
|
2607819d2e948824487f83ee9350af2d63c31fc3
| 0
|
toby1984/jASM_16,toby1984/jASM_16
|
/**
* Copyright 2012 Tobias Gierke <tobias.gierke@code-sourcery.de>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package de.codesourcery.jasm16.ast;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import de.codesourcery.jasm16.compiler.GenericCompilationError;
import de.codesourcery.jasm16.compiler.ICompilationError;
import de.codesourcery.jasm16.compiler.ICompilationUnit;
import de.codesourcery.jasm16.exceptions.EOFException;
import de.codesourcery.jasm16.exceptions.ParseException;
import de.codesourcery.jasm16.lexer.ILexer;
import de.codesourcery.jasm16.lexer.IToken;
import de.codesourcery.jasm16.lexer.TokenType;
import de.codesourcery.jasm16.parser.IParseContext;
import de.codesourcery.jasm16.parser.IParser.ParserOption;
import de.codesourcery.jasm16.scanner.IScanner;
import de.codesourcery.jasm16.utils.ITextRegion;
import de.codesourcery.jasm16.utils.TextRegion;
/**
* Abstract base-class of all AST nodes.
*
* <p>AST nodes are created for one or more tokens in the input stream.</p>
* <p>Each AST node keeps track of the source code location ({@link ITextRegion} it
* was created from so editors etc. have an easy time associating source code
* with the AST.</p>
*
* <p>Keeping track of the source code locations is slightly complicated
* because not all tokens (e.g. whitespace,EOL) become part of the AST, so this
* class actually uses two {@link ITextRegion} fields to keep track of the
* source code range covered by the AST node (or it's children) and
* the input that was actually traversed while this subtree was constructed.</p>.
*
* <p>Make sure you understand how {@link #getTextRegion()} , {@link #recalculateTextRegion(boolean)},
* {@link #setTextRegionIncludingAllTokens(ITextRegion)} and {@link #mergeWithAllTokensTextRegion(ASTNode)}
* work.</p>
*
* <p>This class also implements the parse error recovery mechanism, check out {@link #parse(IParseContext)} to
* see how it actually works.</p>
*
* @author tobias.gierke@code-sourcery.de
*/
public abstract class ASTNode
{
private static final Logger LOG = Logger.getLogger(ASTNode.class);
/**
* Default tokens to look for when trying to recover from
* a parse error.
*
* @see IParseContext#setErrorRecoveryTokenTypes(TokenType[])
* @see ILexer#advanceTo(TokenType, boolean)
*/
public static final TokenType[] DEFAULT_ERROR_RECOVERY_TOKEN = new TokenType[]{TokenType.EOL, TokenType.SINGLE_LINE_COMMENT};
private ASTNode parent;
private final List<ASTNode> children = new ArrayList<ASTNode>();
/**
* This text range covers <b>all</b> tokens that were consumed while
* parsing this node (including whitespace, EOL etc.).
*/
private ITextRegion textRegionIncludingAllTokens;
/**
* Cached value of {@link #textRegionIncludingAllTokens} plus {@link #getTextRegion()} of all
* child nodes.
*/
private ITextRegion actualTextRegion;
/**
* Creates a new instance.
*
* <p>This instance will have no parent
* and {@link #textRegionIncludingAllTokens} and {@link #actualTextRegion}
* will be <code>null</code>.</p>
*/
public ASTNode() {
    // Intentionally empty: parent and both text-region fields stay null until populated during parsing.
}
/**
* Creates a new AST node for a given source code location.
*
* @param allTokensRegion text range covered by <b>this</b> node, never <code>null</code>
*/
protected ASTNode(ITextRegion allTokensRegion)
{
    if (allTokensRegion == null) {
        throw new IllegalArgumentException("allTokensRegion must not be NULL.");
    }
    // Defensive copy: later merges on this node must not mutate the caller's region.
    this.textRegionIncludingAllTokens = new TextRegion( allTokensRegion );
}
/**
* Returns the actual source code region covered by
* this AST node (and it's child nodes).
*
* <p>Due to the way parsing works (recursive descent...), an AST node always
* covers at least the text range that is covered by it's child nodes.</p>
*
* <p>
* The actual source code region covered by <b>this</b> node is composed
* of the actual source code regions of all child nodes (=invoking
* {@link #getTextRegion()} on each child) <b>PLUS</b>
* this node's <i>'all-tokens' text range</i> (see .
* </p>
*
* @return
* @see #mergeTextRegion(ITextRegion)
* @see #mergeWithAllTokensTextRegion(ASTNode)
*/
/**
 * Returns the actual source code region covered by this node and its children,
 * lazily recomputing the cached value when a mutation has invalidated it.
 *
 * @return region covering this node's all-tokens range plus all child regions
 */
public final ITextRegion getTextRegion()
{
    // Cache is nulled out by the various merge/set methods; rebuild it on demand.
    // (Original had a redundant duplicated return inside the null branch.)
    if ( actualTextRegion == null )
    {
        recalculateTextRegion(true);
    }
    return actualTextRegion;
}
/**
* Merges the actual source code region covered by a node with
* this node's {@link #textRegionIncludingAllTokens}.
*
* <p>This method is used during expression folding/evaluation to
* preserve the text range covered by an AST node when an AST node
* gets replaced with a newly created node representing calculated value.
* </p>
* <p>Not using this method during expression folding will cause the location
* of all tokens that do not resemble actual AST nodes (read: almost all tokens)
* to be permanently lost.</p>
*
* @param node
* @see ITextRegion#merge(ITextRegion)
*/
protected final void mergeWithAllTokensTextRegion(ASTNode node)
{
    // Delegate using the node's actual (child-inclusive) region, not just its own all-tokens region.
    mergeWithAllTokensTextRegion( node.getTextRegion() );
}
/**
* Merges the source code region with this node's {@link #textRegionIncludingAllTokens}.
*
* <p>This method is used during expression folding/evaluation to
* preserve the text range covered by an AST node when an AST node
* gets replaced with a newly created node representing calculated value.
* </p>
* <p>Not using this method during expression folding will cause the location
* of all tokens that do not resemble actual AST nodes (read: almost all tokens)
* to be permanently lost.</p>
*
* @param range
* @see ITextRegion#merge(ITextRegion)
*/
protected final void mergeWithAllTokensTextRegion(ITextRegion range)
{
    // If the all-tokens region was never set, promote the cached actual region to it first.
    if ( this.textRegionIncludingAllTokens == null && this.actualTextRegion != null ) {
        this.textRegionIncludingAllTokens = this.actualTextRegion;
    }
    if ( this.textRegionIncludingAllTokens == null )
    {
        this.textRegionIncludingAllTokens = new TextRegion( range );
    } else {
        this.textRegionIncludingAllTokens.merge( range );
    }
    // Invalidate the cached actual region; a parent may also hold a stale value and must recompute.
    if ( this.actualTextRegion != null ) {
        this.actualTextRegion = null;
        if ( getParent() != null ) { // maybe a parent node already called getTextRegion() on this child...
            getParent().recalculateTextRegion(true);
        }
    }
}
protected final void mergeTextRegion(ITextRegion range)
{
    // Hash of the old region lets us detect whether the merge actually changed anything.
    final int oldValue = TextRegion.hashCode( this.actualTextRegion );
    if ( this.actualTextRegion == null && textRegionIncludingAllTokens != null)
    {
        // Seed the cache from this node's all-tokens region before merging.
        this.actualTextRegion = new TextRegion( textRegionIncludingAllTokens );
    }
    if ( this.actualTextRegion != null ) {
        this.actualTextRegion.merge( range );
    } else {
        this.actualTextRegion = new TextRegion( range );
    }
    // Propagate upwards only when this node's region really changed (avoids useless walks to the root).
    if ( oldValue != TextRegion.hashCode( this.actualTextRegion ) && getParent() != null ) {
        getParent().mergeTextRegion( this.actualTextRegion );
    }
}
protected void setTextRegionIncludingAllTokens( ITextRegion textRegion )
{
    // Replace (not merge) the all-tokens region, then rebuild the cached actual region.
    this.textRegionIncludingAllTokens = new TextRegion( textRegion );
    this.actualTextRegion = null;
    recalculateTextRegion(true);
}
// Rebuilds actualTextRegion from this node's all-tokens region plus every child's region,
// optionally propagating the recalculation to ancestors when the result changed.
private void recalculateTextRegion(boolean recalculateParents)
{
    final int oldValue = TextRegion.hashCode( this.actualTextRegion );
    // Start from this node's own all-tokens region (if any) and fold in every child's region.
    ITextRegion range = textRegionIncludingAllTokens != null ? new TextRegion( textRegionIncludingAllTokens ) : null;
    for ( ASTNode child : this.children)
    {
        if ( range == null ) {
            range = new TextRegion( child.getTextRegion() );
        } else {
            range.merge( child.getTextRegion() );
        }
    }
    this.actualTextRegion = range;
    // Walk up only when the region actually changed and a parent exists.
    if ( recalculateParents &&
        oldValue != TextRegion.hashCode( this.actualTextRegion ) &&
        getParent() != null )
    {
        getParent().recalculateTextRegion(true);
    }
}
/**
* Creates a copy of this AST node (and optionally all it's children recursively).
*
* @param shallow whether to only copy this node or also recursively clone
* all child nodes as well.
*
* @return
*/
/**
 * Creates a copy of this AST node, optionally cloning the whole subtree.
 *
 * @param shallow when true only this node is copied; when false all children
 *        are recursively cloned and attached to the copy as well
 * @return the copied node (never shares region instances with the original)
 */
public ASTNode createCopy(boolean shallow) {
    final ASTNode clone = copySingleNode();
    // Preserve both source-location regions on the copy.
    if ( actualTextRegion != null ) {
        clone.actualTextRegion = new TextRegion( actualTextRegion );
    }
    if ( textRegionIncludingAllTokens != null ) {
        clone.textRegionIncludingAllTokens = new TextRegion( textRegionIncludingAllTokens );
    }
    if ( shallow ) {
        return clone;
    }
    // Deep copy: recursively clone every child and attach it to the new node.
    for ( ASTNode child : children ) {
        clone.addChild( child.createCopy( false ) , null );
    }
    return clone;
}
/**
* Returns an <b>independent</b> copy of this node <b>without</b>
* any of it's children.
*
* @return
*/
public abstract ASTNode copySingleNode();
/**
* Check this AST node or any of it's child nodes is of class
* {@link UnparsedContentNode}.
*
* @return
*/
/**
 * Checks whether this subtree contains any {@link UnparsedContentNode}
 * (i.e. whether a parse error was recorded below this node).
 *
 * @return true if at least one UnparsedContentNode exists in this subtree
 */
public final boolean hasErrors()
{
    // The visitor aborts on the first UnparsedContentNode it encounters.
    final boolean[] errorSeen = { false };
    ASTUtils.visitNodesByType( this , new ISimpleASTNodeVisitor<UnparsedContentNode>() {
        @Override
        public boolean visit(UnparsedContentNode node)
        {
            errorSeen[0] = true;
            return false; // stop visiting, one hit is enough
        }
    } , UnparsedContentNode.class );
    return errorSeen[0];
}
/**
* Swap a direct child of this node with some other node.
*
* @param childToSwap
* @param otherNode
*/
public final void swapChild( ASTNode childToSwap, ASTNode otherNode) {
    if ( childToSwap == null ) {
        throw new IllegalArgumentException("childToSwap must not be NULL");
    }
    if ( otherNode == null ) {
        throw new IllegalArgumentException("otherNode must not be NULL");
    }
    assertSupportsChildNodes();
    final int idx = children.indexOf( childToSwap );
    if ( idx == -1 )
    {
        throw new IllegalArgumentException("Node "+childToSwap+" is not a child of "+this);
    }
    // The other node must already be attached somewhere; we swap positions across both parents.
    final ASTNode otherParent = otherNode.getParent();
    if ( otherParent == null ) {
        throw new IllegalArgumentException("Node "+otherNode+" has no parent?");
    }
    final int otherIdx = otherParent.indexOf( otherNode );
    setChild( idx , otherNode );
    otherParent.setChild( otherIdx , childToSwap );
    // Both affected subtrees changed shape, so both cached regions must be rebuilt.
    recalculateTextRegion(true);
    otherParent.recalculateTextRegion( true );
}
/**
* Returns the index of a direct child.
*
* @param node
* @return
*/
public final int indexOf(ASTNode node) {
    // equals-based lookup among direct children; returns -1 when not a direct child.
    return children.indexOf( node );
}
/**
* Inserts a new child node at a specific position.
*
* @param index
* @param newChild
* @param context parse context or <code>null</code>. If the context is not <code>null</code> and
* the node being added is <b>not</b> an instance of {@link UnparsedContentNode} , the parse
* contexts error recovery flag ({@link IParseContext#isRecoveringFromParseError()}) will be reset. See {@link ASTNode#parse(IParseContext)} for
* a detailed explanation on parser error recovery.
* @return
*/
public final ASTNode insertChild(int index,ASTNode newChild,IParseContext context)
{
    try {
        return addChild( index , newChild );
    } finally {
        // Inserting a node counts as successful re-synchronization, so leave error-recovery mode.
        if ( context != null ) {
            context.setRecoveringFromParseError( false );
        }
    }
}
/**
* Replaces a child node at a specific position.
*
* @param index
* @param newChild
*/
public final void setChild( int index, ASTNode newChild)
{
    if ( newChild == null ) {
        throw new IllegalArgumentException("newChild must not be NULL");
    }
    if ( index < 0 || index >= children.size() ) {
        throw new IndexOutOfBoundsException("Invalid index "+index+" ( must be >= 0 and < "+children.size()+")");
    }
    assertSupportsChildNodes();
    children.set( index , newChild );
    // NOTE: the replaced child's parent link is not cleared; only the new child is re-parented.
    newChild.setParent( this );
}
/**
* Returns the Nth child.
*
* @param index
* @return
* @throws IndexOutOfBoundsException if the index is either less than zero or larger than {@link #getChildCount()} -1
*/
public final ASTNode child(int index)
{
    // Explicit bounds check so callers get a descriptive message instead of the List's generic one.
    if ( index < 0 || index >= children.size() ) {
        throw new IndexOutOfBoundsException("Invalid index "+index+" , node "+this+
            " has only "+children.size()+" children");
    }
    return children.get( index );
}
/**
* Adds child nodes to this node.
*
* @param nodes
* @param context parse context or <code>null</code>. If the context is not <code>null</code> and
* the node being added is <b>not</b> an instance of {@link UnparsedContentNode} , the parse
* contexts error recovery flag ({@link IParseContext#isRecoveringFromParseError()}) will be reset. See {@link ASTNode#parse(IParseContext)} for
* a detailed explanation on parser error recovery.
*/
public final void addChildren(Collection<? extends ASTNode> nodes,IParseContext context)
{
    if (nodes == null) {
        throw new IllegalArgumentException("node must not be NULL.");
    }
    boolean recovering = context == null ? false : context.isRecoveringFromParseError();
    try {
        for ( ASTNode node : nodes)
        {
            // A non-UnparsedContentNode means the parser re-synchronized successfully.
            if ( recovering && !(node instanceof UnparsedContentNode) ) {
                recovering = false;
            }
            // NOTE(review): each node is inserted at index 0, so the collection ends up
            // in reverse iteration order among the children — confirm this is intentional.
            addChild( 0 , node );
        }
    } finally {
        if ( context != null ) {
            context.setRecoveringFromParseError( recovering );
        }
    }
}
/**
* Returns whether this AST node supports having child nodes.
*
* <p>If a node does not support having child nodes, calling
* and of the methods that add/change child nodes will trigger
* an {@link UnsupportedOperationException}.
*
* @return
*/
public abstract boolean supportsChildNodes();
/**
* Add a child node.
*
* @param node
* @param context parse context or <code>null</code>. If the context is not <code>null</code> and
* the node being added is <b>not</b> an instance of {@link UnparsedContentNode} , the parse
* contexts error recovery flag ({@link IParseContext#isRecoveringFromParseError()}) will be reset. See {@link ASTNode#parse(IParseContext)} for
* a detailed explanation on parser error recovery.
* @return
*/
public final ASTNode addChild(ASTNode node,IParseContext context)
{
    try {
        // Append at the end of the child list.
        return addChild( children.size() , node );
    } finally {
        // A regular (non-error) node being added means the parser re-synchronized.
        if ( context != null && context.isRecoveringFromParseError() && !(node instanceof UnparsedContentNode) ) {
            context.setRecoveringFromParseError( false );
        }
    }
}
// Core insertion routine shared by all public add/insert methods: inserts the node at the
// given index, merges its text regions into this node's, and re-parents it.
private final ASTNode addChild(int index , ASTNode node)
{
    if (node == null) {
        throw new IllegalArgumentException("node must not be NULL.");
    }
    // all nodes must accept this
    if ( !(node instanceof UnparsedContentNode ) ) {
        assertSupportsChildNodes();
    }
    if ( index == children.size() ) {
        this.children.add( node );
    } else if ( index < 0 ) {
        throw new IndexOutOfBoundsException("Invalid child index "+index);
    } else if ( index < children.size() ) {
        this.children.add( index , node );
    } else {
        throw new IndexOutOfBoundsException("Child index "+index+" is out of range, node "+this+" only has "+getChildCount()+" children.");
    }
    // Fold the child's all-tokens region into ours so whitespace/EOL coverage is preserved.
    if ( node.textRegionIncludingAllTokens != null )
    {
        if ( this.textRegionIncludingAllTokens == null ) {
            this.textRegionIncludingAllTokens = new TextRegion( node.textRegionIncludingAllTokens );
        } else {
            this.textRegionIncludingAllTokens.merge( node.textRegionIncludingAllTokens );
        }
    }
    // Merge the child's actual region (this also propagates up through the parents).
    mergeTextRegion( node.getTextRegion() );
    node.setParent( this );
    return node;
}
// Guard used by all mutating child methods; see supportsChildNodes().
protected final void assertSupportsChildNodes()
{
    if ( ! supportsChildNodes() ) {
        throw new UnsupportedOperationException("Cannot add children to node "+this+" that does not support child nodes");
    }
}
/**
* Returns the number of direct children this node has.
*
* @return
*/
public final int getChildCount() {
    // Direct children only; does not count grandchildren.
    return children.size();
}
/**
* Returns whether this node has any children.
*
* @return
*/
/**
 * Returns whether this node has at least one direct child.
 *
 * @return true if the child list is non-empty
 */
public final boolean hasChildren() {
    return children.size() > 0;
}
/**
* Returns the child nodes of this node.
*
* @return
*/
public final List<ASTNode> getChildren()
{
    // Defensive copy: mutations to the returned list do not affect this node.
    return new ArrayList<ASTNode>( children );
}
// Only called from addChild()/setChild(); the parent link is otherwise immutable from outside.
private final void setParent(ASTNode parent)
{
    this.parent = parent;
}
/**
* Returns the parent node of this node.
*
* @return parent or <code>null</code> if this node has no parent
*/
public final ASTNode getParent()
{
    // null for the root of the tree (or a detached node).
    return parent;
}
/**
* Parse source code (recursive decent parsing).
*
* <p>
* This method delegates to {@link #parseInternal(IParseContext)} and
* takes care of handling any <code>Exceptions</code> thrown by this
* method appropriately.
* </p>
* <p>
* The idiom used to continue parsing after encountering a parse error
* is (see below for a detailed explanation):
* <pre>
* try {
* context.mark();
* // setErrorRecoveryTokens( TOKENS );
* addChild( new SomeASTNode().parseInternal( context );
* } catch(Exception e) {
* addCompilationErrorAndAdvanceParser( e , context );
* } finally {
* context.clearMark();
* // setErrorRecoveryTokens( DEFAULT_TOKENS );
* }
* // continue here regardless of parse error
* </pre>
* </p>
* <p>
* <h3>Parse error recovery</h3>
* </p>
* <p>Parse error recovery is tricky because it involves several different
* parts of the application to interact correctly.</p>
* <p><h4>Part 1 - The scanner</h4></p>
* <p>The {@link IScanner} provides random access to the input stream using the {@link IScanner#setCurrentParseIndex(int)} method.</p>
* <p><h4>Part 2 - The lexer</h4></p>
* <p>The {@link ILexer} internally manages a stack of state information (current line number, line starting offset, current parse index,parsed tokens).
* This state can be remembered/recalled using the {@link ILexer#mark()} , {@link ILexer#reset()} methods. The {@link ILexer#clearMark()} method
* removes the last remembered state from the internal stack.</p>
* <p><h4>Part 3 - {@link #parse(IParseContext)}</h4></p>
* <p>Upon entry, this method remembers the lexer's state by calling {@link ILexer#mark()}. It then invokes {@link #parseInternal(IParseContext)} inside
* a <code>try/finally</code> block. If the <code>parseInternal()</code> method fails with an exception, {@link #addCompilationErrorAndAdvanceParser(Exception, IParseContext)} is invoked.
* The <code>finally</code> block of this method calls {@link ILexer#clearMark()} to remove the no longer needed lexer state information from the lexer's internal stack
* and ensures that even if we saw a {@link OutOfMemoryError}, the lexer's internal stack does not grow infinitely.</p>
* <p><h4>Part 4 - {@link #addCompilationErrorAndAdvanceParser(Exception, IParseContext)}</h4></p>
* <p>This method first invokes {@link ILexer#reset()} to reset the lexer to the state it was when {@link #parse(IParseContext)} got called. It then
* uses {@link ILexer#advanceTo(TokenType[], boolean)} to advance until a suitable token (see {@link #getParseRecoveryTokenTypes()} is found.
* </p>
* <p>All tokens skipped during advancing will combined into a {@link UnparsedContentNode} that is attached to <b>this</b> node.</p>
* <p>If the parse context is <b>not</b> in recovery mode yet (see {@link IParseContext#isRecoveringFromParseError()} , an {@link ICompilationError} will be
* added to the current compilation unit using {@link ICompilationUnit#addMarker(de.codesourcery.jasm16.compiler.IMarker)} and
* the context will switch to error recovery mode by invoking {@link IParseContext#setRecoveringFromParseError(boolean)}}.</p>
* <p>If the parse context was already in recovery mode when {@link #onError(Exception, IParseContext)} got invoked, <b>no</b> compilation error will
* be added to the current compilation unit since we obviously haven't recovered from the last error yet.</p>
* <p><h4>Part 5 - {@link ASTNode#addChild(ASTNode, IParseContext)} and friends</h4></p>
* All {@link ASTNode} methods that actually add one or more new child nodes to a node will reset the parse' contexts error recovery flag when
* the node being added is <b>not</b> an instance of {@link UnparsedContentNode}.</p>
*
* @param context
* @return AST node parsed from the current parse position. Usually that
* will be an instance of the class this method was invoked on but in case
* of compilation errors this method may just return an {@link UnparsedContentNode}
* so be <b>careful</b> when assuming the actual type returned by this method.
*/
public final ASTNode parse(IParseContext context)
{
    // Remember the lexer state so error recovery can rewind to where parsing started.
    context.mark();
    try {
        ASTNode result = parseInternal( context );
        return result;
    }
    catch(Exception e)
    {
        // Attach an UnparsedContentNode and skip ahead to a recovery token; see class javadoc.
        return addCompilationErrorAndAdvanceParser( e , context );
    } finally {
        // Always pop the mark so the lexer's internal state stack cannot grow without bound.
        context.clearMark();
    }
}
// Converts any exception thrown during parsing into an ICompilationError, extracting the
// best available error offset/region and back-filling line/column information from the context.
private final ICompilationError wrapException(Exception e, IParseContext context)
{
    final int errorOffset;
    final ITextRegion errorRange;
    // Determine offset/region from the most specific exception type available.
    if ( e instanceof ParseException )
    {
        errorRange = ((ParseException) e).getTextRegion();
        errorOffset = errorRange.getStartingOffset();
    } else if ( e instanceof ICompilationError) {
        errorOffset = ((ICompilationError) e).getErrorOffset();
        errorRange = ((ICompilationError) e).getLocation();
    } else if ( e instanceof java.text.ParseException ) {
        errorOffset = ((java.text.ParseException) e).getErrorOffset();
        errorRange = new TextRegion(errorOffset,0);
    } else if ( e instanceof EOFException) {
        errorOffset = ((EOFException) e).getErrorOffset();
        errorRange = new TextRegion(errorOffset,0);
    } else {
        // Unknown exception type: fall back to the current parse position.
        errorOffset = context.currentParseIndex();
        errorRange = new TextRegion(errorOffset,0);
    }
    final ICompilationError result;
    if ( e instanceof ICompilationError)
    {
        result = (ICompilationError) e;
    } else
    {
        String msg=e.getMessage();
        if ( StringUtils.isBlank( msg ) ) {
            msg = "< no error message >";
        }
        result = new GenericCompilationError( msg , context.getCompilationUnit() , e );
        result.setErrorOffset( errorOffset );
        result.setLocation( errorRange );
    }
    // Back-fill line number / column / line start from the parse context where missing.
    if ( result.getErrorOffset() != -1 )
    {
        if ( result.getLineNumber() == -1 || result.getColumnNumber() == -1 || result.getLineStartOffset() == -1 )
        {
            if ( result.getLineStartOffset() == -1 ) {
                result.setLineStartOffset( context.getCurrentLineStartOffset() );
            }
            if (result.getLineNumber() == -1) {
                result.setLineNumber( context.getCurrentLineNumber() );
            }
            if (result.getColumnNumber() == -1 )
            {
                int column = result.getErrorOffset() - context.getCurrentLineStartOffset()+1; // columns start at 1
                if ( column > 0 ) {
                    result.setColumnNumber( column );
                } else {
                    // Offset lies on an earlier line; log instead of storing a nonsensical column.
                    LOG.warn("wrapException(): Error offset "+result.getErrorOffset()+" is not on current line "+context.getCurrentLineNumber()+", starting at "+
                        context.getCurrentLineStartOffset());
                }
            }
        }
        if ( result.getLocation() == null && errorRange != null ) {
            result.setLocation( errorRange );
        }
    }
    return result;
}
/**
* Add compilation error and switch parser to recovery mode if it isn't already.
*
* See {@link #addCompilationErrorAndAdvanceParser(String, int, Exception, IParseContext)} for a description of how this method works.
*
* @param e
* @param context
* @return {@link UnparsedContentNode} that was added as a child to <b>this</b> node
*/
protected final ASTNode addCompilationErrorAndAdvanceParser(Exception e , IParseContext context)
{
    // Wrap the raw exception, then recover using the default recovery token set.
    return addCompilationErrorAndAdvanceParser( wrapException( e , context ) , context );
}
// Variant that lets the caller choose which token types terminate error recovery.
protected final ASTNode addCompilationErrorAndAdvanceParser(Exception e , TokenType[] recoveryTokens, IParseContext context)
{
    return addCompilationErrorAndAdvanceParser( wrapException( e , context ) , recoveryTokens , context );
}
protected final ASTNode addCompilationErrorAndAdvanceParser(ICompilationError error, IParseContext context)
{
    // Recover by skipping to EOL or a single-line comment (the default recovery tokens).
    return addCompilationErrorAndAdvanceParser( error , DEFAULT_ERROR_RECOVERY_TOKEN , context );
}
// Core recovery routine: rewinds the lexer, skips to a recovery token, wraps the skipped
// tokens in an UnparsedContentNode and (once per error run) records the compilation error.
protected final ASTNode addCompilationErrorAndAdvanceParser(ICompilationError error, TokenType[] recoveryTokens, IParseContext context)
{
    // Rewind to the state remembered by parse() before skipping forward.
    context.reset();
    final List<IToken> tokens = context.advanceTo( recoveryTokens , false );
    final UnparsedContentNode result = new UnparsedContentNode( error.getMessage() , error.getErrorOffset() , tokens );
    if ( context.hasParserOption( ParserOption.DEBUG_MODE ) ) {
        LOG.error("addCompilationErrorAndAdvanceParser(): [in_parse_error_recovery: "+context.isRecoveringFromParseError() +"] error="+error,error.getCause() );
    }
    addChild( result , context );
    // Only the first error of a recovery run is reported to the compilation unit.
    if ( ! context.isRecoveringFromParseError() )
    {
        context.getCompilationUnit().addMarker( error );
        /*
         * This flag will be reset when an ASTNode that is not a UnparsedContentNode is added to the AST
         * (because this indicates that the parser (at least temporarily) was able to re-synchronize again.
         */
        context.setRecoveringFromParseError( true );
    }
    return result;
}
/**
* Method to be implemented by subclasses, does the actual recursive-descent parsing.
*
* <p>
* Parse exceptions thrown by implementations will be attached to the
* current compilation unit as {@link ICompilationError} instances ; the
* parser will then switch to error recovery mode and advance to the next token that
* has one of the token types returned by {@link #getParseRecoveryTokenTypes()}.
* </p>
* <p>
* See {@link #parse(IParseContext)}} for a detailed description of the error recovery mechanism.
* </p>
* @param context
* @return
* @throws ParseException
*/
protected abstract ASTNode parseInternal(IParseContext context) throws ParseException;
@Override
public String toString()
{
    // Renders as "SimpleClassName { child1 , child2 , ... }".
    final StringBuilder childList = new StringBuilder();
    String separator = "";
    for ( ASTNode child : children ) {
        childList.append( separator ).append( child.toString() );
        separator = " , ";
    }
    return getClass().getSimpleName()+" { "+childList.toString()+" }";
}
/**
* Replace a direct child of this node with another one.
*
* @param child
* @param newNode
*/
public final void replaceChild(ASTNode child , ASTNode newNode ) {
    if ( child == null ) {
        throw new IllegalArgumentException("child must not be NULL");
    }
    assertSupportsChildNodes();
    final int idx = children.indexOf( child );
    if ( idx == -1 ) {
        throw new IllegalArgumentException("Node "+child+" is not a child of "+this);
    }
    setChild( idx , newNode );
    // The subtree changed shape, so the cached text region must be rebuilt (and propagated up).
    recalculateTextRegion(true);
}
/**
* Returns the path to the root node.
*
* @return path to the root node, first element is the root node
* while the last element is THIS node.
*/
/**
 * Returns the path from the root node down to this node.
 *
 * @return array whose first element is the root and whose last element is this node
 */
public final ASTNode[] getPathToRoot() {
    // Walk upwards collecting ancestors, then reverse so the root comes first.
    final List<ASTNode> ancestors = new ArrayList<ASTNode>();
    for ( ASTNode current = this ; current != null ; current = current.getParent() ) {
        ancestors.add( current );
    }
    Collections.reverse( ancestors );
    return ancestors.toArray( new ASTNode[ ancestors.size() ] );
}
/**
* Returns all AST nodes <b>below</b> this AST node
* that overlap with a specific {@link ITextRegion}.
*
* @param visible
* @return
*/
/**
 * Returns the direct children of this node whose text regions overlap
 * the given region.
 *
 * @param visible region to test against
 * @return direct children overlapping the region (never null)
 */
public final List<ASTNode> getNodesInRange(ITextRegion visible)
{
    // Only direct children are inspected; the overlap test delegates to ITextRegion.
    final List<ASTNode> overlapping = new ArrayList<ASTNode>();
    for ( ASTNode child : children )
    {
        final boolean intersects = child.getTextRegion().overlaps( visible );
        if ( intersects ) {
            overlapping.add( child );
        }
    }
    return overlapping;
}
/**
* Recursively discovers the AST node that starts closest
* to a given source code offset.
*
* @param offset
* @return source code node or <code>null</code> if neither
* this node nor any of it's children cover the given offset
*/
public final ASTNode getNodeInRange(int offset)
{
    final ITextRegion region = getTextRegion();
    // Bail out when this subtree does not cover the offset at all.
    if ( region == null || ! region.contains( offset ) )
    {
        return null;
    }
    ASTNode result = this;
    // Recurse into each child, preferring the node whose region starts closest to the offset;
    // leaf children win ties so the most specific node is returned.
    for ( ASTNode child : children ) {
        ASTNode tmp = child.getNodeInRange( offset );
        if ( tmp != null )
        {
            final int delta1 = Math.abs( offset - result.getTextRegion().getStartingOffset() );
            final int delta2 = Math.abs( offset - tmp.getTextRegion().getStartingOffset() );
            if ( delta2 < delta1 || ! child.hasChildren() ) {
                result = tmp;
            }
        }
    }
    return result;
}
}
|
src/main/java/de/codesourcery/jasm16/ast/ASTNode.java
|
/**
* Copyright 2012 Tobias Gierke <tobias.gierke@code-sourcery.de>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package de.codesourcery.jasm16.ast;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import de.codesourcery.jasm16.compiler.GenericCompilationError;
import de.codesourcery.jasm16.compiler.ICompilationError;
import de.codesourcery.jasm16.compiler.ICompilationUnit;
import de.codesourcery.jasm16.exceptions.EOFException;
import de.codesourcery.jasm16.exceptions.ParseException;
import de.codesourcery.jasm16.lexer.ILexer;
import de.codesourcery.jasm16.lexer.IToken;
import de.codesourcery.jasm16.lexer.TokenType;
import de.codesourcery.jasm16.parser.IParseContext;
import de.codesourcery.jasm16.parser.IParser.ParserOption;
import de.codesourcery.jasm16.scanner.IScanner;
import de.codesourcery.jasm16.utils.ITextRegion;
import de.codesourcery.jasm16.utils.TextRegion;
/**
* Abstract base-class of all AST nodes.
*
* <p>AST nodes are created for one or more tokens in the input stream.</p>
* <p>Each AST node keeps track of the source code location ({@link ITextRegion} it
* was created from so editors etc. have an easy time associating source code
* with the AST.</p>
*
* <p>Keeping track of the source code locations is slightly complicated
* because not all tokens (e.g. whitespace,EOL) become part of the AST, so this
* class actually uses two {@link ITextRegion} fields to keep track of the
* source code range covered by the AST node (or it's children) and
* the input that was actually traversed while this subtree was constructed.</p>.
*
* <p>Make sure you understand how {@link #getTextRegion()} , {@link #recalculateTextRegion(boolean)},
* {@link #setTextRegionIncludingAllTokens(ITextRegion)} and {@link #mergeWithAllTokensTextRegion(ASTNode)}
* work.</p>
*
* <p>This class also implements the parse error recovery mechanism, check out {@link #parse(IParseContext)} to
* see how it actually works.</p>
*
* @author tobias.gierke@code-sourcery.de
*/
public abstract class ASTNode
{
private static final Logger LOG = Logger.getLogger(ASTNode.class);
/**
* Default tokens to look for when trying to recover from
* a parse error.
*
* @see IParseContext#setErrorRecoveryTokenTypes(TokenType[])
* @see ILexer#advanceTo(TokenType, boolean)
*/
public static final TokenType[] DEFAULT_ERROR_RECOVERY_TOKEN = new TokenType[]{TokenType.EOL, TokenType.SINGLE_LINE_COMMENT};
private ASTNode parent;
private final List<ASTNode> children = new ArrayList<ASTNode>();
/**
* This text range covers <b>all</b> tokens that were consumed while
* parsing this node (including whitespace, EOL etc.).
*/
private ITextRegion textRegionIncludingAllTokens;
/**
* Cached value of {@link #textRegionIncludingAllTokens} plus {@link #getTextRegion()} of all
* child nodes.
*/
private ITextRegion actualTextRegion;
/**
* Creates a new instance.
*
* <p>This instance will have no parent
* and {@link #textRegionIncludingAllTokens} and {@link #actualTextRegion}
* will be <code>null</code>.</p>
*/
public ASTNode() {
    // Intentionally empty: parent and both text-region fields stay null until populated during parsing.
}
/**
* Creates a new AST node for a given source code location.
*
* @param allTokensRegion text range covered by <b>this</b> node, never <code>null</code>
*/
protected ASTNode(ITextRegion allTokensRegion)
{
    if (allTokensRegion == null) {
        throw new IllegalArgumentException("allTokensRegion must not be NULL.");
    }
    // Defensive copy: later merges on this node must not mutate the caller's region.
    this.textRegionIncludingAllTokens = new TextRegion( allTokensRegion );
}
/**
* Returns the actual source code region covered by
* this AST node (and it's child nodes).
*
* <p>Due to the way parsing works (recursive descent...), an AST node always
* covers at least the text range that is covered by it's child nodes.</p>
*
* <p>
* The actual source code region covered by <b>this</b> node is composed
* of the actual source code regions of all child nodes (=invoking
* {@link #getTextRegion()} on each child) <b>PLUS</b>
* this node's <i>'all-tokens' text range</i> (see .
* </p>
*
* @return
* @see #mergeTextRegion(ITextRegion)
* @see #mergeWithAllTokensTextRegion(ASTNode)
*/
/**
 * Returns the actual source code region covered by this node and its children,
 * lazily recomputing the cached value when a mutation has invalidated it.
 *
 * @return region covering this node's all-tokens range plus all child regions
 */
public final ITextRegion getTextRegion()
{
    // Cache is nulled out by the various merge/set methods; rebuild it on demand.
    // (Original had a redundant duplicated return inside the null branch.)
    if ( actualTextRegion == null )
    {
        recalculateTextRegion(true);
    }
    return actualTextRegion;
}
/**
* Merges the actual source code region covered by a node with
* this node's {@link #textRegionIncludingAllTokens}.
*
* <p>This method is used during expression folding/evaluation to
* preserve the text range covered by an AST node when an AST node
* gets replaced with a newly created node representing calculated value.
* </p>
* <p>Not using this method during expression folding will cause the location
* of all tokens that do not resemble actual AST nodes (read: almost all tokens)
* to be permanently lost.</p>
*
* @param node
* @see ITextRegion#merge(ITextRegion)
*/
protected final void mergeWithAllTokensTextRegion(ASTNode node)
{
    // Delegate using the node's actual (child-inclusive) region, not just its own all-tokens region.
    mergeWithAllTokensTextRegion( node.getTextRegion() );
}
    /**
     * Merges the source code region with this node's {@link #textRegionIncludingAllTokens}.
     *
     * <p>This method is used during expression folding/evaluation to
     * preserve the text range covered by an AST node when an AST node
     * gets replaced with a newly created node representing calculated value.
     * </p>
     * <p>Not using this method during expression folding will cause the location
     * of all tokens that do not resemble actual AST nodes (read: almost all tokens)
     * to be permanently lost.</p>
     *
     * @param range source code region to merge into this node's all-tokens region
     * @see ITextRegion#merge(ITextRegion)
     */
    protected final void mergeWithAllTokensTextRegion(ITextRegion range)
    {
        // Seed the all-tokens region from the already-computed actual region, if any.
        if ( this.textRegionIncludingAllTokens == null && this.actualTextRegion != null ) {
            this.textRegionIncludingAllTokens = this.actualTextRegion;
        }
        // Either start a fresh region (defensive copy) or widen the existing one.
        if ( this.textRegionIncludingAllTokens == null )
        {
            this.textRegionIncludingAllTokens = new TextRegion( range );
        } else {
            this.textRegionIncludingAllTokens.merge( range );
        }
        // Invalidate the cached actual region and force ancestors to recompute,
        // since their cached regions were derived from this node's old value.
        if ( this.actualTextRegion != null ) {
            this.actualTextRegion = null;
            if ( getParent() != null ) { // maybe a parent node already called getTextRegion() on this child...
                getParent().recalculateTextRegion(true);
            }
        }
    }
    /**
     * Widens this node's cached actual text region to also cover the given range,
     * propagating the change up the parent chain when the region actually changed.
     *
     * @param range region to merge into this node's actual text region
     */
    protected final void mergeTextRegion(ITextRegion range)
    {
        // Hash of the region before merging; used below to detect whether anything changed.
        final int oldValue = TextRegion.hashCode( this.actualTextRegion );
        // Seed the actual region from the all-tokens region if nothing was cached yet.
        if ( this.actualTextRegion == null && textRegionIncludingAllTokens != null)
        {
            this.actualTextRegion = new TextRegion( textRegionIncludingAllTokens );
        }
        if ( this.actualTextRegion != null ) {
            this.actualTextRegion.merge( range );
        } else {
            this.actualTextRegion = new TextRegion( range );
        }
        // Only walk up when the region actually changed, to avoid needless recursion.
        if ( oldValue != TextRegion.hashCode( this.actualTextRegion ) && getParent() != null ) {
            getParent().mergeTextRegion( this.actualTextRegion );
        }
    }
    /**
     * Replaces this node's all-tokens text region (defensively copied) and
     * forces recomputation of the cached actual region, including ancestors.
     *
     * @param textRegion new all-tokens region for this node
     */
    protected void setTextRegionIncludingAllTokens( ITextRegion textRegion )
    {
        this.textRegionIncludingAllTokens = new TextRegion( textRegion );
        this.actualTextRegion = null; // invalidate cache before recomputing
        recalculateTextRegion(true);
    }
    /**
     * Recomputes this node's cached actual text region as the union of its
     * all-tokens region and the regions of all child nodes.
     *
     * @param recalculateParents when {@code true} and the region changed,
     * ancestors are recomputed as well
     */
    private void recalculateTextRegion(boolean recalculateParents)
    {
        // Remember the old hash so we only propagate upwards on an actual change.
        final int oldValue = TextRegion.hashCode( this.actualTextRegion );
        // Start from a copy of the all-tokens region (if any) ...
        ITextRegion range = textRegionIncludingAllTokens != null ? new TextRegion( textRegionIncludingAllTokens ) : null;
        // ... and widen it with every child's actual region.
        for ( ASTNode child : this.children)
        {
            if ( range == null ) {
                range = new TextRegion( child.getTextRegion() );
            } else {
                range.merge( child.getTextRegion() );
            }
        }
        this.actualTextRegion = range;
        if ( recalculateParents &&
                oldValue != TextRegion.hashCode( this.actualTextRegion ) &&
                getParent() != null )
        {
            getParent().recalculateTextRegion(true);
        }
    }
/**
* Creates a copy of this AST node (and optionally all it's children recursively).
*
* @param shallow whether to only copy this node or also recursively clone
* all child nodes as well.
*
* @return
*/
public ASTNode createCopy(boolean shallow) {
ASTNode result = copySingleNode();
if ( actualTextRegion != null ) {
result.actualTextRegion = new TextRegion( actualTextRegion );
}
if ( textRegionIncludingAllTokens != null ) {
result.textRegionIncludingAllTokens = new TextRegion( textRegionIncludingAllTokens );
}
if ( ! shallow ) {
for ( ASTNode child : children ) {
final ASTNode copy = child.createCopy( shallow );
result.addChild( copy , null );
}
}
return result;
}
    /**
     * Returns an <b>independent</b> copy of this node <b>without</b>
     * any of it's children.
     *
     * @return shallow copy of this node only; children are not copied
     */
    public abstract ASTNode copySingleNode();
/**
* Check this AST node or any of it's child nodes is of class
* {@link UnparsedContentNode}.
*
* @return
*/
public final boolean hasErrors()
{
final boolean[] hasErrors = new boolean[] { false };
final ISimpleASTNodeVisitor<UnparsedContentNode> visitor = new ISimpleASTNodeVisitor<UnparsedContentNode>() {
@Override
public boolean visit(UnparsedContentNode node)
{
hasErrors[0] = true;
return false;
}
};
ASTUtils.visitNodesByType( this , visitor , UnparsedContentNode.class );
return hasErrors[0];
}
    /**
     * Swap a direct child of this node with some other node.
     *
     * @param childToSwap direct child of this node to be swapped out
     * @param otherNode node (attached to some parent) to swap in
     */
    public final void swapChild( ASTNode childToSwap, ASTNode otherNode) {
        if ( childToSwap == null ) {
            throw new IllegalArgumentException("childToSwap must not be NULL");
        }
        if ( otherNode == null ) {
            throw new IllegalArgumentException("otherNode must not be NULL");
        }
        assertSupportsChildNodes();
        final int idx = children.indexOf( childToSwap );
        if ( idx == -1 )
        {
            throw new IllegalArgumentException("Node "+childToSwap+" is not a child of "+this);
        }
        // The other node must be attached somewhere; its slot receives childToSwap.
        final ASTNode otherParent = otherNode.getParent();
        if ( otherParent == null ) {
            throw new IllegalArgumentException("Node "+otherNode+" has no parent?");
        }
        final int otherIdx = otherParent.indexOf( otherNode );
        // Cross-assign the two children (setChild also re-parents them) ...
        setChild( idx , otherNode );
        otherParent.setChild( otherIdx , childToSwap );
        // ... then refresh the cached text regions of both affected parents.
        recalculateTextRegion(true);
        otherParent.recalculateTextRegion( true );
    }
/**
* Returns the index of a direct child.
*
* @param node
* @return
*/
public final int indexOf(ASTNode node) {
return children.indexOf( node );
}
/**
* Inserts a new child node at a specific position.
*
* @param index
* @param newChild
* @param context parse context or <code>null</code>. If the context is not <code>null</code> and
* the node being added is <b>not</b> an instance of {@link UnparsedContentNode} , the parse
* contexts error recovery flag ({@link IParseContext#isRecoveringFromParseError()}) will be reset. See {@link ASTNode#parse(IParseContext)} for
* a detailed explanation on parser error recovery.
* @return
*/
public final ASTNode insertChild(int index,ASTNode newChild,IParseContext context)
{
try {
return addChild( index , newChild );
} finally {
if ( context != null ) {
context.setRecoveringFromParseError( false );
}
}
}
    /**
     * Replaces a child node at a specific position.
     *
     * @param index position of the child to replace
     * @param newChild replacement node, must not be {@code null}
     */
    public final void setChild( int index, ASTNode newChild)
    {
        if ( newChild == null ) {
            throw new IllegalArgumentException("newChild must not be NULL");
        }
        if ( index < 0 || index >= children.size() ) {
            throw new IndexOutOfBoundsException("Invalid index "+index+" ( must be >= 0 and < "+children.size()+")");
        }
        assertSupportsChildNodes();
        children.set( index , newChild );
        newChild.setParent( this ); // re-parent the replacement
    }
/**
* Returns the Nth child.
*
* @param index
* @return
* @throws IndexOutOfBoundsException if the index is either less than zero or larger than {@link #getChildCount()} -1
*/
public final ASTNode child(int index)
{
if ( index < 0 || index >= children.size() ) {
throw new IndexOutOfBoundsException("Invalid index "+index+" , node "+this+
" has only "+children.size()+" children");
}
return children.get( index );
}
    /**
     * Adds child nodes to this node.
     *
     * @param nodes nodes to add, must not be {@code null}
     * @param context parse context or <code>null</code>. If the context is not <code>null</code> and
     * the node being added is <b>not</b> an instance of {@link UnparsedContentNode} , the parse
     * contexts error recovery flag ({@link IParseContext#isRecoveringFromParseError()}) will be reset. See {@link ASTNode#parse(IParseContext)} for
     * a detailed explanation on parser error recovery.
     */
    public final void addChildren(Collection<? extends ASTNode> nodes,IParseContext context)
    {
        if (nodes == null) {
            throw new IllegalArgumentException("node must not be NULL.");
        }
        boolean recovering = context == null ? false : context.isRecoveringFromParseError();
        try {
            for ( ASTNode node : nodes)
            {
                // A non-error node means the parser re-synchronized successfully.
                if ( recovering && !(node instanceof UnparsedContentNode) ) {
                    recovering = false;
                }
                // NOTE(review): each node is inserted at index 0, so the added nodes
                // end up in REVERSE iteration order — confirm this is intended.
                addChild( 0 , node );
            }
        } finally {
            if ( context != null ) {
                context.setRecoveringFromParseError( recovering );
            }
        }
    }
    /**
     * Returns whether this AST node supports having child nodes.
     *
     * <p>If a node does not support having child nodes, calling
     * and of the methods that add/change child nodes will trigger
     * an {@link UnsupportedOperationException}.
     *
     * @return {@code true} if this node may have children
     */
    public abstract boolean supportsChildNodes();
    /**
     * Add a child node.
     *
     * @param node node to append as last child
     * @param context parse context or <code>null</code>. If the context is not <code>null</code> and
     * the node being added is <b>not</b> an instance of {@link UnparsedContentNode} , the parse
     * contexts error recovery flag ({@link IParseContext#isRecoveringFromParseError()}) will be reset. See {@link ASTNode#parse(IParseContext)} for
     * a detailed explanation on parser error recovery.
     * @return the added node
     */
    public final ASTNode addChild(ASTNode node,IParseContext context)
    {
        try {
            return addChild( children.size() , node );
        } finally {
            // Adding a successfully parsed (non-error) node ends error recovery.
            if ( context != null && context.isRecoveringFromParseError() && !(node instanceof UnparsedContentNode) ) {
                context.setRecoveringFromParseError( false );
            }
        }
    }
    /**
     * Inserts a child at the given index and updates this node's cached
     * text regions to also cover the new child.
     *
     * @param index insert position; {@code children.size()} appends
     * @param node node to insert, must not be {@code null}
     * @return the inserted node
     */
    private final ASTNode addChild(int index , ASTNode node)
    {
        if (node == null) {
            throw new IllegalArgumentException("node must not be NULL.");
        }
        // all nodes must accept this
        if ( !(node instanceof UnparsedContentNode ) ) {
            assertSupportsChildNodes();
        }
        if ( index == children.size() ) {
            this.children.add( node );
        } else if ( index < 0 ) {
            throw new IndexOutOfBoundsException("Invalid child index "+index);
        } else if ( index < children.size() ) {
            this.children.add( index , node );
        } else {
            throw new IndexOutOfBoundsException("Child index "+index+" is out of range, node "+this+" only has "+getChildCount()+" children.");
        }
        // Fold the child's all-tokens region into ours so token locations survive.
        if ( node.textRegionIncludingAllTokens != null )
        {
            if ( this.textRegionIncludingAllTokens == null ) {
                this.textRegionIncludingAllTokens = new TextRegion( node.textRegionIncludingAllTokens );
            } else {
                this.textRegionIncludingAllTokens.merge( node.textRegionIncludingAllTokens );
            }
        }
        // Widen the actual region (propagates up the parent chain on change).
        mergeTextRegion( node.getTextRegion() );
        node.setParent( this );
        return node;
    }
    /**
     * Throws {@link UnsupportedOperationException} if this node does not
     * support having child nodes (see {@link #supportsChildNodes()}).
     */
    protected final void assertSupportsChildNodes()
    {
        if ( ! supportsChildNodes() ) {
            throw new UnsupportedOperationException("Cannot add children to node "+this+" that does not support child nodes");
        }
    }
/**
* Returns the number of direct children this node has.
*
* @return
*/
public final int getChildCount() {
return children.size();
}
/**
* Returns whether this node has any children.
*
* @return
*/
public final boolean hasChildren() {
return ! children.isEmpty();
}
/**
* Returns the child nodes of this node.
*
* @return
*/
public final List<ASTNode> getChildren()
{
return new ArrayList<ASTNode>( children );
}
    /**
     * Sets this node's parent; only invoked by the child-management methods.
     *
     * @param parent new parent node
     */
    private final void setParent(ASTNode parent)
    {
        this.parent = parent;
    }
/**
* Returns the parent node of this node.
*
* @return parent or <code>null</code> if this node has no parent
*/
public final ASTNode getParent()
{
return parent;
}
/**
* Parse source code (recursive decent parsing).
*
* <p>
* This method delegates to {@link #parseInternal(IParseContext)} and
* takes care of handling any <code>Exceptions</code> thrown by this
* method appropriately.
* </p>
* <p>
* The idiom used to continue parsing after encountering a parse error
* is (see below for a detailed explanation):
* <pre>
* try {
* context.mark();
* // setErrorRecoveryTokens( TOKENS );
* addChild( new SomeASTNode().parseInternal( context );
* } catch(Exception e) {
* addCompilationErrorAndAdvanceParser( e , context );
* } finally {
* context.clearMark();
* // setErrorRecoveryTokens( DEFAULT_TOKENS );
* }
* // continue here regardless of parse error
* </pre>
* </p>
* <p>
* <h3>Parse error recovery</h3>
* </p>
* <p>Parse error recovery is tricky because it involves several different
* parts of the application to interact correctly.</p>
* <p><h4>Part 1 - The scanner</h4></p>
* <p>The {@link IScanner} provides random access to the input stream using the {@link IScanner#setCurrentParseIndex(int)} method.</p>
* <p><h4>Part 2 - The lexer</h4></p>
* <p>The {@link ILexer} internally manages a stack of state information (current line number, line starting offset, current parse index,parsed tokens).
* This state can be remembered/recalled using the {@link ILexer#mark()} , {@link ILexer#reset()} methods. The {@link ILexer#clearMark()} method
* removes the last remembered state from the internal stack.</p>
* <p><h4>Part 3 - {@link #parse(IParseContext)}</h4></p>
* <p>Upon entry, this method remembers the lexer's state by calling {@link ILexer#mark()}. It then invokes {@link #parseInternal(IParseContext)} inside
* a <code>try/finally</code> block. If the <code>parseInternal()</code> method fails with an exception, {@link #addCompilationErrorAndAdvanceParser(Exception, IParseContext)} is invoked.
* The <code>finally</code> block of this method calls {@link ILexer#clearMark()} to remove the no longer needed lexer state information from the lexer's internal stack
* and ensures that even if we saw a {@link OutOfMemoryError}, the lexer's internal stack does not grow infinitely.</p>
* <p><h4>Part 4 - {@link #addCompilationErrorAndAdvanceParser(Exception, IParseContext)}</h4></p>
* <p>This method first invokes {@link ILexer#reset()} to reset the lexer to the state it was when {@link #parse(IParseContext)} got called. It then
* uses {@link ILexer#advanceTo(TokenType[], boolean)} to advance until a suitable token (see {@link #getParseRecoveryTokenTypes()} is found.
* </p>
* <p>All tokens skipped during advancing will combined into a {@link UnparsedContentNode} that is attached to <b>this</b> node.</p>
* <p>If the parse context is <b>not</b> in recovery mode yet (see {@link IParseContext#isRecoveringFromParseError()} , an {@link ICompilationError} will be
* added to the current compilation unit using {@link ICompilationUnit#addMarker(de.codesourcery.jasm16.compiler.IMarker)} and
* the context will switch to error recovery mode by invoking {@link IParseContext#setRecoveringFromParseError(boolean)}}.</p>
* <p>If the parse context was already in recovery mode when {@link #onError(Exception, IParseContext)} got invoked, <b>no</b> compilation error will
* be added to the current compilation unit since we obviously haven't recovered from the last error yet.</p>
* <p><h4>Part 5 - {@link ASTNode#addChild(ASTNode, IParseContext)} and friends</h4></p>
* All {@link ASTNode} methods that actually add one or more new child nodes to a node will reset the parse' contexts error recovery flag when
* the node being added is <b>not</b> an instance of {@link UnparsedContentNode}.</p>
*
* @param context
* @return AST node parsed from the current parse position. Usually that
* will be an instance of the class this method was invoked on but in case
* of compilation errors this method may just return an {@link UnparsedContentNode}
* so be <b>careful</b> when assuming the actual type returned by this method.
*/
public final ASTNode parse(IParseContext context)
{
context.mark();
try {
ASTNode result = parseInternal( context );
return result;
}
catch(Exception e)
{
return addCompilationErrorAndAdvanceParser( e , context );
} finally {
context.clearMark();
}
}
    /**
     * Converts an arbitrary exception into an {@link ICompilationError},
     * filling in as much location information (offset, range, line, column)
     * as can be derived from the exception type and the parse context.
     *
     * @param e exception to wrap
     * @param context parse context used to fill in missing location data
     * @return compilation error describing the exception
     */
    private final ICompilationError wrapException(Exception e, IParseContext context)
    {
        final int errorOffset;
        final ITextRegion errorRange;
        // Extract offset/range from the exception types that carry them;
        // otherwise fall back to the context's current parse position.
        if ( e instanceof ParseException )
        {
            errorRange = ((ParseException) e).getTextRegion();
            errorOffset = errorRange.getStartingOffset();
        } else if ( e instanceof ICompilationError) {
            errorOffset = ((ICompilationError) e).getErrorOffset();
            errorRange = ((ICompilationError) e).getLocation();
        } else if ( e instanceof java.text.ParseException ) {
            errorOffset = ((java.text.ParseException) e).getErrorOffset();
            errorRange = new TextRegion(errorOffset,0);
        } else if ( e instanceof EOFException) {
            errorOffset = ((EOFException) e).getErrorOffset();
            errorRange = new TextRegion(errorOffset,0);
        } else {
            errorOffset = context.currentParseIndex();
            errorRange = new TextRegion(errorOffset,0);
        }
        // Reuse the exception when it already is a compilation error,
        // otherwise wrap it in a generic one.
        final ICompilationError result;
        if ( e instanceof ICompilationError)
        {
            result = (ICompilationError) e;
        } else
        {
            String msg=e.getMessage();
            if ( StringUtils.isBlank( msg ) ) {
                msg = "< no error message >";
            }
            result = new GenericCompilationError( msg , context.getCompilationUnit() , e );
            result.setErrorOffset( errorOffset );
            result.setLocation( errorRange );
        }
        // Backfill line/column information that the error does not carry yet.
        if ( result.getErrorOffset() != -1 )
        {
            if ( result.getLineNumber() == -1 || result.getColumnNumber() == -1 || result.getLineStartOffset() == -1 )
            {
                if ( result.getLineStartOffset() == -1 ) {
                    result.setLineStartOffset( context.getCurrentLineStartOffset() );
                }
                if (result.getLineNumber() == -1) {
                    result.setLineNumber( context.getCurrentLineNumber() );
                }
                if (result.getColumnNumber() == -1 )
                {
                    int column = result.getErrorOffset() - context.getCurrentLineStartOffset()+1; // columns start at 1
                    if ( column > 0 ) {
                        result.setColumnNumber( column );
                    } else {
                        LOG.warn("wrapException(): Error offset "+result.getErrorOffset()+" is not on current line "+context.getCurrentLineNumber()+", starting at "+
                                context.getCurrentLineStartOffset());
                    }
                }
            }
            if ( result.getLocation() == null && errorRange != null ) {
                result.setLocation( errorRange );
            }
        }
        return result;
    }
/**
* Add compilation error and switch parser to recovery mode if it isn't already.
*
* See {@link #addCompilationErrorAndAdvanceParser(String, int, Exception, IParseContext)} for a description of how this method works.
*
* @param e
* @param context
* @return {@link UnparsedContentNode} that was added as a child to <b>this</b> node
*/
protected final ASTNode addCompilationErrorAndAdvanceParser(Exception e , IParseContext context)
{
return addCompilationErrorAndAdvanceParser( wrapException( e , context ) , context );
}
protected final ASTNode addCompilationErrorAndAdvanceParser(Exception e , TokenType[] recoveryTokens, IParseContext context)
{
return addCompilationErrorAndAdvanceParser( wrapException( e , context ) , recoveryTokens , context );
}
protected final ASTNode addCompilationErrorAndAdvanceParser(ICompilationError error, IParseContext context)
{
return addCompilationErrorAndAdvanceParser( error , DEFAULT_ERROR_RECOVERY_TOKEN , context );
}
    /**
     * Resets the lexer, skips input up to the next recovery token, wraps the
     * skipped tokens in an {@link UnparsedContentNode} attached to this node
     * and — unless recovery is already in progress — records the error on the
     * compilation unit and switches the context into recovery mode.
     *
     * @param error error to record
     * @param recoveryTokens token types the parser may resynchronize on
     * @param context current parse context
     * @return the {@link UnparsedContentNode} added as a child of this node
     */
    protected final ASTNode addCompilationErrorAndAdvanceParser(ICompilationError error, TokenType[] recoveryTokens, IParseContext context)
    {
        // Rewind the lexer to the state remembered by parse(), then skip
        // forward to the next token we can resynchronize on.
        context.reset();
        final List<IToken> tokens = context.advanceTo( recoveryTokens , false );
        final UnparsedContentNode result = new UnparsedContentNode( error.getMessage() , error.getErrorOffset() , tokens );
        if ( context.hasParserOption( ParserOption.DEBUG_MODE ) ) {
            LOG.error("addCompilationErrorAndAdvanceParser(): [in_parse_error_recovery: "+context.isRecoveringFromParseError() +"] error="+error,error.getCause() );
        }
        addChild( result , context );
        // Only report the first error of a recovery phase; follow-up failures
        // are usually just consequences of the first one.
        if ( ! context.isRecoveringFromParseError() )
        {
            context.getCompilationUnit().addMarker( error );
            /*
             * This flag will be reset when an ASTNode that is not a UnparsedContentNode is added to the AST
             * (because this indicates that the parser (at least temporarily) was able to re-synchronize again.
             */
            context.setRecoveringFromParseError( true );
        }
        return result;
    }
    /**
     * Method to be implemented by subclasses, does the actual recursive-descent parsing.
     *
     * <p>
     * Parse exceptions thrown by implementations will be attached to the
     * current compilation unit as {@link ICompilationError} instances ; the
     * parser will then switch to error recovery mode and advance to the next token that
     * has one of the token types returned by {@link #getParseRecoveryTokenTypes()}.
     * </p>
     * <p>
     * See {@link #parse(IParseContext)}} for a detailed description of the error recovery mechanism.
     * </p>
     * @param context current parse context
     * @return node parsed from the current parse position
     * @throws ParseException on malformed input; handled by {@link #parse(IParseContext)}
     */
    protected abstract ASTNode parseInternal(IParseContext context) throws ParseException;
@Override
public String toString()
{
final StringBuilder builder = new StringBuilder();
for (Iterator<ASTNode> it = children.iterator(); it.hasNext();) {
ASTNode child = it.next();
builder.append( child.toString() );
if ( it.hasNext() ) {
builder.append(" , ");
}
}
return getClass().getSimpleName()+" { "+builder.toString()+" }";
}
    /**
     * Replace a direct child of this node with another one.
     *
     * @param child existing direct child to be replaced
     * @param newNode replacement node
     */
    public final void replaceChild(ASTNode child , ASTNode newNode ) {
        if ( child == null ) {
            throw new IllegalArgumentException("child must not be NULL");
        }
        assertSupportsChildNodes();
        final int idx = children.indexOf( child );
        if ( idx == -1 ) {
            throw new IllegalArgumentException("Node "+child+" is not a child of "+this);
        }
        setChild( idx , newNode );
        // The replacement may cover a different source range, so refresh the cache.
        recalculateTextRegion(true);
    }
/**
* Returns the path to the root node.
*
* @return path to the root node, first element is the root node
* while the last element is THIS node.
*/
public final ASTNode[] getPathToRoot() {
List<ASTNode> path = new ArrayList<ASTNode>();
ASTNode current = this;
do {
path.add( current );
current = current.getParent();
} while( current != null );
Collections.reverse( path );
return path.toArray( new ASTNode[ path.size() ] );
}
/**
* Returns all AST nodes <b>below</b> this AST node
* that overlap with a specific {@link ITextRegion}.
*
* @param visible
* @return
*/
public final List<ASTNode> getNodesInRange(ITextRegion visible)
{
final List<ASTNode> result = new ArrayList<ASTNode>();
for ( ASTNode child : children )
{
if ( child.getTextRegion().overlaps( visible ) ) {
result.add( child );
}
}
return result;
}
    /**
     * Recursively discovers the AST node that starts closest
     * to a given source code offset.
     *
     * @param offset absolute source code offset
     * @return source code node or <code>null</code> if neither
     * this node nor any of it's children cover the given offset
     */
    public final ASTNode getNodeInRange(int offset)
    {
        if ( ! getTextRegion().contains( offset ) )
        {
            return null;
        }
        ASTNode result = this;
        for ( ASTNode child : children ) {
            ASTNode tmp = child.getNodeInRange( offset );
            if ( tmp != null )
            {
                // Prefer the candidate whose starting offset is closest to the
                // requested offset; a leaf child always wins over its parent.
                final int delta1 = Math.abs( offset - result.getTextRegion().getStartingOffset() );
                final int delta2 = Math.abs( offset - tmp.getTextRegion().getStartingOffset() );
                if ( delta2 < delta1 || ! child.hasChildren() ) {
                    result = tmp;
                }
            }
        }
        return result;
    }
}
|
Fixed spurious NPE in ASTNode#getNodeInRange()
|
src/main/java/de/codesourcery/jasm16/ast/ASTNode.java
|
Fixed spurious NPE in ASTNode#getNodeInRange()
|
|
Java
|
apache-2.0
|
c3b7957edcb35311835198315eb9492eaa1b46a2
| 0
|
alexparvulescu/jackrabbit-oak,catholicon/jackrabbit-oak,code-distillery/jackrabbit-oak,FlakyTestDetection/jackrabbit-oak,catholicon/jackrabbit-oak,catholicon/jackrabbit-oak,stillalex/jackrabbit-oak,code-distillery/jackrabbit-oak,anchela/jackrabbit-oak,code-distillery/jackrabbit-oak,francescomari/jackrabbit-oak,stillalex/jackrabbit-oak,anchela/jackrabbit-oak,FlakyTestDetection/jackrabbit-oak,catholicon/jackrabbit-oak,alexparvulescu/jackrabbit-oak,stillalex/jackrabbit-oak,code-distillery/jackrabbit-oak,stillalex/jackrabbit-oak,FlakyTestDetection/jackrabbit-oak,alexkli/jackrabbit-oak,alexparvulescu/jackrabbit-oak,catholicon/jackrabbit-oak,anchela/jackrabbit-oak,francescomari/jackrabbit-oak,alexkli/jackrabbit-oak,francescomari/jackrabbit-oak,code-distillery/jackrabbit-oak,anchela/jackrabbit-oak,alexkli/jackrabbit-oak,alexkli/jackrabbit-oak,stillalex/jackrabbit-oak,alexparvulescu/jackrabbit-oak,francescomari/jackrabbit-oak,alexparvulescu/jackrabbit-oak,FlakyTestDetection/jackrabbit-oak,francescomari/jackrabbit-oak,alexkli/jackrabbit-oak,anchela/jackrabbit-oak,FlakyTestDetection/jackrabbit-oak
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.jackrabbit.oak.index;
import java.io.Closeable;
import java.io.File;
import java.io.IOException;
import java.io.OutputStream;
import java.util.Collections;
import java.util.Map;
import java.util.Properties;
import com.codahale.metrics.MetricRegistry;
import com.google.common.base.Stopwatch;
import com.google.common.io.Closer;
import org.apache.commons.io.FileUtils;
import org.apache.jackrabbit.oak.api.CommitFailedException;
import org.apache.jackrabbit.oak.api.PropertyState;
import org.apache.jackrabbit.oak.api.Type;
import org.apache.jackrabbit.oak.plugins.index.CompositeIndexEditorProvider;
import org.apache.jackrabbit.oak.plugins.index.CorruptIndexHandler;
import org.apache.jackrabbit.oak.plugins.index.IndexConstants;
import org.apache.jackrabbit.oak.plugins.index.IndexEditorProvider;
import org.apache.jackrabbit.oak.plugins.index.IndexUpdate;
import org.apache.jackrabbit.oak.plugins.index.IndexUpdateCallback;
import org.apache.jackrabbit.oak.plugins.index.NodeTraversalCallback;
import org.apache.jackrabbit.oak.plugins.index.importer.IndexerInfo;
import org.apache.jackrabbit.oak.plugins.index.lucene.directory.DirectoryFactory;
import org.apache.jackrabbit.oak.plugins.index.lucene.directory.FSDirectoryFactory;
import org.apache.jackrabbit.oak.plugins.index.progress.MetricRateEstimator;
import org.apache.jackrabbit.oak.plugins.index.progress.NodeCounterMBeanEstimator;
import org.apache.jackrabbit.oak.plugins.index.property.PropertyIndexEditorProvider;
import org.apache.jackrabbit.oak.plugins.memory.MemoryNodeStore;
import org.apache.jackrabbit.oak.plugins.memory.PropertyStates;
import org.apache.jackrabbit.oak.plugins.metric.MetricStatisticsProvider;
import org.apache.jackrabbit.oak.spi.commit.CommitInfo;
import org.apache.jackrabbit.oak.spi.commit.EditorDiff;
import org.apache.jackrabbit.oak.spi.commit.EmptyHook;
import org.apache.jackrabbit.oak.spi.commit.VisibleEditor;
import org.apache.jackrabbit.oak.spi.state.NodeBuilder;
import org.apache.jackrabbit.oak.spi.state.NodeState;
import org.apache.jackrabbit.oak.spi.state.NodeStore;
import org.apache.jackrabbit.oak.stats.StatisticsProvider;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static com.google.common.base.Preconditions.checkNotNull;
import static java.util.Arrays.asList;
import static org.apache.jackrabbit.oak.plugins.index.IndexConstants.ASYNC_PROPERTY_NAME;
public class OutOfBandIndexer implements Closeable, IndexUpdateCallback, NodeTraversalCallback {
private final Logger log = LoggerFactory.getLogger(getClass());
/**
* Index lane name which is used for indexing
*/
private static final String REINDEX_LANE = "offline-reindex-async";
/**
* Property name where previous value of 'async' is stored
*/
private static final String ASYNC_PREVIOUS = "async-previous";
/**
* Value stored in previous async property if the index is not async
* i.e. when a sync index is reindexed in out of band mode
*/
private static final String ASYNC_PREVIOUS_NONE = "none";
/**
* Directory name in output directory under which indexes are
* stored
*/
public static final String LOCAL_INDEX_ROOT_DIR = "indexes";
/**
* Checkpoint value which indicate that head state needs to be used
* This would be mostly used for testing purpose
*/
private static final String HEAD_AS_CHECKPOINT = "head";
private final Closer closer = Closer.create();
private final IndexHelper indexHelper;
private final String checkpoint;
private Map<String, String> checkpointInfo = Collections.emptyMap();
private NodeStore copyOnWriteStore;
private File localIndexDir;
//TODO Support for providing custom index definition i.e. where definition is not
//present in target repository
public OutOfBandIndexer(IndexHelper indexHelper, String checkpoint) {
this.indexHelper = checkNotNull(indexHelper);
this.checkpoint = checkNotNull(checkpoint);
}
public void reindex() throws CommitFailedException, IOException {
Stopwatch w = Stopwatch.createStarted();
NodeState checkpointedState = retrieveNodeStateForCheckpoint();
copyOnWriteStore = new MemoryNodeStore(checkpointedState);
NodeState baseState = copyOnWriteStore.getRoot();
//TODO Check for indexPaths being empty
log.info("Proceeding to index {} upto checkpoint {} {}", indexHelper.getIndexPaths(), checkpoint, checkpointInfo);
switchIndexLanesAndReindexFlag();
preformIndexUpdate(baseState);
writeMetaInfo();
File destDir = copyIndexFilesToOutput();
log.info("Indexing completed for indexes {} in {} and index files are copied to {}",
indexHelper.getIndexPaths(), w, IndexCommand.getPath(destDir));
}
private File getLocalIndexDir() throws IOException {
if (localIndexDir == null) {
localIndexDir = new File(indexHelper.getWorkDir(), LOCAL_INDEX_ROOT_DIR);
FileUtils.forceMkdir(localIndexDir);
}
return localIndexDir;
}
@Override
public void close() throws IOException {
closer.close();
}
//~---------------------------------------------------< callbacks >
@Override
public void indexUpdate() throws CommitFailedException {
}
@Override
public void traversedNode(PathSource pathSource) throws CommitFailedException {
}
private void preformIndexUpdate(NodeState baseState) throws IOException, CommitFailedException {
NodeBuilder builder = copyOnWriteStore.getRoot().builder();
IndexUpdate indexUpdate = new IndexUpdate(
createIndexEditorProvider(),
REINDEX_LANE,
copyOnWriteStore.getRoot(),
builder,
this,
this,
CommitInfo.EMPTY,
CorruptIndexHandler.NOOP
);
configureEstimators(indexUpdate);
//Do not use EmptyState as before otherwise the IndexUpdate would
//unnecessary traverse the whole repo post reindexing. With use of baseState
//It would only traverse the diff i.e. those index definitions paths
//whose lane has been changed
NodeState before = baseState;
NodeState after = copyOnWriteStore.getRoot();
CommitFailedException exception =
EditorDiff.process(VisibleEditor.wrap(indexUpdate), before, after);
if (exception != null) {
throw exception;
}
}
private IndexEditorProvider createIndexEditorProvider() throws IOException {
IndexEditorProvider lucene = createLuceneEditorProvider();
IndexEditorProvider property = new PropertyIndexEditorProvider().with(indexHelper.getMountInfoProvider());
return CompositeIndexEditorProvider.compose(asList(lucene, property));
}
private IndexEditorProvider createLuceneEditorProvider() throws IOException {
LuceneIndexHelper luceneIndexHelper = indexHelper.getLuceneIndexHelper();
DirectoryFactory dirFactory = new FSDirectoryFactory(getLocalIndexDir());
luceneIndexHelper.setDirectoryFactory(dirFactory);
return luceneIndexHelper.createEditorProvider();
}
private void switchIndexLanesAndReindexFlag() throws CommitFailedException {
NodeBuilder builder = copyOnWriteStore.getRoot().builder();
for (String indexPath : indexHelper.getIndexPaths()) {
//TODO Do it only for lucene indexes for now
NodeBuilder idxBuilder = NodeStoreUtils.childBuilder(builder, indexPath);
idxBuilder.setProperty(IndexConstants.REINDEX_PROPERTY_NAME, true);
switchLane(idxBuilder);
}
copyOnWriteStore.merge(builder, EmptyHook.INSTANCE, CommitInfo.EMPTY);
log.info("Switched the async lane for indexes at {} to {} and marked them for reindex", indexHelper.getIndexPaths(), REINDEX_LANE);
}
private NodeState retrieveNodeStateForCheckpoint() {
NodeState checkpointedState;
if (HEAD_AS_CHECKPOINT.equals(checkpoint)) {
checkpointedState = indexHelper.getNodeStore().getRoot();
log.warn("Using head state for indexing. Such an index cannot be imported back");
} else {
checkpointedState = indexHelper.getNodeStore().retrieve(checkpoint);
checkNotNull(checkpointedState, "Not able to retrieve revision referred via checkpoint [%s]", checkpoint);
checkpointInfo = indexHelper.getNodeStore().checkpointInfo(checkpoint);
}
return checkpointedState;
}
/**
* Make a copy of current async value and replace it with one required for offline reindexing
*/
static void switchLane(NodeBuilder idxBuilder) {
PropertyState currentAsyncState = idxBuilder.getProperty(ASYNC_PROPERTY_NAME);
PropertyState newAsyncState = PropertyStates.createProperty(ASYNC_PROPERTY_NAME, REINDEX_LANE, Type.STRING);
PropertyState previousAsyncState;
if (currentAsyncState == null) {
previousAsyncState = PropertyStates.createProperty(ASYNC_PREVIOUS, ASYNC_PREVIOUS_NONE);
} else {
//Ensure that previous state is copied with correct type
if (currentAsyncState.isArray()) {
previousAsyncState = PropertyStates.createProperty(ASYNC_PREVIOUS, currentAsyncState.getValue(Type.STRINGS), Type.STRINGS);
} else {
previousAsyncState = PropertyStates.createProperty(ASYNC_PREVIOUS, currentAsyncState.getValue(Type.STRING), Type.STRING);
}
}
idxBuilder.setProperty(previousAsyncState);
idxBuilder.setProperty(newAsyncState);
}
private void writeMetaInfo() throws IOException {
new IndexerInfo(getLocalIndexDir(), checkpoint).save();
}
private File copyIndexFilesToOutput() throws IOException {
File destDir = new File(indexHelper.getOutputDir(), getLocalIndexDir().getName());
FileUtils.moveDirectoryToDirectory(getLocalIndexDir(), indexHelper.getOutputDir(), true);
return destDir;
}
/**
 * Attaches progress estimators to the index update: a metrics-based traversal
 * rate estimator (only when a MetricStatisticsProvider is configured) and a
 * node-count estimator backed by the node counter MBean.
 */
private void configureEstimators(IndexUpdate indexUpdate) {
    final StatisticsProvider stats = indexHelper.getStatisticsProvider();
    if (stats instanceof MetricStatisticsProvider) {
        final MetricRegistry metricRegistry = ((MetricStatisticsProvider) stats).getRegistry();
        indexUpdate.setTraversalRateEstimator(new MetricRateEstimator(REINDEX_LANE, metricRegistry));
    }
    indexUpdate.setNodeCountEstimator(new NodeCounterMBeanEstimator(indexHelper.getNodeStore()));
}
}
|
oak-run/src/main/java/org/apache/jackrabbit/oak/index/OutOfBandIndexer.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.jackrabbit.oak.index;
import java.io.Closeable;
import java.io.File;
import java.io.IOException;
import java.io.OutputStream;
import java.util.Collections;
import java.util.Map;
import java.util.Properties;
import com.codahale.metrics.MetricRegistry;
import com.google.common.base.Stopwatch;
import com.google.common.io.Closer;
import org.apache.commons.io.FileUtils;
import org.apache.jackrabbit.oak.api.CommitFailedException;
import org.apache.jackrabbit.oak.api.PropertyState;
import org.apache.jackrabbit.oak.api.Type;
import org.apache.jackrabbit.oak.plugins.index.CompositeIndexEditorProvider;
import org.apache.jackrabbit.oak.plugins.index.CorruptIndexHandler;
import org.apache.jackrabbit.oak.plugins.index.IndexConstants;
import org.apache.jackrabbit.oak.plugins.index.IndexEditorProvider;
import org.apache.jackrabbit.oak.plugins.index.IndexUpdate;
import org.apache.jackrabbit.oak.plugins.index.IndexUpdateCallback;
import org.apache.jackrabbit.oak.plugins.index.NodeTraversalCallback;
import org.apache.jackrabbit.oak.plugins.index.counter.jmx.NodeCounter;
import org.apache.jackrabbit.oak.plugins.index.lucene.directory.DirectoryFactory;
import org.apache.jackrabbit.oak.plugins.index.lucene.directory.FSDirectoryFactory;
import org.apache.jackrabbit.oak.plugins.index.progress.MetricRateEstimator;
import org.apache.jackrabbit.oak.plugins.index.progress.NodeCounterMBeanEstimator;
import org.apache.jackrabbit.oak.plugins.index.property.PropertyIndexEditorProvider;
import org.apache.jackrabbit.oak.plugins.memory.MemoryNodeStore;
import org.apache.jackrabbit.oak.plugins.memory.PropertyStates;
import org.apache.jackrabbit.oak.plugins.metric.MetricStatisticsProvider;
import org.apache.jackrabbit.oak.spi.commit.CommitInfo;
import org.apache.jackrabbit.oak.spi.commit.EditorDiff;
import org.apache.jackrabbit.oak.spi.commit.EmptyHook;
import org.apache.jackrabbit.oak.spi.commit.VisibleEditor;
import org.apache.jackrabbit.oak.spi.state.NodeBuilder;
import org.apache.jackrabbit.oak.spi.state.NodeState;
import org.apache.jackrabbit.oak.spi.state.NodeStore;
import org.apache.jackrabbit.oak.stats.StatisticsProvider;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static com.google.common.base.Preconditions.checkNotNull;
import static java.util.Arrays.asList;
import static org.apache.jackrabbit.oak.plugins.index.IndexConstants.ASYNC_PROPERTY_NAME;
public class OutOfBandIndexer implements Closeable, IndexUpdateCallback, NodeTraversalCallback {
private final Logger log = LoggerFactory.getLogger(getClass());
/**
* Index lane name which is used for indexing
*/
private static final String REINDEX_LANE = "offline-reindex-async";
/**
* Property name where previous value of 'async' is stored
*/
private static final String ASYNC_PREVIOUS = "async-previous";
/**
* Value stored in previous async property if the index is not async
* i.e. when a sync index is reindexed in out of band mode
*/
private static final String ASYNC_PREVIOUS_NONE = "none";
/**
* Directory name in output directory under which indexes are
* stored
*/
public static final String LOCAL_INDEX_ROOT_DIR = "indexes";
/**
* File name stored in final index directory which contains meta
* information like checkpoint details. This can be used by
* importer while importing the indexes
*/
private static final String INDEXER_META = "indexer-info.txt";
/**
* Checkpoint value which indicate that head state needs to be used
* This would be mostly used for testing purpose
*/
private static final String HEAD_AS_CHECKPOINT = "head";
private final Closer closer = Closer.create();
private final IndexHelper indexHelper;
private final String checkpoint;
private Map<String, String> checkpointInfo = Collections.emptyMap();
private NodeStore copyOnWriteStore;
private File localIndexDir;
//TODO Support for providing custom index definition i.e. where definition is not
//present in target repository
public OutOfBandIndexer(IndexHelper indexHelper, String checkpoint) {
    this.indexHelper = checkNotNull(indexHelper);
    this.checkpoint = checkNotNull(checkpoint);
}
/**
 * Runs the full out-of-band reindex: resolves the checkpoint, switches the
 * indexes onto the offline lane, performs the index update on an in-memory
 * copy of the store, writes metadata, and moves the index files to the
 * output directory.
 */
public void reindex() throws CommitFailedException, IOException {
    Stopwatch w = Stopwatch.createStarted();
    NodeState checkpointedState = retrieveNodeStateForCheckpoint();
    // Work against an in-memory copy so lane/reindex flag changes never touch the real store.
    copyOnWriteStore = new MemoryNodeStore(checkpointedState);
    NodeState baseState = copyOnWriteStore.getRoot();
    //TODO Check for indexPaths being empty
    log.info("Proceeding to index {} upto checkpoint {} {}", indexHelper.getIndexPaths(), checkpoint, checkpointInfo);
    switchIndexLanesAndReindexFlag();
    preformIndexUpdate(baseState);
    writeMetaInfo();
    File destDir = copyIndexFilesToOutput();
    log.info("Indexing completed for indexes {} in {} and index files are copied to {}",
            indexHelper.getIndexPaths(), w, IndexCommand.getPath(destDir));
}
/**
 * Lazily creates and returns the local working directory where index files
 * are written before being moved to the output directory.
 */
private File getLocalIndexDir() throws IOException {
    if (localIndexDir == null) {
        localIndexDir = new File(indexHelper.getWorkDir(), LOCAL_INDEX_ROOT_DIR);
        FileUtils.forceMkdir(localIndexDir);
    }
    return localIndexDir;
}
@Override
public void close() throws IOException {
    // Delegates to the Closer, which closes everything registered with it.
    closer.close();
}
//~---------------------------------------------------< callbacks >
@Override
public void indexUpdate() throws CommitFailedException {
    // Intentionally empty: no per-update bookkeeping is needed here.
}
@Override
public void traversedNode(PathSource pathSource) throws CommitFailedException {
    // Intentionally empty: no per-node bookkeeping is needed here.
}
/**
 * Performs the actual index update by diffing the base state against the
 * current (lane-switched) state and feeding the diff into an IndexUpdate.
 * NOTE(review): method name is a typo for "performIndexUpdate" — kept as-is
 * since renaming would break callers.
 *
 * @param baseState the state captured before the lane switch
 */
private void preformIndexUpdate(NodeState baseState) throws IOException, CommitFailedException {
    NodeBuilder builder = copyOnWriteStore.getRoot().builder();
    IndexUpdate indexUpdate = new IndexUpdate(
            createIndexEditorProvider(),
            REINDEX_LANE,
            copyOnWriteStore.getRoot(),
            builder,
            this,
            this,
            CommitInfo.EMPTY,
            CorruptIndexHandler.NOOP
    );
    configureEstimators(indexUpdate);
    //Do not use EmptyState as before otherwise the IndexUpdate would
    //unnecessary traverse the whole repo post reindexing. With use of baseState
    //It would only traverse the diff i.e. those index definitions paths
    //whose lane has been changed
    NodeState before = baseState;
    NodeState after = copyOnWriteStore.getRoot();
    CommitFailedException exception =
            EditorDiff.process(VisibleEditor.wrap(indexUpdate), before, after);
    if (exception != null) {
        throw exception;
    }
}
/**
 * Composes the Lucene and property index editor providers used for the update.
 */
private IndexEditorProvider createIndexEditorProvider() throws IOException {
    IndexEditorProvider lucene = createLuceneEditorProvider();
    IndexEditorProvider property = new PropertyIndexEditorProvider().with(indexHelper.getMountInfoProvider());
    return CompositeIndexEditorProvider.compose(asList(lucene, property));
}
/**
 * Creates the Lucene editor provider, configured to write index files to the
 * local filesystem directory rather than the node store.
 */
private IndexEditorProvider createLuceneEditorProvider() throws IOException {
    LuceneIndexHelper luceneIndexHelper = indexHelper.getLuceneIndexHelper();
    DirectoryFactory dirFactory = new FSDirectoryFactory(getLocalIndexDir());
    luceneIndexHelper.setDirectoryFactory(dirFactory);
    return luceneIndexHelper.createEditorProvider();
}
/**
 * For every configured index path: sets the reindex flag and switches the
 * async lane to the offline reindex lane, then merges the changes into the
 * in-memory copy-on-write store.
 */
private void switchIndexLanesAndReindexFlag() throws CommitFailedException {
    NodeBuilder builder = copyOnWriteStore.getRoot().builder();
    for (String indexPath : indexHelper.getIndexPaths()) {
        //TODO Do it only for lucene indexes for now
        NodeBuilder idxBuilder = NodeStoreUtils.childBuilder(builder, indexPath);
        idxBuilder.setProperty(IndexConstants.REINDEX_PROPERTY_NAME, true);
        switchLane(idxBuilder);
    }
    copyOnWriteStore.merge(builder, EmptyHook.INSTANCE, CommitInfo.EMPTY);
    log.info("Switched the async lane for indexes at {} to {} and marked them for reindex", indexHelper.getIndexPaths(), REINDEX_LANE);
}
/**
 * Resolves the repository state to index: head for the special "head"
 * checkpoint (testing only), otherwise the named checkpoint (fails fast if
 * the checkpoint cannot be retrieved).
 */
private NodeState retrieveNodeStateForCheckpoint() {
    NodeState checkpointedState;
    if (HEAD_AS_CHECKPOINT.equals(checkpoint)) {
        checkpointedState = indexHelper.getNodeStore().getRoot();
        log.warn("Using head state for indexing. Such an index cannot be imported back");
    } else {
        checkpointedState = indexHelper.getNodeStore().retrieve(checkpoint);
        checkNotNull(checkpointedState, "Not able to retrieve revision referred via checkpoint [%s]", checkpoint);
        checkpointInfo = indexHelper.getNodeStore().checkpointInfo(checkpoint);
    }
    return checkpointedState;
}
/**
 * Make a copy of current async value and replace it with one required for offline reindexing.
 * The previous value is stored under "async-previous" (or the "none" marker for sync indexes)
 * so the original lane can be restored at import time.
 *
 * @param idxBuilder builder positioned at the index definition node
 */
static void switchLane(NodeBuilder idxBuilder) {
    PropertyState currentAsyncState = idxBuilder.getProperty(ASYNC_PROPERTY_NAME);
    PropertyState newAsyncState = PropertyStates.createProperty(ASYNC_PROPERTY_NAME, REINDEX_LANE, Type.STRING);
    PropertyState previousAsyncState;
    if (currentAsyncState == null) {
        // Sync index: no async value to back up, store the marker instead.
        previousAsyncState = PropertyStates.createProperty(ASYNC_PREVIOUS, ASYNC_PREVIOUS_NONE);
    } else {
        //Ensure that previous state is copied with correct type
        if (currentAsyncState.isArray()) {
            previousAsyncState = PropertyStates.createProperty(ASYNC_PREVIOUS, currentAsyncState.getValue(Type.STRINGS), Type.STRINGS);
        } else {
            previousAsyncState = PropertyStates.createProperty(ASYNC_PREVIOUS, currentAsyncState.getValue(Type.STRING), Type.STRING);
        }
    }
    idxBuilder.setProperty(previousAsyncState);
    idxBuilder.setProperty(newAsyncState);
}
/**
 * Writes indexer metadata (the checkpoint) as a properties file inside the
 * local index directory, for use by a later import.
 */
private void writeMetaInfo() throws IOException {
    Properties props = new Properties();
    props.put("checkpoint", checkpoint);
    try (OutputStream os = FileUtils.openOutputStream(new File(getLocalIndexDir(), INDEXER_META))) {
        props.store(os, "Indexer info");
    }
}
/**
 * Moves the locally built index files under the configured output directory.
 *
 * @return the destination directory inside the output dir
 */
private File copyIndexFilesToOutput() throws IOException {
    // Compute the destination path before the move.
    File destDir = new File(indexHelper.getOutputDir(), getLocalIndexDir().getName());
    FileUtils.moveDirectoryToDirectory(getLocalIndexDir(), indexHelper.getOutputDir(), true);
    return destDir;
}
/**
 * Attaches traversal-rate (metrics-based, when available) and node-count
 * estimators to the index update for progress reporting.
 */
private void configureEstimators(IndexUpdate indexUpdate) {
    StatisticsProvider statsProvider = indexHelper.getStatisticsProvider();
    if (statsProvider instanceof MetricStatisticsProvider) {
        MetricRegistry registry = ((MetricStatisticsProvider) statsProvider).getRegistry();
        indexUpdate.setTraversalRateEstimator(new MetricRateEstimator(REINDEX_LANE, registry));
    }
    NodeCounterMBeanEstimator estimator = new NodeCounterMBeanEstimator(indexHelper.getNodeStore());
    indexUpdate.setNodeCountEstimator(estimator);
}
}
|
OAK-6271 - Support for importing index files
Switch to IndexerInfo to manage the meta files for exported indexes
git-svn-id: 67138be12999c61558c3dd34328380c8e4523e73@1801423 13f79535-47bb-0310-9956-ffa450edef68
|
oak-run/src/main/java/org/apache/jackrabbit/oak/index/OutOfBandIndexer.java
|
OAK-6271 - Support for importing index files
|
|
Java
|
apache-2.0
|
e29fed3dad124848e4fe61e7c99d2c84d2499128
| 0
|
cgeo/cgeo,SammysHP/cgeo,matej116/cgeo,matej116/cgeo,samueltardieu/cgeo,cgeo/cgeo,auricgoldfinger/cgeo,superspindel/cgeo,S-Bartfast/cgeo,Bananeweizen/cgeo,cgeo/cgeo,samueltardieu/cgeo,auricgoldfinger/cgeo,auricgoldfinger/cgeo,mucek4/cgeo,superspindel/cgeo,rsudev/c-geo-opensource,S-Bartfast/cgeo,mucek4/cgeo,tobiasge/cgeo,mucek4/cgeo,SammysHP/cgeo,kumy/cgeo,pstorch/cgeo,rsudev/c-geo-opensource,Bananeweizen/cgeo,cgeo/cgeo,kumy/cgeo,kumy/cgeo,pstorch/cgeo,matej116/cgeo,S-Bartfast/cgeo,superspindel/cgeo,pstorch/cgeo,tobiasge/cgeo,rsudev/c-geo-opensource,Bananeweizen/cgeo,samueltardieu/cgeo,SammysHP/cgeo,tobiasge/cgeo
|
package cgeo.geocaching.network;
import junit.framework.TestCase;
import org.eclipse.jdt.annotation.NonNull;
import java.security.InvalidParameterException;
import java.util.ArrayList;
import java.util.List;
import static org.assertj.core.api.Assertions.assertThat;
public class ParametersTest extends TestCase {
// RFC 3986 unreserved characters; built once and never reassigned, so declare it final.
static final List<Character> UNRESERVED;
static {
    // unreserved characters: ALPHA / DIGIT / "-" / "." / "_" / "~"
    final ArrayList<Character> unreserved = new ArrayList<Character>();
    for (int i = 65; i <= 90; i++) {
        unreserved.add((char) i); // uppercase
        unreserved.add((char) (i + 32)); // lowercase
    }
    for (int i = 0; i < 10; i++) {
        unreserved.add(Character.forDigit(i, 10));
    }
    unreserved.add('-');
    unreserved.add('.');
    unreserved.add('_');
    unreserved.add('~');
    // A blank final must be assigned via its simple name inside the static initializer.
    UNRESERVED = unreserved;
}
// Both the constructor and put() require an even number of key/value arguments;
// an odd count must raise InvalidParameterException.
public static void testException() {
    try {
        final Parameters params = new Parameters("aaa", "AAA", "bbb");
        params.clear(); // this will never be invoked, but suppresses warnings about unused objects
        fail("Exception not raised");
    } catch (InvalidParameterException e) {
        // Ok
    }
    try {
        final Parameters params = new Parameters("aaa", "AAA");
        params.put("bbb", "BBB", "ccc");
        fail("Exception not raised");
    } catch (InvalidParameterException e) {
        // Ok
    }
}
// put() with multiple pairs appends them all in order.
public static void testMultipleValues() {
    final Parameters params = new Parameters("aaa", "AAA", "bbb", "BBB");
    params.put("ccc", "CCC", "ddd", "DDD");
    assertThat(params.toString()).isEqualTo("aaa=AAA&bbb=BBB&ccc=CCC&ddd=DDD");
}
// Insertion order is preserved until sort() is explicitly called.
public static void testSort() {
    final Parameters params = new Parameters();
    params.put("aaa", "AAA");
    params.put("ccc", "CCC");
    params.put("bbb", "BBB");
    assertThat(params.toString()).isEqualTo("aaa=AAA&ccc=CCC&bbb=BBB");
    params.sort();
    assertThat(params.toString()).isEqualTo("aaa=AAA&bbb=BBB&ccc=CCC");
}
// toString() URL-encodes values (e.g. '&' becomes %26) but not separators.
public static void testToString() {
    final Parameters params = new Parameters();
    params.put("name", "foo&bar");
    params.put("type", "moving");
    assertThat(params.toString()).isEqualTo("name=foo%26bar&type=moving");
}
// Unreserved characters (per RFC 3986) must pass through percentEncode unchanged.
public static void testUnreservedCharactersMustNotBeEncoded() {
    for (Character c : UNRESERVED) {
        final @NonNull
        String charAsString = String.valueOf(c);
        assertEquals("wrong OAuth encoding for " + c, charAsString, Parameters.percentEncode(charAsString));
    }
}
// Every other printable ASCII character must be percent-encoded (result starts with '%').
public static void testOtherCharactersMustBeEncoded() {
    for (int i = 32; i < 127; i++) {
        final Character c = (char) i;
        if (!UNRESERVED.contains(c)) {
            final @NonNull
            String charAsString = String.valueOf(c);
            final String encoded = Parameters.percentEncode(charAsString);
            assertThat(charAsString).overridingErrorMessage("Character '" + charAsString + "' not encoded").isNotEqualTo(encoded);
            assertThat(encoded).startsWith("%");
        }
    }
}
// '*' is reserved in OAuth encoding and must be transformed.
public static void testAsterisk() {
    assertThat("*".equals(Parameters.percentEncode("*"))).isFalse();
}
// usePercentEncoding() switches the whole value, including ':' and '/', to percent-encoded form.
public static void testPercentEncoding() {
    final Parameters params = new Parameters("oauth_callback", "callback://www.cgeo.org/");
    assertThat(params.toString()).isEqualTo("oauth_callback=callback://www.cgeo.org/");
    params.usePercentEncoding();
    assertThat(params.toString()).isEqualTo("oauth_callback=callback%3A%2F%2Fwww.cgeo.org%2F");
}
}
|
tests/src/cgeo/geocaching/network/ParametersTest.java
|
package cgeo.geocaching.network;
import junit.framework.TestCase;
import org.eclipse.jdt.annotation.NonNull;
import java.security.InvalidParameterException;
import java.util.ArrayList;
import java.util.List;
import static org.assertj.core.api.Assertions.assertThat;
public class ParametersTest extends TestCase {
// RFC 3986 unreserved characters, built once in the static initializer.
static final List<Character> UNRESERVED;
static {
    // unreserved characters: ALPHA / DIGIT / "-" / "." / "_" / "~"
    ArrayList<Character> unreserved = new ArrayList<Character>();
    for (int i = 65; i <= 90; i++) {
        unreserved.add((char) i); // uppercase
        unreserved.add((char) (i + 32)); // lowercase
    }
    for (int i = 0; i < 10; i++) {
        unreserved.add(Character.forDigit(i, 10));
    }
    unreserved.add('-');
    unreserved.add('.');
    unreserved.add('_');
    unreserved.add('~');
    // Fix: a blank final field must be assigned via its simple name
    // ("ParametersTest.UNRESERVED = ..." does not compile).
    UNRESERVED = unreserved;
}
// Odd numbers of key/value arguments must raise InvalidParameterException.
public static void testException() {
    try {
        final Parameters params = new Parameters("aaa", "AAA", "bbb");
        params.clear(); // this will never be invoked, but suppresses warnings about unused objects
        fail("Exception not raised");
    } catch (InvalidParameterException e) {
        // Ok
    }
    try {
        final Parameters params = new Parameters("aaa", "AAA");
        params.put("bbb", "BBB", "ccc");
        fail("Exception not raised");
    } catch (InvalidParameterException e) {
        // Ok
    }
}
// put() with multiple pairs appends them all in order.
public static void testMultipleValues() {
    final Parameters params = new Parameters("aaa", "AAA", "bbb", "BBB");
    params.put("ccc", "CCC", "ddd", "DDD");
    assertThat(params.toString()).isEqualTo("aaa=AAA&bbb=BBB&ccc=CCC&ddd=DDD");
}
// Insertion order is preserved until sort() is called.
public static void testSort() {
    final Parameters params = new Parameters();
    params.put("aaa", "AAA");
    params.put("ccc", "CCC");
    params.put("bbb", "BBB");
    assertThat(params.toString()).isEqualTo("aaa=AAA&ccc=CCC&bbb=BBB");
    params.sort();
    assertThat(params.toString()).isEqualTo("aaa=AAA&bbb=BBB&ccc=CCC");
}
// Values are URL-encoded in toString() (e.g. '&' -> %26).
public static void testToString() {
    final Parameters params = new Parameters();
    params.put("name", "foo&bar");
    params.put("type", "moving");
    assertThat(params.toString()).isEqualTo("name=foo%26bar&type=moving");
}
// Unreserved characters (RFC 3986) must pass through percentEncode unchanged.
public static void testUnreservedCharactersMustNotBeEncoded() {
    for (Character c : UNRESERVED) {
        final @NonNull
        String charAsString = String.valueOf(c);
        assertEquals("wrong OAuth encoding for " + c, charAsString, Parameters.percentEncode(charAsString));
    }
}
// All other printable ASCII characters must be percent-encoded.
public static void testOtherCharactersMustBeEncoded() {
    for (int i = 32; i < 127; i++) {
        final Character c = (char) i;
        if (!UNRESERVED.contains(c)) {
            final @NonNull
            String charAsString = String.valueOf(c);
            final String encoded = Parameters.percentEncode(charAsString);
            assertThat(charAsString).overridingErrorMessage("Character '" + charAsString + "' not encoded").isNotEqualTo(encoded);
            assertThat(encoded).startsWith("%");
        }
    }
}
// '*' is reserved in OAuth encoding and must be transformed.
public static void testAsterisk() {
    assertThat("*".equals(Parameters.percentEncode("*"))).isFalse();
}
// usePercentEncoding() switches the value to percent-encoded form.
public static void testPercentEncoding() {
    final Parameters params = new Parameters("oauth_callback", "callback://www.cgeo.org/");
    assertThat(params.toString()).isEqualTo("oauth_callback=callback://www.cgeo.org/");
    params.usePercentEncoding();
    // Fix: RFC 3986 §2.1 / OAuth percent-encoding uses uppercase hex digits (%3A, not %3a).
    assertThat(params.toString()).isEqualTo("oauth_callback=callback%3A%2F%2Fwww.cgeo.org%2F");
}
}
|
Fix remaining errors in test
|
tests/src/cgeo/geocaching/network/ParametersTest.java
|
Fix remaining errors in test
|
|
Java
|
apache-2.0
|
f0d86277328bebcc7a5fdede7959ba7a4f2f73e1
| 0
|
CloudSlang/score,CloudSlang/score
|
/*
* Copyright © 2014-2017 EntIT Software LLC, a Micro Focus company (L.P.)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.cloudslang.score.events;
/**
* User:
* Date: 20/07/2014
*/
/**
 * String constants for score engine event types and event-data keys.
 * Pure constants holder — not meant to be instantiated.
 */
public class EventConstants {
    // Execution lifecycle events
    public static final String SCORE_ERROR_EVENT = "SCORE_ERROR_EVENT";
    public static final String SCORE_PAUSED_EVENT = "SCORE_PAUSED_EVENT";
    public static final String SCORE_FINISHED_EVENT = "SCORE_FINISHED_EVENT";
    // Branch lifecycle events
    public static final String SCORE_STARTED_BRANCH_EVENT = "SCORE_STARTED_BRANCH_EVENT";
    public static final String SCORE_FINISHED_BRANCH_EVENT = "SCORE_FINISHED_BRANCH_EVENT";
    public static final String SCORE_RESUMED_BRANCH_EVENT = "SCORE_RESUMED_BRANCH_EVENT";
    public static final String SCORE_PAUSED_BRANCH_EVENT = "SCORE_PAUSED_BRANCH_EVENT";
    // Failure events
    public static final String SCORE_FAILURE_EVENT = "SCORE_FAILURE_EVENT";
    public static final String SCORE_BRANCH_FAILURE_EVENT = "SCORE_BRANCH_FAILURE_EVENT";
    public static final String SCORE_NO_WORKER_FAILURE_EVENT = "SCORE_NO_WORKER_FAILURE_EVENT";
    public static final String SCORE_STEP_SPLIT_ERROR = "STEP_SPLIT_ERROR";
    public static final String SCORE_STEP_NAV_ERROR = "STEP_NAV_ERROR";
    // Event-data keys
    public static final String SCORE_ERROR_MSG = "error_message";
    public static final String SCORE_ERROR_LOG_MSG = "logMessage";
    public static final String SCORE_ERROR_TYPE = "SCORE_ERROR_TYPE";
    public static final String EXECUTION_CONTEXT = "EXECUTION_CONTEXT";
    public static final String SCORE_RUN_ENV = "SCORE_RUN_ENV";
    public static final String IS_BRANCH = "IS_BRANCH";
    public static final String PAUSE_ID = "PAUSE_ID";
    public static final String EXECUTION_ID = "EXECUTION_ID";
    public static final String STEP_PATH = "STEP_PATH";
    public static final String SPLIT_ID = "SPLIT_ID";
    public static final String BRANCH_ID = "BRANCH_ID";
    public static final String FLOW_UUID = "FLOW_UUID";
    public static final String WORKER_EXECUTION_MONITOR = "WORKER_EXECUTION_MONITOR";
    public static final String EXECUTION_ID_CONTEXT = "executionIdContext";
    public static final String MAVEN_DEPENDENCY_BUILD = "MAVEN_DEPENDENCY_BUILD";
    public static final String MAVEN_DEPENDENCY_BUILD_FINISHED = "MAVEN_DEPENDENCY_BUILD_FINISHED";

    // Constants class: suppress the implicit public constructor.
    private EventConstants() {
    }
}
|
score-api/src/main/java/io/cloudslang/score/events/EventConstants.java
|
/*
* Copyright © 2014-2017 EntIT Software LLC, a Micro Focus company (L.P.)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.cloudslang.score.events;
/**
* User:
* Date: 20/07/2014
*/
/**
 * String constants for score engine event types and event-data keys.
 */
public class EventConstants {
    // Execution lifecycle events
    public static final String SCORE_ERROR_EVENT = "SCORE_ERROR_EVENT";
    public static final String SCORE_PAUSED_EVENT = "SCORE_PAUSED_EVENT";
    public static final String SCORE_FINISHED_EVENT = "SCORE_FINISHED_EVENT";
    // Branch lifecycle events
    public static final String SCORE_STARTED_BRANCH_EVENT = "SCORE_STARTED_BRANCH_EVENT";
    public static final String SCORE_FINISHED_BRANCH_EVENT = "SCORE_FINISHED_BRANCH_EVENT";
    // Failure events
    public static final String SCORE_FAILURE_EVENT = "SCORE_FAILURE_EVENT";
    public static final String SCORE_BRANCH_FAILURE_EVENT = "SCORE_BRANCH_FAILURE_EVENT";
    public static final String SCORE_NO_WORKER_FAILURE_EVENT = "SCORE_NO_WORKER_FAILURE_EVENT";
    public static final String SCORE_STEP_SPLIT_ERROR = "STEP_SPLIT_ERROR";
    public static final String SCORE_STEP_NAV_ERROR = "STEP_NAV_ERROR";
    // Event-data keys
    public static final String SCORE_ERROR_MSG = "error_message";
    public static final String SCORE_ERROR_LOG_MSG = "logMessage";
    public static final String SCORE_ERROR_TYPE = "SCORE_ERROR_TYPE";
    public static final String EXECUTION_CONTEXT = "EXECUTION_CONTEXT";
    public static final String SCORE_RUN_ENV = "SCORE_RUN_ENV";
    public static final String IS_BRANCH = "IS_BRANCH";
    public static final String PAUSE_ID = "PAUSE_ID";
    public static final String EXECUTION_ID = "EXECUTION_ID";
    public static final String STEP_PATH = "STEP_PATH";
    public static final String SPLIT_ID = "SPLIT_ID";
    public static final String BRANCH_ID = "BRANCH_ID";
    public static final String FLOW_UUID = "FLOW_UUID";
    public static final String WORKER_EXECUTION_MONITOR = "WORKER_EXECUTION_MONITOR";
    public static final String EXECUTION_ID_CONTEXT = "executionIdContext";
    public static final String MAVEN_DEPENDENCY_BUILD = "MAVEN_DEPENDENCY_BUILD";
    public static final String MAVEN_DEPENDENCY_BUILD_FINISHED = "MAVEN_DEPENDENCY_BUILD_FINISHED";
}
|
Added PAUSED and RESUMED events for branches. (#316)
|
score-api/src/main/java/io/cloudslang/score/events/EventConstants.java
|
Added PAUSED and RESUMED events for branches. (#316)
|
|
Java
|
apache-2.0
|
2ce15a30775b347bff98a5388de013d9bef3704a
| 0
|
wso2/siddhi,tishan89/siddhi,ramindu90/siddhi,miyurud/siddhi,mohanvive/siddhi,ksdperera/siddhi,dilini-muthumala/siddhi,ChariniNana/siddhi,ChariniNana/siddhi,mohanvive/siddhi,suhothayan/siddhi,minudika/siddhi,gokul/siddhi,wso2/siddhi,tishan89/siddhi,ksdperera/siddhi,dilini-muthumala/siddhi,grainier/siddhi,ramindu90/siddhi,suhothayan/siddhi,grainier/siddhi,nadundesilva/siddhi,gokul/siddhi,minudika/siddhi,nadundesilva/siddhi,miyurud/siddhi
|
/*
* Copyright (c) 2016, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.siddhi.core.util.snapshot;
import org.apache.log4j.Logger;
import org.wso2.siddhi.core.config.SiddhiAppContext;
import org.wso2.siddhi.core.util.ThreadBarrier;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* Service level implementation to take/restore snapshots of processing elements.
*/
public class SnapshotService {
private static final Logger log = Logger.getLogger(SnapshotService.class);
private final ThreadBarrier threadBarrier;
private HashMap<String, List<Snapshotable>> snapshotableMap = new HashMap<String, List<Snapshotable>>();
private SiddhiAppContext siddhiAppContext;
/**
 * Creates the snapshot service for the given app context; the thread barrier
 * obtained from the context is locked during snapshot/restore operations.
 */
public SnapshotService(SiddhiAppContext siddhiAppContext) {
    this.siddhiAppContext = siddhiAppContext;
    this.threadBarrier = siddhiAppContext.getThreadBarrier();
}
/**
 * Registers a snapshotable element under the given query name, creating the
 * per-query list on first use and skipping duplicates.
 *
 * @param queryName    key under which the element is grouped
 * @param snapshotable element whose state is included in snapshots
 */
public synchronized void addSnapshotable(String queryName, Snapshotable snapshotable) {
    // computeIfAbsent replaces the manual get / null-check / put sequence.
    List<Snapshotable> snapshotableList =
            snapshotableMap.computeIfAbsent(queryName, key -> new ArrayList<Snapshotable>());
    // add only if the item is not already in the list
    if (!snapshotableList.contains(snapshotable)) {
        snapshotableList.add(snapshotable);
    }
}
/**
 * Collects the current state of every registered element and serializes the
 * whole map to bytes. The thread barrier is locked for the duration so event
 * processing is paused while state is captured.
 *
 * @return serialized snapshot of all element states
 */
public byte[] snapshot() {
    HashMap<String, Map<String, Object>> snapshots = new HashMap<>(snapshotableMap.size());
    List<Snapshotable> snapshotableList;
    byte[] serializedSnapshots;
    if (log.isDebugEnabled()) {
        log.debug("Taking snapshot ...");
    }
    try {
        threadBarrier.lock();
        for (Map.Entry<String, List<Snapshotable>> entry : snapshotableMap.entrySet()) {
            snapshotableList = entry.getValue();
            // Keyed by element id so restore() can match states back to elements.
            snapshotableList.forEach(snapshotableElement -> snapshots.put(snapshotableElement.getElementId(),
                    snapshotableElement.currentState()));
        }
        if (log.isDebugEnabled()) {
            log.debug("Snapshot serialization started ...");
        }
        serializedSnapshots = ByteSerializer.objectToByte(snapshots, siddhiAppContext);
        if (log.isDebugEnabled()) {
            log.debug("Snapshot serialization finished.");
        }
    } finally {
        threadBarrier.unlock();
    }
    if (log.isDebugEnabled()) {
        log.debug("Snapshot taken for Siddhi app '" + siddhiAppContext.getName() + "'");
    }
    return serializedSnapshots;
}
/**
 * Returns the current state of every element registered under the given
 * query name, keyed by element id. Empty map when the query is unknown.
 *
 * @param queryName name of the query whose state is requested
 * @return element-id to state map (never null)
 */
public Map<String, Object> queryState(String queryName) {
    final Map<String, Object> result = new HashMap<>();
    try {
        // Lock the threads in Siddhi
        threadBarrier.lock();
        final List<Snapshotable> elements = snapshotableMap.get(queryName);
        if (elements != null) {
            elements.forEach(element -> result.put(element.getElementId(), element.currentState()));
        }
    } finally {
        threadBarrier.unlock();
    }
    log.debug("Taking snapshot finished.");
    return result;
}
/**
 * Restores element states from a serialized snapshot. Restore failures for
 * individual elements (e.g. when the app definition changed since the
 * persist) are logged and skipped so the remaining elements still restore.
 *
 * @param snapshot bytes previously produced by {@link #snapshot()}
 */
public void restore(byte[] snapshot) {
    Map<String, Map<String, Object>> snapshots = (Map<String, Map<String, Object>>)
            ByteSerializer.byteToObject(snapshot, siddhiAppContext);
    List<Snapshotable> snapshotableList;
    try {
        threadBarrier.lock();
        for (Map.Entry<String, List<Snapshotable>> entry : snapshotableMap.entrySet()) {
            snapshotableList = entry.getValue();
            for (Snapshotable snapshotable : snapshotableList) {
                try {
                    snapshotable.restoreState(snapshots.get(snapshotable.getElementId()));
                } catch (Throwable t) {
                    // Fix: message said "State if"; also attach the cause so the
                    // stack trace is not silently discarded.
                    log.error("State of Siddhi app " + siddhiAppContext.getName() + " not restored properly " +
                            "because the Siddhi app may have changed since the last persist. Clean the old " +
                            "revisions from the database/file system for a fresh deployment of Siddhi app", t);
                }
            }
        }
    } finally {
        threadBarrier.unlock();
    }
}
}
|
modules/siddhi-core/src/main/java/org/wso2/siddhi/core/util/snapshot/SnapshotService.java
|
/*
* Copyright (c) 2016, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.siddhi.core.util.snapshot;
import org.apache.log4j.Logger;
import org.wso2.siddhi.core.config.SiddhiAppContext;
import org.wso2.siddhi.core.util.ThreadBarrier;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* Service level implementation to take/restore snapshots of processing elements.
*/
public class SnapshotService {
private static final Logger log = Logger.getLogger(SnapshotService.class);
private final ThreadBarrier threadBarrier;
private HashMap<String, List<Snapshotable>> snapshotableMap = new HashMap<String, List<Snapshotable>>();
private SiddhiAppContext siddhiAppContext;
/**
 * Creates the snapshot service for the given app context; the thread barrier
 * obtained from the context is locked during snapshot/restore operations.
 */
public SnapshotService(SiddhiAppContext siddhiAppContext) {
    this.siddhiAppContext = siddhiAppContext;
    this.threadBarrier = siddhiAppContext.getThreadBarrier();
}
/**
 * Registers a snapshotable element under the given query name, creating the
 * per-query list on first use and skipping duplicates.
 */
public synchronized void addSnapshotable(String queryName, Snapshotable snapshotable) {
    List<Snapshotable> snapshotableList = snapshotableMap.get(queryName);
    // if List does not exist create it
    if (snapshotableList == null) {
        snapshotableList = new ArrayList<Snapshotable>();
        snapshotableList.add(snapshotable);
        snapshotableMap.put(queryName, snapshotableList);
    } else {
        // add if item is not already in list
        if (!snapshotableList.contains(snapshotable)) {
            snapshotableList.add(snapshotable);
        }
    }
}
/**
 * Collects the current state of every registered element and serializes the
 * whole map to bytes. The thread barrier is locked for the duration so event
 * processing is paused while state is captured.
 *
 * @return serialized snapshot of all element states
 */
public byte[] snapshot() {
    HashMap<String, Map<String, Object>> snapshots = new HashMap<>(snapshotableMap.size());
    List<Snapshotable> snapshotableList;
    byte[] serializedSnapshots;
    if (log.isDebugEnabled()) {
        log.debug("Taking snapshot ...");
    }
    try {
        threadBarrier.lock();
        for (Map.Entry<String, List<Snapshotable>> entry : snapshotableMap.entrySet()) {
            snapshotableList = entry.getValue();
            // Keyed by element id so restore() can match states back to elements.
            snapshotableList.forEach(snapshotableElement -> snapshots.put(snapshotableElement.getElementId(),
                    snapshotableElement.currentState()));
        }
        if (log.isDebugEnabled()) {
            log.debug("Snapshot serialization started ...");
        }
        serializedSnapshots = ByteSerializer.objectToByte(snapshots, siddhiAppContext);
        if (log.isDebugEnabled()) {
            log.debug("Snapshot serialization finished.");
        }
    } finally {
        threadBarrier.unlock();
    }
    if (log.isDebugEnabled()) {
        log.debug("Snapshot taken for Siddhi app '" + siddhiAppContext.getName() + "'");
    }
    return serializedSnapshots;
}
/**
 * Returns the current state of every element registered under the given
 * query name, keyed by element id. Empty map when the query is unknown.
 */
public Map<String, Object> queryState(String queryName) {
    Map<String, Object> state = new HashMap<>();
    try {
        // Lock the threads in Siddhi
        threadBarrier.lock();
        List<Snapshotable> list = snapshotableMap.get(queryName);
        if (list != null) {
            for (Snapshotable element : list) {
                Map<String, Object> elementState = element.currentState();
                String elementId = element.getElementId();
                state.put(elementId, elementState);
            }
        }
    } finally {
        threadBarrier.unlock();
    }
    log.debug("Taking snapshot finished.");
    return state;
}
public void restore(byte[] snapshot) {
Map<String, Map<String, Object>> snapshots = (Map<String, Map<String, Object>>)
ByteSerializer.byteToObject(snapshot, siddhiAppContext);
List<Snapshotable> snapshotableList;
try {
threadBarrier.lock();
for (Map.Entry<String, List<Snapshotable>> entry : snapshotableMap.entrySet()) {
snapshotableList = entry.getValue();
for (Snapshotable snapshotable : snapshotableList) {
snapshotable.restoreState(snapshots.get(snapshotable.getElementId()));
}
}
} finally {
threadBarrier.unlock();
}
}
}
|
Fixed restore new version of siddhi app
|
modules/siddhi-core/src/main/java/org/wso2/siddhi/core/util/snapshot/SnapshotService.java
|
Fixed restore new version of siddhi app
|
|
Java
|
apache-2.0
|
5a68960529d5f20d81ff208654926a824491bc0e
| 0
|
mariusmoe/datamod
|
package gui;
import javafx.application.Application;
import javafx.fxml.FXMLLoader;
import javafx.scene.image.Image;
import javafx.stage.Stage;
import javafx.scene.Parent;
import javafx.scene.Scene;
/**
 * JavaFX application entry point: loads the root layout FXML (resolved relative
 * to this class's package) and shows the primary window.
 */
public class Main extends Application {

    @Override
    public void start(Stage primaryStage) {
        try {
            // Resource name is relative to the gui package, so it resolves inside a jar too.
            final Parent rootNode = FXMLLoader.load(Main.class.getResource("RootLayout.fxml"));
            primaryStage.setTitle("Registrer trening");
            primaryStage.setMinWidth(950);
            primaryStage.setMinHeight(1000);
            primaryStage.getIcons().add(new Image("/dagbokapplication/RunningMan.ico"));
            primaryStage.setScene(new Scene(rootNode));
            primaryStage.show();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /** Delegates to the JavaFX launcher, which calls {@link #start(Stage)}. */
    public static void main(String[] args) {
        launch(args);
    }
}
|
src/gui/Main.java
|
package gui;
import javafx.application.Application;
import javafx.fxml.FXMLLoader;
import javafx.scene.image.Image;
import javafx.stage.Stage;
import javafx.scene.Parent;
import javafx.scene.Scene;
/**
 * JavaFX application entry point: loads the root layout FXML and shows the
 * primary window.
 */
public class Main extends Application {

    @Override
    public void start(Stage primaryStage) {
        try {
            // Fix: use a package-relative resource name. The previous
            // "../gui/RootLayout.fxml" relative path does not resolve through
            // Class#getResource when packaged in a jar, so getResource returned
            // null and the loader failed. "RootLayout.fxml" resolves relative
            // to this class's package (gui) on disk and inside a jar.
            Parent root = FXMLLoader.load((Main.class.getResource("RootLayout.fxml")));
            Scene scene = new Scene(root);
            primaryStage.getIcons().add(new Image("/dagbokapplication/RunningMan.ico"));
            primaryStage.setScene(scene);
            primaryStage.setTitle("Registrer trening");
            primaryStage.setMinHeight(1000);
            primaryStage.setMinWidth(950);
            primaryStage.show();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /** Delegates to the JavaFX launcher, which calls {@link #start(Stage)}. */
    public static void main(String[] args) {
        launch(args);
    }
}
|
Successfully created the jar file. A path error was the issue
|
src/gui/Main.java
|
Successfully created the jar file. A path error was the issue
|
|
Java
|
apache-2.0
|
599547f22dbaa3fb71e0106ce87ac5c3ba20e5fe
| 0
|
cefolger/needsmoredojo,cefolger/needsmoredojo
|
package com.chrisfolger.needsmoredojo.base;
import com.chrisfolger.needsmoredojo.conventions.MismatchedImportsDetector;
import com.chrisfolger.needsmoredojo.refactoring.DeclareFinder;
import com.intellij.lang.javascript.psi.*;
import com.intellij.openapi.ui.Messages;
import com.intellij.psi.PsiDirectory;
import com.intellij.psi.PsiFile;
import com.intellij.psi.search.FilenameIndex;
import com.intellij.psi.search.GlobalSearchScope;
import java.util.*;
/**
 * Creates AMD-style Dojo imports in a JavaScript file: finds candidate module
 * paths in the project and inserts the module string plus a matching parameter
 * into an existing define(...) call.
 */
public class ImportCreator
{
    /**
     * Heuristic priority of a candidate import path based on which Dojo
     * library name it contains (higher = preferred library).
     *
     * @param item candidate import path
     * @return score of the first matching library name, or 0 if none matches
     */
    private int getScore(String item)
    {
        // Rebuilt on every call; scores are fixed priorities for the core libraries.
        Map<String, Integer> scores = new HashMap<String, Integer>();
        scores.put("dojo", 5);
        scores.put("dijit", 4);
        scores.put("dojox", 2);
        scores.put("dgrid", 1);

        for(String key : scores.keySet())
        {
            if(item.indexOf(key) != -1)
            {
                return scores.get(key);
            }
        }

        return 0;
    }

    /**
     * Finds possible AMD import paths for {@code module} by locating files named
     * "{module}.js" in the project scope and trimming their directory path down
     * to the first recognized Dojo library segment.
     *
     * @param psiFile file the import will be added to (used to get the project)
     * @param module  bare module name, e.g. "BorderContainer"
     * @return candidate import paths, sorted by score; the bare module name is
     *         always appended as a final fallback
     */
    public String[] getPossibleDojoImports(PsiFile psiFile, String module)
    {
        PsiFile[] files = null;
        try
        {
            files = FilenameIndex.getFilesByName(psiFile.getProject(), module + ".js", GlobalSearchScope.projectScope(psiFile.getProject()));
        }
        catch(NullPointerException exc)
        {
            // NOTE(review): NPE is used as a guard for an unavailable index/project;
            // consider explicit null checks instead — TODO confirm which call can be null.
            return new String[] { module };
        }

        List<String> choices = new ArrayList<String>();
        String[] dojoLibraries = new String[] { "dojo", "dijit", "dojox", "dgrid", "util"};

        for(int i=0;i<files.length;i++)
        {
            PsiFile file = files[i];
            PsiDirectory directory = file.getContainingDirectory();
            // Relies on PsiDirectory#toString() containing the directory path —
            // presumably stable for this IDE version; verify on platform upgrades.
            String result = directory.toString();

            // parse dojo libraries only
            int firstIndex = Integer.MAX_VALUE;
            String firstLibrary = null;

            for(String library : dojoLibraries)
            {
                int index = result.indexOf(library);

                if(index > -1 && index < firstIndex)
                {
                    firstIndex = index;
                    firstLibrary = library;
                }
            }

            if(firstLibrary != null)
            {
                // Keep everything from the library segment on, normalize separators,
                // and append the module name to form the AMD path.
                result = result.substring(result.indexOf(firstLibrary));
                result = result.replace('\\', '/') + '/' + module;
                choices.add(result);
            }
        }

        // NOTE(review): this sorts ascending by score, putting the highest-priority
        // library (dojo, score 5) last — confirm this ordering is intended.
        Collections.sort(choices, new Comparator<String>() {
            @Override
            public int compare(String o1, String o2) {
                return getScore(o1) - getScore(o2);
            }
        });

        choices.add(module);

        return choices.toArray(new String[0]);
    }

    /**
     * Inserts the module string into the define() imports array and the derived
     * parameter name into the callback's parameter list. Requires at least one
     * existing import to anchor the insertion position.
     *
     * @param module     AMD module path to insert
     * @param imports    the define() array literal of module strings
     * @param parameters the define() callback's parameter list
     */
    protected void createImport(String module, JSArrayLiteralExpression imports, JSParameterList parameters)
    {
        if(imports.getChildren().length == 0)
        {
            Messages.showInfoMessage("Need at least one import already present", "Add new AMD import");
            return;
        }
        else
        {
            JSUtil.addStatementBeforeElement(imports, imports.getChildren()[0], String.format("'%s',", module), "\n");
            JSUtil.addStatementBeforeElement(parameters, parameters.getChildren()[0], MismatchedImportsDetector.defineToParameter(module) + ",", " ");
        }
    }

    /**
     * Locates the define(...) call in the given file and adds {@code module}
     * to it via {@link #createImport}.
     *
     * @param file   file containing a define(...) call
     * @param module AMD module path to add
     */
    public void addImport(PsiFile file, final String module)
    {
        JSRecursiveElementVisitor visitor = new DeclareFinder().getDefineVisitor(new DeclareFinder.CompletionCallback() {
            @Override
            public void run(Object[] result) {
                // result[0] is the define() call expression, result[1] its callback function.
                JSCallExpression callExpression = (JSCallExpression) result[0];
                JSFunction function = (JSFunction) result[1];

                createImport(module, (JSArrayLiteralExpression) callExpression.getArguments()[0], function.getParameterList());
            }
        });

        file.acceptChildren(visitor);
    }
}
|
src/com/chrisfolger/needsmoredojo/base/ImportCreator.java
|
package com.chrisfolger.needsmoredojo.base;
import com.chrisfolger.needsmoredojo.conventions.MismatchedImportsDetector;
import com.chrisfolger.needsmoredojo.refactoring.DeclareFinder;
import com.intellij.lang.javascript.psi.*;
import com.intellij.openapi.ui.Messages;
import com.intellij.psi.PsiDirectory;
import com.intellij.psi.PsiFile;
import com.intellij.psi.search.FilenameIndex;
import com.intellij.psi.search.GlobalSearchScope;
import java.util.*;
/**
 * Creates AMD-style Dojo imports in a JavaScript file: finds candidate module
 * paths (searching all scopes, including libraries) and inserts the module
 * string plus a matching parameter into an existing define(...) call.
 */
public class ImportCreator
{
    /**
     * Heuristic priority of a candidate import path based on which Dojo
     * library name it contains (higher = preferred library).
     *
     * @param item candidate import path
     * @return score of the first matching library name, or 0 if none matches
     */
    private int getScore(String item)
    {
        // Rebuilt on every call; scores are fixed priorities for the core libraries.
        Map<String, Integer> scores = new HashMap<String, Integer>();
        scores.put("dojo", 5);
        scores.put("dijit", 4);
        scores.put("dojox", 2);
        scores.put("dgrid", 1);

        for(String key : scores.keySet())
        {
            if(item.indexOf(key) != -1)
            {
                return scores.get(key);
            }
        }

        return 0;
    }

    /**
     * Finds possible AMD import paths for {@code module} by locating files named
     * "{module}.js" in all search scopes and trimming their directory path down
     * to the first recognized Dojo library segment.
     *
     * @param psiFile file the import will be added to (used to get the project)
     * @param module  bare module name, e.g. "BorderContainer"
     * @return candidate import paths, sorted by score; the bare module name is
     *         always appended as a final fallback
     */
    public String[] getPossibleDojoImports(PsiFile psiFile, String module)
    {
        PsiFile[] files = null;
        try
        {
            files = FilenameIndex.getFilesByName(psiFile.getProject(), module + ".js", GlobalSearchScope.allScope(psiFile.getProject()));
        }
        catch(NullPointerException exc)
        {
            // NOTE(review): NPE is used as a guard for an unavailable index/project;
            // consider explicit null checks instead — TODO confirm which call can be null.
            return new String[] { module };
        }

        List<String> choices = new ArrayList<String>();
        String[] dojoLibraries = new String[] { "dojo", "dijit", "dojox", "dgrid", "util"};

        for(int i=0;i<files.length;i++)
        {
            PsiFile file = files[i];
            PsiDirectory directory = file.getContainingDirectory();
            // Relies on PsiDirectory#toString() containing the directory path —
            // presumably stable for this IDE version; verify on platform upgrades.
            String result = directory.toString();

            // parse dojo libraries only
            int firstIndex = Integer.MAX_VALUE;
            String firstLibrary = null;

            for(String library : dojoLibraries)
            {
                int index = result.indexOf(library);

                if(index > -1 && index < firstIndex)
                {
                    firstIndex = index;
                    firstLibrary = library;
                }
            }

            if(firstLibrary != null)
            {
                // Keep everything from the library segment on, normalize separators,
                // and append the module name to form the AMD path.
                result = result.substring(result.indexOf(firstLibrary));
                result = result.replace('\\', '/') + '/' + module;
                choices.add(result);
            }
        }

        // NOTE(review): this sorts ascending by score, putting the highest-priority
        // library (dojo, score 5) last — confirm this ordering is intended.
        Collections.sort(choices, new Comparator<String>() {
            @Override
            public int compare(String o1, String o2) {
                return getScore(o1) - getScore(o2);
            }
        });

        choices.add(module);

        return choices.toArray(new String[0]);
    }

    /**
     * Inserts the module string into the define() imports array and the derived
     * parameter name into the callback's parameter list. Requires at least one
     * existing import to anchor the insertion position.
     *
     * @param module     AMD module path to insert
     * @param imports    the define() array literal of module strings
     * @param parameters the define() callback's parameter list
     */
    protected void createImport(String module, JSArrayLiteralExpression imports, JSParameterList parameters)
    {
        if(imports.getChildren().length == 0)
        {
            Messages.showInfoMessage("Need at least one import already present", "Add new AMD import");
            return;
        }
        else
        {
            JSUtil.addStatementBeforeElement(imports, imports.getChildren()[0], String.format("'%s',", module), "\n")

;
            JSUtil.addStatementBeforeElement(parameters, parameters.getChildren()[0], MismatchedImportsDetector.defineToParameter(module) + ",", " ");
        }
    }

    /**
     * Locates the define(...) call in the given file and adds {@code module}
     * to it via {@link #createImport}.
     *
     * @param file   file containing a define(...) call
     * @param module AMD module path to add
     */
    public void addImport(PsiFile file, final String module)
    {
        JSRecursiveElementVisitor visitor = new DeclareFinder().getDefineVisitor(new DeclareFinder.CompletionCallback() {
            @Override
            public void run(Object[] result) {
                // result[0] is the define() call expression, result[1] its callback function.
                JSCallExpression callExpression = (JSCallExpression) result[0];
                JSFunction function = (JSFunction) result[1];

                createImport(module, (JSArrayLiteralExpression) callExpression.getArguments()[0], function.getParameterList());
            }
        });

        file.acceptChildren(visitor);
    }
}
|
add unstable jar
|
src/com/chrisfolger/needsmoredojo/base/ImportCreator.java
|
add unstable jar
|
|
Java
|
apache-2.0
|
59fe654f8bee57b9858657fda5a8fbbc0a418861
| 0
|
xiaoyanit/cgeo,auricgoldfinger/cgeo,cgeo/cgeo,SammysHP/cgeo,auricgoldfinger/cgeo,KublaikhanGeek/cgeo,pstorch/cgeo,madankb/cgeo,samueltardieu/cgeo,lewurm/cgeo,marco-dev/c-geo-opensource,matej116/cgeo,rsudev/c-geo-opensource,S-Bartfast/cgeo,Bananeweizen/cgeo,cgeo/cgeo,ThibaultR/cgeo,tobiasge/cgeo,ThibaultR/cgeo,pstorch/cgeo,samueltardieu/cgeo,SammysHP/cgeo,xiaoyanit/cgeo,Huertix/cgeo,superspindel/cgeo,mucek4/cgeo,matej116/cgeo,KublaikhanGeek/cgeo,brok85/cgeo,Huertix/cgeo,rsudev/c-geo-opensource,lewurm/cgeo,brok85/cgeo,KublaikhanGeek/cgeo,ThibaultR/cgeo,vishwakulkarni/cgeo,lewurm/cgeo,madankb/cgeo,schwabe/cgeo,tobiasge/cgeo,xiaoyanit/cgeo,kumy/cgeo,vishwakulkarni/cgeo,yummy222/cgeo,pstorch/cgeo,brok85/cgeo,vishwakulkarni/cgeo,mucek4/cgeo,yummy222/cgeo,Bananeweizen/cgeo,SammysHP/cgeo,schwabe/cgeo,marco-dev/c-geo-opensource,auricgoldfinger/cgeo,cgeo/cgeo,superspindel/cgeo,schwabe/cgeo,Bananeweizen/cgeo,cgeo/cgeo,tobiasge/cgeo,kumy/cgeo,superspindel/cgeo,rsudev/c-geo-opensource,yummy222/cgeo,schwabe/cgeo,S-Bartfast/cgeo,Huertix/cgeo,S-Bartfast/cgeo,marco-dev/c-geo-opensource,kumy/cgeo,mucek4/cgeo,samueltardieu/cgeo,madankb/cgeo,matej116/cgeo
|
package cgeo.geocaching.connector.ec;
import cgeo.geocaching.DataStore;
import cgeo.geocaching.Geocache;
import cgeo.geocaching.connector.LogResult;
import cgeo.geocaching.enumerations.CacheSize;
import cgeo.geocaching.enumerations.CacheType;
import cgeo.geocaching.enumerations.LoadFlags.SaveFlag;
import cgeo.geocaching.enumerations.LogType;
import cgeo.geocaching.enumerations.StatusCode;
import cgeo.geocaching.files.GPX10Parser;
import cgeo.geocaching.geopoint.Geopoint;
import cgeo.geocaching.geopoint.Viewport;
import cgeo.geocaching.list.StoredList;
import cgeo.geocaching.network.Network;
import cgeo.geocaching.network.Parameters;
import cgeo.geocaching.utils.JsonUtils;
import cgeo.geocaching.utils.Log;
import cgeo.geocaching.utils.SynchronizedDateFormat;
import ch.boye.httpclientandroidlib.HttpResponse;
import com.fasterxml.jackson.databind.JsonNode;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.lang3.StringUtils;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Collection;
import java.util.Collections;
import java.util.EnumSet;
import java.util.List;
import java.util.Locale;
import java.util.TimeZone;
/**
 * Client for the extremcaching.com (EC) export API: cache search by geocode,
 * bounding box or center point, and log posting. Requests carry the EC session
 * id and are retried once after a re-login when the server answers 403.
 */
public class ECApi {
    private static final String API_HOST = "https://extremcaching.com/exports/";
    private static final ECLogin ecLogin = ECLogin.getInstance();
    // Log timestamps are sent in UTC; SynchronizedDateFormat makes the formatter thread-safe.
    private static final SynchronizedDateFormat LOG_DATE_FORMAT = new SynchronizedDateFormat("yyyy-MM-dd HH:mm:ss.SSSZ", TimeZone.getTimeZone("UTC"), Locale.US);

    /**
     * Strips the "EC" connector prefix from a geocode, e.g. "EC1234" -> "1234".
     */
    public static String getIdFromGeocode(final String geocode) {
        return StringUtils.removeStartIgnoreCase(geocode, "EC");
    }

    /**
     * Fetches a single cache by geocode via the GPX export endpoint.
     *
     * @return the first parsed cache, or null if nothing was found
     */
    public static Geocache searchByGeoCode(final String geocode) {
        final Parameters params = new Parameters("id", getIdFromGeocode(geocode));
        final HttpResponse response = apiRequest("gpx.php", params);

        final Collection<Geocache> caches = importCachesFromGPXResponse(response);
        if (CollectionUtils.isNotEmpty(caches)) {
            return caches.iterator().next();
        }
        return null;
    }

    /**
     * Searches caches inside a viewport (bounding box).
     *
     * @return parsed caches; empty when the viewport is degenerate or the request fails
     */
    public static Collection<Geocache> searchByBBox(final Viewport viewport) {

        if (viewport.getLatitudeSpan() == 0 || viewport.getLongitudeSpan() == 0) {
            return Collections.emptyList();
        }

        final Parameters params = new Parameters("fnc", "bbox");
        params.add("lat1", String.valueOf(viewport.getLatitudeMin()));
        params.add("lat2", String.valueOf(viewport.getLatitudeMax()));
        params.add("lon1", String.valueOf(viewport.getLongitudeMin()));
        params.add("lon2", String.valueOf(viewport.getLongitudeMax()));
        final HttpResponse response = apiRequest(params);

        return importCachesFromJSON(response);
    }


    /**
     * Searches caches around a center point with a fixed 20 (presumably km —
     * TODO confirm unit against the EC API) distance.
     */
    public static Collection<Geocache> searchByCenter(final Geopoint center) {
        final Parameters params = new Parameters("fnc", "center");
        params.add("distance", "20");
        params.add("lat", String.valueOf(center.getLatitude()));
        params.add("lon", String.valueOf(center.getLongitude()));
        final HttpResponse response = apiRequest(params);

        return importCachesFromJSON(response);
    }

    /** Convenience overload of {@link #postLog(Geocache, LogType, Calendar, String, boolean)} without retry. */
    public static LogResult postLog(final Geocache cache, final LogType logType, final Calendar date, final String log) {
        return postLog(cache, logType, date, log, false);
    }

    /**
     * Posts a log entry for a cache. On success the found counter is bumped for
     * FOUND_IT/ATTENDED logs and the server-assigned log uid is returned.
     *
     * @param isRetry true when this call is already a retry after re-login
     * @return LogResult with NO_ERROR and the log uid, or LOG_POST_ERROR_EC
     */
    public static LogResult postLog(final Geocache cache, final LogType logType, final Calendar date, final String log, boolean isRetry) {
        final Parameters params = new Parameters("cache_id", cache.getGeocode());
        params.add("type", logType.type);
        params.add("log", log);
        params.add("date", LOG_DATE_FORMAT.format(date.getTime()));
        params.add("sid", ecLogin.getSessionId());

        final String uri = API_HOST + "log.php";
        final HttpResponse response = Network.postRequest(uri, params);

        if (response == null) {
            return new LogResult(StatusCode.LOG_POST_ERROR_EC, "");
        }
        if (!isRetry && response.getStatusLine().getStatusCode() == 403) {
            if (ecLogin.login() == StatusCode.NO_ERROR) {
                // NOTE(review): the retry result is discarded and the ORIGINAL 403
                // response is re-checked below; also apiRequest() issues a GET while
                // this method POSTed. Looks like a bug — confirm intended behavior.
                apiRequest(uri, params, true);
            }
        }
        if (response.getStatusLine().getStatusCode() != 200) {
            return new LogResult(StatusCode.LOG_POST_ERROR_EC, "");
        }

        final String data = Network.getResponseDataAlways(response);
        if (!StringUtils.isBlank(data) && StringUtils.contains(data, "success")) {
            if (logType == LogType.FOUND_IT || logType == LogType.ATTENDED) {
                ecLogin.setActualCachesFound(ecLogin.getActualCachesFound() + 1);
            }
            final String uid = StringUtils.remove(data, "success:");
            return new LogResult(StatusCode.NO_ERROR, uid);
        }
        return new LogResult(StatusCode.LOG_POST_ERROR_EC, "");
    }


    /** Issues a request against the default api.php endpoint. */
    private static HttpResponse apiRequest(final Parameters params) {
        return apiRequest("api.php", params);
    }

    /** Issues a first-attempt (non-retry) request against the given endpoint. */
    private static HttpResponse apiRequest(final String uri, final Parameters params) {
        return apiRequest(uri, params, false);
    }

    /**
     * GETs an EC API endpoint. On the first attempt the session id and a cgeo
     * marker are appended; a 403 triggers one re-login + retry.
     *
     * @return the 200 response, or null on failure
     */
    private static HttpResponse apiRequest(final String uri, final Parameters params, final boolean isRetry) {
        // add session and cgeo marker on every request
        if (!isRetry) {
            params.add("cgeo", "1");
            params.add("sid", ecLogin.getSessionId());
        }

        final HttpResponse response = Network.getRequest(API_HOST + uri, params);
        if (response == null) {
            return null;
        }

        // retry at most one time
        if (!isRetry && response.getStatusLine().getStatusCode() == 403) {
            if (ecLogin.login() == StatusCode.NO_ERROR) {
                return apiRequest(uri, params, true);
            }
        }

        if (response.getStatusLine().getStatusCode() != 200) {
            return null;
        }

        return response;
    }

    /**
     * Parses a GPX 1.0 response body into caches (stored on the temporary list).
     *
     * @return parsed caches; empty on null response or parse failure
     */
    private static Collection<Geocache> importCachesFromGPXResponse(final HttpResponse response) {
        if (response == null) {
            return Collections.emptyList();
        }

        try {
            return new GPX10Parser(StoredList.TEMPORARY_LIST.id).parse(response.getEntity().getContent(), null);
        } catch (Exception e) {
            Log.e("Error importing gpx from extremcaching.com", e);
            return Collections.emptyList();
        }
    }

    /**
     * Parses a JSON array response into caches; each parsed cache is persisted
     * by {@link #parseCache}.
     *
     * @return parsed caches; empty on null response, non-array payload or parse failure
     */
    private static List<Geocache> importCachesFromJSON(final HttpResponse response) {
        if (response != null) {
            try {
                final JsonNode json = JsonUtils.reader.readTree(Network.getResponseDataAlways(response));
                if (!json.isArray()) {
                    return Collections.emptyList();
                }
                final List<Geocache> caches = new ArrayList<>(json.size());
                for (final JsonNode node: json) {
                    final Geocache cache = parseCache(node);
                    if (cache != null) {
                        caches.add(cache);
                    }
                }
                return caches;
            } catch (IOException | ClassCastException e) {
                Log.w("importCachesFromJSON", e);
            }
        }
        return Collections.emptyList();
    }

    /**
     * Builds a Geocache from one JSON node and saves it to the DataStore cache.
     * Missing JSON fields surface as NullPointerException and yield null.
     *
     * @return the cache, or null when a required field was absent
     */
    private static Geocache parseCache(final JsonNode response) {
        try {
            final Geocache cache = new Geocache();
            cache.setReliableLatLon(true);
            cache.setGeocode("EC" + response.get("cache_id").asText());
            cache.setName(response.get("title").asText());
            cache.setCoords(new Geopoint(response.get("lat").asText(), response.get("lon").asText()));
            cache.setType(getCacheType(response.get("type").asText()));
            cache.setDifficulty((float) response.get("difficulty").asDouble());
            cache.setTerrain((float) response.get("terrain").asDouble());
            cache.setSize(CacheSize.getById(response.get("size").asText()));
            cache.setFound(response.get("found").asInt() == 1);
            DataStore.saveCache(cache, EnumSet.of(SaveFlag.CACHE));
            return cache;
        } catch (final NullPointerException e) {
            // Any missing field above triggers this; treat the record as unusable.
            Log.e("ECApi.parseCache", e);
            return null;
        }
    }

    /** Maps the EC server's type string to a c:geo CacheType (UNKNOWN as fallback). */
    private static CacheType getCacheType(final String cacheType) {
        if (cacheType.equalsIgnoreCase("Tradi")) {
            return CacheType.TRADITIONAL;
        }
        if (cacheType.equalsIgnoreCase("Multi")) {
            return CacheType.MULTI;
        }
        if (cacheType.equalsIgnoreCase("Event")) {
            return CacheType.EVENT;
        }
        if (cacheType.equalsIgnoreCase("Mystery")) {
            return CacheType.MYSTERY;
        }
        return CacheType.UNKNOWN;
    }
}
|
main/src/cgeo/geocaching/connector/ec/ECApi.java
|
package cgeo.geocaching.connector.ec;
import cgeo.geocaching.DataStore;
import cgeo.geocaching.Geocache;
import cgeo.geocaching.connector.LogResult;
import cgeo.geocaching.enumerations.CacheSize;
import cgeo.geocaching.enumerations.CacheType;
import cgeo.geocaching.enumerations.LoadFlags.SaveFlag;
import cgeo.geocaching.enumerations.LogType;
import cgeo.geocaching.enumerations.StatusCode;
import cgeo.geocaching.files.GPX10Parser;
import cgeo.geocaching.geopoint.Geopoint;
import cgeo.geocaching.geopoint.Viewport;
import cgeo.geocaching.list.StoredList;
import cgeo.geocaching.network.Network;
import cgeo.geocaching.network.Parameters;
import cgeo.geocaching.utils.JsonUtils;
import cgeo.geocaching.utils.Log;
import cgeo.geocaching.utils.SynchronizedDateFormat;
import ch.boye.httpclientandroidlib.HttpResponse;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.ArrayNode;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.lang3.StringUtils;
import java.io.IOException;
import java.util.Calendar;
import java.util.Collection;
import java.util.Collections;
import java.util.EnumSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Locale;
import java.util.TimeZone;
/**
 * Client for the extremcaching.com (EC) export API: cache search by geocode,
 * bounding box or center point, and log posting. Requests carry the EC session
 * id and are retried once after a re-login when the server answers 403.
 */
public class ECApi {
    private static final String API_HOST = "https://extremcaching.com/exports/";
    private static final ECLogin ecLogin = ECLogin.getInstance();
    // Log timestamps are sent in UTC; SynchronizedDateFormat makes the formatter thread-safe.
    private static final SynchronizedDateFormat LOG_DATE_FORMAT = new SynchronizedDateFormat("yyyy-MM-dd HH:mm:ss.SSSZ", TimeZone.getTimeZone("UTC"), Locale.US);

    /**
     * Strips the "EC" connector prefix from a geocode, e.g. "EC1234" -> "1234".
     */
    public static String getIdFromGeocode(final String geocode) {
        return StringUtils.removeStartIgnoreCase(geocode, "EC");
    }

    /**
     * Fetches a single cache by geocode via the GPX export endpoint.
     *
     * @return the first parsed cache, or null if nothing was found
     */
    public static Geocache searchByGeoCode(final String geocode) {
        final Parameters params = new Parameters("id", getIdFromGeocode(geocode));
        final HttpResponse response = apiRequest("gpx.php", params);

        final Collection<Geocache> caches = importCachesFromGPXResponse(response);
        if (CollectionUtils.isNotEmpty(caches)) {
            return caches.iterator().next();
        }
        return null;
    }

    /**
     * Searches caches inside a viewport (bounding box).
     *
     * @return parsed caches; empty when the viewport is degenerate or the request fails
     */
    public static Collection<Geocache> searchByBBox(final Viewport viewport) {

        if (viewport.getLatitudeSpan() == 0 || viewport.getLongitudeSpan() == 0) {
            return Collections.emptyList();
        }

        final Parameters params = new Parameters("fnc", "bbox");
        params.add("lat1", String.valueOf(viewport.getLatitudeMin()));
        params.add("lat2", String.valueOf(viewport.getLatitudeMax()));
        params.add("lon1", String.valueOf(viewport.getLongitudeMin()));
        params.add("lon2", String.valueOf(viewport.getLongitudeMax()));
        final HttpResponse response = apiRequest(params);

        return importCachesFromJSON(response);
    }


    /**
     * Searches caches around a center point with a fixed 20 (presumably km —
     * TODO confirm unit against the EC API) distance.
     */
    public static Collection<Geocache> searchByCenter(final Geopoint center) {
        final Parameters params = new Parameters("fnc", "center");
        params.add("distance", "20");
        params.add("lat", String.valueOf(center.getLatitude()));
        params.add("lon", String.valueOf(center.getLongitude()));
        final HttpResponse response = apiRequest(params);

        return importCachesFromJSON(response);
    }

    /** Convenience overload of {@link #postLog(Geocache, LogType, Calendar, String, boolean)} without retry. */
    public static LogResult postLog(final Geocache cache, final LogType logType, final Calendar date, final String log) {
        return postLog(cache, logType, date, log, false);
    }

    /**
     * Posts a log entry for a cache. On success the found counter is bumped for
     * FOUND_IT/ATTENDED logs and the server-assigned log uid is returned.
     *
     * @param isRetry true when this call is already a retry after re-login
     * @return LogResult with NO_ERROR and the log uid, or LOG_POST_ERROR_EC
     */
    public static LogResult postLog(final Geocache cache, final LogType logType, final Calendar date, final String log, boolean isRetry) {
        final Parameters params = new Parameters("cache_id", cache.getGeocode());
        params.add("type", logType.type);
        params.add("log", log);
        params.add("date", LOG_DATE_FORMAT.format(date.getTime()));
        params.add("sid", ecLogin.getSessionId());

        final String uri = API_HOST + "log.php";
        final HttpResponse response = Network.postRequest(uri, params);

        if (response == null) {
            return new LogResult(StatusCode.LOG_POST_ERROR_EC, "");
        }
        if (!isRetry && response.getStatusLine().getStatusCode() == 403) {
            if (ecLogin.login() == StatusCode.NO_ERROR) {
                // NOTE(review): the retry result is discarded and the ORIGINAL 403
                // response is re-checked below; also apiRequest() issues a GET while
                // this method POSTed. Looks like a bug — confirm intended behavior.
                apiRequest(uri, params, true);
            }
        }
        if (response.getStatusLine().getStatusCode() != 200) {
            return new LogResult(StatusCode.LOG_POST_ERROR_EC, "");
        }

        final String data = Network.getResponseDataAlways(response);
        if (!StringUtils.isBlank(data) && StringUtils.contains(data, "success")) {
            if (logType == LogType.FOUND_IT || logType == LogType.ATTENDED) {
                ecLogin.setActualCachesFound(ecLogin.getActualCachesFound() + 1);
            }
            final String uid = StringUtils.remove(data, "success:");
            return new LogResult(StatusCode.NO_ERROR, uid);
        }
        return new LogResult(StatusCode.LOG_POST_ERROR_EC, "");
    }


    /** Issues a request against the default api.php endpoint. */
    private static HttpResponse apiRequest(final Parameters params) {
        return apiRequest("api.php", params);
    }

    /** Issues a first-attempt (non-retry) request against the given endpoint. */
    private static HttpResponse apiRequest(final String uri, final Parameters params) {
        return apiRequest(uri, params, false);
    }

    /**
     * GETs an EC API endpoint. On the first attempt the session id and a cgeo
     * marker are appended; a 403 triggers one re-login + retry.
     *
     * @return the 200 response, or null on failure
     */
    private static HttpResponse apiRequest(final String uri, final Parameters params, final boolean isRetry) {
        // add session and cgeo marker on every request
        if (!isRetry) {
            params.add("cgeo", "1");
            params.add("sid", ecLogin.getSessionId());
        }

        final HttpResponse response = Network.getRequest(API_HOST + uri, params);
        if (response == null) {
            return null;
        }

        // retry at most one time
        if (!isRetry && response.getStatusLine().getStatusCode() == 403) {
            if (ecLogin.login() == StatusCode.NO_ERROR) {
                return apiRequest(uri, params, true);
            }
        }

        if (response.getStatusLine().getStatusCode() != 200) {
            return null;
        }

        return response;
    }

    /**
     * Parses a GPX 1.0 response body into caches (stored on the temporary list).
     *
     * @return parsed caches; empty on null response or parse failure
     */
    private static Collection<Geocache> importCachesFromGPXResponse(final HttpResponse response) {
        if (response == null) {
            return Collections.emptyList();
        }

        try {
            return new GPX10Parser(StoredList.TEMPORARY_LIST.id).parse(response.getEntity().getContent(), null);
        } catch (Exception e) {
            Log.e("Error importing gpx from extremcaching.com", e);
            return Collections.emptyList();
        }
    }

    /**
     * Parses a JSON array response into caches; each parsed cache is persisted
     * by {@link #parseCache}. A blank or literal "[]" body short-circuits to an
     * empty list before JSON parsing.
     *
     * @return parsed caches; empty on null response, empty payload or parse failure
     */
    private static List<Geocache> importCachesFromJSON(final HttpResponse response) {
        if (response != null) {
            try {
                final String data = Network.getResponseDataAlways(response);
                if (StringUtils.isBlank(data) || StringUtils.equals(data, "[]")) {
                    return Collections.emptyList();
                }
                // ClassCastException from this cast (non-array payload) is caught below.
                final ArrayNode json = (ArrayNode) JsonUtils.reader.readTree(data);
                final List<Geocache> caches = new LinkedList<>();
                for (final JsonNode node: json) {
                    final Geocache cache = parseCache(node);
                    if (cache != null) {
                        caches.add(cache);
                    }
                }
                return caches;
            } catch (IOException | ClassCastException e) {
                Log.w("importCachesFromJSON", e);
            }
        }
        return Collections.emptyList();
    }

    /**
     * Builds a Geocache from one JSON node and saves it to the DataStore cache.
     * Missing JSON fields surface as NullPointerException and yield null.
     *
     * @return the cache, or null when a required field was absent
     */
    private static Geocache parseCache(final JsonNode response) {
        try {
            final Geocache cache = new Geocache();
            cache.setReliableLatLon(true);
            cache.setGeocode("EC" + response.get("cache_id").asText());
            cache.setName(response.get("title").asText());
            cache.setCoords(new Geopoint(response.get("lat").asText(), response.get("lon").asText()));
            cache.setType(getCacheType(response.get("type").asText()));
            cache.setDifficulty((float) response.get("difficulty").asDouble());
            cache.setTerrain((float) response.get("terrain").asDouble());
            cache.setSize(CacheSize.getById(response.get("size").asText()));
            cache.setFound(response.get("found").asInt() == 1);
            DataStore.saveCache(cache, EnumSet.of(SaveFlag.CACHE));
            return cache;
        } catch (final NullPointerException e) {
            // Any missing field above triggers this; treat the record as unusable.
            Log.e("ECApi.parseCache", e);
            return null;
        }
    }

    /** Maps the EC server's type string to a c:geo CacheType (UNKNOWN as fallback). */
    private static CacheType getCacheType(final String cacheType) {
        if (cacheType.equalsIgnoreCase("Tradi")) {
            return CacheType.TRADITIONAL;
        }
        if (cacheType.equalsIgnoreCase("Multi")) {
            return CacheType.MULTI;
        }
        if (cacheType.equalsIgnoreCase("Event")) {
            return CacheType.EVENT;
        }
        if (cacheType.equalsIgnoreCase("Mystery")) {
            return CacheType.MYSTERY;
        }
        return CacheType.UNKNOWN;
    }
}
|
Use more idiomatic constructs from Jackson library
|
main/src/cgeo/geocaching/connector/ec/ECApi.java
|
Use more idiomatic constructs from Jackson library
|
|
Java
|
apache-2.0
|
1332b0d049f60239cc3001f9bf49b83444ec8cab
| 0
|
naver/arcus-zookeeper,naver/arcus-zookeeper,naver/arcus-zookeeper,naver/arcus-zookeeper,naver/arcus-zookeeper,naver/arcus-zookeeper,naver/arcus-zookeeper,naver/arcus-zookeeper
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.zookeeper.jmx;
import java.util.Enumeration;
import javax.management.JMException;
import javax.management.MBeanServer;
import javax.management.ObjectName;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Shared JMX utilities. Log4j 1.2 MBean registration is performed entirely via
 * reflection so there is no compile-time dependency on log4j 1.2: the log4j2
 * 1.2-compatibility jar provides the logging classes but not the jmx package,
 * and reflection lets both setups coexist.
 */
public class ManagedUtil {
    private static final Logger LOG = LoggerFactory.getLogger(ManagedUtil.class);

    /**
     * Decides whether log4j 1.2 jmx beans should be registered: the
     * "zookeeper.jmx.log4j.disable" system property must not be set, and the
     * jmx support class must be on the classpath. Checking for
     * HierarchyDynamicMBean (rather than a logging class) correctly rejects the
     * log4j2 1.2-compatibility jar, which lacks the jmx package.
     */
    private static boolean isLog4jJmxEnabled() {
        boolean enabled = false;

        if (Boolean.getBoolean("zookeeper.jmx.log4j.disable")) {
            LOG.info("Log4j 1.2 jmx support is disabled by property.");
        } else {
            try {
                Class.forName("org.apache.log4j.jmx.HierarchyDynamicMBean");
                enabled = true;
                LOG.info("Log4j 1.2 jmx support found and enabled.");
            } catch (ClassNotFoundException e) {
                LOG.info("Log4j 1.2 jmx support not found; jmx disabled.");
            }
        }

        return enabled;
    }

    /**
     * Register the log4j JMX mbeans. Set system property
     * "zookeeper.jmx.log4j.disable" to true to disable registration.
     * All log4j calls below are made reflectively; each reflective block is
     * preceded by a comment showing the equivalent direct call.
     * @see http://logging.apache.org/log4j/1.2/apidocs/index.html?org/apache/log4j/jmx/package-summary.html
     * @throws JMException if registration fails
     */
    @SuppressWarnings("rawtypes")
    public static void registerLog4jMBeans() throws JMException {
        if (isLog4jJmxEnabled()) {
            LOG.debug("registerLog4jMBeans()");
            MBeanServer mbs = MBeanRegistry.getInstance().getPlatformMBeanServer();

            try {
                // Create and Register the top level Log4J MBean
                // org.apache.log4j.jmx.HierarchyDynamicMBean hdm = new org.apache.log4j.jmx.HierarchyDynamicMBean();
                Object hdm = Class.forName("org.apache.log4j.jmx.HierarchyDynamicMBean").getConstructor().newInstance();

                // MBean name is overridable via the zookeeper.jmx.log4j.mbean property.
                String mbean = System.getProperty("zookeeper.jmx.log4j.mbean", "log4j:hierarchy=default");
                ObjectName mbo = new ObjectName(mbean);
                mbs.registerMBean(hdm, mbo);

                // Add the root logger to the Hierarchy MBean
                // org.apache.log4j.Logger rootLogger =
                // org.apache.log4j.Logger.getRootLogger();
                Object rootLogger = Class.forName("org.apache.log4j.Logger")
                        .getMethod("getRootLogger", (Class<?>[]) null)
                        .invoke(null, (Object[]) null);

                // hdm.addLoggerMBean(rootLogger.getName());
                Object rootLoggerName = rootLogger.getClass()
                        .getMethod("getName", (Class<?>[]) null)
                        .invoke(rootLogger, (Object[]) null);
                hdm.getClass().getMethod("addLoggerMBean", String.class)
                        .invoke(hdm, rootLoggerName);

                // Get each logger from the Log4J Repository and add it to the
                // Hierarchy MBean created above.
                // org.apache.log4j.spi.LoggerRepository r =
                // org.apache.log4j.LogManager.getLoggerRepository();
                Object r = Class.forName("org.apache.log4j.LogManager")
                        .getMethod("getLoggerRepository", (Class<?>[]) null)
                        .invoke(null, (Object[]) null);

                // Enumeration enumer = r.getCurrentLoggers();
                Enumeration enumer = (Enumeration) r.getClass()
                        .getMethod("getCurrentLoggers", (Class<?>[]) null)
                        .invoke(r, (Object[]) null);

                while (enumer.hasMoreElements()) {
                    Object logger = enumer.nextElement();
                    // hdm.addLoggerMBean(logger.getName());
                    Object loggerName = logger.getClass()
                            .getMethod("getName", (Class<?>[]) null)
                            .invoke(logger, (Object[]) null);
                    hdm.getClass().getMethod("addLoggerMBean", String.class)
                            .invoke(hdm, loggerName);
                }
            } catch (Exception e) {
                // Wrap any reflective/JMX failure in the declared JMException,
                // preserving the original description in the message.
                LOG.error("Problems while registering log4j 1.2 jmx beans!", e);
                throw new JMException(e.toString());
            }
        }
    }
}
|
zookeeper-server/src/main/java/org/apache/zookeeper/jmx/ManagedUtil.java
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.zookeeper.jmx;
import java.util.Enumeration;
import javax.management.JMException;
import javax.management.MBeanServer;
import javax.management.ObjectName;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Shared JMX utilities. Log4j MBean registration is performed entirely via
 * reflection so there is no compile-time dependency on log4j 1.2.
 */
public class ManagedUtil {
    private static final Logger LOG = LoggerFactory.getLogger(ManagedUtil.class);

    /**
     * Decides whether log4j jmx beans should be registered: a log4j logging
     * class must be on the classpath and the "zookeeper.jmx.log4j.disable"
     * system property must not be set. NOTE(review): checking for
     * LoggerRepository also matches the log4j2 1.2-compatibility jar, which
     * does NOT ship the jmx package — registration can then fail later.
     */
    private static boolean isLog4jJmxEnabled() {
        boolean enabled = false;

        try {
            Class.forName("org.apache.log4j.spi.LoggerRepository");

            if (Boolean.getBoolean("zookeeper.jmx.log4j.disable")) {
                LOG.info("Log4j found but jmx support is disabled.");
            } else {
                enabled = true;
                LOG.info("Log4j found with jmx enabled.");
            }
        } catch (ClassNotFoundException e) {
            LOG.info("Log4j not found.")

;
        }

        return enabled;
    }

    /**
     * Register the log4j JMX mbeans. Set the system property
     * "zookeeper.jmx.log4j.disable" to true to disable registration.
     * All log4j calls below are made reflectively; each reflective block is
     * preceded by a comment showing the equivalent direct call.
     * @see http://logging.apache.org/log4j/1.2/apidocs/index.html?org/apache/log4j/jmx/package-summary.html
     * @throws JMException if registration fails
     */
    @SuppressWarnings("rawtypes")
    public static void registerLog4jMBeans() throws JMException {
        if (isLog4jJmxEnabled()) {
            LOG.debug("registerLog4jMBeans()");
            MBeanServer mbs = MBeanRegistry.getInstance().getPlatformMBeanServer();

            try {
                // Create and Register the top level Log4J MBean
                // org.apache.log4j.jmx.HierarchyDynamicMBean hdm = new org.apache.log4j.jmx.HierarchyDynamicMBean();
                Object hdm = Class.forName("org.apache.log4j.jmx.HierarchyDynamicMBean").getConstructor().newInstance();

                // MBean name is overridable via the zookeeper.jmx.log4j.mbean property.
                String mbean = System.getProperty("zookeeper.jmx.log4j.mbean", "log4j:hierarchy=default");
                ObjectName mbo = new ObjectName(mbean);
                mbs.registerMBean(hdm, mbo);

                // Add the root logger to the Hierarchy MBean
                // org.apache.log4j.Logger rootLogger =
                // org.apache.log4j.Logger.getRootLogger();
                Object rootLogger = Class.forName("org.apache.log4j.Logger")
                        .getMethod("getRootLogger", (Class<?>[]) null)
                        .invoke(null, (Object[]) null);

                // hdm.addLoggerMBean(rootLogger.getName());
                Object rootLoggerName = rootLogger.getClass()
                        .getMethod("getName", (Class<?>[]) null)
                        .invoke(rootLogger, (Object[]) null);
                hdm.getClass().getMethod("addLoggerMBean", String.class)
                        .invoke(hdm, rootLoggerName);

                // Get each logger from the Log4J Repository and add it to the
                // Hierarchy MBean created above.
                // org.apache.log4j.spi.LoggerRepository r =
                // org.apache.log4j.LogManager.getLoggerRepository();
                Object r = Class.forName("org.apache.log4j.LogManager")
                        .getMethod("getLoggerRepository", (Class<?>[]) null)
                        .invoke(null, (Object[]) null);

                // Enumeration enumer = r.getCurrentLoggers();
                Enumeration enumer = (Enumeration) r.getClass()
                        .getMethod("getCurrentLoggers", (Class<?>[]) null)
                        .invoke(r, (Object[]) null);

                while (enumer.hasMoreElements()) {
                    Object logger = enumer.nextElement();
                    // hdm.addLoggerMBean(logger.getName());
                    Object loggerName = logger.getClass()
                            .getMethod("getName", (Class<?>[]) null)
                            .invoke(logger, (Object[]) null);
                    hdm.getClass().getMethod("addLoggerMBean", String.class)
                            .invoke(hdm, loggerName);
                }
            } catch (Exception e) {
                // Wrap any reflective/JMX failure in the declared JMException,
                // preserving the original description in the message.
                LOG.error("Problems while registering log4j jmx beans!", e);
                throw new JMException(e.toString());
            }
        }
    }
}
|
ZOOKEEPER-3737: Detect log4j 1.2 jmx support better
* Look for jmx class that exists only in the log4j 1.2 jar, but not in
the log4j2 1.2 compatibility jar.
* Check if disabled before attempting to detect log4j 1.2 jmx classes.
* Update log messages to highlight that they are referring to log4j 1.2
and not log4j2 or other versions.
* Minor javadoc fixup
Author: Christopher Tubbs <ctubbsii@apache.org>
Reviewers: 5b53a183ea2bd2d082534bd4ca654905d183733b@apache.org
Closes #1270 from ctubbsii/ZK-3737
Change-Id: I7ebd7a28386d3fee33fcc9078d7f573dc766e8ee
(cherry picked from commit 5a2332058deba04313ad37fa05cbb1515c83b8e6)
Signed-off-by: Patrick Hunt <5b53a183ea2bd2d082534bd4ca654905d183733b@apache.org>
|
zookeeper-server/src/main/java/org/apache/zookeeper/jmx/ManagedUtil.java
|
ZOOKEEPER-3737: Detect log4j 1.2 jmx support better
|
|
Java
|
apache-2.0
|
0dcb25716f55cde3566e50583467f2cf5d61a477
| 0
|
apache/felix-dev,apache/felix-dev,apache/felix-dev,apache/felix-dev
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.felix.webconsole.internal.misc;
import java.io.*;
import java.net.URL;
import java.text.*;
import java.util.*;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.io.IOUtils;
import org.apache.felix.webconsole.*;
import org.apache.felix.webconsole.internal.BaseWebConsolePlugin;
import org.apache.felix.webconsole.internal.Util;
import org.osgi.framework.ServiceReference;
import org.osgi.util.tracker.ServiceTracker;
/**
 * Web Console plugin that renders an overall "Configuration Status" page by
 * aggregating the output of all registered {@link ConfigurationPrinter}
 * services, plus built-in system-property and thread dumps. The status can be
 * rendered as HTML (tabbed web view), downloaded as a single plain-text file
 * (".txt" suffix) or as a ZIP with one entry per printer (".zip" suffix).
 */
public class ConfigurationRender extends BaseWebConsolePlugin
{

    /** Plugin label, also used as the URL path segment of this plugin. */
    public static final String LABEL = "config";

    /** Human-readable plugin title shown in the console. */
    public static final String TITLE = "Configuration Status";

    /** Stylesheet(s) injected into the rendered HTML page. */
    private static final String[] CSS_REFS =
        { "res/ui/configurationrender.css" };

    /**
     * Formatter pattern to generate a relative path for the generation
     * of the plain text or zip file representation of the status. The file
     * name consists of a base name and the current time of status generation.
     * NOTE: SimpleDateFormat is not thread-safe; all access is synchronized
     * on this instance.
     */
    private static final SimpleDateFormat FILE_NAME_FORMAT = new SimpleDateFormat( "'" + LABEL
        + "/configuration-status-'yyyyMMdd'-'HHmmZ" );

    /**
     * Formatter pattern to render the current time of status generation.
     * NOTE: DateFormat is not thread-safe; all access is synchronized on this
     * instance.
     */
    private static final DateFormat DISPLAY_DATE_FORMAT = SimpleDateFormat.getDateTimeInstance( SimpleDateFormat.LONG,
        SimpleDateFormat.LONG, Locale.US );

    // Tracks all ConfigurationPrinter services; lazily opened on first use.
    private ServiceTracker cfgPrinterTracker;

    // Tracking count of the tracker at the time configurationPrinters was
    // last rebuilt; used to detect service changes cheaply.
    private int cfgPrinterTrackerCount;

    // Printer title -> PrinterDesc, sorted by title for stable output order.
    private SortedMap configurationPrinters = new TreeMap();

    /** @return the plugin title ({@value #TITLE}) */
    public String getTitle()
    {
        return TITLE;
    }

    /** @return the plugin label ({@value #LABEL}) */
    public String getLabel()
    {
        return LABEL;
    }

    /**
     * Serves the status page. A path ending in ".txt" streams the whole
     * status as one plain-text document; ".zip" streams a ZIP archive with
     * one entry per printer (plus any printer-provided attachments); any
     * other path falls back to the regular HTML rendering of the base class.
     */
    protected void doGet( HttpServletRequest request, HttpServletResponse response ) throws ServletException,
        IOException
    {
        if ( request.getPathInfo().endsWith( ".txt" ) )
        {
            response.setContentType( "text/plain; charset=utf-8" );
            ConfigurationWriter pw = new PlainTextConfigurationWriter( response.getWriter() );
            printConfigurationStatus( pw, ConfigurationPrinter.MODE_TXT );
            pw.flush();
        }
        else if ( request.getPathInfo().endsWith( ".zip" ) )
        {
            String type = getServletContext().getMimeType( request.getPathInfo() );
            if ( type == null )
            {
                type = "application/x-zip";
            }
            response.setContentType( type );
            ZipOutputStream zip = new ZipOutputStream( response.getOutputStream() );
            zip.setLevel( 9 );
            zip.setMethod( ZipOutputStream.DEFLATED );
            final ConfigurationWriter pw = new ZipConfigurationWriter( zip );
            printConfigurationStatus( pw, ConfigurationPrinter.MODE_ZIP );
            pw.flush();
            // binary attachments are only supported in ZIP mode
            addAttachments( pw, ConfigurationPrinter.MODE_ZIP );
            zip.finish();
        }
        else
        {
            super.doGet( request, response );
        }
    }

    /** @return stylesheet references for the HTML rendering */
    protected String[] getCssReferences()
    {
        return CSS_REFS;
    }

    /**
     * Renders the HTML view: a tabbed container (one tab per printer) set up
     * via the "tabworld" jQuery plugin, preceded by the generation date and
     * download links for the .txt and .zip representations.
     */
    protected void renderContent( HttpServletRequest request, HttpServletResponse response ) throws IOException
    {
        ConfigurationWriter pw = new HtmlConfigurationWriter( response.getWriter() );
        String appRoot = ( String ) request.getAttribute( WebConsoleConstants.ATTR_APP_ROOT );
        Util.script( pw, appRoot, "tw-1.1.js" );
        Util.startScript( pw );
        pw.println( "    $(document).ready(function(){" );
        // set up the tabs (still hidden)
        pw.println( "        $('#cfgprttabs').tabworld({speed:0});" );
        // show the finished tabs
        pw.println( "        $('#divcfgprttabs').removeClass('divcfgprttabshidden');" );
        // hide the "please wait" message
        pw.println( "        $('#divcfgprttabswait').addClass('divcfgprttabshidden');" );
        pw.println( "    });" );
        Util.endScript( pw );
        final Date currentTime = new Date();
        // DateFormat instances are not thread-safe; guard shared formatters
        synchronized ( DISPLAY_DATE_FORMAT )
        {
            pw.println( "<p>Date: " + DISPLAY_DATE_FORMAT.format( currentTime ) + "</p>" );
        }
        synchronized ( FILE_NAME_FORMAT )
        {
            String fileName = FILE_NAME_FORMAT.format( currentTime );
            pw.println( "<p>Download as <a href='" + fileName + ".txt'>[Single File]</a> or as <a href='" + fileName
                + ".zip'>[ZIP]</a></p>" );
        }
        // display some information while the data is loading
        pw.println( "<div id='divcfgprttabswait'>Loading status information. Please wait....</div>" );
        // load the data (hidden to begin with)
        pw.println( "<div id='divcfgprttabs' class='divcfgprttabshidden'>" );
        pw.println( "<ul id='cfgprttabs'>" );
        printConfigurationStatus( pw, ConfigurationPrinter.MODE_WEB );
        pw.println( "</ul>" );
        pw.println( "</div>" );
        pw.flush();
    }

    /**
     * Writes the full status: system properties, threads, then every
     * registered printer whose declared modes match the given mode.
     *
     * @param pw target writer
     * @param mode one of the ConfigurationPrinter.MODE_* constants
     */
    private void printConfigurationStatus( ConfigurationWriter pw, final String mode )
    {
        this.printSystemProperties( pw );
        this.printThreads( pw );
        for ( Iterator cpi = getConfigurationPrinters().iterator(); cpi.hasNext(); )
        {
            final PrinterDesc desc = (PrinterDesc) cpi.next();
            if ( desc.match(mode) )
            {
                printConfigurationPrinter( pw, desc.printer, mode );
            }
        }
    }

    /**
     * Returns the current set of PrinterDesc values, sorted by printer title.
     * The cached map is rebuilt only when the service tracker's tracking
     * count has changed since the last build.
     */
    private Collection getConfigurationPrinters()
    {
        if ( cfgPrinterTracker == null )
        {
            // lazy init; tracker is never closed here — presumably handled by
            // the plugin lifecycle elsewhere (TODO confirm)
            cfgPrinterTracker = new ServiceTracker( getBundleContext(), ConfigurationPrinter.SERVICE, null );
            cfgPrinterTracker.open();
            cfgPrinterTrackerCount = -1;
        }
        if ( cfgPrinterTrackerCount != cfgPrinterTracker.getTrackingCount() )
        {
            SortedMap cp = new TreeMap();
            ServiceReference[] refs = cfgPrinterTracker.getServiceReferences();
            if ( refs != null )
            {
                for ( int i = 0; i < refs.length; i++ )
                {
                    ConfigurationPrinter cfgPrinter = ( ConfigurationPrinter ) cfgPrinterTracker.getService(refs[i]);
                    if ( cfgPrinter != null )
                    {
                        // NOTE(review): printers with duplicate titles silently
                        // overwrite each other in this map
                        cp.put( cfgPrinter.getTitle(), new PrinterDesc(cfgPrinter, refs[i].getProperty(ConfigurationPrinter.PROPERTY_MODES)) );
                    }
                }
            }
            configurationPrinters = cp;
            cfgPrinterTrackerCount = cfgPrinterTracker.getTrackingCount();
        }
        return configurationPrinters.values();
    }

    /** Writes all JVM system properties, sorted by key. */
    private void printSystemProperties( ConfigurationWriter pw )
    {
        pw.title( "System properties" );
        Properties props = System.getProperties();
        SortedSet keys = new TreeSet( props.keySet() );
        for ( Iterator ki = keys.iterator(); ki.hasNext(); )
        {
            Object key = ki.next();
            infoLine( pw, null, ( String ) key, props.get( key ) );
        }
        pw.end();
    }

    // This is Sling stuff, we comment it out for now
    //    private void printRawFrameworkProperties(PrintWriter pw) {
    //        pw.println("*** Raw Framework properties:");
    //
    //        File file = new File(getBundleContext().getProperty("sling.home"),
    //            "sling.properties");
    //        if (file.exists()) {
    //            Properties props = new Properties();
    //            InputStream ins = null;
    //            try {
    //                ins = new FileInputStream(file);
    //                props.load(ins);
    //            } catch (IOException ioe) {
    //                // handle or ignore
    //            } finally {
    //                IOUtils.closeQuietly(ins);
    //            }
    //
    //            SortedSet keys = new TreeSet(props.keySet());
    //            for (Iterator ki = keys.iterator(); ki.hasNext();) {
    //                Object key = ki.next();
    //                infoLine(pw, null, (String) key, props.get(key));
    //            }
    //
    //        } else {
    //            pw.println("  No Framework properties in " + file);
    //        }
    //
    //        pw.println();
    //    }

    /**
     * Writes one printer's section: title, configuration body, end marker.
     * Mode-aware printers receive the rendering mode; plain printers don't.
     */
    private void printConfigurationPrinter( final ConfigurationWriter pw,
            final ConfigurationPrinter cp,
            final String mode )
    {
        pw.title( cp.getTitle() );
        if ( cp instanceof ModeAwareConfigurationPrinter )
        {
            ((ModeAwareConfigurationPrinter)cp).printConfiguration( pw , mode);
        }
        else
        {
            cp.printConfiguration( pw );
        }
        pw.end();
    }

    /**
     * Writes a single "label = value" info line with optional indent. A null
     * label suppresses the "label = " prefix; value is rendered via
     * {@link #asString(Object)}.
     */
    public static void infoLine( PrintWriter pw, String indent, String label, Object value )
    {
        if ( indent != null )
        {
            pw.print( indent );
        }
        if ( label != null )
        {
            pw.print( label );
            pw.print( " = " );
        }
        pw.print( asString( value ) );
        pw.println();
    }

    /**
     * Renders a value for output: "n/a" for null, a comma-separated list for
     * object arrays, toString() otherwise.
     * NOTE(review): a primitive array would fail the Object[] cast here —
     * presumably only object arrays are ever passed; verify against callers.
     */
    private static String asString( final Object value )
    {
        if ( value == null )
        {
            return "n/a";
        }
        else if ( value.getClass().isArray() )
        {
            StringBuffer dest = new StringBuffer();
            Object[] values = ( Object[] ) value;
            for ( int j = 0; j < values.length; j++ )
            {
                if ( j > 0 )
                    dest.append( ", " );
                dest.append( values[j] );
            }
            return dest.toString();
        }
        else
        {
            return value.toString();
        }
    }

    /**
     * Writes a dump of all thread groups and their threads, starting from the
     * root thread group.
     */
    private void printThreads( ConfigurationWriter pw )
    {
        // first get the root thread group
        ThreadGroup rootGroup = Thread.currentThread().getThreadGroup();
        while ( rootGroup.getParent() != null )
        {
            rootGroup = rootGroup.getParent();
        }
        pw.title( "Threads" );
        printThreadGroup( pw, rootGroup );
        // 2x head-room because activeGroupCount is only an estimate
        int numGroups = rootGroup.activeGroupCount();
        ThreadGroup[] groups = new ThreadGroup[2 * numGroups];
        rootGroup.enumerate( groups );
        for ( int i = 0; i < groups.length; i++ )
        {
            // unfilled trailing slots are null; printThreadGroup skips null
            printThreadGroup( pw, groups[i] );
        }
        pw.end();
    }

    /**
     * Writes one thread group header line followed by a line per thread in
     * that group. Silently ignores a null group (see printThreads).
     */
    private void printThreadGroup( PrintWriter pw, ThreadGroup group )
    {
        if ( group != null )
        {
            StringBuffer info = new StringBuffer();
            info.append("ThreadGroup ").append(group.getName());
            info.append( " [" );
            info.append( "maxprio=" ).append( group.getMaxPriority() );
            info.append( ", parent=" );
            if ( group.getParent() != null )
            {
                info.append( group.getParent().getName() );
            }
            else
            {
                info.append( '-' );
            }
            info.append( ", isDaemon=" ).append( group.isDaemon() );
            info.append( ", isDestroyed=" ).append( group.isDestroyed() );
            info.append( ']' );
            infoLine( pw, null, null, info.toString() );
            // 2x head-room because activeCount is only an estimate
            int numThreads = group.activeCount();
            Thread[] threads = new Thread[numThreads * 2];
            group.enumerate( threads, false );
            for ( int i = 0; i < threads.length; i++ )
            {
                printThread( pw, threads[i] );
            }
            pw.println();
        }
    }

    /** Writes one indented line describing the thread; null is ignored. */
    private void printThread( PrintWriter pw, Thread thread )
    {
        if ( thread != null )
        {
            StringBuffer info = new StringBuffer();
            info.append("Thread ").append( thread.getName() );
            info.append( " [" );
            info.append( "priority=" ).append( thread.getPriority() );
            info.append( ", alive=" ).append( thread.isAlive() );
            info.append( ", daemon=" ).append( thread.isDaemon() );
            info.append( ", interrupted=" ).append( thread.isInterrupted() );
            info.append( ", loader=" ).append( thread.getContextClassLoader() );
            info.append( ']' );
            infoLine( pw, "  ", null, info.toString() );
        }
    }

    /**
     * Base writer for the status output. Subclasses mark up section titles
     * and section ends for their respective target format; attachment
     * handling is unsupported unless a subclass overrides it (only the ZIP
     * writer does).
     */
    private abstract static class ConfigurationWriter extends PrintWriter
    {

        ConfigurationWriter( Writer delegatee )
        {
            super( delegatee );
        }

        /** Begins a new titled section. */
        abstract void title( String title );

        /** Ends the current section. */
        abstract void end();

        /**
         * Stores printer-provided binary attachments; only meaningful for
         * archive-style outputs.
         *
         * @throws UnsupportedOperationException unless overridden
         */
        public void handleAttachments(final String title, final URL[] urls)
        throws IOException
        {
            throw new UnsupportedOperationException("handleAttachments not supported by this configuration writer: " + this);
        }

    }

    /**
     * Writer for the HTML tab view. Each section becomes a list item; while
     * inside a section, output is filtered (see write methods) and line
     * breaks are emitted as &lt;br/&gt; tags.
     */
    private static class HtmlConfigurationWriter extends ConfigurationWriter
    {

        // whether or not to filter "<" signs in the output
        private boolean doFilter;

        HtmlConfigurationWriter( Writer delegatee )
        {
            super( delegatee );
        }

        public void title( String title )
        {
            println( "<li>" );
            println( title );
            println( "<q>" );
            doFilter = true;
        }

        public void end()
        {
            doFilter = false;
            println( "</q>" );
            println( "</li>" );
        }

        // IE has an issue with white-space:pre in our case so, we write
        // <br/> instead of [CR]LF to get the line break. This also works
        // in other browsers.
        public void println()
        {
            if ( doFilter )
            {
                super.write( "<br/>", 0, 5 );
            }
            else
            {
                super.println();
            }
        }

        // write the character unmodified unless filtering is enabled and
        // the character is a "<" in which case the replacement is written.
        // NOTE(review): writing "<" for '<' is a no-op — presumably this was
        // meant to write the HTML entity for '<' and the literal got mangled;
        // verify against upstream source.
        public void write( final int character )
        {
            if ( doFilter && character == '<' )
            {
                super.write( "<" );
            }
            else
            {
                super.write( character );
            }
        }

        // write the characters unmodified unless filtering is enabled in
        // which case the writeFiltered(String) method is called for filtering
        public void write( final char[] chars, final int off, final int len )
        {
            if ( doFilter )
            {
                writeFiltered( new String( chars, off, len ) );
            }
            else
            {
                super.write( chars, off, len );
            }
        }

        // write the string unmodified unless filtering is enabled in
        // which case the writeFiltered(String) method is called for filtering
        // NOTE(review): substring(off, len) treats len as an END index, but
        // PrintWriter's contract passes a LENGTH — likely should be
        // substring(off, off + len); confirm before relying on partial writes.
        public void write( final String string, final int off, final int len )
        {
            if ( doFilter )
            {
                writeFiltered( string.substring( off, len ) );
            }
            else
            {
                super.write( string, off, len );
            }
        }

        // helper method filter the string for "<" before writing
        // NOTE(review): the replacement token written below is "<" itself,
        // making the whole tokenize-and-replace a no-op; this looks like a
        // garbled HTML entity for '<' — verify against upstream source.
        private void writeFiltered( final String string )
        {
            if ( string.indexOf( '<' ) >= 0 )
            {
                // TODO: replace with WebConsoleUtil.escapeHtml()
                // this "convoluted" code replaces "<" by the replacement token
                final StringTokenizer tokener = new StringTokenizer( string, "<", true );
                while ( tokener.hasMoreElements() )
                {
                    final String token = tokener.nextToken();
                    if ( "<".equals( token ) )
                    {
                        super.write( "<" );
                    }
                    else
                    {
                        super.write( token );
                    }
                }
            }
            else
            {
                // no filtering needed write as is
                super.write( string, 0, string.length() );
            }
        }

    }

    /**
     * Asks every mode-matching printer that implements AttachmentProvider for
     * binary attachments and hands them to the writer (only the ZIP writer
     * actually supports this; others throw UnsupportedOperationException).
     */
    private void addAttachments( final ConfigurationWriter cf, final String mode )
    throws IOException
    {
        for ( Iterator cpi = getConfigurationPrinters().iterator(); cpi.hasNext(); )
        {
            // check if printer supports zip mode
            final PrinterDesc desc = (PrinterDesc) cpi.next();
            if ( desc.match(mode) )
            {
                // check if printer implements binary configuration printer
                if ( desc.printer instanceof AttachmentProvider )
                {
                    final URL[] attachments = ((AttachmentProvider)desc.printer).getAttachments(mode);
                    if ( attachments != null )
                    {
                        cf.handleAttachments(desc.printer.getTitle(), attachments);
                    }
                }
            }
        }
    }

    /**
     * Pairs a ConfigurationPrinter with its declared rendering modes. The
     * modes service property may be a single String or a String[]; any value
     * outside the three known modes invalidates the whole declaration, in
     * which case the printer matches every mode (modes == null).
     */
    private static final class PrinterDesc
    {

        // null means "matches every mode"
        private final String[] modes;

        public final ConfigurationPrinter printer;

        // the only mode values accepted from the service property
        private static final List CUSTOM_MODES = new ArrayList();
        static
        {
            CUSTOM_MODES.add(ConfigurationPrinter.MODE_TXT);
            CUSTOM_MODES.add(ConfigurationPrinter.MODE_WEB);
            CUSTOM_MODES.add(ConfigurationPrinter.MODE_ZIP);
        }

        /**
         * @param printer the printer service
         * @param modes raw value of the modes service property
         *              (String, String[], or anything else = unrestricted)
         */
        public PrinterDesc(final ConfigurationPrinter printer, final Object modes)
        {
            this.printer = printer;
            if ( modes == null || !(modes instanceof String || modes instanceof String[]) )
            {
                this.modes = null;
            }
            else
            {
                if ( modes instanceof String )
                {
                    if ( CUSTOM_MODES.contains(modes) )
                    {
                        this.modes = new String[] {modes.toString()};
                    }
                    else
                    {
                        this.modes = null;
                    }
                }
                else
                {
                    final String[] values = (String[])modes;
                    boolean valid = values.length > 0;
                    for(int i=0; i<values.length; i++)
                    {
                        if ( !CUSTOM_MODES.contains(values[i]) )
                        {
                            // one bad entry invalidates the whole array
                            valid = false;
                            break;
                        }
                    }
                    if ( valid)
                    {
                        this.modes = values;
                    }
                    else
                    {
                        this.modes = null;
                    }
                }
            }
        }

        /**
         * @return true if this printer should render in the given mode
         *         (always true when no valid modes were declared)
         */
        public boolean match(final String mode)
        {
            if ( this.modes == null)
            {
                return true;
            }
            for(int i=0; i<this.modes.length; i++)
            {
                if ( this.modes[i].equals(mode) )
                {
                    return true;
                }
            }
            return false;
        }

    }

    /**
     * Writer for the single-file plain-text download: sections are delimited
     * by a "*** Title:" header and a trailing blank line.
     */
    private static class PlainTextConfigurationWriter extends ConfigurationWriter
    {

        PlainTextConfigurationWriter( Writer delegatee )
        {
            super( delegatee );
        }

        public void title( String title )
        {
            print( "*** " );
            print( title );
            println( ":" );
        }

        public void end()
        {
            println();
        }

    }

    /**
     * Writer for the ZIP download: each section becomes a numbered ".txt"
     * entry, and attachments become files in a numbered per-printer folder.
     */
    private static class ZipConfigurationWriter extends ConfigurationWriter
    {

        private final ZipOutputStream zip;

        // running entry index, used to keep entries in section order
        private int counter;

        ZipConfigurationWriter( ZipOutputStream zip )
        {
            super( new OutputStreamWriter( zip ) );
            this.zip = zip;
        }

        public void title( String title )
        {
            String name = MessageFormat.format( "{0,number,000}-{1}.txt", new Object[]
                { new Integer( counter ), title } );
            counter++;
            ZipEntry entry = new ZipEntry( name );
            try
            {
                zip.putNextEntry( entry );
            }
            catch ( IOException ioe )
            {
                // should handle
            }
        }

        /**
         * Opens a new ZIP entry "NNN-title/name" for an attachment and
         * returns the stream to write its bytes to.
         * NOTE(review): unlike title(), counter is NOT incremented here, so
         * attachments share the preceding section's number — confirm intended.
         */
        private OutputStream startFile( String title, String name)
        {
            final String path = MessageFormat.format( "{0,number,000}-{1}/{2}", new Object[]
                 { new Integer( counter ), title, name } );
            ZipEntry entry = new ZipEntry( path );
            try
            {
                zip.putNextEntry( entry );
            }
            catch ( IOException ioe )
            {
                // should handle
            }
            return zip;
        }

        /**
         * Copies each attachment URL into its own ZIP entry under a folder
         * named after the printer title. Attachment names are derived from
         * the URL path's last segment, falling back to a random UUID.
         */
        public void handleAttachments( final String title, final URL[] attachments)
        throws IOException
        {
            for(int i = 0; i < attachments.length; i++)
            {
                final URL current = attachments[i];
                final String path = current.getPath();
                final String name;
                if ( path == null || path.length() == 0 )
                {
                    // sanity code, we should have a path, but if not let's just create
                    // some random name
                    name = UUID.randomUUID().toString();
                }
                else
                {
                    final int pos = path.lastIndexOf('/');
                    name = (pos == -1 ? path : path.substring(pos + 1));
                }
                final OutputStream os = this.startFile(title, name);
                final InputStream is = current.openStream();
                try
                {
                    IOUtils.copy(is, os);
                }
                finally
                {
                    IOUtils.closeQuietly(is);
                }
                // close the entry, not the zip stream itself
                this.end();
            }
        }

        public void end()
        {
            flush();
            try
            {
                zip.closeEntry();
            }
            catch ( IOException ioe )
            {
                // should handle
            }
        }

    }

}
|
webconsole/src/main/java/org/apache/felix/webconsole/internal/misc/ConfigurationRender.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.felix.webconsole.internal.misc;
import java.io.*;
import java.net.URL;
import java.text.*;
import java.util.*;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.io.IOUtils;
import org.apache.felix.webconsole.*;
import org.apache.felix.webconsole.internal.BaseWebConsolePlugin;
import org.apache.felix.webconsole.internal.Util;
import org.osgi.framework.ServiceReference;
import org.osgi.util.tracker.ServiceTracker;
public class ConfigurationRender extends BaseWebConsolePlugin
{
public static final String LABEL = "config";
public static final String TITLE = "Configuration Status";
private static final String[] CSS_REFS =
{ "res/ui/configurationrender.css" };
/**
* Formatter pattern to generate a relative path for the generation
* of the plain text or zip file representation of the status. The file
* name consists of a base name and the current time of status generation.
*/
private static final SimpleDateFormat FILE_NAME_FORMAT = new SimpleDateFormat( "'" + LABEL
+ "/configuration-status-'yyyyMMdd'-'HHmmZ" );
/**
* Formatter pattern to render the current time of status generation.
*/
private static final DateFormat DISPLAY_DATE_FORMAT = SimpleDateFormat.getDateTimeInstance( SimpleDateFormat.LONG,
SimpleDateFormat.LONG, Locale.US );
private ServiceTracker cfgPrinterTracker;
private int cfgPrinterTrackerCount;
private SortedMap configurationPrinters = new TreeMap();
public String getTitle()
{
return TITLE;
}
public String getLabel()
{
return LABEL;
}
protected void doGet( HttpServletRequest request, HttpServletResponse response ) throws ServletException,
IOException
{
if ( request.getPathInfo().endsWith( ".txt" ) )
{
response.setContentType( "text/plain; charset=utf-8" );
ConfigurationWriter pw = new PlainTextConfigurationWriter( response.getWriter() );
printConfigurationStatus( pw, ConfigurationPrinter.MODE_TXT );
pw.flush();
}
else if ( request.getPathInfo().endsWith( ".zip" ) )
{
String type = getServletContext().getMimeType( request.getPathInfo() );
if ( type == null )
{
type = "application/x-zip";
}
response.setContentType( type );
ZipOutputStream zip = new ZipOutputStream( response.getOutputStream() );
zip.setLevel( 9 );
zip.setMethod( ZipOutputStream.DEFLATED );
final ZipConfigurationWriter pw = new ZipConfigurationWriter( zip );
printConfigurationStatus( pw, ConfigurationPrinter.MODE_ZIP );
pw.flush();
addBinaries( pw );
zip.finish();
}
else
{
super.doGet( request, response );
}
}
protected String[] getCssReferences()
{
return CSS_REFS;
}
protected void renderContent( HttpServletRequest request, HttpServletResponse response ) throws IOException
{
ConfigurationWriter pw = new HtmlConfigurationWriter( response.getWriter() );
String appRoot = ( String ) request.getAttribute( WebConsoleConstants.ATTR_APP_ROOT );
Util.script( pw, appRoot, "tw-1.1.js" );
Util.startScript( pw );
pw.println( " $(document).ready(function(){" );
// set up the tabs (still hidden)
pw.println( " $('#cfgprttabs').tabworld({speed:0});" );
// show the finished tabs
pw.println( " $('#divcfgprttabs').removeClass('divcfgprttabshidden');" );
// hide the "please wait" message
pw.println( " $('#divcfgprttabswait').addClass('divcfgprttabshidden');" );
pw.println( " });" );
Util.endScript( pw );
final Date currentTime = new Date();
synchronized ( DISPLAY_DATE_FORMAT )
{
pw.println( "<p>Date: " + DISPLAY_DATE_FORMAT.format( currentTime ) + "</p>" );
}
synchronized ( FILE_NAME_FORMAT )
{
String fileName = FILE_NAME_FORMAT.format( currentTime );
pw.println( "<p>Download as <a href='" + fileName + ".txt'>[Single File]</a> or as <a href='" + fileName
+ ".zip'>[ZIP]</a></p>" );
}
// display some information while the data is loading
pw.println( "<div id='divcfgprttabswait'>Loading status information. Please wait....</div>" );
// load the data (hidden to begin with)
pw.println( "<div id='divcfgprttabs' class='divcfgprttabshidden'>" );
pw.println( "<ul id='cfgprttabs'>" );
printConfigurationStatus( pw, ConfigurationPrinter.MODE_WEB );
pw.println( "</ul>" );
pw.println( "</div>" );
pw.flush();
}
private void printConfigurationStatus( ConfigurationWriter pw, final String mode )
{
this.printSystemProperties( pw );
this.printThreads( pw );
for ( Iterator cpi = getConfigurationPrinters().iterator(); cpi.hasNext(); )
{
final PrinterDesc desc = (PrinterDesc) cpi.next();
if ( desc.match(mode) )
{
printConfigurationPrinter( pw, desc.printer, mode );
}
}
}
private Collection getConfigurationPrinters()
{
if ( cfgPrinterTracker == null )
{
cfgPrinterTracker = new ServiceTracker( getBundleContext(), ConfigurationPrinter.SERVICE, null );
cfgPrinterTracker.open();
cfgPrinterTrackerCount = -1;
}
if ( cfgPrinterTrackerCount != cfgPrinterTracker.getTrackingCount() )
{
SortedMap cp = new TreeMap();
ServiceReference[] refs = cfgPrinterTracker.getServiceReferences();
if ( refs != null )
{
for ( int i = 0; i < refs.length; i++ )
{
ConfigurationPrinter cfgPrinter = ( ConfigurationPrinter ) cfgPrinterTracker.getService(refs[i]);
if ( cfgPrinter != null )
{
cp.put( cfgPrinter.getTitle(), new PrinterDesc(cfgPrinter, refs[i].getProperty(ConfigurationPrinter.PROPERTY_MODES)) );
}
}
}
configurationPrinters = cp;
cfgPrinterTrackerCount = cfgPrinterTracker.getTrackingCount();
}
return configurationPrinters.values();
}
private void printSystemProperties( ConfigurationWriter pw )
{
pw.title( "System properties" );
Properties props = System.getProperties();
SortedSet keys = new TreeSet( props.keySet() );
for ( Iterator ki = keys.iterator(); ki.hasNext(); )
{
Object key = ki.next();
infoLine( pw, null, ( String ) key, props.get( key ) );
}
pw.end();
}
// This is Sling stuff, we comment it out for now
// private void printRawFrameworkProperties(PrintWriter pw) {
// pw.println("*** Raw Framework properties:");
//
// File file = new File(getBundleContext().getProperty("sling.home"),
// "sling.properties");
// if (file.exists()) {
// Properties props = new Properties();
// InputStream ins = null;
// try {
// ins = new FileInputStream(file);
// props.load(ins);
// } catch (IOException ioe) {
// // handle or ignore
// } finally {
// IOUtils.closeQuietly(ins);
// }
//
// SortedSet keys = new TreeSet(props.keySet());
// for (Iterator ki = keys.iterator(); ki.hasNext();) {
// Object key = ki.next();
// infoLine(pw, null, (String) key, props.get(key));
// }
//
// } else {
// pw.println(" No Framework properties in " + file);
// }
//
// pw.println();
// }
private void printConfigurationPrinter( final ConfigurationWriter pw,
final ConfigurationPrinter cp,
final String mode )
{
pw.title( cp.getTitle() );
if ( cp instanceof ModeAwareConfigurationPrinter )
{
((ModeAwareConfigurationPrinter)cp).printConfiguration( pw , mode);
}
else
{
cp.printConfiguration( pw );
}
pw.end();
}
public static void infoLine( PrintWriter pw, String indent, String label, Object value )
{
if ( indent != null )
{
pw.print( indent );
}
if ( label != null )
{
pw.print( label );
pw.print( " = " );
}
pw.print( asString( value ) );
pw.println();
}
private static String asString( final Object value )
{
if ( value == null )
{
return "n/a";
}
else if ( value.getClass().isArray() )
{
StringBuffer dest = new StringBuffer();
Object[] values = ( Object[] ) value;
for ( int j = 0; j < values.length; j++ )
{
if ( j > 0 )
dest.append( ", " );
dest.append( values[j] );
}
return dest.toString();
}
else
{
return value.toString();
}
}
private void printThreads( ConfigurationWriter pw )
{
// first get the root thread group
ThreadGroup rootGroup = Thread.currentThread().getThreadGroup();
while ( rootGroup.getParent() != null )
{
rootGroup = rootGroup.getParent();
}
pw.title( "Threads" );
printThreadGroup( pw, rootGroup );
int numGroups = rootGroup.activeGroupCount();
ThreadGroup[] groups = new ThreadGroup[2 * numGroups];
rootGroup.enumerate( groups );
for ( int i = 0; i < groups.length; i++ )
{
printThreadGroup( pw, groups[i] );
}
pw.end();
}
private void printThreadGroup( PrintWriter pw, ThreadGroup group )
{
if ( group != null )
{
StringBuffer info = new StringBuffer();
info.append("ThreadGroup ").append(group.getName());
info.append( " [" );
info.append( "maxprio=" ).append( group.getMaxPriority() );
info.append( ", parent=" );
if ( group.getParent() != null )
{
info.append( group.getParent().getName() );
}
else
{
info.append( '-' );
}
info.append( ", isDaemon=" ).append( group.isDaemon() );
info.append( ", isDestroyed=" ).append( group.isDestroyed() );
info.append( ']' );
infoLine( pw, null, null, info.toString() );
int numThreads = group.activeCount();
Thread[] threads = new Thread[numThreads * 2];
group.enumerate( threads, false );
for ( int i = 0; i < threads.length; i++ )
{
printThread( pw, threads[i] );
}
pw.println();
}
}
private void printThread( PrintWriter pw, Thread thread )
{
if ( thread != null )
{
StringBuffer info = new StringBuffer();
info.append("Thread ").append( thread.getName() );
info.append( " [" );
info.append( "priority=" ).append( thread.getPriority() );
info.append( ", alive=" ).append( thread.isAlive() );
info.append( ", daemon=" ).append( thread.isDaemon() );
info.append( ", interrupted=" ).append( thread.isInterrupted() );
info.append( ", loader=" ).append( thread.getContextClassLoader() );
info.append( ']' );
infoLine( pw, " ", null, info.toString() );
}
}
private abstract static class ConfigurationWriter extends PrintWriter
{
ConfigurationWriter( Writer delegatee )
{
super( delegatee );
}
abstract void title( String title );
abstract void end();
}
private static class HtmlConfigurationWriter extends ConfigurationWriter
{
// whether or not to filter "<" signs in the output
private boolean doFilter;
HtmlConfigurationWriter( Writer delegatee )
{
super( delegatee );
}
public void title( String title )
{
println( "<li>" );
println( title );
println( "<q>" );
doFilter = true;
}
public void end()
{
doFilter = false;
println( "</q>" );
println( "</li>" );
}
// IE has an issue with white-space:pre in our case so, we write
// <br/> instead of [CR]LF to get the line break. This also works
// in other browsers.
public void println()
{
if ( doFilter )
{
super.write( "<br/>", 0, 5 );
}
else
{
super.println();
}
}
// write the character unmodified unless filtering is enabled and
// the character is a "<" in which case < is written
public void write( final int character )
{
if ( doFilter && character == '<' )
{
super.write( "<" );
}
else
{
super.write( character );
}
}
// write the characters unmodified unless filtering is enabled in
// which case the writeFiltered(String) method is called for filtering
public void write( final char[] chars, final int off, final int len )
{
if ( doFilter )
{
writeFiltered( new String( chars, off, len ) );
}
else
{
super.write( chars, off, len );
}
}
// write the string unmodified unless filtering is enabled in
// which case the writeFiltered(String) method is called for filtering
public void write( final String string, final int off, final int len )
{
if ( doFilter )
{
writeFiltered( string.substring( off, len ) );
}
else
{
super.write( string, off, len );
}
}
// helper method filter the string for "<" before writing
private void writeFiltered( final String string )
{
if ( string.indexOf( '<' ) >= 0 )
{
// TODO: replace with WebConsoleUtil.escapeHtml()
// this "convoluted" code replaces "<" by "<"
final StringTokenizer tokener = new StringTokenizer( string, "<", true );
while ( tokener.hasMoreElements() )
{
final String token = tokener.nextToken();
if ( "<".equals( token ) )
{
super.write( "<" );
}
else
{
super.write( token );
}
}
}
else
{
// no filtering needed write as is
super.write( string, 0, string.length() );
}
}
}
// Adds the binary attachments of every ZIP-capable configuration printer
// to the ZIP file being written. Each attachment URL is copied into its
// own entry, named after the printer title plus the last path segment of
// the URL (or a random UUID when the URL has no usable path).
private void addBinaries( final ZipConfigurationWriter cf )
throws IOException
{
    final String mode = ConfigurationPrinter.MODE_ZIP;
    for ( Iterator cpi = getConfigurationPrinters().iterator(); cpi.hasNext(); )
    {
        // check if printer supports zip mode
        final PrinterDesc desc = (PrinterDesc) cpi.next();
        if ( desc.match(mode) )
        {
            // check if printer implements binary configuration printer
            if ( desc.printer instanceof AttachmentProvider )
            {
                final URL[] attachments = ((AttachmentProvider)desc.printer).getAttachments(mode);
                if ( attachments != null )
                {
                    for(int i = 0; i < attachments.length; i++)
                    {
                        final URL current = attachments[i];
                        final String path = current.getPath();
                        final String name;
                        if ( path == null || path.length() == 0 )
                        {
                            // sanity code, we should have a path, but if not let's just create
                            // some random name
                            name = UUID.randomUUID().toString();
                        }
                        else
                        {
                            final int pos = path.lastIndexOf('/');
                            name = (pos == -1 ? path : path.substring(pos + 1));
                        }
                        final OutputStream os = cf.startFile(desc.printer.getTitle(), name);
                        final InputStream is = current.openStream();
                        try
                        {
                            IOUtils.copy(is, os);
                        }
                        finally
                        {
                            IOUtils.closeQuietly(is);
                        }
                        // os is the shared ZIP stream: it must NOT be closed
                        // here; cf.end() only closes the current ZIP entry.
                        cf.end();
                    }
                }
            }
        }
    }
}
// Describes a registered ConfigurationPrinter together with the render
// modes it supports; a null mode array means "matches every mode".
private static final class PrinterDesc
{
    private final String[] modes;
    public final ConfigurationPrinter printer;
    private static final List CUSTOM_MODES = new ArrayList();
    static
    {
        CUSTOM_MODES.add(ConfigurationPrinter.MODE_TXT);
        CUSTOM_MODES.add(ConfigurationPrinter.MODE_WEB);
        CUSTOM_MODES.add(ConfigurationPrinter.MODE_ZIP);
    }
    public PrinterDesc(final ConfigurationPrinter printer, final Object modes)
    {
        this.printer = printer;
        this.modes = validModes(modes);
    }
    // Normalizes the raw modes property: a single known mode becomes a
    // one-element array; a non-empty array of known modes is used as
    // given; anything else (null, wrong type, unknown mode, empty array)
    // yields null, meaning "match every mode".
    private static String[] validModes(final Object modes)
    {
        if ( modes instanceof String )
        {
            return CUSTOM_MODES.contains(modes) ? new String[] { modes.toString() } : null;
        }
        if ( modes instanceof String[] )
        {
            final String[] values = (String[]) modes;
            if ( values.length == 0 )
            {
                return null;
            }
            for ( int i = 0; i < values.length; i++ )
            {
                if ( !CUSTOM_MODES.contains(values[i]) )
                {
                    return null;
                }
            }
            return values;
        }
        return null;
    }
    // Returns true if this printer should be used for the given mode.
    public boolean match(final String mode)
    {
        if ( this.modes == null )
        {
            return true;
        }
        for ( int i = 0; i < this.modes.length; i++ )
        {
            if ( this.modes[i].equals(mode) )
            {
                return true;
            }
        }
        return false;
    }
}
// ConfigurationWriter variant producing plain text output: each section
// is introduced by a "*** <title>:" line and terminated by a blank line.
private static class PlainTextConfigurationWriter extends ConfigurationWriter
{
    PlainTextConfigurationWriter( Writer delegatee )
    {
        super( delegatee );
    }
    public void title( String title )
    {
        print( "*** " + title );
        println( ":" );
    }
    public void end()
    {
        println();
    }
}
// ConfigurationWriter variant which writes each configuration section
// into its own entry of a ZIP file. Text sections become numbered
// "NNN-title.txt" entries; binary attachments are placed in a
// "NNN-title/name" entry created via startFile(String, String).
private static class ZipConfigurationWriter extends ConfigurationWriter
{
    private final ZipOutputStream zip;
    // running section counter, used as the numeric prefix of entry names
    private int counter;
    ZipConfigurationWriter( ZipOutputStream zip )
    {
        super( new OutputStreamWriter( zip ) );
        this.zip = zip;
    }
    // Starts a new text entry "NNN-title.txt" and advances the counter.
    public void title( String title )
    {
        String name = MessageFormat.format( "{0,number,000}-{1}.txt", new Object[]
            { new Integer( counter ), title } );
        counter++;
        ZipEntry entry = new ZipEntry( name );
        try
        {
            zip.putNextEntry( entry );
        }
        catch ( IOException ioe )
        {
            // should handle
            // NOTE(review): the exception is silently dropped; the section
            // text would then land in the previous entry - consider logging.
        }
    }
    // Opens a binary entry "NNN-title/name" and returns the stream to write
    // its content to; the caller must finish the entry with end().
    public OutputStream startFile( String title, String name)
    {
        // NOTE(review): unlike title(), the counter is not advanced here, so
        // attachments reuse the number of the preceding section - confirm
        // this numbering is intended.
        final String path = MessageFormat.format( "{0,number,000}-{1}/{2}", new Object[]
            { new Integer( counter ), title, name } );
        ZipEntry entry = new ZipEntry( path );
        try
        {
            zip.putNextEntry( entry );
        }
        catch ( IOException ioe )
        {
            // should handle
        }
        return zip;
    }
    // Flushes buffered text and closes the current ZIP entry.
    public void end()
    {
        flush();
        try
        {
            zip.closeEntry();
        }
        catch ( IOException ioe )
        {
            // should handle
        }
    }
}
}
|
FELIX-1993 : Enhance configuration printer support
Move attachment handling into the writer
git-svn-id: e057f57e93a604d3b43d277ae69bde5ebf332112@904008 13f79535-47bb-0310-9956-ffa450edef68
|
webconsole/src/main/java/org/apache/felix/webconsole/internal/misc/ConfigurationRender.java
|
FELIX-1993 : Enhance configuration printer support Move attachment handling into the writer
|
|
Java
|
bsd-2-clause
|
e4cd1fbaf9678b2d5b5e276a4f831a85e1d4e4e1
| 0
|
eddysystems/eddy,eddysystems/eddy
|
package com.eddysystems.eddy.engine;
import com.eddysystems.eddy.EddyPlugin;
import com.eddysystems.eddy.LightDocument;
import com.eddysystems.eddy.PreferenceData;
import com.eddysystems.eddy.Preferences;
import com.intellij.codeInsight.daemon.impl.ShowIntentionsPass;
import com.intellij.codeInsight.intention.impl.IntentionHintComponent;
import com.intellij.formatting.*;
import com.intellij.lang.ASTNode;
import com.intellij.lang.LanguageFormatting;
import com.intellij.lang.PsiBuilder;
import com.intellij.lang.java.parser.JavaParser;
import com.intellij.lang.java.parser.JavaParserUtil;
import com.intellij.notification.Notification;
import com.intellij.notification.NotificationType;
import com.intellij.notification.Notifications;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.Result;
import com.intellij.openapi.application.RuntimeInterruptedException;
import com.intellij.openapi.application.impl.LaterInvocator;
import com.intellij.openapi.command.WriteCommandAction;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.editor.ex.DocumentEx;
import com.intellij.openapi.fileEditor.impl.FileDocumentManagerImpl;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.TextRange;
import com.intellij.pom.java.LanguageLevel;
import com.intellij.psi.*;
import com.intellij.psi.codeStyle.CodeStyleManager;
import com.intellij.psi.codeStyle.CodeStyleSettings;
import com.intellij.psi.codeStyle.CodeStyleSettingsManager;
import com.intellij.psi.impl.source.DummyHolder;
import com.intellij.psi.impl.source.JavaDummyElement;
import com.intellij.psi.impl.source.SourceTreeToPsiMap;
import com.intellij.psi.impl.source.codeStyle.CodeFormatterFacade;
import com.intellij.psi.impl.source.tree.LeafElement;
import com.intellij.psi.impl.source.tree.RecursiveTreeElementVisitor;
import com.intellij.psi.impl.source.tree.TreeElement;
import com.intellij.psi.util.PsiUtil;
import com.intellij.util.IncorrectOperationException;
import com.intellij.util.SmartList;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import scala.Function2;
import scala.Unit$;
import scala.runtime.AbstractFunction1;
import scala.runtime.AbstractFunction2;
import scala.runtime.BoxedUnit;
import scala.util.Try;
import tarski.Environment.Env;
import tarski.Memory;
import tarski.Scores.Alt;
import tarski.Tarski;
import tarski.Tarski.ShowStmts;
import tarski.Tokens.*;
import utility.Locations.Loc;
import utility.Utility.Unchecked;
import java.util.ArrayList;
import java.util.List;
import static com.eddysystems.eddy.engine.Utility.*;
import static tarski.Tokens.*;
import static utility.Utility.unchecked;
public class Eddy {
final private Project project;
final private Memory.Info base;
final private Editor editor;
final private Document document;
// Returns the editor this Eddy instance was created with.
public Editor getEditor() { return editor; }
// Returns the PsiFile backing our document. Asserts the document is
// attached to a file; callers rely on a non-null result.
public PsiFile getFile() {
    final PsiFile file = PsiDocumentManager.getInstance(project).getPsiFile(document);
    assert file != null;
    return file;
}
// Immutable description of one interpretation request: the document range
// to replace, the tokens found in that range, the PsiElement used as the
// resolution scope ("place"), and the original text (kept for logging and
// similarity checks).
public static class Input {
    final TextRange range;
    final List<Loc<Token>> input;
    final PsiElement place;
    final String before_text;
    Input(final TextRange range, final List<Loc<Token>> input, final PsiElement place, final String before_text) {
        this.range = range;
        this.input = input;
        this.place = place;
        this.before_text = before_text;
    }
    // Returns the original text of the range, as captured at input time.
    public String getText() {
        return before_text;
    }
}
// The results of the interpretation: the input that produced them, the
// scored alternatives, and the user's current selection. Also hosts the
// apply/logging operations that act on a chosen result.
public static class Output {
    final private Eddy eddy;
    final public Input input;
    final public List<Alt<ShowStmts>> results;
    // Mutable field: which output we've selected. If we haven't explicitly selected something, offset < 0.
    private int selected = -1;
    Output(final Eddy eddy, final Input input, final List<Alt<ShowStmts>> results) {
        this.eddy = eddy;
        this.input = input;
        this.results = results;
    }
    // Renders one result per the flags: denotation, abbreviated, or full form.
    static String format(final ShowStmts ss, final ShowFlags f) {
        return f.den() ? ss.den() : f.abbreviate() ? ss.abbrev() : ss.full();
    }
    // Renders the i-th result with the given flags.
    public String format(final int i, final ShowFlags f) {
        return format(results.get(i).x(),f);
    }
    // Renders all results; if probs is set each line is prefixed with its
    // probability.
    public List<String> formats(final ShowFlags f, final boolean probs) {
        final List<String> fs = new ArrayList<String>(results.size());
        for (final Alt<ShowStmts> a : results)
            fs.add(format(a.x(),f));
        if (probs) {
            for (int i = 0; i < fs.size(); ++i) {
                fs.set(i, String.format("%f: %s", results.get(i).p(), fs.get(i)));
            }
        }
        return fs;
    }
    // Probability-annotated summary of all results (used for logging/UI).
    public String[] getResultSummary() {
        return formats(new ShowFlags(false,true,true), true).toArray(new String[results.size()]);
    }
    public boolean foundSomething() {
        return !results.isEmpty();
    }
    // Did we find useful meanings, and are those meanings different from what's already there?
    public boolean shouldShowHint() {
        for (final Alt<ShowStmts> r : results)
            if (r.x().similar(input.input))
                return false; // We found what's already there
        return !results.isEmpty();
    }
    // Is there only one realistic option (or did the user explicitly select one)?
    public boolean single() {
        return results.size() == 1 || selected >= 0;
    }
    // Cycles the selection forward; returns whether the hint should update.
    public boolean nextBestResult() {
        if (shouldShowHint() && results.size()>1) {
            selected = (Math.max(0,selected)+1)%results.size();
            return true;
        }
        return false;
    }
    // Cycles the selection backward; returns whether the hint should update.
    public boolean prevBestResult() {
        if (shouldShowHint() && results.size()>1) {
            selected = (Math.max(0,selected)+results.size()-1)%results.size();
            return true;
        }
        return false;
    }
    // Abbreviated text of the currently selected (or best) result.
    public String bestTextAbbrev() {
        assert shouldShowHint();
        return format(Math.max(0,selected),abbrevShowFlags());
    }
    // Applies the currently selected (or best) result to the document.
    public void applySelected() {
        apply(Math.max(0,selected));
    }
    public int autoApply() {
        // Automatically apply the best found result
        return rawApply(eddy.document, format(0, abbrevShowFlags()));
    }
    public boolean shouldAutoApply() {
        // check if we're confident enough to apply the best found result automatically
        PreferenceData data = Preferences.getData();
        double t = data.getNumericAutoApplyThreshold();
        double f = data.getNumericAutoApplyFactor();
        if (results.size() >= 1 && results.get(0).p() >= t) {
            if (results.size() == 1)
                return true;
            else
                return results.get(0).p()/results.get(1).p() > f;
        }
        return false;
    }
    // Replaces the input range with code, reindents the affected lines, and
    // returns the change in document length. Caller must hold a write action.
    // NOTE(review): this always logs an eddyAutoApply event, even when
    // invoked from apply() which also logs eddyApply - confirm the double
    // logging is intended.
    public int rawApply(final @NotNull Document document, final @NotNull String code) {
        final int offsetDiff = code.length() - input.range.getLength();
        document.replaceString(input.range.getStartOffset(), input.range.getEndOffset(), code);
        // reindent
        CodeStyleManager csm = CodeStyleManager.getInstance(eddy.project);
        final int sline = document.getLineNumber(input.range.getStartOffset());
        final int fline = document.getLineNumber(input.range.getEndOffset() + offsetDiff);
        for (int i = sline; i <= fline; ++i) {
            csm.adjustLineIndent(document, document.getLineStartOffset(i));
        }
        Memory.log(Memory.eddyAutoApply(eddy.base, Memory.now(), input.input, results, code));
        return offsetDiff;
    }
    // Applies the index-th result inside a write command, moves the caret to
    // the end of the replacement, and commits the document.
    public void apply(final int index) {
        final String full = format(results.get(index).x(),fullShowFlags());
        ApplicationManager.getApplication().runWriteAction(new Runnable() {
            @Override
            public void run() {
                final Editor editor = eddy.editor;
                new WriteCommandAction(eddy.project, eddy.getFile()) {
                    @Override
                    public void run(@NotNull Result result) {
                        final int newOffset = input.range.getEndOffset() + rawApply(eddy.document,full);
                        editor.getCaretModel().moveToOffset(newOffset);
                        PsiDocumentManager.getInstance(eddy.project).commitDocument(eddy.document);
                    }
                }.execute();
            }
        });
        Memory.log(Memory.eddyApply(eddy.base,Memory.now(),input.input,results,index));
    }
    // Sends a user suggestion to the logging backend and shows a balloon
    // notification reporting success or failure of the upload.
    public void logSuggestion(final @NotNull String suggestion) {
        Memory.log(Memory.eddySuggestion(eddy.base, Memory.now(), input.input, results, suggestion)).onComplete(new AbstractFunction1<Try<BoxedUnit>, Void>() {
            @Override
            public Void apply(Try<BoxedUnit> v) {
                final String title, msg;
                if (v.isSuccess()) {
                    title = "Suggestion processed";
                    msg = "Thank you! Your suggestion will help improve eddy!";
                } else {
                    title = "Suggestion failed to send";
                    msg = "I'm sorry, your suggestion could not be recorded. Our servers could not be reached.";
                }
                Notifications.Bus.notify(new Notification("Eddy", title, msg, NotificationType.INFORMATION), eddy.project);
                return null;
            }
        }, scala.concurrent.ExecutionContext.Implicits$.MODULE$.global());
    }
}
public static interface Take {
// return true if we're done absorbing output, false if more is desired
public boolean take(Output output);
}
// Creates an Eddy instance bound to a project and editor, and captures
// the base logging info (install key, version/build string, project name).
public Eddy(@NotNull final Project project, final Editor editor) {
    this.project = project;
    this.editor = editor;
    this.document = editor.getDocument();
    this.base = Memory.basics(EddyPlugin.installKey(), EddyPlugin.getVersion() + " - " + EddyPlugin.getBuild(), project.getName());
}
// Internal control-flow exception: thrown when the current line is not a
// candidate for interpretation (no enclosing block, only whitespace, etc.).
private static class Skip extends Exception {
    public Skip(final String s) {
        super(s);
    }
}
// Thrown when the PSI tree has an unexpected shape (e.g. an element with
// neither a previous sibling nor a parent).
public static class PsiStructureException extends RuntimeException {
    public PsiStructureException(final String s) { super(s); }
}
// Returns the element immediately before e: its previous sibling when one
// exists, otherwise its parent (which may be null at the top of the tree).
private static @Nullable PsiElement previous(final PsiElement e) throws Skip {
    final PsiElement prev = e.getPrevSibling();
    return prev != null ? prev : e.getParent();
}
// Shrinks r so it no longer covers leading or trailing whitespace of the
// text it spans in doc.
private static TextRange trim(final Document doc, final TextRange r) {
    final String text = doc.getText(r);
    final String stripped = text.trim();
    final int begin = r.getStartOffset() + text.indexOf(stripped);
    return new TextRange(begin, begin + stripped.length());
}
// Walks up from e to the nearest enclosing PsiCodeBlock; throws Skip if
// the root of the tree is reached first.
private static @NotNull PsiCodeBlock codeBlockAbove(PsiElement e) throws Skip {
    while (e != null) {
        if (e instanceof PsiCodeBlock)
            return (PsiCodeBlock)e;
        e = e.getParent();
    }
    throw new Skip("No enclosing code block found");
}
// Walks up from e to the nearest enclosing statement or code block;
// throws Skip if the root of the tree is reached first.
private static @NotNull PsiElement stmtsAbove(PsiElement e) throws Skip {
    while (e != null) {
        if (e instanceof PsiCodeBlock || e instanceof PsiStatement)
            return e;
        e = e.getParent();
    }
    throw new Skip("No enclosing statements found");
}
// Find the smallest consecutive sequence of statements and EOL comments that contains the given range.
// 1. Starting at elem, go up to find the nearest enclosing code block.
// 2. Descend to the smallest child that contains the whole trimmed range.
// 3. Go up to the nearest enclosing statement or code block.
// 4. If we're at a code block, return the list of children intersecting the line.
// 5. Otherwise, return whatever we're at.
// Throws Skip if there is no enclosing code block or statement.
private static List<PsiElement> elementsContaining(final Document doc, TextRange range, PsiElement e) throws Skip {
    // Trim whitespace off both ends of range
    range = trim(doc,range);
    // Go up to the nearest enclosing code block
    e = codeBlockAbove(e);
    // Descend to the smallest child of e that contains the whole (trimmed) range
    outer:
    for (;;) {
        for (final PsiElement kid : e.getChildren())
            if (kid.getTextRange().contains(range)) {
                e = kid;
                continue outer;
            }
        break;
    }
    // Go back up to find a statement or code block
    e = stmtsAbove(e);
    // Go up outside of unblocked ifs so that we don't turn an unblocked body into multiple statements
    // For an example, see testBlockYes in the IntelliJ tests.
    if (e instanceof PsiStatement)
        for (;;) {
            final PsiElement p = e.getParent();
            if (!(p instanceof PsiIfStatement))
                break;
            e = p;
        }
    // Collect results
    final List<PsiElement> results = new SmartList<PsiElement>();
    if (e instanceof PsiCodeBlock) {
        // We're a code block, so return only those children intersecting the line.
        // Also ignore the first and last children, which are left and right braces.
        final PsiElement[] block = e.getChildren();
        int lo = 1, hi = block.length-1;
        while (lo < hi && !block[lo  ].getTextRange().intersects(range)) lo++;
        while (lo < hi && !block[hi-1].getTextRange().intersects(range)) hi--;
        for (int i=lo;i<hi;i++)
            results.add(block[i]);
    } else {
        // Otherwise, return a singleton list
        results.add(e);
    }
    return results;
}
// Should we expand an element or leave it atomic during tokenization?
// Leaves are never expanded; code blocks only when the cursor is strictly
// inside them; statements only when they overlap the current line.
private static boolean expand(final TreeElement e, final TextRange range, final int cursor) {
    // Never expand leaves
    if (e instanceof LeafElement)
        return false;
    // Otherwise, expand or not based on psi
    final @NotNull PsiElement psi = e.getPsi();
    final TextRange r = psi.getTextRange();
    // Expand blocks if the cursor is strictly inside
    if (psi instanceof PsiCodeBlock) {
        // Check if we're strictly inside. Note that r.contains(pos) is wrong here.
        //   |{}  - r 0 2, pos 0, not inside
        //   {|}  - r 0 2, pos 1, inside
        //   {}|  - r 0 2, pos 2, not inside
        return r.getStartOffset() < cursor && cursor < r.getEndOffset();
    }
    // Expand statements if they overlap our line
    if (psi instanceof PsiStatement)
        return r.intersects(range);
    // Expand everything else
    return true;
}
// Builds the interpretation Input for the current caret line: finds the
// statements/comments covering the line, tokenizes them (expanding nodes
// per expand()), trims whitespace, and computes the text range to replace.
// Throws Skip when there is nothing useful to interpret on this line.
public Input input() throws Skip {
    //log("processing eddy...");
    // Determine where we are
    final int cursor = editor.getCaretModel().getCurrentCaret().getOffset();
    final int line = document.getLineNumber(cursor);
    final TextRange range = TextRange.create(document.getLineStartOffset(line), document.getLineEndOffset(line));
    //log(" processing line " + line + ": " + document.getText(range));
    // Find relevant statements and comments
    final List<PsiElement> elems = elementsContaining(document,range,getFile().findElementAt(cursor));
    if (elems.isEmpty())
        throw new Skip("Empty statement list");
    final PsiElement place = previous(elems.get(0));
    if (place == null)
        throw new PsiStructureException("previous(" + elems.get(0) + ") == null");
    // Walk all relevant elements, collecting leaves and atomic code blocks.
    // We walk on AST instead of Psi to get down to the token level.
    final List<Loc<Token>> tokens = new ArrayList<Loc<Token>>();
    final RecursiveTreeElementVisitor V = new RecursiveTreeElementVisitor() {
        @Override protected boolean visitNode(final TreeElement e) {
            if (expand(e,range,cursor))
                return true;
            tokens.add(Tokenizer.psiToTok(e));
            return false;
        }
    };
    for (final PsiElement elem : elems) {
        final ASTNode node = elem.getNode();
        assert node instanceof TreeElement : "Bad AST node "+node+" for element "+elem;
        ((TreeElement)node).acceptTree(V);
    }
    // Trim whitespace at the ends of the token stream
    while (!tokens.isEmpty() && tokens.get(0).x() instanceof WhitespaceTok) tokens.remove(0);
    while (!tokens.isEmpty() && tokens.get(tokens.size()-1).x() instanceof WhitespaceTok) tokens.remove(tokens.size()-1);
    if (tokens.isEmpty())
        throw new Skip("No tokens");
    // Skip if we're entirely comments and whitespace
    boolean allSpace = true;
    for (final Loc<Token> t : tokens)
        if (!(t.x() instanceof SpaceTok)) {
            allSpace = false;
            break;
        }
    if (allSpace)
        throw new Skip("All whitespace and comments");
    // Compute range to be replaced. We rely on !tokens.isEmpty
    final TextRange trim = Tokenizer.range(tokens.get(0)).union(Tokenizer.range(tokens.get(tokens.size()-1)));
    final String before = document.getText(trim);
    log("eddy before: " + before.replaceAll("[\n\t ]+", " "));
    return new Input(trim,tokens,place,before);
}
// Builds the semantic environment local to input.place; lastEdit is
// forwarded to getLocalEnvironment (its semantics are defined there).
public Env env(final Input input, final int lastEdit) {
    return EddyPlugin.getInstance(project).getEnv().getLocalEnvironment(input.place, lastEdit);
}
// Refreshes the intention (lightbulb) hint on the EDT, skipped entirely
// in headless (test) environments. The hint is shown only when the
// editor component is displayable; a NullPointerException from the
// platform call is logged and swallowed deliberately.
private void updateIntentions() {
    if (!ApplicationManager.getApplication().isHeadlessEnvironment()) {
        LaterInvocator.invokeLater(new Runnable() {
            @Override
            public void run() {
                final PsiFile file = getFile();
                ShowIntentionsPass.IntentionsInfo intentions = new ShowIntentionsPass.IntentionsInfo();
                ShowIntentionsPass.getActionsToShow(editor, file, intentions, -1);
                if (!intentions.isEmpty()) {
                    try {
                        if (editor.getComponent().isDisplayable())
                            IntentionHintComponent.showIntentionHint(project, file, editor, intentions, false);
                    } catch (final NullPointerException e) {
                        // Log and ignore
                        log("updateIntentions: Can't show hint due to NullPointerException");
                    }
                }
            }
        }, project.getDisposed());
    }
}
// Runs one interpretation pass: builds the Input for the current line,
// constructs the environment, and feeds progressively better result lists
// to takeoutput via Tarski.fixTake. All state lives in the local Helper
// class so a partial run can still be logged if an exception escapes.
public void process(final @NotNull Editor editor, final int lastEdit, final Take takeoutput) {
    // Use mutable variables so that we log more if an exception is thrown partway through
    class Helper {
        final double start = Memory.now();
        Input input;
        List<Alt<ShowStmts>> results;
        // per-result-delivery latencies in seconds, for logging
        List<Double> delays = new ArrayList<Double>(4);
        Throwable error;
        // Runs the solver; each delivered result list is wrapped in an Output
        // and handed to takeoutput. Honors thread interruption via ThreadDeath.
        void compute(final Env env) {
            if (Thread.currentThread().isInterrupted())
                throw new ThreadDeath();
            final Function2<String,ShowFlags,String> format = new AbstractFunction2<String,ShowFlags,String>() {
                @Override public String apply(final String sh, final ShowFlags f) {
                    return reformat(input.place,sh);
                }
            };
            final long startTime = System.nanoTime();
            final Tarski.Take take = new Tarski.Take() {
                @Override public boolean take(final List<Alt<ShowStmts>> rs) {
                    results = rs;
                    double delay = (System.nanoTime() - startTime)/1e9;
                    delays.add(delay);
                    Eddy.Output output = new Output(Eddy.this,input,results);
                    if (isDebug())
                        System.out.println(String.format("output %.3fs: ", delay) + logString(output.formats(denotationShowFlags(),true)));
                    updateIntentions();
                    return takeoutput.take(output);
                }
            };
            Tarski.fixTake(input.input,env,format,take);
        }
        // Builds the input and computes; lines that cannot be interpreted
        // (Skip) are silently ignored.
        void unsafe() {
            try {
                input = Eddy.this.input();
                compute(env(input,lastEdit));
            } catch (Skip s) {
                // ignore skipped lines
                //log("skipping: " + s.getMessage());
            }
        }
        // Wraps unsafe() with error handling and guarantees the process event
        // (including any error) is logged in the finally block.
        void safe() {
            try {
                if (isDebug()) // Run outside try so that we can see inside exceptions
                    unchecked(new Unchecked<Unit$>() { @Override public Unit$ apply() {
                        unsafe();
                        return Unit$.MODULE$;
                    }});
                else try {
                    unsafe();
                } catch (final Throwable e) {
                    error = e;
                    if (!(e instanceof ThreadDeath) && !(e instanceof RuntimeInterruptedException))
                        logError("process()",e); // Log everything except for ThreadDeath and RuntimeInterruptedException, which happens all the time.
                    if (e instanceof Error && !(e instanceof AssertionError))
                        throw (Error)e; // Rethrow most kinds of Errors
                }
            } finally {
                Memory.log(Memory.eddyProcess(base,start,
                    input==null ? null : input.input,
                    results,
                    delays).error(error));
            }
        }
    }
    new Helper().safe();
}
// Reformats a solver-produced statement string using the code style at
// the given place. The string should be a single syntactically valid
// statement.
private String reformat(final PsiElement place, final @NotNull String show) {
    return new Formatter(project,place).reformat(show);
}
}
|
src/com/eddysystems/eddy/engine/Eddy.java
|
package com.eddysystems.eddy.engine;
import com.eddysystems.eddy.EddyPlugin;
import com.eddysystems.eddy.LightDocument;
import com.eddysystems.eddy.PreferenceData;
import com.eddysystems.eddy.Preferences;
import com.intellij.codeInsight.daemon.impl.ShowIntentionsPass;
import com.intellij.codeInsight.intention.impl.IntentionHintComponent;
import com.intellij.formatting.*;
import com.intellij.lang.ASTNode;
import com.intellij.lang.LanguageFormatting;
import com.intellij.lang.PsiBuilder;
import com.intellij.lang.java.parser.JavaParser;
import com.intellij.lang.java.parser.JavaParserUtil;
import com.intellij.notification.Notification;
import com.intellij.notification.NotificationType;
import com.intellij.notification.Notifications;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.Result;
import com.intellij.openapi.application.RuntimeInterruptedException;
import com.intellij.openapi.application.impl.LaterInvocator;
import com.intellij.openapi.command.WriteCommandAction;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.editor.ex.DocumentEx;
import com.intellij.openapi.fileEditor.impl.FileDocumentManagerImpl;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.TextRange;
import com.intellij.pom.java.LanguageLevel;
import com.intellij.psi.*;
import com.intellij.psi.codeStyle.CodeStyleManager;
import com.intellij.psi.codeStyle.CodeStyleSettings;
import com.intellij.psi.codeStyle.CodeStyleSettingsManager;
import com.intellij.psi.impl.source.DummyHolder;
import com.intellij.psi.impl.source.JavaDummyElement;
import com.intellij.psi.impl.source.SourceTreeToPsiMap;
import com.intellij.psi.impl.source.codeStyle.CodeFormatterFacade;
import com.intellij.psi.impl.source.tree.LeafElement;
import com.intellij.psi.impl.source.tree.RecursiveTreeElementVisitor;
import com.intellij.psi.impl.source.tree.TreeElement;
import com.intellij.psi.util.PsiUtil;
import com.intellij.util.IncorrectOperationException;
import com.intellij.util.SmartList;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import scala.Function2;
import scala.Unit$;
import scala.runtime.AbstractFunction1;
import scala.runtime.AbstractFunction2;
import scala.runtime.BoxedUnit;
import scala.util.Try;
import tarski.Environment.Env;
import tarski.Memory;
import tarski.Scores.Alt;
import tarski.Tarski;
import tarski.Tarski.ShowStmts;
import tarski.Tokens.*;
import utility.Locations.Loc;
import utility.Utility.Unchecked;
import java.util.ArrayList;
import java.util.List;
import static com.eddysystems.eddy.engine.Utility.*;
import static tarski.Tokens.*;
import static utility.Utility.unchecked;
public class Eddy {
final private Project project;
final private Memory.Info base;
final private Editor editor;
final private Document document;
public Editor getEditor() { return editor; }
public PsiFile getFile() {
final PsiFile file = PsiDocumentManager.getInstance(project).getPsiFile(document);
assert file != null;
return file;
}
public static class Input {
final TextRange range;
final List<Loc<Token>> input;
final PsiElement place;
final String before_text;
Input(final TextRange range, final List<Loc<Token>> input, final PsiElement place, final String before_text) {
this.range = range;
this.input = input;
this.place = place;
this.before_text = before_text;
}
public String getText() {
return before_text;
}
}
// The results of the interpretation
public static class Output {
final private Eddy eddy;
final public Input input;
final public List<Alt<ShowStmts>> results;
// Mutable field: which output we've selected. If we haven't explicitly selected something, offset < 0.
private int selected = -1;
Output(final Eddy eddy, final Input input, final List<Alt<ShowStmts>> results) {
this.eddy = eddy;
this.input = input;
this.results = results;
}
static String format(final ShowStmts ss, final ShowFlags f) {
return f.den() ? ss.den() : f.abbreviate() ? ss.abbrev() : ss.full();
}
public String format(final int i, final ShowFlags f) {
return format(results.get(i).x(),f);
}
public List<String> formats(final ShowFlags f, final boolean probs) {
final List<String> fs = new ArrayList<String>(results.size());
for (final Alt<ShowStmts> a : results)
fs.add(format(a.x(),f));
if (probs) {
for (int i = 0; i < fs.size(); ++i) {
fs.set(i, String.format("%f: %s", results.get(i).p(), fs.get(i)));
}
}
return fs;
}
public String[] getResultSummary() {
return formats(new ShowFlags(false,true,true), true).toArray(new String[results.size()]);
}
public boolean foundSomething() {
return !results.isEmpty();
}
// Did we find useful meanings, and are those meanings different from what's already there?
public boolean shouldShowHint() {
for (final Alt<ShowStmts> r : results)
if (r.x().similar(input.input))
return false; // We found what's already there
return !results.isEmpty();
}
// Is there only one realistic option (or did the user explicitly select one)?
public boolean single() {
return results.size() == 1 || selected >= 0;
}
public boolean nextBestResult() {
if (shouldShowHint() && results.size()>1) {
selected = (Math.max(0,selected)+1)%results.size();
return true;
}
return false;
}
public boolean prevBestResult() {
if (shouldShowHint() && results.size()>1) {
selected = (Math.max(0,selected)+results.size()-1)%results.size();
return true;
}
return false;
}
public String bestTextAbbrev() {
assert shouldShowHint();
return format(Math.max(0,selected),abbrevShowFlags());
}
public void applySelected() {
apply(Math.max(0,selected));
}
public int autoApply() {
// Automatically apply the best found result
return rawApply(eddy.document, format(0, abbrevShowFlags()));
}
public boolean shouldAutoApply() {
// check if we're confident enough to apply the best found result automatically
PreferenceData data = Preferences.getData();
double t = data.getNumericAutoApplyThreshold();
double f = data.getNumericAutoApplyFactor();
if (results.size() >= 1 && results.get(0).p() >= t) {
if (results.size() == 1)
return true;
else
return results.get(0).p()/results.get(1).p() > f;
}
return false;
}
public int rawApply(final @NotNull Document document, final @NotNull String code) {
final int offsetDiff = code.length() - input.range.getLength();
document.replaceString(input.range.getStartOffset(), input.range.getEndOffset(), code);
// reindent
CodeStyleManager csm = CodeStyleManager.getInstance(eddy.project);
final int sline = document.getLineNumber(input.range.getStartOffset());
final int fline = document.getLineNumber(input.range.getEndOffset() + offsetDiff);
for (int i = sline; i <= fline; ++i) {
csm.adjustLineIndent(document, document.getLineStartOffset(i));
}
Memory.log(Memory.eddyAutoApply(eddy.base, Memory.now(), input.input, results, code));
return offsetDiff;
}
public void apply(final int index) {
final String full = format(results.get(index).x(),fullShowFlags());
ApplicationManager.getApplication().runWriteAction(new Runnable() {
@Override
public void run() {
final Editor editor = eddy.editor;
new WriteCommandAction(eddy.project, eddy.getFile()) {
@Override
public void run(@NotNull Result result) {
final int newOffset = input.range.getEndOffset() + rawApply(eddy.document,full);
editor.getCaretModel().moveToOffset(newOffset);
PsiDocumentManager.getInstance(eddy.project).commitDocument(eddy.document);
}
}.execute();
}
});
Memory.log(Memory.eddyApply(eddy.base,Memory.now(),input.input,results,index));
}
public void logSuggestion(final @NotNull String suggestion) {
Memory.log(Memory.eddySuggestion(eddy.base, Memory.now(), input.input, results, suggestion)).onComplete(new AbstractFunction1<Try<BoxedUnit>, Void>() {
@Override
public Void apply(Try<BoxedUnit> v) {
final String title, msg;
if (v.isSuccess()) {
title = "Suggestion processed";
msg = "Thank you! Your suggestion will help improve eddy!";
} else {
title = "Suggestion failed to send";
msg = "I'm sorry, your suggestion could not be recorded. Our servers could not be reached.";
}
Notifications.Bus.notify(new Notification("Eddy", title, msg, NotificationType.INFORMATION), eddy.project);
return null;
}
}, scala.concurrent.ExecutionContext.Implicits$.MODULE$.global());
}
}
public static interface Take {
// return true if we're done absorbing output, false if more is desired
public boolean take(Output output);
}
public Eddy(@NotNull final Project project, final Editor editor) {
this.project = project;
this.editor = editor;
this.document = editor.getDocument();
this.base = Memory.basics(EddyPlugin.installKey(), EddyPlugin.getVersion() + " - " + EddyPlugin.getBuild(), project.getName());
}
private static class Skip extends Exception {
public Skip(final String s) {
super(s);
}
}
public static class PsiStructureException extends RuntimeException {
public PsiStructureException(final String s) { super(s); }
}
// Find the previous or immediately enclosing element (which may be null if there's no parent)
private static @Nullable PsiElement previous(final PsiElement e) throws Skip {
PsiElement p = e.getPrevSibling();
if (p != null)
return p;
return e.getParent();
}
// Trim a range to not include whitespace
private static TextRange trim(final Document doc, final TextRange r) {
final int lo = r.getStartOffset();
final String s = doc.getText(r);
final String t = s.trim();
final int st = s.indexOf(t);
return new TextRange(lo+st,lo+st+t.length());
}
private static @NotNull PsiCodeBlock codeBlockAbove(PsiElement e) throws Skip {
for (;;) {
if (e == null)
throw new Skip("No enclosing code block found");
if (e instanceof PsiCodeBlock)
return (PsiCodeBlock)e;
e = e.getParent();
}
}
private static @NotNull PsiElement stmtsAbove(PsiElement e) throws Skip {
for (;;) {
if (e == null)
throw new Skip("No enclosing statements found");
if (e instanceof PsiCodeBlock || e instanceof PsiStatement)
return e;
e = e.getParent();
}
}
// Find the smallest consecutive sequence of statements and EOL comments that contains the given range.
// 1. Starting at elem, go up to find the nearest enclosing code block.
// 2. Descend to the smallest child that contains the whole trimmed range.
// 3. Go up to the nearest enclosing statement or code block.
// 4. If we're at a code block, return the list of children intersecting the line.
// 5. Otherwise, return whatever we're at.
// Implements the 5-step algorithm described in the comment above.
// Returns a non-empty run of sibling elements (statements/comments) whose text
// spans cover the trimmed range, or throws Skip when no enclosing block exists.
private static List<PsiElement> elementsContaining(final Document doc, TextRange range, PsiElement e) throws Skip {
// Trim whitespace off both ends of range
range = trim(doc,range);
// Go up to the nearest enclosing code block
e = codeBlockAbove(e);
// Descend to the smallest child of e that contains the whole (trimmed) range
outer:
for (;;) {
for (final PsiElement kid : e.getChildren())
if (kid.getTextRange().contains(range)) {
e = kid;
continue outer;
}
break;
}
// Go back up to find a statement or code block
e = stmtsAbove(e);
// Go up outside of unblocked ifs so that we don't turn an unblocked body into multiple statements
// For an example, see testBlockYes in the IntelliJ tests.
if (e instanceof PsiStatement)
for (;;) {
final PsiElement p = e.getParent();
if (!(p instanceof PsiIfStatement))
break;
e = p;
}
// Collect results
final List<PsiElement> results = new SmartList<PsiElement>();
if (e instanceof PsiCodeBlock) {
// We're a code block, so return only those children intersecting the line.
// Also ignore the first and last children, which are left and right braces.
final PsiElement[] block = e.getChildren();
int lo = 1, hi = block.length-1;
// Narrow [lo,hi) from both ends until the boundary children touch the range
while (lo < hi && !block[lo ].getTextRange().intersects(range)) lo++;
while (lo < hi && !block[hi-1].getTextRange().intersects(range)) hi--;
for (int i=lo;i<hi;i++)
results.add(block[i]);
} else {
// Otherwise, return a singleton list
results.add(e);
}
return results;
}
// Should we expand an element or leave it atomic?
// Decide whether the AST walk in input() should descend into e (true) or
// capture it as a single atomic token (false).
private static boolean expand(final TreeElement e, final TextRange range, final int cursor) {
// Never expand leaves
if (e instanceof LeafElement)
return false;
// Otherwise, expand or not based on psi
final @NotNull PsiElement psi = e.getPsi();
final TextRange r = psi.getTextRange();
// Expand blocks if the cursor is strictly inside
if (psi instanceof PsiCodeBlock) {
// Check if we're strictly inside. Note that r.contains(pos) is wrong here.
// |{} - r 0 2, pos 0, not inside
// {|} - r 0 2, pos 1, inside
// {}| - r 0 2, pos 2, not inside
return r.getStartOffset() < cursor && cursor < r.getEndOffset();
}
// Expand statements if they overlap our line
if (psi instanceof PsiStatement)
return r.intersects(range);
// Expand everything else
return true;
}
// Build the Input for the line under the caret: the atomic token stream, the
// text range those tokens span, and the PSI "place" just before them.
// Throws Skip when the line contains nothing usable.
public Input input() throws Skip {
//log("processing eddy...");
// Determine where we are
final int cursor = editor.getCaretModel().getCurrentCaret().getOffset();
final int line = document.getLineNumber(cursor);
final TextRange range = TextRange.create(document.getLineStartOffset(line), document.getLineEndOffset(line));
//log(" processing line " + line + ": " + document.getText(range));
// Find relevant statements and comments
final List<PsiElement> elems = elementsContaining(document,range,getFile().findElementAt(cursor));
if (elems.isEmpty())
throw new Skip("Empty statement list");
final PsiElement place = previous(elems.get(0));
if (place == null)
throw new PsiStructureException("previous(" + elems.get(0) + ") == null");
// Walk all relevant elements, collecting leaves and atomic code blocks.
// We walk on AST instead of Psi to get down to the token level.
final List<Loc<Token>> tokens = new ArrayList<Loc<Token>>();
final RecursiveTreeElementVisitor V = new RecursiveTreeElementVisitor() {
// visitNode returning true recurses into children; false keeps e as one token
@Override protected boolean visitNode(final TreeElement e) {
if (expand(e,range,cursor))
return true;
tokens.add(Tokenizer.psiToTok(e));
return false;
}
};
for (final PsiElement elem : elems) {
final ASTNode node = elem.getNode();
assert node instanceof TreeElement : "Bad AST node "+node+" for element "+elem;
((TreeElement)node).acceptTree(V);
}
// Trim whitespace at the ends of the token stream
while (!tokens.isEmpty() && tokens.get(0).x() instanceof WhitespaceTok) tokens.remove(0);
while (!tokens.isEmpty() && tokens.get(tokens.size()-1).x() instanceof WhitespaceTok) tokens.remove(tokens.size()-1);
if (tokens.isEmpty())
throw new Skip("No tokens");
// Compute range to be replaced. We rely on !tokens.isEmpty
final TextRange trim = Tokenizer.range(tokens.get(0)).union(Tokenizer.range(tokens.get(tokens.size()-1)));
final String before = document.getText(trim);
log("eddy before: " + before.replaceAll("[\n\t ]+", " "));
return new Input(trim,tokens,place,before);
}
// Obtain the local typing environment around input.place.
// lastEdit is forwarded to getLocalEnvironment — NOTE(review): its exact
// semantics (presumably an edit timestamp/counter) are defined elsewhere.
public Env env(final Input input, final int lastEdit) {
return EddyPlugin.getInstance(project).getEnv().getLocalEnvironment(input.place, lastEdit);
}
// Refresh IntelliJ's intention hint popup for the current editor.
// Runs asynchronously on the event queue; skipped entirely in headless mode.
private void updateIntentions() {
if (!ApplicationManager.getApplication().isHeadlessEnvironment()) {
LaterInvocator.invokeLater(new Runnable() {
@Override
public void run() {
final PsiFile file = getFile();
ShowIntentionsPass.IntentionsInfo intentions = new ShowIntentionsPass.IntentionsInfo();
ShowIntentionsPass.getActionsToShow(editor, file, intentions, -1);
if (!intentions.isEmpty()) {
try {
// Only show the hint when the editor is actually on screen
if (editor.getComponent().isDisplayable())
IntentionHintComponent.showIntentionHint(project, file, editor, intentions, false);
} catch (final NullPointerException e) {
// Log and ignore
log("updateIntentions: Can't show hint due to NullPointerException");
}
}
}
}, project.getDisposed());
}
}
// Run eddy on the current caret line: build the Input, solve it via Tarski,
// and stream each batch of results to takeoutput (which returns true when
// it wants no further results). All timing/errors are logged to Memory in
// the finally block, even on partial failure.
public void process(final @NotNull Editor editor, final int lastEdit, final Take takeoutput) {
// Use mutable variables so that we log more if an exception is thrown partway through
class Helper {
final double start = Memory.now();
Input input;
List<Alt<ShowStmts>> results;
List<Double> delays = new ArrayList<Double>(4);
Throwable error;
// Invoke the solver; called only after input has been built successfully.
void compute(final Env env) {
// Bail out promptly if this background computation was cancelled
if (Thread.currentThread().isInterrupted())
throw new ThreadDeath();
final Function2<String,ShowFlags,String> format = new AbstractFunction2<String,ShowFlags,String>() {
@Override public String apply(final String sh, final ShowFlags f) {
return reformat(input.place,sh);
}
};
final long startTime = System.nanoTime();
final Tarski.Take take = new Tarski.Take() {
// Called once per result batch; records the batch, its latency, and forwards it
@Override public boolean take(final List<Alt<ShowStmts>> rs) {
results = rs;
double delay = (System.nanoTime() - startTime)/1e9;
delays.add(delay);
Eddy.Output output = new Output(Eddy.this,input,results);
if (isDebug())
System.out.println(String.format("output %.3fs: ", delay) + logString(output.formats(denotationShowFlags(),true)));
updateIntentions();
return takeoutput.take(output);
}
};
Tarski.fixTake(input.input,env,format,take);
}
// Build input and compute, letting Skip terminate quietly.
void unsafe() {
try {
input = Eddy.this.input();
compute(env(input,lastEdit));
} catch (Skip s) {
// ignore skipped lines
//log("skipping: " + s.getMessage());
}
}
// Wrap unsafe() with error capture and guaranteed Memory logging.
void safe() {
try {
if (isDebug()) // Run outside try so that we can see inside exceptions
unchecked(new Unchecked<Unit$>() { @Override public Unit$ apply() {
unsafe();
return Unit$.MODULE$;
}});
else try {
unsafe();
} catch (final Throwable e) {
error = e;
if (!(e instanceof ThreadDeath) && !(e instanceof RuntimeInterruptedException))
logError("process()",e); // Log everything except for ThreadDeath and RuntimeInterruptedException, which happens all the time.
if (e instanceof Error && !(e instanceof AssertionError))
throw (Error)e; // Rethrow most kinds of Errors
}
} finally {
// Always record the attempt, including whatever partial state we reached
Memory.log(Memory.eddyProcess(base,start,
input==null ? null : input.input,
results,
delays).error(error));
}
}
}
new Helper().safe();
}
// The string should be a single syntactically valid statement
// Reformat one statement's text using the project's code style, anchored at place.
private String reformat(final PsiElement place, final @NotNull String show) {
return new Formatter(project,place).reformat(show);
}
}
|
Skip all comment lines
|
src/com/eddysystems/eddy/engine/Eddy.java
|
Skip all comment lines
|
|
Java
|
bsd-3-clause
|
067958637ad007675c54ba1b1a9a3708c7628ac2
| 0
|
aldebaran/libqi-java,aldebaran/libqi-java,aldebaran/libqi-java
|
package com.aldebaran.qi;
import java.util.HashMap;
import java.util.Map;
/**
 * Specific {@link ClientAuthenticator} authenticating from a user and a token.
 *
 * If the server trusts on first use, we may receive a new token. In that case,
 * {@link #hasNewToken()} returns {@code true} and the new token is available
 * through {@link #getToken()}.
 *
 * Thread-safety: the authentication callbacks may be invoked from arbitrary
 * qi threads, so all access to the mutable state is synchronized on this
 * instance.
 */
public class UserTokenAuthenticator implements ClientAuthenticator
{
// Mutable state below is guarded by "this".
private String user;
private String token;
private boolean hasNewToken;
/**
 * Create an authenticator with a user and an initial token.
 *
 * @param user
 * the user
 * @param initialToken
 * the initial token (may be {@code null})
 */
public UserTokenAuthenticator(String user, String initialToken)
{
if (initialToken == null)
{
// "" means no token (null would segfault in the API)
initialToken = "";
}
this.user = user;
token = initialToken;
}
/**
 * Create an authenticator with a user but no token.
 *
 * @param user
 * the user
 */
public UserTokenAuthenticator(String user)
{
this(user, null);
}
// Build the authentication-data map in the shape the remote side expects.
private static Map<String, Object> createMap(String user, String token)
{
Map<String, Object> authData = new HashMap<String, Object>();
authData.put("user", user);
authData.put("token", token);
return authData;
}
@Override
public synchronized Map<String, Object> initialAuthData()
{
// Reset the flag: a new token (if any) belongs to the upcoming handshake
hasNewToken = false;
return createMap(user, token);
}
@Override
public synchronized Map<String, Object> _processAuth(Map<String, Object> authData)
{
// If no token was provided by initialAuthData(), the gateway
// generates and provides it the very first time (it trusts on first
// use).
token = (String) authData.get("newToken");
hasNewToken = true;
// We must return our new authentication data.
return createMap(user, token);
}
/**
 * Indicates whether a new token has been retrieved during the last
 * authentication.
 *
 * @return {@code true} if a new token has been retrieved, {@code false} otherwise
 */
public synchronized boolean hasNewToken()
{
return hasNewToken;
}
/**
 * Return the token.
 *
 * @return the token
 */
public synchronized String getToken()
{
return token;
}
}
|
qimessaging/src/main/java/com/aldebaran/qi/UserTokenAuthenticator.java
|
package com.aldebaran.qi;
import java.util.HashMap;
import java.util.Map;
/**
 * Specific {@link ClientAuthenticator} authenticating from a user and a token.
 *
 * If the server trusts on first use, we may receive a new token. In that case,
 * {@link #hasNewToken()} returns {@code true} and the new token is available
 * through {@link #getToken()}.
 *
 * Thread-safety: the ClientAuthenticator callbacks may be invoked from
 * arbitrary qi threads, so all access to the mutable state (token,
 * hasNewToken) is synchronized on this instance.
 */
public class UserTokenAuthenticator implements ClientAuthenticator
{
// Mutable state below is guarded by "this".
private String user;
private String token;
private boolean hasNewToken;
/**
 * Create an authenticator with a user and an initial token.
 *
 * @param user
 * the user
 * @param initialToken
 * the initial token (may be {@code null})
 */
public UserTokenAuthenticator(String user, String initialToken)
{
if (initialToken == null)
{
// "" means no token (null would segfault in the API)
initialToken = "";
}
this.user = user;
token = initialToken;
}
/**
 * Create an authenticator with a user but no token.
 *
 * @param user
 * the user
 */
public UserTokenAuthenticator(String user)
{
this(user, null);
}
// Build the authentication-data map in the shape the remote side expects.
private static Map<String, Object> createMap(String user, String token)
{
Map<String, Object> authData = new HashMap<String, Object>();
authData.put("user", user);
authData.put("token", token);
return authData;
}
@Override
public synchronized Map<String, Object> initialAuthData()
{
// Reset the flag: a new token (if any) belongs to the upcoming handshake
hasNewToken = false;
return createMap(user, token);
}
@Override
public synchronized Map<String, Object> _processAuth(Map<String, Object> authData)
{
// If no token was provided by initialAuthData(), the gateway
// generates and provides it the very first time (it trusts on first
// use).
token = (String) authData.get("newToken");
hasNewToken = true;
// We must return our new authentication data.
return createMap(user, token);
}
/**
 * Indicates whether a new token has been retrieved during the last
 * authentication.
 *
 * @return {@code true} if a new token has been retrieved, {@code false} otherwise
 */
public synchronized boolean hasNewToken()
{
return hasNewToken;
}
/**
 * Return the token.
 *
 * @return the token
 */
public synchronized String getToken()
{
return token;
}
}
|
Make UserTokenAuthenticator thread-safe
The callbacks of ClientAuthenticator are called from random qi threads.
Since UserTokenAuthenticator manages a state (token and hasNewToken),
make its access synchronized.
Change-Id: Id5156736dc876ef202696f5d541850323483eb87
Reviewed-on: http://gerrit.aldebaran.lan/72510
Tested-by: gerrit
Reviewed-by: rvimont <20c35f6f0668496f51846e8638771ff6c82c8c27@presta.aldebaran.com>
|
qimessaging/src/main/java/com/aldebaran/qi/UserTokenAuthenticator.java
|
Make UserTokenAuthenticator thread-safe
|
|
Java
|
bsd-3-clause
|
7c285ce5525b3a2584ba8347a794d9455bd5674d
| 0
|
bigdawg-istc/bigdawg,bigdawg-istc/bigdawg,bigdawg-istc/bigdawg,bigdawg-istc/bigdawg,bigdawg-istc/bigdawg,bigdawg-istc/bigdawg,bigdawg-istc/bigdawg,bigdawg-istc/bigdawg
|
package istc.bigdawg.executor;
import com.google.common.collect.HashMultimap;
import com.google.common.collect.Multimap;
import com.google.common.collect.Multimaps;
import com.jcabi.log.Logger;
import com.jcabi.log.VerboseThreads;
import istc.bigdawg.exceptions.MigrationException;
import istc.bigdawg.executor.plan.ExecutionNode;
import istc.bigdawg.executor.plan.QueryExecutionPlan;
import istc.bigdawg.migration.MigrationResult;
import istc.bigdawg.migration.Migrator;
import istc.bigdawg.monitoring.Monitor;
import istc.bigdawg.postgresql.PostgreSQLHandler;
import istc.bigdawg.postgresql.PostgreSQLHandler.QueryResult;
import istc.bigdawg.query.ConnectionInfo;
import org.apache.commons.lang3.tuple.ImmutablePair;
import java.sql.SQLException;
import java.util.Collection;
import java.util.HashSet;
import java.util.Map;
import java.util.Optional;
import java.util.concurrent.*;
import java.util.stream.Collectors;
import java.util.stream.StreamSupport;
/**
* TODO:
* fully abstracted DbHandlers instead of casting to PostgreSQL
* shuffle joins
* better exception/error handling in the event of failure
*
* @author ankush
*/
class PlanExecutor {
private static final Monitor monitor = new Monitor();
// Shared pool for node execution and migrations; VerboseThreads names threads for logging.
private static final ExecutorService threadPool = java.util.concurrent.Executors.newCachedThreadPool(new VerboseThreads());
// Engines on which each node's results currently reside (thread-safe multimap).
private final Multimap<ExecutionNode, ConnectionInfo> resultLocations = Multimaps.synchronizedSetMultimap(HashMultimap.create());
// Intermediate tables to drop, per engine, once the plan finishes.
private final Multimap<ConnectionInfo, String> temporaryTables = Multimaps.synchronizedSetMultimap(HashMultimap.create());
// De-duplicates concurrent migrations of the same (table, destination engine) pair.
private final Map<ImmutablePair<String, ConnectionInfo>, CompletableFuture<MigrationResult>> migrations = new ConcurrentHashMap<>();
// One latch per node, counted down as each of its dependencies completes.
private final Map<ExecutionNode, CountDownLatch> locks = new ConcurrentHashMap<>();
private final QueryExecutionPlan plan;
/**
 * Class responsible for handling the execution of a single QueryExecutionPlan
 *
 * @param plan
 * a data structure of the queries to be run and their ordering,
 * with edges pointing to dependencies
 */
public PlanExecutor(QueryExecutionPlan plan) {
this.plan = plan;
final StringBuilder sb = new StringBuilder();
for(ExecutionNode n : plan) {
sb.append(String.format("%s -> (%s)\n", n, plan.getDependents(n)));
}
Logger.info(this, "Received plan %s", plan.getSerializedName())
;
Logger.debug(this, "Nodes for plan %s: \n %s", plan.getSerializedName(), sb);
Logger.debug(this, "Ordered queries: \n %s",
StreamSupport.stream(plan.spliterator(), false)
.map(ExecutionNode::getQueryString)
.filter(Optional::isPresent).map(Optional::get)
.collect(Collectors.joining(" \n ---- then ---- \n ")));
// initialize countdown latches to the proper counts
for(ExecutionNode node : plan) {
int latchSize = plan.inDegreeOf(node);
Logger.debug(this, "Node %s lock initialized with %d dependencies", node, latchSize);
this.locks.put(node, new CountDownLatch(latchSize));
}
}
/**
 * Execute the plan, and return the result
 */
Optional<QueryResult> executePlan() throws SQLException, MigrationException {
final long start = System.currentTimeMillis();
Logger.info(this, "Executing query plan %s...", plan.getSerializedName());
CompletableFuture<Optional<QueryResult>> finalResult = CompletableFuture.completedFuture(Optional.empty());
// Launch every node concurrently; each blocks internally on its dependency latch
for (ExecutionNode node : plan) {
CompletableFuture<Optional<QueryResult>> result = CompletableFuture.supplyAsync(() -> this.executeNode(node), threadPool);
if (plan.getTerminalTableNode().equals(node)) {
finalResult = result;
}
}
// Block until finalResult has resolved
Optional<QueryResult> result = Optional.empty();
try {
result = finalResult.get();
} catch (InterruptedException e) {
Logger.error(this, "Execution of query plan %s was interrupted: %[exception]s", plan.getSerializedName(), e);
Thread.currentThread().interrupt();
} catch (ExecutionException e) {
Logger.error(this, "Error retrieving results of final query node %s: %[exception]s", plan.getSerializedName(), e);
}
dropTemporaryTables();
final long end = System.currentTimeMillis();
Logger.info(this, "Finished executing query plan %s, in %d ms.", plan.getSerializedName(), (end - start));
Logger.info(this, "Sending timing to monitor...");
monitor.finishedBenchmark(plan, start, end);
Logger.info(this, "Returning result to planner...");
return result;
}
// Execute one node: colocate its dependencies onto its engine, run its query,
// and mark it completed (releasing dependents) regardless of query outcome.
private Optional<QueryResult> executeNode(ExecutionNode node) {
// perform shuffle join if equijoin and hint doesn't specify otherwise
// TODO(ankush): re-enable this and debug
// if (node instanceof BinaryJoinExecutionNode &&
// ((BinaryJoinExecutionNode) node).getHint().orElse(BinaryJoinExecutionNode.JoinAlgorithms.SHUFFLE) == BinaryJoinExecutionNode.JoinAlgorithms.SHUFFLE &&
// ((BinaryJoinExecutionNode) node).isEquiJoin()) {
// BinaryJoinExecutionNode joinNode = (BinaryJoinExecutionNode) node;
// if(!joinNode.getHint().isPresent() || joinNode.getHint().get() == BinaryJoinExecutionNode.JoinAlgorithms.SHUFFLE) {
// try {
// colocateDependencies(node, Arrays.asList(joinNode.getLeft().table, joinNode.getRight().table));
//
// Optional<QueryResult> result = new ShuffleJoinExecutor(joinNode).execute();
// markNodeAsCompleted(node);
// return result;
// } catch (Exception e) {
// log.error(String.format("Error executing node %s", joinNode), e);
// return Optional.empty();
// }
// }
// }
// otherwise execute as local query execution (same as broadcast join)
// colocate dependencies, blocking until completed
colocateDependencies(node, new HashSet<>());
Logger.debug(this, "Executing query node %s...", node);
return node.getQueryString().flatMap((query) -> {
try {
final Optional<QueryResult> result = ((PostgreSQLHandler) node.getEngine().getHandler()).executePostgreSQL(query);
Logger.info(this, "Successfully executed node %s", node);
return result;
} catch (SQLException e) {
Logger.error(this, "Error executing node %s: %[exception]s", node, e);
// TODO: if error is actually bad, don't markNodeAsCompleted, and instead fail the QEP gracefully.
return Optional.empty();
} finally {
markNodeAsCompleted(node);
}
});
}
// Record where node's results live, schedule its table for cleanup (unless
// terminal), and count down the latch of every dependent node.
private void markNodeAsCompleted(ExecutionNode node) {
Logger.debug(this, "Completed execution of %s.", node);
if (!plan.getTerminalTableNode().equals(node)) {
// clean up the intermediate table later
node.getTableName().ifPresent((table) -> temporaryTables.put(node.getEngine(), table));
// update nodeLocations to reflect that the results are located on this node's engine
resultLocations.put(node, node.getEngine());
final Collection<ExecutionNode> dependants = plan.getDependents(node);
Logger.debug(this, "Examining dependants %s of %s", dependants, node);
for (ExecutionNode dependent : dependants) {
Logger.debug(this, "Decrementing lock of %s because %s completed.", dependent, node);
this.locks.get(dependent).countDown();
Logger.debug(this, "%s is now waiting on %d dependencies.", dependent, this.locks.get(dependent).getCount());
}
Logger.debug(this, "Completed examination of dependants %s of %s", dependants, node);
}
}
/**
 * Colocates the dependencies for the given ExecutionNode onto that node's engine.
 *
 * Waits for any incomplete dependencies, and blocks the current thread until completion.
 *
 * @param node the ExecutionNOde whose dependencies we want to colocate
 * @param ignoreTables table names that we wish to ignore
 */
private void colocateDependencies(ExecutionNode node, final Collection<String> ignoreTables) {
final Collection<String> ignoreCopy = new HashSet<>(ignoreTables);
// Block until dependencies are all resolved
try {
Logger.debug(this, "Waiting for %d dependencies of query node %s to be resolved...", this.locks.get(node).getCount(), node);
while(!this.locks.get(node).await(10, TimeUnit.SECONDS)) {
Logger.debug(this, "Still waiting for %d dependencies of query node %s to be resolved...", this.locks.get(node).getCount(), node);
}
} catch (InterruptedException e) {
Logger.error(this, "Execution of query node %s was interrupted while waiting for dependencies: %[exception]s", node, e);
Thread.currentThread().interrupt();
}
Logger.debug(this, "Colocating dependencies of %s to %s", node, node.getEngine());
// Skip dependencies whose results are already on the destination engine
ignoreCopy.addAll(plan.getDependencies(node).stream()
.filter(d -> resultLocations.containsEntry(d, node.getEngine()))
.map(n -> n.getTableName().orElse("NO_TABLE"))
.collect(Collectors.toSet()));
Logger.debug(this, "Ignoring dependencies %s of %s", ignoreCopy, node);
// java.util.stream.Stream<ExecutionNode> deps = plan.getDependencies(node).stream()
// .filter(d -> d.getTableName().isPresent() && !ignoreCopy.contains(d.getTableName().get()));
//
// Logger.debug(this, "Examining dependencies %s of %s", deps.collect(Collectors.toSet()), node);
// CompletableFuture[] futures = deps
// .map((d) -> {
// // computeIfAbsent gets a previous migration's Future, or creates one if it doesn't already exist
// ImmutablePair<String, ConnectionInfo> migrationKey = new ImmutablePair<>(d.getTableName().get(), node.getEngine());
// Logger.debug(PlanExecutor.this, "Examining %s to see if migration is necessary...", d);
//
// return migrations.computeIfAbsent(migrationKey, (k) -> {
// return CompletableFuture.supplyAsync(() -> {
// Logger.debug(PlanExecutor.this, "Started migrating dependency %s of node %s", d, node);
// MigrationResult result = colocateSingleDependency(d, node);
// Logger.debug(PlanExecutor.this, "Finished migrating dependency %s of node %s: %s", d, node, result);
// return result;
// }, threadPool);
// });
// }).toArray(CompletableFuture[]::new);
final Collection<ExecutionNode> deps = plan.getDependencies(node).stream()
.filter(d -> d.getTableName().isPresent() && !ignoreCopy.contains(d.getTableName().get()))
.collect(Collectors.toSet());
Logger.debug(this, "Examining dependencies %s of %s", deps, node);
Collection<CompletableFuture<MigrationResult>> futureCollection = new HashSet<>();
for(ExecutionNode d : deps) {
final ImmutablePair<String, ConnectionInfo> migrationKey = new ImmutablePair<>(d.getTableName().get(), node.getEngine());
Logger.debug(PlanExecutor.this, "Examining %s to see if migration is necessary...", d);
// Synchronize check-then-put so the same migration is never scheduled twice
synchronized (migrations) {
if (!migrations.containsKey(migrationKey)) {
final CompletableFuture<MigrationResult> migration = CompletableFuture.supplyAsync(() -> {
Logger.debug(PlanExecutor.this, "Started migrating dependency %s of node %s", d, node);
final MigrationResult result = colocateSingleDependency(d, node);
Logger.debug(PlanExecutor.this, "Finished migrating dependency %s of node %s: %s", d, node, result);
return result;
}, threadPool);
migrations.put(migrationKey, migration);
futureCollection.add(migration);
} else {
Logger.debug(PlanExecutor.this, "Already migrating %s, not queueing again.", d);
}
}
}
final CompletableFuture[] futures = futureCollection.toArray(new CompletableFuture[futureCollection.size()]);
Logger.debug(this, "Waiting on %d dependencies of %s to be migrated...", futures.length, node);
CompletableFuture.allOf(futures).join();
Logger.debug(this, "All dependencies of %s have migrated!", node);
}
// Migrate one dependency's table to the dependant's engine, updating
// resultLocations and temporaryTables on success; failures are returned as
// a failed MigrationResult rather than thrown.
private MigrationResult colocateSingleDependency(ExecutionNode dependency, ExecutionNode dependant) {
return dependency.getTableName().map((table) -> {
try {
final MigrationResult result = Migrator.migrate(dependency.getEngine(), table, dependant.getEngine(), table);
if(result.isError()) {
throw new MigrationException(result.toString());
}
Logger.debug(PlanExecutor.this, "Marking dependency %s as migrated on engine %s...", dependency, dependant.getEngine());
// mark the dependency's data as being present on node.getEngine()
resultLocations.put(dependency, dependant.getEngine());
// mark that this engine now has a copy of the dependency's data
temporaryTables.put(dependant.getEngine(), table);
return result;
} catch (MigrationException e) {
Logger.error(PlanExecutor.this, "Error migrating dependency %s of node %s: %[exception]s", dependency.getTableName(), dependant.getTableName(), e);
return MigrationResult.getFailedInstance(e.getLocalizedMessage());
}
}).orElse(MigrationResult.getEmptyInstance(String.format("No table to migrate for node %s", dependency.getTableName())));
}
// Drop every intermediate table recorded in temporaryTables, removing only
// the entries that were actually cleaned up.
private void dropTemporaryTables() throws SQLException {
synchronized(temporaryTables) {
final Multimap<ConnectionInfo, String> removed = HashMultimap.create();
for (ConnectionInfo c : temporaryTables.keySet()) {
final Collection<String> tables = temporaryTables.get(c);
Logger.debug(this, "Cleaning up %s by removing %s...", c, tables);
((PostgreSQLHandler) c.getHandler()).executeStatementPostgreSQL(c.getCleanupQuery(tables));
removed.putAll(c, tables);
}
for (Map.Entry<ConnectionInfo, String> entry : removed.entries()) {
temporaryTables.remove(entry.getKey(), entry.getValue());
}
}
Logger.debug(this, "Temporary tables for query plan %s have been cleaned up", plan.getSerializedName());
}
/**
 * Colocates the dependencies for the given ExecutionNode onto that node's engine one at a time.
 *
 * Waits for any incomplete dependencies, and blocks the current thread until completion.
 *
 * @param node the ExecutionNOde whose dependencies we want to colocate
 *
 * @deprecated use {@link #colocateDependencies(ExecutionNode, Collection)} instead.
 */
@Deprecated
private void colocateDependenciesSerially(ExecutionNode node) {
// Block until dependencies are all resolved
try {
Logger.debug(this, "Waiting for dependencies of query node %s to be resolved", node.getTableName());
this.locks.get(node).await();
} catch (InterruptedException e) {
Logger.error(this, "Execution of query node %s was interrupted while waiting for dependencies: %[exception]s", node.getTableName(), e);
Thread.currentThread().interrupt();
}
Logger.debug(this, "Colocating dependencies of query node %s", node.getTableName());
plan.getDependencies(node).stream()
// only look at dependencies not already on desired engine
.filter(d -> !resultLocations.containsEntry(d, node.getEngine()))
.forEach(d -> {
// migrate to node.getEngine()
d.getTableName().ifPresent((table) -> {
Logger.debug(this, "Migrating dependency table %s from engine %s to engine %s...", table, d.getEngine(),
node.getEngine());
try {
MigrationResult result = Migrator.migrate(d.getEngine(), table, node.getEngine(), table);
if(result.isError()) {
throw new MigrationException(result.toString());
}
// mark the dependency's data as being present on node.getEngine()
resultLocations.put(d, node.getEngine());
// mark that this engine now has a copy of the dependency's data
temporaryTables.put(node.getEngine(), table);
} catch (MigrationException e) {
Logger.error(this, "Error migrating dependency %s of node %s: %[exception]s", d, node, e);
}
});
});
}
}
|
src/main/java/istc/bigdawg/executor/PlanExecutor.java
|
package istc.bigdawg.executor;
import com.google.common.collect.HashMultimap;
import com.google.common.collect.Multimap;
import com.google.common.collect.Multimaps;
import com.jcabi.log.Logger;
import com.jcabi.log.VerboseThreads;
import istc.bigdawg.exceptions.MigrationException;
import istc.bigdawg.executor.plan.ExecutionNode;
import istc.bigdawg.executor.plan.QueryExecutionPlan;
import istc.bigdawg.migration.MigrationResult;
import istc.bigdawg.migration.Migrator;
import istc.bigdawg.monitoring.Monitor;
import istc.bigdawg.postgresql.PostgreSQLHandler;
import istc.bigdawg.postgresql.PostgreSQLHandler.QueryResult;
import istc.bigdawg.query.ConnectionInfo;
import org.apache.commons.lang3.tuple.ImmutablePair;
import java.sql.SQLException;
import java.util.Collection;
import java.util.HashSet;
import java.util.Map;
import java.util.Optional;
import java.util.concurrent.*;
import java.util.stream.Collectors;
import java.util.stream.StreamSupport;
/**
* TODO:
* fully abstracted DbHandlers instead of casting to PostgreSQL
* shuffle joins
* better exception/error handling in the event of failure
*
* @author ankush
*/
class PlanExecutor {
private static final Monitor monitor = new Monitor();
private static final ExecutorService threadPool = java.util.concurrent.Executors.newCachedThreadPool(new VerboseThreads());
private final Multimap<ExecutionNode, ConnectionInfo> resultLocations = Multimaps.synchronizedSetMultimap(HashMultimap.create());
private final Multimap<ConnectionInfo, String> temporaryTables = Multimaps.synchronizedSetMultimap(HashMultimap.create());
private final Map<ImmutablePair<String, ConnectionInfo>, CompletableFuture<MigrationResult>> migrations = new ConcurrentHashMap<>();
private final Map<ExecutionNode, CountDownLatch> locks = new ConcurrentHashMap<>();
private final QueryExecutionPlan plan;
/**
* Class responsible for handling the execution of a single QueryExecutionPlan
*
* @param plan
* a data structure of the queries to be run and their ordering,
* with edges pointing to dependencies
*/
// Capture the plan, log its structure, and initialize one CountDownLatch per
// node sized by that node's in-degree (number of unfinished dependencies).
public PlanExecutor(QueryExecutionPlan plan) {
this.plan = plan;
final StringBuilder sb = new StringBuilder();
for(ExecutionNode n : plan) {
sb.append(String.format("%s -> (%s)\n", n, plan.getDependents(n)));
}
Logger.info(this, "Received plan %s", plan.getSerializedName());
Logger.debug(this, "Nodes for plan %s: \n %s", plan.getSerializedName(), sb);
Logger.debug(this, "Ordered queries: \n %s",
StreamSupport.stream(plan.spliterator(), false)
.map(ExecutionNode::getQueryString)
.filter(Optional::isPresent).map(Optional::get)
.collect(Collectors.joining(" \n ---- then ---- \n ")));
// initialize countdown latches to the proper counts
for(ExecutionNode node : plan) {
int latchSize = plan.inDegreeOf(node);
Logger.debug(this, "Node %s lock initialized with %d dependencies", node, latchSize);
this.locks.put(node, new CountDownLatch(latchSize));
}
}
/**
 * Execute the plan, and return the result of the terminal node.
 *
 * Launches every node concurrently (each blocks on its own dependency latch),
 * waits for the terminal node, drops temporary tables, and reports timing.
 *
 * @return the terminal node's query result, or empty on interruption/failure
 * @throws SQLException if temporary-table cleanup fails
 * @throws MigrationException declared for callers; migrations surface errors internally
 */
Optional<QueryResult> executePlan() throws SQLException, MigrationException {
final long start = System.currentTimeMillis();
Logger.info(this, "Executing query plan %s...", plan.getSerializedName());
CompletableFuture<Optional<QueryResult>> finalResult = CompletableFuture.completedFuture(Optional.empty());
for (ExecutionNode node : plan) {
CompletableFuture<Optional<QueryResult>> result = CompletableFuture.supplyAsync(() -> this.executeNode(node), threadPool);
if (plan.getTerminalTableNode().equals(node)) {
finalResult = result;
}
}
// Block until finalResult has resolved
Optional<QueryResult> result = Optional.empty();
try {
result = finalResult.get();
} catch (InterruptedException e) {
Logger.error(this, "Execution of query plan %s was interrupted: %[exception]s", plan.getSerializedName(), e);
Thread.currentThread().interrupt();
} catch (ExecutionException e) {
Logger.error(this, "Error retrieving results of final query node %s: %[exception]s", plan.getSerializedName(), e);
}
dropTemporaryTables();
final long end = System.currentTimeMillis();
// BUG FIX: elapsed time is end - start; (start - end) logged a negative duration.
Logger.info(this, "Finished executing query plan %s, in %d ms.", plan.getSerializedName(), (end - start));
Logger.info(this, "Sending timing to monitor...");
monitor.finishedBenchmark(plan, start, end);
Logger.info(this, "Returning result to planner...");
return result;
}
// Execute one node: colocate its dependencies onto its engine, run its query,
// and mark it completed (releasing dependents) regardless of query outcome.
private Optional<QueryResult> executeNode(ExecutionNode node) {
// perform shuffle join if equijoin and hint doesn't specify otherwise
// TODO(ankush): re-enable this and debug
// if (node instanceof BinaryJoinExecutionNode &&
// ((BinaryJoinExecutionNode) node).getHint().orElse(BinaryJoinExecutionNode.JoinAlgorithms.SHUFFLE) == BinaryJoinExecutionNode.JoinAlgorithms.SHUFFLE &&
// ((BinaryJoinExecutionNode) node).isEquiJoin()) {
// BinaryJoinExecutionNode joinNode = (BinaryJoinExecutionNode) node;
// if(!joinNode.getHint().isPresent() || joinNode.getHint().get() == BinaryJoinExecutionNode.JoinAlgorithms.SHUFFLE) {
// try {
// colocateDependencies(node, Arrays.asList(joinNode.getLeft().table, joinNode.getRight().table));
//
// Optional<QueryResult> result = new ShuffleJoinExecutor(joinNode).execute();
// markNodeAsCompleted(node);
// return result;
// } catch (Exception e) {
// log.error(String.format("Error executing node %s", joinNode), e);
// return Optional.empty();
// }
// }
// }
// otherwise execute as local query execution (same as broadcast join)
// colocate dependencies, blocking until completed
colocateDependencies(node, new HashSet<>());
Logger.debug(this, "Executing query node %s...", node);
return node.getQueryString().flatMap((query) -> {
try {
final Optional<QueryResult> result = ((PostgreSQLHandler) node.getEngine().getHandler()).executePostgreSQL(query);
Logger.info(this, "Successfully executed node %s", node);
return result;
} catch (SQLException e) {
Logger.error(this, "Error executing node %s: %[exception]s", node, e);
// TODO: if error is actually bad, don't markNodeAsCompleted, and instead fail the QEP gracefully.
return Optional.empty();
} finally {
// Always release dependents, even on failure (see TODO above)
markNodeAsCompleted(node);
}
});
}
/**
 * Records a node's completion: remembers its intermediate table for later
 * cleanup, records where its result now lives, and counts down the latch of
 * every dependant so they can start executing.
 *
 * The terminal node is skipped entirely — its table is the final result and
 * must not be scheduled for cleanup.
 *
 * @param node the node that finished executing (successfully or not)
 */
private void markNodeAsCompleted(ExecutionNode node) {
    Logger.debug(this, "Completed execution of %s.", node);
    if (!plan.getTerminalTableNode().equals(node)) {
        // clean up the intermediate table later
        node.getTableName().ifPresent((table) -> temporaryTables.put(node.getEngine(), table));
        // update nodeLocations to reflect that the results are located on this node's engine
        resultLocations.put(node, node.getEngine());
        final Collection<ExecutionNode> dependants = plan.getDependents(node);
        Logger.debug(this, "Examining dependants %s of %s", dependants, node);
        for (ExecutionNode dependent : dependants) {
            // BUG FIX: format string previously used "$s" instead of "%s",
            // so the completed node was never interpolated into the message.
            Logger.debug(this, "Decrementing lock of %s because %s completed.", dependent, node);
            this.locks.get(dependent).countDown();
            Logger.debug(this, "%s is now waiting on %d dependencies.", dependent, this.locks.get(dependent).getCount());
        }
        Logger.debug(this, "Completed examination of dependants %s of %s", dependants, node);
    }
}
/**
 * Colocates the dependencies for the given ExecutionNode onto that node's engine.
 *
 * Waits for any incomplete dependencies, and blocks the current thread until completion.
 *
 * @param node the ExecutionNode whose dependencies we want to colocate
 * @param ignoreTables table names that we wish to ignore
 */
private void colocateDependencies(ExecutionNode node, final Collection<String> ignoreTables) {
    // Defensive copy: the caller's collection is never mutated.
    final Collection<String> ignoreCopy = new HashSet<>(ignoreTables);
    // Block until dependencies are all resolved
    try {
        Logger.debug(this, "Waiting for %d dependencies of query node %s to be resolved...", this.locks.get(node).getCount(), node);
        // Poll with a 10s timeout purely so progress can be logged while waiting.
        while(!this.locks.get(node).await(10, TimeUnit.SECONDS)) {
            Logger.debug(this, "Still waiting for %d dependencies of query node %s to be resolved...", this.locks.get(node).getCount(), node);
        }
    } catch (InterruptedException e) {
        Logger.error(this, "Execution of query node %s was interrupted while waiting for dependencies: %[exception]s", node, e);
        Thread.currentThread().interrupt();
    }
    Logger.debug(this, "Colocating dependencies of %s to %s", node, node.getEngine());
    // Also ignore any dependency whose result is already on the target engine.
    ignoreCopy.addAll(plan.getDependencies(node).stream()
            .filter(d -> resultLocations.containsEntry(d, node.getEngine()))
            .map(n -> n.getTableName().orElse("NO_TABLE"))
            .collect(Collectors.toSet()));
    Logger.debug(this, "Ignoring dependencies %s of %s", ignoreCopy, node);
    // java.util.stream.Stream<ExecutionNode> deps = plan.getDependencies(node).stream()
    //         .filter(d -> d.getTableName().isPresent() && !ignoreCopy.contains(d.getTableName().get()));
    //
    // Logger.debug(this, "Examining dependencies %s of %s", deps.collect(Collectors.toSet()), node);
    // CompletableFuture[] futures = deps
    //         .map((d) -> {
    //             // computeIfAbsent gets a previous migration's Future, or creates one if it doesn't already exist
    //             ImmutablePair<String, ConnectionInfo> migrationKey = new ImmutablePair<>(d.getTableName().get(), node.getEngine());
    //             Logger.debug(PlanExecutor.this, "Examining %s to see if migration is necessary...", d);
    //
    //             return migrations.computeIfAbsent(migrationKey, (k) -> {
    //                 return CompletableFuture.supplyAsync(() -> {
    //                     Logger.debug(PlanExecutor.this, "Started migrating dependency %s of node %s", d, node);
    //                     MigrationResult result = colocateSingleDependency(d, node);
    //                     Logger.debug(PlanExecutor.this, "Finished migrating dependency %s of node %s: %s", d, node, result);
    //                     return result;
    //                 }, threadPool);
    //             });
    //         }).toArray(CompletableFuture[]::new);
    final Collection<ExecutionNode> deps = plan.getDependencies(node).stream()
            .filter(d -> d.getTableName().isPresent() && !ignoreCopy.contains(d.getTableName().get()))
            .collect(Collectors.toSet());
    Logger.debug(this, "Examining dependencies %s of %s", deps, node);
    Collection<CompletableFuture<MigrationResult>> futureCollection = new HashSet<>();
    for(ExecutionNode d : deps) {
        // Key = (table name, destination engine): the same table is migrated to
        // the same engine at most once across all concurrent colocation calls.
        final ImmutablePair<String, ConnectionInfo> migrationKey = new ImmutablePair<>(d.getTableName().get(), node.getEngine());
        Logger.debug(PlanExecutor.this, "Examining %s to see if migration is necessary...", d);
        // Synchronized check-then-put: only one thread may enqueue a given migration.
        synchronized (migrations) {
            if (!migrations.containsKey(migrationKey)) {
                final CompletableFuture<MigrationResult> migration = CompletableFuture.supplyAsync(() -> {
                    Logger.debug(PlanExecutor.this, "Started migrating dependency %s of node %s", d, node);
                    final MigrationResult result = colocateSingleDependency(d, node);
                    Logger.debug(PlanExecutor.this, "Finished migrating dependency %s of node %s: %s", d, node, result);
                    return result;
                }, threadPool);
                migrations.put(migrationKey, migration);
                futureCollection.add(migration);
            } else {
                // NOTE(review): an already-queued migration is NOT added to
                // futureCollection, so this call does not wait for it below.
                // Confirm that is intended (the existing future may still be running).
                Logger.debug(PlanExecutor.this, "Already migrating %s, not queueing again.", d);
            }
        }
    }
    final CompletableFuture[] futures = futureCollection.toArray(new CompletableFuture[futureCollection.size()]);
    Logger.debug(this, "Waiting on %d dependencies of %s to be migrated...", futures.length, node);
    CompletableFuture.allOf(futures).join();
    Logger.debug(this, "All dependencies of %s have migrated!", node);
}
/**
 * Migrates one dependency's table onto the dependant node's engine and
 * records the data's new location for later lookups and cleanup.
 *
 * @param dependency the node whose result table should be moved
 * @param dependant  the node whose engine the table must end up on
 * @return the migration outcome; an empty result when the dependency has no
 *         table, a failed result when the migration raises an exception
 */
private MigrationResult colocateSingleDependency(ExecutionNode dependency, ExecutionNode dependant) {
    final Optional<String> maybeTable = dependency.getTableName();
    if (!maybeTable.isPresent()) {
        // Nothing to move for table-less nodes.
        return MigrationResult.getEmptyInstance(String.format("No table to migrate for node %s", dependency.getTableName()));
    }
    final String table = maybeTable.get();
    try {
        final MigrationResult outcome = Migrator.migrate(dependency.getEngine(), table, dependant.getEngine(), table);
        if (outcome.isError()) {
            throw new MigrationException(outcome.toString());
        }
        Logger.debug(PlanExecutor.this, "Marking dependency %s as migrated on engine %s...", dependency, dependant.getEngine());
        // mark the dependency's data as being present on node.getEngine()
        resultLocations.put(dependency, dependant.getEngine());
        // mark that this engine now has a copy of the dependency's data
        temporaryTables.put(dependant.getEngine(), table);
        return outcome;
    } catch (MigrationException e) {
        Logger.error(PlanExecutor.this, "Error migrating dependency %s of node %s: %[exception]s", dependency.getTableName(), dependant.getTableName(), e);
        return MigrationResult.getFailedInstance(e.getLocalizedMessage());
    }
}
/**
 * Drops every temporary table created while executing this plan, engine by
 * engine, then clears the bookkeeping entries for the tables removed.
 *
 * @throws SQLException if an engine fails to execute its cleanup statement
 */
private void dropTemporaryTables() throws SQLException {
    synchronized (temporaryTables) {
        final Multimap<ConnectionInfo, String> droppedTables = HashMultimap.create();
        for (final ConnectionInfo engine : temporaryTables.keySet()) {
            final Collection<String> tablesOnEngine = temporaryTables.get(engine);
            Logger.debug(this, "Cleaning up %s by removing %s...", engine, tablesOnEngine);
            ((PostgreSQLHandler) engine.getHandler()).executeStatementPostgreSQL(engine.getCleanupQuery(tablesOnEngine));
            droppedTables.putAll(engine, tablesOnEngine);
        }
        // Remove entries only after all engines were cleaned, and never while
        // iterating the live multimap views above.
        for (final Map.Entry<ConnectionInfo, String> dropped : droppedTables.entries()) {
            temporaryTables.remove(dropped.getKey(), dropped.getValue());
        }
    }
    Logger.debug(this, "Temporary tables for query plan %s have been cleaned up", plan.getSerializedName());
}
/**
 * Colocates the dependencies for the given ExecutionNode onto that node's engine one at a time.
 *
 * Waits for any incomplete dependencies, and blocks the current thread until completion.
 *
 * @param node the ExecutionNode whose dependencies we want to colocate
 *
 * @deprecated use {@link #colocateDependencies(ExecutionNode, Collection)} instead.
 */
@Deprecated
private void colocateDependenciesSerially(ExecutionNode node) {
    // Block until dependencies are all resolved
    try {
        Logger.debug(this, "Waiting for dependencies of query node %s to be resolved", node.getTableName());
        this.locks.get(node).await();
    } catch (InterruptedException e) {
        Logger.error(this, "Execution of query node %s was interrupted while waiting for dependencies: %[exception]s", node.getTableName(), e);
        Thread.currentThread().interrupt();
    }
    Logger.debug(this, "Colocating dependencies of query node %s", node.getTableName());
    // Serial migration: each dependency is moved inline on the calling thread.
    plan.getDependencies(node).stream()
            // only look at dependencies not already on desired engine
            .filter(d -> !resultLocations.containsEntry(d, node.getEngine()))
            .forEach(d -> {
                // migrate to node.getEngine()
                d.getTableName().ifPresent((table) -> {
                    Logger.debug(this, "Migrating dependency table %s from engine %s to engine %s...", table, d.getEngine(),
                            node.getEngine());
                    try {
                        MigrationResult result = Migrator.migrate(d.getEngine(), table, node.getEngine(), table);
                        if(result.isError()) {
                            throw new MigrationException(result.toString());
                        }
                        // mark the dependency's data as being present on node.getEngine()
                        resultLocations.put(d, node.getEngine());
                        // mark that this engine now has a copy of the dependency's data
                        temporaryTables.put(node.getEngine(), table);
                    } catch (MigrationException e) {
                        // Failure is logged and swallowed: remaining dependencies still migrate.
                        Logger.error(this, "Error migrating dependency %s of node %s: %[exception]s", d, node, e);
                    }
                });
            });
}
}
|
i am an idiot
|
src/main/java/istc/bigdawg/executor/PlanExecutor.java
|
i am an idiot
|
|
Java
|
mit
|
d2f460c787a99ef6c026839f36717f44eadca052
| 0
|
IIlllII/bitbreeds-webrtc,IIlllII/bitbreeds-webrtc,IIlllII/bitbreeds-webrtc,IIlllII/bitbreeds-webrtc
|
package com.bitbreeds.webrtc.signaling;
import com.bitbreeds.webrtc.peerconnection.IceCandidate;
import gov.nist.javax.sdp.MediaDescriptionImpl;
import gov.nist.javax.sdp.SessionDescriptionImpl;
import gov.nist.javax.sdp.TimeDescriptionImpl;
import gov.nist.javax.sdp.fields.*;
import javax.sdp.*;
import java.math.BigInteger;
import java.util.*;
import java.util.stream.Collectors;
/**
* Copyright (c) 05/02/2018, Jonas Waage
* <p>
* Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
* documentation files (the "Software"), to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and
* to permit persons to whom the Software is furnished to do so, subject to the following conditions:
* <p>
* The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
* <p>
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
* WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
* COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
* OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
/**
 * Builds SDP session descriptions for WebRTC data-channel signaling.
 *
 * The base description is assembled manually (see {@link #baseSessionDescription()})
 * specifically so that no local IP address lookup is performed.
 */
public class SDPUtil {

    /**
     * Creates the SDP for a data-channel-only WebRTC session.
     *
     * @param remoteCandidates candidates received from the remote peer
     *                         (currently unused; see the FIXME in createMedia)
     * @param ice         local candidate whose IP and port are advertised
     * @param user        value of the ice-ufrag attribute
     * @param pwd         value of the ice-pwd attribute
     * @param fingerprint value of the DTLS fingerprint attribute
     * @param mid         media stream identification tag, also used for BUNDLE
     * @param isIceLite   whether to advertise the ice-lite attribute
     * @return the assembled session description
     * @throws RuntimeException wrapping any {@code SdpException} raised while building
     */
    public static SessionDescription createSDP(
            List<IceCandidate> remoteCandidates,
            IceCandidate ice,
            String user,
            String pwd,
            String fingerprint,
            String mid,
            boolean isIceLite) {
        try {
            SessionDescription sdp = baseSessionDescription();

            ProtoVersionField version = new ProtoVersionField();
            version.setProtoVersion(0);
            sdp.setVersion(version);

            OriginField originField = new OriginField();
            originField.setAddress(ice.getIp());
            originField.setAddressType("IP4");
            originField.setUsername("pyrrhic_victory");
            Random rd = new Random();
            originField.setSessionId(12345678+rd.nextInt(12345678)); //Hmm random?
            originField.setSessionVersion(0);
            originField.setNetworkType("IN");
            sdp.setOrigin(originField);

            sdp.setAttribute("s","-");
            sdp.setAttribute("t","0 0");

            Vector<SDPField> sessionAttributes = new Vector<>();
            sessionAttributes.add(createAttribute("sendrecv",""));
            sessionAttributes.add(createAttribute("fingerprint",fingerprint));
            sessionAttributes.add(createAttribute("group","BUNDLE "+mid));
            sessionAttributes.add(createAttribute("msid-semantic","WMS *"));
            // "ice-options: trickle" is deliberately not advertised at the moment
            // (the previous code built the attribute but never added it).
            if(isIceLite) {
                sessionAttributes.add(createAttribute("ice-lite",""));
            }
            sdp.setAttributes(sessionAttributes);

            Vector<MediaDescription> mediaDescriptions = new Vector<>();
            mediaDescriptions.add(createMedia(remoteCandidates,user,pwd,ice.getIp(),fingerprint,ice.getPort(),mid));
            sdp.setMediaDescriptions(mediaDescriptions);

            return sdp;
        } catch (SdpException e) {
            throw new RuntimeException("SDP creation failed: ",e);
        }
    }

    /*
     * Same as SDPFactory.createSessionDescription(), but it does not do a local ip lookup
     */
    private static SessionDescriptionImpl baseSessionDescription() throws SdpException {
        SessionDescriptionImpl sdp = new SessionDescriptionImpl();

        // Local renamed: the original declared "ProtoVersionField ProtoVersionField",
        // shadowing the class name.
        ProtoVersionField protoVersion = new ProtoVersionField();
        protoVersion.setVersion(0);
        sdp.setVersion(protoVersion);

        SessionNameField sessionName = new SessionNameField();
        sessionName.setValue("-");
        sdp.setSessionName(sessionName);

        TimeDescriptionImpl timeDescription = new TimeDescriptionImpl();
        TimeField time = new TimeField();
        time.setZero();
        timeDescription.setTime(time);
        Vector<TimeDescriptionImpl> times = new Vector<>();
        times.addElement(timeDescription);
        sdp.setTimeDescriptions(times);
        return sdp;
    }

    /**
     * Builds a single a= attribute field.
     *
     * @param key   attribute name
     * @param value attribute value (may be empty for flag attributes)
     * @return the populated attribute field
     */
    private static AttributeField createAttribute(String key, String value) {
        try {
            AttributeField attr = new AttributeField();
            attr.setName(key);
            attr.setValue(value);
            return attr;
        }
        catch (SdpException e) {
            throw new RuntimeException("SDP creation failed: ",e);
        }
    }

    /**
     * Builds the single application/webrtc-datachannel media section,
     * including the host candidate for the given address and port.
     *
     * (Renamed from the original "creatMedia" typo; the method is private, so
     * no external callers are affected.)
     */
    private static MediaDescriptionImpl createMedia(
            List<IceCandidate> remoteCandidates,
            String user,
            String pass,
            String address,
            String fingerprint,
            int port,
            String mid) {
        try {
            MediaDescriptionImpl media = new MediaDescriptionImpl();

            MediaField mediaField = new MediaField();
            mediaField.setProtocol("UDP/DTLS/SCTP");
            Vector<String> formats = new Vector<>();
            formats.add("webrtc-datachannel");
            mediaField.setMediaFormats(formats);
            mediaField.setMediaType("application");
            mediaField.setPort(port);
            media.setMedia(mediaField);

            ConnectionField connectionField = new ConnectionField();
            connectionField.setAddress("0.0.0.0");
            connectionField.setNettype("IN");
            connectionField.setAddressType("IP4");
            media.setConnection(connectionField);

            //FIXME (do we need to look at remotes here or not)
            /*Optional<IceCandidate> cand = remoteCandidates.stream()
                    .filter(i ->
                            "udp".equalsIgnoreCase(i.getProtocol()) &&
                            "host".equalsIgnoreCase(i.getType()) &&
                            address.equalsIgnoreCase(i.getIp()) &&
                            BigInteger.ONE.equals(i.getComponent())
                    )
                    .findFirst();
            Optional<Integer> max = remoteCandidates.stream()
                    .map(i->i.getFoundation().intValue())
                    .max(Integer::compare);
            int foundationIfNoHit = max.map(i->i+1).orElse(0);
            int foundation = cand
                    .map(i->i.getFoundation().intValue())
                    .orElse(foundationIfNoHit);*/

            Vector<AttributeField> cands = new Vector<>();
            int component = 1;
            cands.add(createAttribute("candidate",0+" "+component+" udp "+findPriority(component)+" "+address+" "+port+" typ host"));
            media.setAttributes(cands);

            media.setAttribute("sendrecv","");
            media.setAttribute("end-of-candidates","");
            media.setAttribute("ice-pwd",pass);
            media.setAttribute("ice-ufrag",user);
            media.setAttribute("setup","passive");
            media.setAttribute("mid",mid);
            media.setAttribute("sctp-port","5000");
            media.setAttribute("max-message-size","1073741823");
            return media;
        }
        catch (SdpException e) {
            throw new RuntimeException("SDP creation failed: ",e);
        }
    }

    /**
     * Computes the ICE candidate priority for a host candidate:
     * 2113929216 == 126 &lt;&lt; 24 (type preference) and
     * 16776960 == 65535 &lt;&lt; 8 (local preference), plus (256 - componentId).
     */
    static int findPriority(int componentId) {
        return 2113929216 + 16776960 + (256 - componentId);
    }

    /**
     * Extracts the values of all a=candidate attributes from a raw SDP
     * attribute vector.
     *
     * @param vec attribute fields parsed from an SDP (raw Vector to match callers)
     * @return the candidate attribute values, in order of appearance
     * @throws SdpParseException if an attribute cannot be parsed
     */
    public static ArrayList<String> getCandidates(Vector vec) throws SdpParseException {
        ArrayList<String> out = new ArrayList<>();
        for (Object field : vec) {
            if (field instanceof AttributeField) {
                AttributeField attribute = (AttributeField) field;
                if ("candidate".equalsIgnoreCase(attribute.getAttribute().getKey())) {
                    out.add(attribute.getAttribute().getValue());
                }
            }
        }
        return out;
    }
}
|
webrtc-signaling/src/main/java/com/bitbreeds/webrtc/signaling/SDPUtil.java
|
package com.bitbreeds.webrtc.signaling;
import com.bitbreeds.webrtc.peerconnection.IceCandidate;
import gov.nist.javax.sdp.MediaDescriptionImpl;
import gov.nist.javax.sdp.fields.*;
import javax.sdp.*;
import java.math.BigInteger;
import java.util.*;
import java.util.stream.Collectors;
/**
* Copyright (c) 05/02/2018, Jonas Waage
* <p>
* Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
* documentation files (the "Software"), to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and
* to permit persons to whom the Software is furnished to do so, subject to the following conditions:
* <p>
* The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
* <p>
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
* WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
* COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
* OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
// NOTE(review): superseded variant of SDPUtil. Unlike the newer version, this one
// obtains the session description from SdpFactory.createSessionDescription(),
// which presumably resolves a local address — confirm that is acceptable here.
public class SDPUtil {
    /**
     * Creates the SDP for a data-channel-only WebRTC session.
     *
     * @param remoteCandidates candidates from the remote peer (unused; see FIXME in creatMedia)
     * @param ice         local candidate whose IP and port are advertised
     * @param user        ice-ufrag value
     * @param pwd         ice-pwd value
     * @param fingerprint DTLS fingerprint attribute value
     * @param mid         media stream identification, also used for BUNDLE
     * @param isIceLite   whether to advertise ice-lite
     * @return the assembled session description
     * @throws RuntimeException wrapping any SdpException raised while building
     */
    public static SessionDescription createSDP(
            List<IceCandidate> remoteCandidates,
            IceCandidate ice,
            String user,
            String pwd,
            String fingerprint,
            String mid,
            boolean isIceLite) {
        try {
            SdpFactory factory = SdpFactory.getInstance();
            SessionDescription sdp = factory.createSessionDescription();
            ProtoVersionField v = new ProtoVersionField();
            v.setProtoVersion(0);
            sdp.setVersion(v);
            OriginField originField = new OriginField();
            originField.setAddress(ice.getIp());
            originField.setAddressType("IP4");
            originField.setUsername("pyrrhic_victory");
            Random rd = new Random();
            originField.setSessionId(12345678+rd.nextInt(12345678)); //Hmm random?
            originField.setSessionVersion(0);
            originField.setNetworkType("IN");
            sdp.setOrigin(originField);
            sdp.setAttribute("s","-");
            sdp.setAttribute("t","0 0");
            AttributeField sendrecv = createAttribute("sendrecv","");
            AttributeField print = createAttribute("fingerprint",fingerprint);
            AttributeField bundle = createAttribute("group","BUNDLE "+mid);
            AttributeField msid = createAttribute("msid-semantic","WMS *");
            // NOTE(review): iceoptions is built but never added (the add below is
            // commented out), so "ice-options: trickle" is not advertised.
            AttributeField iceoptions = createAttribute("ice-options","trickle");
            Vector<SDPField> vec = new Vector<>();
            vec.add(sendrecv);
            vec.add(print);
            vec.add(bundle);
            vec.add(msid);
            //vec.add(iceoptions);
            if(isIceLite) {
                vec.add(createAttribute("ice-lite",""));
            }
            sdp.setAttributes(vec);
            Vector<MediaDescription> vec2 = new Vector<>();
            vec2.add(creatMedia(remoteCandidates,user,pwd,ice.getIp(),fingerprint,ice.getPort(),mid));
            sdp.setMediaDescriptions(vec2);
            return sdp;
        } catch (SdpException e) {
            throw new RuntimeException("SDP creation failed: ",e);
        }
    }
    /**
     * Builds a single a= attribute field.
     */
    private static AttributeField createAttribute(String key, String value) {
        try {
            AttributeField attr = new AttributeField();
            attr.setName(key);
            attr.setValue(value);
            return attr;
        }
        catch (SdpException e) {
            throw new RuntimeException("SDP creation failed: ",e);
        }
    }
    /**
     * Builds the application/webrtc-datachannel media section, including the
     * single host candidate for the given address and port.
     * (Method name "creatMedia" is a typo preserved for compatibility.)
     */
    private static MediaDescriptionImpl creatMedia(
            List<IceCandidate> remoteCandidates,
            String user,
            String pass,
            String address,
            String fingerprint,
            int port,
            String mid) {
        try {
            MediaDescriptionImpl media = new MediaDescriptionImpl();
            MediaField mediaField = new MediaField();
            mediaField.setProtocol("UDP/DTLS/SCTP");
            Vector<String> formats = new Vector<>();
            formats.add("webrtc-datachannel");
            mediaField.setMediaFormats(formats);
            mediaField.setMediaType("application");
            mediaField.setPort(port);
            media.setMedia(mediaField);
            ConnectionField connectionField = new ConnectionField();
            connectionField.setAddress("0.0.0.0");
            connectionField.setNettype("IN");
            connectionField.setAddressType("IP4");
            media.setConnection(connectionField);
            //FIXME (do we need to look at remotes here or not)
            /*Optional<IceCandidate> cand = remoteCandidates.stream()
                    .filter(i ->
                            "udp".equalsIgnoreCase(i.getProtocol()) &&
                            "host".equalsIgnoreCase(i.getType()) &&
                            address.equalsIgnoreCase(i.getIp()) &&
                            BigInteger.ONE.equals(i.getComponent())
                    )
                    .findFirst();
            Optional<Integer> max = remoteCandidates.stream()
                    .map(i->i.getFoundation().intValue())
                    .max(Integer::compare);
            int foundationIfNoHit = max.map(i->i+1).orElse(0);
            int foundation = cand
                    .map(i->i.getFoundation().intValue())
                    .orElse(foundationIfNoHit);*/
            Vector<AttributeField> cands = new Vector<>();
            int component = 1;
            cands.add(createAttribute("candidate",0+" "+component+" udp "+findPriority(component)+" "+address+" "+port+" typ host"));
            media.setAttributes(cands);
            media.setAttribute("sendrecv","");
            media.setAttribute("end-of-candidates","");
            media.setAttribute("ice-pwd",pass);
            media.setAttribute("ice-ufrag",user);
            media.setAttribute("setup","passive");
            media.setAttribute("mid",mid);
            media.setAttribute("sctp-port","5000");
            media.setAttribute("max-message-size","1073741823");
            return media;
        }
        catch (SdpException e) {
            throw new RuntimeException("SDP creation failed: ",e);
        }
    }
    /**
     * Host-candidate ICE priority: 2113929216 == 126 &lt;&lt; 24 (type pref),
     * 16776960 == 65535 &lt;&lt; 8 (local pref), plus (256 - componentId).
     */
    static int findPriority(int componentId) {
        return 2113929216 + 16776960 + (256 - componentId);
    }
    /**
     * Extracts the values of all a=candidate attributes from a raw SDP
     * attribute vector.
     */
    public static ArrayList<String> getCandidates(Vector vec) throws SdpParseException {
        Object[] arr = vec.toArray();
        ArrayList<String> out = new ArrayList<>();
        for(int i = 0; i<arr.length; i++) {
            Object r = arr[i];
            if(r instanceof AttributeField) {
                AttributeField data = (AttributeField) r;
                if("candidate".equalsIgnoreCase(data.getAttribute().getKey())) {
                    out.add(data.getAttribute().getValue());
                }
            }
        }
        return out;
    }
}
|
Avoid resolving local ip for the sdp
|
webrtc-signaling/src/main/java/com/bitbreeds/webrtc/signaling/SDPUtil.java
|
Avoid resolving local ip for the sdp
|
|
Java
|
mit
|
1dd4a297cb2da46339514eed886153df53589d2e
| 0
|
mattlogan/CircleMenu
|
package com.matthewlogan.circlemenu.library;
import android.content.Context;
import android.content.res.TypedArray;
import android.graphics.Color;
import android.util.AttributeSet;
import android.util.Log;
import android.util.TypedValue;
import android.view.View;
import android.view.ViewGroup;
import android.view.animation.Animation;
import android.view.animation.AnimationUtils;
import android.widget.AdapterView;
import android.widget.ArrayAdapter;
import android.widget.FrameLayout;
import android.widget.ListView;
import android.widget.TextView;
import com.matthewlogan.circlemenu.R;
public class CircleMenu extends FrameLayout
implements AdapterView.OnItemClickListener, Animation.AnimationListener {
private Context mContext;
private ListView mListView;
private OnItemClickListener mListener;
private Animation mHideAnimation;
private Animation mShowAnimation;
private boolean mIsAnimating = false;
private boolean mIsShowing = false;
private int mTextColor;
private float mTextSize;
private int mDividerColor;
// This is the ListView's listener. We'll use this to trigger our own.
@Override
public void onItemClick(AdapterView<?> parent, View view, int position, long id) {
// Subtract one for the "header"
mListener.onItemClick(position - 1);
}
// Whoever is implementing this class only needs to know the clicked item position.
public interface OnItemClickListener {
public void onItemClick(int position);
}
public CircleMenu(Context context, AttributeSet attrs) {
super(context, attrs);
if (attrs != null) {
TypedArray a = context.obtainStyledAttributes(attrs, R.styleable.CircleMenu, 0, 0);
if (a != null) {
try {
mTextColor = a.getColor(R.styleable.CircleMenu_textColor, Color.WHITE);
mTextSize = a.getDimensionPixelSize(R.styleable.CircleMenu_textSize, 54);
mDividerColor = a.getColor(R.styleable.CircleMenu_dividerColor, Color.WHITE);
} catch (Exception e) {
Log.e("CircleMenu", "Error while creating the view:", e);
} finally {
a.recycle();
}
}
}
mContext = context;
setupMenu();
loadAnimations();
setVisibility(View.GONE);
}
private void setupMenu() {
mListView = new ListView(mContext);
mListView.setLayoutParams(new ViewGroup.LayoutParams(
ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.WRAP_CONTENT));
mListView.setOnItemClickListener(this);
addView(mListView);
}
private void loadAnimations() {
mHideAnimation = AnimationUtils.loadAnimation(mContext, R.anim.circle_menu_hide);
mShowAnimation = AnimationUtils.loadAnimation(mContext, R.anim.circle_menu_show);
mHideAnimation.setAnimationListener(this);
mShowAnimation.setAnimationListener(this);
}
@Override
public void onAnimationStart(Animation animation) {
mIsAnimating = true;
}
@Override
public void onAnimationEnd(Animation animation) {
mIsAnimating = false;
mIsShowing = !mIsShowing;
if (!mIsShowing) {
setVisibility(View.GONE);
}
}
@Override
public void onAnimationRepeat(Animation animation) {
}
public void setItems(String[] items) {
// Rather than dealing with adding a header view to the ListView, we'll just add an
// empty row. This way, we don't have to inflate another view manually.
String[] itemsWithHeader = new String[items.length + 1];
for (int i = 0; i < items.length + 1; i++) {
itemsWithHeader[i] = (i == 0) ? "" : items[i - 1];
}
CircleMenuAdapter<String> adapter = new CircleMenuAdapter<String>(mContext,
R.layout.circle_menu_item, R.id.circle_menu_item_text, itemsWithHeader);
mListView.setAdapter(adapter);
}
public void setOnItemClickListener(OnItemClickListener listener) {
mListener = listener;
}
public void toggle() {
if (mIsAnimating) {
return;
}
setVisibility(View.VISIBLE);
startAnimation(mIsShowing ? mHideAnimation : mShowAnimation);
}
private class CircleMenuAdapter<String> extends ArrayAdapter<String> {
public CircleMenuAdapter(Context context, int resource, int textViewResourceId,
String[] objects) {
super(context, resource, textViewResourceId, objects);
}
@Override
public View getView(int position, View convertView, ViewGroup parent) {
View view = super.getView(position, convertView, parent);
TextView textView = (TextView) view.findViewById(R.id.circle_menu_item_text);
textView.setTextSize(TypedValue.COMPLEX_UNIT_PX, mTextSize);
textView.setTextColor(mTextColor);
View divider = view.findViewById(R.id.circle_menu_item_divider);
divider.setBackgroundColor(mDividerColor);
return view;
}
// Makes the top row (presumably underneath an action bar or control of some kind)
// not clickable.
@Override
public boolean isEnabled(int position) {
return !(position == 0);
}
}
}
|
library/src/main/java/com/matthewlogan/circlemenu/library/CircleMenu.java
|
package com.matthewlogan.circlemenu.library;
import android.content.Context;
import android.content.res.TypedArray;
import android.graphics.Color;
import android.util.AttributeSet;
import android.util.Log;
import android.util.TypedValue;
import android.view.View;
import android.view.ViewGroup;
import android.view.animation.Animation;
import android.view.animation.AnimationUtils;
import android.widget.AdapterView;
import android.widget.ArrayAdapter;
import android.widget.FrameLayout;
import android.widget.ListView;
import android.widget.TextView;
import com.matthewlogan.circlemenu.R;
// NOTE(review): superseded variant of CircleMenu; kept byte-identical, comments only.
public class CircleMenu extends FrameLayout
        implements AdapterView.OnItemClickListener, Animation.AnimationListener {
    private Context mContext;
    private ListView mListView;
    private OnItemClickListener mListener;
    // NOTE(review): mInitialHideAnimation is never assigned or used in this class.
    private Animation mInitialHideAnimation;
    private Animation mHideAnimation;
    private Animation mShowAnimation;
    private boolean mIsAnimating = false;
    private boolean mIsShowing = false;
    // Styling read from XML attributes, with defaults.
    private int mTextColor;
    private float mTextSize;
    private int mDividerColor;
    // NOTE(review): mShouldBlockLayout is never read in this class.
    private boolean mShouldBlockLayout = true;
    // This is the ListView's listener. We'll use this to trigger our own.
    // NOTE(review): throws NullPointerException if a row is clicked before
    // setOnItemClickListener has been called.
    @Override
    public void onItemClick(AdapterView<?> parent, View view, int position, long id) {
        // Subtract one for the "header"
        mListener.onItemClick(position - 1);
    }
    // Whoever is implementing this class only needs to know the clicked item position.
    public interface OnItemClickListener {
        public void onItemClick(int position);
    }
    public CircleMenu(Context context, AttributeSet attrs) {
        super(context, attrs);
        if (attrs != null) {
            TypedArray a = context.obtainStyledAttributes(attrs, R.styleable.CircleMenu, 0, 0);
            if (a != null) {
                try {
                    mTextColor = a.getColor(R.styleable.CircleMenu_textColor, Color.WHITE);
                    mTextSize = a.getDimensionPixelSize(R.styleable.CircleMenu_textSize, 54);
                    mDividerColor = a.getColor(R.styleable.CircleMenu_dividerColor, Color.WHITE);
                } catch (Exception e) {
                    Log.e("CircleMenu", "Error while creating the view:", e);
                } finally {
                    a.recycle();
                }
            }
        }
        mContext = context;
        setupMenu();
        loadAnimations();
        // Start hidden; toggle() makes the view visible before animating it in.
        setVisibility(View.GONE);
    }
    /** Creates the backing ListView and attaches it to this FrameLayout. */
    private void setupMenu() {
        mListView = new ListView(mContext);
        mListView.setLayoutParams(new ViewGroup.LayoutParams(
                ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.WRAP_CONTENT));
        mListView.setOnItemClickListener(this);
        addView(mListView);
    }
    /** Loads show/hide animations and registers this view as their listener. */
    private void loadAnimations() {
        mHideAnimation = AnimationUtils.loadAnimation(mContext, R.anim.circle_menu_hide);
        mShowAnimation = AnimationUtils.loadAnimation(mContext, R.anim.circle_menu_show);
        mHideAnimation.setAnimationListener(this);
        mShowAnimation.setAnimationListener(this);
    }
    @Override
    public void onAnimationStart(Animation animation) {
        mIsAnimating = true;
    }
    @Override
    public void onAnimationEnd(Animation animation) {
        mIsAnimating = false;
        // Either animation ending flips the shown/hidden state.
        mIsShowing = !mIsShowing;
        if (!mIsShowing) {
            setVisibility(View.GONE);
        }
    }
    @Override
    public void onAnimationRepeat(Animation animation) {
    }
    /**
     * Sets the menu items, prepending a blank disabled "header" row.
     */
    public void setItems(String[] items) {
        // Rather than dealing with adding a header view to the ListView, we'll just add an
        // empty row. This way, we don't have to inflate another view manually.
        String[] itemsWithHeader = new String[items.length + 1];
        for (int i = 0; i < items.length + 1; i++) {
            itemsWithHeader[i] = (i == 0) ? "" : items[i - 1];
        }
        CircleMenuAdapter<String> adapter = new CircleMenuAdapter<String>(mContext,
                R.layout.circle_menu_item, R.id.circle_menu_item_text, itemsWithHeader);
        mListView.setAdapter(adapter);
    }
    public void setOnItemClickListener(OnItemClickListener listener) {
        mListener = listener;
    }
    /** Shows the menu if hidden, hides it if shown; ignored mid-animation. */
    public void toggle() {
        if (mIsAnimating) {
            return;
        }
        setVisibility(View.VISIBLE);
        startAnimation(mIsShowing ? mHideAnimation : mShowAnimation);
    }
    // NOTE(review): "<String>" here declares a TYPE PARAMETER named "String"
    // that shadows java.lang.String — almost certainly unintended; the adapter
    // still works because the parameter erases to Object.
    private class CircleMenuAdapter<String> extends ArrayAdapter<String> {
        public CircleMenuAdapter(Context context, int resource, int textViewResourceId,
                                 String[] objects) {
            super(context, resource, textViewResourceId, objects);
        }
        @Override
        public View getView(int position, View convertView, ViewGroup parent) {
            View view = super.getView(position, convertView, parent);
            // Apply the XML-configured styling to each row.
            TextView textView = (TextView) view.findViewById(R.id.circle_menu_item_text);
            textView.setTextSize(TypedValue.COMPLEX_UNIT_PX, mTextSize);
            textView.setTextColor(mTextColor);
            View divider = view.findViewById(R.id.circle_menu_item_divider);
            divider.setBackgroundColor(mDividerColor);
            return view;
        }
        // Makes the top row (presumably underneath an action bar or control of some kind)
        // not clickable.
        @Override
        public boolean isEnabled(int position) {
            return !(position == 0);
        }
    }
}
|
Removed some old unused instance variables
|
library/src/main/java/com/matthewlogan/circlemenu/library/CircleMenu.java
|
Removed some old unused instance variables
|
|
Java
|
mit
|
bd65b436b3ef3ec76d340da9b9c97691f39008a2
| 0
|
tepidpond/tum,tepidpond/tum
|
package com.tepidpond.tum.PlateTectonics;
import java.util.Arrays;
import java.util.Random;
import java.util.Vector;
import org.lwjgl.util.vector.Vector4f;
/**
 * A single tectonic plate: a rectangular window onto the (square, wrapping)
 * world map holding per-tile crust height, crust age and collision-segment
 * ownership, plus the plate's aggregate mass, center of mass and motion
 * state.
 */
public class Plate {
    // Scales how strongly deformed (collided) mass slows a plate (applyFriction).
    private static final float DEFORMATION_WEIGHT = 5f;
    // Magnitude of the initial, randomly directed velocity vector.
    private static final float INITIAL_SPEED = 1.0f;
    // Height threshold at/above which a tile counts as continental crust.
    private static final float CONT_BASE = 1.0f;

    // Segment ID remembered by selectCollisionSegment() for later processing.
    private int activeContinentID;
    private Vector<CollisionSegment> collisionSegments = new Vector<CollisionSegment>();
    // Height of terrain on the plate.
    private float[] heightMap;
    // Age (creation timestamp) of the crust on the plate.
    private int[] timestampMap;
    // Which collision segment is responsible for the given tile.
    // 255 is the "owned by no segment" sentinel (see resetSegments()).
    private int[] segmentOwnerMap;
    // Dimensions and location of the plate in world coordinates.
    private int left, top, width, height;
    // Size of world map; coordinates wrap at this value.
    private int mapSize;
    // Amount of crust that constitutes the plate (total mass).
    private float M;
    // Center of mass of the plate in world coordinates.
    private float R_x, R_y;
    // Components of plate's velocity. vX and vY are components of a unit vector, velocity is the magnitude
    private float velocity, vX, vY;
    // Components of plate's acceleration; accumulated by collide(), consumed
    // by updateVelocity().
    private float dX, dY;
    // Used for random off-setting in subduction routine and setting up initial direction
    private Random rand;

    // Trivial accessors.
    float getMomentum() {return M * velocity;}
    int getLeft() {return left;}
    int getTop() {return top;}
    int getHeight() {return height;}
    int getWidth() {return width;}
    float getVelocity() {return velocity;}
    float getVelocityX() {return vX;}
    float getVelocityY() {return vY;}
    // NOTE(review): boxed Boolean is unnecessary here (primitive boolean
    // would do); signature left untouched.
    Boolean isEmpty() {return M<=0;}
    /**
     * Builds a plate from an initial height map.
     *
     * @param plateData     per-tile crust heights, row-major with row length plateMapWidth
     * @param plateMapWidth width of the plate's local map in tiles
     * @param xOrigin       left edge of the plate on the world map
     * @param yOrigin       top edge of the plate on the world map
     * @param plateAge      timestamp stamped onto every non-empty tile
     * @param mapSize       size of the (square, wrapping) world map
     * @param rand          RNG for the initial direction and subduction jitter
     */
    public Plate(float[] plateData, int plateMapWidth, int xOrigin, int yOrigin, int plateAge, int mapSize, Random rand) {
        // Empty data would leave every field at its default; bail out early.
        if (plateData.length < 1) return;
        // Save basic pre-defined data.
        this.heightMap = new float[plateData.length];
        this.timestampMap = new int[plateData.length];
        this.segmentOwnerMap = new int[plateData.length];
        Arrays.fill(segmentOwnerMap, 255); // 255 == "tile owned by no segment"
        this.left = xOrigin;
        this.top = yOrigin;
        this.width = plateMapWidth;
        this.height = plateData.length / plateMapWidth;
        this.rand = rand;
        this.mapSize = mapSize;
        // Establish initial velocity and direction (random unit direction
        // scaled by INITIAL_SPEED; scalar speed starts at 1).
        double angle = 2 * Math.PI * rand.nextDouble();
        this.velocity = 1;
        this.vX = (float)Math.cos(angle) * INITIAL_SPEED;
        this.vY = (float)Math.sin(angle) * INITIAL_SPEED;
        // Intended for random circular motion of plate. Unused.
        //this.alpha = -rand.nextInt(1) * Math.PI * 0.01 * rand.nextFloat();
        // Clone heightMap data, calculate center of mass and total mass.
        int tileIndex = 0; float activeTile = 0.0f;
        System.arraycopy(plateData, 0, heightMap, 0, plateData.length);
        for(int x = 0; x<width; x++) {
            for (int y=0; y<height; y++) {
                tileIndex = y * width + x;
                activeTile = heightMap[tileIndex];
                R_x += x * activeTile;
                R_y += y * activeTile;
                M += activeTile;
                // Only tiles that actually hold crust get an age stamp.
                if (activeTile > 0.0f)
                    this.timestampMap[tileIndex] = plateAge;
            }
        }
        // Normalize center of mass.
        R_x /= M;
        R_y /= M;
    }
    /**
     * Increment collision counter of the continent at given location,
     * first flood-filling a new collision segment if the tile is not yet
     * owned by one.
     *
     * @param x X coordinate of collision point on world map.
     * @param y Y coordinate of collision point on world map.
     * @return Surface area of the collided continent (HACK!)
     */
    int addCollision(int x, int y) {
        int tile = getMapIndex(x, y);
        int xLocal = getOffsetX(x);
        int yLocal = getOffsetY(y);
        // An owner ID >= collisionSegments.size() (e.g. the 255 sentinel)
        // means the tile belongs to no segment yet — create one here.
        int newSegment = this.segmentOwnerMap[tile];
        if (newSegment >= collisionSegments.size())
            newSegment = createSegment(xLocal, yLocal);
        collisionSegments.elementAt(newSegment).Collisions++;
        return collisionSegments.elementAt(newSegment).Area;
    }
/**
* Add crust to plate as result of continental collision.
* @param x X coordinate of location of new crust on world map.
* @param y Y coordinate of location of new crust on world map.
* @param amount Amount of crust to add. (units?)
* @param creationTime Time of creation of new crust.
*/
void addCrustByCollision(int x, int y, float amount, int creationTime) {
setCrust(x, y, getCrust(x, y) + amount, creationTime);
int tile = getMapIndex(x, y);
int xLocal = getOffsetX(x);
int yLocal = getOffsetY(y);
}
/**
* Simulates subduction of oceanic plate under this plate.
*
* Subduction is simulated by calculating the distance on surface
* that subducting sediment will travel under the plate until the
* subducting slab has reached certain depth where the heat triggers
* the melting and uprising of molten magma.
*
* @param x X coordinate of origin of subduction on world map.
* @param y Y coordinate of origin of subduction on world map.
* @param amount Amount of sediment that subducts.
* @param creationTime Time of creation of new crust.
* @param dX X direction of the subducting plate.
* @param dY Y direction of the subducting plate.
*/
void addCrustBySubduction(int x, int y, float amount, int creationTime, float dX, float dY) {
int localX = getOffsetX(x), localY = getOffsetY(y);
float dotProduct = vX * dX + vY * dX;
if (dotProduct > 0) {
dX -= vX;
dY -= vY;
}
float offset = 3.0f * (float)Math.pow(rand.nextFloat(), 3.0D) * (2 * rand.nextInt(1) - 1);
dX = 10 * dX + offset;
dY = 10 * dY + offset;
localX += dX;
localY += dY;
if (width == mapSize) x &= width - 1;
if (height == mapSize) x &= height - 1;
int mapTile = y * width + x;
if (mapTile < width * height && heightMap[mapTile] > 0) {
creationTime = (timestampMap[mapTile] + creationTime)/2;
if (amount > 0)
timestampMap[mapTile] = creationTime;
else
timestampMap[mapTile] = 0;
heightMap[mapTile] += amount;
M += amount;
}
}
    /**
     * Add continental crust from this plate onto another plate.
     *
     * Aggregation of two continents is the event where the collided
     * pieces of crust fuse together at the point of collision. It is
     * crucial to merge not only the collided pieces of crust but also
     * the entire continent that's part of the colliding tad of crust.
     * However, because one plate can contain many islands and pieces
     * of continents, the merging must be done WITHOUT merging the entire
     * plate and all those continental pieces that have NOTHING to do with
     * the collision in question.
     *
     * @param plate Destination plate receiving the crust
     * @param worldX X coordinate of collision point on world map.
     * @param worldY Y coordinate of collision point on world map.
     * @return Amount of crust added to destination plate.
     */
    float aggregateCrust(Plate plate, int worldX, int worldY) {
        int mapTile = getMapIndex(worldX, worldY);
        int localX = getOffsetX(worldX);
        int localY = getOffsetY(worldY);
        int segmentID = segmentOwnerMap[mapTile];
        CollisionSegment segment = collisionSegments.elementAt(segmentID);
        if (segment.Area == 0)
            return 0; // Ignore empty continents.
        plate.selectCollisionSegment(worldX, worldY);
        // Shift into positive range, presumably so (worldX + localX - iX)
        // below cannot go negative before the destination plate wraps it —
        // TODO confirm against the reference implementation.
        worldX += mapSize; worldY += mapSize;
        float M_old = M;
        // Transfer every tile of this continent (same segment owner and
        // non-empty crust) onto the destination plate, erasing it here.
        for (int iY = segment.Y0; iY < segment.Y1; iY++) {
            for (int iX = segment.X0; iX < segment.X1; iX++) {
                int activeTile = iY * width + iX;
                if (segmentOwnerMap[activeTile] == segmentID && heightMap[activeTile] > 0) {
                    plate.addCrustByCollision(worldX + localX - iX, worldY + localY - iY, heightMap[activeTile], timestampMap[activeTile]);
                    M -= heightMap[activeTile];
                    heightMap[activeTile] = 0;
                }
            }
        }
        segment.Area = 0; // Mark the continent as consumed.
        return M_old - M;
    }
/**
* Decrease the speed of plate relative to total mass.
*
* Decreases the speed of plate due to friction occurring when two
* plates collide. The amount of reduction depends on the amount of
* mass that causes friction (i.e. has collided) compared to the
* total mass of the plate. Thus big chunk of crust colliding into a
* small plate will halt it but have little effect on a huge plate.
*
* @param deformingMass Amount of mass deformed in collision.
*/
void applyFriction(float deformingMass) {
if (deformingMass > 0) {
float dV = DEFORMATION_WEIGHT * deformingMass / M;
if (dV > velocity) dV = velocity;
velocity -= dV;
}
}
/**
* Collides two plates according to Newton's laws of motion.
*
* The velocity and direction of both plates are updated using
* impulse forces following the collision according to Newton's laws
* of motion. Deformations are not applied but energy consumed by the
* deformation process is taken away from plate's momentum.
*
* @param plate Plate to test against.
* @param worldX X coordinate of collision point on world map.
* @param worldY Y coordinate of collision point on world map.
* @param collidingMass Amount of colliding mass from source plate.
*/
void collide(Plate plate, int worldX, int worldY, float collidingMass) {
float coefficientRestitution = 0.0f;
int plateA_X = this.getOffsetX(worldX), plateA_Y = this.getOffsetY(worldY);
int plateB_X = plate.getOffsetX(worldX), plateB_Y = plate.getOffsetY(worldY);
int plateA_Tile = this.getMapIndex(worldX, worldY);
int plateB_Tile = plate.getMapIndex(worldX, worldY);
float plateA_dX = plateA_X - R_x;
float plateA_dY = plateA_Y - R_y;
float plateB_dX = plateB_X - plate.R_x;
float plateB_dY = plateB_Y - plate.R_y;
float collision_X = plateA_dX - plateB_dX;
float collision_Y = plateA_dY - plateB_dY;
float magnitude = (float)Math.sqrt(collision_X * collision_X + collision_Y * collision_Y);
if (magnitude <= 0)
return; // no relative motion between plates.
collision_X /= magnitude; collision_Y /= magnitude; // normalize collision vector
float relative_X = vX - plate.vX, relative_Y = vY - plate.vY; // find relative velocity vector
float dotProduct = relative_X * collision_X + relative_Y * collision_Y;
if (dotProduct <= 0)
return; // plates moving away from each other.
float denominatorOfImpulse = (float)Math.pow(magnitude, 2.0f) * (1.0f/M + 1.0f/collidingMass);
// force of impulse
float J = -(1 + coefficientRestitution) * dotProduct / denominatorOfImpulse;
// Finally apply an acceleration;
dX += collision_X * J / M;
dY += collision_Y * J / M;
plate.dX -= collision_X * J / (collidingMass + plate.M);
plate.dY -= collision_Y * J / (collidingMass + plate.M);
}
    /**
     * Apply plate wide erosion algorithm: crust higher than its neighbors
     * "flows" downhill into the four cardinal neighbors, smoothing the
     * height map. Plate's total mass and center of mass are recomputed as
     * a side effect.
     *
     * @param lowerBound Height below which a tile is not eroded any further.
     */
    void erode(float lowerBound) {
        float newHeightmap[] = new float[width * height];
        Arrays.fill(newHeightmap, 0);
        M = R_x = R_y = 0;
        for (int y = 0; y < height; y++) {
            for (int x = 0; x < width; x++) {
                int mapTile = y * width + x;
                M += heightMap[mapTile];
                newHeightmap[mapTile] += heightMap[mapTile];
                // Update R (center of mass)
                R_x += x * heightMap[mapTile];
                R_y += y * heightMap[mapTile];
                if (heightMap[mapTile] < lowerBound)
                    continue; // eroded too far already, no more
                // 4-neighbor indices, clamped at the plate border.
                int mapTileN = Math.max(0, (y - 1)) * width + x;
                int mapTileS = Math.min(height - 1, (y + 1)) * width + x;
                int mapTileW = y * width + Math.max(0, x - 1);
                int mapTileE = y * width + Math.min(width - 1, x + 1);
                // Neighbor heights; left at 0 for neighbors beyond the edge.
                float heightN = 0, heightS = 0, heightW = 0, heightE = 0;
                if (y > 0) heightN = heightMap[mapTileN];
                if (y < height - 1) heightS = heightMap[mapTileS];
                if (x > 0) heightW = heightMap[mapTileW];
                if (x < width - 1) heightE = heightMap[mapTileE];
                if (heightN + heightS + heightW + heightE == 0)
                    continue; // no neighbors
                // Drop from this tile to each neighbor; minDiff is the drop
                // to the highest neighbor.
                float diffN = heightMap[mapTile] - heightN;
                float diffS = heightMap[mapTile] - heightS;
                float diffW = heightMap[mapTile] - heightW;
                float diffE = heightMap[mapTile] - heightE;
                float minDiff = Math.min(Math.min(diffN, diffS), Math.min(diffW, diffE));
                // Total crust that would level this tile with each existing
                // neighbor down to the highest-neighbor level.
                float diffSum = (heightN > 0 ? (diffN - minDiff) : 0.0f) +
                    (heightS > 0 ? (diffS - minDiff) : 0.0f) +
                    (heightW > 0 ? (diffW - minDiff) : 0.0f) +
                    (heightE > 0 ? (diffE - minDiff) : 0.0f);
                // NOTE(review): the two distribution branches below mirror
                // the original port; verify against the reference platec
                // implementation before altering.
                if (diffSum < minDiff) {
                    // Enough crust to fill all pits and still have some left:
                    // fill the pits, then spread the remainder evenly.
                    newHeightmap[mapTileN] += (heightN > 0)?(diffN - minDiff):0;
                    newHeightmap[mapTileS] += (heightS > 0)?(diffS - minDiff):0;
                    newHeightmap[mapTileW] += (heightW > 0)?(diffW - minDiff):0;
                    newHeightmap[mapTileE] += (heightE > 0)?(diffE - minDiff):0;
                    newHeightmap[mapTile] -= diffSum;
                    minDiff -= diffSum;
                    // Split remainder between this tile and its neighbors.
                    minDiff /= 1 + (heightN > 0?1:0) + (heightS > 0?1:0) +
                        (heightW > 0?1:0) + (heightE > 0?1:0);
                    newHeightmap[mapTileN] += (heightN > 0)?(minDiff):0;
                    newHeightmap[mapTileS] += (heightS > 0)?(minDiff):0;
                    newHeightmap[mapTileW] += (heightW > 0)?(minDiff):0;
                    newHeightmap[mapTileE] += (heightE > 0)?(minDiff):0;
                } else {
                    // Remove the erodable crust from the tile
                    newHeightmap[mapTile] -= minDiff;
                    float crustToShare = minDiff / diffSum;
                    // And spread it over the four neighbors proportionally
                    // to how much lower each neighbor is.
                    newHeightmap[mapTileN] += crustToShare * (heightN > 0?diffN - minDiff:0);
                    newHeightmap[mapTileS] += crustToShare * (heightS > 0?diffS - minDiff:0);
                    newHeightmap[mapTileW] += crustToShare * (heightW > 0?diffW - minDiff:0);
                    newHeightmap[mapTileE] += crustToShare * (heightE > 0?diffE - minDiff:0);
                }
            }
        }
        // Save new eroded heights
        heightMap = newHeightmap;
        // Normalize center of mass
        if (M > 0) {
            R_x /= M;
            R_y /= M;
        }
    }
/**
* Retrieve collision statistics of continent at given location.
* @param worldX X coordinate of collision point on world map.
* @param worldY Y coordinate of collision point on world map.
* @return Instance of collision statistic class containing percentage
* of area collided and number of collisions
*/
CollisionStatistic getCollisionInfo(int worldX, int worldY) {
int localX = getOffsetX(worldX), localY = getOffsetY(worldY);
int mapTile = getMapIndex(worldX, worldY);
int segmentID = segmentOwnerMap[mapTile];
CollisionSegment segment = collisionSegments.elementAt(segmentID);
return new CollisionStatistic(segment.Collisions, segment.Collisions / (1.0f + segment.Area));
}
/**
* Retrieve the surface area of continent lying at desired location.
*
* @param worldX X coordinate of collision point on world map.
* @param worldY Y coordinate of collision point on world map.
* @return Area of continent at desired location or 0 if none.
*/
int getContinentArea(int worldX, int worldY) {
int mapTile = getMapIndex(worldX, worldY);
return collisionSegments.elementAt(mapTile).Area;
}
/**
* Get the amount of plate's crustal material at some location.
*
* @param x X coordinate on world map.
* @param y Y coordinate on world map.
* @return Amount of crust at requested location.
*/
float getCrust(int x, int y) {
int tileLocal = getMapIndex(x, y);
if (tileLocal<0 || tileLocal > timestampMap.length) return 0;
return heightMap[tileLocal];
}
/**
* Get the timestamp of plate's crustal material at some location.
*
* @param x X coordinate on world map.
* @param y Y coordinate on world map.
* @return Timestamp of creation of crust at the location or 0 if no crust.
*/
int getCrustTimestamp(int x, int y) {
int tileLocal = getMapIndex(x, y);
if (tileLocal<0 || tileLocal > timestampMap.length) return 0;
return timestampMap[tileLocal];
}
    /**
     * Get plate's data.
     *
     * NOTE(review): returns the live internal array, not a copy — mutations
     * by the caller change plate state directly.
     *
     * @return heightMap data
     */
    float[] getHeightmap() {
        return this.heightMap;
    }
    /**
     * Get plate's data.
     *
     * NOTE(review): returns the live internal array, not a copy — mutations
     * by the caller change plate state directly.
     *
     * @return Time of creation data.
     */
    int[] getTimestampMap() {
        return this.timestampMap;
    }
    /**
     * Moves plate along its trajectory: folds the accumulated acceleration
     * into the velocity vector, then advances the plate's top-left corner
     * (with torus wrap-around).
     */
    void move() {
        updateVelocity();
        updatePosition();
    }
private void updateVelocity() {
vX += dX; dX = 0;
vY += dY; dY = 0;
float len = (float)Math.sqrt(vX * vX + vY * vY);
vX /= len;
vY /= len;
velocity += len - 1.0;
if (velocity<0) velocity = 0;
}
private void updatePosition() {
float leftTmp = vX * velocity + left;
float topTmp = vY * velocity + top;
// Wrap-around positions into torus-shaped world.
while (leftTmp < 0) leftTmp += mapSize;
while (topTmp < 0) topTmp += mapSize;
while (leftTmp > mapSize) leftTmp -= mapSize;
while (topTmp > mapSize) topTmp -= mapSize;
left = (int)leftTmp;
top = (int)topTmp;
}
    /**
     * Clear any earlier continental crust partitions.
     *
     * Plate has internal bookkeeping of distinct areas of continental
     * crust for more realistic collision response. However as the number
     * of collisions that plate experiences grows, so does the bookkeeping
     * of a continent become more and more inaccurate. Finally it results
     * in striking artifacts that cannot be overlooked.
     *
     * To alleviate this problem without the need of per iteration
     * recalculations plate supplies caller a method to reset its
     * bookkeeping and start clean.
     */
    void resetSegments() {
        collisionSegments.removeAllElements();
        Arrays.fill(segmentOwnerMap, 255); // 255 == "owned by no segment"
    }
    /**
     * Remember the currently processed continent's segment number so later
     * operations on this plate can refer to it via activeContinentID.
     *
     * @param x X coordinate of origin of collision on world map.
     * @param y Y coordinate of origin of collision on world map.
     */
    void selectCollisionSegment(int x, int y) {
        int mapTile = getMapIndex(x, y);
        activeContinentID = segmentOwnerMap[mapTile];
    }
    /**
     * Set the amount of plate's crustal material at some location.
     *
     * If the amount of crust to be set is negative, it'll be set to zero.
     * If the target point lies outside the plate's current bounds, the
     * plate's local map is grown toward the nearest border (never beyond
     * the world size) so the point fits, and all per-tile data plus the
     * collision-segment bounding boxes are shifted accordingly.
     *
     * @param worldX X coordinate of desired location on the world map.
     * @param worldY Y coordinate of desired location on the world map.
     * @param amount Amount of material at the given location.
     * @param timeStamp Time of creation of new crust.
     */
    void setCrust(int worldX, int worldY, float amount, int timeStamp) {
        if (amount < 0) amount = 0; //negative mass is unlikely
        worldX &= mapSize - 1; worldY &= mapSize - 1; // Just to be safe
        // NOTE(review): localX/localY are never read below.
        int localX = getOffsetX(worldX), localY = getOffsetY(worldY);
        int plateTile = getMapIndex(worldX, worldY);
        // Index past the local map means the point is outside the plate:
        // grow the plate to cover it.
        if (plateTile >= width * height) {
            // Bounds of this plate
            Vector4f bounds = new Vector4f(left, top, left + width - 1, top + height - 1);
            // Distance from each edge for the new crust piece.
            Vector4f dist = new Vector4f(
                bounds.x - worldX,
                bounds.y - worldY,
                (worldX < bounds.x ? mapSize : 0) + worldX - bounds.z,
                (worldY < bounds.y ? mapSize : 0) + worldY - bounds.w
            );
            // Keep only the growth toward the nearest border; zero the rest.
            dist = new Vector4f(
                dist.x * (dist.x >=0 && dist.x < dist.z ? 1 : 0) * (dist.x < mapSize ? 1 : 0),
                dist.y * (dist.y >=0 && dist.y < dist.w ? 1 : 0) * (dist.y < mapSize ? 1 : 0),
                dist.z * (dist.z >=0 && dist.z <= dist.x ? 1 : 0) * (dist.z < mapSize ? 1 : 0),
                dist.w * (dist.w >=0 && dist.w <= dist.y ? 1 : 0) * (dist.w < mapSize ? 1 : 0)
            );
            // Force growth in 8 tile blocks (optimization maybe?)
            // NOTE(review): dead code — the condition is the constant false.
            if (false) {
                if (dist.x > 0) dist.x = 8 * (int)(dist.x / 8 + 1);
                if (dist.y > 0) dist.y = 8 * (int)(dist.y / 8 + 1);
                if (dist.z > 0) dist.z = 8 * (int)(dist.z / 8 + 1);
                if (dist.w > 0) dist.w = 8 * (int)(dist.w / 8 + 1);
            }
            // Clamp new plate size to world map size
            if (width + dist.x + dist.z > mapSize) {
                dist.x = 0;
                dist.z = mapSize - width;
            }
            if (height + dist.y + dist.w > mapSize) {
                dist.y = 0;
                dist.w = mapSize - height;
            }
            // Update plate bounds based on distance
            int oldWidth = width, oldHeight = height;
            left -= dist.x; if (left < 0) left += mapSize;
            width += dist.x + dist.z;
            top -= dist.y; if (top < 0) top += mapSize;
            height += dist.y + dist.w;
            // Reallocate plate data storage
            float[] newHeightmap = new float[width * height];
            int[] newSegmentOwnerMap = new int[width * height];
            int[] newTimestampMap = new int[width * height];
            // Copy existing data over, row by row, offset by the growth on
            // the left/top edges.
            for (int row = 0; row < oldHeight; row++) {
                int posDest = (int) ((dist.y + row) * width + dist.x);
                int posSrc = row * oldWidth;
                if (oldWidth>width)
                    System.out.println("Panic!");
                System.arraycopy(heightMap, posSrc, newHeightmap, posDest, oldWidth);
                System.arraycopy(segmentOwnerMap, posSrc, newSegmentOwnerMap, posDest, oldWidth);
                System.arraycopy(timestampMap, posSrc, newTimestampMap, posDest, oldWidth);
            }
            // Replace the old(now invalid) storage
            heightMap = newHeightmap;
            segmentOwnerMap = newSegmentOwnerMap;
            timestampMap = newTimestampMap;
            // Shift collision segment local coordinates
            for (CollisionSegment seg:collisionSegments) {
                seg.X0 += dist.x;
                seg.X1 += dist.x;
                seg.Y0 += dist.y;
                seg.Y1 += dist.y;
            }
        }
        // Recompute the index; the plate may have been regrown above.
        plateTile = getMapIndex(worldX, worldY);
        // Merging with existing crust averages the ages; fresh crust takes
        // the new timestamp as-is.
        if (amount > 0 && heightMap[plateTile] > 0) {
            timestampMap[plateTile] += timeStamp;
            timestampMap[plateTile] /= 2;
        } else if (amount > 0) {
            timestampMap[plateTile] = timeStamp;
        }
        // Update mass
        M -= heightMap[plateTile];
        heightMap[plateTile] = amount;
        M += heightMap[plateTile];
    }
    /**
     * Separate a continent at (X, Y) to its own partition.
     *
     * Method analyzes the pixels 4-ways adjacent at the given location
     * and labels all connected continental points with the same segment ID.
     *
     * @param localX X coordinate on the local map.
     * @param localY Y coordinate on the local map.
     * @return ID of the segment the point ends up in: a newly created one,
     *         or an existing segment the point already belongs/adjoins to.
     *         (The original doc claimed -1 on failure; the code never
     *         returns -1.)
     */
    private int createSegment(int localX, int localY) {
        int origin_index = localY * width + localX;
        int newSegmentID = collisionSegments.size();
        // This tile already belongs to a collision segment
        if (segmentOwnerMap[origin_index] < newSegmentID)
            return segmentOwnerMap[origin_index];
        // Is a neighboring tile part of an existing collision segment?
        int adjSegmentID = checkNeighboringSegment(localX, localY);
        if (adjSegmentID < newSegmentID)
            return adjSegmentID;
        segmentOwnerMap[origin_index] = newSegmentID;
        CollisionSegment newSegment = new CollisionSegment(localX, localY, localX, localY, 1);
        // Flood-fill outward, claiming every 4-connected tile of continental
        // crust (height >= CONT_BASE) that is not yet owned by a segment.
        // NOTE(review): Stack here is a project class (Push/Pop/Peek/
        // IsEmpty), not java.util.Stack.
        Stack<Integer> border = new Stack<Integer>();
        border.Push(origin_index);
        while (!border.IsEmpty()) {
            // choose random location on border
            int borderIndex = rand.nextInt(border.size());
            int mapTile = border.Peek(borderIndex);
            int x = Util.getX(mapTile, width);
            int y = Util.getY(mapTile, width);
            // in the 4 cardinal directions, clamp at border.
            int tileN, tileS, tileW, tileE;
            tileN = Util.getTile(x, Math.max(y - 1, 0), width);
            tileS = Util.getTile(x, Math.min(y + 1, height - 1), width);
            tileW = Util.getTile(Math.max(x - 1, 0), y, width);
            tileE = Util.getTile(Math.min(x + 1, width - 1), y, width);
            // If the N/S/E/W tile is un-owned, claim it for the active segment
            // and add it to the border.
            if (segmentOwnerMap[tileN] > newSegmentID && heightMap[tileN] >= CONT_BASE) {
                segmentOwnerMap[tileN] = newSegmentID;
                border.Push(tileN);
                newSegment.Area++;
                newSegment.UpdateBoundsToInclude(Util.getX(tileN, width), Util.getY(tileN, width));
            }
            if (segmentOwnerMap[tileS] > newSegmentID && heightMap[tileS] >= CONT_BASE) {
                segmentOwnerMap[tileS] = newSegmentID;
                border.Push(tileS);
                newSegment.Area++;
                newSegment.UpdateBoundsToInclude(Util.getX(tileS, width), Util.getY(tileS, width));
            }
            if (segmentOwnerMap[tileW] > newSegmentID && heightMap[tileW] >= CONT_BASE) {
                segmentOwnerMap[tileW] = newSegmentID;
                border.Push(tileW);
                newSegment.Area++;
                newSegment.UpdateBoundsToInclude(Util.getX(tileW, width), Util.getY(tileW, width));
            }
            if (segmentOwnerMap[tileE] > newSegmentID && heightMap[tileE] >= CONT_BASE) {
                segmentOwnerMap[tileE] = newSegmentID;
                border.Push(tileE);
                newSegment.Area++;
                newSegment.UpdateBoundsToInclude(Util.getX(tileE, width), Util.getY(tileE, width));
            }
            // Overwrite processed point in border with last item from border
            border.set(borderIndex, border.Peek());
            border.Pop();
        }
        collisionSegments.addElement(newSegment);
        return newSegmentID;
    }
private int checkNeighboringSegment(int localX, int localY) {
int origin_index = localY * width + localX;
int newSegmentID = collisionSegments.size();
int adjTileSegmentID = newSegmentID;
if ((localX > 0) &&
heightMap[origin_index-1] >= CONT_BASE &&
segmentOwnerMap[origin_index-1] < newSegmentID) {
adjTileSegmentID = segmentOwnerMap[origin_index - 1];
} else if ((localX < width - 1) &&
heightMap[origin_index+1] >= CONT_BASE &&
segmentOwnerMap[origin_index+1] < newSegmentID) {
adjTileSegmentID = segmentOwnerMap[origin_index + 1];
} else if ((localY > 0) &&
heightMap[origin_index - width] >= CONT_BASE &&
segmentOwnerMap[origin_index - width] < newSegmentID) {
adjTileSegmentID = segmentOwnerMap[origin_index - width];
} else if ((localY < height - 1) &&
heightMap[origin_index + width] >= CONT_BASE &&
segmentOwnerMap[origin_index + width] < newSegmentID) {
adjTileSegmentID = segmentOwnerMap[origin_index + width];
}
if (adjTileSegmentID < newSegmentID) {
// A neighbor exists, this tile should be added to it instead
segmentOwnerMap[origin_index] = adjTileSegmentID;
CollisionSegment segment = collisionSegments.elementAt(adjTileSegmentID);
segment.Area++;
if (localX > segment.X0) segment.X0 = localX;
if (localX > segment.X1) segment.X1 = localX;
if (localY < segment.Y0) segment.Y0 = localY;
if (localY < segment.Y1) segment.Y1 = localY;
}
return adjTileSegmentID;
}
    /**
     * Translate world coordinates into offset within plate's height map.
     *
     * NOTE(review): the result is NOT bounds-checked here; points outside
     * the plate yield indices >= width*height (setCrust relies on this to
     * detect when it must grow the plate).
     *
     * @param x X coordinate on world map.
     * @param y Y coordinate on world map.
     * @return Index into local heightmap.
     */
    private int getMapIndex(int x, int y) {
        return (getOffsetY(y) * width + getOffsetX(x));
    }
    /**
     * Convert a world X coordinate into this plate's local X, wrapping
     * around the torus world when the point lies "behind" the plate's left
     * edge. The result is not clamped to the plate's width.
     */
    private int getOffsetX(int x) {
        x &= mapSize - 1; // scale within map dimensions
        if (x < left) x += mapSize; // wrap around world edge if necessary
        x -= this.left;
        return x;
    }
    /**
     * Convert a world Y coordinate into this plate's local Y, wrapping
     * around the torus world when the point lies "above" the plate's top
     * edge. The result is not clamped to the plate's height.
     */
    private int getOffsetY(int y) {
        y &= mapSize - 1; // scale within map dimensions
        if (y < top) y += mapSize; // wrap around world edge if necessary
        y -= this.top;
        return y;
    }
}
|
src/java/com/tepidpond/tum/PlateTectonics/Plate.java
|
package com.tepidpond.tum.PlateTectonics;
import java.util.Arrays;
import java.util.Random;
import java.util.Vector;
import org.lwjgl.util.vector.Vector4f;
public class Plate {
private static final float DEFORMATION_WEIGHT = 5f;
private static final float INITIAL_SPEED = 1.0f;
private static final float CONT_BASE = 1.0f;
private int activeContinentID;
private Vector<CollisionSegment> collisionSegments = new Vector<CollisionSegment>();
// Height of terrain on the plate.
private float[] heightMap;
// Age of the crust on the plate.
private int[] timestampMap;
// Which collision segment is responsible for the given tile.
private int[] segmentOwnerMap;
// Dimensions and locations of plate in world coordinates
private int left, top, width, height;
// Size of world map
private int mapSize;
// Amount of crust that constitutes the plate.
private float M;
// Center of mass of the plate in world coordinates
private float R_x, R_y;
// Components of plate's velocity. vX and vY are components of a unit vector, velocity is the magnitude
private float velocity, vX, vY;
// Components of plate's acceleration
private float dX, dY;
// Used for random off-setting in subduction routine and setting up initial direction
private Random rand;
float getMomentum() {return M * velocity;}
int getLeft() {return left;}
int getTop() {return top;}
int getHeight() {return height;}
int getWidth() {return width;}
float getVelocity() {return velocity;}
float getVelocityX() {return vX;}
float getVelocityY() {return vY;}
Boolean isEmpty() {return M<=0;}
public Plate(float[] plateData, int plateMapWidth, int xOrigin, int yOrigin, int plateAge, int mapSize, Random rand) {
if (plateData.length < 1) return;
// Save basic pre-defined data.
this.heightMap = new float[plateData.length];
this.timestampMap = new int[plateData.length];
this.segmentOwnerMap = new int[plateData.length];
Arrays.fill(segmentOwnerMap, 255);
this.left = xOrigin;
this.top = yOrigin;
this.width = plateMapWidth;
this.height = plateData.length / plateMapWidth;
this.rand = rand;
this.mapSize = mapSize;
// Establish initial velocity and direction.
double angle = 2 * Math.PI * rand.nextDouble();
this.velocity = 1;
this.vX = (float)Math.cos(angle) * INITIAL_SPEED;
this.vY = (float)Math.sin(angle) * INITIAL_SPEED;
// Intended for random circular motion of plate. Unused.
//this.alpha = -rand.nextInt(1) * Math.PI * 0.01 * rand.nextFloat();
// Clone heightMap data, calculate center of mass and total mass.
int tileIndex = 0; float activeTile = 0.0f;
System.arraycopy(plateData, 0, heightMap, 0, plateData.length);
for(int x = 0; x<width; x++) {
for (int y=0; y<height; y++) {
tileIndex = y * width + x;
activeTile = heightMap[tileIndex];
R_x += x * activeTile;
R_y += y * activeTile;
M += activeTile;
if (activeTile > 0.0f)
this.timestampMap[tileIndex] = plateAge;
}
}
// Normalize center of mass.
R_x /= M;
R_y /= M;
}
/**
* Increment collision counter of the continent at given location.
* @param x X coordinate of collision point on world map.
* @param y Y coordinate of collision point on world map.
* @return Surface area of the collided continent (HACK!)
*/
int addCollision(int x, int y) {
int tile = getMapIndex(x, y);
int xLocal = getOffsetX(x);
int yLocal = getOffsetY(y);
int newSegment = this.segmentOwnerMap[tile];
if (newSegment >= collisionSegments.size())
newSegment = createSegment(xLocal, yLocal);
collisionSegments.elementAt(newSegment).Collisions++;
return collisionSegments.elementAt(newSegment).Area;
}
/**
* Add crust to plate as result of continental collision.
* @param x X coordinate of location of new crust on world map.
* @param y Y coordinate of location of new crust on world map.
* @param amount Amount of crust to add. (units?)
* @param creationTime Time of creation of new crust.
*/
void addCrustByCollision(int x, int y, float amount, int creationTime) {
setCrust(x, y, getCrust(x, y) + amount, creationTime);
int tile = getMapIndex(x, y);
int xLocal = getOffsetX(x);
int yLocal = getOffsetY(y);
}
/**
* Simulates subduction of oceanic plate under this plate.
*
* Subduction is simulated by calculating the distance on surface
* that subducting sediment will travel under the plate until the
* subducting slab has reached certain depth where the heat triggers
* the melting and uprising of molten magma.
*
* @param x X coordinate of origin of subduction on world map.
* @param y Y coordinate of origin of subduction on world map.
* @param amount Amount of sediment that subducts.
* @param creationTime Time of creation of new crust.
* @param dX X direction of the subducting plate.
* @param dY Y direction of the subducting plate.
*/
void addCrustBySubduction(int x, int y, float amount, int creationTime, float dX, float dY) {
int localX = getOffsetX(x), localY = getOffsetY(y);
float dotProduct = vX * dX + vY * dX;
if (dotProduct > 0) {
dX -= vX;
dY -= vY;
}
float offset = 3.0f * (float)Math.pow(rand.nextFloat(), 3.0D) * (2 * rand.nextInt(1) - 1);
dX = 10 * dX + offset;
dY = 10 * dY + offset;
localX += dX;
localY += dY;
if (width == mapSize) x &= width - 1;
if (height == mapSize) x &= height - 1;
int mapTile = y * width + x;
if (mapTile < width * height && heightMap[mapTile] > 0) {
creationTime = (timestampMap[mapTile] + creationTime)/2;
if (amount > 0)
timestampMap[mapTile] = creationTime;
else
timestampMap[mapTile] = 0;
heightMap[mapTile] += amount;
M += amount;
}
}
/**
* Add continental crust from this plate onto another plate.
*
* Aggregation of two continents is the event where the collided
* pieces of crust fuse together at the point of collision. It is
* crucial to merge not only the collided pieces of crust but also
* the entire continent that's part of the colliding tad of crust.
* However, because one plate can contain many islands and pieces
* of continents, the merging must be done WITHOUT merging the entire
* plate and all those continental pieces that have NOTHING to do with
* the collision in question.
*
* @param plate Destination plate receiving the crust
* @param worldX X coordinate of collision point on world map.
* @param worldY Y coordinate of collision point on world map.
* @return Amount of crust added to destination plate.
*/
float aggregateCrust(Plate plate, int worldX, int worldY) {
int mapTile = getMapIndex(worldX, worldY);
int localX = getOffsetX(worldX);
int localY = getOffsetY(worldY);
int segmentID = segmentOwnerMap[mapTile];
CollisionSegment segment = collisionSegments.elementAt(segmentID);
if (segment.Area == 0)
return 0; // Ignore empty continents.
plate.selectCollisionSegment(worldX, worldY);
worldX += mapSize; worldY += mapSize;
float M_old = M;
for (int iY = segment.Y0; iY < segment.Y1; iY++) {
for (int iX = segment.X0; iX < segment.X1; iX++) {
int activeTile = iY * width + iX;
if (segmentOwnerMap[activeTile] == segmentID && heightMap[activeTile] > 0) {
plate.addCrustByCollision(worldX + localX - iX, worldY + localY - iY, heightMap[activeTile], timestampMap[activeTile]);
M -= heightMap[activeTile];
heightMap[activeTile] = 0;
}
}
}
segment.Area = 0;
return M_old - M;
}
/**
 * Slow this plate down in proportion to the mass deformed in a collision.
 *
 * The reduction is scaled against the plate's total mass, so a big chunk of
 * colliding crust nearly halts a small plate while barely affecting a huge
 * one. Speed is clamped so friction can never make it negative.
 *
 * @param deformingMass Amount of mass deformed in collision.
 */
void applyFriction(float deformingMass) {
    if (deformingMass <= 0) {
        return; // nothing collided, nothing to slow down
    }
    float brake = DEFORMATION_WEIGHT * deformingMass / M;
    velocity -= Math.min(brake, velocity);
}
/**
 * Collides two plates according to Newton's laws of motion.
 *
 * The velocity and direction of both plates are updated using impulse
 * forces following the collision. Deformations are not applied here, but
 * the energy consumed by deformation is taken away from plate momentum.
 *
 * Fix: removed the unused locals {@code plateA_Tile}/{@code plateB_Tile}
 * (dead calls to the side-effect-free getMapIndex).
 *
 * @param plate Plate to test against.
 * @param worldX X coordinate of collision point on world map.
 * @param worldY Y coordinate of collision point on world map.
 * @param collidingMass Amount of colliding mass from source plate.
 */
void collide(Plate plate, int worldX, int worldY, float collidingMass) {
    float coefficientRestitution = 0.0f; // 0 => perfectly inelastic collision
    // Collision point expressed in each plate's local coordinates.
    int plateA_X = this.getOffsetX(worldX), plateA_Y = this.getOffsetY(worldY);
    int plateB_X = plate.getOffsetX(worldX), plateB_Y = plate.getOffsetY(worldY);
    // Vectors from each plate's center of mass (R) to the collision point.
    float plateA_dX = plateA_X - R_x;
    float plateA_dY = plateA_Y - R_y;
    float plateB_dX = plateB_X - plate.R_x;
    float plateB_dY = plateB_Y - plate.R_y;
    // Collision normal: difference of the two center-to-point vectors.
    float collision_X = plateA_dX - plateB_dX;
    float collision_Y = plateA_dY - plateB_dY;
    float magnitude = (float)Math.sqrt(collision_X * collision_X + collision_Y * collision_Y);
    if (magnitude <= 0)
        return; // no relative motion between plates.
    collision_X /= magnitude; collision_Y /= magnitude; // normalize collision vector
    float relative_X = vX - plate.vX, relative_Y = vY - plate.vY; // find relative velocity vector
    float dotProduct = relative_X * collision_X + relative_Y * collision_Y;
    if (dotProduct <= 0)
        return; // plates moving away from each other.
    // Denominator keeps the unnormalized magnitude^2; combined with the
    // normalized dot product above, the extra 1/magnitude factors cancel when
    // J is projected back onto the (normalized) collision axes below.
    float denominatorOfImpulse = (float)Math.pow(magnitude, 2.0f) * (1.0f/M + 1.0f/collidingMass);
    // force of impulse
    float J = -(1 + coefficientRestitution) * dotProduct / denominatorOfImpulse;
    // Finally apply an acceleration; dX/dY are folded into velocity on move().
    dX += collision_X * J / M;
    dY += collision_Y * J / M;
    plate.dX -= collision_X * J / (collidingMass + plate.M);
    plate.dY -= collision_Y * J / (collidingMass + plate.M);
}
/**
 * Apply the plate-wide erosion algorithm: crust at or above
 * {@code lowerBound} diffuses toward lower 4-connected neighbors.
 *
 * Side effects: the plate's total mass (M) and center of mass (R_x, R_y)
 * are recomputed from the pre-erosion heights, and heightMap is replaced.
 *
 * @param lowerBound Height below which a tile is not eroded further.
 */
void erode(float lowerBound) {
    float newHeightmap[] = new float[width * height];
    Arrays.fill(newHeightmap, 0);
    M = R_x = R_y = 0;
    for (int y = 0; y < height; y++) {
        for (int x = 0; x < width; x++) {
            int mapTile = y * width + x;
            M += heightMap[mapTile];
            newHeightmap[mapTile] += heightMap[mapTile];
            // Update R (center of mass), normalized by M after the loop.
            R_x += x * heightMap[mapTile];
            R_y += y * heightMap[mapTile];
            if (heightMap[mapTile] < lowerBound)
                continue; // eroded too far already, no more
            // Clamped (non-wrapping) indices of the four neighbors.
            int mapTileN = Math.max(0, (y - 1)) * width + x;
            int mapTileS = Math.min(height - 1, (y + 1)) * width + x;
            int mapTileW = y * width + Math.max(0, x - 1);
            int mapTileE = y * width + Math.min(width - 1, x + 1);
            // Height 0 doubles as "no neighbor" at the plate border.
            float heightN = 0, heightS = 0, heightW = 0, heightE = 0;
            if (y > 0) heightN = heightMap[mapTileN];
            if (y < height - 1) heightS = heightMap[mapTileS];
            if (x > 0) heightW = heightMap[mapTileW];
            if (x < width - 1) heightE = heightMap[mapTileE];
            if (heightN + heightS + heightW + heightE == 0)
                continue; // no neighbors
            // Height drop toward each neighbor; minDiff is the smallest drop,
            // i.e. the amount that can leave without sinking below any neighbor.
            float diffN = heightMap[mapTile] - heightN;
            float diffS = heightMap[mapTile] - heightS;
            float diffW = heightMap[mapTile] - heightW;
            float diffE = heightMap[mapTile] - heightE;
            float minDiff = Math.min(Math.min(diffN, diffS), Math.min(diffW, diffE));
            // Crust needed to level every slope down to minDiff.
            float diffSum = (heightN > 0 ? (diffN - minDiff) : 0.0f) +
                    (heightS > 0 ? (diffS - minDiff) : 0.0f) +
                    (heightW > 0 ? (diffW - minDiff) : 0.0f) +
                    (heightE > 0 ? (diffE - minDiff) : 0.0f);
            if (diffSum < minDiff) {
                // Plenty of erodable crust: level the slopes fully, then split
                // the remainder among this tile and its existing neighbors.
                // NOTE(review): the center is debited diffSum while neighbors
                // receive diffSum plus their per-neighbor shares of the
                // remainder, so mass does not appear to be conserved here;
                // the reference implementation debits minDiff instead.
                // Confirm against the upstream algorithm.
                newHeightmap[mapTileN] += (heightN > 0)?(diffN - minDiff):0;
                newHeightmap[mapTileS] += (heightS > 0)?(diffS - minDiff):0;
                newHeightmap[mapTileW] += (heightW > 0)?(diffW - minDiff):0;
                newHeightmap[mapTileE] += (heightE > 0)?(diffE - minDiff):0;
                newHeightmap[mapTile] -= diffSum;
                minDiff -= diffSum;
                // Divide remainder by 1 + number of real neighbors (center keeps a share).
                minDiff /= 1 + (heightN > 0?1:0) + (heightS > 0?1:0) +
                        (heightW > 0?1:0) + (heightE > 0?1:0);
                newHeightmap[mapTileN] += (heightN > 0)?(minDiff):0;
                newHeightmap[mapTileS] += (heightS > 0)?(minDiff):0;
                newHeightmap[mapTileW] += (heightW > 0)?(minDiff):0;
                newHeightmap[mapTileE] += (heightE > 0)?(minDiff):0;
            } else {
                // Remove the erodable crust from the tile
                newHeightmap[mapTile] -= minDiff;
                float crustToShare = minDiff / diffSum;
                // And spread it over the four neighbors, proportional to slope.
                newHeightmap[mapTileN] += crustToShare * (heightN > 0?diffN - minDiff:0);
                newHeightmap[mapTileS] += crustToShare * (heightS > 0?diffS - minDiff:0);
                newHeightmap[mapTileW] += crustToShare * (heightW > 0?diffW - minDiff:0);
                newHeightmap[mapTileE] += crustToShare * (heightE > 0?diffE - minDiff:0);
            }
        }
    }
    // Save new eroded heights
    heightMap = newHeightmap;
    // Normalize center of mass
    if (M > 0) {
        R_x /= M;
        R_y /= M;
    }
}
/**
 * Retrieve collision statistics of the continent at the given location.
 *
 * Fix: removed the unused locals {@code localX}/{@code localY} (dead calls
 * to the side-effect-free offset helpers).
 *
 * @param worldX X coordinate of collision point on world map.
 * @param worldY Y coordinate of collision point on world map.
 * @return Collision statistics: the segment's collision count and the
 *         count relative to its area.
 */
CollisionStatistic getCollisionInfo(int worldX, int worldY) {
    int mapTile = getMapIndex(worldX, worldY);
    int segmentID = segmentOwnerMap[mapTile];
    CollisionSegment segment = collisionSegments.elementAt(segmentID);
    // +1 in the denominator avoids division by zero for empty segments.
    return new CollisionStatistic(segment.Collisions, segment.Collisions / (1.0f + segment.Area));
}
/**
 * Retrieve the surface area of the continent lying at the desired location.
 *
 * @param worldX X coordinate of collision point on world map.
 * @param worldY Y coordinate of collision point on world map.
 * @return Area of continent at desired location or 0 if none.
 */
int getContinentArea(int worldX, int worldY) {
    int mapTile = getMapIndex(worldX, worldY);
    // BUG FIX: the tile index must be resolved to its owning segment first.
    // The original indexed the segment vector with the raw tile index, which
    // reads an unrelated segment (or throws) instead of the continent at
    // this location; every other accessor in this class goes through
    // segmentOwnerMap.
    return collisionSegments.elementAt(segmentOwnerMap[mapTile]).Area;
}
/**
 * Get the amount of plate's crustal material at some location.
 *
 * @param x X coordinate on world map.
 * @param y Y coordinate on world map.
 * @return Amount of crust at requested location, or 0 if out of bounds.
 */
float getCrust(int x, int y) {
    int tileLocal = getMapIndex(x, y);
    // BUG FIX: an index equal to the array length is already out of bounds,
    // so reject with >= (the original used > and could throw AIOOBE), and
    // bound against the array that is actually indexed.
    if (tileLocal < 0 || tileLocal >= heightMap.length) return 0;
    return heightMap[tileLocal];
}
/**
 * Get the timestamp of plate's crustal material at some location.
 *
 * @param x X coordinate on world map.
 * @param y Y coordinate on world map.
 * @return Timestamp of creation of crust at the location, or 0 if no crust
 *         or the location falls outside the plate's maps.
 */
int getCrustTimestamp(int x, int y) {
    int tileLocal = getMapIndex(x, y);
    // BUG FIX: an index equal to the array length is already out of bounds,
    // so reject with >= (the original used > and could throw AIOOBE).
    if (tileLocal < 0 || tileLocal >= timestampMap.length) return 0;
    return timestampMap[tileLocal];
}
/**
 * Expose the plate's raw crust-height data.
 * Note: this returns the live internal buffer, not a copy.
 *
 * @return the plate-local height map.
 */
float[] getHeightmap() {
    return heightMap;
}
/**
 * Expose the plate's raw crust-creation-time data.
 * Note: this returns the live internal buffer, not a copy.
 *
 * @return the plate-local map of crust creation timestamps.
 */
int[] getTimestampMap() {
    return timestampMap;
}
/**
 * Moves plate along its trajectory: first folds the pending acceleration
 * (dX, dY) into the direction/speed pair, then advances the plate's
 * top-left corner across the toroidal world map.
 */
void move() {
    updateVelocity();
    updatePosition();
}
/**
 * Fold the accumulated impulse (dX, dY) into the unit direction vector
 * (vX, vY) and the scalar speed: the direction is re-normalized and the
 * change in vector length is transferred into {@code velocity}.
 */
private void updateVelocity() {
    vX += dX; dX = 0;
    vY += dY; dY = 0;
    float len = (float)Math.sqrt(vX * vX + vY * vY);
    // BUG FIX: guard the normalization. The original divided unconditionally,
    // so a zero-length vector (stationary plate, or impulses cancelling the
    // direction exactly) turned vX/vY into NaN and poisoned all later math.
    if (len > 0) {
        vX /= len;
        vY /= len;
    }
    // Transfer the length change into the scalar speed (unchanged semantics:
    // a zero-length vector still slows the plate by 1).
    velocity += len - 1.0;
    if (velocity < 0) velocity = 0;
}
/**
 * Advance the plate's top-left corner along its velocity vector, wrapping
 * the position into the torus-shaped world so it stays in [0, mapSize).
 */
private void updatePosition() {
    float leftTmp = vX * velocity + left;
    float topTmp = vY * velocity + top;
    // Wrap-around positions into torus-shaped world.
    while (leftTmp < 0) leftTmp += mapSize;
    while (topTmp < 0) topTmp += mapSize;
    // BUG FIX: valid coordinates are [0, mapSize). The original used a strict
    // '>' and so kept a position of exactly mapSize, leaving left/top one
    // full world outside the expected range for later bounds arithmetic.
    while (leftTmp >= mapSize) leftTmp -= mapSize;
    while (topTmp >= mapSize) topTmp -= mapSize;
    left = (int)leftTmp;
    top = (int)topTmp;
}
/**
 * Clear any earlier continental crust partitions.
 *
 * Plate has internal bookkeeping of distinct areas of continental
 * crust for more realistic collision response. However as the number
 * of collisions that plate experiences grows, so does the bookkeeping
 * of a continent become more and more inaccurate. Finally it results
 * in striking artifacts that cannot be overlooked.
 *
 * To alleviate this problem without the need of per iteration
 * recalculations plate supplies caller a method to reset its
 * bookkeeping and start clean.
 */
void resetSegments() {
    collisionSegments.removeAllElements();
    // 255 marks a tile as "owned by no segment": createSegment treats any
    // owner id >= collisionSegments.size() as unclaimed.
    // NOTE(review): this sentinel silently breaks once 255 or more segments
    // are created between resets - confirm the segment count stays below
    // that in practice, or use Integer.MAX_VALUE as the sentinel.
    Arrays.fill(segmentOwnerMap, 255);
}
/**
 * Remember which of this plate's continent segments is the current target
 * of collision processing.
 *
 * @param x X coordinate of origin of collision on world map.
 * @param y Y coordinate of origin of collision on world map.
 */
void selectCollisionSegment(int x, int y) {
    activeContinentID = segmentOwnerMap[getMapIndex(x, y)];
}
/**
 * Set the amount of plate's crustal material at some location, growing the
 * plate's local maps first when the location falls outside them.
 *
 * If the amount of crust to be set is negative, it'll be set to zero.
 *
 * @param worldX X coordinate of desired location on the world map.
 * @param worldY Y coordinate of desired location on the world map.
 * @param amount Amount of material at the given location.
 * @param timeStamp Time of creation of new crust.
 */
void setCrust(int worldX, int worldY, float amount, int timeStamp) {
    if (amount < 0) amount = 0; //negative mass is unlikely
    worldX &= mapSize - 1; worldY &= mapSize - 1; // Just to be safe
    int localX = getOffsetX(worldX), localY = getOffsetY(worldY);
    int mapTile = getMapIndex(worldX, worldY);
    // Target lies outside the plate's current extent: grow the plate toward it.
    if (mapTile >= width * height) {
        // Bounds of this plate
        Vector4f bounds = new Vector4f(left, top, left + width - 1, top + height - 1);
        // Distance from each edge for the new crust piece.
        // NOTE(review): these components can come out negative when the point
        // already lies within the plate's span on one axis. A negative value
        // that survives the selection below is then applied to left/width or
        // top/height and SHRINKS the plate instead of growing it - this looks
        // like the "negative-distance incredible-shrinking-plate" bug; the
        // components should probably be clamped to >= 0 here.
        Vector4f dist = new Vector4f(
            bounds.x - worldX,
            bounds.y - worldY,
            (worldX < bounds.x ? mapSize : 0) + worldX - bounds.z,
            (worldY < bounds.y ? mapSize : 0) + worldY - bounds.w
        );
        // Per axis keep only the smaller of the two edge distances (grow
        // toward the nearest border); anything >= mapSize is zeroed out.
        dist = new Vector4f(
            dist.x * (dist.x < dist.z ? 1 : 0) * (dist.x < mapSize ? 1 : 0),
            dist.y * (dist.y < dist.w ? 1 : 0) * (dist.y < mapSize ? 1 : 0),
            dist.z * (dist.z <= dist.x ? 1 : 0) * (dist.z < mapSize ? 1 : 0),
            dist.w * (dist.w <= dist.y ? 1 : 0) * (dist.w < mapSize ? 1 : 0)
        );
        // Force growth in 8 tile blocks - intentionally disabled
        // ("if (false)" is Java's conditional-compilation idiom).
        if (false) {
            if (dist.x > 0) dist.x = 8 * (int)(dist.x / 8 + 1);
            if (dist.y > 0) dist.y = 8 * (int)(dist.y / 8 + 1);
            if (dist.z > 0) dist.z = 8 * (int)(dist.z / 8 + 1);
            if (dist.w > 0) dist.w = 8 * (int)(dist.w / 8 + 1);
        }
        // Clamp new plate size to world map size
        if (width + dist.x + dist.z > mapSize) {
            dist.x = 0;
            dist.z = mapSize - width;
        }
        if (height + dist.y + dist.w > mapSize) {
            dist.y = 0;
            dist.w = mapSize - height;
        }
        // Update plate bounds based on distance
        int oldWidth = width, oldHeight = height;
        left -= dist.x; if (left < 0) left += mapSize;
        width += dist.x + dist.z;
        top -= dist.y; if (top < 0) top += mapSize;
        height += dist.y + dist.w;
        // Reallocate plate data storage
        float[] newHeightmap = new float[width * height];
        int[] newSegmentOwnerMap = new int[width * height];
        int[] newTimestampMap = new int[width * height];
        // Copy existing data over, shifted by the growth at the west/north edge.
        for (int row = 0; row < oldHeight; row++) {
            int posDest = (int) ((dist.y + row) * width + dist.x);
            int posSrc = row * oldWidth;
            // Debug tripwire for the shrinking-plate bug: the new width must
            // never be smaller than the old one.
            if (oldWidth>width)
                System.out.println("Panic!");
            System.arraycopy(heightMap, posSrc, newHeightmap, posDest, oldWidth);
            System.arraycopy(segmentOwnerMap, posSrc, newSegmentOwnerMap, posDest, oldWidth);
            System.arraycopy(timestampMap, posSrc, newTimestampMap, posDest, oldWidth);
        }
        // Replace the old(now invalid) storage
        heightMap = newHeightmap;
        segmentOwnerMap = newSegmentOwnerMap;
        timestampMap = newTimestampMap;
        // Shift collision segment local coordinates to match the moved origin.
        for (CollisionSegment seg:collisionSegments) {
            seg.X0 += dist.x;
            seg.X1 += dist.x;
            seg.Y0 += dist.y;
            seg.Y1 += dist.y;
        }
    }
    // Recompute the index: the plate may have been resized above.
    mapTile = getMapIndex(worldX, worldY);
    if (amount > 0 && heightMap[mapTile] > 0) {
        // Crust on crust: average the old and new creation timestamps.
        timestampMap[mapTile] += timeStamp;
        timestampMap[mapTile] /= 2;
    } else if (amount > 0) {
        timestampMap[mapTile] = timeStamp;
    }
    // Update mass
    M -= heightMap[mapTile];
    heightMap[mapTile] = amount;
    M += heightMap[mapTile];
}
/**
 * Separate a continent at (X, Y) to its own partition.
 *
 * Method analyzes the pixels 4-ways adjacent at the given location
 * and labels all connected continental points with the same segment ID
 * via a randomized flood fill.
 *
 * @param localX X coordinate on the local map.
 * @param localY Y coordinate on the local map.
 * @return ID of the segment the point belongs to (an existing neighbor's
 *         segment, or the newly created one).
 */
private int createSegment(int localX, int localY) {
    int origin_index = localY * width + localX;
    int newSegmentID = collisionSegments.size();
    // This tile already belongs to a collision segment
    // (owner ids below collisionSegments.size() are valid; the fill value
    // written by resetSegments is above it and means "unclaimed").
    if (segmentOwnerMap[origin_index] < newSegmentID)
        return segmentOwnerMap[origin_index];
    // Is a neighboring tile part of an existing collision segment?
    // If so, checkNeighboringSegment has already adopted this tile into it.
    int adjSegmentID = checkNeighboringSegment(localX, localY);
    if (adjSegmentID < newSegmentID)
        return adjSegmentID;
    // Otherwise start a new single-tile segment and flood-fill outward.
    segmentOwnerMap[origin_index] = newSegmentID;
    CollisionSegment newSegment = new CollisionSegment(localX, localY, localX, localY, 1);
    // NOTE: this is a project-local Stack type (Push/Pop/Peek/IsEmpty,
    // indexed Peek), not java.util.Stack.
    Stack<Integer> border = new Stack<Integer>();
    border.Push(origin_index);
    while (!border.IsEmpty()) {
        // choose random location on border
        int borderIndex = rand.nextInt(border.size());
        int mapTile = border.Peek(borderIndex);
        int x = Util.getX(mapTile, width);
        int y = Util.getY(mapTile, width);
        // in the 4 cardinal directions, clamp at border.
        int tileN, tileS, tileW, tileE;
        tileN = Util.getTile(x, Math.max(y - 1, 0), width);
        tileS = Util.getTile(x, Math.min(y + 1, height - 1), width);
        tileW = Util.getTile(Math.max(x - 1, 0), y, width);
        tileE = Util.getTile(Math.min(x + 1, width - 1), y, width);
        // If the N/S/E/W tile is un-owned continental crust, claim it for the
        // active segment, grow the segment's bounds, and add it to the border.
        if (segmentOwnerMap[tileN] > newSegmentID && heightMap[tileN] >= CONT_BASE) {
            segmentOwnerMap[tileN] = newSegmentID;
            border.Push(tileN);
            newSegment.Area++;
            newSegment.UpdateBoundsToInclude(Util.getX(tileN, width), Util.getY(tileN, width));
        }
        if (segmentOwnerMap[tileS] > newSegmentID && heightMap[tileS] >= CONT_BASE) {
            segmentOwnerMap[tileS] = newSegmentID;
            border.Push(tileS);
            newSegment.Area++;
            newSegment.UpdateBoundsToInclude(Util.getX(tileS, width), Util.getY(tileS, width));
        }
        if (segmentOwnerMap[tileW] > newSegmentID && heightMap[tileW] >= CONT_BASE) {
            segmentOwnerMap[tileW] = newSegmentID;
            border.Push(tileW);
            newSegment.Area++;
            newSegment.UpdateBoundsToInclude(Util.getX(tileW, width), Util.getY(tileW, width));
        }
        if (segmentOwnerMap[tileE] > newSegmentID && heightMap[tileE] >= CONT_BASE) {
            segmentOwnerMap[tileE] = newSegmentID;
            border.Push(tileE);
            newSegment.Area++;
            newSegment.UpdateBoundsToInclude(Util.getX(tileE, width), Util.getY(tileE, width));
        }
        // Overwrite processed point in border with last item from border
        // (constant-time swap-remove, order does not matter for a flood fill).
        border.set(borderIndex, border.Peek());
        border.Pop();
    }
    collisionSegments.addElement(newSegment);
    return newSegmentID;
}
/**
 * Check whether a 4-connected neighbor of (localX, localY) already belongs
 * to a continent segment; if one does, absorb this tile into that segment
 * and grow the segment's bounding box to include it.
 *
 * @param localX X coordinate on the local map.
 * @param localY Y coordinate on the local map.
 * @return the adopted neighbor's segment ID, or the would-be new segment ID
 *         (== collisionSegments.size()) when no owned neighbor exists.
 */
private int checkNeighboringSegment(int localX, int localY) {
    int origin_index = localY * width + localX;
    int newSegmentID = collisionSegments.size();
    int adjTileSegmentID = newSegmentID;
    // Scan W, E, N, S for continental crust that already has an owner.
    if ((localX > 0) &&
        heightMap[origin_index-1] >= CONT_BASE &&
        segmentOwnerMap[origin_index-1] < newSegmentID) {
        adjTileSegmentID = segmentOwnerMap[origin_index - 1];
    } else if ((localX < width - 1) &&
        heightMap[origin_index+1] >= CONT_BASE &&
        segmentOwnerMap[origin_index+1] < newSegmentID) {
        adjTileSegmentID = segmentOwnerMap[origin_index + 1];
    } else if ((localY > 0) &&
        heightMap[origin_index - width] >= CONT_BASE &&
        segmentOwnerMap[origin_index - width] < newSegmentID) {
        adjTileSegmentID = segmentOwnerMap[origin_index - width];
    } else if ((localY < height - 1) &&
        heightMap[origin_index + width] >= CONT_BASE &&
        segmentOwnerMap[origin_index + width] < newSegmentID) {
        adjTileSegmentID = segmentOwnerMap[origin_index + width];
    }
    if (adjTileSegmentID < newSegmentID) {
        // A neighbor exists; this tile is added to its segment instead.
        segmentOwnerMap[origin_index] = adjTileSegmentID;
        CollisionSegment segment = collisionSegments.elementAt(adjTileSegmentID);
        segment.Area++;
        // BUG FIX: (X0, Y0) is the minimum corner and (X1, Y1) the maximum,
        // but the original updated X0 on (localX > X0) and Y1 on (localY < Y1),
        // which shrinks the bounding box instead of expanding it. Expand it:
        if (localX < segment.X0) segment.X0 = localX;
        if (localX > segment.X1) segment.X1 = localX;
        if (localY < segment.Y0) segment.Y0 = localY;
        if (localY > segment.Y1) segment.Y1 = localY;
    }
    return adjTileSegmentID;
}
/**
 * Translate world coordinates into an index within the plate's local maps.
 *
 * @param x X coordinate on world map.
 * @param y Y coordinate on world map.
 * @return Index into the plate-local height map.
 */
private int getMapIndex(int x, int y) {
    int localX = getOffsetX(x);
    int localY = getOffsetY(y);
    return localY * width + localX;
}
/**
 * Convert a world X coordinate to a plate-local X coordinate, wrapping
 * across the toroidal world edge when the plate straddles it.
 */
private int getOffsetX(int x) {
    int wrapped = x & (mapSize - 1);      // clamp into world dimensions
    int local = wrapped - this.left;      // make plate-relative
    return local < 0 ? local + mapSize : local; // wrap around world edge
}
/**
 * Convert a world Y coordinate to a plate-local Y coordinate, wrapping
 * across the toroidal world edge when the plate straddles it.
 */
private int getOffsetY(int y) {
    int wrapped = y & (mapSize - 1);      // clamp into world dimensions
    int local = wrapped - this.top;       // make plate-relative
    return local < 0 ? local + mapSize : local; // wrap around world edge
}
}
|
Bug hunt for the negative-distance incredible-shrinking-plate bug.
|
src/java/com/tepidpond/tum/PlateTectonics/Plate.java
|
Bug hunt for the negative-distance incredible-shrinking-plate bug.
|
|
Java
|
mit
|
d461655f28d73790b6768e49aec1b633778a6508
| 0
|
nearit/Android-SDK,nearit/Android-SDK
|
package it.near.sdk.Recipes.Models;
import android.content.Context;
import android.content.Intent;
import android.net.Uri;
import android.os.Parcelable;
import com.google.gson.annotations.SerializedName;
import com.google.gson.internal.LinkedTreeMap;
import org.json.JSONException;
import java.text.DateFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import it.near.sdk.Communication.NearNetworkUtil;
import it.near.sdk.GlobalConfig;
import it.near.sdk.MorpheusNear.Annotations.Relationship;
import it.near.sdk.MorpheusNear.Resource;
import it.near.sdk.Utils.NearItIntentConstants;
import it.near.sdk.Utils.NearJsonAPIUtils;
/**
 * Model of a NearIT recipe: couples a "pulse" (the trigger) with a
 * "reaction" (the content to deliver), plus notification text, free-form
 * labels and optional scheduling constraints. Instances are deserialized
 * from the JSON API via the gson/Morpheus annotations below.
 *
 * Fix: {@link #isTimetableValid(Calendar)} previously compared times parsed
 * onto the epoch day (1970-01-01) against the current instant, so the upper
 * bound was always in the past and any recipe with a "to" hour could never
 * validate; parsed times are now anchored to the evaluation day.
 *
 * @author cattaneostefano
 */
public class Recipe extends Resource {
    @SerializedName("name")
    String name;
    // Notification payload; expected keys are "title" and "body".
    @SerializedName("notification")
    HashMap<String, Object> notification;
    // Free-form labels; the "online" label marks recipes evaluated server-side.
    @SerializedName("labels")
    HashMap<String, Object> labels;
    // Optional scheduling: "date" range, "timetable" (time of day), "days".
    @SerializedName("scheduling")
    HashMap<String, Object> scheduling;
    @SerializedName("pulse_plugin_id")
    String pulse_plugin_id;
    @Relationship("pulse_bundle")
    PulseBundle pulse_bundle;
    @Relationship("pulse_action")
    PulseAction pulse_action;
    @SerializedName("reaction_plugin_id")
    String reaction_plugin_id;
    @Relationship("reaction_bundle")
    ReactionBundle reaction_bundle;
    @Relationship("reaction_action")
    ReactionAction reaction_action;

    private static final String ONLINE = "online";
    private static final String TRACKINGS_PATH = "trackings";
    public static final String NOTIFIED_STATUS = "notified";
    public static final String ENGAGED_STATUS = "engaged";

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public HashMap<String, Object> getNotification() {
        return notification;
    }

    public void setNotification(HashMap<String, Object> notification) {
        this.notification = notification;
    }

    public HashMap<String, Object> getLabels() {
        return labels;
    }

    /**
     * @return true when the recipe carries the "online" label set to true,
     *         i.e. it must be evaluated server-side.
     */
    public boolean isEvaluatedOnline(){
        if (!labels.containsKey(ONLINE)){
            return false;
        } else {
            return labels.get(ONLINE).equals(true);
        }
    }

    public void setLabels(HashMap<String, Object> labels) {
        this.labels = labels;
    }

    public String getPulse_plugin_id() {
        return pulse_plugin_id;
    }

    public void setPulse_plugin_id(String pulse_plugin_id) {
        this.pulse_plugin_id = pulse_plugin_id;
    }

    public PulseBundle getPulse_bundle() {
        return pulse_bundle;
    }

    public void setPulse_bundle(PulseBundle pulse_bundle) {
        this.pulse_bundle = pulse_bundle;
    }

    public String getReaction_plugin_id() {
        return reaction_plugin_id;
    }

    public void setReaction_plugin_id(String reaction_plugin_id) {
        this.reaction_plugin_id = reaction_plugin_id;
    }

    public ReactionBundle getReaction_bundle() {
        return reaction_bundle;
    }

    public void setReaction_bundle(ReactionBundle reaction_bundle) {
        this.reaction_bundle = reaction_bundle;
    }

    public PulseAction getPulse_action() {
        return pulse_action;
    }

    public void setPulse_action(PulseAction pulse_action) {
        this.pulse_action = pulse_action;
    }

    public ReactionAction getReaction_action() {
        return reaction_action;
    }

    public void setReaction_action(ReactionAction reaction_action) {
        this.reaction_action = reaction_action;
    }

    public void setScheduling(HashMap<String, Object> scheduling) {
        this.scheduling = scheduling;
    }

    /** @return the notification title, or null when absent. */
    public String getNotificationTitle() {
        if (getNotification().containsKey("title")){
            return getNotification().get("title").toString();
        }
        return null;
    }

    /** @return the notification body, or null when absent. */
    public String getNotificationBody() {
        if (getNotification().containsKey("body")){
            return getNotification().get("body").toString();
        }
        return null;
    }

    /**
     * Sends tracking on a recipe. Lets choose the notified status.
     * @param context the app context.
     * @param recipeId the recipe identifier.
     * @param trackingEvent tracking event to send (e.g. {@link #NOTIFIED_STATUS}
     *                      or {@link #ENGAGED_STATUS}).
     * @throws JSONException if the tracking body can't be built.
     */
    public static void sendTracking(Context context, String recipeId, String trackingEvent) throws JSONException {
        String trackingBody = buildTrackingBody(context, recipeId, trackingEvent);
        Uri url = Uri.parse(TRACKINGS_PATH).buildUpon().build();
        NearNetworkUtil.sendTrack(context, url.toString(), trackingBody);
    }

    /**
     * Builds the tracking send request body.
     * @param context the app context.
     * @param recipeId the recipe identifier.
     * @param trackingEvent the tracking event string.
     * @return the http body string.
     * @throws JSONException when any mandatory identifier is missing.
     */
    private static String buildTrackingBody(Context context, String recipeId, String trackingEvent) throws JSONException {
        String profileId = GlobalConfig.getInstance(context).getProfileId();
        String appId = GlobalConfig.getInstance(context).getAppId();
        String installationId = GlobalConfig.getInstance(context).getInstallationId();
        if (recipeId == null ||
                profileId == null ||
                installationId == null ){
            throw new JSONException("missing data");
        }
        // Locale.US pins ASCII digits regardless of the device locale.
        DateFormat sdf = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSZ", Locale.US);
        Date now = new Date(System.currentTimeMillis());
        String formattedDate = sdf.format(now);
        HashMap<String, Object> attributes = new HashMap<>();
        attributes.put("profile_id", profileId);
        attributes.put("installation_id", installationId);
        attributes.put("app_id", appId);
        attributes.put("recipe_id", recipeId);
        attributes.put("event", trackingEvent);
        attributes.put("tracked_at", formattedDate);
        return NearJsonAPIUtils.toJsonAPI("trackings", attributes);
    }

    /** @return whether the pulse requires the app to be in the foreground. */
    public boolean isForegroundRecipe() {
        return getPulse_action().isForeground();
    }

    /**
     * Check if the recipe is valid according to the scheduling information.
     * A recipe without scheduling is always valid.
     * @return the validity of the recipe at the given instant.
     */
    public boolean isScheduledNow(Calendar now){
        return scheduling == null ||
                ( isDateValid(now) &&
                  isTimetableValid(now) &&
                  isDaysValid(now) );
    }

    /**
     * Check if the date range is valid.
     * @return if the date range is respected (bounds are inclusive).
     */
    private boolean isDateValid(Calendar now){
        Map<String, Object> date = (Map<String, Object>) scheduling.get("date");
        if (date == null) return true;
        String fromDateString = (String) date.get("from");
        String toDateString = (String) date.get("to");
        boolean valid = true;
        try {
            // do not move the dateformatter to be an instance variable, it messes the parsing
            SimpleDateFormat dateFormatter = new SimpleDateFormat("yyyy-MM-dd", Locale.US);
            if (fromDateString != null) {
                Date fromDate = dateFormatter.parse(fromDateString);
                Calendar fromCalendarDate = Calendar.getInstance();
                fromCalendarDate.setTimeInMillis(fromDate.getTime());
                valid &= fromCalendarDate.before(now) || fromCalendarDate.equals(now);
            }
            if (toDateString != null) {
                Date toDate = dateFormatter.parse(toDateString);
                Calendar toCalendarDate = Calendar.getInstance();
                toCalendarDate.setTimeInMillis(toDate.getTime());
                valid &= toCalendarDate.after(now) || toCalendarDate.equals(now);
            }
        } catch (ParseException e) {
            e.printStackTrace();
            return false;
        }
        return valid;
    }

    /**
     * Check if the time-of-day range is valid.
     * @return if the time range is respected.
     */
    private boolean isTimetableValid(Calendar now) {
        // Cast to the Map interface: the concrete class depends on the JSON
        // parser, so a LinkedTreeMap cast could throw ClassCastException.
        Map<String, Object> timetable = (Map<String, Object>) scheduling.get("timetable");
        if (timetable == null) return true;
        String fromHour = (String) timetable.get("from");
        String toHour = (String) timetable.get("to");
        boolean valid = true;
        try {
            SimpleDateFormat timeFormatter = new SimpleDateFormat("HH:mm:ss", Locale.US);
            // BUG FIX: parse() lands on 1970-01-01, so comparing the raw result
            // against "now" made the lower bound always pass and the upper bound
            // always fail. Anchor the parsed time-of-day to now's date first.
            if (fromHour != null) {
                Calendar fromHourCalendar = atTimeOfDayOn(now, timeFormatter.parse(fromHour));
                valid &= fromHourCalendar.before(now);
            }
            if (toHour != null){
                Calendar toHourCalendar = atTimeOfDayOn(now, timeFormatter.parse(toHour));
                valid &= toHourCalendar.after(now);
            }
        } catch (ParseException e) {
            e.printStackTrace();
            return false;
        }
        return valid;
    }

    /**
     * Build a calendar on the same day as {@code day} carrying the
     * time-of-day of {@code timeOfDay} (a date parsed with "HH:mm:ss").
     */
    private static Calendar atTimeOfDayOn(Calendar day, Date timeOfDay) {
        Calendar time = Calendar.getInstance();
        time.setTime(timeOfDay);
        Calendar anchored = (Calendar) day.clone();
        anchored.set(Calendar.HOUR_OF_DAY, time.get(Calendar.HOUR_OF_DAY));
        anchored.set(Calendar.MINUTE, time.get(Calendar.MINUTE));
        anchored.set(Calendar.SECOND, time.get(Calendar.SECOND));
        anchored.set(Calendar.MILLISECOND, 0);
        return anchored;
    }

    /**
     * Check if the days selection is valid.
     * @return if the days selection is respected.
     */
    private boolean isDaysValid(Calendar now) {
        List<String> days = (List<String>) scheduling.get("days");
        if (days == null) return true;
        String todaysDate = getTodaysDate(now);
        return days.contains(todaysDate);
    }

    /**
     * Get the day of week for the given instant.
     * @return the day of week in "EE" format e.g. Sat.
     */
    private String getTodaysDate(Calendar now) {
        Date date = now.getTime();
        // 3 letter name form of the day
        return new SimpleDateFormat("EE", Locale.ENGLISH).format(date.getTime());
    }

    /**
     * Fill the intent with extras regarding the recipe and the parcelable content.
     * @param intent the intent for the background event.
     * @param recipe the recipe causing the intent.
     * @param parcelable the content to be delivered.
     */
    public static void fillIntentExtras(Intent intent, Recipe recipe, Parcelable parcelable) {
        intent.putExtra(NearItIntentConstants.RECIPE_ID, recipe.getId());
        // set notification text
        intent.putExtra(NearItIntentConstants.NOTIF_TITLE, recipe.getNotificationTitle());
        intent.putExtra(NearItIntentConstants.NOTIF_BODY, recipe.getNotificationBody());
        // set content to show
        intent.putExtra(NearItIntentConstants.CONTENT, parcelable);
        // set the content type so the app can cast the parcelable to correct content
        intent.putExtra(NearItIntentConstants.REACTION_PLUGIN, recipe.getReaction_plugin_id());
        intent.putExtra(NearItIntentConstants.REACTION_ACTION, recipe.getReaction_action().getId());
        // set the pulse info
        intent.putExtra(NearItIntentConstants.PULSE_PLUGIN, recipe.getPulse_plugin_id());
        intent.putExtra(NearItIntentConstants.PULSE_ACTION, recipe.getPulse_action().getId());
        intent.putExtra(NearItIntentConstants.PULSE_BUNDLE, recipe.getPulse_bundle() != null ? recipe.getPulse_bundle().getId() : "");
    }
}
|
src/main/java/it/near/sdk/Recipes/Models/Recipe.java
|
package it.near.sdk.Recipes.Models;
import android.content.Context;
import android.content.Intent;
import android.net.Uri;
import android.os.Parcelable;
import com.google.gson.annotations.SerializedName;
import com.google.gson.internal.LinkedTreeMap;
import org.json.JSONException;
import java.text.DateFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import it.near.sdk.Communication.NearNetworkUtil;
import it.near.sdk.GlobalConfig;
import it.near.sdk.MorpheusNear.Annotations.Relationship;
import it.near.sdk.MorpheusNear.Resource;
import it.near.sdk.Utils.NearItIntentConstants;
import it.near.sdk.Utils.NearJsonAPIUtils;
/**
* @author cattaneostefano
*/
public class Recipe extends Resource {
@SerializedName("name")
String name;
@SerializedName("notification")
HashMap<String, Object> notification;
@SerializedName("labels")
HashMap<String, Object> labels;
@SerializedName("scheduling")
HashMap<String, Object> scheduling;
@SerializedName("pulse_plugin_id")
String pulse_plugin_id;
@Relationship("pulse_bundle")
PulseBundle pulse_bundle;
@Relationship("pulse_action")
PulseAction pulse_action;
@SerializedName("reaction_plugin_id")
String reaction_plugin_id;
@Relationship("reaction_bundle")
ReactionBundle reaction_bundle;
@Relationship("reaction_action")
ReactionAction reaction_action;
/*@SerializedName("operation_plugin_id")
String operation_plugin_id;
@SerializedName("operation_bundle_id")
String operation_bundle_id;*/
/*@Relationship("operation_action")
OperationAction operation_action;*/
private static final String ONLINE = "online";
private static final String TRACKINGS_PATH = "trackings";
public static final String NOTIFIED_STATUS = "notified";
public static final String ENGAGED_STATUS = "engaged";
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public HashMap<String, Object> getNotification() {
return notification;
}
public void setNotification(HashMap<String, Object> notification) {
this.notification = notification;
}
public HashMap<String, Object> getLabels() {
return labels;
}
public boolean isEvaluatedOnline(){
if (!labels.containsKey(ONLINE)){
return false;
} else {
return labels.get(ONLINE).equals(true);
}
}
public void setLabels(HashMap<String, Object> labels) {
this.labels = labels;
}
public String getPulse_plugin_id() {
return pulse_plugin_id;
}
public void setPulse_plugin_id(String pulse_plugin_id) {
this.pulse_plugin_id = pulse_plugin_id;
}
public PulseBundle getPulse_bundle() {
return pulse_bundle;
}
public void setPulse_bundle(PulseBundle pulse_bundle) {
this.pulse_bundle = pulse_bundle;
}
/*public String getOperation_plugin_id() {
return operation_plugin_id;
}
public void setOperation_plugin_id(String operation_plugin_id) {
this.operation_plugin_id = operation_plugin_id;
}
public String getOperation_bundle_id() {
return operation_bundle_id;
}
public void setOperation_bundle_id(String operation_bundle_id) {
this.operation_bundle_id = operation_bundle_id;
}*/
public String getReaction_plugin_id() {
return reaction_plugin_id;
}
public void setReaction_plugin_id(String reaction_plugin_id) {
this.reaction_plugin_id = reaction_plugin_id;
}
public ReactionBundle getReaction_bundle() {
return reaction_bundle;
}
public void setReaction_bundle(ReactionBundle reaction_bundle) {
this.reaction_bundle = reaction_bundle;
}
public PulseAction getPulse_action() {
return pulse_action;
}
public void setPulse_action(PulseAction pulse_action) {
this.pulse_action = pulse_action;
}
/*public OperationAction getOperation_action() {
return operation_action;
}
public void setOperation_action(OperationAction operation_action) {
this.operation_action = operation_action;
}*/
public ReactionAction getReaction_action() {
return reaction_action;
}
public void setReaction_action(ReactionAction reaction_action) {
this.reaction_action = reaction_action;
}
public String getNotificationTitle() {
if (getNotification().containsKey("title")){
return getNotification().get("title").toString();
}
return null;
}
public String getNotificationBody() {
if (getNotification().containsKey("body")){
return getNotification().get("body").toString();
}
return null;
}
/**
* Sends tracking on a recipe. Lets choose the notified status.
* @param context the app context.
* @param recipeId the recipe identifier.
* @param trackingEvent notified status to send. Can either be NO
* @throws JSONException
*/
public static void sendTracking(Context context, String recipeId, String trackingEvent) throws JSONException {
    // Serialize the tracking event into a JSON-API body, then POST it to the
    // trackings endpoint via the network utility (asynchronous delivery is handled there).
    String trackingBody = buildTrackingBody(context, recipeId, trackingEvent);
    Uri url = Uri.parse(TRACKINGS_PATH).buildUpon().build();
    NearNetworkUtil.sendTrack(context, url.toString(), trackingBody);
}
/**
* Builds the tracking send request body.
* @param context the app context.
* @param recipeId the recipe identifier.
* @param trackingEvent the tracking event string.
* @return the http body string.
* @throws JSONException
*/
private static String buildTrackingBody(Context context, String recipeId, String trackingEvent) throws JSONException {
    // Identifiers come from the app-wide configuration singleton.
    String profileId = GlobalConfig.getInstance(context).getProfileId();
    String appId = GlobalConfig.getInstance(context).getAppId();
    String installationId = GlobalConfig.getInstance(context).getInstallationId();
    // recipe, profile and installation ids are mandatory; bail out early if any is missing.
    if (recipeId == null ||
            profileId == null ||
            installationId == null) {
        throw new JSONException("missing data");
    }
    // ISO-8601-like timestamp. NOTE(review): no explicit Locale or timezone is set,
    // so the device defaults are used — confirm the backend accepts that.
    DateFormat sdf = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSZ");
    Date now = new Date(System.currentTimeMillis());
    String formattedDate = sdf.format(now);
    HashMap<String, Object> attributes = new HashMap<>();
    attributes.put("profile_id", profileId);
    attributes.put("installation_id", installationId);
    attributes.put("app_id", appId);
    attributes.put("recipe_id", recipeId);
    attributes.put("event", trackingEvent);
    attributes.put("tracked_at", formattedDate);
    // Wrap the attribute map in a JSON-API resource of type "trackings".
    return NearJsonAPIUtils.toJsonAPI("trackings", attributes);
}
/** @return whether this recipe's pulse action reports itself as foreground. */
public boolean isForegroundRecipe() {
    PulseAction action = getPulse_action();
    return action.isForeground();
}
/**
* Check if the recipe is valid according to the scheduling information.
* @return the validity of the recipe.
*/
/**
 * Checks whether the recipe is valid according to its scheduling information.
 *
 * @param now the instant to validate against the schedule.
 * @return true when no scheduling is set, or when the date range, timetable and
 *     day-of-week constraints are all satisfied.
 */
public boolean isScheduledNow(Calendar now) {
    if (scheduling == null) {
        return true;
    }
    return isDateValid(now) && isTimetableValid(now) && isDaysValid(now);
}
/**
* Check if the date range is valid.
* @return if the date range is respected.
*/
/**
 * Checks whether {@code now} falls inside the recipe's scheduled date range.
 * Both bounds are inclusive: the range runs from the start of the "from" day
 * through the end of the "to" day.
 *
 * @param now the instant to validate against the schedule.
 * @return true if the date range is respected (or no range is set); false on a parse error.
 */
private boolean isDateValid(Calendar now) {
    // Cast to the interface rather than gson's LinkedTreeMap: any Map implementation works.
    Map<String, Object> date = (Map<String, Object>) scheduling.get("date");
    if (date == null) return true;
    String fromDateString = (String) date.get("from");
    String toDateString = (String) date.get("to");
    boolean valid = true;
    try {
        // Do not hoist the formatter to an instance field: SimpleDateFormat is not
        // thread-safe and sharing it corrupts parsing.
        SimpleDateFormat dateFormatter = new SimpleDateFormat("yyyy-MM-dd");
        if (fromDateString != null) {
            Calendar fromCalendarDate = Calendar.getInstance();
            fromCalendarDate.setTime(dateFormatter.parse(fromDateString));
            valid &= fromCalendarDate.before(now) || fromCalendarDate.equals(now);
        }
        if (toDateString != null) {
            Calendar toCalendarDate = Calendar.getInstance();
            toCalendarDate.setTime(dateFormatter.parse(toDateString));
            // The parsed date is at midnight; push it to the end of the day so the
            // whole "to" day still validates (previously the recipe became invalid
            // at 00:00 of its last scheduled day).
            toCalendarDate.set(Calendar.HOUR_OF_DAY, 23);
            toCalendarDate.set(Calendar.MINUTE, 59);
            toCalendarDate.set(Calendar.SECOND, 59);
            toCalendarDate.set(Calendar.MILLISECOND, 999);
            valid &= toCalendarDate.after(now) || toCalendarDate.equals(now);
        }
    } catch (ParseException e) {
        e.printStackTrace();
        return false;
    }
    return valid;
}
/**
* Check it the time range is valid.
* @return if the time range is respected.
*/
/**
 * Checks whether the time of day of {@code now} falls inside the scheduled timetable.
 * <p>
 * The comparison is done on seconds-of-day. The previous implementation compared
 * calendars built from parsed "HH:mm:ss" dates — which are anchored at the 1970
 * epoch — against the current instant, so the "from" check always passed and the
 * "to" check always failed; comparing only the time-of-day fixes that.
 *
 * @param now the instant to validate against the schedule.
 * @return true if the timetable is respected (or none is set); false on a parse error.
 */
private boolean isTimetableValid(Calendar now) {
    Map<String, Object> timetable = (Map<String, Object>) scheduling.get("timetable");
    if (timetable == null) return true;
    String fromHour = (String) timetable.get("from");
    String toHour = (String) timetable.get("to");
    boolean valid = true;
    try {
        // Not thread-safe: keep the formatter local.
        SimpleDateFormat timeFormatter = new SimpleDateFormat("HH:mm:ss");
        int nowSeconds = secondsOfDay(now);
        if (fromHour != null) {
            Calendar fromCalendar = Calendar.getInstance();
            fromCalendar.setTime(timeFormatter.parse(fromHour));
            valid &= secondsOfDay(fromCalendar) <= nowSeconds;
        }
        if (toHour != null) {
            Calendar toCalendar = Calendar.getInstance();
            toCalendar.setTime(timeFormatter.parse(toHour));
            valid &= nowSeconds <= secondsOfDay(toCalendar);
        }
    } catch (ParseException e) {
        e.printStackTrace();
        return false;
    }
    return valid;
}

/** @return the number of seconds elapsed since midnight for the given calendar. */
private static int secondsOfDay(Calendar calendar) {
    return calendar.get(Calendar.HOUR_OF_DAY) * 3600
            + calendar.get(Calendar.MINUTE) * 60
            + calendar.get(Calendar.SECOND);
}
/**
* Check if the days selection is valid.
* @return if the days selection is respected.
*/
/**
 * Checks whether today's day of week is among the scheduled days.
 *
 * @return true if no day restriction is set, or today's short day name is listed.
 */
private boolean isDaysValid(Calendar now) {
    List<String> days = (List<String>) scheduling.get("days");
    return days == null || days.contains(getTodaysDate(now));
}
/**
* Get today's day of week.
* @return the day of week in "EE" format e.g. Sat.
*/
/**
 * Formats the given instant as the three-letter English day-of-week name, e.g. "Sat".
 */
private String getTodaysDate(Calendar now) {
    SimpleDateFormat dayFormatter = new SimpleDateFormat("EE", Locale.ENGLISH);
    return dayFormatter.format(now.getTime());
}
/**
* Fill the intent with extras regarding the recipe and the parcelable content.
* @param intent the intent for the background event.
* @param recipe the recipe causing the intent.
* @param parcelable the content to be delivered.
*/
public static void fillIntentExtras(Intent intent, Recipe recipe, Parcelable parcelable) {
    intent.putExtra(NearItIntentConstants.RECIPE_ID, recipe.getId());
    // set notification text
    intent.putExtra(NearItIntentConstants.NOTIF_TITLE, recipe.getNotificationTitle());
    intent.putExtra(NearItIntentConstants.NOTIF_BODY, recipe.getNotificationBody());
    // set content to show
    intent.putExtra(NearItIntentConstants.CONTENT, parcelable);
    // set the content type so the app can cast the parcelable to correct content
    intent.putExtra(NearItIntentConstants.REACTION_PLUGIN, recipe.getReaction_plugin_id());
    intent.putExtra(NearItIntentConstants.REACTION_ACTION, recipe.getReaction_action().getId());
    // set the pulse info
    intent.putExtra(NearItIntentConstants.PULSE_PLUGIN, recipe.getPulse_plugin_id());
    intent.putExtra(NearItIntentConstants.PULSE_ACTION, recipe.getPulse_action().getId());
    // the pulse bundle may be absent: fall back to an empty id rather than NPE
    intent.putExtra(NearItIntentConstants.PULSE_BUNDLE, recipe.getPulse_bundle() != null ? recipe.getPulse_bundle().getId() : "");
}
}
|
setter method for scheduling
|
src/main/java/it/near/sdk/Recipes/Models/Recipe.java
|
setter method for scheduling
|
|
Java
|
mit
|
2f5068262a42ba731bd5cebb1b93b867263a7361
| 0
|
kmdouglass/Micro-Manager,kmdouglass/Micro-Manager
|
///////////////////////////////////////////////////////////////////////////////
//FILE: MyNumberUtils.java
//PROJECT: Micro-Manager
//SUBSYSTEM: ASIdiSPIM plugin
//-----------------------------------------------------------------------------
//
// AUTHOR: Nico Stuurman, Jon Daniels
//
// COPYRIGHT: University of California, San Francisco, & ASI, 2013
//
// LICENSE: This file is distributed under the BSD license.
// License text is included with the source distribution.
//
// This file is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty
// of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
//
// IN NO EVENT SHALL THE COPYRIGHT OWNER OR
// CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES.
package org.micromanager.asidispim.Utils;
import org.apache.commons.math3.util.Precision;
/**
* @author Jon
*/
public class MyNumberUtils {

    public MyNumberUtils() {
    }

    /**
     * Does an "equality" test on floats using the commons-math3 library with an
     * epsilon of 10 ulps (before r14313 a locally-defined epsilon of 1e-12 was used).
     *
     * @param f1 first value
     * @param f2 second value
     * @return true if the two values are within 10 ulps of each other
     */
    public static boolean floatsEqual(float f1, float f2) {
        // the redundant (int) cast on the literal has been removed
        return Precision.equals(f1, f2, 10);
    }

    /**
     * Rounds up to the nearest increment of 0.25, e.g. 0 stays 0 but 0.01 becomes 0.25.
     *
     * @param f the value to round, in ms
     * @return the smallest multiple of 0.25 that is greater than or equal to f
     */
    public static float ceilToQuarterMs(float f) {
        return (float) (Math.ceil(f * 4) / 4);
    }

    /**
     * Rounds to the <em>nearest</em> increment of 0.25 (not up; the previous javadoc
     * incorrectly said "rounds up"). Ties follow {@link Math#round}.
     *
     * @param f the value to round, in ms
     * @return the multiple of 0.25 closest to f
     */
    public static float roundToQuarterMs(float f) {
        return ((float) Math.round(f * 4)) / 4;
    }
}
|
plugins/ASIdiSPIM/src/org/micromanager/asidispim/Utils/MyNumberUtils.java
|
///////////////////////////////////////////////////////////////////////////////
//FILE: MyNumberUtils.java
//PROJECT: Micro-Manager
//SUBSYSTEM: ASIdiSPIM plugin
//-----------------------------------------------------------------------------
//
// AUTHOR: Nico Stuurman, Jon Daniels
//
// COPYRIGHT: University of California, San Francisco, & ASI, 2013
//
// LICENSE: This file is distributed under the BSD license.
// License text is included with the source distribution.
//
// This file is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty
// of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
//
// IN NO EVENT SHALL THE COPYRIGHT OWNER OR
// CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES.
package org.micromanager.asidispim.Utils;
/**
* @author Jon
*/
public class MyNumberUtils {

    public MyNumberUtils() {
    }

    /**
     * "Equality" test on floats using a locally-defined epsilon (1e-12).
     *
     * @param f1 first value
     * @param f2 second value
     * @return true if the two values differ by less than the epsilon
     */
    public static boolean floatsEqual(float f1, float f2) {
        final float EPS = (float) 1e-12;
        return Math.abs(f2 - f1) < EPS;
    }

    /**
     * "Rounds up" to the nearest increment of 0.25, e.g. 0 goes to 0 but 0.01 goes to 0.25.
     *
     * @param f the value to round, in ms
     * @return the smallest multiple of 0.25 that is greater than or equal to f
     */
    public static float ceilToQuarterMs(float f) {
        double quarters = Math.ceil(f * 4);
        return (float) (quarters / 4);
    }

    /**
     * Rounds to the nearest increment of 0.25.
     *
     * @param f the value to round, in ms
     * @return the multiple of 0.25 closest to f
     */
    public static float roundToQuarterMs(float f) {
        float nearestQuarters = Math.round(f * 4);
        return nearestQuarters / 4;
    }
}
|
ASIdiSPIM: replaced my floatsEqual function with apache commons math version
git-svn-id: 03a8048b5ee8463be5048a3801110fb50f378627@14315 d0ab736e-dc22-4aeb-8dc9-08def0aa14fd
|
plugins/ASIdiSPIM/src/org/micromanager/asidispim/Utils/MyNumberUtils.java
|
ASIdiSPIM: replaced my floatsEqual function with apache commons math version
|
|
Java
|
mit
|
2695a58eb7d88ed8cb3fe66f09991f6458bb4c8e
| 0
|
stachon/XChange,TSavo/XChange,Panchen/XChange,andre77/XChange,timmolter/XChange,douggie/XChange
|
package org.knowm.xchange.enigma;
import org.knowm.xchange.BaseExchange;
import org.knowm.xchange.ExchangeSpecification;
import org.knowm.xchange.enigma.service.EnigmaAccountService;
import org.knowm.xchange.enigma.service.EnigmaMarketDataService;
import org.knowm.xchange.enigma.service.EnigmaTradeService;
import org.knowm.xchange.utils.nonce.CurrentTimeNonceFactory;
import si.mazi.rescu.SynchronizedValueFactory;
public class EnigmaExchange extends BaseExchange {

    // Test environment endpoint; the production pair is kept below, commented out.
    private static final String SSL_URI = "https://test.enigma-securities.io/";
    private static final String HOST = "test.enigma-securities.io";
    // private static final String SSL_URI = "https://api.enigma-securities.io/";
    // private static final String HOST = "api.enigma-securities.io";

    // Nonce source shared by the authenticated services.
    private SynchronizedValueFactory<Long> nonceFactory = new CurrentTimeNonceFactory();

    /** Wires up the market-data, trade and account services for this exchange. */
    @Override
    protected void initServices() {
        this.marketDataService = new EnigmaMarketDataService(this);
        this.tradeService = new EnigmaTradeService(this);
        this.accountService = new EnigmaAccountService(this);
    }

    /**
     * Builds the default connection settings for Enigma Securities.
     *
     * @return a specification pointing at the HTTPS endpoint above, port 443, with
     *     remote metadata loading disabled.
     */
    @Override
    public ExchangeSpecification getDefaultExchangeSpecification() {
        ExchangeSpecification exchangeSpecification =
                new ExchangeSpecification(this.getClass().getCanonicalName());
        exchangeSpecification.setSslUri(SSL_URI);
        exchangeSpecification.setHost(HOST);
        exchangeSpecification.setPort(443);
        exchangeSpecification.setExchangeName("Enigma");
        exchangeSpecification.setExchangeDescription("Enigma Securities REST API integration");
        exchangeSpecification.setShouldLoadRemoteMetaData(false);
        return exchangeSpecification;
    }

    @Override
    public SynchronizedValueFactory<Long> getNonceFactory() {
        return nonceFactory;
    }
}
|
xchange-enigma/src/main/java/org/knowm/xchange/enigma/EnigmaExchange.java
|
package org.knowm.xchange.enigma;
import org.knowm.xchange.BaseExchange;
import org.knowm.xchange.ExchangeSpecification;
import org.knowm.xchange.enigma.dto.account.EnigmaCredentials;
import org.knowm.xchange.enigma.dto.account.EnigmaLoginResponse;
import org.knowm.xchange.enigma.service.EnigmaAccountService;
import org.knowm.xchange.enigma.service.EnigmaMarketDataService;
import org.knowm.xchange.enigma.service.EnigmaTradeService;
import org.knowm.xchange.utils.nonce.CurrentTimeNonceFactory;
import si.mazi.rescu.SynchronizedValueFactory;
import java.io.IOException;
public class EnigmaExchange extends BaseExchange {

    // Test environment endpoint; the production values are kept below, commented out.
    private static final String SSL_URI = "https://test.enigma-securities.io/";
    // Fixed: HOST previously pointed at the production server while SSL_URI pointed
    // at the test environment; the two must reference the same host.
    private static final String HOST = "test.enigma-securities.io";
    // private static final String SSL_URI = "https://api.enigma-securities.io/";
    // private static final String HOST = "api.enigma-securities.io";

    // Nonce source shared by the authenticated services.
    private SynchronizedValueFactory<Long> nonceFactory = new CurrentTimeNonceFactory();

    /** Wires up the market-data, trade and account services for this exchange. */
    @Override
    protected void initServices() {
        this.marketDataService = new EnigmaMarketDataService(this);
        this.tradeService = new EnigmaTradeService(this);
        this.accountService = new EnigmaAccountService(this);
    }

    /**
     * Builds the default connection settings for Enigma Securities.
     *
     * @return a specification pointing at the HTTPS endpoint above, port 443, with
     *     remote metadata loading disabled.
     */
    @Override
    public ExchangeSpecification getDefaultExchangeSpecification() {
        ExchangeSpecification exchangeSpecification =
                new ExchangeSpecification(this.getClass().getCanonicalName());
        exchangeSpecification.setSslUri(SSL_URI);
        exchangeSpecification.setHost(HOST);
        exchangeSpecification.setPort(443);
        exchangeSpecification.setExchangeName("Enigma");
        exchangeSpecification.setExchangeDescription("Enigma Securities REST API integration");
        exchangeSpecification.setShouldLoadRemoteMetaData(false);
        return exchangeSpecification;
    }

    @Override
    public SynchronizedValueFactory<Long> getNonceFactory() {
        return nonceFactory;
    }
}
|
[ENIGMA] Set test api url
|
xchange-enigma/src/main/java/org/knowm/xchange/enigma/EnigmaExchange.java
|
[ENIGMA] Set test api url
|
|
Java
|
mit
|
7752aa6bcfbdb958032afc5df9db2fa41d77b745
| 0
|
aterai/java-swing-tips,aterai/java-swing-tips,aterai/java-swing-tips,aterai/java-swing-tips
|
// -*- mode:java; encoding:utf-8 -*-
// vim:set fileencoding=utf-8:
// @homepage@
package example;
import java.awt.*;
import java.awt.event.ItemEvent;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Objects;
import javax.swing.*;
import javax.swing.plaf.basic.BasicComboPopup;
import javax.swing.plaf.basic.ComboPopup;
import javax.swing.plaf.metal.MetalComboBoxUI;
import javax.swing.table.DefaultTableModel;
import javax.swing.table.TableCellRenderer;
// Demo panel: a combo box whose dropdown is a JTable of ISO 216 "A" paper sizes.
public final class MainPanel extends JPanel {
  private MainPanel() {
    super(new BorderLayout());
    // Row data: size name, width (mm), height (mm).
    List<List<Object>> aseries = new ArrayList<>();
    aseries.add(Arrays.asList("A1", 594, 841));
    aseries.add(Arrays.asList("A2", 420, 594));
    aseries.add(Arrays.asList("A3", 297, 420));
    aseries.add(Arrays.asList("A4", 210, 297));
    aseries.add(Arrays.asList("A5", 148, 210));
    aseries.add(Arrays.asList("A6", 105, 148));
    String[] columns = {"A series", "width", "height"};
    // Read-only fields mirroring the currently selected row.
    JTextField wtf = new JTextField(5);
    wtf.setEditable(false);
    JTextField htf = new JTextField(5);
    htf.setEditable(false);
    DefaultTableModel model = new DefaultTableModel(null, columns) {
      @Override public Class<?> getColumnClass(int column) {
        // Columns 1 and 2 hold the numeric dimensions.
        return column == 1 || column == 2 ? Integer.class : String.class;
      }

      @Override public boolean isCellEditable(int row, int column) {
        return false;
      }
    };
    DropdownTableComboBox<List<Object>> combo = new DropdownTableComboBox<>(aseries, model);
    // combo.addActionListener(e -> {
    //   List<Object> rowData = combo.getSelectedRow();
    //   wtf.setText(Objects.toString(rowData.get(1)));
    //   htf.setText(Objects.toString(rowData.get(2)));
    // });
    // Update the width/height fields whenever the selection changes.
    combo.addItemListener(e -> {
      if (e.getStateChange() == ItemEvent.SELECTED) {
        List<Object> rowData = combo.getSelectedRow();
        wtf.setText(Objects.toString(rowData.get(1)));
        htf.setText(Objects.toString(rowData.get(2)));
      }
    });
    // Render only the size name in the combo's collapsed cell.
    ListCellRenderer<? super List<Object>> renderer = combo.getRenderer();
    combo.setRenderer((list, value, index, isSelected, cellHasFocus) -> {
      JLabel c = (JLabel) renderer.getListCellRendererComponent(list, value, index, isSelected, cellHasFocus);
      c.setOpaque(true);
      if (isSelected) {
        c.setBackground(list.getSelectionBackground());
        c.setForeground(list.getSelectionForeground());
      } else {
        c.setBackground(list.getBackground());
        c.setForeground(list.getForeground());
      }
      c.setText(Objects.toString(value.get(0), ""));
      return c;
    });
    // Preselect A4 once the component hierarchy is realized.
    EventQueue.invokeLater(() -> combo.setSelectedIndex(3));
    Box box = Box.createHorizontalBox();
    box.setBorder(BorderFactory.createEmptyBorder(10, 10, 10, 10));
    box.add(combo);
    box.add(Box.createHorizontalStrut(15));
    box.add(new JLabel("width: "));
    box.add(wtf);
    box.add(Box.createHorizontalStrut(5));
    box.add(new JLabel("height: "));
    box.add(htf);
    box.add(Box.createHorizontalGlue());
    add(box, BorderLayout.NORTH);
    setPreferredSize(new Dimension(320, 240));
  }

  public static void main(String[] args) {
    EventQueue.invokeLater(MainPanel::createAndShowGui);
  }

  private static void createAndShowGui() {
    try {
      UIManager.setLookAndFeel(UIManager.getSystemLookAndFeelClassName());
    } catch (ClassNotFoundException | InstantiationException | IllegalAccessException | UnsupportedLookAndFeelException ex) {
      ex.printStackTrace();
      Toolkit.getDefaultToolkit().beep();
    }
    JFrame frame = new JFrame("@title@");
    frame.setDefaultCloseOperation(WindowConstants.EXIT_ON_CLOSE);
    frame.getContentPane().add(new MainPanel());
    frame.pack();
    frame.setLocationRelativeTo(null);
    frame.setVisible(true);
  }
}
/**
 * A combo box whose popup is a JTable; each combo item corresponds to one table row.
 */
class DropdownTableComboBox<E extends List<Object>> extends JComboBox<E> {
  private final JTable table = new JTable() {
    private transient HighlightListener mouseHandler;

    @Override public Component prepareRenderer(TableCellRenderer renderer, int row, int column) {
      // Pink for the hovered row, cyan for the selected row, white otherwise.
      Component c = super.prepareRenderer(renderer, row, column);
      c.setForeground(Color.BLACK);
      if (mouseHandler != null && mouseHandler.isHighlightTableRow(row)) {
        c.setBackground(new Color(0xFF_C8_C8));
      } else if (isRowSelected(row)) {
        c.setBackground(Color.CYAN);
      } else {
        c.setBackground(Color.WHITE);
      }
      return c;
    }

    @Override public void updateUI() {
      // Re-install the hover listener across LaF updates to avoid duplicates.
      removeMouseListener(mouseHandler);
      removeMouseMotionListener(mouseHandler);
      super.updateUI();
      mouseHandler = new HighlightListener();
      addMouseListener(mouseHandler);
      addMouseMotionListener(mouseHandler);
      getTableHeader().setReorderingAllowed(false);
    }
  };
  // Own copy of the row data; decoupled from the list supplied by the caller.
  private final List<E> list = new ArrayList<>();

  protected DropdownTableComboBox(List<E> list, DefaultTableModel model) {
    super();
    this.list.addAll(list);
    table.setModel(model);
    list.forEach(this::addItem);
    list.forEach(v -> model.addRow(v.toArray(new Object[0])));
  }

  @Override public void updateUI() {
    super.updateUI();
    // Defer installing the custom UI until the combo box is fully constructed.
    EventQueue.invokeLater(() -> {
      setUI(new MetalComboBoxUI() {
        @Override protected ComboPopup createPopup() {
          return new ComboTablePopup(comboBox, table);
        }
      });
      setEditable(false);
    });
  }

  /** @return the data row backing the currently selected combo item */
  public List<Object> getSelectedRow() {
    return list.get(getSelectedIndex());
  }
}
/**
 * A combo-box popup that shows a JTable (with header) instead of the usual JList.
 * Selection is kept in sync in both directions between the combo box and the table.
 */
class ComboTablePopup extends BasicComboPopup {
  private final JTable table;
  private final JScrollPane scroll;

  protected ComboTablePopup(JComboBox<?> combo, JTable table) {
    super(combo);
    this.table = table;
    // Table row selection drives the combo selection...
    ListSelectionModel sm = table.getSelectionModel();
    sm.setSelectionMode(ListSelectionModel.SINGLE_SELECTION);
    sm.addListSelectionListener(e -> combo.setSelectedIndex(table.getSelectedRow()));
    // ...and combo selection drives the table row selection.
    combo.addItemListener(e -> {
      if (e.getStateChange() == ItemEvent.SELECTED) {
        setRowSelection(combo.getSelectedIndex());
      }
    });
    // Clicking a row commits the choice and closes the popup.
    table.addMouseListener(new MouseAdapter() {
      @Override public void mousePressed(MouseEvent e) {
        combo.setSelectedIndex(table.rowAtPoint(e.getPoint()));
        setVisible(false);
      }
    });
    scroll = new JScrollPane(table);
    setBorder(BorderFactory.createEmptyBorder());
  }

  @Override public void show() {
    if (isEnabled()) {
      // Size the popup to fit the whole table plus its header.
      Insets ins = scroll.getInsets();
      int tableHeight = table.getPreferredSize().height;
      int headerHeight = table.getTableHeader().getPreferredSize().height;
      scroll.setPreferredSize(new Dimension(240, tableHeight + headerHeight + ins.top + ins.bottom));
      super.removeAll();
      super.add(scroll);
      setRowSelection(comboBox.getSelectedIndex());
      super.show(comboBox, 0, comboBox.getBounds().height);
    }
  }

  // Selects the given row (if any) and scrolls it into view.
  private void setRowSelection(int index) {
    if (index != -1) {
      table.setRowSelectionInterval(index, index);
      table.scrollRectToVisible(table.getCellRect(index, 0, true));
    }
  }
}
/**
 * Tracks which table row is under the mouse cursor so the renderer can highlight it.
 */
class HighlightListener extends MouseAdapter {
  private int viewRowIdx = -1;

  /** @return whether the given view row is the one currently under the cursor */
  public boolean isHighlightTableRow(int row) {
    return row == viewRowIdx;
  }

  // Remembers the hovered row and triggers a repaint so the highlight updates.
  private void setHighlightTableCell(MouseEvent e) {
    Component c = e.getComponent();
    if (c instanceof JTable) {
      JTable table = (JTable) c;
      viewRowIdx = table.rowAtPoint(e.getPoint());
      table.repaint();
    }
  }

  @Override public void mouseMoved(MouseEvent e) {
    setHighlightTableCell(e);
  }

  @Override public void mouseDragged(MouseEvent e) {
    setHighlightTableCell(e);
  }

  @Override public void mouseExited(MouseEvent e) {
    // Leaving the table clears the highlight.
    viewRowIdx = -1;
    e.getComponent().repaint();
  }
}
|
DropdownTableComboBox/src/java/example/MainPanel.java
|
// -*- mode:java; encoding:utf-8 -*-
// vim:set fileencoding=utf-8:
// @homepage@
package example;
import java.awt.*;
import java.awt.event.ItemEvent;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Objects;
import javax.swing.*;
import javax.swing.plaf.basic.BasicComboPopup;
import javax.swing.plaf.basic.ComboPopup;
import javax.swing.plaf.metal.MetalComboBoxUI;
import javax.swing.table.DefaultTableModel;
import javax.swing.table.TableCellRenderer;
// Demo panel: a combo box whose dropdown is a JTable of ISO 216 "A" paper sizes.
public final class MainPanel extends JPanel {
  private MainPanel() {
    super(new BorderLayout());
    // Row data: size name, width (mm), height (mm).
    List<List<Object>> aseries = new ArrayList<>();
    aseries.add(Arrays.asList("A1", 594, 841));
    aseries.add(Arrays.asList("A2", 420, 594));
    aseries.add(Arrays.asList("A3", 297, 420));
    aseries.add(Arrays.asList("A4", 210, 297));
    aseries.add(Arrays.asList("A5", 148, 210));
    aseries.add(Arrays.asList("A6", 105, 148));
    String[] columns = {"A series", "width", "height"};
    // Read-only fields mirroring the currently selected row.
    JTextField wtf = new JTextField(5);
    wtf.setEditable(false);
    JTextField htf = new JTextField(5);
    htf.setEditable(false);
    DefaultTableModel model = new DefaultTableModel(null, columns) {
      @Override public Class<?> getColumnClass(int column) {
        // Columns 1 and 2 hold the numeric dimensions.
        return column == 1 || column == 2 ? Integer.class : String.class;
      }

      @Override public boolean isCellEditable(int row, int column) {
        return false;
      }
    };
    DropdownTableComboBox<List<Object>> combo = new DropdownTableComboBox<>(aseries, model);
    // combo.addActionListener(e -> {
    //   List<Object> rowData = combo.getSelectedRow();
    //   wtf.setText(Objects.toString(rowData.get(1)));
    //   htf.setText(Objects.toString(rowData.get(2)));
    // });
    // Update the width/height fields whenever the selection changes.
    combo.addItemListener(e -> {
      if (e.getStateChange() == ItemEvent.SELECTED) {
        List<Object> rowData = combo.getSelectedRow();
        wtf.setText(Objects.toString(rowData.get(1)));
        htf.setText(Objects.toString(rowData.get(2)));
      }
    });
    // Render only the size name in the combo's collapsed cell.
    ListCellRenderer<? super List<Object>> renderer = combo.getRenderer();
    combo.setRenderer((list, value, index, isSelected, cellHasFocus) -> {
      JLabel c = (JLabel) renderer.getListCellRendererComponent(list, value, index, isSelected, cellHasFocus);
      c.setOpaque(true);
      if (isSelected) {
        c.setBackground(list.getSelectionBackground());
        c.setForeground(list.getSelectionForeground());
      } else {
        c.setBackground(list.getBackground());
        c.setForeground(list.getForeground());
      }
      c.setText(Objects.toString(value.get(0), ""));
      return c;
    });
    // Preselect A4 once the component hierarchy is realized.
    EventQueue.invokeLater(() -> combo.setSelectedIndex(3));
    Box box = Box.createHorizontalBox();
    box.setBorder(BorderFactory.createEmptyBorder(10, 10, 10, 10));
    box.add(combo);
    box.add(Box.createHorizontalStrut(15));
    box.add(new JLabel("width: "));
    box.add(wtf);
    box.add(Box.createHorizontalStrut(5));
    box.add(new JLabel("height: "));
    box.add(htf);
    box.add(Box.createHorizontalGlue());
    add(box, BorderLayout.NORTH);
    setPreferredSize(new Dimension(320, 240));
  }

  public static void main(String[] args) {
    EventQueue.invokeLater(MainPanel::createAndShowGui);
  }

  private static void createAndShowGui() {
    try {
      UIManager.setLookAndFeel(UIManager.getSystemLookAndFeelClassName());
    } catch (ClassNotFoundException | InstantiationException | IllegalAccessException | UnsupportedLookAndFeelException ex) {
      ex.printStackTrace();
      Toolkit.getDefaultToolkit().beep();
    }
    JFrame frame = new JFrame("@title@");
    frame.setDefaultCloseOperation(WindowConstants.EXIT_ON_CLOSE);
    frame.getContentPane().add(new MainPanel());
    frame.pack();
    frame.setLocationRelativeTo(null);
    frame.setVisible(true);
  }
}
/**
 * A combo box whose popup is a JTable; each combo item corresponds to one table row.
 */
class DropdownTableComboBox<E extends List<Object>> extends JComboBox<E> {
  private final JTable table = new JTable() {
    private transient HighlightListener mouseHandler;

    @Override public Component prepareRenderer(TableCellRenderer renderer, int row, int column) {
      // Pink for the hovered row, cyan for the selected row, white otherwise.
      Component c = super.prepareRenderer(renderer, row, column);
      c.setForeground(Color.BLACK);
      if (mouseHandler != null && mouseHandler.isHighlightTableRow(row)) {
        c.setBackground(new Color(0xFF_C8_C8));
      } else if (isRowSelected(row)) {
        c.setBackground(Color.CYAN);
      } else {
        c.setBackground(Color.WHITE);
      }
      return c;
    }

    @Override public void updateUI() {
      // Re-install the hover listener across LaF updates to avoid duplicates.
      removeMouseListener(mouseHandler);
      removeMouseMotionListener(mouseHandler);
      super.updateUI();
      mouseHandler = new HighlightListener();
      addMouseListener(mouseHandler);
      addMouseMotionListener(mouseHandler);
      getTableHeader().setReorderingAllowed(false);
    }
  };
  // Own the row data with a defensive copy. Previously the externally supplied
  // list was stored directly in a transient final field, which both triggered
  // SpotBugs SE_TRANSIENT_FIELD_NOT_RESTORED and let callers mutate our state.
  private final List<E> list = new ArrayList<>();

  protected DropdownTableComboBox(List<E> list, DefaultTableModel model) {
    super();
    this.list.addAll(list);
    table.setModel(model);
    list.forEach(this::addItem);
    list.forEach(v -> model.addRow(v.toArray(new Object[0])));
  }

  @Override public void updateUI() {
    super.updateUI();
    // Defer installing the custom UI until the combo box is fully constructed.
    EventQueue.invokeLater(() -> {
      setUI(new MetalComboBoxUI() {
        @Override protected ComboPopup createPopup() {
          return new ComboTablePopup(comboBox, table);
        }
      });
      setEditable(false);
    });
  }

  /** @return the data row backing the currently selected combo item */
  public List<Object> getSelectedRow() {
    return list.get(getSelectedIndex());
  }
}
/**
 * A combo-box popup that shows a JTable (with header) instead of the usual JList.
 * Selection is kept in sync in both directions between the combo box and the table.
 */
class ComboTablePopup extends BasicComboPopup {
  private final JTable table;
  private final JScrollPane scroll;

  protected ComboTablePopup(JComboBox<?> combo, JTable table) {
    super(combo);
    this.table = table;
    // Table row selection drives the combo selection...
    ListSelectionModel sm = table.getSelectionModel();
    sm.setSelectionMode(ListSelectionModel.SINGLE_SELECTION);
    sm.addListSelectionListener(e -> combo.setSelectedIndex(table.getSelectedRow()));
    // ...and combo selection drives the table row selection.
    combo.addItemListener(e -> {
      if (e.getStateChange() == ItemEvent.SELECTED) {
        setRowSelection(combo.getSelectedIndex());
      }
    });
    // Clicking a row commits the choice and closes the popup.
    table.addMouseListener(new MouseAdapter() {
      @Override public void mousePressed(MouseEvent e) {
        combo.setSelectedIndex(table.rowAtPoint(e.getPoint()));
        setVisible(false);
      }
    });
    scroll = new JScrollPane(table);
    setBorder(BorderFactory.createEmptyBorder());
  }

  @Override public void show() {
    if (isEnabled()) {
      // Size the popup to fit the whole table plus its header.
      Insets ins = scroll.getInsets();
      int tableHeight = table.getPreferredSize().height;
      int headerHeight = table.getTableHeader().getPreferredSize().height;
      scroll.setPreferredSize(new Dimension(240, tableHeight + headerHeight + ins.top + ins.bottom));
      super.removeAll();
      super.add(scroll);
      setRowSelection(comboBox.getSelectedIndex());
      super.show(comboBox, 0, comboBox.getBounds().height);
    }
  }

  // Selects the given row (if any) and scrolls it into view.
  private void setRowSelection(int index) {
    if (index != -1) {
      table.setRowSelectionInterval(index, index);
      table.scrollRectToVisible(table.getCellRect(index, 0, true));
    }
  }
}
/**
 * Tracks which table row is under the mouse cursor so the renderer can highlight it.
 */
class HighlightListener extends MouseAdapter {
  // View index of the hovered row; -1 means no row is hovered.
  private int viewRowIdx = -1;

  /** @return whether the given view row is the one currently under the cursor */
  public boolean isHighlightTableRow(int row) {
    return this.viewRowIdx == row;
  }

  // Remembers the hovered row and triggers a repaint so the highlight updates.
  private void setHighlightTableCell(MouseEvent e) {
    Point pt = e.getPoint();
    Component c = e.getComponent();
    if (c instanceof JTable) {
      viewRowIdx = ((JTable) c).rowAtPoint(pt);
      c.repaint();
    }
  }

  @Override public void mouseMoved(MouseEvent e) {
    setHighlightTableCell(e);
  }

  @Override public void mouseDragged(MouseEvent e) {
    setHighlightTableCell(e);
  }

  @Override public void mouseExited(MouseEvent e) {
    // Leaving the table clears the highlight.
    viewRowIdx = -1;
    e.getComponent().repaint();
  }
}
|
Spotbugs: SE_TRANSIENT_FIELD_NOT_RESTORED
|
DropdownTableComboBox/src/java/example/MainPanel.java
|
Spotbugs: SE_TRANSIENT_FIELD_NOT_RESTORED
|
|
Java
|
mpl-2.0
|
73b9ff82211864a20c8fcd0280333c84afb2a0d9
| 0
|
iLoop2/openmrs-core,trsorsimoII/openmrs-core,hoquangtruong/TestMylyn,kigsmtua/openmrs-core,kabariyamilind/openMRSDEV,jvena1/openmrs-core,Negatu/openmrs-core,kristopherschmidt/openmrs-core,andyvand/OpenMRS,ern2/openmrs-core,maany/openmrs-core,ern2/openmrs-core,jcantu1988/openmrs-core,nilusi/Legacy-UI,maekstr/openmrs-core,hoquangtruong/TestMylyn,naraink/openmrs-core,rbtracker/openmrs-core,alexwind26/openmrs-core,AbhijitParate/openmrs-core,vinayvenu/openmrs-core,kristopherschmidt/openmrs-core,kckc/openmrs-core,sravanthi17/openmrs-core,joansmith/openmrs-core,Openmrs-joel/openmrs-core,shiangree/openmrs-core,siddharthkhabia/openmrs-core,aboutdata/openmrs-core,pselle/openmrs-core,prisamuel/openmrs-core,michaelhofer/openmrs-core,sintjuri/openmrs-core,koskedk/openmrs-core,lbl52001/openmrs-core,nilusi/Legacy-UI,spereverziev/openmrs-core,asifur77/openmrs,prisamuel/openmrs-core,sadhanvejella/openmrs,hoquangtruong/TestMylyn,macorrales/openmrs-core,sadhanvejella/openmrs,Ch3ck/openmrs-core,jamesfeshner/openmrs-module,ssmusoke/openmrs-core,lbl52001/openmrs-core,ern2/openmrs-core,WANeves/openmrs-core,alexwind26/openmrs-core,geoff-wasilwa/openmrs-core,lbl52001/openmrs-core,prisamuel/openmrs-core,ldf92/openmrs-core,aj-jaswanth/openmrs-core,alexwind26/openmrs-core,iLoop2/openmrs-core,jembi/openmrs-core,MitchellBot/openmrs-core,lilo2k/openmrs-core,chethandeshpande/openmrs-core,MitchellBot/openmrs-core,spereverziev/openmrs-core,dlahn/openmrs-core,vinayvenu/openmrs-core,joansmith/openmrs-core,andyvand/OpenMRS,lilo2k/openmrs-core,iLoop2/openmrs-core,trsorsimoII/openmrs-core,asifur77/openmrs,prisamuel/openmrs-core,aboutdata/openmrs-core,sintjuri/openmrs-core,geoff-wasilwa/openmrs-core,Openmrs-joel/openmrs-core,jcantu1988/openmrs-core,lbl52001/openmrs-core,jcantu1988/openmrs-core,kabariyamilind/openMRSDEV,chethandeshpande/openmrs-core,sravanthi17/openmrs-core,vinayvenu/openmrs-core,kristopherschmidt/openmrs-core,ldf92/openmrs-core,MuhammadSafwan/Stop-Button-Ability,sintjuri/openmrs-core,psel
le/openmrs-core,asifur77/openmrs,pselle/openmrs-core,foolchan2556/openmrs-core,asifur77/openmrs,spereverziev/openmrs-core,MuhammadSafwan/Stop-Button-Ability,lbl52001/openmrs-core,iLoop2/openmrs-core,kristopherschmidt/openmrs-core,preethi29/openmrs-core,maekstr/openmrs-core,maekstr/openmrs-core,vinayvenu/openmrs-core,aboutdata/openmrs-core,naraink/openmrs-core,spereverziev/openmrs-core,macorrales/openmrs-core,siddharthkhabia/openmrs-core,sintjuri/openmrs-core,preethi29/openmrs-core,kristopherschmidt/openmrs-core,rbtracker/openmrs-core,sadhanvejella/openmrs,hoquangtruong/TestMylyn,jcantu1988/openmrs-core,prisamuel/openmrs-core,iLoop2/openmrs-core,kigsmtua/openmrs-core,Ch3ck/openmrs-core,naraink/openmrs-core,jamesfeshner/openmrs-module,donaldgavis/openmrs-core,Negatu/openmrs-core,geoff-wasilwa/openmrs-core,AbhijitParate/openmrs-core,trsorsimoII/openmrs-core,ssmusoke/openmrs-core,shiangree/openmrs-core,joansmith/openmrs-core,prisamuel/openmrs-core,rbtracker/openmrs-core,alexei-grigoriev/openmrs-core,lilo2k/openmrs-core,aj-jaswanth/openmrs-core,dcmul/openmrs-core,jvena1/openmrs-core,foolchan2556/openmrs-core,WANeves/openmrs-core,dlahn/openmrs-core,spereverziev/openmrs-core,naraink/openmrs-core,donaldgavis/openmrs-core,kckc/openmrs-core,milankarunarathne/openmrs-core,siddharthkhabia/openmrs-core,foolchan2556/openmrs-core,alexei-grigoriev/openmrs-core,aj-jaswanth/openmrs-core,aj-jaswanth/openmrs-core,dcmul/openmrs-core,asifur77/openmrs,alexwind26/openmrs-core,trsorsimoII/openmrs-core,donaldgavis/openmrs-core,vinayvenu/openmrs-core,jembi/openmrs-core,lilo2k/openmrs-core,jembi/openmrs-core,dcmul/openmrs-core,michaelhofer/openmrs-core,alexei-grigoriev/openmrs-core,Openmrs-joel/openmrs-core,shiangree/openmrs-core,geoff-wasilwa/openmrs-core,iLoop2/openmrs-core,maany/openmrs-core,MuhammadSafwan/Stop-Button-Ability,sravanthi17/openmrs-core,ssmusoke/openmrs-core,jvena1/openmrs-core,AbhijitParate/openmrs-core,sintjuri/openmrs-core,dlahn/openmrs-core,lbl52001/openmrs-core,shiangree/
openmrs-core,lilo2k/openmrs-core,chethandeshpande/openmrs-core,rbtracker/openmrs-core,andyvand/OpenMRS,WANeves/openmrs-core,donaldgavis/openmrs-core,naraink/openmrs-core,siddharthkhabia/openmrs-core,ldf92/openmrs-core,geoff-wasilwa/openmrs-core,kabariyamilind/openMRSDEV,ssmusoke/openmrs-core,joansmith/openmrs-core,nilusi/Legacy-UI,Openmrs-joel/openmrs-core,ldf92/openmrs-core,kckc/openmrs-core,ldf92/openmrs-core,foolchan2556/openmrs-core,Ch3ck/openmrs-core,pselle/openmrs-core,Negatu/openmrs-core,WANeves/openmrs-core,pselle/openmrs-core,MitchellBot/openmrs-core,ssmusoke/openmrs-core,preethi29/openmrs-core,kigsmtua/openmrs-core,sadhanvejella/openmrs,dlahn/openmrs-core,dcmul/openmrs-core,naraink/openmrs-core,kckc/openmrs-core,milankarunarathne/openmrs-core,aboutdata/openmrs-core,macorrales/openmrs-core,sravanthi17/openmrs-core,Negatu/openmrs-core,siddharthkhabia/openmrs-core,sintjuri/openmrs-core,donaldgavis/openmrs-core,maany/openmrs-core,chethandeshpande/openmrs-core,Negatu/openmrs-core,spereverziev/openmrs-core,kigsmtua/openmrs-core,chethandeshpande/openmrs-core,MitchellBot/openmrs-core,WANeves/openmrs-core,Ch3ck/openmrs-core,rbtracker/openmrs-core,jamesfeshner/openmrs-module,kigsmtua/openmrs-core,ern2/openmrs-core,dcmul/openmrs-core,dcmul/openmrs-core,AbhijitParate/openmrs-core,jembi/openmrs-core,aboutdata/openmrs-core,michaelhofer/openmrs-core,michaelhofer/openmrs-core,milankarunarathne/openmrs-core,WANeves/openmrs-core,AbhijitParate/openmrs-core,MuhammadSafwan/Stop-Button-Ability,alexwind26/openmrs-core,jvena1/openmrs-core,trsorsimoII/openmrs-core,macorrales/openmrs-core,andyvand/OpenMRS,siddharthkhabia/openmrs-core,jembi/openmrs-core,Ch3ck/openmrs-core,foolchan2556/openmrs-core,dlahn/openmrs-core,hoquangtruong/TestMylyn,preethi29/openmrs-core,koskedk/openmrs-core,aboutdata/openmrs-core,maany/openmrs-core,milankarunarathne/openmrs-core,jamesfeshner/openmrs-module,michaelhofer/openmrs-core,alexei-grigoriev/openmrs-core,kckc/openmrs-core,jamesfeshner/openmrs-module,
koskedk/openmrs-core,ern2/openmrs-core,shiangree/openmrs-core,shiangree/openmrs-core,milankarunarathne/openmrs-core,koskedk/openmrs-core,MuhammadSafwan/Stop-Button-Ability,maany/openmrs-core,jcantu1988/openmrs-core,hoquangtruong/TestMylyn,Negatu/openmrs-core,maekstr/openmrs-core,andyvand/OpenMRS,sadhanvejella/openmrs,maekstr/openmrs-core,jembi/openmrs-core,macorrales/openmrs-core,Openmrs-joel/openmrs-core,MuhammadSafwan/Stop-Button-Ability,AbhijitParate/openmrs-core,preethi29/openmrs-core,kabariyamilind/openMRSDEV,sravanthi17/openmrs-core,kigsmtua/openmrs-core,alexei-grigoriev/openmrs-core,lilo2k/openmrs-core,andyvand/OpenMRS,foolchan2556/openmrs-core,kckc/openmrs-core,milankarunarathne/openmrs-core,nilusi/Legacy-UI,nilusi/Legacy-UI,koskedk/openmrs-core,jvena1/openmrs-core,nilusi/Legacy-UI,kabariyamilind/openMRSDEV,pselle/openmrs-core,MitchellBot/openmrs-core,koskedk/openmrs-core,maekstr/openmrs-core,sadhanvejella/openmrs,alexei-grigoriev/openmrs-core,aj-jaswanth/openmrs-core,joansmith/openmrs-core
|
/**
* The contents of this file are subject to the OpenMRS Public License
* Version 1.0 (the "License"); you may not use this file except in
* compliance with the License. You may obtain a copy of the License at
* http://license.openmrs.org
*
* Software distributed under the License is distributed on an "AS IS"
* basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the
* License for the specific language governing rights and limitations
* under the License.
*
* Copyright (C) OpenMRS, LLC. All Rights Reserved.
*/
package org.openmrs.web.controller.visit;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.servlet.http.HttpServletRequest;
import org.apache.commons.lang.StringUtils;
import org.directwebremoting.util.Logger;
import org.openmrs.Encounter;
import org.openmrs.Form;
import org.openmrs.Patient;
import org.openmrs.Provider;
import org.openmrs.Visit;
import org.openmrs.api.context.Context;
import org.openmrs.util.OpenmrsUtil;
import org.openmrs.web.controller.PortletControllerUtil;
import org.openmrs.web.controller.bean.DatatableRequest;
import org.openmrs.web.controller.bean.DatatableResponse;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.ModelAttribute;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.ResponseBody;
/**
* Lists visits.
*/
@Controller
public class VisitListController {

	protected final Logger log = Logger.getLogger(getClass());

	/** URL path that {@link #getVisits(Patient, HttpServletRequest)} is mapped to. */
	public static final String VISITS_PATH = "/admin/visits/datatable";

	/** Name of the request parameter carrying the patient id (see {@link #getPatient(Integer)}). */
	public static final String PATIENT = "patient";

	/**
	 * It handles calls from DataTables.
	 * <p>
	 * Builds one response row per (non-voided) encounter of the given patient, including the
	 * enclosing visit's details when the encounter belongs to a visit. Paging and the search
	 * term come from the parsed DataTables request.
	 *
	 * @param patient the patient whose visits/encounters are listed
	 * @param request the current HTTP request, parsed into a {@link DatatableRequest}
	 * @return {@link DatatableResponse}
	 */
	@RequestMapping(VISITS_PATH)
	public @ResponseBody
	DatatableResponse getVisits(@ModelAttribute Patient patient, HttpServletRequest request) {
		DatatableRequest datatable = DatatableRequest.parseRequest(request);
		DatatableResponse response = new DatatableResponse(datatable);

		// Unfiltered total: passing null as the search term counts everything.
		Integer totalVisitsCount = Context.getEncounterService().getEncountersByVisitsAndPatientCount(patient, false, null);
		response.setiTotalRecords(totalVisitsCount);

		// The portlet util fills formToViewUrlMap/formToEditUrlMap into this model map.
		Map<String, Object> model = new HashMap<String, Object>();
		model.put("person", patient);
		PortletControllerUtil.addFormToEditAndViewUrlMaps(model);
		@SuppressWarnings("unchecked")
		Map<Form, String> formToViewUrlMap = (Map<Form, String>) model.get("formToViewUrlMap");
		@SuppressWarnings("unchecked")
		Map<Form, String> formToEditUrlMap = (Map<Form, String>) model.get("formToEditUrlMap");

		// Recompute the display count only when a search term was supplied.
		if (!StringUtils.isBlank(datatable.getsSearch())) {
			Integer filteredVisitsCount = Context.getEncounterService().getEncountersByVisitsAndPatientCount(patient, false,
			    datatable.getsSearch());
			response.setiTotalDisplayRecords(filteredVisitsCount);
		} else {
			response.setiTotalDisplayRecords(totalVisitsCount);
		}

		// Current page of encounters, honouring the DataTables paging window.
		List<Encounter> encounters = Context.getEncounterService().getEncountersByVisitsAndPatient(patient, false,
		    datatable.getsSearch(), datatable.getiDisplayStart(), datatable.getiDisplayLength());

		response.setsColumns("visitId", "visitActive", "visitType", "visitLocation", "visitFrom", "visitTo",
		    "visitIndication", "firstInVisit", "lastInVisit", "encounterId", "encounterDate", "encounterType",
		    "encounterProviders", "encounterLocation", "encounterEnterer", "formViewURL");

		for (Encounter encounter : encounters) {
			Map<String, String> row = new HashMap<String, String>();

			// Visit columns are only populated when the encounter is attached to a visit.
			if (encounter.getVisit() != null) {
				Visit visit = encounter.getVisit();
				row.put("visitId", visit.getId().toString());
				row.put("visitActive", Boolean.toString(isActive(visit.getStartDatetime(), visit.getStopDatetime())));
				row.put("visitType", visit.getVisitType().getName());
				row.put("visitLocation", (visit.getLocation() != null) ? visit.getLocation().getName() : "");
				row.put("visitFrom", Context.getDateFormat().format(visit.getStartDatetime()));
				if (visit.getStopDatetime() != null) {
					row.put("visitTo", Context.getDateFormat().format(visit.getStopDatetime()));
				}
				if (visit.getIndication() != null && visit.getIndication().getName() != null) {
					row.put("visitIndication", visit.getIndication().getName().getName());
				}

				// Flag the first/last encounter of the visit so the view can group rows per visit.
				Object[] visitEncounters = visit.getEncounters().toArray();
				if (visitEncounters.length > 0) {
					if (encounter.equals(visitEncounters[0])) {
						row.put("firstInVisit", Boolean.TRUE.toString());
					}
					if (encounter.equals(visitEncounters[visitEncounters.length - 1])) {
						row.put("lastInVisit", Boolean.TRUE.toString());
					}
				} else {
					// Visit reports no encounters: treat this row as both first and last.
					row.put("firstInVisit", Boolean.TRUE.toString());
					row.put("lastInVisit", Boolean.TRUE.toString());
				}
			}

			if (encounter.getId() != null) { //If it is not mocked encounter
				row.put("encounterId", encounter.getId().toString());
				row.put("encounterDate", Context.getDateFormat().format(encounter.getEncounterDatetime()));
				row.put("encounterType", encounter.getEncounterType().getName());
				row.put("encounterProviders", getProviders(encounter));
				row.put("encounterLocation", (encounter.getLocation() != null) ? encounter.getLocation().getName() : "");
				row.put("encounterEnterer", (encounter.getCreator() != null) ? encounter.getCreator().getPersonName()
				        .getFullName() : "");
				row.put("formViewURL", getViewFormURL(request, formToViewUrlMap, formToEditUrlMap, encounter));
			}

			response.addRow(row);
		}
		return response;
	}

	/**
	 * Builds the URL for viewing an encounter's form: prefers the form's view URL, falls
	 * back to its edit URL, and finally to the generic encounter admin page.
	 *
	 * @param request used only for the servlet context path prefix
	 * @param formToViewUrlMap form-to-view-URL mapping (may have no entry for this form)
	 * @param formToEditUrlMap form-to-edit-URL mapping, used when no view URL exists
	 * @param encounter the encounter whose id is appended as a query parameter
	 * @return a context-relative URL for viewing the encounter
	 */
	private String getViewFormURL(HttpServletRequest request, Map<Form, String> formToViewUrlMap,
	        Map<Form, String> formToEditUrlMap, Encounter encounter) {
		String viewFormURL = formToViewUrlMap.get(encounter.getForm());
		if (viewFormURL == null) {
			viewFormURL = formToEditUrlMap.get(encounter.getForm());
		}
		if (viewFormURL != null) {
			viewFormURL = request.getContextPath() + "/" + viewFormURL + "?encounterId=" + encounter.getId();
		} else {
			// No module-provided form URL: fall back to the legacy encounter admin page.
			viewFormURL = request.getContextPath() + "/admin/encounters/encounter.form?encounterId=" + encounter.getId();
		}
		return viewFormURL;
	}

	/**
	 * Returns a comma-separated list of the names of all providers of the encounter,
	 * across every provider role.
	 *
	 * @param encounter the encounter whose providers are listed
	 * @return e.g. "John Doe, Jane Roe", or "" when there are no providers
	 */
	private String getProviders(Encounter encounter) {
		StringBuilder providersBuilder = new StringBuilder();
		for (Set<Provider> providers : encounter.getProvidersByRoles().values()) {
			for (Provider provider : providers) {
				providersBuilder.append(provider.getName());
				providersBuilder.append(", ");
			}
		}

		// Strip the trailing ", " appended after the last provider.
		if (providersBuilder.length() > 1) {
			return providersBuilder.substring(0, providersBuilder.length() - 2);
		} else {
			return "";
		}
	}

	/**
	 * Resolves the {@link #PATIENT} request parameter into a {@link Patient}, exposed to
	 * the handler method as a model attribute.
	 *
	 * @param patientId value of the "patient" request parameter
	 * @return the patient looked up via the patient service
	 */
	@ModelAttribute
	public Patient getPatient(@RequestParam(PATIENT) Integer patientId) {
		return Context.getPatientService().getPatient(patientId);
	}

	/**
	 * A visit is active when "now" is on/after its start and strictly before its end;
	 * a null end date is treated as "latest possible", i.e. the visit is still open.
	 *
	 * @param start visit start datetime
	 * @param end visit stop datetime, may be null for an ongoing visit
	 * @return true if the visit is currently active
	 */
	private boolean isActive(Date start, Date end) {
		Date now = new Date();
		if (OpenmrsUtil.compare(now, start) >= 0) {
			if (OpenmrsUtil.compareWithNullAsLatest(now, end) < 0) {
				return true;
			}
		}
		return false;
	}
}
|
web/src/main/java/org/openmrs/web/controller/visit/VisitListController.java
|
/**
* The contents of this file are subject to the OpenMRS Public License
* Version 1.0 (the "License"); you may not use this file except in
* compliance with the License. You may obtain a copy of the License at
* http://license.openmrs.org
*
* Software distributed under the License is distributed on an "AS IS"
* basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the
* License for the specific language governing rights and limitations
* under the License.
*
* Copyright (C) OpenMRS, LLC. All Rights Reserved.
*/
package org.openmrs.web.controller.visit;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.servlet.http.HttpServletRequest;
import org.apache.commons.lang.StringUtils;
import org.directwebremoting.util.Logger;
import org.openmrs.Encounter;
import org.openmrs.Form;
import org.openmrs.Patient;
import org.openmrs.Provider;
import org.openmrs.Visit;
import org.openmrs.api.context.Context;
import org.openmrs.util.OpenmrsUtil;
import org.openmrs.web.controller.PortletControllerUtil;
import org.openmrs.web.controller.bean.DatatableRequest;
import org.openmrs.web.controller.bean.DatatableResponse;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.ModelAttribute;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.ResponseBody;
/**
* Lists visits.
*/
@Controller
public class VisitListController {

	protected final Logger log = Logger.getLogger(getClass());

	/** URL path that {@link #getVisits(Patient, HttpServletRequest)} is mapped to. */
	public static final String VISITS_PATH = "/admin/visits/datatable";

	/** Name of the request parameter carrying the patient id (see {@link #getPatient(Integer)}). */
	public static final String PATIENT = "patient";

	/**
	 * It handles calls from DataTables.
	 * <p>
	 * Builds one response row per (non-voided) encounter of the given patient, including the
	 * enclosing visit's details when the encounter belongs to a visit. Paging and the search
	 * term come from the parsed DataTables request.
	 *
	 * @param patient the patient whose visits/encounters are listed
	 * @param request the current HTTP request, parsed into a {@link DatatableRequest}
	 * @return {@link DatatableResponse}
	 */
	@RequestMapping(VISITS_PATH)
	public @ResponseBody
	DatatableResponse getVisits(@ModelAttribute Patient patient, HttpServletRequest request) {
		DatatableRequest datatable = DatatableRequest.parseRequest(request);
		DatatableResponse response = new DatatableResponse(datatable);

		// Unfiltered total: passing null as the search term counts everything.
		Integer totalVisitsCount = Context.getEncounterService().getEncountersByVisitsAndPatientCount(patient, false, null);
		response.setiTotalRecords(totalVisitsCount);

		// The portlet util fills formToViewUrlMap/formToEditUrlMap into this model map.
		Map<String, Object> model = new HashMap<String, Object>();
		model.put("person", patient);
		PortletControllerUtil.addFormToEditAndViewUrlMaps(model);
		@SuppressWarnings("unchecked")
		Map<Form, String> formToViewUrlMap = (Map<Form, String>) model.get("formToViewUrlMap");
		@SuppressWarnings("unchecked")
		Map<Form, String> formToEditUrlMap = (Map<Form, String>) model.get("formToEditUrlMap");

		// Recompute the display count only when a search term was supplied.
		if (!StringUtils.isBlank(datatable.getsSearch())) {
			Integer filteredVisitsCount = Context.getEncounterService().getEncountersByVisitsAndPatientCount(patient, false,
			    datatable.getsSearch());
			response.setiTotalDisplayRecords(filteredVisitsCount);
		} else {
			response.setiTotalDisplayRecords(totalVisitsCount);
		}

		// Current page of encounters, honouring the DataTables paging window.
		List<Encounter> encounters = Context.getEncounterService().getEncountersByVisitsAndPatient(patient, false,
		    datatable.getsSearch(), datatable.getiDisplayStart(), datatable.getiDisplayLength());

		response.setsColumns("visitId", "visitActive", "visitType", "visitLocation", "visitFrom", "visitTo",
		    "visitIndication", "firstInVisit", "lastInVisit", "encounterId", "encounterDate", "encounterType",
		    "encounterProviders", "encounterLocation", "encounterEnterer", "formViewURL");

		for (Encounter encounter : encounters) {
			Map<String, String> row = new HashMap<String, String>();

			// Visit columns are only populated when the encounter is attached to a visit.
			if (encounter.getVisit() != null) {
				Visit visit = encounter.getVisit();
				row.put("visitId", visit.getId().toString());
				row.put("visitActive", Boolean.toString(isActive(visit.getStartDatetime(), visit.getStopDatetime())));
				row.put("visitType", visit.getVisitType().getName());
				row.put("visitLocation", (visit.getLocation() != null) ? visit.getLocation().getName() : "");
				row.put("visitFrom", Context.getDateFormat().format(visit.getStartDatetime()));
				if (visit.getStopDatetime() != null) {
					row.put("visitTo", Context.getDateFormat().format(visit.getStopDatetime()));
				}
				if (visit.getIndication() != null && visit.getIndication().getName() != null) {
					row.put("visitIndication", visit.getIndication().getName().getName());
				}

				// Flag the first/last encounter of the visit so the view can group rows per visit.
				Object[] visitEncounters = visit.getEncounters().toArray();
				if (visitEncounters.length > 0) {
					if (encounter.equals(visitEncounters[0])) {
						row.put("firstInVisit", Boolean.TRUE.toString());
					}
					if (encounter.equals(visitEncounters[visitEncounters.length - 1])) {
						row.put("lastInVisit", Boolean.TRUE.toString());
					}
				} else {
					// Visit reports no encounters: treat this row as both first and last.
					row.put("firstInVisit", Boolean.TRUE.toString());
					row.put("lastInVisit", Boolean.TRUE.toString());
				}
			}

			if (encounter.getId() != null) { //If it is not mocked encounter
				row.put("encounterId", encounter.getId().toString());
				row.put("encounterDate", Context.getDateFormat().format(encounter.getEncounterDatetime()));
				row.put("encounterType", encounter.getEncounterType().getName());
				row.put("encounterProviders", getProviders(encounter));
				row.put("encounterLocation", (encounter.getLocation() != null) ? encounter.getLocation().getName() : "");
				row.put("encounterEnterer", (encounter.getCreator() != null) ? encounter.getCreator().getPersonName()
				        .getFullName() : "");
				row.put("formViewURL", getViewFormURL(request, formToViewUrlMap, formToEditUrlMap, encounter));
			}

			response.addRow(row);
		}
		return response;
	}

	/**
	 * Builds the URL for viewing an encounter's form: prefers the form's view URL, falls
	 * back to its edit URL, and finally to the generic encounter admin page.
	 *
	 * @param request used only for the servlet context path prefix
	 * @param formToViewUrlMap form-to-view-URL mapping (may have no entry for this form)
	 * @param formToEditUrlMap form-to-edit-URL mapping, used when no view URL exists
	 * @param encounter the encounter whose id is appended as a query parameter
	 * @return a context-relative URL for viewing the encounter
	 */
	private String getViewFormURL(HttpServletRequest request, Map<Form, String> formToViewUrlMap,
	        Map<Form, String> formToEditUrlMap, Encounter encounter) {
		String viewFormURL = formToViewUrlMap.get(encounter.getForm());
		if (viewFormURL == null) {
			viewFormURL = formToEditUrlMap.get(encounter.getForm());
		}
		if (viewFormURL != null) {
			viewFormURL = request.getContextPath() + "/" + viewFormURL + "?encounterId=" + encounter.getId();
		} else {
			// No module-provided form URL: fall back to the legacy encounter admin page.
			viewFormURL = request.getContextPath() + "/admin/encounters/encounter.form?encounterId=" + encounter.getId();
		}
		return viewFormURL;
	}

	/**
	 * Returns a comma-separated list of the names of all providers of the encounter,
	 * across every provider role.
	 * <p>
	 * Uses {@code Provider.getName()} instead of dereferencing the provider's person:
	 * the previous person-based lookup showed only the raw identifier for providers not
	 * linked to a person and could throw a NullPointerException when a linked person had
	 * no person name (TRUNK-4531).
	 *
	 * @param encounter the encounter whose providers are listed
	 * @return e.g. "John Doe, Jane Roe", or "" when there are no providers
	 */
	private String getProviders(Encounter encounter) {
		StringBuilder providersBuilder = new StringBuilder();
		for (Set<Provider> providers : encounter.getProvidersByRoles().values()) {
			for (Provider provider : providers) {
				providersBuilder.append(provider.getName());
				providersBuilder.append(", ");
			}
		}

		// Strip the trailing ", " appended after the last provider.
		if (providersBuilder.length() > 1) {
			return providersBuilder.substring(0, providersBuilder.length() - 2);
		} else {
			return "";
		}
	}

	/**
	 * Resolves the {@link #PATIENT} request parameter into a {@link Patient}, exposed to
	 * the handler method as a model attribute.
	 *
	 * @param patientId value of the "patient" request parameter
	 * @return the patient looked up via the patient service
	 */
	@ModelAttribute
	public Patient getPatient(@RequestParam(PATIENT) Integer patientId) {
		return Context.getPatientService().getPatient(patientId);
	}

	/**
	 * A visit is active when "now" is on/after its start and strictly before its end;
	 * a null end date is treated as "latest possible", i.e. the visit is still open.
	 *
	 * @param start visit start datetime
	 * @param end visit stop datetime, may be null for an ongoing visit
	 * @return true if the visit is currently active
	 */
	private boolean isActive(Date start, Date end) {
		Date now = new Date();
		if (OpenmrsUtil.compare(now, start) >= 0) {
			if (OpenmrsUtil.compareWithNullAsLatest(now, end) < 0) {
				return true;
			}
		}
		return false;
	}
}
|
Encounter Provider Names not shown for providers not linked to a person
on patient dashboard visits tab - TRUNK-4531
|
web/src/main/java/org/openmrs/web/controller/visit/VisitListController.java
|
Encounter Provider Names not shown for providers not linked to a person on patient dashboard visits tab - TRUNK-4531
|
|
Java
|
agpl-3.0
|
8a1cbde69604dc45073dedfb17fd71072258a166
| 0
|
sourcebits-praveenkh/Tagase,caiyingyuan/tigase71,Smartupz/tigase-server,nate-sentjens/tigase-xmpp-java,f24-ag/tigase,f24-ag/tigase,sourcebits-praveenkh/Tagase,f24-ag/tigase,nate-sentjens/tigase-xmpp-java,caiyingyuan/tigase71,wangningbo/tigase-server,caiyingyuan/tigase71,pivotal-nathan-sentjens/tigase-xmpp-java,pivotal-nathan-sentjens/tigase-xmpp-java,cgvarela/tigase-server,Smartupz/tigase-server,caiyingyuan/tigase71,Smartupz/tigase-server,cgvarela/tigase-server,pivotal-nathan-sentjens/tigase-xmpp-java,wangningbo/tigase-server,caiyingyuan/tigase71,sourcebits-praveenkh/Tagase,f24-ag/tigase,nate-sentjens/tigase-xmpp-java,Smartupz/tigase-server,fanout/tigase-server,fanout/tigase-server,wangningbo/tigase-server,amikey/tigase-server,cgvarela/tigase-server,fanout/tigase-server,fanout/tigase-server,nate-sentjens/tigase-xmpp-java,nate-sentjens/tigase-xmpp-java,wangningbo/tigase-server,Smartupz/tigase-server,caiyingyuan/tigase71,amikey/tigase-server,wangningbo/tigase-server,amikey/tigase-server,f24-ag/tigase,fanout/tigase-server,cgvarela/tigase-server,pivotal-nathan-sentjens/tigase-xmpp-java,amikey/tigase-server,sourcebits-praveenkh/Tagase,pivotal-nathan-sentjens/tigase-xmpp-java,sourcebits-praveenkh/Tagase,cgvarela/tigase-server,nate-sentjens/tigase-xmpp-java,wangningbo/tigase-server,pivotal-nathan-sentjens/tigase-xmpp-java,wangningbo/tigase-server,fanout/tigase-server,amikey/tigase-server,Smartupz/tigase-server,f24-ag/tigase,cgvarela/tigase-server,amikey/tigase-server,sourcebits-praveenkh/Tagase
|
/*
* Tigase Jabber/XMPP Server
* Copyright (C) 2004-2007 "Artur Hefczyc" <artur.hefczyc@tigase.org>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. Look for COPYING file in the top folder.
* If not, see http://www.gnu.org/licenses/.
*
* $Rev$
* Last modified by $Author$
* $Date$
*/
package tigase.xmpp.impl;
import java.util.Arrays;
import java.util.List;
import java.util.ArrayList;
import java.util.Queue;
import java.util.Map;
import java.util.logging.Logger;
import tigase.server.Packet;
import tigase.util.JIDUtils;
import tigase.xml.Element;
import tigase.xmpp.Authorization;
import tigase.xmpp.NotAuthorizedException;
import tigase.xmpp.StanzaType;
import tigase.xmpp.XMPPProcessor;
import tigase.xmpp.XMPPProcessorIfc;
import tigase.xmpp.XMPPResourceConnection;
import tigase.xmpp.XMPPException;
import tigase.db.NonAuthUserRepository;
import static tigase.xmpp.impl.Roster.SubscriptionType;
/**
* Class <code>JabberIqRoster</code> implements part of <em>RFC-3921</em> -
* <em>XMPP Instant Messaging</em> specification describing roster management.
* 7. Roster Management
*
*
* Created: Tue Feb 21 17:42:53 2006
*
* @author <a href="mailto:artur.hefczyc@tigase.org">Artur Hefczyc</a>
* @version $Rev$
*/
public class JabberIqRoster extends XMPPProcessor
	implements XMPPProcessorIfc {

	/**
	 * Private logger for class instances.
	 */
	private static Logger log =
		Logger.getLogger("tigase.xmpp.impl.JabberIqRoster");

	private static final String XMLNS = "jabber:iq:roster";
	// Processor id is simply the namespace it serves.
	private static final String ID = XMLNS;
	private static final String[] ELEMENTS = {"query"};
	private static final String[] XMLNSS = {XMLNS};
	private static final Element[] DISCO_FEATURES = {
		new Element("feature", new String[] {"var"}, new String[] {XMLNS})
	};
	// Settings key caching the initialized DynamicRosterIfc instances.
	private static final String DYNAMIC_ROSTERS = "dynamic-rosters";
	// Settings key listing class names of dynamic roster implementations to load.
	private static final String DYNAMIC_ROSTERS_CLASSES = "dynamic-roster-classes";

	// Service-discovery features advertised by this processor (defensive copies below
	// keep the static arrays from being mutated by callers).
	public Element[] supDiscoFeatures(final XMPPResourceConnection session)
	{ return Arrays.copyOf(DISCO_FEATURES, DISCO_FEATURES.length); }

	public String id() { return ID; }

	public String[] supElements()
	{ return Arrays.copyOf(ELEMENTS, ELEMENTS.length); }

	public String[] supNamespaces()
	{ return Arrays.copyOf(XMLNSS, XMLNSS.length); }

	/**
	 * Handles a roster "set" request. For subscription="remove" it pushes the removal to
	 * the user's resources, sends unsubscribe/unsubscribed/unavailable presences to the
	 * removed buddy (only if some subscription existed) and deletes the roster entry.
	 * Otherwise it updates/creates the item's name, subscription state and groups, and
	 * pushes the changed item to the user's resources.
	 */
	private void processSetRequest(final Packet packet,
			final XMPPResourceConnection session, final Queue<Packet> results)
		throws NotAuthorizedException {
		Element request = packet.getElement();
		String buddy =
			JIDUtils.getNodeID(request.getAttribute("/iq/query/item", "jid"));
		Element item = request.findChild("/iq/query/item");
		String subscription = item.getAttribute("subscription");
		if (subscription != null && subscription.equals("remove")) {
			SubscriptionType sub = Roster.getBuddySubscription(session, buddy);
			if (sub != null && sub != SubscriptionType.none) {
				// Push the removal to all of this user's connected resources.
				Element it = new Element("item");
				it.setAttribute("jid", buddy);
				it.setAttribute("subscription", "remove");
				Roster.updateBuddyChange(session, results, it);
				// Tear down the presence subscription in both directions.
				Element pres = new Element("presence");
				pres.setAttribute("to", buddy);
				pres.setAttribute("from", session.getUserId());
				pres.setAttribute("type", "unsubscribe");
				results.offer(new Packet(pres));
				pres = new Element("presence");
				pres.setAttribute("to", buddy);
				pres.setAttribute("from", session.getUserId());
				pres.setAttribute("type", "unsubscribed");
				results.offer(new Packet(pres));
				// Sent from the full JID: this resource goes unavailable to the buddy.
				pres = new Element("presence");
				pres.setAttribute("to", buddy);
				pres.setAttribute("from", session.getJID());
				pres.setAttribute("type", "unavailable");
				results.offer(new Packet(pres));
			} // end of if (sub != null && sub != SubscriptionType.none)
			Roster.removeBuddy(session, buddy);
			results.offer(packet.okResult((String)null, 0));
		} else {
			String name = request.getAttribute("/iq/query/item", "name");
			if (name == null) {
				name = buddy;
			} // end of if (name == null)
			Roster.setBuddyName(session, buddy, name);
			if (Roster.getBuddySubscription(session, buddy) == null) {
				// New roster entry: start with no subscription.
				Roster.setBuddySubscription(session, SubscriptionType.none, buddy);
			} // end of if (getBuddySubscription(session, buddy) == null)
			List<Element> groups = item.getChildren();
			if (groups != null && groups.size() > 0) {
				// Replace the buddy's group list with the groups from the request.
				String[] gr = new String[groups.size()];
				int cnt = 0;
				for (Element group : groups) {
					gr[cnt++] = group.getCData();
				} // end of for (ElementData group : groups)
				session.setDataList(Roster.groupNode(buddy), Roster.GROUPS, gr);
			} else {
				session.removeData(Roster.groupNode(buddy), Roster.GROUPS);
			} // end of else
			results.offer(packet.okResult((String)null, 0));
			Roster.updateBuddyChange(session, results,
				Roster.getBuddyItem(session, buddy));
		} // end of else
	}

	/**
	 * Handles a roster "get" request: returns the user's stored roster items plus any
	 * items contributed by configured dynamic roster implementations. Dynamic rosters
	 * are lazily initialized from plugin settings (which may be null when the plugin
	 * has no configuration).
	 */
	private void processGetRequest(final Packet packet,
			final XMPPResourceConnection session, final Queue<Packet> results,
			final Map<String, Object> settings)
		throws NotAuthorizedException {
		String[] buddies = Roster.getBuddies(session);
		if (buddies != null) {
			Element query = new Element("query");
			query.setXMLNS("jabber:iq:roster");
			for (String buddy : buddies) {
				query.addChild(Roster.getBuddyItem(session, buddy));
			}
			// Only touch settings when present; settings can legitimately be null.
			DynamicRosterIfc[] dynr = null;
			if (settings != null) {
				synchronized (settings) {
					init_settings(settings);
				}
				dynr = (DynamicRosterIfc[])settings.get(DYNAMIC_ROSTERS);
			}
			if (dynr != null) {
				for (DynamicRosterIfc dri: dynr) {
					List<Element> items = dri.getRosterItems(session);
					if (items != null) {
						query.addChildren(items);
					}
				}
			}
			results.offer(packet.okResult(query, 0));
		} else {
			// Empty roster: reply with an ok result and no query payload.
			results.offer(packet.okResult((String)null, 1));
		} // end of if (buddies != null) else
	}

	/**
	 * Lazily instantiates the dynamic roster classes listed under
	 * DYNAMIC_ROSTERS_CLASSES and caches them under DYNAMIC_ROSTERS. Each class is
	 * initialized either from its dedicated "&lt;class&gt;.init" setting or from the whole
	 * settings map. Classes that fail to load are logged and skipped.
	 */
	private void init_settings(final Map<String, Object> settings) {
		DynamicRosterIfc[] dynr = (DynamicRosterIfc[])settings.get(DYNAMIC_ROSTERS);
		if (dynr == null) {
			String[] dyncls = (String[])settings.get(DYNAMIC_ROSTERS_CLASSES);
			if (dyncls != null) {
				ArrayList<DynamicRosterIfc> al = new ArrayList<DynamicRosterIfc>();
				for (String cls: dyncls) {
					try {
						DynamicRosterIfc dri =
							(DynamicRosterIfc)Class.forName(cls).newInstance();
						if (settings.get(cls + ".init") != null) {
							dri.init((String)settings.get(cls + ".init"));
						} else {
							dri.init(settings);
						}
						al.add(dri);
						log.info("Initialized dynamic roster: " + cls);
					} catch (Exception e) {
						// Best-effort loading: a bad class must not break roster handling.
						log.warning("Problem initializing dynmic roster class: "
							+ cls + ", " + e);
					}
				}
				if (al.size() > 0) {
					settings.put(DYNAMIC_ROSTERS,
						al.toArray(new DynamicRosterIfc[al.size()]));
				}
			}
		}
	}

	/**
	 * Entry point: dispatches roster IQ stanzas by type (get/set), ignoring results,
	 * rejecting other types, and refusing requests from a JID other than the session
	 * owner or from an unauthorized session.
	 */
	public void process(final Packet packet, final XMPPResourceConnection session,
			final NonAuthUserRepository repo, final Queue<Packet> results,
			final Map<String, Object> settings) throws XMPPException {
		if (session == null) {
			return;
		} // end of if (session == null)
		try {
			if (packet.getElemFrom() != null
					&& !session.getUserId().equals(JIDUtils.getNodeID(packet.getElemFrom()))) {
				// RFC says: ignore such request
				log.warning(
					"Roster request 'from' attribute doesn't match session userid: "
					+ session.getUserId()
					+ ", request: " + packet.getStringData());
				return;
			} // end of if (packet.getElemFrom() != null
				// && !session.getUserId().equals(JIDUtils.getNodeID(packet.getElemFrom())))
			StanzaType type = packet.getType();
			switch (type) {
			case get:
				processGetRequest(packet, session, results, settings);
				break;
			case set:
				processSetRequest(packet, session, results);
				break;
			case result:
				// Ignore
				break;
			default:
				results.offer(Authorization.BAD_REQUEST.getResponseMessage(packet,
					"Request type is incorrect", false));
				break;
			} // end of switch (type)
		} catch (NotAuthorizedException e) {
			log.warning(
				"Received roster request but user session is not authorized yet: " +
				packet.getStringData());
			results.offer(Authorization.NOT_AUTHORIZED.getResponseMessage(packet,
				"You must authorize session first.", true));
		} // end of try-catch
	}
} // JabberIqRoster
|
src/main/java/tigase/xmpp/impl/JabberIqRoster.java
|
/*
* Tigase Jabber/XMPP Server
* Copyright (C) 2004-2007 "Artur Hefczyc" <artur.hefczyc@tigase.org>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. Look for COPYING file in the top folder.
* If not, see http://www.gnu.org/licenses/.
*
* $Rev$
* Last modified by $Author$
* $Date$
*/
package tigase.xmpp.impl;
import java.util.Arrays;
import java.util.List;
import java.util.ArrayList;
import java.util.Queue;
import java.util.Map;
import java.util.logging.Logger;
import tigase.server.Packet;
import tigase.util.JIDUtils;
import tigase.xml.Element;
import tigase.xmpp.Authorization;
import tigase.xmpp.NotAuthorizedException;
import tigase.xmpp.StanzaType;
import tigase.xmpp.XMPPProcessor;
import tigase.xmpp.XMPPProcessorIfc;
import tigase.xmpp.XMPPResourceConnection;
import tigase.xmpp.XMPPException;
import tigase.db.NonAuthUserRepository;
import static tigase.xmpp.impl.Roster.SubscriptionType;
/**
* Class <code>JabberIqRoster</code> implements part of <em>RFC-3921</em> -
* <em>XMPP Instant Messaging</em> specification describing roster management.
* 7. Roster Management
*
*
* Created: Tue Feb 21 17:42:53 2006
*
* @author <a href="mailto:artur.hefczyc@tigase.org">Artur Hefczyc</a>
* @version $Rev$
*/
public class JabberIqRoster extends XMPPProcessor
	implements XMPPProcessorIfc {

	/**
	 * Private logger for class instances.
	 */
	private static Logger log =
		Logger.getLogger("tigase.xmpp.impl.JabberIqRoster");

	private static final String XMLNS = "jabber:iq:roster";
	// Processor id is simply the namespace it serves.
	private static final String ID = XMLNS;
	private static final String[] ELEMENTS = {"query"};
	private static final String[] XMLNSS = {XMLNS};
	private static final Element[] DISCO_FEATURES = {
		new Element("feature", new String[] {"var"}, new String[] {XMLNS})
	};
	// Settings key caching the initialized DynamicRosterIfc instances.
	private static final String DYNAMIC_ROSTERS = "dynamic-rosters";
	// Settings key listing class names of dynamic roster implementations to load.
	private static final String DYNAMIC_ROSTERS_CLASSES = "dynamic-roster-classes";

	// Service-discovery features advertised by this processor (defensive copies below
	// keep the static arrays from being mutated by callers).
	public Element[] supDiscoFeatures(final XMPPResourceConnection session)
	{ return Arrays.copyOf(DISCO_FEATURES, DISCO_FEATURES.length); }

	public String id() { return ID; }

	public String[] supElements()
	{ return Arrays.copyOf(ELEMENTS, ELEMENTS.length); }

	public String[] supNamespaces()
	{ return Arrays.copyOf(XMLNSS, XMLNSS.length); }

	/**
	 * Handles a roster "set" request. For subscription="remove" it pushes the removal to
	 * the user's resources, sends unsubscribe/unsubscribed/unavailable presences to the
	 * removed buddy (only if some subscription existed) and deletes the roster entry.
	 * Otherwise it updates/creates the item's name, subscription state and groups, and
	 * pushes the changed item to the user's resources.
	 */
	private void processSetRequest(final Packet packet,
			final XMPPResourceConnection session, final Queue<Packet> results)
		throws NotAuthorizedException {
		Element request = packet.getElement();
		String buddy =
			JIDUtils.getNodeID(request.getAttribute("/iq/query/item", "jid"));
		Element item = request.findChild("/iq/query/item");
		String subscription = item.getAttribute("subscription");
		if (subscription != null && subscription.equals("remove")) {
			SubscriptionType sub = Roster.getBuddySubscription(session, buddy);
			if (sub != null && sub != SubscriptionType.none) {
				// Push the removal to all of this user's connected resources.
				Element it = new Element("item");
				it.setAttribute("jid", buddy);
				it.setAttribute("subscription", "remove");
				Roster.updateBuddyChange(session, results, it);
				// Tear down the presence subscription in both directions.
				Element pres = new Element("presence");
				pres.setAttribute("to", buddy);
				pres.setAttribute("from", session.getUserId());
				pres.setAttribute("type", "unsubscribe");
				results.offer(new Packet(pres));
				pres = new Element("presence");
				pres.setAttribute("to", buddy);
				pres.setAttribute("from", session.getUserId());
				pres.setAttribute("type", "unsubscribed");
				results.offer(new Packet(pres));
				// Sent from the full JID: this resource goes unavailable to the buddy.
				pres = new Element("presence");
				pres.setAttribute("to", buddy);
				pres.setAttribute("from", session.getJID());
				pres.setAttribute("type", "unavailable");
				results.offer(new Packet(pres));
			} // end of if (sub != null && sub != SubscriptionType.none)
			Roster.removeBuddy(session, buddy);
			results.offer(packet.okResult((String)null, 0));
		} else {
			String name = request.getAttribute("/iq/query/item", "name");
			if (name == null) {
				name = buddy;
			} // end of if (name == null)
			Roster.setBuddyName(session, buddy, name);
			if (Roster.getBuddySubscription(session, buddy) == null) {
				// New roster entry: start with no subscription.
				Roster.setBuddySubscription(session, SubscriptionType.none, buddy);
			} // end of if (getBuddySubscription(session, buddy) == null)
			List<Element> groups = item.getChildren();
			if (groups != null && groups.size() > 0) {
				// Replace the buddy's group list with the groups from the request.
				String[] gr = new String[groups.size()];
				int cnt = 0;
				for (Element group : groups) {
					gr[cnt++] = group.getCData();
				} // end of for (ElementData group : groups)
				session.setDataList(Roster.groupNode(buddy), Roster.GROUPS, gr);
			} else {
				session.removeData(Roster.groupNode(buddy), Roster.GROUPS);
			} // end of else
			results.offer(packet.okResult((String)null, 0));
			Roster.updateBuddyChange(session, results,
				Roster.getBuddyItem(session, buddy));
		} // end of else
	}

	/**
	 * Handles a roster "get" request: returns the user's stored roster items plus any
	 * items contributed by configured dynamic roster implementations.
	 * <p>
	 * Fix: the dynamic roster lookup is performed only when {@code settings} is non-null.
	 * Previously {@code settings.get(DYNAMIC_ROSTERS)} was called unconditionally, which
	 * threw a NullPointerException when the plugin had no settings configured.
	 */
	private void processGetRequest(final Packet packet,
			final XMPPResourceConnection session, final Queue<Packet> results,
			final Map<String, Object> settings)
		throws NotAuthorizedException {
		String[] buddies = Roster.getBuddies(session);
		if (buddies != null) {
			Element query = new Element("query");
			query.setXMLNS("jabber:iq:roster");
			for (String buddy : buddies) {
				query.addChild(Roster.getBuddyItem(session, buddy));
			}
			// Only touch settings when present; settings can legitimately be null.
			DynamicRosterIfc[] dynr = null;
			if (settings != null) {
				synchronized (settings) {
					init_settings(settings);
				}
				dynr = (DynamicRosterIfc[])settings.get(DYNAMIC_ROSTERS);
			}
			if (dynr != null) {
				for (DynamicRosterIfc dri: dynr) {
					List<Element> items = dri.getRosterItems(session);
					if (items != null) {
						query.addChildren(items);
					}
				}
			}
			results.offer(packet.okResult(query, 0));
		} else {
			// Empty roster: reply with an ok result and no query payload.
			results.offer(packet.okResult((String)null, 1));
		} // end of if (buddies != null) else
	}

	/**
	 * Lazily instantiates the dynamic roster classes listed under
	 * DYNAMIC_ROSTERS_CLASSES and caches them under DYNAMIC_ROSTERS. Each class is
	 * initialized either from its dedicated "&lt;class&gt;.init" setting or from the whole
	 * settings map. Classes that fail to load are logged and skipped.
	 */
	private void init_settings(final Map<String, Object> settings) {
		DynamicRosterIfc[] dynr = (DynamicRosterIfc[])settings.get(DYNAMIC_ROSTERS);
		if (dynr == null) {
			String[] dyncls = (String[])settings.get(DYNAMIC_ROSTERS_CLASSES);
			if (dyncls != null) {
				ArrayList<DynamicRosterIfc> al = new ArrayList<DynamicRosterIfc>();
				for (String cls: dyncls) {
					try {
						DynamicRosterIfc dri =
							(DynamicRosterIfc)Class.forName(cls).newInstance();
						if (settings.get(cls + ".init") != null) {
							dri.init((String)settings.get(cls + ".init"));
						} else {
							dri.init(settings);
						}
						al.add(dri);
						log.info("Initialized dynamic roster: " + cls);
					} catch (Exception e) {
						// Best-effort loading: a bad class must not break roster handling.
						log.warning("Problem initializing dynmic roster class: "
							+ cls + ", " + e);
					}
				}
				if (al.size() > 0) {
					settings.put(DYNAMIC_ROSTERS,
						al.toArray(new DynamicRosterIfc[al.size()]));
				}
			}
		}
	}

	/**
	 * Entry point: dispatches roster IQ stanzas by type (get/set), ignoring results,
	 * rejecting other types, and refusing requests from a JID other than the session
	 * owner or from an unauthorized session.
	 */
	public void process(final Packet packet, final XMPPResourceConnection session,
			final NonAuthUserRepository repo, final Queue<Packet> results,
			final Map<String, Object> settings) throws XMPPException {
		if (session == null) {
			return;
		} // end of if (session == null)
		try {
			if (packet.getElemFrom() != null
					&& !session.getUserId().equals(JIDUtils.getNodeID(packet.getElemFrom()))) {
				// RFC says: ignore such request
				log.warning(
					"Roster request 'from' attribute doesn't match session userid: "
					+ session.getUserId()
					+ ", request: " + packet.getStringData());
				return;
			} // end of if (packet.getElemFrom() != null
				// && !session.getUserId().equals(JIDUtils.getNodeID(packet.getElemFrom())))
			StanzaType type = packet.getType();
			switch (type) {
			case get:
				processGetRequest(packet, session, results, settings);
				break;
			case set:
				processSetRequest(packet, session, results);
				break;
			case result:
				// Ignore
				break;
			default:
				results.offer(Authorization.BAD_REQUEST.getResponseMessage(packet,
					"Request type is incorrect", false));
				break;
			} // end of switch (type)
		} catch (NotAuthorizedException e) {
			log.warning(
				"Received roster request but user session is not authorized yet: " +
				packet.getStringData());
			results.offer(Authorization.NOT_AUTHORIZED.getResponseMessage(packet,
				"You must authorize session first.", true));
		} // end of try-catch
	}
} // JabberIqRoster
|
Fixed null pointer exception in case of missing plugin settings for roster
git-svn-id: 4a0daf30c0bbd291b3bc5fe8f058bf11ee523347@707 7d282ba1-3ae6-0310-8f9b-c9008a0864d2
|
src/main/java/tigase/xmpp/impl/JabberIqRoster.java
|
Fixed null pointer exception in case of missing plugin settings for roster
|
|
Java
|
lgpl-2.1
|
2537bb1cecfee3e2b53d974c781b46a45dc47db4
| 0
|
ethaneldridge/vassal,ethaneldridge/vassal,ethaneldridge/vassal
|
/*
* Copyright (c) 2004-2020 by Rodney Kinney, Joel Uckelman
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License (LGPL) as published by the Free Software Foundation.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, copies are available
* at http://www.opensource.org.
*/
package VASSAL.build.module.map.boardPicker.board.mapgrid;
import java.awt.AlphaComposite;
import java.awt.BasicStroke;
import java.awt.Color;
import java.awt.Graphics;
import java.awt.Graphics2D;
import java.awt.Point;
import java.awt.Polygon;
import java.awt.Rectangle;
import java.awt.RenderingHints;
import java.awt.event.ActionEvent;
import java.awt.event.KeyEvent;
import java.awt.event.MouseEvent;
import java.awt.event.MouseListener;
import java.awt.event.MouseMotionListener;
import java.awt.event.WindowAdapter;
import java.awt.event.WindowEvent;
import java.awt.geom.Point2D;
import java.util.ArrayList;
import java.util.List;
import javax.swing.AbstractAction;
import javax.swing.InputMap;
import javax.swing.JFrame;
import javax.swing.JPanel;
import javax.swing.JScrollPane;
import javax.swing.KeyStroke;
import javax.swing.event.MouseInputAdapter;
import org.apache.commons.lang3.ArrayUtils;
import org.apache.commons.lang3.tuple.Pair;
import org.apache.commons.lang3.tuple.Triple;
import VASSAL.tools.SequenceEncoder;
import VASSAL.tools.swing.SwingUtils;
public class PolygonEditor extends JPanel {
private static final long serialVersionUID = 1L;
private Polygon polygon;
private int selected = -1;
private List<Point> path;
protected JScrollPane myScroll;
private static final String DELETE = "Delete";
private static final String ESCAPE = "Escape";
private static final int POINT_RADIUS = 10;
private static final int CLICK_THRESHOLD = 10;
/**
 * Creates an editor for the given polygon.
 *
 * @param p the polygon to edit; may be {@code null}, in which case
 *          {@link #reset()} starts in creation mode
 */
public PolygonEditor(Polygon p) {
  polygon = p;
}
/**
 * Tears down any previously installed mouse handlers and key bindings,
 * then re-enters the mode appropriate for the current polygon: creation
 * mode when there is no polygon yet, edit mode otherwise.
 */
protected void reset() {
  // clear all the listeners
  final MouseListener[] ml = getMouseListeners();
  for (final MouseListener i: ml) {
    removeMouseListener(i);
  }
  final MouseMotionListener[] mml = getMouseMotionListeners();
  for (final MouseMotionListener i: mml) {
    removeMouseMotionListener(i);
  }
  // Drop both key bindings; only one of them is active per mode, but
  // clearing both makes this safe to call from any state.
  final InputMap im = getInputMap(WHEN_IN_FOCUSED_WINDOW);
  im.remove(KeyStroke.getKeyStroke(KeyEvent.VK_DELETE, 0));
  im.remove(KeyStroke.getKeyStroke(KeyEvent.VK_ESCAPE, 0));
  if (polygon == null || polygon.npoints == 0) {
    setupForCreate();
  }
  else {
    setupForEdit();
  }
}
/** @return the polygon being edited — a live reference, may be {@code null} */
public Polygon getPolygon() {
  return polygon;
}
/**
 * @return an independent copy of the current polygon; callers must ensure a
 *         polygon exists (a {@code null} polygon would throw NPE here)
 */
public Polygon clonePolygon() {
  return new Polygon(polygon.xpoints, polygon.ypoints, polygon.npoints);
}
/** Replaces the polygon being edited; does not re-wire listeners by itself. */
public void setPolygon(Polygon polygon) {
  this.polygon = polygon;
}
/** Sets the scroll pane used by edge auto-scrolling during vertex drags. */
public void setScroll(JScrollPane scroll) {
  myScroll = scroll;
}
/**
 * Enters creation mode: installs a {@link DefinePicker} handler and starts
 * collecting an empty vertex path.
 */
private void setupForCreate() {
  final DefinePicker dp = new DefinePicker();
  addMouseListener(dp);
  addMouseMotionListener(dp);
  path = new ArrayList<>();
  requestFocus();
  repaint();
}
/**
 * Enters edit mode. {@link ModifyPolygon}'s constructor registers itself as
 * mouse listener and installs the Delete key binding, so the instance is not
 * kept here.
 */
private void setupForEdit() {
  new ModifyPolygon();
  requestFocus();
  repaint();
}
/**
 * Scrolls so that {@code p} sits at the middle of the visible area, clamping
 * the viewport origin at (0, 0). Falls back to a 600x600 viewport when the
 * component has not been laid out yet (visible rect width is zero).
 *
 * @param p the point, in component coordinates, to center on
 */
public void center(Point p) {
  final Rectangle view = this.getVisibleRect();
  if (view.width == 0) {
    view.width = 600;
    view.height = 600;
  }
  final int x = Math.max(0, p.x - view.width / 2);
  final int y = Math.max(0, p.y - view.height / 2);
  scrollRectToVisible(new Rectangle(x, y, view.width, view.height));
}
/**
 * Re-populates {@code p} from its serialized form {@code "x1,y1;x2,y2;..."}
 * (the format produced by {@link #polygonToString(Polygon)}).
 * Parsing is deliberately best-effort: a malformed vertex token is skipped
 * and the remaining tokens are still processed, so a partially corrupt zone
 * definition degrades gracefully instead of failing outright.
 *
 * @param p       the polygon to reset and refill
 * @param pathStr the serialized vertex list
 */
public static void reset(Polygon p, String pathStr) {
  p.reset();
  final SequenceEncoder.Decoder sd = new SequenceEncoder.Decoder(pathStr, ';');
  while (sd.hasMoreTokens()) {
    final String s = sd.nextToken();
    final SequenceEncoder.Decoder pd = new SequenceEncoder.Decoder(s, ',');
    if (pd.hasMoreTokens()) {
      try {
        final int x = Integer.parseInt(pd.nextToken().trim());
        if (pd.hasMoreTokens()) {
          final int y = Integer.parseInt(pd.nextToken().trim());
          p.addPoint(x, y);
        }
      }
      catch (final NumberFormatException ignored) {
        // Malformed vertex: skip it and keep parsing the rest of the path.
      }
    }
  }
}
/**
 * Serializes a polygon as {@code "x1,y1;x2,y2;..."} — the inverse of
 * {@link #reset(Polygon, String)}.
 *
 * <p>The coordinates are {@code int}s already; the previous
 * {@code Math.round()} calls widened them to {@code float}, which loses
 * precision for coordinates with magnitude above 2^24, so they are gone.
 *
 * @param p the polygon to serialize
 * @return the encoded vertex list; empty string when the polygon is empty
 */
public static String polygonToString(Polygon p) {
  final StringBuilder sb = new StringBuilder();
  for (int i = 0; i < p.npoints; ++i) {
    if (i > 0) {
      sb.append(';');
    }
    sb.append(p.xpoints[i])
      .append(',')
      .append(p.ypoints[i]);
  }
  return sb.toString();
}
/**
 * Renders either the finished polygon (edit mode) or the in-progress vertex
 * path (creation mode): a translucent white fill, open circles on every
 * vertex, a filled red circle on the selected/active vertex, and a 2px black
 * outline. Nothing beyond the background is drawn when both the polygon and
 * the path are empty.
 */
@Override
public void paint(Graphics g) {
  paintBackground(g);
  if ((polygon == null || polygon.npoints == 0) &&
      (path == null || path.isEmpty())) {
    return;
  }
  final Graphics2D g2d = (Graphics2D) g;
  g2d.setRenderingHint(
    RenderingHints.KEY_ANTIALIASING,
    RenderingHints.VALUE_ANTIALIAS_ON
  );
  // Half-transparent so the board image underneath stays readable.
  g2d.setComposite(AlphaComposite.getInstance(AlphaComposite.SRC_OVER, 0.5F));
  final int r = POINT_RADIUS;
  final int d = 2 * r;
  if (polygon != null && polygon.npoints > 0) {
    // fill the zone
    g2d.setColor(Color.WHITE);
    g2d.fill(polygon);
    // draw the vertex markers
    g2d.setColor(Color.BLACK);
    for (int i = 0; i < polygon.npoints; ++i) {
      final int x = polygon.xpoints[i];
      final int y = polygon.ypoints[i];
      g2d.drawOval(x - r, y - r, d, d);
    }
    // draw the selected vertex
    if (selected >= 0 && selected < polygon.xpoints.length) {
      g2d.setColor(Color.RED);
      final int x = polygon.xpoints[selected];
      final int y = polygon.ypoints[selected];
      g2d.fillOval(x - r, y - r, d, d);
    }
    // draw the zone
    g2d.setComposite(AlphaComposite.SrcAtop);
    g2d.setColor(Color.BLACK);
    g2d.setStroke(new BasicStroke(2.0F));
    g2d.drawPolygon(polygon);
  }
  else if (path != null && !path.isEmpty()) {
    final int ps = path.size();
    Point p1, p2;
    // draw the vertex markers
    g2d.setColor(Color.BLACK);
    for (int i = 0; i < ps; ++i) {
      p1 = path.get(i);
      g2d.drawOval(p1.x - r, p1.y - r, d, d);
    }
    // highlight the initial vertex if the active vertex overlaps it
    // (signals that releasing here will close the polygon)
    p1 = path.get(0);
    p2 = path.get(ps - 1);
    final double dp = Point2D.distance(p1.x, p1.y, p2.x, p2.y);
    if (dp <= 2 * CLICK_THRESHOLD) {
      g2d.setColor(Color.YELLOW);
      g2d.fillOval(p1.x - r, p1.y - r, d, d);
    }
    // draw the active vertex
    g2d.setColor(Color.RED);
    p1 = path.get(ps - 1);
    g2d.fillOval(p1.x - r, p1.y - r, d, d);
    // draw the path
    p1 = path.get(0);
    g2d.setComposite(AlphaComposite.SrcAtop);
    g2d.setColor(Color.BLACK);
    g2d.setStroke(new BasicStroke(2.0F));
    for (int i = 1; i < ps; ++i) {
      p2 = path.get(i);
      g2d.drawLine(p1.x, p1.y, p2.x, p2.y);
      p1 = p2;
    }
  }
}
/**
 * Paints the plain JPanel background; overridden by subclasses that show a
 * board image under the polygon overlay.
 */
protected void paintBackground(Graphics g) {
  super.paint(g);
}
/**
 * Finds the vertex of {@code p} closest to the point {@code (x, y)}.
 *
 * @param p the polygon to search
 * @param x query x coordinate
 * @param y query y coordinate
 * @return the winning vertex index and its distance; index is -1 (with
 *         distance {@code Double.MAX_VALUE}) for an empty polygon
 */
protected static Pair<Integer, Double> nearestVertex(Polygon p, int x, int y) {
  int best = -1;
  double bestDist = Double.MAX_VALUE;
  for (int i = 0; i < p.npoints; ++i) {
    final double dist = Point2D.distance(x, y, p.xpoints[i], p.ypoints[i]);
    if (dist < bestDist) {
      bestDist = dist;
      best = i;
    }
  }
  return Pair.of(best, bestDist);
}
/**
 * Finds the polygon edge closest to {@code (x, y)}.
 *
 * <p>For each edge {@code i -> (i + 1) % npoints} the query point is
 * projected onto the segment, with the projection parameter {@code u}
 * clamped to [0, 1] so the candidate stays on the segment.
 *
 * <p>Bug fix: for a degenerate segment (coincident endpoints) the squared
 * length {@code norm} is 0, the old code computed {@code u = NaN}, the
 * ternary clamp let NaN through, and {@code Math.round(NaN) == 0} turned the
 * candidate into (0, 0). Such segments now use the endpoint itself.
 *
 * @param p the polygon to search
 * @param x query x coordinate
 * @param y query y coordinate
 * @return index of the winning edge's first vertex, the nearest point on
 *         that edge, and its distance; index is -1 for an empty polygon
 */
protected static Triple<Integer, Point, Double> nearestSegment(Polygon p, int x, int y) {
  int idx = -1;
  int minX = 0;
  int minY = 0;
  double minDist = Double.MAX_VALUE;
  for (int i = 0; i < p.npoints; ++i) {
    final int j = (i + 1) % p.npoints;
    final int x1 = p.xpoints[i];
    final int y1 = p.ypoints[i];
    final int px = p.xpoints[j] - x1;
    final int py = p.ypoints[j] - y1;
    final int norm = px * px + py * py;
    // Degenerate segment => project onto its (single) endpoint.
    double u = norm == 0 ? 0.0 : ((x - x1) * px + (y - y1) * py) / (double) norm;
    u = Math.max(0.0, Math.min(1.0, u));
    // (x3, y3) is the point nearest to (x, y) on the segment
    final int x3 = (int) Math.round(x1 + u * px);
    final int y3 = (int) Math.round(y1 + u * py);
    final double d = Point2D.distance(x, y, x3, y3);
    if (d < minDist) {
      minDist = d;
      minX = x3;
      minY = y3;
      idx = i;
    }
  }
  return Triple.of(idx, new Point(minX, minY), minDist);
}
/** Removes vertex {@code i} from {@code p} in place. */
protected static void deleteVertex(Polygon p, int i) {
  p.xpoints = ArrayUtils.remove(p.xpoints, i);
  p.ypoints = ArrayUtils.remove(p.ypoints, i);
  --p.npoints;
  // The arrays were mutated directly, so drop Polygon's cached bounds.
  p.invalidate();
}
/** Inserts a vertex {@code (x, y)} into {@code p} at position {@code i}. */
protected static void insertVertex(Polygon p, int i, int x, int y) {
  p.xpoints = ArrayUtils.insert(i, p.xpoints, x);
  p.ypoints = ArrayUtils.insert(i, p.ypoints, y);
  ++p.npoints;
  p.invalidate();
}
/** Moves vertex {@code i} of {@code p} to {@code (x, y)}. */
protected static void moveVertex(Polygon p, int i, int x, int y) {
  p.xpoints[i] = x;
  p.ypoints[i] = y;
  p.invalidate();
}
/**
 * Mouse/keyboard handler for editing an existing polygon: left-drag moves
 * the selected vertex (auto-scrolling at viewport edges), right-click
 * inserts a vertex at the cursor, double-left-click inserts one on the
 * nearest edge, and Delete removes the selected vertex. The constructor
 * wires the handler into the enclosing editor itself.
 */
private class ModifyPolygon extends MouseInputAdapter {
  public ModifyPolygon() {
    addMouseListener(this);
    addMouseMotionListener(this);
    // Bind Delete so the selected vertex can be removed from the keyboard.
    getInputMap(WHEN_IN_FOCUSED_WINDOW).put(KeyStroke.getKeyStroke(KeyEvent.VK_DELETE, 0), DELETE);
    getActionMap().put(DELETE, new AbstractAction() {
      private static final long serialVersionUID = 1L;
      @Override
      public void actionPerformed(ActionEvent e) {
        deleteKeyPressed();
      }
    });
  }
  // Detach this handler and its key binding from the editor component.
  private void remove() {
    removeMouseListener(this);
    removeMouseMotionListener(this);
    getInputMap(WHEN_IN_FOCUSED_WINDOW).remove(KeyStroke.getKeyStroke(KeyEvent.VK_DELETE, 0));
  }
  @Override
  public void mouseDragged(MouseEvent e) {
    if (SwingUtils.isMainMouseButtonDown(e)) {
      if (selected >= 0 && selected < polygon.xpoints.length) {
        moveVertex(polygon, selected, e.getX(), e.getY());
      }
      // Keep the drag target visible when it nears the viewport edge.
      scrollAtEdge(e.getPoint(), 15);
      repaint();
    }
  }
  @Override
  public void mouseClicked(MouseEvent e) {
    if (SwingUtils.isContextMouseButtonDown(e) ||
        (SwingUtils.isMainMouseButtonDown(e) && e.getClickCount() == 2)) {
      final Triple<Integer, Point, Double> t = nearestSegment(polygon, e.getX(), e.getY());
      final int ins = t.getLeft() + 1;
      if (SwingUtils.isContextMouseButtonDown(e)) {
        // Right-click: insert the new vertex exactly where the user clicked.
        insertVertex(polygon, ins, e.getX(), e.getY());
      }
      else {
        // Double-click: snap the new vertex onto the nearest edge.
        final Point np = t.getMiddle();
        insertVertex(polygon, ins, np.x, np.y);
      }
      selected = ins;
      repaint();
    }
  }
  @Override
  public void mousePressed(MouseEvent e) {
    if (!SwingUtils.isMainMouseButtonDown(e)) {
      return;
    }
    // On left button press, select nearest vertex within the threshold.
    final Pair<Integer, Double> n = nearestVertex(polygon, e.getX(), e.getY());
    final double d = n.getRight();
    selected = d <= CLICK_THRESHOLD ? n.getLeft() : -1;
    repaint();
  }
  /**
   * Auto-scrolls the enclosing scroll pane when {@code evtPt} comes within
   * {@code dist} pixels of the viewport border, nudging the view by
   * {@code 2 * dist} in the corresponding direction(s).
   */
  public void scrollAtEdge(Point evtPt, int dist) {
    // Translate to viewport-relative coordinates.
    final Point p = new Point(
      evtPt.x - myScroll.getViewport().getViewPosition().x,
      evtPt.y - myScroll.getViewport().getViewPosition().y
    );
    int dx = 0, dy = 0;
    if (p.x < dist && p.x >= 0) {
      dx = -1;
    }
    if (p.x >= myScroll.getViewport().getSize().width - dist
        && p.x < myScroll.getViewport().getSize().width) {
      dx = 1;
    }
    if (p.y < dist && p.y >= 0) {
      dy = -1;
    }
    if (p.y >= myScroll.getViewport().getSize().height - dist
        && p.y < myScroll.getViewport().getSize().height) {
      dy = 1;
    }
    if (dx != 0 || dy != 0) {
      Rectangle r = new Rectangle(myScroll.getViewport().getViewRect());
      r.translate(2 * dist * dx, 2 * dist * dy);
      // Never scroll past the component bounds.
      r = r.intersection(new Rectangle(new Point(0, 0), getPreferredSize()));
      scrollRectToVisible(r);
    }
  }
  /** Deletes the selected vertex; a polygon reduced below 2 points is discarded. */
  public void deleteKeyPressed() {
    if (selected >= 0) {
      deleteVertex(polygon, selected);
      selected = -1;
      // It's not possible to add a point when there's no segment to click
      // so remove the whole polygon
      if (polygon.npoints < 2) {
        polygon = null;
        remove();
        setupForCreate();
      }
      repaint();
    }
  }
}
/**
 * Converts an ordered vertex list into the parallel coordinate arrays a
 * {@link Polygon} requires.
 *
 * @param pl the vertices, in drawing order
 * @return a polygon containing exactly those vertices
 */
private static Polygon pathToPolygon(List<Point> pl) {
  final int n = pl.size();
  final int[] xs = new int[n];
  final int[] ys = new int[n];
  int i = 0;
  for (final Point p : pl) {
    xs[i] = p.x;
    ys[i] = p.y;
    ++i;
  }
  return new Polygon(xs, ys, n);
}
/**
 * Initial handler when no polygon exists yet. It disambiguates the first
 * gesture: a plain left click-and-release hands off to {@link DefinePolygon}
 * (free-form path), while a left drag hands off to {@link DefineRectangle}.
 */
private class DefinePicker extends MouseInputAdapter {
  public DefinePicker() {
    addMouseListener(this);
    addMouseMotionListener(this);
  }
  // Detach this handler once the gesture has been classified.
  private void remove() {
    removeMouseListener(this);
    removeMouseMotionListener(this);
  }
  @Override
  public void mousePressed(MouseEvent e) {
    if (SwingUtils.isMainMouseButtonDown(e)) {
      path.add(new Point(e.getPoint()));
    }
  }
  @Override
  public void mouseReleased(MouseEvent e) {
    if (SwingUtils.isMainMouseButtonDown(e)) {
      remove();
      new DefinePolygon();
      // Duplicate the first point as the movable rubber-band endpoint.
      path.add(new Point(path.get(0)));
      repaint();
    }
  }
  @Override
  public void mouseDragged(MouseEvent e) {
    if (SwingUtils.isMainMouseButtonDown(e)) {
      remove();
      new DefineRectangle();
      // Seed a degenerate 4-point rectangle at the press location; the
      // DefineRectangle handler stretches it as the drag continues.
      final int x = path.get(0).x;
      final int[] xpoints = { x, x, x, x };
      final int y = path.get(0).y;
      final int[] ypoints = { y, y, y, y };
      polygon = new Polygon(xpoints, ypoints, 4);
      repaint();
    }
  }
}
/**
 * Handler for free-form polygon creation: each left release adds a vertex,
 * the trailing vertex follows the cursor as a rubber band, releasing near
 * the first vertex closes the polygon and switches to edit mode, and Escape
 * abandons the path and restarts creation.
 */
private class DefinePolygon extends MouseInputAdapter {
  public DefinePolygon() {
    addMouseListener(this);
    addMouseMotionListener(this);
    // Bind Escape so an in-progress path can be abandoned.
    getInputMap(WHEN_IN_FOCUSED_WINDOW).put(KeyStroke.getKeyStroke(KeyEvent.VK_ESCAPE, 0), ESCAPE);
    getActionMap().put(ESCAPE, new AbstractAction() {
      private static final long serialVersionUID = 1L;
      @Override
      public void actionPerformed(ActionEvent e) {
        escapeKeyPressed();
      }
    });
  }
  // Detach this handler and its key binding.
  private void remove() {
    removeMouseListener(this);
    removeMouseMotionListener(this);
    getInputMap(WHEN_IN_FOCUSED_WINDOW).remove(KeyStroke.getKeyStroke(KeyEvent.VK_ESCAPE, 0));
  }
  @Override
  public void mouseReleased(MouseEvent e) {
    if (!SwingUtils.isMainMouseButtonDown(e) || path.isEmpty()) {
      return;
    }
    if (path.size() > 2) {
      final Point beg = path.get(0);
      final double d = Point2D.distance(e.getX(), e.getY(), beg.x, beg.y);
      if (d <= CLICK_THRESHOLD) {
        // Released on the first vertex: drop the rubber-band point, close
        // the polygon, and switch to edit mode.
        path.remove(path.size() - 1);
        polygon = pathToPolygon(path);
        selected = 0;
        path = null;
        remove();
        setupForEdit();
        return;
      }
    }
    // Otherwise commit the current endpoint and start a new rubber band.
    path.add(new Point(e.getPoint()));
    repaint();
  }
  // Moves the trailing (rubber-band) vertex to follow the cursor.
  private void moveEndpoint(Point p) {
    if (!path.isEmpty()) {
      path.get(path.size() - 1).setLocation(p);
      repaint();
    }
  }
  @Override
  public void mouseDragged(MouseEvent e) {
    if (SwingUtils.isMainMouseButtonDown(e)) {
      moveEndpoint(e.getPoint());
    }
  }
  @Override
  public void mouseMoved(MouseEvent e) {
    moveEndpoint(e.getPoint());
  }
  /** Abandons the in-progress path and restarts creation mode. */
  public void escapeKeyPressed() {
    remove();
    path.clear();
    setupForCreate();
  }
}
/**
 * Handler for drag-to-create-rectangle mode. The polygon was seeded by
 * {@link DefinePicker} as four coincident points; dragging stretches the
 * corner opposite the press point, and releasing switches to edit mode with
 * the nearest corner selected.
 */
private class DefineRectangle extends MouseInputAdapter {
  public DefineRectangle() {
    addMouseListener(this);
    addMouseMotionListener(this);
  }
  private void remove() {
    removeMouseListener(this);
    removeMouseMotionListener(this);
  }
  @Override
  public void mouseDragged(MouseEvent e) {
    if (SwingUtils.isMainMouseButtonDown(e)) {
      // Corner 0 stays fixed; corners 1-3 track the drag to keep the
      // quadrilateral an axis-aligned rectangle.
      polygon.xpoints[1] = polygon.xpoints[2] = e.getX();
      polygon.ypoints[2] = polygon.ypoints[3] = e.getY();
      repaint();
    }
  }
  @Override
  public void mouseReleased(MouseEvent e) {
    if (SwingUtils.isMainMouseButtonDown(e)) {
      selected = nearestVertex(polygon, e.getX(), e.getY()).getLeft();
      remove();
      setupForEdit();
    }
  }
}
/**
 * Stand-alone manual test harness: opens an empty editor in a 500x500 frame
 * so creation/editing can be exercised outside the full application.
 */
public static void main(String[] args) {
  final JFrame f = new JFrame();
  f.add(new PolygonEditor(null));
  f.setSize(500, 500);
  f.addWindowListener(new WindowAdapter() {
    @Override
    public void windowClosing(WindowEvent e) {
      System.exit(0);
    }
  });
  f.setVisible(true);
}
}
|
vassal-app/src/main/java/VASSAL/build/module/map/boardPicker/board/mapgrid/PolygonEditor.java
|
/*
* Copyright (c) 2004-2020 by Rodney Kinney, Joel Uckelman
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License (LGPL) as published by the Free Software Foundation.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, copies are available
* at http://www.opensource.org.
*/
package VASSAL.build.module.map.boardPicker.board.mapgrid;
import java.awt.AlphaComposite;
import java.awt.BasicStroke;
import java.awt.Color;
import java.awt.Graphics;
import java.awt.Graphics2D;
import java.awt.Point;
import java.awt.Polygon;
import java.awt.Rectangle;
import java.awt.RenderingHints;
import java.awt.event.ActionEvent;
import java.awt.event.KeyEvent;
import java.awt.event.MouseEvent;
import java.awt.event.MouseListener;
import java.awt.event.MouseMotionListener;
import java.awt.event.WindowAdapter;
import java.awt.event.WindowEvent;
import java.awt.geom.Point2D;
import java.util.ArrayList;
import java.util.List;
import javax.swing.AbstractAction;
import javax.swing.InputMap;
import javax.swing.JFrame;
import javax.swing.JPanel;
import javax.swing.JScrollPane;
import javax.swing.KeyStroke;
import javax.swing.event.MouseInputAdapter;
import org.apache.commons.lang3.ArrayUtils;
import org.apache.commons.lang3.tuple.Pair;
import org.apache.commons.lang3.tuple.Triple;
import VASSAL.tools.SequenceEncoder;
import VASSAL.tools.swing.SwingUtils;
public class PolygonEditor extends JPanel {
private static final long serialVersionUID = 1L;
private Polygon polygon;
private int selected = -1;
private List<Point> path;
protected JScrollPane myScroll;
private static final String DELETE = "Delete";
private static final String ESCAPE = "Escape";
private static final int POINT_RADIUS = 10;
private static final int CLICK_THRESHOLD = 10;
public PolygonEditor(Polygon p) {
polygon = p;
}
protected void reset() {
// clear all the listeners
final MouseListener[] ml = getMouseListeners();
for (final MouseListener i: ml) {
removeMouseListener(i);
}
final MouseMotionListener[] mml = getMouseMotionListeners();
for (final MouseMotionListener i: mml) {
removeMouseMotionListener(i);
}
final InputMap im = getInputMap(WHEN_IN_FOCUSED_WINDOW);
im.remove(KeyStroke.getKeyStroke(KeyEvent.VK_DELETE, 0));
im.remove(KeyStroke.getKeyStroke(KeyEvent.VK_ESCAPE, 0));
if (polygon == null || polygon.npoints == 0) {
setupForCreate();
}
else {
setupForEdit();
}
}
public Polygon getPolygon() {
return polygon;
}
public Polygon clonePolygon() {
return new Polygon(polygon.xpoints, polygon.ypoints, polygon.npoints);
}
public void setPolygon(Polygon polygon) {
this.polygon = polygon;
}
public void setScroll(JScrollPane scroll) {
myScroll = scroll;
}
private void setupForCreate() {
final DefinePicker dp = new DefinePicker();
addMouseListener(dp);
addMouseMotionListener(dp);
path = new ArrayList<>();
requestFocus();
repaint();
}
private void setupForEdit() {
final ModifyPolygon mp = new ModifyPolygon();
addMouseListener(mp);
addMouseMotionListener(mp);
getInputMap(WHEN_IN_FOCUSED_WINDOW).put(KeyStroke.getKeyStroke(KeyEvent.VK_DELETE, 0), DELETE);
getActionMap().put(DELETE, new AbstractAction() {
private static final long serialVersionUID = 1L;
@Override
public void actionPerformed(ActionEvent e) {
mp.deleteKeyPressed();
}
});
requestFocus();
repaint();
}
public void center(Point p) {
final Rectangle r = this.getVisibleRect();
if (r.width == 0) {
r.width = 600;
r.height = 600;
}
int x = p.x - r.width / 2;
int y = p.y - r.height / 2;
if (x < 0) x = 0;
if (y < 0) y = 0;
scrollRectToVisible(new Rectangle(x, y, r.width, r.height));
}
/**
 * Re-populates {@code p} from its serialized form {@code "x1,y1;x2,y2;..."}
 * (the format produced by {@link #polygonToString(Polygon)}).
 * Parsing is deliberately best-effort: a malformed vertex token is skipped
 * and the remaining tokens are still processed, so a partially corrupt zone
 * definition degrades gracefully instead of failing outright.
 *
 * @param p       the polygon to reset and refill
 * @param pathStr the serialized vertex list
 */
public static void reset(Polygon p, String pathStr) {
  p.reset();
  final SequenceEncoder.Decoder sd = new SequenceEncoder.Decoder(pathStr, ';');
  while (sd.hasMoreTokens()) {
    final String s = sd.nextToken();
    final SequenceEncoder.Decoder pd = new SequenceEncoder.Decoder(s, ',');
    if (pd.hasMoreTokens()) {
      try {
        final int x = Integer.parseInt(pd.nextToken().trim());
        if (pd.hasMoreTokens()) {
          final int y = Integer.parseInt(pd.nextToken().trim());
          p.addPoint(x, y);
        }
      }
      catch (final NumberFormatException ignored) {
        // Malformed vertex: skip it and keep parsing the rest of the path.
      }
    }
  }
}
/**
 * Serializes a polygon as {@code "x1,y1;x2,y2;..."} — the inverse of
 * {@link #reset(Polygon, String)}.
 *
 * <p>The coordinates are {@code int}s already; the previous
 * {@code Math.round()} calls widened them to {@code float}, which loses
 * precision for coordinates with magnitude above 2^24, so they are gone.
 *
 * @param p the polygon to serialize
 * @return the encoded vertex list; empty string when the polygon is empty
 */
public static String polygonToString(Polygon p) {
  final StringBuilder sb = new StringBuilder();
  for (int i = 0; i < p.npoints; ++i) {
    if (i > 0) {
      sb.append(';');
    }
    sb.append(p.xpoints[i])
      .append(',')
      .append(p.ypoints[i]);
  }
  return sb.toString();
}
@Override
public void paint(Graphics g) {
paintBackground(g);
if ((polygon == null || polygon.npoints == 0) &&
(path == null || path.isEmpty())) {
return;
}
final Graphics2D g2d = (Graphics2D) g;
g2d.setRenderingHint(
RenderingHints.KEY_ANTIALIASING,
RenderingHints.VALUE_ANTIALIAS_ON
);
g2d.setComposite(AlphaComposite.getInstance(AlphaComposite.SRC_OVER, 0.5F));
final int r = POINT_RADIUS;
final int d = 2 * r;
if (polygon != null && polygon.npoints > 0) {
// fill the zone
g2d.setColor(Color.WHITE);
g2d.fill(polygon);
// draw the vertex markers
g2d.setColor(Color.BLACK);
for (int i = 0; i < polygon.npoints; ++i) {
final int x = polygon.xpoints[i];
final int y = polygon.ypoints[i];
g2d.drawOval(x - r, y - r, d, d);
}
// draw the selected vertex
if (selected >= 0 && selected < polygon.xpoints.length) {
g2d.setColor(Color.RED);
final int x = polygon.xpoints[selected];
final int y = polygon.ypoints[selected];
g2d.fillOval(x - r, y - r, d, d);
}
// draw the zone
g2d.setComposite(AlphaComposite.SrcAtop);
g2d.setColor(Color.BLACK);
g2d.setStroke(new BasicStroke(2.0F));
g2d.drawPolygon(polygon);
}
else if (path != null && !path.isEmpty()) {
final int ps = path.size();
Point p1, p2;
// draw the vertex markers
g2d.setColor(Color.BLACK);
for (int i = 0; i < ps; ++i) {
p1 = path.get(i);
g2d.drawOval(p1.x - r, p1.y - r, d, d);
}
// highlight the initial vertex if the active vertex overlaps it
p1 = path.get(0);
p2 = path.get(ps - 1);
final double dp = Point2D.distance(p1.x, p1.y, p2.x, p2.y);
if (dp <= 2 * CLICK_THRESHOLD) {
g2d.setColor(Color.YELLOW);
g2d.fillOval(p1.x - r, p1.y - r, d, d);
}
// draw the active vertex
g2d.setColor(Color.RED);
p1 = path.get(ps - 1);
g2d.fillOval(p1.x - r, p1.y - r, d, d);
// draw the path
p1 = path.get(0);
g2d.setComposite(AlphaComposite.SrcAtop);
g2d.setColor(Color.BLACK);
g2d.setStroke(new BasicStroke(2.0F));
for (int i = 1; i < ps; ++i) {
p2 = path.get(i);
g2d.drawLine(p1.x, p1.y, p2.x, p2.y);
p1 = p2;
}
}
}
protected void paintBackground(Graphics g) {
super.paint(g);
}
protected static Pair<Integer, Double> nearestVertex(Polygon p, int x, int y) {
int idx = -1;
double minDist = Double.MAX_VALUE;
for (int i = 0; i < p.npoints; ++i) {
final int x1 = p.xpoints[i];
final int y1 = p.ypoints[i];
final double d = Point2D.distance(x, y, x1, y1);
if (d < minDist) {
minDist = d;
idx = i;
}
}
return Pair.of(idx, minDist);
}
/**
 * Finds the polygon edge closest to {@code (x, y)}.
 *
 * <p>For each edge {@code i -> (i + 1) % npoints} the query point is
 * projected onto the segment, with the projection parameter {@code u}
 * clamped to [0, 1] so the candidate stays on the segment.
 *
 * <p>Bug fix: for a degenerate segment (coincident endpoints) the squared
 * length {@code norm} is 0, the old code computed {@code u = NaN}, the
 * ternary clamp let NaN through, and {@code Math.round(NaN) == 0} turned the
 * candidate into (0, 0). Such segments now use the endpoint itself.
 *
 * @param p the polygon to search
 * @param x query x coordinate
 * @param y query y coordinate
 * @return index of the winning edge's first vertex, the nearest point on
 *         that edge, and its distance; index is -1 for an empty polygon
 */
protected static Triple<Integer, Point, Double> nearestSegment(Polygon p, int x, int y) {
  int idx = -1;
  int minX = 0;
  int minY = 0;
  double minDist = Double.MAX_VALUE;
  for (int i = 0; i < p.npoints; ++i) {
    final int j = (i + 1) % p.npoints;
    final int x1 = p.xpoints[i];
    final int y1 = p.ypoints[i];
    final int px = p.xpoints[j] - x1;
    final int py = p.ypoints[j] - y1;
    final int norm = px * px + py * py;
    // Degenerate segment => project onto its (single) endpoint.
    double u = norm == 0 ? 0.0 : ((x - x1) * px + (y - y1) * py) / (double) norm;
    u = Math.max(0.0, Math.min(1.0, u));
    // (x3, y3) is the point nearest to (x, y) on the segment
    final int x3 = (int) Math.round(x1 + u * px);
    final int y3 = (int) Math.round(y1 + u * py);
    final double d = Point2D.distance(x, y, x3, y3);
    if (d < minDist) {
      minDist = d;
      minX = x3;
      minY = y3;
      idx = i;
    }
  }
  return Triple.of(idx, new Point(minX, minY), minDist);
}
protected static void deleteVertex(Polygon p, int i) {
p.xpoints = ArrayUtils.remove(p.xpoints, i);
p.ypoints = ArrayUtils.remove(p.ypoints, i);
--p.npoints;
p.invalidate();
}
protected static void insertVertex(Polygon p, int i, int x, int y) {
p.xpoints = ArrayUtils.insert(i, p.xpoints, x);
p.ypoints = ArrayUtils.insert(i, p.ypoints, y);
++p.npoints;
p.invalidate();
}
protected static void moveVertex(Polygon p, int i, int x, int y) {
p.xpoints[i] = x;
p.ypoints[i] = y;
p.invalidate();
}
private class ModifyPolygon extends MouseInputAdapter {
@Override
public void mouseDragged(MouseEvent e) {
if (SwingUtils.isMainMouseButtonDown(e)) {
if (selected >= 0 && selected < polygon.xpoints.length) {
moveVertex(polygon, selected, e.getX(), e.getY());
}
scrollAtEdge(e.getPoint(), 15);
repaint();
}
}
@Override
public void mouseClicked(MouseEvent e) {
if (SwingUtils.isContextMouseButtonDown(e) ||
(SwingUtils.isMainMouseButtonDown(e) && e.getClickCount() == 2)) {
final Triple<Integer, Point, Double> t = nearestSegment(polygon, e.getX(), e.getY());
final int ins = t.getLeft() + 1;
if (SwingUtils.isContextMouseButtonDown(e)) {
insertVertex(polygon, ins, e.getX(), e.getY());
}
else {
final Point np = t.getMiddle();
insertVertex(polygon, ins, np.x, np.y);
}
selected = ins;
repaint();
}
}
@Override
public void mousePressed(MouseEvent e) {
if (!SwingUtils.isMainMouseButtonDown(e)) {
return;
}
// On left button press, select nearest vertex within the threshold.
final Pair<Integer, Double> n = nearestVertex(polygon, e.getX(), e.getY());
final double d = n.getRight();
selected = d <= CLICK_THRESHOLD ? n.getLeft() : -1;
repaint();
}
public void scrollAtEdge(Point evtPt, int dist) {
final Point p = new Point(
evtPt.x - myScroll.getViewport().getViewPosition().x,
evtPt.y - myScroll.getViewport().getViewPosition().y
);
int dx = 0, dy = 0;
if (p.x < dist && p.x >= 0) {
dx = -1;
}
if (p.x >= myScroll.getViewport().getSize().width - dist
&& p.x < myScroll.getViewport().getSize().width) {
dx = 1;
}
if (p.y < dist && p.y >= 0) {
dy = -1;
}
if (p.y >= myScroll.getViewport().getSize().height - dist
&& p.y < myScroll.getViewport().getSize().height) {
dy = 1;
}
if (dx != 0 || dy != 0) {
Rectangle r = new Rectangle(myScroll.getViewport().getViewRect());
r.translate(2 * dist * dx, 2 * dist * dy);
r = r.intersection(new Rectangle(new Point(0, 0), getPreferredSize()));
scrollRectToVisible(r);
}
}
public void deleteKeyPressed() {
if (selected >= 0) {
deleteVertex(polygon, selected);
selected = -1;
// It's not possible to add a point when there's no segment to click
// so remove the whole polygon
if (polygon.npoints < 2) {
polygon = null;
removeMouseListener(this);
removeMouseMotionListener(this);
getInputMap(WHEN_IN_FOCUSED_WINDOW).remove(KeyStroke.getKeyStroke(KeyEvent.VK_DELETE, 0));
setupForCreate();
}
repaint();
}
}
}
private static Polygon pathToPolygon(List<Point> pl) {
final int ps = pl.size();
final int[] xpoints = new int[ps];
final int[] ypoints = new int[ps];
for (int i = 0; i < ps; ++i) {
final Point p = pl.get(i);
xpoints[i] = p.x;
ypoints[i] = p.y;
}
return new Polygon(xpoints, ypoints, ps);
}
private class DefinePicker extends MouseInputAdapter {
@Override
public void mousePressed(MouseEvent e) {
if (SwingUtils.isMainMouseButtonDown(e)) {
path.add(new Point(e.getPoint()));
}
}
@Override
public void mouseReleased(MouseEvent e) {
if (SwingUtils.isMainMouseButtonDown(e)) {
removeMouseListener(this);
removeMouseMotionListener(this);
final DefinePolygon dp = new DefinePolygon();
addMouseListener(dp);
addMouseMotionListener(dp);
getInputMap(WHEN_IN_FOCUSED_WINDOW).put(KeyStroke.getKeyStroke(KeyEvent.VK_ESCAPE, 0), ESCAPE);
getActionMap().put(ESCAPE, new AbstractAction() {
private static final long serialVersionUID = 1L;
@Override
public void actionPerformed(ActionEvent e) {
dp.escapeKeyPressed();
}
});
path.add(new Point(path.get(0)));
repaint();
}
}
@Override
public void mouseDragged(MouseEvent e) {
if (SwingUtils.isMainMouseButtonDown(e)) {
removeMouseListener(this);
removeMouseMotionListener(this);
final DefineRectangle dr = new DefineRectangle();
addMouseListener(dr);
addMouseMotionListener(dr);
final int x = path.get(0).x;
final int[] xpoints = { x, x, x, x };
final int y = path.get(0).y;
final int[] ypoints = { y, y, y, y };
polygon = new Polygon(xpoints, ypoints, 4);
repaint();
}
}
}
private class DefinePolygon extends MouseInputAdapter {
@Override
public void mouseReleased(MouseEvent e) {
if (!SwingUtils.isMainMouseButtonDown(e) || path.isEmpty()) {
return;
}
if (path.size() > 2) {
final Point beg = path.get(0);
final double d = Point2D.distance(e.getX(), e.getY(), beg.x, beg.y);
if (d <= CLICK_THRESHOLD) {
path.remove(path.size() - 1);
polygon = pathToPolygon(path);
selected = 0;
path = null;
removeMouseListener(this);
removeMouseMotionListener(this);
getInputMap(WHEN_IN_FOCUSED_WINDOW).remove(KeyStroke.getKeyStroke(KeyEvent.VK_ESCAPE, 0));
setupForEdit();
return;
}
}
path.add(new Point(e.getPoint()));
repaint();
}
private void moveEndpoint(Point p) {
if (!path.isEmpty()) {
path.get(path.size() - 1).setLocation(p);
repaint();
}
}
@Override
public void mouseDragged(MouseEvent e) {
if (SwingUtils.isMainMouseButtonDown(e)) {
moveEndpoint(e.getPoint());
}
}
@Override
public void mouseMoved(MouseEvent e) {
moveEndpoint(e.getPoint());
}
public void escapeKeyPressed() {
path.clear();
removeMouseListener(this);
removeMouseMotionListener(this);
getInputMap(WHEN_IN_FOCUSED_WINDOW).remove(KeyStroke.getKeyStroke(KeyEvent.VK_ESCAPE, 0));
setupForCreate();
}
}
private class DefineRectangle extends MouseInputAdapter {
@Override
public void mouseDragged(MouseEvent e) {
if (SwingUtils.isMainMouseButtonDown(e)) {
polygon.xpoints[1] = polygon.xpoints[2] = e.getX();
polygon.ypoints[2] = polygon.ypoints[3] = e.getY();
repaint();
}
}
@Override
public void mouseReleased(MouseEvent e) {
if (SwingUtils.isMainMouseButtonDown(e)) {
selected = nearestVertex(polygon, e.getX(), e.getY()).getLeft();
removeMouseListener(this);
removeMouseMotionListener(this);
setupForEdit();
}
}
}
public static void main(String[] args) {
final JFrame f = new JFrame();
f.add(new PolygonEditor(null));
f.setSize(500, 500);
f.addWindowListener(new WindowAdapter() {
@Override
public void windowClosing(WindowEvent e) {
System.exit(0);
}
});
f.setVisible(true);
}
}
|
Cleanup.
|
vassal-app/src/main/java/VASSAL/build/module/map/boardPicker/board/mapgrid/PolygonEditor.java
|
Cleanup.
|
|
Java
|
apache-2.0
|
34a9b89d2992d133dafcbafd45c636cef637ac6a
| 0
|
Hi-Fi/robotframework-selenium2library-java
|
package com.github.markusbernhardt.seleniumlibrary.keywords;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.python.core.PyString;
import org.python.util.PythonInterpreter;
import org.robotframework.javalib.annotation.ArgumentNames;
import org.robotframework.javalib.annotation.Autowired;
import org.robotframework.javalib.annotation.RobotKeyword;
import org.robotframework.javalib.annotation.RobotKeywordOverload;
import org.robotframework.javalib.annotation.RobotKeywords;
import com.github.markusbernhardt.seleniumlibrary.RunOnFailureKeywordsAdapter;
import com.github.markusbernhardt.seleniumlibrary.SeleniumLibraryNonFatalException;
@RobotKeywords
public class Logging extends RunOnFailureKeywordsAdapter {
protected final static Map<String, String[]> VALID_LOG_LEVELS;
protected static String logDir = null;
static {
VALID_LOG_LEVELS = new HashMap<String, String[]>();
VALID_LOG_LEVELS.put("debug", new String[] { "debug", "" });
VALID_LOG_LEVELS.put("html", new String[] { "info", ", True, False" });
VALID_LOG_LEVELS.put("info", new String[] { "info", "" });
VALID_LOG_LEVELS.put("trace", new String[] { "trace", "" });
VALID_LOG_LEVELS.put("warn", new String[] { "warn", "" });
VALID_LOG_LEVELS.put("error", new String[] { "error", "" });
}
/**
* Instantiated BrowserManagement keyword bean
*/
@Autowired
protected BrowserManagement browserManagement;
@Autowired
protected Window window;
// ##############################
// Keywords
// ##############################
/** Overload defaulting the log level to INFO. */
@RobotKeywordOverload
public List<String> logWindowIdentifiers() {
	return logWindowIdentifiers("INFO");
}
/**
 * Logs and returns the id attributes of all windows known to the current
 * browser instance.<br>
 * <br>
 * See `Introduction` for details about the <b>logLevel</b>.<br>
 *
 * @param logLevel
 *            Default=INFO. Optional log level.
 * @return List of window id attributes.
 *
 * @see BrowserManagement#getWindowIdentifiers
 */
@RobotKeyword
@ArgumentNames({ "logLevel=INFO" })
public List<String> logWindowIdentifiers(String logLevel) {
	List<String> windowIdentifiers = window.getWindowIdentifiers();
	for (String windowIdentifier : windowIdentifiers) {
		log(windowIdentifier, logLevel);
	}
	return windowIdentifiers;
}
@RobotKeywordOverload
public List<String> logWindowNames() {
return logWindowNames("INFO");
}
/**
* Logs and returns the names of all windows known to the current browser
* instance.<br>
* <br>
* See `Introduction` for details about the <b>logLevel</b>.<br>
*
* @param logLevel
* Default=INFO. Optional log level.
* @return List of windows names.
*
* @see BrowserManagement#getWindowNames
*/
@RobotKeyword
@ArgumentNames({ "logLevel=INFO" })
public List<String> logWindowNames(String logLevel) {
List<String> windowIdentifiers = window.getWindowNames();
for (String windowIdentifier : windowIdentifiers) {
log(windowIdentifier, logLevel);
}
return windowIdentifiers;
}
@RobotKeywordOverload
public List<String> logWindowTitles() {
return logWindowTitles("INFO");
}
/**
* Logs and returns the titles of all windows known to the current browser
* instance.<br>
* <br>
* See `Introduction` for details about the <b>logLevel</b>.<br>
*
* @param logLevel
* Default=INFO. Optional log level.
* @return List of window titles.
*
* @see BrowserManagement#getWindowTitles
*/
@RobotKeyword
@ArgumentNames({ "logLevel=INFO" })
public List<String> logWindowTitles(String logLevel) {
List<String> windowIdentifiers = window.getWindowTitles();
for (String windowIdentifier : windowIdentifiers) {
log(windowIdentifier, logLevel);
}
return windowIdentifiers;
}
@RobotKeywordOverload
public String logLocation() {
return logLocation("INFO");
}
/**
* Logs and returns the location of the current browser instance.<br>
* <br>
* See `Introduction` for details about the <b>logLevel</b>.<br>
*
* @param logLevel
* Default=INFO. Optional log level.
* @return The current location.
*
* @see BrowserManagement#getLocation
*/
@RobotKeyword
@ArgumentNames({ "logLevel=INFO" })
public String logLocation(String logLevel) {
String actual = browserManagement.getLocation();
log(actual, logLevel);
return actual;
}
@RobotKeywordOverload
public String logSource() {
return logSource("INFO");
}
/**
* Logs and returns the entire html source of the current page or frame.<br>
* <br>
* See `Introduction` for details about the <b>logLevel</b>.<br>
*
* @param logLevel
* Default=INFO. Optional log level.
* @return The entire html source.
*
* @see BrowserManagement#getSource
*/
@RobotKeyword
@ArgumentNames({ "logLevel=INFO" })
public String logSource(String logLevel) {
String actual = browserManagement.getSource();
log(actual, logLevel);
return actual;
}
@RobotKeywordOverload
public String logTitle() {
return logTitle("INFO");
}
/**
* Logs and returns the title of current page.<br>
* <br>
* See `Introduction` for details about the <b>logLevel</b>.<br>
*
* @param logLevel
* Default=INFO. Optional log level.
* @return The page title.
*
* @see BrowserManagement#getSource
*/
@RobotKeyword
@ArgumentNames({ "logLevel=INFO" })
public String logTitle(String logLevel) {
String actual = browserManagement.getTitle();
log(actual, logLevel);
return actual;
}
@RobotKeywordOverload
public String logSystemInfo() {
return logSystemInfo("INFO");
}
/**
* Logs and returns basic system information about the execution
* environment.<br>
* <br>
* See `Introduction` for details about the <b>logLevel</b>.<br>
*
* @param logLevel
* Default=INFO. Optional log level.
* @return System information.
*
* @see BrowserManagement#getSystemInfo
*/
@RobotKeyword
@ArgumentNames({ "logLevel=INFO" })
public String logSystemInfo(String logLevel) {
String actual = browserManagement.getSystemInfo();
log(actual, logLevel);
return actual;
}
@RobotKeywordOverload
public String logRemoteCapabilities() {
return logRemoteCapabilities("INFO");
}
/**
* Logs and returns the actually supported capabilities of the remote browser
* instance.<br>
* <br>
* Not all server implementations will support every WebDriver feature.
* Therefore, the client and server should use JSON objects with the properties
* listed below when describing which features a user requests that a session
* support. <b>If a session cannot support a capability that is requested in the
* desired capabilities, no error is thrown;</b> a read-only capabilities object
* is returned that indicates the capabilities the session actually supports.
* For more information see:
* <a href= "http://code.google.com/p/selenium/wiki/DesiredCapabilities"
* >DesiredCapabilities</a><br>
* <br>
* See `Introduction` for details about the <b>logLevel</b>.<br>
*
* @param logLevel
* Default=INFO. Optional log level.
* @return The capabilities of the remote node.
*
* @see BrowserManagement#getRemoteCapabilities
*/
@RobotKeyword
@ArgumentNames({ "logLevel=INFO" })
public String logRemoteCapabilities(String logLevel) {
String actual = browserManagement.getRemoteCapabilities();
log(actual, logLevel);
return actual;
}
@RobotKeywordOverload
public String logRemoteSessionId() {
return logRemoteSessionId("INFO");
}
/**
* Logs and returns the session id of the remote browser instance.<br>
* <br>
* See `Introduction` for details about the <b>logLevel</b>.<br>
*
* @param logLevel
* Default=INFO. Optional log level.
* @return The remote session id.
*
* @see BrowserManagement#getRemoteSessionId
*/
@RobotKeyword
@ArgumentNames({ "logLevel=INFO" })
public String logRemoteSessionId(String logLevel) {
String actual = browserManagement.getRemoteSessionId();
log(actual, logLevel);
return actual;
}
/**
* Set the <b>logDirectory</b>, where captured screenshots are stored, to some
* custom path.<br>
* <br>
* Fails, if either the given path does not exist, is no directory or is not
* writable.<br>
*
* @param logDirectory
* The directory to log to.
* @throws Exception
* - if anything goes wrong.
*/
@RobotKeyword
@ArgumentNames({ "logDirectory" })
public void setLogDirectory(String logDirectory) throws Exception {
File file = new File(logDirectory);
if (file.exists() && file.isDirectory() && file.canWrite()) {
Logging.setLogDir(file.getAbsolutePath());
} else {
throw new Exception(
"Location given as parameter: " + logDirectory + " must exist and must be a writeable directory!");
}
}
// ##############################
// Internal Methods
// ##############################
protected void trace(String msg) {
log(msg, "trace");
}
protected void debug(String msg) {
log(msg, "debug");
}
protected void info(String msg) {
log(msg, "info");
}
protected void html(String msg) {
log(msg, "html");
}
protected void warn(String msg) {
log(msg, "warn");
}
protected void error(String msg) {
log(msg, "error");
}
protected void log(String msg, String logLevel) {
String[] methodParameters = VALID_LOG_LEVELS.get(logLevel.toLowerCase());
if (methodParameters != null) {
log0(msg, methodParameters[0], methodParameters[1]);
} else {
throw new SeleniumLibraryNonFatalException(String.format("Given log level %s is invalid.", logLevel));
}
}
protected void log0(String msg, String methodName, String methodArguments) {
msg = String.valueOf(msg);
if (msg.length() > 1024) {
// Message is too large.
// There is a hard limit of 100k in the Jython source code parser
try {
// Write message to temp file
File tempFile = File.createTempFile("SeleniumLibrary-", ".log");
tempFile.deleteOnExit();
FileWriter writer = new FileWriter(tempFile);
writer.write(msg);
writer.close();
// Read the message in Python back and log it.
loggingPythonInterpreter.get()
.exec(String.format(
"from __future__ import with_statement\n" + "\n" + "with open('%s', 'r') as msg_file:\n"
+ " msg = msg_file.read()\n" + " logger.%s(msg%s)",
tempFile.getAbsolutePath().replace("\\", "\\\\"), methodName, methodArguments));
} catch (IOException e) {
throw new SeleniumLibraryNonFatalException("Error in handling temp file for long log message.", e);
}
} else {
// Message is small enough to get parsed by Jython
loggingPythonInterpreter.get().exec(String.format("logger.%s('%s'%s)", methodName,
msg.replace("\\", "\\\\").replace("'", "\\'").replace("\n", "\\n"), methodArguments));
}
}
protected File getLogDir() {
if (logDir == null
&& !loggingPythonInterpreter.get().eval("EXECUTION_CONTEXTS.current").toString().equals("None")) {
PyString logDirName = (PyString) loggingPythonInterpreter.get()
.eval("BuiltIn().get_variables()['${LOG FILE}']");
if (logDirName != null && !(logDirName.asString().toUpperCase().equals("NONE"))) {
return new File(logDirName.asString()).getParentFile();
}
logDirName = (PyString) loggingPythonInterpreter.get().eval("BuiltIn().get_variables()['${OUTPUTDIR}']");
return new File(logDirName.asString()).getParentFile();
} else {
return new File(logDir);
}
}
public static void setLogDir(String logDirectory) {
logDir = logDirectory;
}
protected static ThreadLocal<PythonInterpreter> loggingPythonInterpreter = new ThreadLocal<PythonInterpreter>() {
@Override
protected PythonInterpreter initialValue() {
PythonInterpreter pythonInterpreter = new PythonInterpreter();
pythonInterpreter.exec(
"from robot.libraries.BuiltIn import BuiltIn; from robot.running.context import EXECUTION_CONTEXTS; from robot.api import logger;");
return pythonInterpreter;
}
};
}
|
src/main/java/com/github/markusbernhardt/seleniumlibrary/keywords/Logging.java
|
package com.github.markusbernhardt.seleniumlibrary.keywords;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.python.core.PyString;
import org.python.util.PythonInterpreter;
import org.robotframework.javalib.annotation.ArgumentNames;
import org.robotframework.javalib.annotation.Autowired;
import org.robotframework.javalib.annotation.RobotKeyword;
import org.robotframework.javalib.annotation.RobotKeywordOverload;
import org.robotframework.javalib.annotation.RobotKeywords;
import com.github.markusbernhardt.seleniumlibrary.RunOnFailureKeywordsAdapter;
import com.github.markusbernhardt.seleniumlibrary.SeleniumLibraryNonFatalException;
@RobotKeywords
public class Logging extends RunOnFailureKeywordsAdapter {
protected final static Map<String, String[]> VALID_LOG_LEVELS;
protected static String logDir = null;
static {
VALID_LOG_LEVELS = new HashMap<String, String[]>();
VALID_LOG_LEVELS.put("debug", new String[] { "debug", "" });
VALID_LOG_LEVELS.put("html", new String[] { "info", ", True, False" });
VALID_LOG_LEVELS.put("info", new String[] { "info", "" });
VALID_LOG_LEVELS.put("trace", new String[] { "trace", "" });
VALID_LOG_LEVELS.put("warn", new String[] { "warn", "" });
VALID_LOG_LEVELS.put("error", new String[] { "error", "" });
}
/**
* Instantiated BrowserManagement keyword bean
*/
@Autowired
protected BrowserManagement browserManagement;
// ##############################
// Keywords
// ##############################
@RobotKeywordOverload
public List<String> logWindowIdentifiers() {
return logWindowIdentifiers("INFO");
}
/**
* Logs and returns the id attributes of all windows known to the current
* browser instance.<br>
*
* @param logLevel
* Default=INFO. Optional log level.
* @return List of window id attributes.
*
* @see BrowserManagement#getWindowIdentifiers
*/
@RobotKeyword
@ArgumentNames({ "logLevel=INFO" })
public List<String> logWindowIdentifiers(String logLevel) {
List<String> windowIdentifiers = browserManagement.getWindowIdentifiers();
for (String windowIdentifier : windowIdentifiers) {
log(windowIdentifier, logLevel);
}
return windowIdentifiers;
}
@RobotKeywordOverload
public List<String> logWindowNames() {
return logWindowNames("INFO");
}
/**
* Logs and returns the names of all windows known to the current browser
* instance.<br>
* <br>
* See `Introduction` for details about the <b>logLevel</b>.<br>
*
* @param logLevel
* Default=INFO. Optional log level.
* @return List of windows names.
*
* @see BrowserManagement#getWindowNames
*/
@RobotKeyword
@ArgumentNames({ "logLevel=INFO" })
public List<String> logWindowNames(String logLevel) {
List<String> windowIdentifiers = browserManagement.getWindowNames();
for (String windowIdentifier : windowIdentifiers) {
log(windowIdentifier, logLevel);
}
return windowIdentifiers;
}
@RobotKeywordOverload
public List<String> logWindowTitles() {
return logWindowTitles("INFO");
}
/**
* Logs and returns the titles of all windows known to the current browser
* instance.<br>
* <br>
* See `Introduction` for details about the <b>logLevel</b>.<br>
*
* @param logLevel
* Default=INFO. Optional log level.
* @return List of window titles.
*
* @see BrowserManagement#getWindowTitles
*/
@RobotKeyword
@ArgumentNames({ "logLevel=INFO" })
public List<String> logWindowTitles(String logLevel) {
List<String> windowIdentifiers = browserManagement.getWindowTitles();
for (String windowIdentifier : windowIdentifiers) {
log(windowIdentifier, logLevel);
}
return windowIdentifiers;
}
@RobotKeywordOverload
public String logLocation() {
return logLocation("INFO");
}
/**
* Logs and returns the location of the current browser instance.<br>
* <br>
* See `Introduction` for details about the <b>logLevel</b>.<br>
*
* @param logLevel
* Default=INFO. Optional log level.
* @return The current location.
*
* @see BrowserManagement#getLocation
*/
@RobotKeyword
@ArgumentNames({ "logLevel=INFO" })
public String logLocation(String logLevel) {
String actual = browserManagement.getLocation();
log(actual, logLevel);
return actual;
}
@RobotKeywordOverload
public String logSource() {
return logSource("INFO");
}
/**
* Logs and returns the entire html source of the current page or frame.<br>
* <br>
* See `Introduction` for details about the <b>logLevel</b>.<br>
*
* @param logLevel
* Default=INFO. Optional log level.
* @return The entire html source.
*
* @see BrowserManagement#getSource
*/
@RobotKeyword
@ArgumentNames({ "logLevel=INFO" })
public String logSource(String logLevel) {
String actual = browserManagement.getSource();
log(actual, logLevel);
return actual;
}
@RobotKeywordOverload
public String logTitle() {
return logTitle("INFO");
}
/**
* Logs and returns the title of current page.<br>
* <br>
* See `Introduction` for details about the <b>logLevel</b>.<br>
*
* @param logLevel
* Default=INFO. Optional log level.
* @return The page title.
*
* @see BrowserManagement#getSource
*/
@RobotKeyword
@ArgumentNames({ "logLevel=INFO" })
public String logTitle(String logLevel) {
String actual = browserManagement.getTitle();
log(actual, logLevel);
return actual;
}
@RobotKeywordOverload
public String logSystemInfo() {
return logSystemInfo("INFO");
}
/**
* Logs and returns basic system information about the execution
* environment.<br>
* <br>
* See `Introduction` for details about the <b>logLevel</b>.<br>
*
* @param logLevel
* Default=INFO. Optional log level.
* @return System information.
*
* @see BrowserManagement#getSystemInfo
*/
@RobotKeyword
@ArgumentNames({ "logLevel=INFO" })
public String logSystemInfo(String logLevel) {
String actual = browserManagement.getSystemInfo();
log(actual, logLevel);
return actual;
}
@RobotKeywordOverload
public String logRemoteCapabilities() {
return logRemoteCapabilities("INFO");
}
/**
* Logs and returns the actually supported capabilities of the remote browser
* instance.<br>
* <br>
* Not all server implementations will support every WebDriver feature.
* Therefore, the client and server should use JSON objects with the properties
* listed below when describing which features a user requests that a session
* support. <b>If a session cannot support a capability that is requested in the
* desired capabilities, no error is thrown;</b> a read-only capabilities object
* is returned that indicates the capabilities the session actually supports.
* For more information see:
* <a href= "http://code.google.com/p/selenium/wiki/DesiredCapabilities"
* >DesiredCapabilities</a><br>
* <br>
* See `Introduction` for details about the <b>logLevel</b>.<br>
*
* @param logLevel
* Default=INFO. Optional log level.
* @return The capabilities of the remote node.
*
* @see BrowserManagement#getRemoteCapabilities
*/
@RobotKeyword
@ArgumentNames({ "logLevel=INFO" })
public String logRemoteCapabilities(String logLevel) {
String actual = browserManagement.getRemoteCapabilities();
log(actual, logLevel);
return actual;
}
@RobotKeywordOverload
public String logRemoteSessionId() {
return logRemoteSessionId("INFO");
}
/**
* Logs and returns the session id of the remote browser instance.<br>
* <br>
* See `Introduction` for details about the <b>logLevel</b>.<br>
*
* @param logLevel
* Default=INFO. Optional log level.
* @return The remote session id.
*
* @see BrowserManagement#getRemoteSessionId
*/
@RobotKeyword
@ArgumentNames({ "logLevel=INFO" })
public String logRemoteSessionId(String logLevel) {
String actual = browserManagement.getRemoteSessionId();
log(actual, logLevel);
return actual;
}
/**
* Set the <b>logDirectory</b>, where captured screenshots are stored, to some
* custom path.<br>
* <br>
* Fails, if either the given path does not exist, is no directory or is not
* writable.<br>
*
* @param logDirectory
* The directory to log to.
* @throws Exception
* - if anything goes wrong.
*/
@RobotKeyword
@ArgumentNames({ "logDirectory" })
public void setLogDirectory(String logDirectory) throws Exception {
File file = new File(logDirectory);
if (file.exists() && file.isDirectory() && file.canWrite()) {
Logging.setLogDir(file.getAbsolutePath());
} else {
throw new Exception(
"Location given as parameter: " + logDirectory + " must exist and must be a writeable directory!");
}
}
// ##############################
// Internal Methods
// ##############################
protected void trace(String msg) {
log(msg, "trace");
}
protected void debug(String msg) {
log(msg, "debug");
}
protected void info(String msg) {
log(msg, "info");
}
protected void html(String msg) {
log(msg, "html");
}
protected void warn(String msg) {
log(msg, "warn");
}
protected void error(String msg) {
log(msg, "error");
}
protected void log(String msg, String logLevel) {
String[] methodParameters = VALID_LOG_LEVELS.get(logLevel.toLowerCase());
if (methodParameters != null) {
log0(msg, methodParameters[0], methodParameters[1]);
} else {
throw new SeleniumLibraryNonFatalException(String.format("Given log level %s is invalid.", logLevel));
}
}
protected void log0(String msg, String methodName, String methodArguments) {
msg = String.valueOf(msg);
if (msg.length() > 1024) {
// Message is too large.
// There is a hard limit of 100k in the Jython source code parser
try {
// Write message to temp file
File tempFile = File.createTempFile("SeleniumLibrary-", ".log");
tempFile.deleteOnExit();
FileWriter writer = new FileWriter(tempFile);
writer.write(msg);
writer.close();
// Read the message in Python back and log it.
loggingPythonInterpreter.get()
.exec(String.format(
"from __future__ import with_statement\n" + "\n" + "with open('%s', 'r') as msg_file:\n"
+ " msg = msg_file.read()\n" + " logger.%s(msg%s)",
tempFile.getAbsolutePath().replace("\\", "\\\\"), methodName, methodArguments));
} catch (IOException e) {
throw new SeleniumLibraryNonFatalException("Error in handling temp file for long log message.", e);
}
} else {
// Message is small enough to get parsed by Jython
loggingPythonInterpreter.get().exec(String.format("logger.%s('%s'%s)", methodName,
msg.replace("\\", "\\\\").replace("'", "\\'").replace("\n", "\\n"), methodArguments));
}
}
protected File getLogDir() {
if (logDir == null
&& !loggingPythonInterpreter.get().eval("EXECUTION_CONTEXTS.current").toString().equals("None")) {
PyString logDirName = (PyString) loggingPythonInterpreter.get()
.eval("BuiltIn().get_variables()['${LOG FILE}']");
if (logDirName != null && !(logDirName.asString().toUpperCase().equals("NONE"))) {
return new File(logDirName.asString()).getParentFile();
}
logDirName = (PyString) loggingPythonInterpreter.get().eval("BuiltIn().get_variables()['${OUTPUTDIR}']");
return new File(logDirName.asString()).getParentFile();
} else {
return new File(logDir);
}
}
public static void setLogDir(String logDirectory) {
logDir = logDirectory;
}
protected static ThreadLocal<PythonInterpreter> loggingPythonInterpreter = new ThreadLocal<PythonInterpreter>() {
@Override
protected PythonInterpreter initialValue() {
PythonInterpreter pythonInterpreter = new PythonInterpreter();
pythonInterpreter.exec(
"from robot.libraries.BuiltIn import BuiltIn; from robot.running.context import EXECUTION_CONTEXTS; from robot.api import logger;");
return pythonInterpreter;
}
};
}
|
Fixed keyword refactoring changes
|
src/main/java/com/github/markusbernhardt/seleniumlibrary/keywords/Logging.java
|
Fixed keyword refactoring changes
|
|
Java
|
apache-2.0
|
43e1e3e366b979ed3cdae63f180edf3ce77b7a4e
| 0
|
mdamt/pdfbox,mathieufortin01/pdfbox,benmccann/pdfbox,joansmith/pdfbox,mdamt/pdfbox,ChunghwaTelecom/pdfbox,torakiki/sambox,benmccann/pdfbox,BezrukovM/veraPDF-pdfbox,veraPDF/veraPDF-pdfbox,veraPDF/veraPDF-pdfbox,BezrukovM/veraPDF-pdfbox,ZhenyaM/veraPDF-pdfbox,ZhenyaM/veraPDF-pdfbox,mathieufortin01/pdfbox,gavanx/pdflearn,gavanx/pdflearn,torakiki/sambox,ChunghwaTelecom/pdfbox,joansmith/pdfbox
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.pdfbox.util.operator.pagedrawer;
import java.awt.Dimension;
import java.awt.Graphics2D;
import java.awt.geom.AffineTransform;
import java.awt.image.BufferedImage;
import java.io.IOException;
import java.util.List;
import org.apache.pdfbox.pdfviewer.PageDrawer;
import org.apache.pdfbox.pdmodel.PDPage;
import org.apache.pdfbox.pdmodel.graphics.xobject.PDInlinedImage;
import org.apache.pdfbox.util.ImageParameters;
import org.apache.pdfbox.util.Matrix;
import org.apache.pdfbox.util.PDFOperator;
import org.apache.pdfbox.util.operator.OperatorProcessor;
/**
* Implementation of content stream operator for page drawer.
*
* @author <a href="mailto:ben@benlitchfield.com">Ben Litchfield</a>
* @version $Revision: 1.2 $
*/
public class BeginInlineImage extends OperatorProcessor
{
/**
* process : BI : begin inline image.
* @param operator The operator that is being executed.
* @param arguments List
* @throws IOException If there is an error displaying the inline image.
*/
public void process(PDFOperator operator, List arguments) throws IOException
{
PageDrawer drawer = (PageDrawer)context;
PDPage page = drawer.getPage();
Dimension pageSize = drawer.getPageSize();
Graphics2D graphics = drawer.getGraphics();
//begin inline image object
ImageParameters params = operator.getImageParameters();
PDInlinedImage image = new PDInlinedImage();
image.setImageParameters( params );
image.setImageData( operator.getImageData() );
BufferedImage awtImage = image.createImage( context.getColorSpaces() );
if (awtImage == null)
{
logger().warn("BeginInlineImage.process(): createImage returned NULL");
return;
}
int imageWidth = awtImage.getWidth();
int imageHeight = awtImage.getHeight();
double pageHeight = pageSize.getHeight();
Matrix ctm = drawer.getGraphicsState().getCurrentTransformationMatrix();
int pageRotation = page.findRotation();
AffineTransform ctmAT = ctm.createAffineTransform();
ctmAT.scale(1f/imageWidth, 1f/imageHeight);
Matrix rotationMatrix = new Matrix();
rotationMatrix.setFromAffineTransform( ctmAT );
if (pageRotation == 0 || pageRotation == 180)
{
rotationMatrix.setValue(2,1,(float)pageHeight-ctm.getYPosition()-ctm.getYScale());
}
else if (pageRotation == 90 || pageRotation == 270)
{
rotationMatrix.setValue(2,0,(float)ctm.getXPosition()-ctm.getYScale());
rotationMatrix.setValue(2,1,(float)pageHeight-ctm.getYPosition());
}
rotationMatrix.setValue(0, 1, (-1)*rotationMatrix.getValue(0, 1));
rotationMatrix.setValue(1, 0, (-1)*rotationMatrix.getValue(1, 0));
AffineTransform at = new AffineTransform(
rotationMatrix.getValue(0,0),rotationMatrix.getValue(0,1),
rotationMatrix.getValue(1,0), rotationMatrix.getValue( 1, 1),
rotationMatrix.getValue(2,0),rotationMatrix.getValue(2,1)
);
graphics.setClip(context.getGraphicsState().getCurrentClippingPath());
graphics.drawImage( awtImage, at, null );
}
}
|
src/main/java/org/apache/pdfbox/util/operator/pagedrawer/BeginInlineImage.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.pdfbox.util.operator.pagedrawer;
import java.awt.Graphics2D;
import java.awt.geom.AffineTransform;
import java.awt.image.BufferedImage;
import java.io.IOException;
import java.util.List;
import org.apache.pdfbox.pdfviewer.PageDrawer;
import org.apache.pdfbox.pdmodel.graphics.xobject.PDInlinedImage;
import org.apache.pdfbox.util.ImageParameters;
import org.apache.pdfbox.util.Matrix;
import org.apache.pdfbox.util.PDFOperator;
import org.apache.pdfbox.util.operator.OperatorProcessor;
/**
* Implementation of content stream operator for page drawer.
*
* @author <a href="mailto:ben@benlitchfield.com">Ben Litchfield</a>
* @version $Revision: 1.2 $
*/
public class BeginInlineImage extends OperatorProcessor
{
/**
* process : BI : begin inline image.
* @param operator The operator that is being executed.
* @param arguments List
* @throws IOException If there is an error displaying the inline image.
*/
public void process(PDFOperator operator, List arguments) throws IOException
{
PageDrawer drawer = (PageDrawer)context;
Graphics2D graphics = drawer.getGraphics();
//begin inline image object
ImageParameters params = operator.getImageParameters();
PDInlinedImage image = new PDInlinedImage();
image.setImageParameters( params );
image.setImageData( operator.getImageData() );
BufferedImage awtImage = image.createImage( context.getColorSpaces() );
Matrix ctm = drawer.getGraphicsState().getCurrentTransformationMatrix();
int width = awtImage.getWidth();
int height = awtImage.getHeight();
AffineTransform at = new AffineTransform(
ctm.getValue(0,0)/width,
ctm.getValue(0,1),
ctm.getValue(1,0),
ctm.getValue(1,1)/height,
ctm.getValue(2,0),
ctm.getValue(2,1)
);
graphics.setClip(context.getGraphicsState().getCurrentClippingPath());
graphics.drawImage( awtImage, at, null );
}
}
|
PDFBOX529: adding page rotation handling to inline image operator. Patch from Andrew Duffy (aduffy at simiolus dot com)
git-svn-id: 73746072a361c8027bb2177f17128911b05b2bab@814296 13f79535-47bb-0310-9956-ffa450edef68
|
src/main/java/org/apache/pdfbox/util/operator/pagedrawer/BeginInlineImage.java
|
PDFBOX529: adding page rotation handling to inline image operator. Patch from Andrew Duffy (aduffy at simiolus dot com)
|
|
Java
|
apache-2.0
|
9ba7760ff6d34651c8f62ab94853dba4a272eea7
| 0
|
jeffbrown/grailsnolib,jeffbrown/grailsnolib
|
/*
* Copyright 2004-2005 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.codehaus.groovy.grails.web.servlet.mvc;
import groovy.lang.Closure;
import groovy.lang.GroovyObject;
import groovy.lang.ParameterArray;
import java.util.Map;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.lang.WordUtils;
import org.codehaus.groovy.grails.commons.GrailsApplication;
import org.codehaus.groovy.grails.commons.GrailsControllerClass;
import org.codehaus.groovy.grails.web.servlet.GrailsHttpServletRequest;
import org.codehaus.groovy.grails.web.servlet.GrailsHttpServletResponse;
import org.codehaus.groovy.grails.web.servlet.mvc.exceptions.IncompatibleParameterCountException;
import org.codehaus.groovy.grails.web.servlet.mvc.exceptions.NoClosurePropertyForURIException;
import org.codehaus.groovy.grails.web.servlet.mvc.exceptions.NoViewNameDefinedException;
import org.codehaus.groovy.grails.web.servlet.mvc.exceptions.UnknownControllerException;
import org.codehaus.groovy.grails.web.servlet.mvc.exceptions.UnsupportedReturnValueException;
import org.springframework.beans.BeansException;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
import org.springframework.web.servlet.ModelAndView;
import org.springframework.web.servlet.mvc.Controller;
import org.springframework.web.util.UrlPathHelper;
/**
* <p>Base class for Grails controllers.
*
* @author Steven Devijver
* @since Jul 2, 2005
*/
public class SimpleGrailsController implements Controller, ApplicationContextAware {
private static final String SLASH = "/";
private UrlPathHelper urlPathHelper = new UrlPathHelper();
private GrailsApplication application = null;
private ApplicationContext applicationContext = null;
public SimpleGrailsController() {
super();
}
public void setApplicationContext(ApplicationContext applicationContext)
throws BeansException {
this.applicationContext = applicationContext;
}
public void setGrailsApplication(GrailsApplication application) {
this.application = application;
}
/**
* <p>This method wraps regular request and response objects into Grails request and response objects.
*
* <p>It can handle maps as model types next to ModelAndView instances.
*
* @param request HTTP request
* @param response HTTP response
* @return the model
*/
public ModelAndView handleRequest(HttpServletRequest request,
HttpServletResponse response) throws Exception {
// Step 1: determine the correct URI of the request.
String uri = this.urlPathHelper.getLookupPathForRequest(request);
if (uri.indexOf("?") > -1) {
uri = uri.substring(0, uri.indexOf("?"));
}
// Step 2: lookup the controller in the application.
GrailsControllerClass controllerClass = this.application.getControllerByURI(uri);
if (controllerClass == null) {
throw new UnknownControllerException("No controller found for URI [" + uri + "]!");
}
String controllerName = WordUtils.uncapitalize(controllerClass.getName());
// Step 3: load controller from application context.
GroovyObject controller = (GroovyObject)this.applicationContext.getBean(controllerClass.getFullName());
// Step 4: get closure property name for URI.
String closurePropertyName = controllerClass.getClosurePropertyName(uri);
if (closurePropertyName == null) {
throw new NoClosurePropertyForURIException("Could not find closure property for URI [" + uri + "] for controller [" + controllerClass.getFullName() + "]!");
}
// Step 5: get the view name for this URI.
String viewName = controllerClass.getViewName(uri);
boolean viewNameBlank = (viewName == null || viewName.length() == 0);
// Step 6: get closure from closure property
Closure closure = (Closure)controller.getProperty(closurePropertyName);
// Step 7: determine argument count and execute.
Object returnValue = null;
if (closure.getParameterTypes().length == 1) {
// closure may have zero or one parameter, we cannot be sure.
returnValue = closure.call(new ParameterArray(new Object[] { new GrailsHttpServletRequest(request) }));
} else if (closure.getParameterTypes().length == 2) {
returnValue = closure.call(new ParameterArray(new Object[] { new GrailsHttpServletRequest(request), new GrailsHttpServletResponse(response) }));
} else {
throw new IncompatibleParameterCountException("Closure on property [" + closurePropertyName + "] in [" + controllerClass.getFullName() + "] has an incompatible parameter count [" + closure.getParameterTypes().length + "]! Supported values are 0 and 2.");
}
// Step 8: determine return value type and handle accordingly
if (returnValue == null) {
if (viewNameBlank) {
return null;
} else {
return new ModelAndView(viewName);
}
} else if (returnValue instanceof Map) {
if (viewNameBlank) {
throw new NoViewNameDefinedException("Map instance returned by and no view name specified for closure on property [" + closurePropertyName + "] in controller [" + controllerClass.getFullName() + "]!");
} else {
return new ModelAndView(viewName, (Map)returnValue);
}
} else if (returnValue instanceof ModelAndView) {
ModelAndView modelAndView = (ModelAndView)returnValue;
if (modelAndView.getView() == null && modelAndView.getViewName() == null) {
if (viewNameBlank) {
throw new NoViewNameDefinedException("ModelAndView instance returned by and no view name defined by nor for closure on property [" + closurePropertyName + "] in controller [" + controllerClass.getFullName() + "]!");
} else {
modelAndView.setViewName(viewName);
}
}
return modelAndView;
}
throw new UnsupportedReturnValueException("Return value [" + returnValue + "] is not supported for closure property [" + closurePropertyName + "] in controller [" + controllerClass.getFullName() + "]!");
}
}
|
src/web/org/codehaus/groovy/grails/web/servlet/mvc/SimpleGrailsController.java
|
/*
* Copyright 2004-2005 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.codehaus.groovy.grails.web.servlet.mvc;
import groovy.lang.Closure;
import groovy.lang.GroovyObject;
import java.util.Map;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.lang.WordUtils;
import org.codehaus.groovy.grails.commons.GrailsApplication;
import org.codehaus.groovy.grails.commons.GrailsControllerClass;
import org.codehaus.groovy.grails.web.servlet.GrailsHttpServletRequest;
import org.codehaus.groovy.grails.web.servlet.GrailsHttpServletResponse;
import org.codehaus.groovy.grails.web.servlet.mvc.exceptions.IncompatibleParameterCountException;
import org.codehaus.groovy.grails.web.servlet.mvc.exceptions.NoClosurePropertyForURIException;
import org.codehaus.groovy.grails.web.servlet.mvc.exceptions.NoViewNameDefinedException;
import org.codehaus.groovy.grails.web.servlet.mvc.exceptions.UnknownControllerException;
import org.codehaus.groovy.grails.web.servlet.mvc.exceptions.UnsupportedReturnValueException;
import org.springframework.beans.BeansException;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
import org.springframework.web.servlet.ModelAndView;
import org.springframework.web.servlet.mvc.Controller;
import org.springframework.web.util.UrlPathHelper;
/**
* <p>Spring MVC {@link Controller} implementation that dispatches requests to Grails controller closures.
*
* @author Steven Devijver
* @since Jul 2, 2005
*/
public class SimpleGrailsController implements Controller, ApplicationContextAware {

    private static final String SLASH = "/";

    /** Extracts the lookup path from the incoming request. */
    private UrlPathHelper urlPathHelper = new UrlPathHelper();
    /** Grails application used to resolve controllers from request URIs. */
    private GrailsApplication application = null;
    /** Spring context from which controller beans are loaded. */
    private ApplicationContext applicationContext = null;

    public SimpleGrailsController() {
        super();
    }

    public void setApplicationContext(ApplicationContext applicationContext)
            throws BeansException {
        this.applicationContext = applicationContext;
    }

    public void setGrailsApplication(GrailsApplication application) {
        this.application = application;
    }

    /**
     * Handles an HTTP request by locating the Grails controller closure mapped to the
     * request URI, invoking it with Grails-wrapped request/response objects, and
     * translating the closure's return value (null, Map, or ModelAndView) into a
     * Spring {@link ModelAndView}.
     *
     * @param request HTTP request
     * @param response HTTP response
     * @return the model, or {@code null} when the closure returned nothing and no view is configured
     * @throws UnknownControllerException when no controller is mapped to the URI
     * @throws NoClosurePropertyForURIException when the controller has no closure for the URI
     * @throws IncompatibleParameterCountException when the closure declares an unsupported parameter count
     * @throws NoViewNameDefinedException when a model is returned but no view name is available
     * @throws UnsupportedReturnValueException when the closure returns an unsupported type
     */
    public ModelAndView handleRequest(HttpServletRequest request,
            HttpServletResponse response) throws Exception {
        // Step 1: determine the correct URI of the request, stripping any query string.
        String uri = this.urlPathHelper.getLookupPathForRequest(request);
        int queryIndex = uri.indexOf('?');
        if (queryIndex > -1) {
            uri = uri.substring(0, queryIndex);
        }
        // Step 2: lookup the controller in the application.
        GrailsControllerClass controllerClass = this.application.getControllerByURI(uri);
        if (controllerClass == null) {
            throw new UnknownControllerException("No controller found for URI [" + uri + "]!");
        }
        // Step 3: load controller bean from the application context.
        // (The previously computed uncapitalized controller name was never used and has been removed.)
        GroovyObject controller = (GroovyObject)this.applicationContext.getBean(controllerClass.getFullName());
        // Step 4: get closure property name for URI.
        String closurePropertyName = controllerClass.getClosurePropertyName(uri);
        if (closurePropertyName == null) {
            throw new NoClosurePropertyForURIException("Could not find closure property for URI [" + uri + "] for controller [" + controllerClass.getFullName() + "]!");
        }
        // Step 5: get the view name for this URI (may be absent).
        String viewName = controllerClass.getViewName(uri);
        boolean viewNameBlank = (viewName == null || viewName.length() == 0);
        // Step 6: get closure from closure property.
        Closure closure = (Closure)controller.getProperty(closurePropertyName);
        // Step 7: determine argument count and execute.
        // A Groovy closure declared with zero parameters still reports one (implicit)
        // parameter type, which is why the user-facing message below speaks of 0 and 2.
        Object returnValue = null;
        if (closure.getParameterTypes().length == 1) {
            // closure may have zero or one parameter, we cannot be sure.
            returnValue = closure.call(new Object[] { new GrailsHttpServletRequest(request) });
        } else if (closure.getParameterTypes().length == 2) {
            returnValue = closure.call(new Object[] { new GrailsHttpServletRequest(request), new GrailsHttpServletResponse(response) });
        } else {
            throw new IncompatibleParameterCountException("Closure on property [" + closurePropertyName + "] in [" + controllerClass.getFullName() + "] has an incompatible parameter count [" + closure.getParameterTypes().length + "]! Supported values are 0 and 2.");
        }
        // Step 8: determine return value type and handle accordingly.
        if (returnValue == null) {
            if (viewNameBlank) {
                return null;
            } else {
                return new ModelAndView(viewName);
            }
        } else if (returnValue instanceof Map) {
            if (viewNameBlank) {
                throw new NoViewNameDefinedException("Map instance returned by and no view name specified for closure on property [" + closurePropertyName + "] in controller [" + controllerClass.getFullName() + "]!");
            } else {
                return new ModelAndView(viewName, (Map)returnValue);
            }
        } else if (returnValue instanceof ModelAndView) {
            ModelAndView modelAndView = (ModelAndView)returnValue;
            // Fill in the configured view name only when the closure did not set one itself.
            if (modelAndView.getView() == null && modelAndView.getViewName() == null) {
                if (viewNameBlank) {
                    throw new NoViewNameDefinedException("ModelAndView instance returned by and no view name defined by nor for closure on property [" + closurePropertyName + "] in controller [" + controllerClass.getFullName() + "]!");
                } else {
                    modelAndView.setViewName(viewName);
                }
            }
            return modelAndView;
        }
        throw new UnsupportedReturnValueException("Return value [" + returnValue + "] is not supported for closure property [" + closurePropertyName + "] in controller [" + controllerClass.getFullName() + "]!");
    }
}
|
Pass request and response parameters in ParameterArray object to closure.
git-svn-id: 29aad96320b2a07b98332cd568fc1316025c072f@119 1cfb16fd-6d17-0410-8ff1-b7e8e1e2867d
|
src/web/org/codehaus/groovy/grails/web/servlet/mvc/SimpleGrailsController.java
|
Pass request and response parameters in ParameterArray object to closure.
|
|
Java
|
apache-2.0
|
0335713738af0d9522b0c6645c404b0301821166
| 0
|
GabrielBrascher/cloudstack,GabrielBrascher/cloudstack,GabrielBrascher/cloudstack,GabrielBrascher/cloudstack,GabrielBrascher/cloudstack,GabrielBrascher/cloudstack,GabrielBrascher/cloudstack
|
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package com.cloud.hypervisor.vmware.mo;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.net.URLEncoder;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import com.vmware.vim25.VStorageObject;
import com.vmware.vim25.VStorageObjectConfigInfo;
import org.apache.commons.collections.CollectionUtils;
import org.apache.log4j.Logger;
import org.apache.commons.lang.StringUtils;
import com.google.gson.Gson;
import com.vmware.vim25.ArrayOfManagedObjectReference;
import com.vmware.vim25.ChoiceOption;
import com.vmware.vim25.CustomFieldStringValue;
import com.vmware.vim25.DistributedVirtualSwitchPortConnection;
import com.vmware.vim25.DynamicProperty;
import com.vmware.vim25.ElementDescription;
import com.vmware.vim25.GuestInfo;
import com.vmware.vim25.GuestOsDescriptor;
import com.vmware.vim25.HttpNfcLeaseDeviceUrl;
import com.vmware.vim25.HttpNfcLeaseInfo;
import com.vmware.vim25.HttpNfcLeaseState;
import com.vmware.vim25.ManagedObjectReference;
import com.vmware.vim25.ObjectContent;
import com.vmware.vim25.ObjectSpec;
import com.vmware.vim25.OptionValue;
import com.vmware.vim25.OvfCreateDescriptorParams;
import com.vmware.vim25.OvfCreateDescriptorResult;
import com.vmware.vim25.OvfFile;
import com.vmware.vim25.ParaVirtualSCSIController;
import com.vmware.vim25.PropertyFilterSpec;
import com.vmware.vim25.PropertySpec;
import com.vmware.vim25.TraversalSpec;
import com.vmware.vim25.VirtualBusLogicController;
import com.vmware.vim25.VirtualCdrom;
import com.vmware.vim25.VirtualCdromIsoBackingInfo;
import com.vmware.vim25.VirtualCdromRemotePassthroughBackingInfo;
import com.vmware.vim25.VirtualController;
import com.vmware.vim25.VirtualDevice;
import com.vmware.vim25.VirtualDeviceBackingInfo;
import com.vmware.vim25.VirtualDeviceConfigSpec;
import com.vmware.vim25.VirtualDeviceConfigSpecFileOperation;
import com.vmware.vim25.VirtualDeviceConfigSpecOperation;
import com.vmware.vim25.VirtualDeviceConnectInfo;
import com.vmware.vim25.VirtualDisk;
import com.vmware.vim25.VirtualDiskFlatVer1BackingInfo;
import com.vmware.vim25.VirtualDiskFlatVer2BackingInfo;
import com.vmware.vim25.VirtualDiskMode;
import com.vmware.vim25.VirtualDiskRawDiskMappingVer1BackingInfo;
import com.vmware.vim25.VirtualDiskSparseVer1BackingInfo;
import com.vmware.vim25.VirtualDiskSparseVer2BackingInfo;
import com.vmware.vim25.VirtualDiskType;
import com.vmware.vim25.VirtualEthernetCard;
import com.vmware.vim25.VirtualEthernetCardDistributedVirtualPortBackingInfo;
import com.vmware.vim25.VirtualHardwareOption;
import com.vmware.vim25.VirtualIDEController;
import com.vmware.vim25.VirtualLsiLogicController;
import com.vmware.vim25.VirtualLsiLogicSASController;
import com.vmware.vim25.VirtualMachineCloneSpec;
import com.vmware.vim25.VirtualMachineConfigInfo;
import com.vmware.vim25.VirtualMachineConfigOption;
import com.vmware.vim25.VirtualMachineConfigSpec;
import com.vmware.vim25.VirtualMachineConfigSummary;
import com.vmware.vim25.VirtualMachineFileInfo;
import com.vmware.vim25.VirtualMachineFileLayoutEx;
import com.vmware.vim25.VirtualMachineMessage;
import com.vmware.vim25.VirtualMachineMovePriority;
import com.vmware.vim25.VirtualMachinePowerState;
import com.vmware.vim25.VirtualMachineQuestionInfo;
import com.vmware.vim25.VirtualMachineRelocateDiskMoveOptions;
import com.vmware.vim25.VirtualMachineRelocateSpec;
import com.vmware.vim25.VirtualMachineRelocateSpecDiskLocator;
import com.vmware.vim25.VirtualMachineRuntimeInfo;
import com.vmware.vim25.VirtualMachineSnapshotInfo;
import com.vmware.vim25.VirtualMachineSnapshotTree;
import com.vmware.vim25.VirtualSCSIController;
import com.vmware.vim25.VirtualSCSISharing;
import com.cloud.hypervisor.vmware.mo.SnapshotDescriptor.SnapshotInfo;
import com.cloud.hypervisor.vmware.util.VmwareContext;
import com.cloud.hypervisor.vmware.util.VmwareHelper;
import com.cloud.utils.ActionDelegate;
import com.cloud.utils.Pair;
import com.cloud.utils.Ternary;
import com.cloud.utils.concurrency.NamedThreadFactory;
import com.cloud.utils.script.Script;
import static com.cloud.utils.NumbersUtil.toHumanReadableSize;
public class VirtualMachineMO extends BaseMO {
// Logger for this managed-object wrapper.
private static final Logger s_logger = Logger.getLogger(VirtualMachineMO.class);
// Shared pool running the background monitors that answer pending VM questions during power-on.
private static final ExecutorService MonitorServiceExecutor = Executors.newCachedThreadPool(new NamedThreadFactory("VM-Question-Monitor"));
// Choice keys passed to answerVM() when responding to VM questions.
public static final String ANSWER_YES = "0";
public static final String ANSWER_NO = "1";
// Cached environment-browser reference; presumably resolved lazily elsewhere in this class (initialization not visible in this chunk).
private ManagedObjectReference _vmEnvironmentBrowser = null;
// Wraps an existing VirtualMachine managed object reference.
public VirtualMachineMO(VmwareContext context, ManagedObjectReference morVm) {
super(context, morVm);
}
// Builds the managed object reference from its type/value pair.
public VirtualMachineMO(VmwareContext context, String morType, String morValue) {
super(context, morType, morValue);
}
// Returns the datacenter owning this VM together with its name; delegates to DatacenterMO.
public Pair<DatacenterMO, String> getOwnerDatacenter() throws Exception {
return DatacenterMO.getOwnerDatacenter(getContext(), getMor());
}
/**
 * Finds, among the datastores attached to this VM, the one whose name matches the
 * datastore named in the given full datastore path.
 *
 * @param dsFullPath full datastore path (e.g. "[ds1] dir/file.vmdk")
 * @return the matching datastore and its name, or null when none matches
 */
public Pair<DatastoreMO, String> getOwnerDatastore(String dsFullPath) throws Exception {
    String datastoreName = DatastoreFile.getDatastoreNameFromPath(dsFullPath);

    // Ask the property collector for the "name" property of every datastore
    // reachable from this VM through its "datastore" traversal.
    PropertySpec propSpec = new PropertySpec();
    propSpec.setType("Datastore");
    propSpec.getPathSet().add("name");

    TraversalSpec traversal = new TraversalSpec();
    traversal.setType("VirtualMachine");
    traversal.setPath("datastore");
    traversal.setName("vmDatastoreTraversal");

    ObjectSpec objSpec = new ObjectSpec();
    objSpec.setObj(_mor);
    objSpec.setSkip(Boolean.TRUE);
    objSpec.getSelectSet().add(traversal);

    PropertyFilterSpec filterSpec = new PropertyFilterSpec();
    filterSpec.getPropSet().add(propSpec);
    filterSpec.getObjectSet().add(objSpec);

    List<PropertyFilterSpec> filterSpecs = new ArrayList<PropertyFilterSpec>();
    filterSpecs.add(filterSpec);

    List<ObjectContent> contents = _context.getService().retrieveProperties(_context.getPropertyCollector(), filterSpecs);
    if (contents == null) {
        return null;
    }
    // Return the first datastore whose name equals the one in the path.
    for (ObjectContent content : contents) {
        DynamicProperty nameProp = content.getPropSet().get(0);
        if (nameProp.getVal().toString().equals(datastoreName)) {
            return new Pair<DatastoreMO, String>(new DatastoreMO(_context, content.getObj()), datastoreName);
        }
    }
    return null;
}
/** Returns the host this VM is currently running on, taken from its runtime info. */
public HostMO getRunningHost() throws Exception {
    return new HostMO(_context, getRuntimeInfo().getHost());
}
// Reads the VM's display name from the "name" property.
public String getVmName() throws Exception {
return (String)getContext().getVimClient().getDynamicProperty(_mor, "name");
}
// Reads the guest-OS information ("guest" property), including VMware Tools status.
public GuestInfo getVmGuestInfo() throws Exception {
return (GuestInfo)getContext().getVimClient().getDynamicProperty(_mor, "guest");
}
// Answers a pending VM question (e.g. the msg.uuid.altered prompt) with the given choice key.
public void answerVM(String questionId, String choice) throws Exception {
getContext().getService().answerVM(_mor, questionId, choice);
}
/** Reports whether VMware Tools is currently running inside the guest. */
public boolean isVMwareToolsRunning() throws Exception {
    GuestInfo guest = getVmGuestInfo();
    return guest != null && "guestToolsRunning".equalsIgnoreCase(guest.getToolsRunningStatus());
}
/**
 * Powers on the VM and waits for the task to complete. While the task runs, a
 * background monitor polls for pending VM questions and automatically answers
 * the "msg.uuid.altered" question with choice "1" ('moved it') to handle
 * out-of-band HA; all other questions and choices are only logged.
 *
 * @return true when the VM powered on (or was already on), false on task failure
 */
public boolean powerOn() throws Exception {
if (getResetSafePowerState() == VirtualMachinePowerState.POWERED_ON)
return true;
ManagedObjectReference morTask = _context.getService().powerOnVMTask(_mor, null);
// Monitor VM questions
// flags[0] is the monitor's stop signal; a one-element array lets the anonymous
// Runnable observe updates made by the enclosing method's finally block.
final Boolean[] flags = {false};
final VirtualMachineMO vmMo = this;
Future<?> future = MonitorServiceExecutor.submit(new Runnable() {
@Override
public void run() {
s_logger.info("VM Question monitor started...");
while (!flags[0]) {
try {
VirtualMachineRuntimeInfo runtimeInfo = vmMo.getRuntimeInfo();
VirtualMachineQuestionInfo question = runtimeInfo.getQuestion();
if (question != null) {
s_logger.info("Question id: " + question.getId());
s_logger.info("Question text: " + question.getText());
if (question.getMessage() != null) {
for (VirtualMachineMessage msg : question.getMessage()) {
if (s_logger.isInfoEnabled()) {
s_logger.info("msg id: " + msg.getId());
s_logger.info("msg text: " + msg.getText());
}
if ("msg.uuid.altered".equalsIgnoreCase(msg.getId())) {
s_logger.info("Found that VM has a pending question that we need to answer programmatically, question id: " + msg.getId()
+ ", we will automatically answer as 'moved it' to address out of band HA for the VM");
vmMo.answerVM(question.getId(), "1");
break;
}
}
}
if (s_logger.isTraceEnabled())
s_logger.trace("These are the choices we can have just in case");
ChoiceOption choice = question.getChoice();
if (choice != null) {
for (ElementDescription info : choice.getChoiceInfo()) {
if (s_logger.isTraceEnabled()) {
s_logger.trace("Choice option key: " + info.getKey());
s_logger.trace("Choice option label: " + info.getLabel());
}
}
}
}
} catch (Throwable e) {
// Keep the monitor alive on any error; it only stops via flags[0].
s_logger.error("Unexpected exception: ", e);
}
try {
Thread.sleep(1000);
} catch (InterruptedException e) {
s_logger.debug("[ignored] interupted while dealing with vm questions.");
}
}
s_logger.info("VM Question monitor stopped");
}
});
try {
boolean result = _context.getVimClient().waitForTask(morTask);
if (result) {
_context.waitForTaskProgressDone(morTask);
return true;
} else {
s_logger.error("VMware powerOnVM_Task failed due to " + TaskMO.getTaskFailureInfo(_context, morTask));
}
} finally {
// make sure to let VM question monitor exit
flags[0] = true;
}
return false;
}
/** Powers the VM off unless it is already off; returns true on success. */
public boolean powerOff() throws Exception {
    return getResetSafePowerState() == VirtualMachinePowerState.POWERED_OFF || powerOffNoCheck();
}
/**
 * Attempts a guest-OS graceful shutdown (when VMware Tools is running) and falls
 * back to a hard power-off if the VM is not off within the grace period.
 *
 * @param shutdownWaitMs maximum time in milliseconds to wait for the graceful shutdown
 * @return true when the VM ends up powered off
 */
public boolean safePowerOff(int shutdownWaitMs) throws Exception {
if (getResetSafePowerState() == VirtualMachinePowerState.POWERED_OFF)
return true;
if (isVMwareToolsRunning()) {
try {
String vmName = getName();
s_logger.info("Try gracefully shut down VM " + vmName);
shutdown();
// Poll once a second until the VM powers off or the grace period expires.
long startTick = System.currentTimeMillis();
while (getResetSafePowerState() != VirtualMachinePowerState.POWERED_OFF && System.currentTimeMillis() - startTick < shutdownWaitMs) {
try {
Thread.sleep(1000);
} catch (InterruptedException e) {
s_logger.debug("[ignored] interupted while powering of vm.");
}
}
if (getResetSafePowerState() != VirtualMachinePowerState.POWERED_OFF) {
s_logger.info("can not gracefully shutdown VM within " + (shutdownWaitMs / 1000) + " seconds, we will perform force power off on VM " + vmName);
return powerOffNoCheck();
}
return true;
} catch (Exception e) {
// Best-effort: on any shutdown failure fall through to a hard power-off.
s_logger.warn("Failed to do guest-os graceful shutdown due to " + VmwareHelper.getExceptionMessage(e));
}
}
return powerOffNoCheck();
}
/**
 * Issues an unconditional power-off task and waits until the reported VM state
 * catches up, to avoid races with immediately-following operations.
 *
 * @return true when the VM is (or becomes) powered off
 */
private boolean powerOffNoCheck() throws Exception {
ManagedObjectReference morTask = _context.getService().powerOffVMTask(_mor);
boolean result = _context.getVimClient().waitForTask(morTask);
if (result) {
_context.waitForTaskProgressDone(morTask);
// It seems that even if a power-off task is returned done, VM state may still not be marked,
// wait up to 5 seconds to make sure to avoid race conditioning for immediate following on operations
// that relies on a powered-off VM
long startTick = System.currentTimeMillis();
while (getResetSafePowerState() != VirtualMachinePowerState.POWERED_OFF && System.currentTimeMillis() - startTick < 5000) {
try {
Thread.sleep(1000);
} catch (InterruptedException e) {
s_logger.debug("[ignored] interupted while powering of vm unconditionaly.");
}
}
return true;
} else {
if (getResetSafePowerState() == VirtualMachinePowerState.POWERED_OFF) {
// to help deal with possible race-condition
s_logger.info("Current power-off task failed. However, VM has been switched to the state we are expecting for");
return true;
}
s_logger.error("VMware powerOffVM_Task failed due to " + TaskMO.getTaskFailureInfo(_context, morTask));
}
return false;
}
/**
 * Reads the VM power state, re-reading up to 3 times (1s apart) when "poweredOff"
 * is observed, to avoid mistaking a sysprep-induced transient reboot for a real
 * power-off. Returns the first non-poweredOff state seen, or poweredOff if all
 * reads agree.
 */
public VirtualMachinePowerState getResetSafePowerState() throws Exception {
VirtualMachinePowerState powerState = VirtualMachinePowerState.POWERED_OFF;
// This is really ugly, there is a case that when windows guest VM is doing sysprep, the temporary
// rebooting process may let us pick up a "poweredOff" state during VMsync process, this can trigger
// a series actions. Unfortunately, from VMware API we can not distinguish power state into such details.
// We hope by giving it 3 second to re-read the state can cover this as a short-term solution.
//
// In the future, VMsync should not kick off CloudStack action (this is not a HA case) based on VM
// state report, until then we can remove this hacking fix
for (int i = 0; i < 3; i++) {
powerState = (VirtualMachinePowerState)getContext().getVimClient().getDynamicProperty(_mor, "runtime.powerState");
if (powerState == VirtualMachinePowerState.POWERED_OFF) {
try {
Thread.sleep(1000);
} catch (InterruptedException e) {
s_logger.debug("[ignored] interupted while pausing after power off.");
}
} else {
break;
}
}
return powerState;
}
// Reads the current power state directly, without the transient-poweredOff re-reads of getResetSafePowerState().
public VirtualMachinePowerState getPowerState() throws Exception {
return (VirtualMachinePowerState)getContext().getVimClient().getDynamicProperty(_mor, "runtime.powerState");
}
/**
 * Resets (hard-reboots) the VM and waits for the task to finish.
 *
 * @return true when the reset task completed successfully
 */
public boolean reset() throws Exception {
    ManagedObjectReference task = _context.getService().resetVMTask(_mor);
    if (!_context.getVimClient().waitForTask(task)) {
        s_logger.error("VMware resetVM_Task failed due to " + TaskMO.getTaskFailureInfo(_context, task));
        return false;
    }
    _context.waitForTaskProgressDone(task);
    return true;
}
// Requests a guest-OS shutdown via VMware Tools (asynchronous; no task handle to wait on).
public void shutdown() throws Exception {
_context.getService().shutdownGuest(_mor);
}
// Requests a guest-OS reboot via VMware Tools.
public void rebootGuest() throws Exception {
_context.getService().rebootGuest(_mor);
}
// Converts this VM into a template.
public void markAsTemplate() throws Exception {
_context.getService().markAsTemplate(_mor);
}
// Converts a template back into a VM, placing it on the given resource pool and host.
public void markAsVirtualMachine(ManagedObjectReference resourcePool, ManagedObjectReference host) throws Exception {
_context.getService().markAsVirtualMachine(_mor, resourcePool, host);
}
/** Reports whether this VM is marked as a template, per its config info. */
public boolean isTemplate() throws Exception {
    return getConfigInfo().isTemplate();
}
/**
 * Migrates this VM to the given resource pool and host and waits for completion.
 *
 * @return true when the migrate task completed successfully
 */
public boolean migrate(ManagedObjectReference morRp, ManagedObjectReference morTargetHost) throws Exception {
    ManagedObjectReference task = _context.getService().migrateVMTask(_mor, morRp, morTargetHost, VirtualMachineMovePriority.DEFAULT_PRIORITY, null);
    if (_context.getVimClient().waitForTask(task)) {
        _context.waitForTaskProgressDone(task);
        return true;
    }
    s_logger.error("VMware migrateVM_Task failed due to " + TaskMO.getTaskFailureInfo(_context, task));
    return false;
}
/**
 * Relocates this VM's storage according to the given relocate spec.
 *
 * @return true when the relocation task completed successfully
 */
public boolean changeDatastore(VirtualMachineRelocateSpec relocateSpec) throws Exception {
    ManagedObjectReference task = _context.getVimClient().getService().relocateVMTask(_mor, relocateSpec, VirtualMachineMovePriority.DEFAULT_PRIORITY);
    if (_context.getVimClient().waitForTask(task)) {
        _context.waitForTaskProgressDone(task);
        return true;
    }
    s_logger.error("VMware RelocateVM_Task to change datastore failed due to " + TaskMO.getTaskFailureInfo(_context, task));
    return false;
}
/**
 * Relocates this VM to another host according to the given relocate spec.
 *
 * @return true when the relocation task completed successfully
 */
public boolean changeHost(VirtualMachineRelocateSpec relocateSpec) throws Exception {
    ManagedObjectReference task = _context.getService().relocateVMTask(_mor, relocateSpec, VirtualMachineMovePriority.DEFAULT_PRIORITY);
    if (_context.getVimClient().waitForTask(task)) {
        _context.waitForTaskProgressDone(task);
        return true;
    }
    s_logger.error("VMware RelocateVM_Task to change host failed due to " + TaskMO.getTaskFailureInfo(_context, task));
    return false;
}
/**
 * Moves this VM to the given datastore and waits for the relocation to finish.
 *
 * @return true when the relocation task completed successfully
 */
public boolean changeDatastore(ManagedObjectReference morDataStore) throws Exception {
    // Build a relocate spec that only changes the target datastore.
    VirtualMachineRelocateSpec spec = new VirtualMachineRelocateSpec();
    spec.setDatastore(morDataStore);
    ManagedObjectReference task = _context.getService().relocateVMTask(_mor, spec, null);
    if (_context.getVimClient().waitForTask(task)) {
        _context.waitForTaskProgressDone(task);
        return true;
    }
    s_logger.error("VMware change datastore relocateVM_Task failed due to " + TaskMO.getTaskFailureInfo(_context, task));
    return false;
}
/**
 * Moves this VM to the given target host and waits for the relocation to finish.
 *
 * @return true when the relocation task completed successfully
 */
public boolean relocate(ManagedObjectReference morTargetHost) throws Exception {
    // Build a relocate spec that only changes the target host.
    VirtualMachineRelocateSpec spec = new VirtualMachineRelocateSpec();
    spec.setHost(morTargetHost);
    ManagedObjectReference task = _context.getService().relocateVMTask(_mor, spec, null);
    if (_context.getVimClient().waitForTask(task)) {
        _context.waitForTaskProgressDone(task);
        return true;
    }
    s_logger.error("VMware relocateVM_Task failed due to " + TaskMO.getTaskFailureInfo(_context, task));
    return false;
}
// Reads the VM's snapshot tree information ("snapshot" property); may be null when the VM has no snapshots.
public VirtualMachineSnapshotInfo getSnapshotInfo() throws Exception {
return (VirtualMachineSnapshotInfo)_context.getVimClient().getDynamicProperty(_mor, "snapshot");
}
// Creates a snapshot and reports success; see createSnapshotGetReference for the full semantics.
public boolean createSnapshot(String snapshotName, String snapshotDescription, boolean dumpMemory, boolean quiesce) throws Exception {
return createSnapshotGetReference(snapshotName, snapshotDescription, dumpMemory, quiesce) != null;
}
/**
 * Creates a snapshot, waits for the task, and then polls (up to the vCenter
 * session timeout) until the snapshot object is visible in vCenter.
 *
 * @param snapshotName name of the snapshot to create
 * @param snapshotDescription description stored with the snapshot
 * @param dumpMemory whether to include a memory dump in the snapshot
 * @param quiesce whether to quiesce the guest file system
 * @return the snapshot's managed object reference, or null on failure/timeout
 */
public ManagedObjectReference createSnapshotGetReference(String snapshotName, String snapshotDescription, boolean dumpMemory, boolean quiesce) throws Exception {
long apiTimeout = _context.getVimClient().getVcenterSessionTimeout();
ManagedObjectReference morTask = _context.getService().createSnapshotTask(_mor, snapshotName, snapshotDescription, dumpMemory, quiesce);
boolean result = _context.getVimClient().waitForTask(morTask);
if (result) {
_context.waitForTaskProgressDone(morTask);
ManagedObjectReference morSnapshot = null;
// We still need to wait until the object appear in vCenter
long startTick = System.currentTimeMillis();
while (System.currentTimeMillis() - startTick < apiTimeout) {
morSnapshot = getSnapshotMor(snapshotName);
if (morSnapshot != null) {
break;
}
try {
Thread.sleep(1000);
} catch (InterruptedException e) {
s_logger.debug("[ignored] interupted while waiting for snapshot to be done.");
}
}
if (morSnapshot == null) {
s_logger.error("We've been waiting for over " + apiTimeout + " milli seconds for snapshot MOR to be appearing in vCenter after CreateSnapshot task is done, " +
"but it is still not there?!");
return null;
}
s_logger.debug("Waited for " + (System.currentTimeMillis() - startTick) + " seconds for snapshot object [" + snapshotName + "] to appear in vCenter.");
return morSnapshot;
} else {
s_logger.error("VMware createSnapshot_Task failed due to " + TaskMO.getTaskFailureInfo(_context, morTask));
}
return null;
}
/**
 * Removes the named snapshot (optionally with its children) and waits for the task.
 *
 * @param snapshotName name of the snapshot to remove
 * @param removeChildren whether child snapshots are removed as well
 * @return true when the snapshot was found and the removal task succeeded
 */
public boolean removeSnapshot(String snapshotName, boolean removeChildren) throws Exception {
    ManagedObjectReference snapshot = getSnapshotMor(snapshotName);
    if (snapshot == null) {
        s_logger.warn("Unable to find snapshot: " + snapshotName);
        return false;
    }
    ManagedObjectReference task = _context.getService().removeSnapshotTask(snapshot, removeChildren, true);
    if (_context.getVimClient().waitForTask(task)) {
        _context.waitForTaskProgressDone(task);
        return true;
    }
    s_logger.error("VMware removeSnapshot_Task failed due to " + TaskMO.getTaskFailureInfo(_context, task));
    return false;
}
/**
 * Reverts the VM to the named snapshot and waits for the task.
 *
 * @return true when the snapshot was found and the revert task succeeded
 */
public boolean revertToSnapshot(String snapshotName) throws Exception {
    ManagedObjectReference snapshot = getSnapshotMor(snapshotName);
    if (snapshot == null) {
        s_logger.warn("Unable to find snapshot: " + snapshotName);
        return false;
    }
    ManagedObjectReference task = _context.getService().revertToSnapshotTask(snapshot, _mor, null);
    if (_context.getVimClient().waitForTask(task)) {
        _context.waitForTaskProgressDone(task);
        return true;
    }
    s_logger.error("VMware revert to snapshot failed due to " + TaskMO.getTaskFailureInfo(_context, task));
    return false;
}
/**
 * Deletes all of the snapshots of a VM in a single task and waits for completion.
 *
 * @throws Exception when the removal task fails; the message carries the task failure detail
 */
public void consolidateAllSnapshots() throws Exception {
    // Second argument presumably requests disk consolidation (matches the vSphere
    // RemoveAllSnapshots_Task 'consolidate' flag) — confirm against the API version in use.
    ManagedObjectReference task = _context.getService().removeAllSnapshotsTask(_mor, true);
    boolean result = _context.getVimClient().waitForTask(task);
    if (result) {
        _context.waitForTaskProgressDone(task);
    } else {
        // Previous message said "Unable to register VM" — a copy-paste error from the
        // VM-registration path; report the actual failed operation instead.
        throw new Exception("Unable to remove all snapshots of VM due to the following issue: " + TaskMO.getTaskFailureInfo(_context, task));
    }
}
/**
 * Removes every root snapshot of this VM (and its children), one task at a time.
 *
 * @return true when all removal tasks succeeded (or there were no snapshots)
 */
public boolean removeAllSnapshots() throws Exception {
    VirtualMachineSnapshotInfo snapshotInfo = getSnapshotInfo();
    if (snapshotInfo == null || snapshotInfo.getRootSnapshotList() == null) {
        return true;
    }
    for (VirtualMachineSnapshotTree root : snapshotInfo.getRootSnapshotList()) {
        // removeChildren=true; third flag matches the vSphere RemoveSnapshot_Task 'consolidate' parameter.
        ManagedObjectReference task = _context.getService().removeSnapshotTask(root.getSnapshot(), true, true);
        if (!_context.getVimClient().waitForTask(task)) {
            s_logger.error("VMware removeSnapshot_Task failed due to " + TaskMO.getTaskFailureInfo(_context, task));
            return false;
        }
        _context.waitForTaskProgressDone(task);
    }
    return true;
}
/**
 * Converts a snapshot disk file name into a fully qualified datastore path
 * ("[datastore] relative/path"). Absolute paths ("/...") are matched against the
 * host's datastore mount points; relative names are resolved against the VM's
 * snapshot directory.
 *
 * @param vmFileInfo the VM's file info (provides the snapshot directory)
 * @param datastoreMounts datastore MOR / mount-point pairs for the host
 * @param snapshotDiskFile disk file name, absolute or relative
 * @return the datastore path; an absolute path with no matching mount is returned unchanged
 */
public String
getSnapshotDiskFileDatastorePath(VirtualMachineFileInfo vmFileInfo, List<Pair<ManagedObjectReference, String>> datastoreMounts, String snapshotDiskFile)
throws Exception {
// if file path start with "/", need to search all datastore mounts on the host in order
// to form fully qualified datastore path
if (snapshotDiskFile.startsWith("/")) {
for (Pair<ManagedObjectReference, String> mount : datastoreMounts) {
if (snapshotDiskFile.startsWith(mount.second())) {
DatastoreMO dsMo = new DatastoreMO(_context, mount.first());
String dsFullPath = String.format("[%s] %s", dsMo.getName(), snapshotDiskFile.substring(mount.second().length() + 1));
s_logger.info("Convert snapshot disk file name to datastore path. " + snapshotDiskFile + "->" + dsFullPath);
return dsFullPath;
}
}
s_logger.info("Convert snapshot disk file name to datastore path. " + snapshotDiskFile + "->" + snapshotDiskFile);
return snapshotDiskFile;
} else {
// snapshot directory string from VirtualMachineFileInfo ends with /
String dsFullPath = vmFileInfo.getSnapshotDirectory() + snapshotDiskFile;
s_logger.info("Convert snapshot disk file name to datastore path. " + snapshotDiskFile + "->" + dsFullPath);
return dsFullPath;
}
}
/**
 * Downloads and parses this VM's snapshot descriptor (.vmsd) file from the datastore.
 *
 * NOTE(review): when the downloaded content is null this only logs a warning and
 * still passes null to SnapshotDescriptor.parse() — confirm parse() tolerates null/empty input.
 */
public SnapshotDescriptor getSnapshotDescriptor() throws Exception {
Pair<DatacenterMO, String> dcPair = getOwnerDatacenter();
String dsPath = getSnapshotDescriptorDatastorePath();
assert (dsPath != null);
// Fetch the .vmsd content over the datastore-browse HTTP URL.
String url = getContext().composeDatastoreBrowseUrl(dcPair.second(), dsPath);
byte[] content = getContext().getResourceContent(url);
if (content == null || content.length < 1) {
s_logger.warn("Snapshot descriptor file (vsd) does not exist anymore?");
}
SnapshotDescriptor descriptor = new SnapshotDescriptor();
descriptor.parse(content);
return descriptor;
}
/**
 * Computes the datastore path of this VM's snapshot descriptor (.vmsd) file.
 * The .vmsd file lives in the same directory as the .vmx file and is named
 * after the VM ("vmName.vmsd").
 *
 * @return the datastore path of the .vmsd file
 * @throws Exception on vCenter API errors
 */
public String getSnapshotDescriptorDatastorePath() throws Exception {
    // Retrieve "name" and "config.files" for this VM in a single
    // PropertyCollector round trip.
    PropertySpec pSpec = new PropertySpec();
    pSpec.setType("VirtualMachine");
    pSpec.getPathSet().add("name");
    pSpec.getPathSet().add("config.files");

    ObjectSpec oSpec = new ObjectSpec();
    oSpec.setObj(_mor);
    oSpec.setSkip(Boolean.FALSE);

    PropertyFilterSpec pfSpec = new PropertyFilterSpec();
    pfSpec.getPropSet().add(pSpec);
    pfSpec.getObjectSet().add(oSpec);
    List<PropertyFilterSpec> pfSpecArr = new ArrayList<PropertyFilterSpec>();
    pfSpecArr.add(pfSpec);

    List<ObjectContent> ocs = _context.getService().retrieveProperties(_context.getPropertyCollector(), pfSpecArr);
    assert (ocs != null);

    String vmName = null;
    VirtualMachineFileInfo fileInfo = null;

    // Exactly one object (this VM) with exactly two properties is expected.
    assert (ocs.size() == 1);
    for (ObjectContent oc : ocs) {
        List<DynamicProperty> props = oc.getPropSet();
        if (props != null) {
            assert (props.size() == 2);

            for (DynamicProperty prop : props) {
                if (prop.getName().equals("name")) {
                    vmName = prop.getVal().toString();
                } else {
                    // The only other requested property is "config.files".
                    fileInfo = (VirtualMachineFileInfo)prop.getVal();
                }
            }
        }
    }
    assert (vmName != null);
    assert (fileInfo != null);

    // .vmsd file exists at the same directory of .vmx file
    DatastoreFile vmxFile = new DatastoreFile(fileInfo.getVmPathName());
    return vmxFile.getCompanionPath(vmName + ".vmsd");
}
/**
 * Looks up a snapshot by name in this VM's snapshot tree.
 *
 * @param snapshotName name of the snapshot to find
 * @return the snapshot's MOR, or null when the VM has no snapshot info or no match
 */
public ManagedObjectReference getSnapshotMor(String snapshotName) throws Exception {
    VirtualMachineSnapshotInfo info = getSnapshotInfo();
    if (info == null) {
        return null;
    }
    return VmwareHelper.findSnapshotInTree(info.getRootSnapshotList(), snapshotName);
}
/**
 * Checks whether this VM currently has any snapshot, either a current
 * snapshot pointer or any entry in the root snapshot list.
 *
 * @return true if at least one snapshot exists
 */
public boolean hasSnapshot() throws Exception {
    VirtualMachineSnapshotInfo info = getSnapshotInfo();
    if (info == null) {
        return false;
    }
    if (info.getCurrentSnapshot() != null) {
        return true;
    }
    List<VirtualMachineSnapshotTree> roots = info.getRootSnapshotList();
    return roots != null && !roots.isEmpty();
}
/**
 * Creates a full (non-linked) clone of this VM. The clone is created powered
 * off and is not marked as a template.
 *
 * @param cloneName       name for the new VM
 * @param morFolder       destination folder
 * @param morResourcePool destination resource pool
 * @param morDs           destination datastore
 * @return true if the clone task completed successfully
 */
public boolean createFullClone(String cloneName, ManagedObjectReference morFolder, ManagedObjectReference morResourcePool, ManagedObjectReference morDs)
    throws Exception {
    VirtualMachineRelocateSpec relocSpec = new VirtualMachineRelocateSpec();
    relocSpec.setDatastore(morDs);
    relocSpec.setPool(morResourcePool);

    VirtualMachineCloneSpec cloneSpec = new VirtualMachineCloneSpec();
    cloneSpec.setLocation(relocSpec);
    cloneSpec.setPowerOn(false);
    cloneSpec.setTemplate(false);

    ManagedObjectReference morTask = _context.getService().cloneVMTask(_mor, morFolder, cloneName, cloneSpec);
    if (_context.getVimClient().waitForTask(morTask)) {
        _context.waitForTaskProgressDone(morTask);
        return true;
    }

    s_logger.error("VMware cloneVM_Task failed due to " + TaskMO.getTaskFailureInfo(_context, morTask));
    return false;
}
/**
 * Creates a linked clone of this VM based on an existing snapshot. Independent
 * disks (which cannot share child backings) are relocated with full copies;
 * all other disks get new child disk backings on the snapshot chain.
 *
 * @param morBaseSnapshot snapshot to base the linked clone on
 * @param morFolder       destination folder
 * @param morResourcePool destination resource pool
 * @param morDs           destination datastore for independent disks
 * @return true if the clone task completed successfully
 */
public boolean createLinkedClone(String cloneName, ManagedObjectReference morBaseSnapshot, ManagedObjectReference morFolder, ManagedObjectReference morResourcePool,
    ManagedObjectReference morDs) throws Exception {
    assert (morBaseSnapshot != null);
    assert (morFolder != null);
    assert (morResourcePool != null);
    assert (morDs != null);

    VirtualDisk[] independentDisks = getAllIndependentDiskDevice();
    VirtualMachineRelocateSpec rSpec = new VirtualMachineRelocateSpec();
    if (independentDisks.length > 0) {
        // Independent disks must be fully copied; pin each one to the target
        // datastore with a per-disk locator.
        List<VirtualMachineRelocateSpecDiskLocator> diskLocator = new ArrayList<VirtualMachineRelocateSpecDiskLocator>(independentDisks.length);
        for (int i = 0; i < independentDisks.length; i++) {
            VirtualMachineRelocateSpecDiskLocator loc = new VirtualMachineRelocateSpecDiskLocator();
            loc.setDatastore(morDs);
            loc.setDiskId(independentDisks[i].getKey());
            loc.setDiskMoveType(VirtualMachineRelocateDiskMoveOptions.MOVE_ALL_DISK_BACKINGS_AND_DISALLOW_SHARING.value());
            diskLocator.add(loc);
        }

        rSpec.setDiskMoveType(VirtualMachineRelocateDiskMoveOptions.CREATE_NEW_CHILD_DISK_BACKING.value());
        rSpec.getDisk().addAll(diskLocator);
    } else {
        rSpec.setDiskMoveType(VirtualMachineRelocateDiskMoveOptions.CREATE_NEW_CHILD_DISK_BACKING.value());
    }
    rSpec.setPool(morResourcePool);

    VirtualMachineCloneSpec cloneSpec = new VirtualMachineCloneSpec();
    cloneSpec.setPowerOn(false);
    cloneSpec.setTemplate(false);
    cloneSpec.setLocation(rSpec);
    cloneSpec.setSnapshot(morBaseSnapshot);

    ManagedObjectReference morTask = _context.getService().cloneVMTask(_mor, morFolder, cloneName, cloneSpec);
    boolean result = _context.getVimClient().waitForTask(morTask);
    if (result) {
        _context.waitForTaskProgressDone(morTask);
        return true;
    } else {
        s_logger.error("VMware cloneVM_Task failed due to " + TaskMO.getTaskFailureInfo(_context, morTask));
    }
    return false;
}
/** @return this VM's "runtime" property from vCenter. */
public VirtualMachineRuntimeInfo getRuntimeInfo() throws Exception {
    final Object runtime = _context.getVimClient().getDynamicProperty(_mor, "runtime");
    return (VirtualMachineRuntimeInfo)runtime;
}
/** @return this VM's "config" property from vCenter. */
public VirtualMachineConfigInfo getConfigInfo() throws Exception {
    final Object config = _context.getVimClient().getDynamicProperty(_mor, "config");
    return (VirtualMachineConfigInfo)config;
}
/** @return whether the VMware Tools installer image is currently mounted on this VM. */
public boolean isToolsInstallerMounted() throws Exception {
    final boolean mounted = _context.getVimClient().getDynamicProperty(_mor, "runtime.toolsInstallerMounted");
    return mounted;
}
/** @return this VM's "guest" property from vCenter. */
public GuestInfo getGuestInfo() throws Exception {
    final Object guest = _context.getVimClient().getDynamicProperty(_mor, "guest");
    return (GuestInfo)guest;
}
/** @return this VM's "summary.config" property from vCenter. */
public VirtualMachineConfigSummary getConfigSummary() throws Exception {
    final Object summary = _context.getVimClient().getDynamicProperty(_mor, "summary.config");
    return (VirtualMachineConfigSummary)summary;
}
/** @return this VM's "config.files" property from vCenter. */
public VirtualMachineFileInfo getFileInfo() throws Exception {
    final Object files = _context.getVimClient().getDynamicProperty(_mor, "config.files");
    return (VirtualMachineFileInfo)files;
}
/**
 * Retrieves this VM's extended file layout ("layoutEx") via the PropertyCollector.
 *
 * @return the file layout, or null if the property could not be retrieved
 * @throws Exception on vCenter API errors
 */
public VirtualMachineFileLayoutEx getFileLayout() throws Exception {
    VirtualMachineFileLayoutEx fileLayout = null;
    // Request only the "layoutEx" property of this VM.
    PropertySpec pSpec = new PropertySpec();
    pSpec.setType("VirtualMachine");
    pSpec.getPathSet().add("layoutEx");

    ObjectSpec oSpec = new ObjectSpec();
    oSpec.setObj(_mor);
    oSpec.setSkip(Boolean.FALSE);

    PropertyFilterSpec pfSpec = new PropertyFilterSpec();
    pfSpec.getPropSet().add(pSpec);
    pfSpec.getObjectSet().add(oSpec);
    List<PropertyFilterSpec> pfSpecArr = new ArrayList<PropertyFilterSpec>();
    pfSpecArr.add(pfSpec);

    List<ObjectContent> ocs = _context.getService().retrieveProperties(_context.getPropertyCollector(), pfSpecArr);

    if (ocs != null) {
        for (ObjectContent oc : ocs) {
            List<DynamicProperty> props = oc.getPropSet();
            if (props != null) {
                for (DynamicProperty prop : props) {
                    if (prop.getName().equals("layoutEx")) {
                        fileLayout = (VirtualMachineFileLayoutEx)prop.getVal();
                        break;
                    }
                }
            }
        }
    }

    return fileLayout;
}
/** @return the MOR of this VM's parent object in the vCenter inventory. */
@Override
public ManagedObjectReference getParentMor() throws Exception {
    final Object parent = _context.getVimClient().getDynamicProperty(_mor, "parent");
    return (ManagedObjectReference)parent;
}
/**
 * Lists the names of all networks this VM is attached to, via a
 * VirtualMachine -&gt; "network" traversal in the PropertyCollector.
 *
 * @return network names (possibly empty, never null)
 * @throws Exception on vCenter API errors
 */
public String[] getNetworks() throws Exception {
    // Collect the "name" property of each Network reachable from this VM.
    PropertySpec pSpec = new PropertySpec();
    pSpec.setType("Network");
    pSpec.getPathSet().add("name");

    TraversalSpec vm2NetworkTraversal = new TraversalSpec();
    vm2NetworkTraversal.setType("VirtualMachine");
    vm2NetworkTraversal.setPath("network");
    vm2NetworkTraversal.setName("vm2NetworkTraversal");

    ObjectSpec oSpec = new ObjectSpec();
    oSpec.setObj(_mor);
    // Skip the VM itself; only report the traversed Network objects.
    oSpec.setSkip(Boolean.TRUE);
    oSpec.getSelectSet().add(vm2NetworkTraversal);

    PropertyFilterSpec pfSpec = new PropertyFilterSpec();
    pfSpec.getPropSet().add(pSpec);
    pfSpec.getObjectSet().add(oSpec);
    List<PropertyFilterSpec> pfSpecArr = new ArrayList<PropertyFilterSpec>();
    pfSpecArr.add(pfSpec);

    List<ObjectContent> ocs = _context.getService().retrieveProperties(_context.getPropertyCollector(), pfSpecArr);

    List<String> networks = new ArrayList<String>();
    if (ocs != null && ocs.size() > 0) {
        for (ObjectContent oc : ocs) {
            // Only "name" was requested, so propSet[0] is the network name.
            networks.add(oc.getPropSet().get(0).getVal().toString());
        }
    }

    return networks.toArray(new String[0]);
}
/**
 * Lists the networks this VM is attached to together with details: network
 * name, attached VM MORs, and the cloud.gc custom-field tag value (used to
 * mark networks for garbage collection).
 *
 * @return network details (possibly empty, never null)
 * @throws Exception on vCenter API errors
 */
public List<NetworkDetails> getNetworksWithDetails() throws Exception {
    List<NetworkDetails> networks = new ArrayList<NetworkDetails>();

    int gcTagKey = getCustomFieldKey("Network", CustomFieldConstants.CLOUD_GC);

    if (gcTagKey == 0) {
        // Standard vSwitch tag not defined; fall back to the dvPortGroup variant.
        gcTagKey = getCustomFieldKey("DistributedVirtualPortgroup", CustomFieldConstants.CLOUD_GC_DVP);
        s_logger.debug("The custom key for dvPortGroup is : " + gcTagKey);
    }

    // Collect name, attached VMs, and the custom-field value keyed by gcTagKey.
    PropertySpec pSpec = new PropertySpec();
    pSpec.setType("Network");
    pSpec.getPathSet().add("name");
    pSpec.getPathSet().add("vm");
    pSpec.getPathSet().add(String.format("value[%d]", gcTagKey));

    TraversalSpec vm2NetworkTraversal = new TraversalSpec();
    vm2NetworkTraversal.setType("VirtualMachine");
    vm2NetworkTraversal.setPath("network");
    vm2NetworkTraversal.setName("vm2NetworkTraversal");

    ObjectSpec oSpec = new ObjectSpec();
    oSpec.setObj(_mor);
    oSpec.setSkip(Boolean.TRUE);
    oSpec.getSelectSet().add(vm2NetworkTraversal);

    PropertyFilterSpec pfSpec = new PropertyFilterSpec();
    pfSpec.getPropSet().add(pSpec);
    pfSpec.getObjectSet().add(oSpec);
    List<PropertyFilterSpec> pfSpecArr = new ArrayList<PropertyFilterSpec>();
    pfSpecArr.add(pfSpec);

    List<ObjectContent> ocs = _context.getService().retrieveProperties(_context.getPropertyCollector(), pfSpecArr);

    if (ocs != null && ocs.size() > 0) {
        for (ObjectContent oc : ocs) {
            ArrayOfManagedObjectReference morVms = null;
            String gcTagValue = null;
            String name = null;

            for (DynamicProperty prop : oc.getPropSet()) {
                if (prop.getName().equals("name"))
                    name = prop.getVal().toString();
                else if (prop.getName().equals("vm"))
                    morVms = (ArrayOfManagedObjectReference)prop.getVal();
                else if (prop.getName().startsWith("value[")) {
                    // Custom-field value retrieved via the value[<key>] path.
                    CustomFieldStringValue val = (CustomFieldStringValue)prop.getVal();
                    if (val != null)
                        gcTagValue = val.getValue();
                }
            }

            NetworkDetails details =
                new NetworkDetails(name, oc.getObj(), (morVms != null ? morVms.getManagedObjectReference().toArray(
                    new ManagedObjectReference[morVms.getManagedObjectReference().size()]) : null), gcTagValue);

            networks.add(details);
        }
        s_logger.debug("Retrieved " + networks.size() + " networks with key : " + gcTagKey);
    }

    return networks;
}
/**
 * Lists all datastores this VM uses, via a VirtualMachine -&gt; "datastore"
 * traversal in the PropertyCollector.
 *
 * @return datastore wrappers (possibly empty, never null)
 * @throws Exception on vCenter API errors
 */
public List<DatastoreMO> getAllDatastores() throws Exception {
    PropertySpec pSpec = new PropertySpec();
    pSpec.setType("Datastore");
    pSpec.getPathSet().add("name");

    TraversalSpec vmDatastoreTraversal = new TraversalSpec();
    vmDatastoreTraversal.setType("VirtualMachine");
    vmDatastoreTraversal.setPath("datastore");
    vmDatastoreTraversal.setName("vmDatastoreTraversal");

    ObjectSpec oSpec = new ObjectSpec();
    oSpec.setObj(_mor);
    // Skip the VM itself; only report the traversed Datastore objects.
    oSpec.setSkip(Boolean.TRUE);
    oSpec.getSelectSet().add(vmDatastoreTraversal);

    PropertyFilterSpec pfSpec = new PropertyFilterSpec();
    pfSpec.getPropSet().add(pSpec);
    pfSpec.getObjectSet().add(oSpec);
    List<PropertyFilterSpec> pfSpecArr = new ArrayList<PropertyFilterSpec>();
    pfSpecArr.add(pfSpec);

    List<ObjectContent> ocs = _context.getService().retrieveProperties(_context.getPropertyCollector(), pfSpecArr);

    List<DatastoreMO> datastores = new ArrayList<DatastoreMO>();
    if (CollectionUtils.isNotEmpty(ocs)) {
        for (ObjectContent oc : ocs) {
            datastores.add(new DatastoreMO(_context, oc.getObj()));
        }
    }
    return datastores;
}
/**
 * Retrieve path info to access VM files via vSphere web interface.
 *
 * The .vmx datastore path has the form "[datastore] vmDir/file.vmx", so
 * split("\\[|\\]|/") yields ["", datastore-name, vm-dir, vmx-file-name]
 * (token[0] is always the empty string before '[') -- the same token layout
 * relied on by getVmxHttpAccessUrl().
 *
 * @return [0] vm-name, [1] data-center-name, [2] datastore-name
 * @throws Exception on vCenter API errors
 */
public String[] getHttpAccessPathInfo() throws Exception {
    String[] pathInfo = new String[3];

    Pair<DatacenterMO, String> dcInfo = getOwnerDatacenter();

    VirtualMachineFileInfo fileInfo = getFileInfo();
    String vmxFilePath = fileInfo.getVmPathName();
    String vmxPathTokens[] = vmxFilePath.split("\\[|\\]|/");
    assert (vmxPathTokens.length == 4);
    // FIX: the previous implementation wrote pathInfo[1..3] into a 3-element
    // array (guaranteed ArrayIndexOutOfBoundsException), never filled
    // pathInfo[0], and read the wrong split tokens (token[0] is empty,
    // token[1] is the datastore, token[2] the VM directory).
    pathInfo[0] = vmxPathTokens[2].trim(); // vSphere vm name
    pathInfo[1] = dcInfo.second(); // vSphere datacenter name
    pathInfo[2] = vmxPathTokens[1].trim(); // vSphere datastore name
    return pathInfo;
}
/**
 * Composes the HTTPS datastore-browse URL of this VM's .vmx file.
 *
 * The .vmx datastore path has the form "[datastore] vmDir/file.vmx", so
 * split("\\[|\\]|/") yields ["", datastore-name, vm-dir, vmx-file-name].
 *
 * @return URL of the form
 *         {@code https://<server>/folder/<vmDir>/<file>?dcPath=<dc>&dsName=<ds>}
 * @throws Exception on vCenter API errors
 */
public String getVmxHttpAccessUrl() throws Exception {
    Pair<DatacenterMO, String> dcInfo = getOwnerDatacenter();

    VirtualMachineFileInfo fileInfo = getFileInfo();
    String vmxFilePath = fileInfo.getVmPathName();
    String vmxPathTokens[] = vmxFilePath.split("\\[|\\]|/");

    // StringBuilder instead of StringBuffer: the buffer never escapes this
    // method, so the synchronized StringBuffer bought nothing.
    StringBuilder sb = new StringBuilder("https://" + _context.getServerAddress() + "/folder/");
    sb.append(URLEncoder.encode(vmxPathTokens[2].trim(), "UTF-8"));
    sb.append("/");
    sb.append(URLEncoder.encode(vmxPathTokens[3].trim(), "UTF-8"));
    sb.append("?dcPath=");
    sb.append(URLEncoder.encode(dcInfo.second(), "UTF-8"));
    sb.append("&dsName=");
    sb.append(URLEncoder.encode(vmxPathTokens[1].trim(), "UTF-8"));

    return sb.toString();
}
/**
 * Reconfigures the VM's VNC remote-display extra-config options
 * (enable flag, password, port, keyboard layout).
 *
 * @return true if the reconfigure task succeeded
 * @throws Exception on vCenter API errors
 */
public boolean setVncConfigInfo(boolean enableVnc, String vncPassword, int vncPort, String keyboard) throws Exception {
    VirtualMachineConfigSpec vmConfigSpec = new VirtualMachineConfigSpec();
    // Build the RemoteDisplay.vnc.* option values.
    OptionValue[] vncOptions = VmwareHelper.composeVncOptions(null, enableVnc, vncPassword, vncPort, keyboard);
    vmConfigSpec.getExtraConfig().addAll(Arrays.asList(vncOptions));
    ManagedObjectReference morTask = _context.getService().reconfigVMTask(_mor, vmConfigSpec);

    boolean result = _context.getVimClient().waitForTask(morTask);
    if (result) {
        _context.waitForTaskProgressDone(morTask);
        return true;
    } else {
        s_logger.error("VMware reconfigVM_Task failed due to " + TaskMO.getTaskFailureInfo(_context, morTask));
    }
    return false;
}
/**
 * Applies the given reconfiguration spec to this VM and waits for the task.
 *
 * @param vmConfigSpec reconfiguration spec to apply
 * @return true if the reconfigure task succeeded
 */
public boolean configureVm(VirtualMachineConfigSpec vmConfigSpec) throws Exception {
    ManagedObjectReference morTask = _context.getService().reconfigVMTask(_mor, vmConfigSpec);
    if (!_context.getVimClient().waitForTask(morTask)) {
        s_logger.error("VMware reconfigVM_Task failed due to " + TaskMO.getTaskFailureInfo(_context, morTask));
        return false;
    }
    _context.waitForTaskProgressDone(morTask);
    return true;
}
/**
 * Applies a batch of virtual-device changes (device, operation, file
 * operation triples) to this VM in a single reconfigure task.
 *
 * @param devices device/operation/file-operation triples to apply
 * @return true if the reconfigure task succeeded
 */
public boolean configureVm(Ternary<VirtualDevice, VirtualDeviceConfigSpecOperation, VirtualDeviceConfigSpecFileOperation>[] devices) throws Exception {
    assert (devices != null);

    VirtualMachineConfigSpec configSpec = new VirtualMachineConfigSpec();
    for (Ternary<VirtualDevice, VirtualDeviceConfigSpecOperation, VirtualDeviceConfigSpecFileOperation> device : devices) {
        VirtualDeviceConfigSpec change = new VirtualDeviceConfigSpec();
        change.setDevice(device.first());
        change.setOperation(device.second());
        change.setFileOperation(device.third());
        configSpec.getDeviceChange().add(change);
    }

    ManagedObjectReference morTask = _context.getService().reconfigVMTask(_mor, configSpec);
    if (!_context.getVimClient().waitForTask(morTask)) {
        s_logger.error("VMware reconfigVM_Task failed due to " + TaskMO.getTaskFailureInfo(_context, morTask));
        return false;
    }
    _context.waitForTaskProgressDone(morTask);
    return true;
}
/**
 * Resolves the host IP and configured VNC port for this VM.
 *
 * @param hostNetworkName network name used to pick the host IP
 * @return (host IP, VNC port); port is 0 when "RemoteDisplay.vnc.port" is not set
 */
public Pair<String, Integer> getVncPort(String hostNetworkName) throws Exception {
    HostMO hostMo = getRunningHost();
    VmwareHypervisorHostNetworkSummary summary = hostMo.getHyperHostNetworkSummary(hostNetworkName);

    List<OptionValue> options = getConfigInfo().getExtraConfig();
    if (options != null) {
        for (OptionValue option : options) {
            if (!option.getKey().equals("RemoteDisplay.vnc.port")) {
                continue;
            }
            String port = (String)option.getValue();
            if (port != null) {
                return new Pair<String, Integer>(summary.getHostIp(), Integer.parseInt(port));
            }
        }
    }

    return new Pair<String, Integer>(summary.getHostIp(), 0);
}
// vmdkDatastorePath: [datastore name] vmdkFilePath
/**
 * Convenience overload: creates a thin-provisioned, persistent-mode disk.
 *
 * @see #createDisk(String, VirtualDiskType, VirtualDiskMode, String, long, ManagedObjectReference, int)
 */
public void createDisk(String vmdkDatastorePath, long sizeInMb, ManagedObjectReference morDs, int controllerKey) throws Exception {
    createDisk(vmdkDatastorePath, VirtualDiskType.THIN, VirtualDiskMode.PERSISTENT, null, sizeInMb, morDs, controllerKey);
}
// vmdkDatastorePath: [datastore name] vmdkFilePath
/**
 * Creates and attaches a new virtual disk to this VM via a reconfigure task.
 * Supports flat disk types (thin / preallocated / eager-zeroed thick) and
 * raw device mappings (RDM virtual / physical compatibility).
 *
 * @param vmdkDatastorePath datastore path for the new VMDK
 * @param diskType          disk provisioning / mapping type
 * @param diskMode          disk mode (NOTE(review): this parameter is not used
 *                          below; flat/RDM backings hard-code PERSISTENT -- confirm intent)
 * @param rdmDeviceName     raw device name, only used for RDM/RDMP types
 * @param sizeInMb          capacity in MiB
 * @param morDs             target datastore
 * @param controllerKey     controller to attach to; negative means "use the IDE controller"
 * @throws Exception if the reconfigure task fails
 */
public void createDisk(String vmdkDatastorePath, VirtualDiskType diskType, VirtualDiskMode diskMode, String rdmDeviceName, long sizeInMb,
    ManagedObjectReference morDs, int controllerKey) throws Exception {

    if (s_logger.isTraceEnabled())
        s_logger.trace("vCenter API trace - createDisk(). target MOR: " + _mor.getValue() + ", vmdkDatastorePath: " + vmdkDatastorePath + ", sizeInMb: " + sizeInMb +
            ", diskType: " + diskType + ", diskMode: " + diskMode + ", rdmDeviceName: " + rdmDeviceName + ", datastore: " + morDs.getValue() + ", controllerKey: " +
            controllerKey);

    assert (vmdkDatastorePath != null);
    assert (morDs != null);

    int ideControllerKey = getIDEDeviceControllerKey();
    if (controllerKey < 0) {
        // Negative controller key means "attach to the IDE controller".
        controllerKey = ideControllerKey;
    }

    VirtualDisk newDisk = new VirtualDisk();
    if (diskType == VirtualDiskType.THIN || diskType == VirtualDiskType.PREALLOCATED || diskType == VirtualDiskType.EAGER_ZEROED_THICK) {
        // Flat VMDK backing: thin vs. thick, optionally eagerly scrubbed.
        VirtualDiskFlatVer2BackingInfo backingInfo = new VirtualDiskFlatVer2BackingInfo();
        backingInfo.setDiskMode(VirtualDiskMode.PERSISTENT.value());
        if (diskType == VirtualDiskType.THIN) {
            backingInfo.setThinProvisioned(true);
        } else {
            backingInfo.setThinProvisioned(false);
        }

        if (diskType == VirtualDiskType.EAGER_ZEROED_THICK) {
            backingInfo.setEagerlyScrub(true);
        } else {
            backingInfo.setEagerlyScrub(false);
        }

        backingInfo.setDatastore(morDs);
        backingInfo.setFileName(vmdkDatastorePath);
        newDisk.setBacking(backingInfo);
    } else if (diskType == VirtualDiskType.RDM || diskType == VirtualDiskType.RDMP) {
        // Raw device mapping backing: virtual vs. physical compatibility mode.
        VirtualDiskRawDiskMappingVer1BackingInfo backingInfo = new VirtualDiskRawDiskMappingVer1BackingInfo();
        if (diskType == VirtualDiskType.RDM) {
            backingInfo.setCompatibilityMode("virtualMode");
        } else {
            backingInfo.setCompatibilityMode("physicalMode");
        }
        backingInfo.setDeviceName(rdmDeviceName);
        if (diskType == VirtualDiskType.RDM) {
            backingInfo.setDiskMode(VirtualDiskMode.PERSISTENT.value());
        }

        backingInfo.setDatastore(morDs);
        backingInfo.setFileName(vmdkDatastorePath);
        newDisk.setBacking(backingInfo);
    }

    int deviceNumber = getNextDeviceNumber(controllerKey);

    newDisk.setControllerKey(controllerKey);
    // Negative key marks a device being newly added; presumably vCenter
    // assigns the final key -- TODO confirm against vSphere docs.
    newDisk.setKey(-deviceNumber);
    newDisk.setUnitNumber(deviceNumber);
    newDisk.setCapacityInKB(sizeInMb * 1024);

    VirtualMachineConfigSpec reConfigSpec = new VirtualMachineConfigSpec();
    VirtualDeviceConfigSpec deviceConfigSpec = new VirtualDeviceConfigSpec();

    deviceConfigSpec.setDevice(newDisk);
    deviceConfigSpec.setFileOperation(VirtualDeviceConfigSpecFileOperation.CREATE);
    deviceConfigSpec.setOperation(VirtualDeviceConfigSpecOperation.ADD);

    reConfigSpec.getDeviceChange().add(deviceConfigSpec);

    ManagedObjectReference morTask = _context.getService().reconfigVMTask(_mor, reConfigSpec);
    boolean result = _context.getVimClient().waitForTask(morTask);

    if (!result) {
        if (s_logger.isTraceEnabled())
            s_logger.trace("vCenter API trace - createDisk() done(failed)");
        throw new Exception("Unable to create disk " + vmdkDatastorePath + " due to " + TaskMO.getTaskFailureInfo(_context, morTask));
    }

    _context.waitForTaskProgressDone(morTask);

    if (s_logger.isTraceEnabled())
        s_logger.trace("vCenter API trace - createDisk() done(successfully)");
}
/**
 * Rewrites a VMDK descriptor's adapter type to match the given disk
 * controller, by downloading the descriptor, patching it, and re-uploading.
 * Sparse (delta) VMDK files are left untouched.
 *
 * @param vmdkFileName   datastore path of the VMDK descriptor
 * @param diskController controller name the adapter type must match
 * @throws Exception if the controller maps to no valid VMDK adapter type
 */
public void updateVmdkAdapter(String vmdkFileName, String diskController) throws Exception {

    DiskControllerType diskControllerType = DiskControllerType.getType(diskController);
    VmdkAdapterType vmdkAdapterType = VmdkAdapterType.getAdapterType(diskControllerType);
    if (vmdkAdapterType == VmdkAdapterType.none) {
        String message = "Failed to attach disk due to invalid vmdk adapter type for vmdk file [" +
            vmdkFileName + "] with controller : " + diskControllerType;
        s_logger.debug(message);
        throw new Exception(message);
    }

    String newAdapterType = vmdkAdapterType.toString();
    Pair<VmdkFileDescriptor, byte[]> vmdkInfo = getVmdkFileInfo(vmdkFileName);
    VmdkFileDescriptor vmdkFileDescriptor = vmdkInfo.first();
    boolean isVmfsSparseFile = vmdkFileDescriptor.isVmfsSparseFile();
    if (!isVmfsSparseFile) {
        String currentAdapterType = vmdkFileDescriptor.getAdapterType();
        if (!currentAdapterType.equalsIgnoreCase(newAdapterType)) {
            // Patch the descriptor bytes and upload them back to the datastore.
            s_logger.info("Updating adapter type to " + newAdapterType + " for VMDK file " + vmdkFileName);
            Pair<DatacenterMO, String> dcInfo = getOwnerDatacenter();
            byte[] newVmdkContent = vmdkFileDescriptor.changeVmdkAdapterType(vmdkInfo.second(), newAdapterType);
            String vmdkUploadUrl = getContext().composeDatastoreBrowseUrl(dcInfo.first().getName(), vmdkFileName);

            getContext().uploadResourceContent(vmdkUploadUrl, newVmdkContent);
            s_logger.info("Updated VMDK file " + vmdkFileName);
        }
    }
}
/**
 * Validates the adapter type of a VMDK descriptor and rewrites it to
 * "lsilogic" when it carries an unsupported value. Sparse (delta) VMDK files
 * are left untouched.
 *
 * @param vmdkFileName datastore path of the VMDK descriptor
 * @throws Exception on download/upload errors
 */
public void updateAdapterTypeIfRequired(String vmdkFileName) throws Exception {
    // Validate existing adapter type of VMDK file. Update it with a supported adapter type if validation fails.
    Pair<VmdkFileDescriptor, byte[]> vmdkInfo = getVmdkFileInfo(vmdkFileName);
    VmdkFileDescriptor vmdkFileDescriptor = vmdkInfo.first();

    boolean isVmfsSparseFile = vmdkFileDescriptor.isVmfsSparseFile();
    if (!isVmfsSparseFile) {
        String currentAdapterTypeStr = vmdkFileDescriptor.getAdapterType();
        if (s_logger.isTraceEnabled()) {
            s_logger.trace("Detected adapter type  " + currentAdapterTypeStr + " for VMDK file " + vmdkFileName);
        }
        VmdkAdapterType currentAdapterType = VmdkAdapterType.getType(currentAdapterTypeStr);
        if (currentAdapterType == VmdkAdapterType.none) {
            // Value of currentAdapterType can be VmdkAdapterType.none only if adapter type of vmdk is set to either
            // lsisas1068 (SAS controller) or pvscsi (Vmware Paravirtual) only. Valid adapter type for those controllers is lsilogic.
            // Hence use adapter type lsilogic. Other adapter types ide, lsilogic, buslogic are valid and does not need to be modified.
            VmdkAdapterType newAdapterType = VmdkAdapterType.lsilogic;
            s_logger.debug("Updating adapter type to " + newAdapterType + " from " + currentAdapterTypeStr + " for VMDK file " + vmdkFileName);
            Pair<DatacenterMO, String> dcInfo = getOwnerDatacenter();
            byte[] newVmdkContent = vmdkFileDescriptor.changeVmdkAdapterType(vmdkInfo.second(), newAdapterType.toString());
            String vmdkUploadUrl = getContext().composeDatastoreBrowseUrl(dcInfo.first().getName(), vmdkFileName);

            getContext().uploadResourceContent(vmdkUploadUrl, newVmdkContent);
            s_logger.debug("Updated VMDK file " + vmdkFileName);
        }
    }
}
/**
 * Convenience overload: attaches the disk chain without specifying a disk
 * controller (controller selection is delegated to the full overload).
 */
public void attachDisk(String[] vmdkDatastorePathChain, ManagedObjectReference morDs) throws Exception {
    attachDisk(vmdkDatastorePathChain, morDs, null);
}
/**
 * Attaches an existing VMDK chain to this VM on the requested controller.
 * IDE attachment is rejected while the VM is powered on; for SCSI the unit
 * number is left for the helper to pick (-1).
 *
 * @param vmdkDatastorePathChain VMDK datastore paths, base file first
 * @param morDs                  datastore holding the disk
 * @param diskController         controller name, or null/blank for the default SCSI controller
 * @throws Exception if limits are exceeded or the reconfigure task fails
 */
public void attachDisk(String[] vmdkDatastorePathChain, ManagedObjectReference morDs, String diskController) throws Exception {

    if(s_logger.isTraceEnabled())
        s_logger.trace("vCenter API trace - attachDisk(). target MOR: " + _mor.getValue() + ", vmdkDatastorePath: "
                + new Gson().toJson(vmdkDatastorePathChain) + ", datastore: " + morDs.getValue());
    int controllerKey = 0;
    int unitNumber = 0;

    if (DiskControllerType.getType(diskController) == DiskControllerType.ide) {
        // IDE virtual disk cannot be added if VM is running
        if (getPowerState() == VirtualMachinePowerState.POWERED_ON) {
            throw new Exception("Adding a virtual disk over IDE controller is not supported while VM is running in VMware hypervisor. Please re-try when VM is not running.");
        }
        // Get next available unit number and controller key
        int ideDeviceCount = getNumberOfIDEDevices();
        if (ideDeviceCount >= VmwareHelper.MAX_IDE_CONTROLLER_COUNT * VmwareHelper.MAX_ALLOWED_DEVICES_IDE_CONTROLLER) {
            throw new Exception("Maximum limit of  devices supported on IDE controllers [" + VmwareHelper.MAX_IDE_CONTROLLER_COUNT
                    * VmwareHelper.MAX_ALLOWED_DEVICES_IDE_CONTROLLER + "] is reached.");
        }
        controllerKey = getIDEControllerKey(ideDeviceCount);
        unitNumber = getFreeUnitNumberOnIDEController(controllerKey);
    } else {
        if (StringUtils.isNotBlank(diskController)) {
            controllerKey = getScsiDiskControllerKey(diskController);
        } else {
            controllerKey = getScsiDeviceControllerKey();
        }
        // -1: let the disk-device helper pick a free unit number.
        unitNumber = -1;
    }
    // Serialize concurrent attach/detach on the same VM via the interned MOR value.
    synchronized (_mor.getValue().intern()) {
        VirtualDevice newDisk = VmwareHelper.prepareDiskDevice(this, null, controllerKey, vmdkDatastorePathChain, morDs, unitNumber, 1);
        if (StringUtils.isNotBlank(diskController)) {
            String vmdkFileName = vmdkDatastorePathChain[0];
            updateVmdkAdapter(vmdkFileName, diskController);
        }
        VirtualMachineConfigSpec reConfigSpec = new VirtualMachineConfigSpec();
        VirtualDeviceConfigSpec deviceConfigSpec = new VirtualDeviceConfigSpec();

        deviceConfigSpec.setDevice(newDisk);
        deviceConfigSpec.setOperation(VirtualDeviceConfigSpecOperation.ADD);

        reConfigSpec.getDeviceChange().add(deviceConfigSpec);

        ManagedObjectReference morTask = _context.getService().reconfigVMTask(_mor, reConfigSpec);
        boolean result = _context.getVimClient().waitForTask(morTask);

        if (!result) {
            if (s_logger.isTraceEnabled())
                s_logger.trace("vCenter API trace - attachDisk() done(failed)");
            throw new Exception("Failed to attach disk due to " + TaskMO.getTaskFailureInfo(_context, morTask));
        }

        _context.waitForTaskProgressDone(morTask);
    }

    if(s_logger.isTraceEnabled())
        s_logger.trace("vCenter API trace - attachDisk() done(successfully)");
}
/**
 * Finds the bus number of the virtual controller with the given device key.
 *
 * @param controllerKey device key of the controller to look up
 * @return the controller's bus number
 * @throws Exception if the VM has no controller with that key
 */
private int getControllerBusNumber(int controllerKey) throws Exception {
    List<VirtualDevice> devices = (List<VirtualDevice>)_context.getVimClient().
        getDynamicProperty(_mor, "config.hardware.device");

    if (devices != null) {
        for (VirtualDevice device : devices) {
            if (!(device instanceof VirtualController)) {
                continue;
            }
            if (device.getKey() == controllerKey) {
                return ((VirtualController)device).getBusNumber();
            }
        }
    }
    throw new Exception("SCSI Controller with key " + controllerKey + " is Not Found");
}
// vmdkDatastorePath: [datastore name] vmdkFilePath
/**
 * Detaches the disk whose datastore path matches (by prefix) from this VM,
 * optionally destroying its backing files, then scrubs stale references to
 * the detached disk chain from the VM's snapshot descriptor (.vmsd).
 *
 * @param vmdkDatastorePath datastore path of the disk to detach
 * @param deleteBackingFile when true, destroy the backing VMDK files
 * @return the detached disk's datastore-path / datastore-MOR chain
 * @throws Exception if the disk is not found, an IDE disk is detached while
 *                   powered on, or the reconfigure task fails
 */
public List<Pair<String, ManagedObjectReference>> detachDisk(String vmdkDatastorePath, boolean deleteBackingFile) throws Exception {

    if (s_logger.isTraceEnabled())
        s_logger.trace("vCenter API trace - detachDisk(). target MOR: " + _mor.getValue() + ", vmdkDatastorePath: " + vmdkDatastorePath + ", deleteBacking: " +
            deleteBackingFile);

    // Note: if VM has been taken snapshot, original backing file will be renamed, therefore, when we try to find the matching
    // VirtualDisk, we only perform prefix matching
    Pair<VirtualDisk, String> deviceInfo = getDiskDevice(vmdkDatastorePath);
    if (deviceInfo == null) {
        s_logger.warn("vCenter API trace - detachDisk() done (failed)");
        throw new Exception("No such disk device: " + vmdkDatastorePath);
    }

    // IDE virtual disk cannot be detached if VM is running
    if (deviceInfo.second() != null && deviceInfo.second().contains("ide")) {
        if (getPowerState() == VirtualMachinePowerState.POWERED_ON) {
            throw new Exception("Removing a virtual disk over IDE controller is not supported while VM is running in VMware hypervisor. " +
                "Please re-try when VM is not running.");
        }
    }

    // Capture the full backing chain before the device is removed.
    List<Pair<String, ManagedObjectReference>> chain = getDiskDatastorePathChain(deviceInfo.first(), true);

    VirtualMachineConfigSpec reConfigSpec = new VirtualMachineConfigSpec();
    VirtualDeviceConfigSpec deviceConfigSpec = new VirtualDeviceConfigSpec();

    deviceConfigSpec.setDevice(deviceInfo.first());
    if (deleteBackingFile) {
        deviceConfigSpec.setFileOperation(VirtualDeviceConfigSpecFileOperation.DESTROY);
    }
    deviceConfigSpec.setOperation(VirtualDeviceConfigSpecOperation.REMOVE);

    reConfigSpec.getDeviceChange().add(deviceConfigSpec);

    ManagedObjectReference morTask = _context.getService().reconfigVMTask(_mor, reConfigSpec);
    boolean result = _context.getVimClient().waitForTask(morTask);

    if (!result) {
        if (s_logger.isTraceEnabled())
            s_logger.trace("vCenter API trace - detachDisk() done (failed)");
        throw new Exception("Failed to detach disk due to " + TaskMO.getTaskFailureInfo(_context, morTask));
    }
    _context.waitForTaskProgressDone(morTask);

    // VMware does not update snapshot references to the detached disk, we have to work around it
    SnapshotDescriptor snapshotDescriptor = null;
    try {
        snapshotDescriptor = getSnapshotDescriptor();
    } catch (Exception e) {
        // Best-effort: a missing .vmsd just means there is nothing to scrub.
        s_logger.info("Unable to retrieve snapshot descriptor, will skip updating snapshot reference");
    }

    if (snapshotDescriptor != null) {
        for (Pair<String, ManagedObjectReference> pair : chain) {
            DatastoreFile dsFile = new DatastoreFile(pair.first());
            snapshotDescriptor.removeDiskReferenceFromSnapshot(dsFile.getFileName());
        }

        Pair<DatacenterMO, String> dcPair = getOwnerDatacenter();
        String dsPath = getSnapshotDescriptorDatastorePath();
        assert (dsPath != null);
        String url = getContext().composeDatastoreBrowseUrl(dcPair.second(), dsPath);
        getContext().uploadResourceContent(url, snapshotDescriptor.getVmsdContent());
    }

    if (s_logger.isTraceEnabled())
        s_logger.trace("vCenter API trace - detachDisk() done (successfully)");

    return chain;
}
/**
 * Detaches every virtual disk from this VM in a single reconfigure task.
 * Backing files are not deleted (REMOVE only, no DESTROY file operation).
 *
 * @throws Exception if the reconfigure task fails
 */
public void detachAllDisks() throws Exception {
    if (s_logger.isTraceEnabled())
        s_logger.trace("vCenter API trace - detachAllDisk(). target MOR: " + _mor.getValue());

    VirtualDisk[] disks = getAllDiskDevice();
    if (disks.length > 0) {
        VirtualMachineConfigSpec reConfigSpec = new VirtualMachineConfigSpec();
        VirtualDeviceConfigSpec[] deviceConfigSpecArray = new VirtualDeviceConfigSpec[disks.length];

        for (int i = 0; i < disks.length; i++) {
            deviceConfigSpecArray[i] = new VirtualDeviceConfigSpec();
            deviceConfigSpecArray[i].setDevice(disks[i]);
            deviceConfigSpecArray[i].setOperation(VirtualDeviceConfigSpecOperation.REMOVE);
        }
        reConfigSpec.getDeviceChange().addAll(Arrays.asList(deviceConfigSpecArray));

        ManagedObjectReference morTask = _context.getService().reconfigVMTask(_mor, reConfigSpec);
        boolean result = _context.getVimClient().waitForTask(morTask);

        if (!result) {
            if (s_logger.isTraceEnabled())
                s_logger.trace("vCenter API trace - detachAllDisk() done(failed)");
            throw new Exception("Failed to detach disk due to " + TaskMO.getTaskFailureInfo(_context, morTask));
        }

        _context.waitForTaskProgressDone(morTask);
    }

    if (s_logger.isTraceEnabled())
        s_logger.trace("vCenter API trace - detachAllDisk() done(successfully)");
}
// isoDatastorePath: [datastore name] isoFilePath
/**
 * Convenience overload: attaches the ISO without targeting a specific
 * existing CD-ROM device (key = null).
 */
public void attachIso(String isoDatastorePath, ManagedObjectReference morDs, boolean connect, boolean connectAtBoot) throws Exception {
    attachIso(isoDatastorePath, morDs, connect, connectAtBoot, null);
}
// isoDatastorePath: [datastore name] isoFilePath
/**
 * Attaches an ISO image to a CD-ROM device on this VM, reusing an existing
 * CD-ROM device when one is found (by key when given) or adding a new one on
 * the IDE controller otherwise.
 *
 * @param isoDatastorePath datastore path of the ISO image
 * @param morDs            datastore holding the ISO
 * @param connect          whether the device is connected immediately
 * @param connectAtBoot    whether the device connects at power-on
 * @param key              device key of a specific CD-ROM to target, or null
 * @throws Exception if the reconfigure task fails
 */
public void attachIso(String isoDatastorePath, ManagedObjectReference morDs,
        boolean connect, boolean connectAtBoot, Integer key) throws Exception {

    if (s_logger.isTraceEnabled())
        s_logger.trace("vCenter API trace - attachIso(). target MOR: " + _mor.getValue() + ", isoDatastorePath: " + isoDatastorePath + ", datastore: " +
            morDs.getValue() + ", connect: " + connect + ", connectAtBoot: " + connectAtBoot);

    assert (isoDatastorePath != null);
    assert (morDs != null);

    boolean newCdRom = false;
    VirtualCdrom cdRom;
    if (key == null) {
        cdRom = (VirtualCdrom) getIsoDevice();
    } else {
        cdRom = (VirtualCdrom) getIsoDevice(key);
    }
    if (cdRom == null) {
        // No existing CD-ROM device: create one on the IDE controller.
        newCdRom = true;
        cdRom = new VirtualCdrom();
        cdRom.setControllerKey(getIDEDeviceControllerKey());

        int deviceNumber = getNextIDEDeviceNumber();
        cdRom.setUnitNumber(deviceNumber);
        // Negative key marks a device being newly added -- presumably vCenter
        // assigns the final key; TODO confirm against vSphere docs.
        cdRom.setKey(-deviceNumber);
    }

    VirtualDeviceConnectInfo cInfo = new VirtualDeviceConnectInfo();
    cInfo.setConnected(connect);
    cInfo.setStartConnected(connectAtBoot);
    cdRom.setConnectable(cInfo);

    VirtualCdromIsoBackingInfo backingInfo = new VirtualCdromIsoBackingInfo();
    backingInfo.setFileName(isoDatastorePath);
    backingInfo.setDatastore(morDs);
    cdRom.setBacking(backingInfo);

    VirtualMachineConfigSpec reConfigSpec = new VirtualMachineConfigSpec();
    //VirtualDeviceConfigSpec[] deviceConfigSpecArray = new VirtualDeviceConfigSpec[1];
    VirtualDeviceConfigSpec deviceConfigSpec = new VirtualDeviceConfigSpec();

    deviceConfigSpec.setDevice(cdRom);
    if (newCdRom) {
        deviceConfigSpec.setOperation(VirtualDeviceConfigSpecOperation.ADD);
    } else {
        deviceConfigSpec.setOperation(VirtualDeviceConfigSpecOperation.EDIT);
    }

    //deviceConfigSpecArray[0] = deviceConfigSpec;
    reConfigSpec.getDeviceChange().add(deviceConfigSpec);

    ManagedObjectReference morTask = _context.getService().reconfigVMTask(_mor, reConfigSpec);
    boolean result = _context.getVimClient().waitForTask(morTask);

    if (!result) {
        if (s_logger.isTraceEnabled())
            s_logger.trace("vCenter API trace - attachIso() done(failed)");
        throw new Exception("Failed to attach ISO due to " + TaskMO.getTaskFailureInfo(_context, morTask));
    }

    _context.waitForTaskProgressDone(morTask);

    if (s_logger.isTraceEnabled())
        s_logger.trace("vCenter API trace - attachIso() done(successfully)");
}
/**
 * Detaches the given ISO from this VM without forcing, i.e. a pending
 * "cdrom disconnect locked" question from the hypervisor is declined.
 *
 * @param isoDatastorePath datastore path of the ISO to detach
 * @return the device key of the CDROM device that was reconfigured
 * @throws Exception if no CDROM device is found or the reconfigure task fails
 */
public int detachIso(String isoDatastorePath) throws Exception {
    final boolean forceDetach = false;
    return detachIso(isoDatastorePath, forceDetach);
}
/**
 * Detaches the ISO at the given datastore path from this VM's CDROM device by
 * replacing the ISO backing with an empty remote-passthrough backing (the CDROM
 * device itself stays attached). While the reconfigure task runs, a background
 * monitor thread watches for a pending VM question and answers the
 * "msg.cdromdisconnect.locked" question programmatically so the task cannot
 * hang forever: declined by default, accepted when {@code force} is true.
 *
 * @param isoDatastorePath datastore path of the ISO currently attached
 * @param force answer YES (force the disconnect) to the lock question instead of NO
 * @return the device key of the CDROM device that was reconfigured
 * @throws Exception if no CDROM device is found or the reconfigure task fails
 */
public int detachIso(String isoDatastorePath, final boolean force) throws Exception {
    if (s_logger.isTraceEnabled())
        s_logger.trace("vCenter API trace - detachIso(). target MOR: " + _mor.getValue() + ", isoDatastorePath: " + isoDatastorePath);
    VirtualDevice device = getIsoDevice(isoDatastorePath);
    if (device == null) {
        if (s_logger.isTraceEnabled())
            s_logger.trace("vCenter API trace - detachIso() done(failed)");
        throw new Exception("Unable to find a CDROM device");
    }
    // Swap the ISO backing for an empty passthrough backing; an EDIT (not REMOVE)
    // operation below keeps the device while dropping the ISO file reference.
    VirtualCdromRemotePassthroughBackingInfo backingInfo = new VirtualCdromRemotePassthroughBackingInfo();
    backingInfo.setDeviceName("");
    device.setBacking(backingInfo);
    VirtualMachineConfigSpec reConfigSpec = new VirtualMachineConfigSpec();
    //VirtualDeviceConfigSpec[] deviceConfigSpecArray = new VirtualDeviceConfigSpec[1];
    VirtualDeviceConfigSpec deviceConfigSpec = new VirtualDeviceConfigSpec();
    deviceConfigSpec.setDevice(device);
    deviceConfigSpec.setOperation(VirtualDeviceConfigSpecOperation.EDIT);
    //deviceConfigSpecArray[0] = deviceConfigSpec;
    reConfigSpec.getDeviceChange().add(deviceConfigSpec);
    ManagedObjectReference morTask = _context.getService().reconfigVMTask(_mor, reConfigSpec);
    // Monitor VM questions
    // flags[0] is the stop signal for the monitor; a one-element array is used so
    // the anonymous Runnable can observe the write made in the finally block below.
    final Boolean[] flags = {false};
    final VirtualMachineMO vmMo = this;
    Future<?> future = MonitorServiceExecutor.submit(new Runnable() {
        @Override
        public void run() {
            s_logger.info("VM Question monitor started...");
            // Poll the VM runtime once a second until told to stop.
            while (!flags[0]) {
                try {
                    VirtualMachineRuntimeInfo runtimeInfo = vmMo.getRuntimeInfo();
                    VirtualMachineQuestionInfo question = runtimeInfo.getQuestion();
                    if (question != null) {
                        if (s_logger.isTraceEnabled()) {
                            s_logger.trace("Question id: " + question.getId());
                            s_logger.trace("Question text: " + question.getText());
                        }
                        if (question.getMessage() != null) {
                            // Preferred path: the question carries a structured message list.
                            for (VirtualMachineMessage msg : question.getMessage()) {
                                if (s_logger.isTraceEnabled()) {
                                    s_logger.trace("msg id: " + msg.getId());
                                    s_logger.trace("msg text: " + msg.getText());
                                }
                                if ("msg.cdromdisconnect.locked".equalsIgnoreCase(msg.getId())) {
                                    s_logger.info("Found that VM has a pending question that we need to answer programmatically, question id: " + msg.getId() +
                                            ", for safe operation we will automatically decline it");
                                    vmMo.answerVM(question.getId(), force ? ANSWER_YES : ANSWER_NO);
                                    break;
                                }
                            }
                        } else if (question.getText() != null) {
                            // Fallback path: the message arrives as "<id>:<text>" inside the
                            // plain question text instead of a structured message list.
                            String text = question.getText();
                            String msgId;
                            String msgText;
                            if (s_logger.isDebugEnabled()) {
                                s_logger.debug("question text : " + text);
                            }
                            String[] tokens = text.split(":");
                            msgId = tokens[0];
                            msgText = tokens[1];
                            if ("msg.cdromdisconnect.locked".equalsIgnoreCase(msgId)) {
                                s_logger.info("Found that VM has a pending question that we need to answer programmatically, question id: " + question.getId() +
                                        ". Message id : " + msgId + ". Message text : " + msgText + ", for safe operation we will automatically decline it.");
                                vmMo.answerVM(question.getId(), force ? ANSWER_YES : ANSWER_NO);
                            }
                        }
                        // Log (trace only) the answer choices offered by the hypervisor.
                        ChoiceOption choice = question.getChoice();
                        if (choice != null) {
                            for (ElementDescription info : choice.getChoiceInfo()) {
                                if (s_logger.isTraceEnabled()) {
                                    s_logger.trace("Choice option key: " + info.getKey());
                                    s_logger.trace("Choice option label: " + info.getLabel());
                                }
                            }
                        }
                    }
                } catch (Throwable e) {
                    // Deliberately swallow so one bad poll doesn't kill the monitor loop.
                    s_logger.error("Unexpected exception: ", e);
                }
                try {
                    Thread.sleep(1000);
                } catch (InterruptedException e) {
                    s_logger.debug("[ignored] interupted while handling vm question about iso detach.");
                }
            }
            s_logger.info("VM Question monitor stopped");
        }
    });
    try {
        boolean result = _context.getVimClient().waitForTask(morTask);
        if (!result) {
            if (s_logger.isDebugEnabled())
                s_logger.trace("vCenter API trace - detachIso() done(failed)");
            throw new Exception("Failed to detachIso due to " + TaskMO.getTaskFailureInfo(_context, morTask));
        }
        _context.waitForTaskProgressDone(morTask);
        s_logger.trace("vCenter API trace - detachIso() done(successfully)");
    } finally {
        // Always stop the question monitor, whether the reconfigure succeeded or not.
        flags[0] = true;
        future.cancel(true);
    }
    return device.getKey();
}
/**
 * Downloads and parses the VMDK descriptor file at the given datastore path.
 *
 * @param vmdkDatastorePath datastore path of the VMDK descriptor file
 * @return a pair of the parsed descriptor and the raw file content
 * @throws Exception on download or parse failure
 */
public Pair<VmdkFileDescriptor, byte[]> getVmdkFileInfo(String vmdkDatastorePath) throws Exception {
    if (s_logger.isTraceEnabled())
        s_logger.trace("vCenter API trace - getVmdkFileInfo(). target MOR: " + _mor.getValue() + ", vmdkDatastorePath: " + vmdkDatastorePath);
    // Fetch the raw descriptor bytes through the datastore browse URL of the owner datacenter.
    Pair<DatacenterMO, String> ownerDc = getOwnerDatacenter();
    String browseUrl = getContext().composeDatastoreBrowseUrl(ownerDc.second(), vmdkDatastorePath);
    byte[] rawContent = getContext().getResourceContent(browseUrl);
    VmdkFileDescriptor parsedDescriptor = new VmdkFileDescriptor();
    parsedDescriptor.parse(rawContent);
    Pair<VmdkFileDescriptor, byte[]> result = new Pair<VmdkFileDescriptor, byte[]>(parsedDescriptor, rawContent);
    if (s_logger.isTraceEnabled()) {
        s_logger.trace("vCenter API trace - getVmdkFileInfo() done");
        s_logger.trace("VMDK file descriptor: " + new Gson().toJson(result.first()));
    }
    return result;
}
/**
 * Exports this (powered-off) VM as OVF, downloading each disk over an HTTP NFC
 * lease, writing the OVF descriptor, and optionally packing everything into a
 * single OVA archive via an external "tar" process.
 *
 * @param exportDir local directory to place the exported files in
 * @param exportName base name for the exported files (no extension)
 * @param packToOva when true, tar the OVF + disks into {@code exportName}.ova
 * @param leaveOvaFileOnly when true, delete the intermediate OVF/VMDK files afterwards
 * @throws Exception if the VM is not powered off, the export lease cannot be
 *         obtained, or the download/packaging does not complete
 */
public void exportVm(String exportDir, String exportName, boolean packToOva, boolean leaveOvaFileOnly) throws Exception {
    ManagedObjectReference morOvf = _context.getServiceContent().getOvfManager();
    VirtualMachineRuntimeInfo runtimeInfo = getRuntimeInfo();
    HostMO hostMo = new HostMO(_context, runtimeInfo.getHost());
    String hostName = hostMo.getHostName();
    String vmName = getVmName();
    DatacenterMO dcMo = new DatacenterMO(_context, hostMo.getHyperHostDatacenter());
    // Export is only valid for powered-off VMs.
    if (runtimeInfo.getPowerState() != VirtualMachinePowerState.POWERED_OFF) {
        String msg = "Unable to export VM because it is not at powerdOff state. vmName: " + vmName + ", host: " + hostName;
        s_logger.error(msg);
        throw new Exception(msg);
    }
    // Acquire an HTTP NFC lease which exposes per-disk download URLs.
    ManagedObjectReference morLease = _context.getService().exportVm(getMor());
    if (morLease == null) {
        s_logger.error("exportVm() failed");
        throw new Exception("exportVm() failed");
    }
    HttpNfcLeaseMO leaseMo = new HttpNfcLeaseMO(_context, morLease);
    HttpNfcLeaseState state = leaseMo.waitState(new HttpNfcLeaseState[] {HttpNfcLeaseState.READY, HttpNfcLeaseState.ERROR});
    try {
        if (state == HttpNfcLeaseState.READY) {
            // Progress must be reported periodically or vCenter may time the lease out.
            final HttpNfcLeaseMO.ProgressReporter progressReporter = leaseMo.createProgressReporter();
            boolean success = false;
            // All files written locally, tracked for OVA packing and optional cleanup.
            List<String> fileNames = new ArrayList<String>();
            try {
                HttpNfcLeaseInfo leaseInfo = leaseMo.getLeaseInfo();
                final long totalBytes = leaseInfo.getTotalDiskCapacityInKB() * 1024;
                long totalBytesDownloaded = 0;
                List<HttpNfcLeaseDeviceUrl> deviceUrls = leaseInfo.getDeviceUrl();
                s_logger.info("volss: copy vmdk and ovf file starts " + System.currentTimeMillis());
                if (deviceUrls != null) {
                    OvfFile[] ovfFiles = new OvfFile[deviceUrls.size()];
                    // Download each disk and record its metadata for the OVF descriptor.
                    for (int i = 0; i < deviceUrls.size(); i++) {
                        String deviceId = deviceUrls.get(i).getKey();
                        String deviceUrlStr = deviceUrls.get(i).getUrl();
                        String orgDiskFileName = deviceUrlStr.substring(deviceUrlStr.lastIndexOf("/") + 1);
                        String diskFileName = String.format("%s-disk%d%s", exportName, i, VmwareHelper.getFileExtension(orgDiskFileName, ".vmdk"));
                        // The lease URL may contain a "*" placeholder for the host name.
                        String diskUrlStr = deviceUrlStr.replace("*", hostName);
                        diskUrlStr = HypervisorHostHelper.resolveHostNameInUrl(dcMo, diskUrlStr);
                        String diskLocalPath = exportDir + File.separator + diskFileName;
                        fileNames.add(diskLocalPath);
                        if (s_logger.isInfoEnabled()) {
                            s_logger.info("Download VMDK file for export. url: " + deviceUrlStr);
                        }
                        long lengthOfDiskFile = _context.downloadVmdkFile(diskUrlStr, diskLocalPath, totalBytesDownloaded, new ActionDelegate<Long>() {
                            @Override
                            public void action(Long param) {
                                if (s_logger.isTraceEnabled()) {
                                    s_logger.trace("Download progress " + param + "/" + toHumanReadableSize(totalBytes));
                                }
                                // Keep the NFC lease alive by reporting percentage progress.
                                progressReporter.reportProgress((int)(param * 100 / totalBytes));
                            }
                        });
                        totalBytesDownloaded += lengthOfDiskFile;
                        OvfFile ovfFile = new OvfFile();
                        ovfFile.setPath(diskFileName);
                        ovfFile.setDeviceId(deviceId);
                        ovfFile.setSize(lengthOfDiskFile);
                        ovfFiles[i] = ovfFile;
                    }
                    // write OVF descriptor file
                    OvfCreateDescriptorParams ovfDescParams = new OvfCreateDescriptorParams();
                    ovfDescParams.getOvfFiles().addAll(Arrays.asList(ovfFiles));
                    OvfCreateDescriptorResult ovfCreateDescriptorResult = _context.getService().createDescriptor(morOvf, getMor(), ovfDescParams);
                    String ovfPath = exportDir + File.separator + exportName + ".ovf";
                    fileNames.add(ovfPath);
                    OutputStreamWriter out = new OutputStreamWriter(new FileOutputStream(ovfPath),"UTF-8");
                    out.write(ovfCreateDescriptorResult.getOvfDescriptor());
                    out.close();
                    // tar files into OVA
                    if (packToOva) {
                        // Important! we need to sync file system before we can safely use tar to work around a linux kernal bug(or feature)
                        s_logger.info("Sync file system before we package OVA...");
                        Script commandSync = new Script(true, "sync", 0, s_logger);
                        commandSync.execute();
                        Script command = new Script(false, "tar", 0, s_logger);
                        command.setWorkDir(exportDir);
                        command.add("-cf", exportName + ".ova");
                        command.add(exportName + ".ovf"); // OVF file should be the first file in OVA archive
                        for (String name : fileNames) {
                            command.add((new File(name).getName()));
                        }
                        s_logger.info("Package OVA with commmand: " + command.toString());
                        command.execute();
                        // to be safe, physically test existence of the target OVA file
                        if ((new File(exportDir + File.separator + exportName + ".ova")).exists()) {
                            success = true;
                        } else {
                            s_logger.error(exportDir + File.separator + exportName + ".ova is not created as expected");
                        }
                    } else {
                        success = true;
                    }
                }
                s_logger.info("volss: copy vmdk and ovf file finishes " + System.currentTimeMillis());
            } catch (Throwable e) {
                // Caught broadly so the finally block below can still fail via !success.
                s_logger.error("Unexpected exception ", e);
            } finally {
                progressReporter.close();
                if (leaveOvaFileOnly) {
                    for (String name : fileNames) {
                        new File(name).delete();
                    }
                }
                if (!success)
                    throw new Exception("Unable to finish the whole process to package as a OVA file");
            }
        }
    } finally {
        // Release the NFC lease regardless of outcome.
        leaseMo.updateLeaseProgress(100);
        leaseMo.completeLease();
    }
}
// snapshot directory in format of: /vmfs/volumes/<datastore name>/<path>
/**
 * Rewrites this VM's .vmx file so that its "workingDir" entry (the snapshot
 * directory) points at the given path. The .vmx is downloaded over the
 * datastore browse URL, edited line by line, and uploaded back. If no
 * "workingDir" line exists, one is appended.
 *
 * @param snapshotDir new snapshot working directory, e.g. /vmfs/volumes/&lt;ds&gt;/&lt;path&gt;
 * @throws Exception on download/upload failure
 * @deprecated see the trailing note — VMware writes incorrect snapshot VMDK
 *             paths to disk backing info regardless, so this edit alone is unreliable
 */
@Deprecated
public void setSnapshotDirectory(String snapshotDir) throws Exception {
    VirtualMachineFileInfo fileInfo = getFileInfo();
    Pair<DatacenterMO, String> dcInfo = getOwnerDatacenter();
    String vmxUrl = _context.composeDatastoreBrowseUrl(dcInfo.second(), fileInfo.getVmPathName());
    byte[] vmxContent = _context.getResourceContent(vmxUrl);
    BufferedReader in = null;
    BufferedWriter out = null;
    ByteArrayOutputStream bos = new ByteArrayOutputStream();
    boolean replaced = false;
    try {
        in = new BufferedReader(new InputStreamReader(new ByteArrayInputStream(vmxContent),"UTF-8"));
        out = new BufferedWriter(new OutputStreamWriter(bos,"UTF-8"));
        String line;
        // Copy every line through, substituting any existing workingDir entry.
        while ((line = in.readLine()) != null) {
            if (line.startsWith("workingDir")) {
                replaced = true;
                out.write(String.format("workingDir=\"%s\"", snapshotDir));
                out.newLine();
            } else {
                out.write(line);
                out.newLine();
            }
        }
        // No workingDir line found: append one at the end of the file.
        if (!replaced) {
            out.newLine();
            out.write(String.format("workingDir=\"%s\"", snapshotDir));
            out.newLine();
        }
    } finally {
        if (in != null) {
            in.close();
        }
        if (out != null) {
            out.close();
        }
    }
    _context.uploadResourceContent(vmxUrl, bos.toByteArray());
    // It seems that I don't need to do re-registration. VMware has bug in writing the correct snapshot's VMDK path to
    // its disk backing info anyway.
    // redoRegistration();
}
// destName does not contain extension name
/**
 * Backs up the current snapshot disk chain of this VM to a destination
 * datastore directory. For each link in the chain it copies the data file
 * (-flat or -delta) and uploads a rewritten VMDK descriptor whose base/parent
 * references use the renumbered destination names ("&lt;destName&gt;&lt;n&gt;.vmdk",
 * numbered from the base of the chain). Finally a "&lt;destName&gt;.vdisk"
 * metadata file describing the chain is uploaded.
 *
 * @param deviceName restrict the backup to this device; null backs up all disks
 * @param morDestDs destination datastore
 * @param destDsDirectory destination directory on that datastore
 * @param destName base name for the backed-up files (no extension)
 * @param includeBase when false, the last (base) link of the chain is skipped
 * @throws Exception if the VM has no snapshot or a copy/upload fails
 */
public void backupCurrentSnapshot(String deviceName, ManagedObjectReference morDestDs, String destDsDirectory, String destName, boolean includeBase) throws Exception {
    SnapshotDescriptor descriptor = getSnapshotDescriptor();
    SnapshotInfo[] snapshotInfo = descriptor.getCurrentDiskChain();
    if (snapshotInfo.length == 0) {
        String msg = "No snapshot found in this VM";
        throw new Exception(msg);
    }
    HostMO hostMo = getRunningHost();
    DatacenterMO dcMo = getOwnerDatacenter().first();
    List<Pair<ManagedObjectReference, String>> mounts = hostMo.getDatastoreMountsOnHost();
    VirtualMachineFileInfo vmFileInfo = getFileInfo();
    // Each entry: (descriptor file name, data file name, parent descriptor name or null).
    List<Ternary<String, String, String>> backupInfo = new ArrayList<Ternary<String, String, String>>();
    // Chain index 0 is the newest snapshot; the last entry is the base disk.
    for (int i = 0; i < snapshotInfo.length; i++) {
        if (!includeBase && i == snapshotInfo.length - 1) {
            break;
        }
        SnapshotDescriptor.DiskInfo[] disks = snapshotInfo[i].getDisks();
        if (disks != null) {
            String destBaseFileName;
            String destFileName;
            String destParentFileName;
            for (SnapshotDescriptor.DiskInfo disk : disks) {
                if (deviceName == null || deviceName.equals(disk.getDeviceName())) {
                    String srcVmdkFullDsPath = getSnapshotDiskFileDatastorePath(vmFileInfo, mounts, disk.getDiskFileName());
                    Pair<DatastoreMO, String> srcDsInfo = getOwnerDatastore(srcVmdkFullDsPath);
                    Pair<VmdkFileDescriptor, byte[]> vmdkInfo = getVmdkFileInfo(srcVmdkFullDsPath);
                    String srcVmdkBaseFilePath = DatastoreFile.getCompanionDatastorePath(srcVmdkFullDsPath, vmdkInfo.first().getBaseFileName());
                    // Destination names are numbered from the base of the chain upward.
                    destFileName = destName + (snapshotInfo.length - i - 1) + ".vmdk";
                    if (vmdkInfo.first().getParentFileName() != null) {
                        // Delta link: has a parent one step closer to the base.
                        destBaseFileName = destName + (snapshotInfo.length - i - 1) + "-delta.vmdk";
                        destParentFileName = destName + (snapshotInfo.length - i - 2) + ".vmdk";
                    } else {
                        // Base link: flat extent, no parent.
                        destBaseFileName = destName + (snapshotInfo.length - i - 1) + "-flat.vmdk";
                        destParentFileName = null;
                    }
                    s_logger.info("Copy VMDK base file " + srcVmdkBaseFilePath + " to " + destDsDirectory + "/" + destBaseFileName);
                    srcDsInfo.first().copyDatastoreFile(srcVmdkBaseFilePath, dcMo.getMor(), morDestDs, destDsDirectory + "/" + destBaseFileName, dcMo.getMor(), true);
                    // Rewrite the descriptor so it references the renamed base/parent files.
                    byte[] newVmdkContent = VmdkFileDescriptor.changeVmdkContentBaseInfo(vmdkInfo.second(), destBaseFileName, destParentFileName);
                    String vmdkUploadUrl = getContext().composeDatastoreBrowseUrl(dcMo.getName(), destDsDirectory + "/" + destFileName);
                    s_logger.info("Upload VMDK content file to " + destDsDirectory + "/" + destFileName);
                    getContext().uploadResourceContent(vmdkUploadUrl, newVmdkContent);
                    backupInfo.add(new Ternary<String, String, String>(destFileName, destBaseFileName, destParentFileName));
                }
            }
        }
    }
    byte[] vdiskInfo = VmwareHelper.composeDiskInfo(backupInfo, snapshotInfo.length, includeBase);
    String vdiskUploadUrl = getContext().composeDatastoreBrowseUrl(dcMo.getName(), destDsDirectory + "/" + destName + ".vdisk");
    getContext().uploadResourceContent(vdiskUploadUrl, vdiskInfo);
}
/**
 * Resolves the datastore paths of every VMDK in this VM's current snapshot
 * disk chain, optionally restricted to a single disk device.
 *
 * @param diskDevice device name to filter on (case-insensitive); null selects all disks
 * @return datastore paths of the chain's VMDK files, newest snapshot first
 * @throws Exception on failure to read VM or snapshot metadata
 */
public String[] getCurrentSnapshotDiskChainDatastorePaths(String diskDevice) throws Exception {
    HostMO runningHost = getRunningHost();
    List<Pair<ManagedObjectReference, String>> dsMounts = runningHost.getDatastoreMountsOnHost();
    VirtualMachineFileInfo fileInfo = getFileInfo();
    SnapshotDescriptor snapshotDescriptor = getSnapshotDescriptor();
    SnapshotInfo[] chain = snapshotDescriptor.getCurrentDiskChain();
    List<String> resolvedPaths = new ArrayList<String>();
    for (SnapshotInfo chainLink : chain) {
        SnapshotDescriptor.DiskInfo[] linkDisks = chainLink.getDisks();
        if (linkDisks == null) {
            continue;
        }
        for (SnapshotDescriptor.DiskInfo linkDisk : linkDisks) {
            // null filter means "every disk"; otherwise match the device name case-insensitively.
            if (diskDevice == null || diskDevice.equalsIgnoreCase(linkDisk.getDeviceName())) {
                resolvedPaths.add(getSnapshotDiskFileDatastorePath(fileInfo, dsMounts, linkDisk.getDiskFileName()));
            }
        }
    }
    return resolvedPaths.toArray(new String[0]);
}
// return the disk chain (VMDK datastore paths) for cloned snapshot
/**
 * Clones a new VM from this VM's current snapshot disk chain.
 *
 * @param clonedVmName name of the clone
 * @param cpuSpeedMHz CPU speed for the clone
 * @param memoryMb memory size for the clone in MB
 * @param diskDevice device name to restrict the chain to; null uses all disks
 * @param morDs datastore for the clone (must not be null)
 * @param virtualHardwareVersion virtual hardware version for the clone
 * @return the cloned VM and the disk chain (VMDK datastore paths) used to build it
 * @throws Exception on clone failure
 */
public Pair<VirtualMachineMO, String[]> cloneFromCurrentSnapshot(String clonedVmName, int cpuSpeedMHz, int memoryMb, String diskDevice, ManagedObjectReference morDs, String virtualHardwareVersion)
    throws Exception {
    assert (morDs != null);
    String[] diskChain = getCurrentSnapshotDiskChainDatastorePaths(diskDevice);
    VirtualMachineMO clonedVm = cloneFromDiskChain(clonedVmName, cpuSpeedMHz, memoryMb, diskChain, morDs, virtualHardwareVersion);
    return new Pair<VirtualMachineMO, String[]>(clonedVm, diskChain);
}
/**
 * Creates a worker VM on the given datastore and attaches the supplied VMDK
 * disk chain to it. On any failure after the worker VM is created, the disks
 * are detached and the worker VM destroyed before the exception propagates.
 *
 * @param clonedVmName name for the worker VM
 * @param cpuSpeedMHz CPU speed for the worker VM
 * @param memoryMb memory size in MB
 * @param disks VMDK datastore paths forming the disk chain (at least one)
 * @param morDs datastore to create the worker VM on
 * @param cloneHardwareVersion virtual hardware version for the worker VM
 * @return the configured worker VM
 * @throws Exception if the worker VM cannot be created or configured
 */
public VirtualMachineMO cloneFromDiskChain(String clonedVmName, int cpuSpeedMHz, int memoryMb, String[] disks, ManagedObjectReference morDs, String cloneHardwareVersion) throws Exception {
    assert (disks != null);
    assert (disks.length >= 1);
    HostMO runningHost = getRunningHost();
    VirtualMachineMO workerVm = HypervisorHostHelper.createWorkerVM(runningHost, new DatastoreMO(runningHost.getContext(), morDs), clonedVmName, cloneHardwareVersion);
    if (workerVm == null) {
        throw new Exception("Unable to find just-created blank VM");
    }
    boolean configured = false;
    try {
        // Attach the whole disk chain as a single ADD device change.
        VirtualDevice chainDisk = VmwareHelper.prepareDiskDevice(workerVm, null, -1, disks, morDs, -1, 1);
        VirtualDeviceConfigSpec addDiskSpec = new VirtualDeviceConfigSpec();
        addDiskSpec.setDevice(chainDisk);
        addDiskSpec.setOperation(VirtualDeviceConfigSpecOperation.ADD);
        VirtualMachineConfigSpec reconfigSpec = new VirtualMachineConfigSpec();
        reconfigSpec.getDeviceChange().add(addDiskSpec);
        workerVm.configureVm(reconfigSpec);
        configured = true;
        return workerVm;
    } finally {
        // Roll back the half-built worker VM if configuration did not complete.
        if (!configured) {
            workerVm.detachAllDisks();
            workerVm.destroy();
        }
    }
}
/**
 * Looks up the guest OS descriptor matching the given guest OS id in this VM's
 * environment browser configuration options.
 *
 * @param guestOsId guest OS id to look up; when null, this VM's own guest id is used
 * @return the matching descriptor, or null if none matches
 * @throws Exception on vCenter query failure
 */
public GuestOsDescriptor getGuestOsDescriptor(String guestOsId) throws Exception {
    String effectiveGuestId = (guestOsId != null) ? guestOsId : getGuestId();
    ManagedObjectReference envBrowser = _context.getVimClient().getMoRefProp(_mor, "environmentBrowser");
    VirtualMachineConfigOption configOption = _context.getService().queryConfigOption(envBrowser, null, null);
    for (GuestOsDescriptor candidate : configOption.getGuestOSDescriptor()) {
        // Case-insensitive match; a null effective id matches nothing.
        if (effectiveGuestId != null && effectiveGuestId.equalsIgnoreCase(candidate.getId())) {
            return candidate;
        }
    }
    return null;
}
/**
 * Hot-adds a single virtual device to this VM via a reconfigure task.
 *
 * @param device fully-prepared device to add
 * @throws Exception if the reconfigure task fails
 */
public void plugDevice(VirtualDevice device) throws Exception {
    VirtualDeviceConfigSpec addSpec = new VirtualDeviceConfigSpec();
    addSpec.setDevice(device);
    addSpec.setOperation(VirtualDeviceConfigSpecOperation.ADD);
    VirtualMachineConfigSpec reconfigSpec = new VirtualMachineConfigSpec();
    reconfigSpec.getDeviceChange().add(addSpec);
    // configureVm() returns true on success.
    if (!configureVm(reconfigSpec)) {
        throw new Exception("Failed to add devices");
    }
}
/**
 * Removes a single virtual device from this VM via a reconfigure task.
 *
 * @param device the device to remove
 * @throws Exception if the reconfigure task fails
 */
public void tearDownDevice(VirtualDevice device) throws Exception {
    VirtualDeviceConfigSpec removalSpec = new VirtualDeviceConfigSpec();
    removalSpec.setDevice(device);
    removalSpec.setOperation(VirtualDeviceConfigSpecOperation.REMOVE);
    VirtualMachineConfigSpec reconfigSpec = new VirtualMachineConfigSpec();
    reconfigSpec.getDeviceChange().add(removalSpec);
    // configureVm() returns true on success.
    if (!configureVm(reconfigSpec)) {
        throw new Exception("Failed to detach devices");
    }
}
/**
 * Removes every device of this VM whose class matches one of the given
 * classes, in a single reconfigure task. Does nothing if no device matches.
 *
 * @param deviceClasses device classes to tear down
 * @throws Exception if the reconfigure task fails
 */
public void tearDownDevices(Class<?>[] deviceClasses) throws Exception {
    VirtualDevice[] matchedDevices = getMatchedDevices(deviceClasses);
    if (matchedDevices.length == 0) {
        return;
    }
    VirtualMachineConfigSpec reconfigSpec = new VirtualMachineConfigSpec();
    for (VirtualDevice target : matchedDevices) {
        VirtualDeviceConfigSpec removalSpec = new VirtualDeviceConfigSpec();
        removalSpec.setDevice(target);
        removalSpec.setOperation(VirtualDeviceConfigSpecOperation.REMOVE);
        reconfigSpec.getDeviceChange().add(removalSpec);
    }
    // configureVm() returns true on success.
    if (!configureVm(reconfigSpec)) {
        throw new Exception("Failed to detach devices");
    }
}
/**
 * Copies every disk file of this VM (descriptor VMDK plus its companion data
 * file) to the given directory on the destination datastore, preserving file
 * names. Existing destination files are overwritten (force flag is true).
 *
 * @param destDsMo destination datastore
 * @param destDsDir destination directory on that datastore
 * @param followDiskChain when true, the full parent chain of each disk is copied as well
 * @throws Exception on copy failure
 */
public void copyAllVmDiskFiles(DatastoreMO destDsMo, String destDsDir, boolean followDiskChain) throws Exception {
    VirtualDevice[] disks = getAllDiskDevice();
    DatacenterMO dcMo = getOwnerDatacenter().first();
    if (disks != null) {
        for (VirtualDevice disk : disks) {
            List<Pair<String, ManagedObjectReference>> vmdkFiles = getDiskDatastorePathChain((VirtualDisk)disk, followDiskChain);
            for (Pair<String, ManagedObjectReference> fileItem : vmdkFiles) {
                DatastoreMO srcDsMo = new DatastoreMO(_context, fileItem.second());
                DatastoreFile srcFile = new DatastoreFile(fileItem.first());
                DatastoreFile destFile = new DatastoreFile(destDsMo.getName(), destDsDir, srcFile.getFileName());
                Pair<VmdkFileDescriptor, byte[]> vmdkDescriptor = null;
                vmdkDescriptor = getVmdkFileInfo(fileItem.first());
                // Copy the descriptor VMDK file itself.
                s_logger.info("Copy VM disk file " + srcFile.getPath() + " to " + destFile.getPath());
                srcDsMo.copyDatastoreFile(fileItem.first(), dcMo.getMor(), destDsMo.getMor(), destFile.getPath(), dcMo.getMor(), true);
                // Copy the companion data file (e.g. -flat/-delta) named in the descriptor.
                if (vmdkDescriptor != null) {
                    String vmdkBaseFileName = vmdkDescriptor.first().getBaseFileName();
                    String baseFilePath = srcFile.getCompanionPath(vmdkBaseFileName);
                    destFile = new DatastoreFile(destDsMo.getName(), destDsDir, vmdkBaseFileName);
                    s_logger.info("Copy VM disk file " + baseFilePath + " to " + destFile.getPath());
                    srcDsMo.copyDatastoreFile(baseFilePath, dcMo.getMor(), destDsMo.getMor(), destFile.getPath(), dcMo.getMor(), true);
                }
            }
        }
    }
}
/**
 * Collects the VMDK base file names of all virtual disks attached to this VM.
 *
 * @return base names of each disk's backing file, in device order
 * @throws Exception on failure to read the VM's device list
 */
public List<String> getVmdkFileBaseNames() throws Exception {
    List<String> baseNames = new ArrayList<String>();
    for (VirtualDevice candidate : getAllDiskDevice()) {
        if (candidate instanceof VirtualDisk) {
            baseNames.add(getVmdkFileBaseName((VirtualDisk) candidate));
        }
    }
    return baseNames;
}
/**
 * Extracts the VMDK base file name from a disk's backing info.
 *
 * @param disk the virtual disk to inspect
 * @return the backing file's base name, or null when the backing is not a
 *         flat-ver-2 backing (the only backing type handled here)
 * @throws Exception declared for interface compatibility
 */
public String getVmdkFileBaseName(VirtualDisk disk) throws Exception {
    VirtualDeviceBackingInfo backing = disk.getBacking();
    if (!(backing instanceof VirtualDiskFlatVer2BackingInfo)) {
        return null;
    }
    VirtualDiskFlatVer2BackingInfo flatBacking = (VirtualDiskFlatVer2BackingInfo) backing;
    return new DatastoreFile(flatBacking.getFileName()).getFileBaseName();
}
// this method relies on un-offical VMware API
/**
 * Moves every disk file of this VM (descriptor VMDK plus its companion data
 * file) to the given directory on the destination datastore, preserving file
 * names. Existing destination files are overwritten (force flag is true).
 *
 * @param destDsMo destination datastore
 * @param destDsDir destination directory on that datastore
 * @param followDiskChain when true, the full parent chain of each disk is moved as well
 * @throws Exception on move failure
 * @deprecated relies on an unofficial VMware API (see note above)
 */
@Deprecated
public void moveAllVmDiskFiles(DatastoreMO destDsMo, String destDsDir, boolean followDiskChain) throws Exception {
    VirtualDevice[] disks = getAllDiskDevice();
    DatacenterMO dcMo = getOwnerDatacenter().first();
    if (disks != null) {
        for (VirtualDevice disk : disks) {
            List<Pair<String, ManagedObjectReference>> vmdkFiles = getDiskDatastorePathChain((VirtualDisk)disk, followDiskChain);
            for (Pair<String, ManagedObjectReference> fileItem : vmdkFiles) {
                DatastoreMO srcDsMo = new DatastoreMO(_context, fileItem.second());
                DatastoreFile srcFile = new DatastoreFile(fileItem.first());
                DatastoreFile destFile = new DatastoreFile(destDsMo.getName(), destDsDir, srcFile.getFileName());
                Pair<VmdkFileDescriptor, byte[]> vmdkDescriptor = null;
                vmdkDescriptor = getVmdkFileInfo(fileItem.first());
                // Move the descriptor VMDK file itself.
                s_logger.info("Move VM disk file " + srcFile.getPath() + " to " + destFile.getPath());
                srcDsMo.moveDatastoreFile(fileItem.first(), dcMo.getMor(), destDsMo.getMor(), destFile.getPath(), dcMo.getMor(), true);
                // Move the companion data file (e.g. -flat/-delta) named in the descriptor.
                if (vmdkDescriptor != null) {
                    String vmdkBaseFileName = vmdkDescriptor.first().getBaseFileName();
                    String baseFilePath = srcFile.getCompanionPath(vmdkBaseFileName);
                    destFile = new DatastoreFile(destDsMo.getName(), destDsDir, vmdkBaseFileName);
                    s_logger.info("Move VM disk file " + baseFilePath + " to " + destFile.getPath());
                    srcDsMo.moveDatastoreFile(baseFilePath, dcMo.getMor(), destDsMo.getMor(), destFile.getPath(), dcMo.getMor(), true);
                }
            }
        }
    }
}
/**
 * Finds the device key of the first ParaVirtual SCSI controller on this VM.
 *
 * @return the controller's device key, or -1 when no PVSCSI controller exists
 * @throws Exception on failure to read the VM's hardware device list
 */
public int getPvScsiDeviceControllerKeyNoException() throws Exception {
    List<VirtualDevice> hardwareDevices = (List<VirtualDevice>)_context.getVimClient().
            getDynamicProperty(_mor, "config.hardware.device");
    if (hardwareDevices != null) {
        for (VirtualDevice candidate : hardwareDevices) {
            if (candidate instanceof ParaVirtualSCSIController) {
                return candidate.getKey();
            }
        }
    }
    return -1;
}
/**
 * Finds the device key of the first ParaVirtual SCSI controller on this VM.
 *
 * @return the controller's device key
 * @throws Exception when no PVSCSI controller is present (and trips an assert
 *         when assertions are enabled)
 */
public int getPvScsiDeviceControllerKey() throws Exception {
    List<VirtualDevice> hardwareDevices = (List<VirtualDevice>)_context.getVimClient().
            getDynamicProperty(_mor, "config.hardware.device");
    if (hardwareDevices != null) {
        for (VirtualDevice candidate : hardwareDevices) {
            if (candidate instanceof ParaVirtualSCSIController) {
                return candidate.getKey();
            }
        }
    }
    assert (false);
    throw new Exception("VMware Paravirtual SCSI Controller Not Found");
}
/**
 * Adds ParaVirtual SCSI controllers to this VM until {@code requiredNumScsiControllers}
 * bus numbers are populated, starting from {@code availableBusNum}. Each new
 * controller gets NO_SHARING bus sharing and a negative temporary device key
 * (the convention used elsewhere in this class for devices being added).
 *
 * @param requiredNumScsiControllers total number of SCSI controllers the VM should have
 * @param availableBusNum first free SCSI bus number to start adding at
 * @throws Exception if the reconfigure task fails
 */
public void ensurePvScsiDeviceController(int requiredNumScsiControllers, int availableBusNum) throws Exception {
    VirtualMachineConfigSpec vmConfig = new VirtualMachineConfigSpec();
    int busNum = availableBusNum;
    while (busNum < requiredNumScsiControllers) {
        ParaVirtualSCSIController scsiController = new ParaVirtualSCSIController();
        scsiController.setSharedBus(VirtualSCSISharing.NO_SHARING);
        scsiController.setBusNumber(busNum);
        // Negative key marks the device as newly added within this reconfigure request.
        scsiController.setKey(busNum - VmwareHelper.MAX_SCSI_CONTROLLER_COUNT);
        VirtualDeviceConfigSpec scsiControllerSpec = new VirtualDeviceConfigSpec();
        scsiControllerSpec.setDevice(scsiController);
        scsiControllerSpec.setOperation(VirtualDeviceConfigSpecOperation.ADD);
        vmConfig.getDeviceChange().add(scsiControllerSpec);
        busNum++;
    }
    // BUG FIX: configureVm() returns true on success (see plugDevice()), so failure
    // is the false branch. The previous check was inverted: it threw on success
    // and logged "Successfully added" on failure.
    if (!configureVm(vmConfig)) {
        throw new Exception("Unable to add Scsi controllers to the VM " + getName());
    } else {
        s_logger.info("Successfully added " + requiredNumScsiControllers + " SCSI controllers.");
    }
}
/**
 * Returns the disk controller recommended by vCenter for the given guest OS.
 *
 * @param guestOsId guest OS id; null falls back to this VM's own guest id
 * @return the recommended disk controller name
 * @throws Exception on vCenter query failure
 */
public String getRecommendedDiskController(String guestOsId) throws Exception {
    GuestOsDescriptor descriptor = getGuestOsDescriptor(guestOsId);
    return VmwareHelper.getRecommendedDiskControllerFromDescriptor(descriptor);
}
/**
 * Reports whether this VM's virtual hardware version supports PVSCSI
 * controllers (hardware version 7 or later).
 *
 * @return true when the hardware version is at least 7
 * @throws Exception on failure to read the hardware version
 */
public boolean isPvScsiSupported() throws Exception {
    final int hardwareVersion = getVirtualHardwareVersion();
    if (hardwareVersion >= 7) {
        return true;
    }
    s_logger.error("The virtual hardware version of the VM is " + hardwareVersion
            + ", which doesn't support PV SCSI controller type for virtual harddisks. Please upgrade this VM's virtual hardware version to 7 or later.");
    return false;
}
// Would be useful if there exists multiple sub types of SCSI controllers per VM are supported in CloudStack f
/**
 * Finds the device key of the first SCSI controller on this VM matching the
 * requested controller type and still having capacity for another disk
 * (per isValidScsiDiskController). The generic "scsi" type matches any of the
 * four concrete controller classes.
 *
 * @param diskController requested controller type name (lsilogic, lsisas1068, pvscsi, buslogic, or scsi)
 * @return the matching controller's device key
 * @throws IllegalStateException when no suitable controller is configured
 *         (and trips an assert when assertions are enabled)
 */
public int getScsiDiskControllerKey(String diskController) throws Exception {
    List<VirtualDevice> devices = (List<VirtualDevice>)_context.getVimClient().getDynamicProperty(_mor, "config.hardware.device");
    if (CollectionUtils.isNotEmpty(devices)) {
        DiskControllerType diskControllerType = DiskControllerType.getType(diskController);
        for (VirtualDevice device : devices) {
            // Each branch pairs one concrete controller class with its type name;
            // DiskControllerType.scsi acts as a wildcard matching any of them.
            if ((diskControllerType == DiskControllerType.lsilogic || diskControllerType == DiskControllerType.scsi)
                    && device instanceof VirtualLsiLogicController && isValidScsiDiskController((VirtualLsiLogicController)device)) {
                return ((VirtualLsiLogicController)device).getKey();
            } else if ((diskControllerType == DiskControllerType.lsisas1068 || diskControllerType == DiskControllerType.scsi)
                    && device instanceof VirtualLsiLogicSASController && isValidScsiDiskController((VirtualLsiLogicSASController)device)) {
                return ((VirtualLsiLogicSASController)device).getKey();
            } else if ((diskControllerType == DiskControllerType.pvscsi || diskControllerType == DiskControllerType.scsi)
                    && device instanceof ParaVirtualSCSIController && isValidScsiDiskController((ParaVirtualSCSIController)device)) {
                return ((ParaVirtualSCSIController)device).getKey();
            } else if ((diskControllerType == DiskControllerType.buslogic || diskControllerType == DiskControllerType.scsi)
                    && device instanceof VirtualBusLogicController && isValidScsiDiskController((VirtualBusLogicController)device)) {
                return ((VirtualBusLogicController)device).getKey();
            }
        }
    }
    assert (false);
    throw new IllegalStateException("Scsi disk controller of type " + diskController + " not found among configured devices.");
}
/**
 * Finds the device key of the SCSI controller (of the requested type) that
 * should host the given SCSI unit number. Which controller is "required" is
 * derived from the unit number: unit numbers are laid out consecutively across
 * controllers, MAX_ALLOWED_DEVICES_SCSI_CONTROLLER per controller, counted in
 * device-list order among controllers of the matching type.
 *
 * @param diskController requested controller type name ("scsi" matches any concrete type)
 * @param scsiUnitNumber the disk's SCSI unit number; negative values return -1
 * @return the matching controller's device key, or -1 when it does not exist
 *         or fails the isValidScsiDiskController capacity check
 */
public int getScsiDiskControllerKeyNoException(String diskController, int scsiUnitNumber) throws Exception {
    List<VirtualDevice> devices = (List<VirtualDevice>)_context.getVimClient().getDynamicProperty(_mor, "config.hardware.device");
    if (CollectionUtils.isNotEmpty(devices) && scsiUnitNumber >= 0) {
        // Which controller (0-based, among controllers of the matching type) owns this unit number.
        int requiredScsiController = scsiUnitNumber / VmwareHelper.MAX_ALLOWED_DEVICES_SCSI_CONTROLLER;
        int scsiControllerDeviceCount = 0;
        DiskControllerType diskControllerType = DiskControllerType.getType(diskController);
        for (VirtualDevice device : devices) {
            // Each branch handles one concrete controller class; "scsi" acts as a
            // wildcard. Once the required ordinal is reached, the controller is
            // validated and either returned or the search abandoned (break).
            if ((diskControllerType == DiskControllerType.lsilogic || diskControllerType == DiskControllerType.scsi) && device instanceof VirtualLsiLogicController) {
                if (scsiControllerDeviceCount == requiredScsiController) {
                    if (isValidScsiDiskController((VirtualLsiLogicController)device)) {
                        return ((VirtualLsiLogicController)device).getKey();
                    }
                    break;
                }
                scsiControllerDeviceCount++;
            } else if ((diskControllerType == DiskControllerType.lsisas1068 || diskControllerType == DiskControllerType.scsi) && device instanceof VirtualLsiLogicSASController) {
                if (scsiControllerDeviceCount == requiredScsiController) {
                    if (isValidScsiDiskController((VirtualLsiLogicSASController)device)) {
                        return ((VirtualLsiLogicSASController)device).getKey();
                    }
                    break;
                }
                scsiControllerDeviceCount++;
            } else if ((diskControllerType == DiskControllerType.pvscsi || diskControllerType == DiskControllerType.scsi) && device instanceof ParaVirtualSCSIController) {
                if (scsiControllerDeviceCount == requiredScsiController) {
                    if (isValidScsiDiskController((ParaVirtualSCSIController)device)) {
                        return ((ParaVirtualSCSIController)device).getKey();
                    }
                    break;
                }
                scsiControllerDeviceCount++;
            } else if ((diskControllerType == DiskControllerType.buslogic || diskControllerType == DiskControllerType.scsi) && device instanceof VirtualBusLogicController) {
                if (scsiControllerDeviceCount == requiredScsiController) {
                    if (isValidScsiDiskController((VirtualBusLogicController)device)) {
                        return ((VirtualBusLogicController)device).getKey();
                    }
                    break;
                }
                scsiControllerDeviceCount++;
            }
        }
    }
    return -1;
}
/**
 * Computes the next free device number on this VM's first valid SCSI controller.
 *
 * @return the next available device (unit) number
 * @throws Exception when no SCSI controller exists
 */
public int getNextScsiDiskDeviceNumber() throws Exception {
    return getNextDeviceNumber(getScsiDeviceControllerKey());
}
/**
 * Finds the device key of the first SCSI controller on this VM that still has
 * capacity (per isValidScsiDiskController).
 *
 * @return the controller's device key
 * @throws Exception when no suitable SCSI controller is present (and trips an
 *         assert when assertions are enabled)
 */
public int getScsiDeviceControllerKey() throws Exception {
    List<VirtualDevice> hardwareDevices = _context.getVimClient().getDynamicProperty(_mor, "config.hardware.device");
    if (hardwareDevices != null) {
        for (VirtualDevice candidate : hardwareDevices) {
            if (candidate instanceof VirtualSCSIController && isValidScsiDiskController((VirtualSCSIController)candidate)) {
                return candidate.getKey();
            }
        }
    }
    assert (false);
    throw new Exception("SCSI Controller Not Found");
}
/**
 * Finds the device key of the first SCSI controller on this VM that still has
 * capacity (per isValidScsiDiskController).
 *
 * @return the controller's device key, or -1 when none qualifies
 * @throws Exception on failure to read the VM's hardware device list
 */
public int getScsiDeviceControllerKeyNoException() throws Exception {
    List<VirtualDevice> hardwareDevices = _context.getVimClient().getDynamicProperty(_mor, "config.hardware.device");
    if (hardwareDevices != null) {
        for (VirtualDevice candidate : hardwareDevices) {
            if (candidate instanceof VirtualSCSIController && isValidScsiDiskController((VirtualSCSIController)candidate)) {
                return candidate.getKey();
            }
        }
    }
    return -1;
}
/**
 * Ensures this VM has LsiLogic Parallel SCSI controllers: if none exists yet,
 * controllers are added on bus numbers {@code availableBusNum} up to
 * {@code count - 1}, each with NO_SHARING bus sharing and a negative temporary
 * device key (the convention used elsewhere in this class for added devices).
 * A no-op when an LsiLogic controller is already present.
 *
 * @param count total number of LsiLogic controllers the VM should have
 * @param availableBusNum first free SCSI bus number to start adding at
 * @throws Exception if the reconfigure task fails
 */
public void ensureLsiLogicDeviceControllers(int count, int availableBusNum) throws Exception {
    int scsiControllerKey = getLsiLogicDeviceControllerKeyNoException();
    if (scsiControllerKey < 0) {
        VirtualMachineConfigSpec vmConfig = new VirtualMachineConfigSpec();
        int busNum = availableBusNum;
        while (busNum < count) {
            VirtualLsiLogicController scsiController = new VirtualLsiLogicController();
            scsiController.setSharedBus(VirtualSCSISharing.NO_SHARING);
            scsiController.setBusNumber(busNum);
            // Negative key marks the device as newly added within this reconfigure request.
            scsiController.setKey(busNum - VmwareHelper.MAX_SCSI_CONTROLLER_COUNT);
            VirtualDeviceConfigSpec scsiControllerSpec = new VirtualDeviceConfigSpec();
            scsiControllerSpec.setDevice(scsiController);
            scsiControllerSpec.setOperation(VirtualDeviceConfigSpecOperation.ADD);
            vmConfig.getDeviceChange().add(scsiControllerSpec);
            busNum++;
        }
        // BUG FIX: configureVm() returns true on success (see plugDevice()), so
        // failure is the false branch. The previous check was inverted: it threw
        // on success and logged "Successfully added" on failure.
        if (!configureVm(vmConfig)) {
            throw new Exception("Unable to add Lsi Logic controllers to the VM " + getName());
        } else {
            s_logger.info("Successfully added " + count + " LsiLogic Parallel SCSI controllers.");
        }
    }
}
/**
 * Finds the device key of the first LsiLogic Parallel SCSI controller on this VM.
 *
 * @return the controller's device key, or -1 when none exists
 * @throws Exception on failure to read the VM's hardware device list
 */
private int getLsiLogicDeviceControllerKeyNoException() throws Exception {
    List<VirtualDevice> hardwareDevices = (List<VirtualDevice>)_context.getVimClient().
            getDynamicProperty(_mor, "config.hardware.device");
    if (hardwareDevices != null) {
        for (VirtualDevice candidate : hardwareDevices) {
            if (candidate instanceof VirtualLsiLogicController) {
                return candidate.getKey();
            }
        }
    }
    return -1;
}
/**
 * Ensures this VM has at least one SCSI controller with free capacity: if none
 * qualifies, a single LsiLogic controller is added on bus 0 with NO_SHARING
 * bus sharing. A no-op when a usable SCSI controller already exists.
 *
 * @throws Exception if the reconfigure task fails
 */
public void ensureScsiDeviceController() throws Exception {
    int scsiControllerKey = getScsiDeviceControllerKeyNoException();
    if (scsiControllerKey < 0) {
        VirtualMachineConfigSpec vmConfig = new VirtualMachineConfigSpec();
        // Scsi controller
        VirtualLsiLogicController scsiController = new VirtualLsiLogicController();
        scsiController.setSharedBus(VirtualSCSISharing.NO_SHARING);
        scsiController.setBusNumber(0);
        scsiController.setKey(1);
        VirtualDeviceConfigSpec scsiControllerSpec = new VirtualDeviceConfigSpec();
        scsiControllerSpec.setDevice(scsiController);
        scsiControllerSpec.setOperation(VirtualDeviceConfigSpecOperation.ADD);
        vmConfig.getDeviceChange().add(scsiControllerSpec);
        // BUG FIX: configureVm() returns true on success (see plugDevice()), so
        // failure is the false branch. The previous check was inverted and threw
        // on success.
        if (!configureVm(vmConfig)) {
            throw new Exception("Unable to add Scsi controller");
        }
    }
}
/**
 * Ensures the VM has {@code count} SCSI controllers, adding missing LsiLogic
 * controllers starting from bus number {@code availableBusNum} when no usable SCSI
 * controller exists yet.
 *
 * @param count           total number of SCSI controllers the VM should have
 * @param availableBusNum first free SCSI bus number to use for new controllers
 * @throws Exception when the reconfigure task fails
 */
public void ensureScsiDeviceControllers(int count, int availableBusNum) throws Exception {
    int scsiControllerKey = getScsiDeviceControllerKeyNoException();
    if (scsiControllerKey < 0) {
        VirtualMachineConfigSpec vmConfig = new VirtualMachineConfigSpec();
        int busNum = availableBusNum;
        while (busNum < count) {
            VirtualLsiLogicController scsiController = new VirtualLsiLogicController();
            scsiController.setSharedBus(VirtualSCSISharing.NO_SHARING);
            scsiController.setBusNumber(busNum);
            // Negative bus-derived placeholder key; vCenter assigns the real key on apply.
            scsiController.setKey(busNum - VmwareHelper.MAX_SCSI_CONTROLLER_COUNT);
            VirtualDeviceConfigSpec scsiControllerSpec = new VirtualDeviceConfigSpec();
            scsiControllerSpec.setDevice(scsiController);
            scsiControllerSpec.setOperation(VirtualDeviceConfigSpecOperation.ADD);
            vmConfig.getDeviceChange().add(scsiControllerSpec);
            busNum++;
        }
        // BUG FIX: configureVm() returns true on success; the previous condition threw
        // "Unable to add" on success and logged "Successfully added" on failure.
        if (!configureVm(vmConfig)) {
            throw new Exception("Unable to add Scsi controllers to the VM " + getName());
        } else {
            s_logger.info("Successfully added " + count + " SCSI controllers.");
        }
    }
}
/**
 * Decides whether a SCSI controller can host another disk: it must exist, report its
 * attached-device list, have a free slot, and sit on a supported bus number.
 */
private boolean isValidScsiDiskController(VirtualSCSIController scsiDiskController) {
    if (scsiDiskController == null) {
        return false;
    }
    List<Integer> attachedDiskKeys = scsiDiskController.getDevice();
    boolean hasFreeSlot = attachedDiskKeys != null
            && attachedDiskKeys.size() < VmwareHelper.MAX_SUPPORTED_DEVICES_SCSI_CONTROLLER;
    boolean busNumberInRange = scsiDiskController.getBusNumber() < VmwareHelper.MAX_SCSI_CONTROLLER_COUNT;
    return hasFreeSlot && busNumberInRange;
}
// return pair of VirtualDisk and disk device bus name(ide0:0, etc)
/**
 * Locates the VM's virtual disk backed by the given VMDK datastore path.
 *
 * Matching strategy: walk the full flat-ver2 backing chain of every disk and
 * (1) return immediately on an exact base-name match (datastore names must also agree
 * unless the requested path carries none), otherwise (2) remember disks whose backing
 * name contains the snapshot-delta-trimmed base name and return that partial match
 * only when it is unambiguous (exactly one candidate).
 *
 * @param vmdkDatastorePath datastore path of the VMDK, e.g. "[ds1] vm/vol.vmdk"
 * @return pair of (disk device, bus name such as "scsi0:0"), or null when no
 *         unambiguous match exists
 */
public Pair<VirtualDisk, String> getDiskDevice(String vmdkDatastorePath) throws Exception {
    final String zeroLengthString = "";
    List<VirtualDevice> devices = _context.getVimClient().getDynamicProperty(_mor, "config.hardware.device");
    ArrayList<Pair<VirtualDisk, String>> partialMatchingDiskDevices = new ArrayList<>();
    DatastoreFile dsSrcFile = new DatastoreFile(vmdkDatastorePath);
    String srcBaseName = dsSrcFile.getFileBaseName();
    // Strip any snapshot delta postfix (e.g. "-000001") for the fuzzy comparison below.
    String trimmedSrcBaseName = VmwareHelper.trimSnapshotDeltaPostfix(srcBaseName);
    // Empty datastore name means "do not compare datastore names".
    String srcDatastoreName = dsSrcFile.getDatastoreName() != null ? dsSrcFile.getDatastoreName() : zeroLengthString;
    s_logger.info("Look for disk device info for volume : " + vmdkDatastorePath + " with base name: " + srcBaseName);
    if (devices != null && devices.size() > 0) {
        for (VirtualDevice device : devices) {
            if (device instanceof VirtualDisk) {
                s_logger.info("Test against disk device, controller key: " + device.getControllerKey() + ", unit number: " + device.getUnitNumber());
                VirtualDeviceBackingInfo backingInfo = device.getBacking();
                if (backingInfo instanceof VirtualDiskFlatVer2BackingInfo) {
                    VirtualDiskFlatVer2BackingInfo diskBackingInfo = (VirtualDiskFlatVer2BackingInfo)backingInfo;
                    // Walk the disk's snapshot/parent backing chain from the top file down.
                    do {
                        s_logger.info("Test against disk backing : " + diskBackingInfo.getFileName());
                        DatastoreFile dsBackingFile = new DatastoreFile(diskBackingInfo.getFileName());
                        String backingDatastoreName = dsBackingFile.getDatastoreName() != null ? dsBackingFile.getDatastoreName() : zeroLengthString;
                        if (srcDatastoreName.equals(zeroLengthString)) {
                            backingDatastoreName = zeroLengthString;
                        }
                        if (srcDatastoreName.equalsIgnoreCase(backingDatastoreName)) {
                            String backingBaseName = dsBackingFile.getFileBaseName();
                            if (backingBaseName.equalsIgnoreCase(srcBaseName)) {
                                // Exact match wins immediately.
                                String deviceNumbering = getDeviceBusName(devices, device);
                                s_logger.info("Disk backing : " + diskBackingInfo.getFileName() + " matches ==> " + deviceNumbering);
                                registerVirtualDisk((VirtualDisk) device, dsBackingFile);
                                return new Pair<>((VirtualDisk)device, deviceNumbering);
                            }
                            if (backingBaseName.contains(trimmedSrcBaseName)) {
                                // Keep as fallback candidate; only used if no exact match exists.
                                String deviceNumbering = getDeviceBusName(devices, device);
                                partialMatchingDiskDevices.add(new Pair<>((VirtualDisk)device, deviceNumbering));
                            }
                        }
                        diskBackingInfo = diskBackingInfo.getParent();
                    } while (diskBackingInfo != null);
                }
            }
        }
    }
    // No disk device was found with an exact match for the volume path, hence look for disk device that matches the trimmed name.
    s_logger.info("No disk device with an exact match found for volume : " + vmdkDatastorePath + ". Look for disk device info against trimmed base name: " + srcBaseName);
    if (partialMatchingDiskDevices != null) {
        if (partialMatchingDiskDevices.size() == 1) {
            VirtualDiskFlatVer2BackingInfo matchingDiskBackingInfo = (VirtualDiskFlatVer2BackingInfo)partialMatchingDiskDevices.get(0).first().getBacking();
            s_logger.info("Disk backing : " + matchingDiskBackingInfo.getFileName() + " matches ==> " + partialMatchingDiskDevices.get(0).second());
            return partialMatchingDiskDevices.get(0);
        } else if (partialMatchingDiskDevices.size() > 1) {
            // Ambiguous fuzzy match: refuse to guess.
            s_logger.warn("Disk device info lookup for volume: " + vmdkDatastorePath + " failed as multiple disk devices were found to match" +
                    " volume's trimmed base name: " + trimmedSrcBaseName);
            return null;
        }
    }
    s_logger.warn("Disk device info lookup for volume: " + vmdkDatastorePath + " failed as no matching disk device found");
    return null;
}
// return pair of VirtualDisk and disk device bus name(ide0:0, etc)
/**
 * Locates the VM's virtual disk backed by the given VMDK datastore path, using a
 * single matching mode instead of the exact-then-fuzzy fallback of the one-argument
 * overload.
 *
 * @param vmdkDatastorePath datastore path of the VMDK to look for
 * @param matchExactly      true to require the backing base name to equal the
 *                          requested base name; false to accept any backing whose
 *                          name contains the snapshot-delta-trimmed base name
 * @return pair of (disk device, bus name such as "scsi0:0"), or null when not found
 */
public Pair<VirtualDisk, String> getDiskDevice(String vmdkDatastorePath, boolean matchExactly) throws Exception {
    List<VirtualDevice> devices = _context.getVimClient().getDynamicProperty(_mor, "config.hardware.device");
    DatastoreFile dsSrcFile = new DatastoreFile(vmdkDatastorePath);
    String srcBaseName = dsSrcFile.getFileBaseName();
    // Snapshot deltas append a postfix (e.g. "-000001"); trimmed name is used in fuzzy mode.
    String trimmedSrcBaseName = VmwareHelper.trimSnapshotDeltaPostfix(srcBaseName);
    if (matchExactly) {
        s_logger.info("Look for disk device info from volume : " + vmdkDatastorePath + " with base name: " + srcBaseName);
    } else {
        s_logger.info("Look for disk device info from volume : " + vmdkDatastorePath + " with trimmed base name: " + trimmedSrcBaseName);
    }
    if (devices != null && devices.size() > 0) {
        for (VirtualDevice device : devices) {
            if (device instanceof VirtualDisk) {
                s_logger.info("Test against disk device, controller key: " + device.getControllerKey() + ", unit number: " + device.getUnitNumber());
                VirtualDeviceBackingInfo backingInfo = ((VirtualDisk)device).getBacking();
                if (backingInfo instanceof VirtualDiskFlatVer2BackingInfo) {
                    VirtualDiskFlatVer2BackingInfo diskBackingInfo = (VirtualDiskFlatVer2BackingInfo)backingInfo;
                    // Walk the disk's snapshot/parent backing chain from the top file down.
                    do {
                        s_logger.info("Test against disk backing : " + diskBackingInfo.getFileName());
                        DatastoreFile dsBackingFile = new DatastoreFile(diskBackingInfo.getFileName());
                        String backingBaseName = dsBackingFile.getFileBaseName();
                        if (matchExactly) {
                            if (backingBaseName.equalsIgnoreCase(srcBaseName)) {
                                String deviceNumbering = getDeviceBusName(devices, device);
                                s_logger.info("Disk backing : " + diskBackingInfo.getFileName() + " matches ==> " + deviceNumbering);
                                registerVirtualDisk((VirtualDisk) device, dsBackingFile);
                                return new Pair<VirtualDisk, String>((VirtualDisk)device, deviceNumbering);
                            }
                        } else {
                            if (backingBaseName.contains(trimmedSrcBaseName)) {
                                String deviceNumbering = getDeviceBusName(devices, device);
                                s_logger.info("Disk backing : " + diskBackingInfo.getFileName() + " matches ==> " + deviceNumbering);
                                registerVirtualDisk((VirtualDisk) device, dsBackingFile);
                                return new Pair<VirtualDisk, String>((VirtualDisk)device, deviceNumbering);
                            }
                        }
                        diskBackingInfo = diskBackingInfo.getParent();
                    } while (diskBackingInfo != null);
                }
            }
        }
    }
    return null;
}
/**
 * Registers the disk as a vSphere first-class disk (FCD) when it does not yet carry a
 * vDiskId, giving it a unique identifier. Failures are logged and swallowed because
 * registration is best-effort and must not break the calling operation.
 *
 * @param device        the disk to register; its vDiskId is set on success
 * @param dsBackingFile datastore file backing the disk
 */
public void registerVirtualDisk(VirtualDisk device, DatastoreFile dsBackingFile) {
    if (device.getVDiskId() != null) {
        return;
    }
    try {
        s_logger.debug("vDiskid does not exist for volume " + dsBackingFile.getFileName() + " registering the disk now");
        VirtualStorageObjectManagerMO vStorageObjectManagerMO = new VirtualStorageObjectManagerMO(getOwnerDatacenter().first().getContext());
        VStorageObject vStorageObject = vStorageObjectManagerMO.registerVirtualDisk(dsBackingFile, null, getOwnerDatacenter().first().getName());
        VStorageObjectConfigInfo diskConfigInfo = vStorageObject.getConfig();
        device.setVDiskId(diskConfigInfo.getId());
    } catch (Exception e) {
        s_logger.warn("Exception while trying to register a disk as first class disk to get the unique identifier, main operation still continues: " + e.getMessage());
    }
}
/**
 * Returns the current top (most recent) backing file of the disk sitting at the given
 * bus position, or null when no flat-ver2 backed disk occupies that position.
 *
 * @param deviceBusName bus name such as "scsi0:0" or "ide0:1"
 */
public String getDiskCurrentTopBackingFileInChain(String deviceBusName) throws Exception {
    List<VirtualDevice> hardware = _context.getVimClient().getDynamicProperty(_mor, "config.hardware.device");
    if (hardware == null) {
        return null;
    }
    for (VirtualDevice candidate : hardware) {
        if (!(candidate instanceof VirtualDisk)) {
            continue;
        }
        s_logger.info("Test against disk device, controller key: " + candidate.getControllerKey() + ", unit number: " + candidate.getUnitNumber());
        VirtualDeviceBackingInfo backing = ((VirtualDisk)candidate).getBacking();
        if (backing instanceof VirtualDiskFlatVer2BackingInfo
                && getDeviceBusName(hardware, candidate).equals(deviceBusName)) {
            return ((VirtualDiskFlatVer2BackingInfo)backing).getFileName();
        }
    }
    return null;
}
/**
 * Finds the virtual disk attached at the given bus position.
 *
 * @param deviceBusName bus name such as "scsi0:0"
 * @return the disk at that position, or null when none is attached there
 */
public VirtualDisk getDiskDeviceByDeviceBusName(String deviceBusName) throws Exception {
    List<VirtualDevice> hardware = _context.getVimClient().getDynamicProperty(_mor, "config.hardware.device");
    if (hardware == null) {
        return null;
    }
    for (VirtualDevice candidate : hardware) {
        if (candidate instanceof VirtualDisk
                && getDeviceBusName(hardware, candidate).equals(deviceBusName)) {
            return (VirtualDisk)candidate;
        }
    }
    return null;
}
/**
 * Builds a {@link VirtualMachineDiskInfoBuilder} describing every flat-ver2 backed
 * disk on the VM: for each disk, every file in its backing chain (current top file
 * first, then parents) is recorded under the disk's bus name (e.g. "scsi0:1").
 * As a side effect each disk is registered as a first-class disk (best effort).
 */
public VirtualMachineDiskInfoBuilder getDiskInfoBuilder() throws Exception {
    VirtualMachineDiskInfoBuilder builder = new VirtualMachineDiskInfoBuilder();
    List<VirtualDevice> devices = _context.getVimClient().getDynamicProperty(_mor, "config.hardware.device");
    if (devices != null && devices.size() > 0) {
        for (VirtualDevice device : devices) {
            if (device instanceof VirtualDisk) {
                VirtualDeviceBackingInfo backingInfo = ((VirtualDisk)device).getBacking();
                if (backingInfo instanceof VirtualDiskFlatVer2BackingInfo) {
                    VirtualDiskFlatVer2BackingInfo diskBackingInfo = (VirtualDiskFlatVer2BackingInfo)backingInfo;
                    // Remember the top-of-chain file before walking down to the base disk.
                    String diskBackingFileName = diskBackingInfo.getFileName();
                    while (diskBackingInfo != null) {
                        String deviceBusName = getDeviceBusName(devices, device);
                        builder.addDisk(deviceBusName, diskBackingInfo.getFileName());
                        diskBackingInfo = diskBackingInfo.getParent();
                    }
                    // FCD registration uses the top-of-chain file captured above.
                    DatastoreFile dsBackingFile = new DatastoreFile(diskBackingFileName);
                    registerVirtualDisk((VirtualDisk) device, dsBackingFile);
                }
            }
        }
    }
    return builder;
}
/**
 * Collects, for every flat-ver2 backed virtual disk on the VM, the disk's device key
 * paired with the datastore holding its current backing file. Each disk is also
 * registered as a first-class disk (best effort) as a side effect.
 *
 * @return list of (disk device key, backing datastore MOR) pairs; empty when the VM
 *         has no matching disks
 */
public List<Pair<Integer, ManagedObjectReference>> getAllDiskDatastores() throws Exception {
    List<Pair<Integer, ManagedObjectReference>> disks = new ArrayList<Pair<Integer, ManagedObjectReference>>();
    List<VirtualDevice> devices = _context.getVimClient().getDynamicProperty(_mor, "config.hardware.device");
    if (devices != null && devices.size() > 0) {
        for (VirtualDevice device : devices) {
            if (device instanceof VirtualDisk) {
                VirtualDeviceBackingInfo backingInfo = ((VirtualDisk)device).getBacking();
                if (backingInfo instanceof VirtualDiskFlatVer2BackingInfo) {
                    VirtualDiskFlatVer2BackingInfo diskBackingInfo = (VirtualDiskFlatVer2BackingInfo)backingInfo;
                    DatastoreFile dsBackingFile = new DatastoreFile(diskBackingInfo.getFileName());
                    registerVirtualDisk((VirtualDisk) device, dsBackingFile);
                    // Integer.valueOf replaces the deprecated new Integer(...) constructor.
                    disks.add(new Pair<Integer, ManagedObjectReference>(Integer.valueOf(device.getKey()), diskBackingInfo.getDatastore()));
                }
            }
        }
    }
    return disks;
}
/**
 * Resolves the full backing-file chain of a disk as (datastore path, datastore MOR)
 * pairs, from the current top file down to the base disk.
 *
 * When vCenter's in-memory chain ends (no parent recorded), the chain is continued by
 * downloading each VMDK descriptor and reading its parentFileNameHint, falling back to
 * the snapshot directory when the recorded path is stale.
 *
 * @param disk        the disk whose chain to resolve; must have a flat-ver2 backing
 * @param followChain false to return only the top backing file
 * @return chain entries in top-down order
 * @throws Exception when the disk has an unsupported backing type
 * @deprecated kept for callers that still need descriptor-based chain walking
 */
@Deprecated
public List<Pair<String, ManagedObjectReference>> getDiskDatastorePathChain(VirtualDisk disk, boolean followChain) throws Exception {
    VirtualDeviceBackingInfo backingInfo = disk.getBacking();
    if (!(backingInfo instanceof VirtualDiskFlatVer2BackingInfo)) {
        throw new Exception("Unsupported VirtualDeviceBackingInfo");
    }
    List<Pair<String, ManagedObjectReference>> pathList = new ArrayList<Pair<String, ManagedObjectReference>>();
    VirtualDiskFlatVer2BackingInfo diskBackingInfo = (VirtualDiskFlatVer2BackingInfo)backingInfo;
    if (!followChain) {
        pathList.add(new Pair<String, ManagedObjectReference>(diskBackingInfo.getFileName(), diskBackingInfo.getDatastore()));
        return pathList;
    }
    Pair<DatacenterMO, String> dcPair = getOwnerDatacenter();
    VirtualMachineFileInfo vmFilesInfo = getFileInfo();
    DatastoreFile snapshotDirFile = new DatastoreFile(vmFilesInfo.getSnapshotDirectory());
    DatastoreFile vmxDirFile = new DatastoreFile(vmFilesInfo.getVmPathName());
    do {
        if (diskBackingInfo.getParent() != null) {
            // vCenter still knows the parent: record and descend directly.
            pathList.add(new Pair<String, ManagedObjectReference>(diskBackingInfo.getFileName(), diskBackingInfo.getDatastore()));
            diskBackingInfo = diskBackingInfo.getParent();
        } else {
            // try getting parent info from VMDK file itself
            byte[] content = null;
            try {
                String url = getContext().composeDatastoreBrowseUrl(dcPair.second(), diskBackingInfo.getFileName());
                content = getContext().getResourceContent(url);
                if (content == null || content.length == 0) {
                    break;
                }
                pathList.add(new Pair<String, ManagedObjectReference>(diskBackingInfo.getFileName(), diskBackingInfo.getDatastore()));
            } catch (Exception e) {
                // if snapshot directory has been changed to place other than default. VMware has a bug
                // that its corresponding disk backing info is not updated correctly. therefore, we will try search
                // in snapshot directory one more time
                DatastoreFile currentFile = new DatastoreFile(diskBackingInfo.getFileName());
                String vmdkFullDsPath = snapshotDirFile.getCompanionPath(currentFile.getFileName());
                String url = getContext().composeDatastoreBrowseUrl(dcPair.second(), vmdkFullDsPath);
                content = getContext().getResourceContent(url);
                if (content == null || content.length == 0) {
                    break;
                }
                pathList.add(new Pair<String, ManagedObjectReference>(vmdkFullDsPath, diskBackingInfo.getDatastore()));
            }
            VmdkFileDescriptor descriptor = new VmdkFileDescriptor();
            descriptor.parse(content);
            if (descriptor.getParentFileName() != null && !descriptor.getParentFileName().isEmpty()) {
                // create a fake one
                // Synthesize a backing-info node from the descriptor so the loop can continue.
                VirtualDiskFlatVer2BackingInfo parentDiskBackingInfo = new VirtualDiskFlatVer2BackingInfo();
                parentDiskBackingInfo.setDatastore(diskBackingInfo.getDatastore());
                String parentFileName = descriptor.getParentFileName();
                if (parentFileName.startsWith("/")) {
                    // Absolute path in the descriptor: resolve relative to the VMX directory.
                    int fileNameStartPos = parentFileName.lastIndexOf("/");
                    parentFileName = parentFileName.substring(fileNameStartPos + 1);
                    parentDiskBackingInfo.setFileName(vmxDirFile.getCompanionPath(parentFileName));
                } else {
                    parentDiskBackingInfo.setFileName(snapshotDirFile.getCompanionPath(parentFileName));
                }
                diskBackingInfo = parentDiskBackingInfo;
            } else {
                // Base disk reached: no parent recorded in the descriptor.
                break;
            }
        }
    } while (diskBackingInfo != null);
    return pathList;
}
/**
 * Derives a device's bus name ("ide&lt;bus&gt;:&lt;unit&gt;" or "scsi&lt;bus&gt;:&lt;unit&gt;") by
 * locating its controller in the supplied device list.
 *
 * @throws Exception when the controller is missing or of an unsupported type
 */
public String getDeviceBusName(List<VirtualDevice> allDevices, VirtualDevice theDevice) throws Exception {
    int controllerKey = theDevice.getControllerKey().intValue();
    for (VirtualDevice candidate : allDevices) {
        if (candidate.getKey() != controllerKey) {
            continue;
        }
        if (candidate instanceof VirtualIDEController) {
            return String.format("ide%d:%d", ((VirtualIDEController)candidate).getBusNumber(), theDevice.getUnitNumber());
        }
        if (candidate instanceof VirtualSCSIController) {
            return String.format("scsi%d:%d", ((VirtualSCSIController)candidate).getBusNumber(), theDevice.getUnitNumber());
        }
        throw new Exception("Device controller is not supported yet");
    }
    throw new Exception("Unable to find device controller");
}
/**
 * Returns all virtual disks attached to the VM. Flat-ver2 backed disks are also
 * registered as first-class disks (best effort) as a side effect.
 *
 * @return all attached disks, regardless of backing type
 */
public List<VirtualDisk> getVirtualDisks() throws Exception {
    List<VirtualDisk> virtualDisks = new ArrayList<VirtualDisk>();
    List<VirtualDevice> devices = _context.getVimClient().getDynamicProperty(_mor, "config.hardware.device");
    for (VirtualDevice device : devices) {
        if (device instanceof VirtualDisk) {
            VirtualDeviceBackingInfo backingInfo = device.getBacking();
            // BUG FIX: the backing was cast unconditionally, so a sparse- or RDM-backed
            // disk raised ClassCastException. Guard with instanceof like the sibling
            // methods do; non-flat disks are still returned, just not FCD-registered.
            if (backingInfo instanceof VirtualDiskFlatVer2BackingInfo) {
                VirtualDiskFlatVer2BackingInfo diskBackingInfo = (VirtualDiskFlatVer2BackingInfo)backingInfo;
                DatastoreFile dsBackingFile = new DatastoreFile(diskBackingInfo.getFileName());
                registerVirtualDisk((VirtualDisk) device, dsBackingFile);
            }
            virtualDisks.add((VirtualDisk)device);
        }
    }
    return virtualDisks;
}
/**
 * Detaches (without deleting) every disk from this VM except the one identified by
 * {@code vmdkBaseName} or {@code deviceBusName}; a disk matching either criterion is
 * kept.
 *
 * @param vmdkBaseName  base file name of the disk to keep
 * @param deviceBusName bus name (e.g. "scsi0:0") of the disk to keep; may be null
 * @return datastore paths of the backing files of the detached disks
 * @throws Exception when the reconfigure task fails
 */
public List<String> detachAllDisksExcept(String vmdkBaseName, String deviceBusName) throws Exception {
    List<VirtualDevice> devices = _context.getVimClient().getDynamicProperty(_mor, "config.hardware.device");
    VirtualMachineConfigSpec reConfigSpec = new VirtualMachineConfigSpec();
    List<String> detachedDiskFiles = new ArrayList<String>();
    for (VirtualDevice device : devices) {
        if (device instanceof VirtualDisk) {
            VirtualDeviceConfigSpec deviceConfigSpec = new VirtualDeviceConfigSpec();
            // NOTE(review): unguarded cast — a disk with a non-flat backing (sparse/RDM)
            // would raise ClassCastException here; confirm callers only operate on VMs
            // whose disks are all flat-ver2 backed.
            VirtualDiskFlatVer2BackingInfo diskBackingInfo = (VirtualDiskFlatVer2BackingInfo)device.getBacking();
            DatastoreFile dsBackingFile = new DatastoreFile(diskBackingInfo.getFileName());
            String backingBaseName = dsBackingFile.getFileBaseName();
            String deviceNumbering = getDeviceBusName(devices, device);
            if (backingBaseName.equalsIgnoreCase(vmdkBaseName) || (deviceBusName != null && deviceBusName.equals(deviceNumbering))) {
                // Matching disk is kept attached.
                continue;
            } else {
                s_logger.info("Detach " + diskBackingInfo.getFileName() + " from " + getName());
                detachedDiskFiles.add(diskBackingInfo.getFileName());
                deviceConfigSpec.setDevice(device);
                deviceConfigSpec.setOperation(VirtualDeviceConfigSpecOperation.REMOVE);
                reConfigSpec.getDeviceChange().add(deviceConfigSpec);
            }
            // Best-effort first-class-disk registration for the disk being detached.
            registerVirtualDisk((VirtualDisk) device, dsBackingFile);
        }
    }
    if (detachedDiskFiles.size() > 0) {
        ManagedObjectReference morTask = _context.getService().reconfigVMTask(_mor, reConfigSpec);
        boolean result = _context.getVimClient().waitForTask(morTask);
        if (result) {
            _context.waitForTaskProgressDone(morTask);
        } else {
            s_logger.warn("Unable to reconfigure the VM to detach disks");
            throw new Exception("Unable to reconfigure the VM to detach disks");
        }
    }
    return detachedDiskFiles;
}
/** @return the VM's complete virtual hardware device list ("config.hardware.device"). */
public List<VirtualDevice> getAllDeviceList() throws Exception {
    List<VirtualDevice> allDevices = _context.getVimClient().getDynamicProperty(_mor, "config.hardware.device");
    return allDevices;
}
/**
 * Returns every virtual disk attached to the VM. Flat-ver2 backed disks that do not
 * yet carry a vDiskId are registered as first-class disks (best effort) on the way.
 *
 * @return all attached disks; empty array when the VM has none
 */
public VirtualDisk[] getAllDiskDevice() throws Exception {
    List<VirtualDisk> deviceList = new ArrayList<VirtualDisk>();
    List<VirtualDevice> devices = _context.getVimClient().getDynamicProperty(_mor, "config.hardware.device");
    if (devices != null && devices.size() > 0) {
        for (VirtualDevice device : devices) {
            if (device instanceof VirtualDisk) {
                VirtualDeviceBackingInfo backingInfo = device.getBacking();
                if (backingInfo instanceof VirtualDiskFlatVer2BackingInfo) {
                    VirtualDiskFlatVer2BackingInfo diskBackingInfo = (VirtualDiskFlatVer2BackingInfo) backingInfo;
                    // registerVirtualDisk performs the same null-vDiskId check, debug
                    // logging, FCD registration and warn-and-continue error handling this
                    // method previously duplicated inline.
                    registerVirtualDisk((VirtualDisk) device, new DatastoreFile(diskBackingInfo.getFileName()));
                }
                deviceList.add((VirtualDisk)device);
            }
        }
    }
    return deviceList.toArray(new VirtualDisk[0]);
}
/**
 * Finds, within the supplied device list, the disk sitting at the given bus position.
 *
 * @param allDevices full hardware device list of the VM
 * @param busName    bus name such as "scsi0:0" (compared case-insensitively)
 * @return the matching disk, or null when none occupies that position
 */
public VirtualDisk getDiskDeviceByBusName(List<VirtualDevice> allDevices, String busName) throws Exception {
    for (VirtualDevice candidate : allDevices) {
        if (!(candidate instanceof VirtualDisk)) {
            continue;
        }
        String candidateBusName = getDeviceBusName(allDevices, candidate);
        if (busName.equalsIgnoreCase(candidateBusName)) {
            return (VirtualDisk)candidate;
        }
    }
    return null;
}
/**
 * Returns the VM's disks whose disk mode is "independent" (persistent or
 * nonpersistent), i.e. disks excluded from snapshots. Disks with backing types that
 * do not expose a disk mode are never considered independent.
 */
public VirtualDisk[] getAllIndependentDiskDevice() throws Exception {
    List<VirtualDisk> independentDisks = new ArrayList<VirtualDisk>();
    for (VirtualDisk disk : getAllDiskDevice()) {
        VirtualDeviceBackingInfo backing = disk.getBacking();
        String diskMode = "";
        if (backing instanceof VirtualDiskFlatVer1BackingInfo) {
            diskMode = ((VirtualDiskFlatVer1BackingInfo)backing).getDiskMode();
        } else if (backing instanceof VirtualDiskFlatVer2BackingInfo) {
            diskMode = ((VirtualDiskFlatVer2BackingInfo)backing).getDiskMode();
        } else if (backing instanceof VirtualDiskRawDiskMappingVer1BackingInfo) {
            diskMode = ((VirtualDiskRawDiskMappingVer1BackingInfo)backing).getDiskMode();
        } else if (backing instanceof VirtualDiskSparseVer1BackingInfo) {
            diskMode = ((VirtualDiskSparseVer1BackingInfo)backing).getDiskMode();
        } else if (backing instanceof VirtualDiskSparseVer2BackingInfo) {
            diskMode = ((VirtualDiskSparseVer2BackingInfo)backing).getDiskMode();
        }
        if (diskMode.contains("independent")) {
            independentDisks.add(disk);
        }
    }
    return independentDisks.toArray(new VirtualDisk[0]);
}
/** @return device key of the first IDE controller, or -1 when the VM has none. */
public int tryGetIDEDeviceControllerKey() throws Exception {
    List<VirtualDevice> hardware = _context.getVimClient().getDynamicProperty(_mor, "config.hardware.device");
    if (hardware == null) {
        return -1;
    }
    for (VirtualDevice candidate : hardware) {
        if (candidate instanceof VirtualIDEController) {
            return ((VirtualIDEController)candidate).getKey();
        }
    }
    return -1;
}
/**
 * @return device key of the first IDE controller
 * @throws Exception when the VM unexpectedly has no IDE controller
 */
public int getIDEDeviceControllerKey() throws Exception {
    List<VirtualDevice> hardware = _context.getVimClient().getDynamicProperty(_mor, "config.hardware.device");
    if (hardware != null) {
        for (VirtualDevice candidate : hardware) {
            if (candidate instanceof VirtualIDEController) {
                return ((VirtualIDEController)candidate).getKey();
            }
        }
    }
    // Every VMware VM is expected to expose an IDE controller; reaching here is a bug.
    assert (false);
    throw new Exception("IDE Controller Not Found");
}
/**
 * Returns the device key of the IDE controller that must host the given IDE unit.
 * Each controller holds {@code VmwareHelper.MAX_IDE_CONTROLLER_COUNT} units, so the
 * unit number selects a controller by discovery order.
 *
 * @throws Exception when the computed controller does not exist on the VM
 */
public int getIDEControllerKey(int ideUnitNumber) throws Exception {
    List<VirtualDevice> hardware = _context.getVimClient().getDynamicProperty(_mor, "config.hardware.device");
    int requiredIdeController = ideUnitNumber / VmwareHelper.MAX_IDE_CONTROLLER_COUNT;
    int seenIdeControllers = 0;
    if (hardware != null) {
        for (VirtualDevice candidate : hardware) {
            if (candidate instanceof VirtualIDEController) {
                if (seenIdeControllers == requiredIdeController) {
                    return ((VirtualIDEController)candidate).getKey();
                }
                seenIdeControllers++;
            }
        }
    }
    assert (false);
    throw new Exception("IDE Controller Not Found");
}
/** @return total count of devices attached across all of the VM's IDE controllers. */
public int getNumberOfIDEDevices() throws Exception {
    int attachedIdeDevices = 0;
    List<VirtualDevice> hardware = _context.getVimClient().getDynamicProperty(_mor, "config.hardware.device");
    if (hardware != null) {
        for (VirtualDevice candidate : hardware) {
            if (candidate instanceof VirtualIDEController) {
                attachedIdeDevices += ((VirtualIDEController)candidate).getDevice().size();
            }
        }
    }
    return attachedIdeDevices;
}
/**
 * Returns a free unit number (0 or 1) on the given IDE controller.
 *
 * @param controllerKey device key of the IDE controller
 * @throws Exception when both of the controller's two slots are already taken
 */
public int getFreeUnitNumberOnIDEController(int controllerKey) throws Exception {
    List<VirtualDevice> hardware = _context.getVimClient().getDynamicProperty(_mor, "config.hardware.device");
    int attachedDiskCount = 0;
    int lastSeenUnitNumber = -1;
    if (hardware != null) {
        for (VirtualDevice candidate : hardware) {
            if (candidate instanceof VirtualDisk && (controllerKey == candidate.getControllerKey())) {
                attachedDiskCount++;
                lastSeenUnitNumber = candidate.getUnitNumber();
            }
        }
    }
    if (attachedDiskCount == 2) {
        throw new Exception("IDE controller with key [" + controllerKey + "] already has 2 device attached. Cannot attach more than the limit of 2.");
    }
    // With one disk at unit 0 the free slot is 1; in every other allowed case it is 0.
    if (attachedDiskCount == 1 && lastSeenUnitNumber == 0) {
        return 1;
    }
    return 0;
}
/** @return the next free unit number on the VM's first IDE controller. */
public int getNextIDEDeviceNumber() throws Exception {
    int ideControllerKey = getIDEDeviceControllerKey();
    return getNextDeviceNumber(ideControllerKey);
}

/** @return the VM's first CD-ROM device, or null when it has none. */
public VirtualDevice getIsoDevice() throws Exception {
    List<VirtualDevice> hardware = _context.getVimClient().getDynamicProperty(_mor, "config.hardware.device");
    if (hardware == null) {
        return null;
    }
    for (VirtualDevice candidate : hardware) {
        if (candidate instanceof VirtualCdrom) {
            return candidate;
        }
    }
    return null;
}

/** @return the CD-ROM device with the given device key, or null when absent. */
public VirtualDevice getIsoDevice(int key) throws Exception {
    List<VirtualDevice> hardware = _context.getVimClient().getDynamicProperty(_mor, "config.hardware.device");
    if (hardware == null) {
        return null;
    }
    for (VirtualDevice candidate : hardware) {
        if (candidate instanceof VirtualCdrom && candidate.getKey() == key) {
            return candidate;
        }
    }
    return null;
}
/**
 * Finds the CD-ROM device backed by the given ISO file.
 *
 * If the VM has exactly one ISO-backed CD-ROM and its filename differs from the
 * expected one, that single device is returned anyway (with a warning) — the mounted
 * ISO path may legitimately drift from what the caller recorded.
 *
 * @param filename full datastore path of the ISO to look for
 * @return the matching (or sole) ISO-backed CD-ROM device, or null when none fits
 */
public VirtualDevice getIsoDevice(String filename) throws Exception {
    List<VirtualDevice> devices = (List<VirtualDevice>)_context.getVimClient().
            getDynamicProperty(_mor, "config.hardware.device");
    if(devices != null && devices.size() > 0) {
        // Count ISO-backed CD-ROMs up front to know whether a mismatch is still unambiguous.
        long isoDevices = devices.stream()
                .filter(x -> x instanceof VirtualCdrom && x.getBacking() instanceof VirtualCdromIsoBackingInfo)
                .count();
        for(VirtualDevice device : devices) {
            if(device instanceof VirtualCdrom && device.getBacking() instanceof VirtualCdromIsoBackingInfo) {
                if (((VirtualCdromIsoBackingInfo)device.getBacking()).getFileName().equals(filename)) {
                    return device;
                } else if (isoDevices == 1L){
                    s_logger.warn(String.format("VM ISO filename %s differs from the expected filename %s",
                            ((VirtualCdromIsoBackingInfo)device.getBacking()).getFileName(), filename));
                    return device;
                }
            }
        }
    }
    return null;
}
/**
 * Computes the lowest free unit number on the given controller, skipping unit numbers
 * that are reserved on the SCSI controller (see
 * {@code VmwareHelper.isReservedScsiDeviceNumber}).
 *
 * @param controllerKey device key of the controller to place a new device on
 */
public int getNextDeviceNumber(int controllerKey) throws Exception {
    List<VirtualDevice> hardware = _context.getVimClient().getDynamicProperty(_mor, "config.hardware.device");
    List<Integer> usedUnitNumbers = new ArrayList<Integer>();
    if (hardware != null) {
        for (VirtualDevice device : hardware) {
            Integer deviceControllerKey = device.getControllerKey();
            if (deviceControllerKey != null && deviceControllerKey.intValue() == controllerKey) {
                usedUnitNumbers.add(device.getUnitNumber());
            }
        }
    }
    int scsiControllerKey = getScsiDeviceControllerKeyNoException();
    // Advance past unit numbers that are in use, and — on the SCSI controller —
    // past reserved slots as well.
    int unitNumber = 0;
    while (usedUnitNumbers.contains(Integer.valueOf(unitNumber))
            || (controllerKey == scsiControllerKey && VmwareHelper.isReservedScsiDeviceNumber(unitNumber))) {
        ++unitNumber;
    }
    return unitNumber;
}
/**
 * Collects the VM's network adapters (all VirtualEthernetCard subtypes).
 *
 * @param sorted true to order the NICs by unit number ascending; NICs without a unit
 *               number sort first (treated as -1)
 * @return mutable list of NIC devices; empty when the VM has none
 */
private List<VirtualDevice> getNicDevices(boolean sorted) throws Exception {
    List<VirtualDevice> devices = _context.getVimClient().getDynamicProperty(_mor, "config.hardware.device");
    List<VirtualDevice> nics = new ArrayList<VirtualDevice>();
    if (devices != null) {
        for (VirtualDevice device : devices) {
            if (device instanceof VirtualEthernetCard) {
                nics.add(device);
            }
        }
    }
    if (sorted) {
        // Comparator.comparingInt replaces the former hand-written anonymous Comparator
        // with identical ordering semantics.
        nics.sort(Comparator.comparingInt(
                (VirtualDevice nic) -> nic.getUnitNumber() != null ? nic.getUnitNumber().intValue() : -1));
    }
    return nics;
}
/** @return the VM's NICs ordered by unit number ascending. */
public VirtualDevice[] getSortedNicDevices() throws Exception {
    List<VirtualDevice> sortedNics = getNicDevices(true);
    return sortedNics.toArray(new VirtualDevice[0]);
}

/** @return the VM's NICs in raw hardware-list order. */
public VirtualDevice[] getNicDevices() throws Exception {
    List<VirtualDevice> nics = getNicDevices(false);
    return nics.toArray(new VirtualDevice[0]);
}

/** @return the index-th NIC in unit-number order, or null when the index is out of range. */
public VirtualDevice getNicDeviceByIndex(int index) throws Exception {
    List<VirtualDevice> sortedNics = getNicDevices(true);
    if (index < 0 || index >= sortedNics.size()) {
        // Not found
        return null;
    }
    return sortedNics.get(index);
}
/**
 * Finds the NIC attached to a network whose name starts with the given prefix,
 * checking the device's network summary first and, for distributed-vSwitch backed
 * NICs, the dvPortGroup name.
 *
 * @param networkNamePrefix prefix of the network (or dvPortGroup) name to match
 * @return pair of (index in unit-number order, NIC device), or (-1, null) when no NIC matches
 */
public Pair<Integer, VirtualDevice> getNicDeviceIndex(String networkNamePrefix) throws Exception {
    List<VirtualDevice> nics = getNicDevices(true);
    int index = 0;
    String attachedNetworkSummary;
    String dvPortGroupName;
    for (VirtualDevice nic : nics) {
        attachedNetworkSummary = ((VirtualEthernetCard)nic).getDeviceInfo().getSummary();
        if (attachedNetworkSummary.startsWith(networkNamePrefix)) {
            // Integer.valueOf replaces the deprecated new Integer(...) constructor.
            return new Pair<Integer, VirtualDevice>(Integer.valueOf(index), nic);
        } else if (attachedNetworkSummary.endsWith("DistributedVirtualPortBackingInfo.summary") || attachedNetworkSummary.startsWith("DVSwitch")) {
            // dvSwitch-backed NICs do not expose the network name in the summary;
            // resolve the dvPortGroup name explicitly.
            dvPortGroupName = getDvPortGroupName((VirtualEthernetCard)nic);
            if (dvPortGroupName != null && dvPortGroupName.startsWith(networkNamePrefix)) {
                s_logger.debug("Found a dvPortGroup already associated with public NIC.");
                return new Pair<Integer, VirtualDevice>(Integer.valueOf(index), nic);
            }
        }
        index++;
    }
    return new Pair<Integer, VirtualDevice>(Integer.valueOf(-1), null);
}
/**
 * Resolves the name of the distributed virtual portgroup a dvSwitch-backed NIC is
 * connected to. Callers must pass a NIC whose backing is a distributed virtual port
 * backing (the cast is unchecked).
 */
public String getDvPortGroupName(VirtualEthernetCard nic) throws Exception {
    VirtualEthernetCardDistributedVirtualPortBackingInfo dvpBackingInfo =
            (VirtualEthernetCardDistributedVirtualPortBackingInfo)nic.getBacking();
    String dvPortGroupKey = dvpBackingInfo.getPort().getPortgroupKey();
    // The portgroup key doubles as the MOR value of the DistributedVirtualPortgroup object.
    ManagedObjectReference dvPortGroupMor = new ManagedObjectReference();
    dvPortGroupMor.setType("DistributedVirtualPortgroup");
    dvPortGroupMor.setValue(dvPortGroupKey);
    return (String)_context.getVimClient().getDynamicProperty(dvPortGroupMor, "name");
}
/**
 * Returns every hardware device that is an instance of at least one of the given
 * classes, preserving hardware-list order.
 *
 * @param deviceClasses non-null array of device classes to match against
 */
public VirtualDevice[] getMatchedDevices(Class<?>[] deviceClasses) throws Exception {
    assert (deviceClasses != null);
    List<VirtualDevice> matched = new ArrayList<VirtualDevice>();
    List<VirtualDevice> hardware = _context.getVimClient().getDynamicProperty(_mor, "config.hardware.device");
    if (hardware != null) {
        for (VirtualDevice candidate : hardware) {
            for (Class<?> deviceClass : deviceClasses) {
                if (deviceClass.isInstance(candidate)) {
                    matched.add(candidate);
                    break;
                }
            }
        }
    }
    return matched.toArray(new VirtualDevice[0]);
}
/**
 * Mounts the VMware Tools installer image into the VM's virtual CD-ROM via vCenter.
 */
public void mountToolsInstaller() throws Exception {
    _context.getService().mountToolsInstaller(_mor);
}
/**
 * Unmounts the VMware Tools installer image from the VM.
 *
 * While the unmount runs, a background monitor polls the VM for blocking questions;
 * a "msg.cdromdisconnect.locked" question (the guest has the CD-ROM locked) is
 * answered programmatically with {@code ANSWER_NO} so the operation cannot hang on
 * operator input.
 *
 * @return true when the unmount completed without the guest locking the CD-ROM;
 *         false when such a lock question was encountered (ISO stays attached)
 */
public boolean unmountToolsInstaller() throws Exception {
    // Monitor VM questions
    // flags[0] signals the monitor thread to stop; single-element arrays are used so
    // the anonymous Runnable can mutate state captured from this method.
    final Boolean[] flags = {false};
    final VirtualMachineMO vmMo = this;
    final boolean[] encounterQuestion = new boolean[1];
    encounterQuestion[0] = false;
    Future<?> future = MonitorServiceExecutor.submit(new Runnable() {
        @Override
        public void run() {
            s_logger.info("VM Question monitor started...");
            while (!flags[0]) {
                try {
                    VirtualMachineRuntimeInfo runtimeInfo = vmMo.getRuntimeInfo();
                    VirtualMachineQuestionInfo question = runtimeInfo.getQuestion();
                    if (question != null) {
                        encounterQuestion[0] = true;
                        if (s_logger.isTraceEnabled()) {
                            s_logger.trace("Question id: " + question.getId());
                            s_logger.trace("Question text: " + question.getText());
                        }
                        // Preferred path: structured messages carry a machine-readable id.
                        if (question.getMessage() != null) {
                            for (VirtualMachineMessage msg : question.getMessage()) {
                                if (s_logger.isTraceEnabled()) {
                                    s_logger.trace("msg id: " + msg.getId());
                                    s_logger.trace("msg text: " + msg.getText());
                                }
                                if ("msg.cdromdisconnect.locked".equalsIgnoreCase(msg.getId())) {
                                    s_logger.info("Found that VM has a pending question that we need to answer programmatically, question id: " + msg.getId() +
                                            ", for safe operation we will automatically decline it");
                                    vmMo.answerVM(question.getId(), ANSWER_NO);
                                    break;
                                }
                            }
                        } else if (question.getText() != null) {
                            // Fallback path: parse "<id>:<text>" out of the raw question text.
                            String text = question.getText();
                            String msgId;
                            String msgText;
                            if (s_logger.isDebugEnabled()) {
                                s_logger.debug("question text : " + text);
                            }
                            String[] tokens = text.split(":");
                            msgId = tokens[0];
                            msgText = tokens[1];
                            if ("msg.cdromdisconnect.locked".equalsIgnoreCase(msgId)) {
                                s_logger.info("Found that VM has a pending question that we need to answer programmatically, question id: " + question.getId() +
                                        ". Message id : " + msgId + ". Message text : " + msgText + ", for safe operation we will automatically decline it.");
                                vmMo.answerVM(question.getId(), ANSWER_NO);
                            }
                        }
                        ChoiceOption choice = question.getChoice();
                        if (choice != null) {
                            // Trace-log the available answers for diagnosis only.
                            for (ElementDescription info : choice.getChoiceInfo()) {
                                if (s_logger.isTraceEnabled()) {
                                    s_logger.trace("Choice option key: " + info.getKey());
                                    s_logger.trace("Choice option label: " + info.getLabel());
                                }
                            }
                        }
                    }
                } catch (Throwable e) {
                    s_logger.error("Unexpected exception: ", e);
                }
                try {
                    Thread.sleep(1000);
                } catch (InterruptedException e) {
                    s_logger.debug("[ignored] interupted while handling vm question about umount tools install.");
                }
            }
            s_logger.info("VM Question monitor stopped");
        }
    });
    try {
        _context.getService().unmountToolsInstaller(_mor);
    } finally {
        // Stop the monitor regardless of the unmount outcome.
        flags[0] = true;
        future.cancel(true);
    }
    if (encounterQuestion[0]) {
        s_logger.warn("cdrom is locked by VM. Failed to detach the ISO.");
        return false;
    } else {
        s_logger.info("Successfully unmounted tools installer from VM.");
        return true;
    }
}
/**
 * Re-registers this VM with vCenter by unregistering it and registering it again
 * from its on-datastore .vmx path. Used to refresh a stale registration.
 *
 * NOTE(review): between unregisterVM and registerVMTask the VM is not registered at
 * all; if registration fails the VM is left unregistered — confirm callers handle this.
 *
 * @param morHost host to register against; when null, the VM's current running host is used
 * @throws Exception when the register task fails
 */
public void redoRegistration(ManagedObjectReference morHost) throws Exception {
    String vmName = getVmName();
    VirtualMachineFileInfo vmFileInfo = getFileInfo();
    boolean isTemplate = isTemplate();
    HostMO hostMo;
    if (morHost != null)
        hostMo = new HostMO(getContext(), morHost);
    else
        hostMo = getRunningHost();
    ManagedObjectReference morFolder = getParentMor();
    ManagedObjectReference morPool = hostMo.getHyperHostOwnerResourcePool();
    _context.getService().unregisterVM(_mor);
    // Register as a plain VM (isTemplate flag false); template state is restored below.
    ManagedObjectReference morTask = _context.getService().registerVMTask(morFolder, vmFileInfo.getVmPathName(), vmName, false, morPool, hostMo.getMor());
    boolean result = _context.getVimClient().waitForTask(morTask);
    if (!result) {
        throw new Exception("Unable to register template due to " + TaskMO.getTaskFailureInfo(_context, morTask));
    } else {
        _context.waitForTaskProgressDone(morTask);
        if (isTemplate) {
            // The VM was a template before re-registration; mark the new registration as one too.
            VirtualMachineMO vmNewRegistration = hostMo.findVmOnHyperHost(vmName);
            assert (vmNewRegistration != null);
            vmNewRegistration.markAsTemplate();
        }
    }
}
/**
 * @return the VM's memory hot-plug increment size, read from
 *         "config.hotPlugMemoryIncrementSize" (value is in MB per the method name —
 *         NOTE(review): confirm the unit against the vSphere property definition)
 */
public long getHotAddMemoryIncrementSizeInMb() throws Exception {
    return (Long)_context.getVimClient().getDynamicProperty(_mor, "config.hotPlugMemoryIncrementSize");
}
/**
 * @return the VM's memory hot-plug upper limit, read from "config.hotPlugMemoryLimit"
 *         (value is in MB per the method name — NOTE(review): confirm the unit
 *         against the vSphere property definition)
 */
public long getHotAddMemoryLimitInMb() throws Exception {
    return (Long)_context.getVimClient().getDynamicProperty(_mor, "config.hotPlugMemoryLimit");
}
/**
 * @return the configured guest OS identifier ("config.guestId") of this VM
 */
public String getGuestId() throws Exception {
    return (String)_context.getVimClient().getDynamicProperty(_mor, "config.guestId");
}
/**
 * Returns the number of cores per virtual socket configured for this VM.
 *
 * The "config.hardware.numCoresPerSocket" property only exists since the
 * vSphere 5.0 API; against older endpoints (or when the property is unset)
 * one core per socket is assumed.
 */
public int getCoresPerSocket() throws Exception {
    if (HypervisorHostHelper.getVcenterApiVersion(_context).compareTo("5.0") < 0) {
        return 1;
    }
    Integer cores = (Integer)_context.getVimClient().getDynamicProperty(_mor, "config.hardware.numCoresPerSocket");
    return cores == null ? 1 : cores;
}
/**
 * @return the virtual hardware version of this VM, taken from the
 *         environment browser's hardware options
 */
public int getVirtualHardwareVersion() throws Exception {
    return getVirtualHardwareOption().getHwVersion();
}
/**
 * Queries the VM's environment browser for its config options and returns
 * the hardware options section (hardware version, device limits, ...).
 */
public VirtualHardwareOption getVirtualHardwareOption() throws Exception {
    VirtualMachineConfigOption vmConfigOption = _context.getService().queryConfigOption(getEnvironmentBrowser(), null, null);
    return vmConfigOption.getHardwareOptions();
}
/**
 * Lazily fetches and caches the VM's environment-browser managed object
 * reference. Not synchronized: concurrent callers may each fetch once,
 * which is harmless since the reference is stable.
 */
private ManagedObjectReference getEnvironmentBrowser() throws Exception {
    if (_vmEnvironmentBrowser == null) {
        _vmEnvironmentBrowser = _context.getVimClient().getMoRefProp(_mor, "environmentBrowser");
    }
    return _vmEnvironmentBrowser;
}
/**
 * Determines whether CPU hot-add is usable for this VM: the guest OS must
 * support it AND the virtual hardware must allow it (hardware version >= 8,
 * or version 7 with a single core per socket — hot adding multi-core vCPUs
 * is not allowed on hardware version 7).
 *
 * Fix: guard against getGuestOsDescriptor() returning null before calling
 * isSupportsCpuHotAdd(), matching the null check already performed by the
 * sibling isMemoryHotAddSupported(); previously a null descriptor caused an NPE.
 *
 * @param guestOsId guest OS identifier used to look up the descriptor
 * @return true only when both the guest OS and the virtual hardware support CPU hot-add
 */
public boolean isCpuHotAddSupported(String guestOsId) throws Exception {
    boolean guestOsSupportsCpuHotAdd = false;
    boolean virtualHardwareSupportsCpuHotAdd = false;
    GuestOsDescriptor guestOsDescriptor;
    int virtualHardwareVersion;
    int numCoresPerSocket;
    guestOsDescriptor = getGuestOsDescriptor(guestOsId);
    virtualHardwareVersion = getVirtualHardwareVersion();
    // Check if guest operating system supports cpu hotadd (descriptor may be null
    // for an unknown guestOsId, in which case hot-add is treated as unsupported).
    if (guestOsDescriptor != null && guestOsDescriptor.isSupportsCpuHotAdd()) {
        guestOsSupportsCpuHotAdd = true;
    }
    // Check if virtual machine is using hardware version 8 or later.
    // If hardware version is 7, then only 1 core per socket is supported. Hot adding multi-core vcpus is not allowed if hardware version is 7.
    if (virtualHardwareVersion >= 8) {
        virtualHardwareSupportsCpuHotAdd = true;
    } else if (virtualHardwareVersion == 7) {
        // Check if virtual machine has only 1 core per socket.
        numCoresPerSocket = getCoresPerSocket();
        if (numCoresPerSocket == 1) {
            virtualHardwareSupportsCpuHotAdd = true;
        }
    }
    return guestOsSupportsCpuHotAdd && virtualHardwareSupportsCpuHotAdd;
}
/**
 * Determines whether memory hot-add is usable for this VM: the guest OS
 * descriptor (when resolvable) must report support, and the virtual hardware
 * version must be 7 or later.
 *
 * @param guestOsId guest OS identifier used to look up the descriptor
 * @return true only when both the guest OS and the virtual hardware support memory hot-add
 */
public boolean isMemoryHotAddSupported(String guestOsId) throws Exception {
    GuestOsDescriptor descriptor = getGuestOsDescriptor(guestOsId);
    int hardwareVersion = getVirtualHardwareVersion();
    // Guest OS side: an unknown guestOsId (null descriptor) counts as unsupported.
    boolean guestOsOk = descriptor != null && descriptor.isSupportsMemoryHotAdd();
    // Hardware side: memory hot-add requires virtual hardware version 7 or later.
    boolean hardwareOk = hardwareVersion >= 7;
    return guestOsOk && hardwareOk;
}
/**
 * Ensures the VM has LsiLogic SAS SCSI controllers: if none is present,
 * adds controllers for bus numbers [availableBusNum, count).
 *
 * Fix: the success check was inverted. configureVm(...) returns true on
 * success (the same convention as every other waitForTask-based wrapper in
 * this class, e.g. consolidateVmDisks), so the old code threw when the
 * reconfigure SUCCEEDED and silently returned when it failed.
 *
 * @param count exclusive upper bound for controller bus numbers
 * @param availableBusNum first free bus number to start adding from
 * @throws Exception when the reconfigure task fails
 */
public void ensureLsiLogicSasDeviceControllers(int count, int availableBusNum) throws Exception {
    int scsiControllerKey = getLsiLogicSasDeviceControllerKeyNoException();
    if (scsiControllerKey < 0) {
        VirtualMachineConfigSpec vmConfig = new VirtualMachineConfigSpec();
        int busNum = availableBusNum;
        while (busNum < count) {
            VirtualLsiLogicSASController scsiController = new VirtualLsiLogicSASController();
            scsiController.setSharedBus(VirtualSCSISharing.NO_SHARING);
            scsiController.setBusNumber(busNum);
            // Negative temporary key; vCenter assigns the real key on reconfigure.
            scsiController.setKey(busNum - VmwareHelper.MAX_SCSI_CONTROLLER_COUNT);
            VirtualDeviceConfigSpec scsiControllerSpec = new VirtualDeviceConfigSpec();
            scsiControllerSpec.setDevice(scsiController);
            scsiControllerSpec.setOperation(VirtualDeviceConfigSpecOperation.ADD);
            vmConfig.getDeviceChange().add(scsiControllerSpec);
            busNum++;
        }
        if (!configureVm(vmConfig)) {
            throw new Exception("Unable to add Scsi controller of type LsiLogic SAS.");
        } else {
            s_logger.info("Successfully added " + count + " LsiLogic SAS SCSI controllers.");
        }
    }
}
/**
 * Scans the VM's hardware device list for a LsiLogic SAS controller.
 *
 * @return the device key of the first LsiLogic SAS controller found, or -1
 *         when the VM has none (never throws for "not found")
 */
private int getLsiLogicSasDeviceControllerKeyNoException() throws Exception {
    List<VirtualDevice> hardware = (List<VirtualDevice>)_context.getVimClient().
            getDynamicProperty(_mor, "config.hardware.device");
    if (hardware == null) {
        return -1;
    }
    for (VirtualDevice candidate : hardware) {
        if (candidate instanceof VirtualLsiLogicSASController) {
            return candidate.getKey();
        }
    }
    return -1;
}
/**
 * Ensures the VM has BusLogic SCSI controllers: if none is present, adds
 * controllers for bus numbers [availableBusNum, count).
 *
 * Fix: the success check was inverted. configureVm(...) returns true on
 * success (the same convention as every other waitForTask-based wrapper in
 * this class), so the old code threw when the reconfigure SUCCEEDED and
 * logged "Successfully added" when it failed.
 *
 * @param count exclusive upper bound for controller bus numbers
 * @param availableBusNum first free bus number to start adding from
 * @throws Exception when the reconfigure task fails
 */
public void ensureBusLogicDeviceControllers(int count, int availableBusNum) throws Exception {
    int scsiControllerKey = getBusLogicDeviceControllerKeyNoException();
    if (scsiControllerKey < 0) {
        VirtualMachineConfigSpec vmConfig = new VirtualMachineConfigSpec();
        int busNum = availableBusNum;
        while (busNum < count) {
            VirtualBusLogicController scsiController = new VirtualBusLogicController();
            scsiController.setSharedBus(VirtualSCSISharing.NO_SHARING);
            scsiController.setBusNumber(busNum);
            // Negative temporary key; vCenter assigns the real key on reconfigure.
            scsiController.setKey(busNum - VmwareHelper.MAX_SCSI_CONTROLLER_COUNT);
            VirtualDeviceConfigSpec scsiControllerSpec = new VirtualDeviceConfigSpec();
            scsiControllerSpec.setDevice(scsiController);
            scsiControllerSpec.setOperation(VirtualDeviceConfigSpecOperation.ADD);
            vmConfig.getDeviceChange().add(scsiControllerSpec);
            busNum++;
        }
        if (!configureVm(vmConfig)) {
            throw new Exception("Unable to add Scsi BusLogic controllers to the VM " + getName());
        } else {
            s_logger.info("Successfully added " + count + " SCSI BusLogic controllers.");
        }
    }
}
/**
 * Scans the VM's hardware device list for a BusLogic controller.
 *
 * @return the device key of the first BusLogic controller found, or -1
 *         when the VM has none (never throws for "not found")
 */
private int getBusLogicDeviceControllerKeyNoException() throws Exception {
    List<VirtualDevice> hardware = (List<VirtualDevice>)_context.getVimClient().
            getDynamicProperty(_mor, "config.hardware.device");
    if (hardware == null) {
        return -1;
    }
    for (VirtualDevice candidate : hardware) {
        if (candidate instanceof VirtualBusLogicController) {
            return candidate.getKey();
        }
    }
    return -1;
}
/**
 * Summarizes the VM's SCSI controller configuration.
 *
 * @return a Ternary of (number of SCSI controllers, highest bus number seen
 *         or -1 when there is none, controller type of the last SCSI
 *         controller in device order — defaulting to lsilogic)
 */
public Ternary<Integer, Integer, DiskControllerType> getScsiControllerInfo() throws Exception {
    List<VirtualDevice> hardware = (List<VirtualDevice>)_context.getVimClient().
            getDynamicProperty(_mor, "config.hardware.device");
    int controllerCount = 0;
    int highestBusNum = -1;
    DiskControllerType controllerType = DiskControllerType.lsilogic;
    if (hardware != null) {
        for (VirtualDevice device : hardware) {
            if (!(device instanceof VirtualSCSIController)) {
                continue;
            }
            controllerCount++;
            highestBusNum = Math.max(highestBusNum, ((VirtualSCSIController)device).getBusNumber());
            if (device instanceof VirtualLsiLogicController) {
                controllerType = DiskControllerType.lsilogic;
            } else if (device instanceof VirtualLsiLogicSASController) {
                controllerType = DiskControllerType.lsisas1068;
            } else if (device instanceof VirtualBusLogicController) {
                controllerType = DiskControllerType.buslogic;
            } else if (device instanceof ParaVirtualSCSIController) {
                controllerType = DiskControllerType.pvscsi;
            }
        }
    }
    return new Ternary<Integer, Integer, DiskControllerType>(controllerCount, highestBusNum, controllerType);
}
/**
 * Counts the virtual disk devices currently attached to this VM.
 *
 * @return the number of VirtualDisk entries in the VM's hardware device list
 *         (0 when the list is null or empty)
 */
public int getNumberOfVirtualDisks() throws Exception {
    List<VirtualDevice> hardware = (List<VirtualDevice>)_context.getVimClient().getDynamicProperty(_mor, "config.hardware.device");
    s_logger.info("Counting disk devices attached to VM " + getVmName());
    if (hardware == null) {
        return 0;
    }
    int diskCount = 0;
    for (VirtualDevice device : hardware) {
        if (device instanceof VirtualDisk) {
            diskCount++;
        }
    }
    return diskCount;
}
/**
 * Runs the vSphere ConsolidateVMDisks task for this VM and waits for it.
 *
 * @return true when the consolidation task completed successfully, false otherwise
 */
public boolean consolidateVmDisks() throws Exception {
    ManagedObjectReference task = _context.getService().consolidateVMDisksTask(_mor);
    if (!_context.getVimClient().waitForTask(task)) {
        s_logger.error("VMware ConsolidateVMDisks_Task failed due to " + TaskMO.getTaskFailureInfo(_context, task));
        return false;
    }
    _context.waitForTaskProgressDone(task);
    return true;
}
}
|
vmware-base/src/main/java/com/cloud/hypervisor/vmware/mo/VirtualMachineMO.java
|
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package com.cloud.hypervisor.vmware.mo;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.net.URLEncoder;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import com.vmware.vim25.VStorageObject;
import com.vmware.vim25.VStorageObjectConfigInfo;
import org.apache.commons.collections.CollectionUtils;
import org.apache.log4j.Logger;
import org.apache.commons.lang.StringUtils;
import com.google.gson.Gson;
import com.vmware.vim25.ArrayOfManagedObjectReference;
import com.vmware.vim25.ChoiceOption;
import com.vmware.vim25.CustomFieldStringValue;
import com.vmware.vim25.DistributedVirtualSwitchPortConnection;
import com.vmware.vim25.DynamicProperty;
import com.vmware.vim25.ElementDescription;
import com.vmware.vim25.GuestInfo;
import com.vmware.vim25.GuestOsDescriptor;
import com.vmware.vim25.HttpNfcLeaseDeviceUrl;
import com.vmware.vim25.HttpNfcLeaseInfo;
import com.vmware.vim25.HttpNfcLeaseState;
import com.vmware.vim25.ManagedObjectReference;
import com.vmware.vim25.ObjectContent;
import com.vmware.vim25.ObjectSpec;
import com.vmware.vim25.OptionValue;
import com.vmware.vim25.OvfCreateDescriptorParams;
import com.vmware.vim25.OvfCreateDescriptorResult;
import com.vmware.vim25.OvfFile;
import com.vmware.vim25.ParaVirtualSCSIController;
import com.vmware.vim25.PropertyFilterSpec;
import com.vmware.vim25.PropertySpec;
import com.vmware.vim25.TraversalSpec;
import com.vmware.vim25.VirtualBusLogicController;
import com.vmware.vim25.VirtualCdrom;
import com.vmware.vim25.VirtualCdromIsoBackingInfo;
import com.vmware.vim25.VirtualCdromRemotePassthroughBackingInfo;
import com.vmware.vim25.VirtualController;
import com.vmware.vim25.VirtualDevice;
import com.vmware.vim25.VirtualDeviceBackingInfo;
import com.vmware.vim25.VirtualDeviceConfigSpec;
import com.vmware.vim25.VirtualDeviceConfigSpecFileOperation;
import com.vmware.vim25.VirtualDeviceConfigSpecOperation;
import com.vmware.vim25.VirtualDeviceConnectInfo;
import com.vmware.vim25.VirtualDisk;
import com.vmware.vim25.VirtualDiskFlatVer1BackingInfo;
import com.vmware.vim25.VirtualDiskFlatVer2BackingInfo;
import com.vmware.vim25.VirtualDiskMode;
import com.vmware.vim25.VirtualDiskRawDiskMappingVer1BackingInfo;
import com.vmware.vim25.VirtualDiskSparseVer1BackingInfo;
import com.vmware.vim25.VirtualDiskSparseVer2BackingInfo;
import com.vmware.vim25.VirtualDiskType;
import com.vmware.vim25.VirtualEthernetCard;
import com.vmware.vim25.VirtualEthernetCardDistributedVirtualPortBackingInfo;
import com.vmware.vim25.VirtualHardwareOption;
import com.vmware.vim25.VirtualIDEController;
import com.vmware.vim25.VirtualLsiLogicController;
import com.vmware.vim25.VirtualLsiLogicSASController;
import com.vmware.vim25.VirtualMachineCloneSpec;
import com.vmware.vim25.VirtualMachineConfigInfo;
import com.vmware.vim25.VirtualMachineConfigOption;
import com.vmware.vim25.VirtualMachineConfigSpec;
import com.vmware.vim25.VirtualMachineConfigSummary;
import com.vmware.vim25.VirtualMachineFileInfo;
import com.vmware.vim25.VirtualMachineFileLayoutEx;
import com.vmware.vim25.VirtualMachineMessage;
import com.vmware.vim25.VirtualMachineMovePriority;
import com.vmware.vim25.VirtualMachinePowerState;
import com.vmware.vim25.VirtualMachineQuestionInfo;
import com.vmware.vim25.VirtualMachineRelocateDiskMoveOptions;
import com.vmware.vim25.VirtualMachineRelocateSpec;
import com.vmware.vim25.VirtualMachineRelocateSpecDiskLocator;
import com.vmware.vim25.VirtualMachineRuntimeInfo;
import com.vmware.vim25.VirtualMachineSnapshotInfo;
import com.vmware.vim25.VirtualMachineSnapshotTree;
import com.vmware.vim25.VirtualSCSIController;
import com.vmware.vim25.VirtualSCSISharing;
import com.cloud.hypervisor.vmware.mo.SnapshotDescriptor.SnapshotInfo;
import com.cloud.hypervisor.vmware.util.VmwareContext;
import com.cloud.hypervisor.vmware.util.VmwareHelper;
import com.cloud.utils.ActionDelegate;
import com.cloud.utils.Pair;
import com.cloud.utils.Ternary;
import com.cloud.utils.concurrency.NamedThreadFactory;
import com.cloud.utils.script.Script;
import static com.cloud.utils.NumbersUtil.toHumanReadableSize;
public class VirtualMachineMO extends BaseMO {
private static final Logger s_logger = Logger.getLogger(VirtualMachineMO.class);
private static final ExecutorService MonitorServiceExecutor = Executors.newCachedThreadPool(new NamedThreadFactory("VM-Question-Monitor"));
public static final String ANSWER_YES = "0";
public static final String ANSWER_NO = "1";
private ManagedObjectReference _vmEnvironmentBrowser = null;
/**
 * Creates a wrapper around an existing VirtualMachine managed object reference.
 *
 * @param context vCenter session context used for all subsequent calls
 * @param morVm managed object reference of the virtual machine
 */
public VirtualMachineMO(VmwareContext context, ManagedObjectReference morVm) {
    super(context, morVm);
}
/**
 * Creates a wrapper from a raw managed object type/value pair.
 *
 * @param context vCenter session context used for all subsequent calls
 * @param morType managed object type (expected "VirtualMachine")
 * @param morValue managed object identifier value
 */
public VirtualMachineMO(VmwareContext context, String morType, String morValue) {
    super(context, morType, morValue);
}
/**
 * @return the datacenter that owns this VM together with its name
 */
public Pair<DatacenterMO, String> getOwnerDatacenter() throws Exception {
    return DatacenterMO.getOwnerDatacenter(getContext(), getMor());
}
/**
 * Resolves the datastore that backs a datastore-qualified path (e.g.
 * "[ds1] vm/vm.vmx") among the datastores attached to this VM.
 *
 * @param dsFullPath fully qualified datastore path; only the "[name]" part is matched
 * @return a pair of (DatastoreMO, datastore name), or null when none of the
 *         VM's datastores has that name
 */
public Pair<DatastoreMO, String> getOwnerDatastore(String dsFullPath) throws Exception {
    String dsName = DatastoreFile.getDatastoreNameFromPath(dsFullPath);
    // Property collector query: traverse VirtualMachine -> "datastore" and
    // fetch each reachable datastore's "name" property.
    PropertySpec pSpec = new PropertySpec();
    pSpec.setType("Datastore");
    pSpec.getPathSet().add("name");
    TraversalSpec vmDatastoreTraversal = new TraversalSpec();
    vmDatastoreTraversal.setType("VirtualMachine");
    vmDatastoreTraversal.setPath("datastore");
    vmDatastoreTraversal.setName("vmDatastoreTraversal");
    ObjectSpec oSpec = new ObjectSpec();
    oSpec.setObj(_mor);
    // Skip the VM object itself; we only want the traversed datastores.
    oSpec.setSkip(Boolean.TRUE);
    oSpec.getSelectSet().add(vmDatastoreTraversal);
    PropertyFilterSpec pfSpec = new PropertyFilterSpec();
    pfSpec.getPropSet().add(pSpec);
    pfSpec.getObjectSet().add(oSpec);
    List<PropertyFilterSpec> pfSpecArr = new ArrayList<PropertyFilterSpec>();
    pfSpecArr.add(pfSpec);
    List<ObjectContent> ocs = _context.getService().retrieveProperties(_context.getPropertyCollector(), pfSpecArr);
    if (ocs != null) {
        for (ObjectContent oc : ocs) {
            DynamicProperty prop = oc.getPropSet().get(0);
            if (prop.getVal().toString().equals(dsName)) {
                return new Pair<DatastoreMO, String>(new DatastoreMO(_context, oc.getObj()), dsName);
            }
        }
    }
    return null;
}
/**
 * @return a HostMO wrapping the host this VM is currently placed on,
 *         taken from the VM's runtime information
 */
public HostMO getRunningHost() throws Exception {
    return new HostMO(_context, getRuntimeInfo().getHost());
}
/**
 * @return the VM's display name ("name" property)
 */
public String getVmName() throws Exception {
    return (String)getContext().getVimClient().getDynamicProperty(_mor, "name");
}
/**
 * @return the VM's guest-OS runtime information ("guest" property)
 */
public GuestInfo getVmGuestInfo() throws Exception {
    return (GuestInfo)getContext().getVimClient().getDynamicProperty(_mor, "guest");
}
/**
 * Answers a pending VM question (blocking dialog) programmatically.
 *
 * @param questionId id of the pending question (VirtualMachineQuestionInfo.getId())
 * @param choice answer choice key, e.g. {@code ANSWER_YES} ("0") or {@code ANSWER_NO} ("1")
 */
public void answerVM(String questionId, String choice) throws Exception {
    getContext().getService().answerVM(_mor, questionId, choice);
}
/**
 * @return true when the guest reports VMware Tools in the "guestToolsRunning"
 *         state; false when tools are not running or guest info is unavailable
 */
public boolean isVMwareToolsRunning() throws Exception {
    GuestInfo guest = getVmGuestInfo();
    return guest != null && "guestToolsRunning".equalsIgnoreCase(guest.getToolsRunningStatus());
}
/**
 * Powers on the VM and waits for the task to complete. While the power-on
 * task runs, a background watcher polls for pending VM questions once per
 * second and auto-answers the "msg.uuid.altered" question so an out-of-band
 * HA power-on does not hang on the UUID dialog.
 *
 * @return true when already powered on or the power-on task succeeds;
 *         false when the task fails
 */
public boolean powerOn() throws Exception {
    if (getResetSafePowerState() == VirtualMachinePowerState.POWERED_ON)
        return true;
    ManagedObjectReference morTask = _context.getService().powerOnVMTask(_mor, null);
    // Monitor VM questions
    // flags[0] is the watcher's stop signal, set in the finally block below.
    final Boolean[] flags = {false};
    final VirtualMachineMO vmMo = this;
    Future<?> future = MonitorServiceExecutor.submit(new Runnable() {
        @Override
        public void run() {
            s_logger.info("VM Question monitor started...");
            while (!flags[0]) {
                try {
                    VirtualMachineRuntimeInfo runtimeInfo = vmMo.getRuntimeInfo();
                    VirtualMachineQuestionInfo question = runtimeInfo.getQuestion();
                    if (question != null) {
                        s_logger.info("Question id: " + question.getId());
                        s_logger.info("Question text: " + question.getText());
                        if (question.getMessage() != null) {
                            for (VirtualMachineMessage msg : question.getMessage()) {
                                if (s_logger.isInfoEnabled()) {
                                    s_logger.info("msg id: " + msg.getId());
                                    s_logger.info("msg text: " + msg.getText());
                                }
                                if ("msg.uuid.altered".equalsIgnoreCase(msg.getId())) {
                                    // Answer choice "1" — per the log text this selects the
                                    // "moved it" option. NOTE(review): confirm "1" maps to
                                    // "I moved it" in this question's choice list.
                                    s_logger.info("Found that VM has a pending question that we need to answer programmatically, question id: " + msg.getId()
                                            + ", we will automatically answer as 'moved it' to address out of band HA for the VM");
                                    vmMo.answerVM(question.getId(), "1");
                                    break;
                                }
                            }
                        }
                        if (s_logger.isTraceEnabled())
                            s_logger.trace("These are the choices we can have just in case");
                        ChoiceOption choice = question.getChoice();
                        if (choice != null) {
                            for (ElementDescription info : choice.getChoiceInfo()) {
                                if (s_logger.isTraceEnabled()) {
                                    s_logger.trace("Choice option key: " + info.getKey());
                                    s_logger.trace("Choice option label: " + info.getLabel());
                                }
                            }
                        }
                    }
                } catch (Throwable e) {
                    s_logger.error("Unexpected exception: ", e);
                }
                try {
                    Thread.sleep(1000);
                } catch (InterruptedException e) {
                    s_logger.debug("[ignored] interupted while dealing with vm questions.");
                }
            }
            s_logger.info("VM Question monitor stopped");
        }
    });
    try {
        boolean result = _context.getVimClient().waitForTask(morTask);
        if (result) {
            _context.waitForTaskProgressDone(morTask);
            return true;
        } else {
            s_logger.error("VMware powerOnVM_Task failed due to " + TaskMO.getTaskFailureInfo(_context, morTask));
        }
    } finally {
        // make sure to let VM question monitor exit
        flags[0] = true;
    }
    return false;
}
/**
 * Powers off the VM unconditionally (hard power-off) unless it is already off.
 *
 * @return true when already off or the power-off succeeds
 */
public boolean powerOff() throws Exception {
    if (getResetSafePowerState() == VirtualMachinePowerState.POWERED_OFF) {
        return true;
    }
    return powerOffNoCheck();
}
/**
 * Attempts a graceful guest shutdown first (when VMware Tools is running),
 * polling up to shutdownWaitMs for the VM to power off; falls back to a
 * hard power-off if the graceful shutdown does not complete or fails.
 *
 * @param shutdownWaitMs maximum time in milliseconds to wait for the guest shutdown
 * @return true when the VM ends up powered off
 */
public boolean safePowerOff(int shutdownWaitMs) throws Exception {
    if (getResetSafePowerState() == VirtualMachinePowerState.POWERED_OFF)
        return true;
    if (isVMwareToolsRunning()) {
        try {
            String vmName = getName();
            s_logger.info("Try gracefully shut down VM " + vmName);
            shutdown();
            // Poll once a second until the guest reports powered-off or the deadline passes.
            long startTick = System.currentTimeMillis();
            while (getResetSafePowerState() != VirtualMachinePowerState.POWERED_OFF && System.currentTimeMillis() - startTick < shutdownWaitMs) {
                try {
                    Thread.sleep(1000);
                } catch (InterruptedException e) {
                    s_logger.debug("[ignored] interupted while powering of vm.");
                }
            }
            if (getResetSafePowerState() != VirtualMachinePowerState.POWERED_OFF) {
                s_logger.info("can not gracefully shutdown VM within " + (shutdownWaitMs / 1000) + " seconds, we will perform force power off on VM " + vmName);
                return powerOffNoCheck();
            }
            return true;
        } catch (Exception e) {
            // Graceful path failed (e.g. shutdownGuest rejected); fall through to hard power-off.
            s_logger.warn("Failed to do guest-os graceful shutdown due to " + VmwareHelper.getExceptionMessage(e));
        }
    }
    return powerOffNoCheck();
}
/**
 * Issues a hard power-off task without checking the current power state,
 * then waits up to 5 seconds for the reported state to become POWERED_OFF
 * to avoid racing immediately-following operations.
 *
 * @return true on success, or when the task failed but the VM is off anyway
 */
private boolean powerOffNoCheck() throws Exception {
    ManagedObjectReference morTask = _context.getService().powerOffVMTask(_mor);
    boolean result = _context.getVimClient().waitForTask(morTask);
    if (result) {
        _context.waitForTaskProgressDone(morTask);
        // It seems that even if a power-off task is returned done, VM state may still not be marked,
        // wait up to 5 seconds to make sure to avoid race conditioning for immediate following on operations
        // that relies on a powered-off VM
        long startTick = System.currentTimeMillis();
        while (getResetSafePowerState() != VirtualMachinePowerState.POWERED_OFF && System.currentTimeMillis() - startTick < 5000) {
            try {
                Thread.sleep(1000);
            } catch (InterruptedException e) {
                s_logger.debug("[ignored] interupted while powering of vm unconditionaly.");
            }
        }
        return true;
    } else {
        if (getResetSafePowerState() == VirtualMachinePowerState.POWERED_OFF) {
            // to help deal with possible race-condition
            s_logger.info("Current power-off task failed. However, VM has been switched to the state we are expecting for");
            return true;
        }
        s_logger.error("VMware powerOffVM_Task failed due to " + TaskMO.getTaskFailureInfo(_context, morTask));
    }
    return false;
}
/**
 * Reads the VM's power state, re-reading up to three times (1s apart) when
 * it appears POWERED_OFF, to avoid mistaking a transient reboot (e.g.
 * Windows sysprep) for a real power-off. See the inline comment below.
 *
 * @return the (retry-stabilized) power state
 */
public VirtualMachinePowerState getResetSafePowerState() throws Exception {
    VirtualMachinePowerState powerState = VirtualMachinePowerState.POWERED_OFF;
    // This is really ugly, there is a case that when windows guest VM is doing sysprep, the temporary
    // rebooting process may let us pick up a "poweredOff" state during VMsync process, this can trigger
    // a series actions. Unfortunately, from VMware API we can not distinguish power state into such details.
    // We hope by giving it 3 second to re-read the state can cover this as a short-term solution.
    //
    // In the future, VMsync should not kick off CloudStack action (this is not a HA case) based on VM
    // state report, until then we can remove this hacking fix
    for (int i = 0; i < 3; i++) {
        powerState = (VirtualMachinePowerState)getContext().getVimClient().getDynamicProperty(_mor, "runtime.powerState");
        if (powerState == VirtualMachinePowerState.POWERED_OFF) {
            try {
                Thread.sleep(1000);
            } catch (InterruptedException e) {
                s_logger.debug("[ignored] interupted while pausing after power off.");
            }
        } else {
            break;
        }
    }
    return powerState;
}
/**
 * @return the VM's raw power state ("runtime.powerState"), without the
 *         retry smoothing done by {@link #getResetSafePowerState()}
 */
public VirtualMachinePowerState getPowerState() throws Exception {
    return (VirtualMachinePowerState)getContext().getVimClient().getDynamicProperty(_mor, "runtime.powerState");
}
/**
 * Hard-resets the VM via the vSphere ResetVM task and waits for completion.
 *
 * @return true when the reset task succeeds, false otherwise
 */
public boolean reset() throws Exception {
    ManagedObjectReference task = _context.getService().resetVMTask(_mor);
    if (!_context.getVimClient().waitForTask(task)) {
        s_logger.error("VMware resetVM_Task failed due to " + TaskMO.getTaskFailureInfo(_context, task));
        return false;
    }
    _context.waitForTaskProgressDone(task);
    return true;
}
/**
 * Requests a graceful guest-OS shutdown (requires VMware Tools); returns
 * immediately without waiting for the VM to power off.
 */
public void shutdown() throws Exception {
    _context.getService().shutdownGuest(_mor);
}
/**
 * Requests a graceful guest-OS reboot (requires VMware Tools); returns
 * immediately without waiting for the reboot to complete.
 */
public void rebootGuest() throws Exception {
    _context.getService().rebootGuest(_mor);
}
/**
 * Converts this VM into a template in vCenter.
 */
public void markAsTemplate() throws Exception {
    _context.getService().markAsTemplate(_mor);
}
/**
 * Converts this template back into a regular VM.
 *
 * @param resourcePool resource pool to place the VM in
 * @param host target host for the VM
 */
public void markAsVirtualMachine(ManagedObjectReference resourcePool, ManagedObjectReference host) throws Exception {
    _context.getService().markAsVirtualMachine(_mor, resourcePool, host);
}
/**
 * @return true when this managed object is registered as a template
 */
public boolean isTemplate() throws Exception {
    return getConfigInfo().isTemplate();
}
/**
 * Migrates this VM to the given resource pool / host using default priority
 * and waits for the task.
 *
 * @param morRp target resource pool
 * @param morTargetHost target host
 * @return true when the migrate task succeeds, false otherwise
 */
public boolean migrate(ManagedObjectReference morRp, ManagedObjectReference morTargetHost) throws Exception {
    ManagedObjectReference task = _context.getService().migrateVMTask(_mor, morRp, morTargetHost, VirtualMachineMovePriority.DEFAULT_PRIORITY, null);
    if (!_context.getVimClient().waitForTask(task)) {
        s_logger.error("VMware migrateVM_Task failed due to " + TaskMO.getTaskFailureInfo(_context, task));
        return false;
    }
    _context.waitForTaskProgressDone(task);
    return true;
}
/**
 * Relocates this VM according to a caller-built relocate spec (datastore
 * change) with default priority, and waits for the task.
 *
 * @param relocateSpec relocation specification describing the target datastore(s)
 * @return true when the relocate task succeeds, false otherwise
 */
public boolean changeDatastore(VirtualMachineRelocateSpec relocateSpec) throws Exception {
    ManagedObjectReference morTask = _context.getVimClient().getService().relocateVMTask(_mor, relocateSpec, VirtualMachineMovePriority.DEFAULT_PRIORITY);
    boolean result = _context.getVimClient().waitForTask(morTask);
    if (result) {
        _context.waitForTaskProgressDone(morTask);
        return true;
    } else {
        s_logger.error("VMware RelocateVM_Task to change datastore failed due to " + TaskMO.getTaskFailureInfo(_context, morTask));
    }
    return false;
}
/**
 * Relocates this VM according to a caller-built relocate spec (host change)
 * with default priority, and waits for the task.
 *
 * @param relocateSpec relocation specification describing the target host
 * @return true when the relocate task succeeds, false otherwise
 */
public boolean changeHost(VirtualMachineRelocateSpec relocateSpec) throws Exception {
    ManagedObjectReference morTask = _context.getService().relocateVMTask(_mor, relocateSpec, VirtualMachineMovePriority.DEFAULT_PRIORITY);
    boolean result = _context.getVimClient().waitForTask(morTask);
    if (result) {
        _context.waitForTaskProgressDone(morTask);
        return true;
    } else {
        s_logger.error("VMware RelocateVM_Task to change host failed due to " + TaskMO.getTaskFailureInfo(_context, morTask));
    }
    return false;
}
/**
 * Relocates this VM onto the given datastore (unspecified priority) and
 * waits for the task.
 *
 * @param morDataStore target datastore managed object reference
 * @return true when the relocate task succeeds, false otherwise
 */
public boolean changeDatastore(ManagedObjectReference morDataStore) throws Exception {
    VirtualMachineRelocateSpec spec = new VirtualMachineRelocateSpec();
    spec.setDatastore(morDataStore);
    ManagedObjectReference task = _context.getService().relocateVMTask(_mor, spec, null);
    if (!_context.getVimClient().waitForTask(task)) {
        s_logger.error("VMware change datastore relocateVM_Task failed due to " + TaskMO.getTaskFailureInfo(_context, task));
        return false;
    }
    _context.waitForTaskProgressDone(task);
    return true;
}
/**
 * Relocates this VM to the given host (unspecified priority) and waits for
 * the task.
 *
 * @param morTargetHost target host managed object reference
 * @return true when the relocate task succeeds, false otherwise
 */
public boolean relocate(ManagedObjectReference morTargetHost) throws Exception {
    VirtualMachineRelocateSpec spec = new VirtualMachineRelocateSpec();
    spec.setHost(morTargetHost);
    ManagedObjectReference task = _context.getService().relocateVMTask(_mor, spec, null);
    if (!_context.getVimClient().waitForTask(task)) {
        s_logger.error("VMware relocateVM_Task failed due to " + TaskMO.getTaskFailureInfo(_context, task));
        return false;
    }
    _context.waitForTaskProgressDone(task);
    return true;
}
/**
 * @return the VM's snapshot tree info ("snapshot" property); may be null
 *         when the VM has no snapshots
 */
public VirtualMachineSnapshotInfo getSnapshotInfo() throws Exception {
    return (VirtualMachineSnapshotInfo)_context.getVimClient().getDynamicProperty(_mor, "snapshot");
}
/**
 * Creates a snapshot; convenience wrapper returning success/failure instead
 * of the snapshot reference.
 *
 * @see #createSnapshotGetReference(String, String, boolean, boolean)
 */
public boolean createSnapshot(String snapshotName, String snapshotDescription, boolean dumpMemory, boolean quiesce) throws Exception {
    return createSnapshotGetReference(snapshotName, snapshotDescription, dumpMemory, quiesce) != null;
}
/**
 * Creates a snapshot of this VM and returns its managed object reference.
 * After the create task completes, polls up to the vCenter session timeout
 * for the snapshot object to become visible in vCenter (task completion and
 * inventory visibility are not atomic).
 *
 * Fix: the post-wait debug message claimed "seconds" while logging a
 * millisecond delta; it now reports milliseconds.
 *
 * @param snapshotName snapshot name (also used to locate the new snapshot)
 * @param snapshotDescription free-form description
 * @param dumpMemory whether to include the VM's memory in the snapshot
 * @param quiesce whether to quiesce the guest file system (needs VMware Tools)
 * @return the snapshot's managed object reference, or null when the task
 *         failed or the snapshot never appeared within the timeout
 */
public ManagedObjectReference createSnapshotGetReference(String snapshotName, String snapshotDescription, boolean dumpMemory, boolean quiesce) throws Exception {
    long apiTimeout = _context.getVimClient().getVcenterSessionTimeout();
    ManagedObjectReference morTask = _context.getService().createSnapshotTask(_mor, snapshotName, snapshotDescription, dumpMemory, quiesce);
    boolean result = _context.getVimClient().waitForTask(morTask);
    if (result) {
        _context.waitForTaskProgressDone(morTask);
        ManagedObjectReference morSnapshot = null;
        // We still need to wait until the object appear in vCenter
        long startTick = System.currentTimeMillis();
        while (System.currentTimeMillis() - startTick < apiTimeout) {
            morSnapshot = getSnapshotMor(snapshotName);
            if (morSnapshot != null) {
                break;
            }
            try {
                Thread.sleep(1000);
            } catch (InterruptedException e) {
                s_logger.debug("[ignored] interupted while waiting for snapshot to be done.");
            }
        }
        if (morSnapshot == null) {
            s_logger.error("We've been waiting for over " + apiTimeout + " milli seconds for snapshot MOR to be appearing in vCenter after CreateSnapshot task is done, " +
                    "but it is still not there?!");
            return null;
        }
        s_logger.debug("Waited for " + (System.currentTimeMillis() - startTick) + " milliseconds for snapshot object [" + snapshotName + "] to appear in vCenter.");
        return morSnapshot;
    } else {
        s_logger.error("VMware createSnapshot_Task failed due to " + TaskMO.getTaskFailureInfo(_context, morTask));
    }
    return null;
}
/**
 * Removes the named snapshot (consolidating its disks) and waits for the task.
 *
 * @param snapshotName name of the snapshot to remove
 * @param removeChildren whether to also remove child snapshots
 * @return true on success; false when the snapshot is not found or the task fails
 */
public boolean removeSnapshot(String snapshotName, boolean removeChildren) throws Exception {
    ManagedObjectReference morSnapshot = getSnapshotMor(snapshotName);
    if (morSnapshot == null) {
        s_logger.warn("Unable to find snapshot: " + snapshotName);
        return false;
    }
    ManagedObjectReference task = _context.getService().removeSnapshotTask(morSnapshot, removeChildren, true);
    if (!_context.getVimClient().waitForTask(task)) {
        s_logger.error("VMware removeSnapshot_Task failed due to " + TaskMO.getTaskFailureInfo(_context, task));
        return false;
    }
    _context.waitForTaskProgressDone(task);
    return true;
}
/**
 * Reverts the VM to the named snapshot and waits for the task.
 *
 * @param snapshotName name of the snapshot to revert to
 * @return true on success; false when the snapshot is not found or the task fails
 */
public boolean revertToSnapshot(String snapshotName) throws Exception {
    ManagedObjectReference morSnapshot = getSnapshotMor(snapshotName);
    if (morSnapshot == null) {
        s_logger.warn("Unable to find snapshot: " + snapshotName);
        return false;
    }
    ManagedObjectReference task = _context.getService().revertToSnapshotTask(morSnapshot, _mor, null);
    if (!_context.getVimClient().waitForTask(task)) {
        s_logger.error("VMware revert to snapshot failed due to " + TaskMO.getTaskFailureInfo(_context, task));
        return false;
    }
    _context.waitForTaskProgressDone(task);
    return true;
}
/**
 * Deletes all of the snapshots of a VM (RemoveAllSnapshots task with disk
 * consolidation) and waits for completion.
 *
 * Fix: the failure message was copy-pasted from a register-VM path and
 * claimed "Unable to register VM"; it now describes the actual operation.
 *
 * @throws Exception when the remove-all-snapshots task fails
 */
public void consolidateAllSnapshots() throws Exception {
    ManagedObjectReference task = _context.getService().removeAllSnapshotsTask(_mor, true);
    boolean result = _context.getVimClient().waitForTask(task);
    if (result) {
        _context.waitForTaskProgressDone(task);
    } else {
        throw new Exception("Unable to remove all snapshots of VM due to the following issue: " + TaskMO.getTaskFailureInfo(_context, task));
    }
}
/**
 * Removes every root snapshot (and its children) of this VM, one task per
 * root, stopping at the first failure.
 *
 * @return true when the VM has no snapshots or all removals succeed;
 *         false as soon as one removal task fails
 */
public boolean removeAllSnapshots() throws Exception {
    VirtualMachineSnapshotInfo snapshotInfo = getSnapshotInfo();
    if (snapshotInfo == null || snapshotInfo.getRootSnapshotList() == null) {
        return true;
    }
    for (VirtualMachineSnapshotTree root : snapshotInfo.getRootSnapshotList()) {
        ManagedObjectReference task = _context.getService().removeSnapshotTask(root.getSnapshot(), true, true);
        if (!_context.getVimClient().waitForTask(task)) {
            s_logger.error("VMware removeSnapshot_Task failed due to " + TaskMO.getTaskFailureInfo(_context, task));
            return false;
        }
        _context.waitForTaskProgressDone(task);
    }
    return true;
}
/**
 * Converts a snapshot disk file name into a fully qualified datastore path
 * ("[dsName] relative/path").
 *
 * @param vmFileInfo VM file info whose snapshot directory is used for relative names
 * @param datastoreMounts (datastore MOR, mount point) pairs of the host, used to
 *        resolve absolute ("/"-prefixed) file paths
 * @param snapshotDiskFile disk file name, either absolute or relative
 * @return the datastore path; an absolute path that matches no mount is
 *         returned unchanged
 */
public String
getSnapshotDiskFileDatastorePath(VirtualMachineFileInfo vmFileInfo, List<Pair<ManagedObjectReference, String>> datastoreMounts, String snapshotDiskFile)
        throws Exception {
    // if file path start with "/", need to search all datastore mounts on the host in order
    // to form fully qualified datastore path
    if (snapshotDiskFile.startsWith("/")) {
        for (Pair<ManagedObjectReference, String> mount : datastoreMounts) {
            if (snapshotDiskFile.startsWith(mount.second())) {
                DatastoreMO dsMo = new DatastoreMO(_context, mount.first());
                String dsFullPath = String.format("[%s] %s", dsMo.getName(), snapshotDiskFile.substring(mount.second().length() + 1));
                s_logger.info("Convert snapshot disk file name to datastore path. " + snapshotDiskFile + "->" + dsFullPath);
                return dsFullPath;
            }
        }
        s_logger.info("Convert snapshot disk file name to datastore path. " + snapshotDiskFile + "->" + snapshotDiskFile);
        return snapshotDiskFile;
    } else {
        // snapshot directory string from VirtualMachineFileInfo ends with /
        String dsFullPath = vmFileInfo.getSnapshotDirectory() + snapshotDiskFile;
        s_logger.info("Convert snapshot disk file name to datastore path. " + snapshotDiskFile + "->" + dsFullPath);
        return dsFullPath;
    }
}
/**
 * Downloads and parses this VM's snapshot descriptor (.vmsd) file from the
 * datastore via the datastore-browse URL.
 *
 * @return the parsed descriptor
 * @throws Exception on vSphere API or download errors
 */
public SnapshotDescriptor getSnapshotDescriptor() throws Exception {
Pair<DatacenterMO, String> dcPair = getOwnerDatacenter();
String dsPath = getSnapshotDescriptorDatastorePath();
assert (dsPath != null);
String url = getContext().composeDatastoreBrowseUrl(dcPair.second(), dsPath);
byte[] content = getContext().getResourceContent(url);
// NOTE(review): only a warning is logged on empty content, yet parse() is
// still invoked below — confirm SnapshotDescriptor.parse tolerates null/empty.
if (content == null || content.length < 1) {
s_logger.warn("Snapshot descriptor file (vsd) does not exist anymore?");
}
SnapshotDescriptor descriptor = new SnapshotDescriptor();
descriptor.parse(content);
return descriptor;
}
/**
 * Computes the datastore path of this VM's snapshot descriptor (.vmsd) file.
 * The .vmsd lives next to the .vmx and is named "&lt;vm-name&gt;.vmsd", so the
 * VM's "name" and "config.files" properties are fetched in one
 * PropertyCollector call and combined.
 *
 * @return datastore path of the .vmsd file
 * @throws Exception on vSphere API errors
 */
public String getSnapshotDescriptorDatastorePath() throws Exception {
// Request both the VM name and its file layout in a single retrieval.
PropertySpec pSpec = new PropertySpec();
pSpec.setType("VirtualMachine");
pSpec.getPathSet().add("name");
pSpec.getPathSet().add("config.files");
ObjectSpec oSpec = new ObjectSpec();
oSpec.setObj(_mor);
oSpec.setSkip(Boolean.FALSE);
PropertyFilterSpec pfSpec = new PropertyFilterSpec();
pfSpec.getPropSet().add(pSpec);
pfSpec.getObjectSet().add(oSpec);
List<PropertyFilterSpec> pfSpecArr = new ArrayList<PropertyFilterSpec>();
pfSpecArr.add(pfSpec);
List<ObjectContent> ocs = _context.getService().retrieveProperties(_context.getPropertyCollector(), pfSpecArr);
assert (ocs != null);
String vmName = null;
VirtualMachineFileInfo fileInfo = null;
// Exactly one object (this VM) with exactly two properties is expected;
// the asserts below only fire when assertions are enabled.
assert (ocs.size() == 1);
for (ObjectContent oc : ocs) {
List<DynamicProperty> props = oc.getPropSet();
if (props != null) {
assert (props.size() == 2);
for (DynamicProperty prop : props) {
if (prop.getName().equals("name")) {
vmName = prop.getVal().toString();
} else {
// the only other requested property is "config.files"
fileInfo = (VirtualMachineFileInfo)prop.getVal();
}
}
}
}
assert (vmName != null);
assert (fileInfo != null);
// .vmsd file exists at the same directory of .vmx file
DatastoreFile vmxFile = new DatastoreFile(fileInfo.getVmPathName());
return vmxFile.getCompanionPath(vmName + ".vmsd");
}
/**
 * Looks up the managed object reference of the snapshot with the given name.
 *
 * @param snapshotName snapshot name to search for in the snapshot tree
 * @return the snapshot MOR, or null when the VM has no snapshots or no match
 * @throws Exception on vSphere API errors
 */
public ManagedObjectReference getSnapshotMor(String snapshotName) throws Exception {
    VirtualMachineSnapshotInfo snapshotInfo = getSnapshotInfo();
    if (snapshotInfo == null) {
        return null;
    }
    return VmwareHelper.findSnapshotInTree(snapshotInfo.getRootSnapshotList(), snapshotName);
}
/**
 * Tells whether this VM currently has any snapshot, either a current
 * snapshot pointer or a non-empty root snapshot list.
 *
 * @return true if at least one snapshot exists
 * @throws Exception on vSphere API errors
 */
public boolean hasSnapshot() throws Exception {
    VirtualMachineSnapshotInfo snapshotInfo = getSnapshotInfo();
    if (snapshotInfo == null) {
        return false;
    }
    if (snapshotInfo.getCurrentSnapshot() != null) {
        return true;
    }
    List<VirtualMachineSnapshotTree> roots = snapshotInfo.getRootSnapshotList();
    return roots != null && !roots.isEmpty();
}
/**
 * Creates a full (non-linked) clone of this VM, powered off and not marked
 * as a template, on the given datastore and resource pool.
 *
 * @param cloneName       name for the cloned VM
 * @param morFolder       destination VM folder
 * @param morResourcePool destination resource pool
 * @param morDs           destination datastore
 * @return true when the clone task completed successfully
 * @throws Exception on vSphere API errors
 */
public boolean createFullClone(String cloneName, ManagedObjectReference morFolder, ManagedObjectReference morResourcePool, ManagedObjectReference morDs)
    throws Exception {
    VirtualMachineRelocateSpec relocateSpec = new VirtualMachineRelocateSpec();
    relocateSpec.setDatastore(morDs);
    relocateSpec.setPool(morResourcePool);

    VirtualMachineCloneSpec cloneSpec = new VirtualMachineCloneSpec();
    cloneSpec.setLocation(relocateSpec);
    cloneSpec.setPowerOn(false);
    cloneSpec.setTemplate(false);

    ManagedObjectReference morTask = _context.getService().cloneVMTask(_mor, morFolder, cloneName, cloneSpec);
    if (_context.getVimClient().waitForTask(morTask)) {
        _context.waitForTaskProgressDone(morTask);
        return true;
    }
    s_logger.error("VMware cloneVM_Task failed due to " + TaskMO.getTaskFailureInfo(_context, morTask));
    return false;
}
/**
 * Creates a linked clone of this VM based on the given snapshot. Independent
 * disks get explicit disk locators that fully move their backings (child
 * backings cannot be shared); all other disks receive new child delta
 * backings. The clone is created powered off and not as a template.
 *
 * @return true when the clone task completed successfully
 * @throws Exception on vSphere API errors
 */
public boolean createLinkedClone(String cloneName, ManagedObjectReference morBaseSnapshot, ManagedObjectReference morFolder, ManagedObjectReference morResourcePool,
    ManagedObjectReference morDs) throws Exception {
    assert (morBaseSnapshot != null);
    assert (morFolder != null);
    assert (morResourcePool != null);
    assert (morDs != null);

    VirtualDisk[] independentDisks = getAllIndependentDiskDevice();

    VirtualMachineRelocateSpec relocateSpec = new VirtualMachineRelocateSpec();
    if (independentDisks.length > 0) {
        // Independent disks are excluded from snapshots, so relocate each one
        // with a full backing move instead of a shared child backing.
        List<VirtualMachineRelocateSpecDiskLocator> locators = new ArrayList<VirtualMachineRelocateSpecDiskLocator>(independentDisks.length);
        for (VirtualDisk disk : independentDisks) {
            VirtualMachineRelocateSpecDiskLocator locator = new VirtualMachineRelocateSpecDiskLocator();
            locator.setDatastore(morDs);
            locator.setDiskId(disk.getKey());
            locator.setDiskMoveType(VirtualMachineRelocateDiskMoveOptions.MOVE_ALL_DISK_BACKINGS_AND_DISALLOW_SHARING.value());
            locators.add(locator);
        }
        relocateSpec.getDisk().addAll(locators);
    }
    relocateSpec.setDiskMoveType(VirtualMachineRelocateDiskMoveOptions.CREATE_NEW_CHILD_DISK_BACKING.value());
    relocateSpec.setPool(morResourcePool);

    VirtualMachineCloneSpec cloneSpec = new VirtualMachineCloneSpec();
    cloneSpec.setPowerOn(false);
    cloneSpec.setTemplate(false);
    cloneSpec.setLocation(relocateSpec);
    cloneSpec.setSnapshot(morBaseSnapshot);

    ManagedObjectReference morTask = _context.getService().cloneVMTask(_mor, morFolder, cloneName, cloneSpec);
    if (_context.getVimClient().waitForTask(morTask)) {
        _context.waitForTaskProgressDone(morTask);
        return true;
    }
    s_logger.error("VMware cloneVM_Task failed due to " + TaskMO.getTaskFailureInfo(_context, morTask));
    return false;
}
/**
 * Fetches the VM's "runtime" property (power state, host, etc.).
 *
 * @throws Exception on vSphere API errors
 */
public VirtualMachineRuntimeInfo getRuntimeInfo() throws Exception {
    Object runtime = _context.getVimClient().getDynamicProperty(_mor, "runtime");
    return (VirtualMachineRuntimeInfo) runtime;
}
/**
 * Fetches the VM's "config" property (full configuration info).
 *
 * @throws Exception on vSphere API errors
 */
public VirtualMachineConfigInfo getConfigInfo() throws Exception {
    Object config = _context.getVimClient().getDynamicProperty(_mor, "config");
    return (VirtualMachineConfigInfo) config;
}
/**
 * Reports the VM's "runtime.toolsInstallerMounted" flag, i.e. whether the
 * VMware Tools installer image is currently mounted.
 * NOTE(review): the returned Boolean is auto-unboxed to boolean here; if the
 * property were ever absent/null this would throw an NPE — confirm against
 * the vim client's behavior.
 *
 * @throws Exception on vSphere API errors
 */
public boolean isToolsInstallerMounted() throws Exception {
return _context.getVimClient().getDynamicProperty(_mor, "runtime.toolsInstallerMounted");
}
/**
 * Fetches the VM's "guest" property (guest OS state as reported by Tools).
 *
 * @throws Exception on vSphere API errors
 */
public GuestInfo getGuestInfo() throws Exception {
    Object guest = _context.getVimClient().getDynamicProperty(_mor, "guest");
    return (GuestInfo) guest;
}
/**
 * Fetches the VM's "summary.config" property (condensed configuration summary).
 *
 * @throws Exception on vSphere API errors
 */
public VirtualMachineConfigSummary getConfigSummary() throws Exception {
    Object summary = _context.getVimClient().getDynamicProperty(_mor, "summary.config");
    return (VirtualMachineConfigSummary) summary;
}
/**
 * Fetches the VM's "config.files" property (vmx path, log/snapshot dirs).
 *
 * @throws Exception on vSphere API errors
 */
public VirtualMachineFileInfo getFileInfo() throws Exception {
    Object files = _context.getVimClient().getDynamicProperty(_mor, "config.files");
    return (VirtualMachineFileInfo) files;
}
/**
 * Retrieves the VM's extended file layout ("layoutEx") through the property
 * collector.
 *
 * @return the file layout, or null when the property could not be retrieved
 * @throws Exception on vSphere API errors
 */
public VirtualMachineFileLayoutEx getFileLayout() throws Exception {
    PropertySpec propSpec = new PropertySpec();
    propSpec.setType("VirtualMachine");
    propSpec.getPathSet().add("layoutEx");

    ObjectSpec objSpec = new ObjectSpec();
    objSpec.setObj(_mor);
    objSpec.setSkip(Boolean.FALSE);

    PropertyFilterSpec filterSpec = new PropertyFilterSpec();
    filterSpec.getPropSet().add(propSpec);
    filterSpec.getObjectSet().add(objSpec);

    List<PropertyFilterSpec> filterSpecs = new ArrayList<PropertyFilterSpec>();
    filterSpecs.add(filterSpec);

    List<ObjectContent> contents = _context.getService().retrieveProperties(_context.getPropertyCollector(), filterSpecs);

    VirtualMachineFileLayoutEx layout = null;
    if (contents != null) {
        for (ObjectContent content : contents) {
            List<DynamicProperty> props = content.getPropSet();
            if (props == null) {
                continue;
            }
            for (DynamicProperty prop : props) {
                if (prop.getName().equals("layoutEx")) {
                    layout = (VirtualMachineFileLayoutEx)prop.getVal();
                    break;
                }
            }
        }
    }
    return layout;
}
/**
 * Returns the managed object reference of this VM's parent folder/entity
 * (the "parent" property).
 *
 * @throws Exception on vSphere API errors
 */
@Override
public ManagedObjectReference getParentMor() throws Exception {
    Object parent = _context.getVimClient().getDynamicProperty(_mor, "parent");
    return (ManagedObjectReference) parent;
}
/**
 * Lists the names of all networks this VM is connected to, by traversing the
 * VM's "network" property through the property collector.
 *
 * @return network names; empty array when the VM has none (never null)
 * @throws Exception on vSphere API errors
 */
public String[] getNetworks() throws Exception {
    PropertySpec propSpec = new PropertySpec();
    propSpec.setType("Network");
    propSpec.getPathSet().add("name");

    // Traverse from this VM to each attached Network object.
    TraversalSpec traversal = new TraversalSpec();
    traversal.setType("VirtualMachine");
    traversal.setPath("network");
    traversal.setName("vm2NetworkTraversal");

    ObjectSpec objSpec = new ObjectSpec();
    objSpec.setObj(_mor);
    objSpec.setSkip(Boolean.TRUE); // collect only the traversed networks, not the VM itself
    objSpec.getSelectSet().add(traversal);

    PropertyFilterSpec filterSpec = new PropertyFilterSpec();
    filterSpec.getPropSet().add(propSpec);
    filterSpec.getObjectSet().add(objSpec);

    List<PropertyFilterSpec> filterSpecs = new ArrayList<PropertyFilterSpec>();
    filterSpecs.add(filterSpec);

    List<ObjectContent> contents = _context.getService().retrieveProperties(_context.getPropertyCollector(), filterSpecs);

    List<String> names = new ArrayList<String>();
    if (contents != null) {
        for (ObjectContent content : contents) {
            names.add(content.getPropSet().get(0).getVal().toString());
        }
    }
    return names.toArray(new String[0]);
}
/**
 * Lists all networks this VM is attached to, together with each network's
 * name, the VMs on it, and the value of the CloudStack garbage-collection
 * custom field ("cloud.gc" for standard networks, or the dvPortgroup variant
 * when the first key lookup returns 0).
 *
 * @return one {@link NetworkDetails} per attached network; empty list when none
 * @throws Exception on vSphere API errors
 */
public List<NetworkDetails> getNetworksWithDetails() throws Exception {
List<NetworkDetails> networks = new ArrayList<NetworkDetails>();
// Resolve the custom-field key carrying the GC tag; 0 means "not defined"
// for standard networks, so fall back to the dvPortgroup key.
int gcTagKey = getCustomFieldKey("Network", CustomFieldConstants.CLOUD_GC);
if (gcTagKey == 0) {
gcTagKey = getCustomFieldKey("DistributedVirtualPortgroup", CustomFieldConstants.CLOUD_GC_DVP);
s_logger.debug("The custom key for dvPortGroup is : " + gcTagKey);
}
// Collect name, vm list, and the custom field value (addressed as "value[<key>]").
PropertySpec pSpec = new PropertySpec();
pSpec.setType("Network");
pSpec.getPathSet().add("name");
pSpec.getPathSet().add("vm");
pSpec.getPathSet().add(String.format("value[%d]", gcTagKey));
// Traverse from this VM to its attached networks.
TraversalSpec vm2NetworkTraversal = new TraversalSpec();
vm2NetworkTraversal.setType("VirtualMachine");
vm2NetworkTraversal.setPath("network");
vm2NetworkTraversal.setName("vm2NetworkTraversal");
ObjectSpec oSpec = new ObjectSpec();
oSpec.setObj(_mor);
oSpec.setSkip(Boolean.TRUE);
oSpec.getSelectSet().add(vm2NetworkTraversal);
PropertyFilterSpec pfSpec = new PropertyFilterSpec();
pfSpec.getPropSet().add(pSpec);
pfSpec.getObjectSet().add(oSpec);
List<PropertyFilterSpec> pfSpecArr = new ArrayList<PropertyFilterSpec>();
pfSpecArr.add(pfSpec);
List<ObjectContent> ocs = _context.getService().retrieveProperties(_context.getPropertyCollector(), pfSpecArr);
if (ocs != null && ocs.size() > 0) {
for (ObjectContent oc : ocs) {
ArrayOfManagedObjectReference morVms = null;
String gcTagValue = null;
String name = null;
for (DynamicProperty prop : oc.getPropSet()) {
if (prop.getName().equals("name"))
name = prop.getVal().toString();
else if (prop.getName().equals("vm"))
morVms = (ArrayOfManagedObjectReference)prop.getVal();
else if (prop.getName().startsWith("value[")) {
// the GC custom-field value for this network, if set
CustomFieldStringValue val = (CustomFieldStringValue)prop.getVal();
if (val != null)
gcTagValue = val.getValue();
}
}
NetworkDetails details =
new NetworkDetails(name, oc.getObj(), (morVms != null ? morVms.getManagedObjectReference().toArray(
new ManagedObjectReference[morVms.getManagedObjectReference().size()]) : null), gcTagValue);
networks.add(details);
}
s_logger.debug("Retrieved " + networks.size() + " networks with key : " + gcTagKey);
}
return networks;
}
/**
 * Lists all datastores this VM has files on, by traversing the VM's
 * "datastore" property through the property collector.
 *
 * @return one {@link DatastoreMO} per datastore; empty list when none
 * @throws Exception on vSphere API errors
 */
public List<DatastoreMO> getAllDatastores() throws Exception {
    PropertySpec propSpec = new PropertySpec();
    propSpec.setType("Datastore");
    propSpec.getPathSet().add("name");

    // Traverse from this VM to each datastore it uses.
    TraversalSpec traversal = new TraversalSpec();
    traversal.setType("VirtualMachine");
    traversal.setPath("datastore");
    traversal.setName("vmDatastoreTraversal");

    ObjectSpec objSpec = new ObjectSpec();
    objSpec.setObj(_mor);
    objSpec.setSkip(Boolean.TRUE); // collect only traversed datastores
    objSpec.getSelectSet().add(traversal);

    PropertyFilterSpec filterSpec = new PropertyFilterSpec();
    filterSpec.getPropSet().add(propSpec);
    filterSpec.getObjectSet().add(objSpec);

    List<PropertyFilterSpec> filterSpecs = new ArrayList<PropertyFilterSpec>();
    filterSpecs.add(filterSpec);

    List<ObjectContent> contents = _context.getService().retrieveProperties(_context.getPropertyCollector(), filterSpecs);

    List<DatastoreMO> datastores = new ArrayList<DatastoreMO>();
    if (CollectionUtils.isNotEmpty(contents)) {
        for (ObjectContent content : contents) {
            datastores.add(new DatastoreMO(_context, content.getObj()));
        }
    }
    return datastores;
}
/**
 * Retrieve path info to access VM files via vSphere web interface
 * @return [0] vm-name, [1] data-center-name, [2] datastore-name
 * @throws Exception
 */
public String[] getHttpAccessPathInfo() throws Exception {
    String[] pathInfo = new String[3];

    Pair<DatacenterMO, String> dcInfo = getOwnerDatacenter();

    VirtualMachineFileInfo fileInfo = getFileInfo();
    String vmxFilePath = fileInfo.getVmPathName();
    // vmxFilePath looks like "[<datastore>] <vm-dir>/<vm>.vmx"; splitting on
    // '[', ']' and '/' yields { "", "<datastore>", " <vm-dir>", "<vm>.vmx" }
    // (same token layout getVmxHttpAccessUrl() relies on).
    String vmxPathTokens[] = vmxFilePath.split("\\[|\\]|/");
    assert (vmxPathTokens.length == 4);
    // BUGFIX: the previous version wrote indices 1..3 into this length-3
    // array (guaranteed ArrayIndexOutOfBoundsException at [3]) and mapped
    // the datastore token as the vm name. Indices now match the javadoc.
    pathInfo[0] = vmxPathTokens[2].trim(); // vSphere vm name (directory token)
    pathInfo[1] = dcInfo.second(); // vSphere datacenter name
    pathInfo[2] = vmxPathTokens[1].trim(); // vSphere datastore name
    return pathInfo;
}
/**
 * Composes the HTTPS datastore-browse URL for this VM's .vmx file, i.e.
 * https://&lt;server&gt;/folder/&lt;vm-dir&gt;/&lt;file&gt;.vmx?dcPath=&lt;dc&gt;&amp;dsName=&lt;ds&gt;,
 * with the path, dcPath and dsName components URL-encoded.
 *
 * @return the browse URL for the vmx file
 * @throws Exception on vSphere API errors
 */
public String getVmxHttpAccessUrl() throws Exception {
    Pair<DatacenterMO, String> dcInfo = getOwnerDatacenter();

    VirtualMachineFileInfo fileInfo = getFileInfo();
    String vmxFilePath = fileInfo.getVmPathName();
    // "[<datastore>] <vm-dir>/<file>.vmx" splits into
    // { "", "<datastore>", " <vm-dir>", "<file>.vmx" }
    String vmxPathTokens[] = vmxFilePath.split("\\[|\\]|/");

    // StringBuilder instead of StringBuffer: this buffer is a local used by a
    // single thread, so the synchronized StringBuffer buys nothing.
    StringBuilder sb = new StringBuilder("https://" + _context.getServerAddress() + "/folder/");
    sb.append(URLEncoder.encode(vmxPathTokens[2].trim(), "UTF-8"));
    sb.append("/");
    sb.append(URLEncoder.encode(vmxPathTokens[3].trim(), "UTF-8"));
    sb.append("?dcPath=");
    sb.append(URLEncoder.encode(dcInfo.second(), "UTF-8"));
    sb.append("&dsName=");
    sb.append(URLEncoder.encode(vmxPathTokens[1].trim(), "UTF-8"));

    return sb.toString();
}
/**
 * Reconfigures the VM's VNC console extra-config options (enable flag,
 * password, port and keyboard layout).
 *
 * @return true when the reconfigure task succeeded
 * @throws Exception on vSphere API errors
 */
public boolean setVncConfigInfo(boolean enableVnc, String vncPassword, int vncPort, String keyboard) throws Exception {
    OptionValue[] vncOptions = VmwareHelper.composeVncOptions(null, enableVnc, vncPassword, vncPort, keyboard);

    VirtualMachineConfigSpec configSpec = new VirtualMachineConfigSpec();
    configSpec.getExtraConfig().addAll(Arrays.asList(vncOptions));

    ManagedObjectReference morTask = _context.getService().reconfigVMTask(_mor, configSpec);
    if (_context.getVimClient().waitForTask(morTask)) {
        _context.waitForTaskProgressDone(morTask);
        return true;
    }
    s_logger.error("VMware reconfigVM_Task failed due to " + TaskMO.getTaskFailureInfo(_context, morTask));
    return false;
}
/**
 * Applies an arbitrary reconfiguration spec to this VM and waits for the
 * task to complete.
 *
 * @param vmConfigSpec the reconfiguration to apply
 * @return true when the reconfigure task succeeded
 * @throws Exception on vSphere API errors
 */
public boolean configureVm(VirtualMachineConfigSpec vmConfigSpec) throws Exception {
    ManagedObjectReference morTask = _context.getService().reconfigVMTask(_mor, vmConfigSpec);
    if (_context.getVimClient().waitForTask(morTask)) {
        _context.waitForTaskProgressDone(morTask);
        return true;
    }
    s_logger.error("VMware reconfigVM_Task failed due to " + TaskMO.getTaskFailureInfo(_context, morTask));
    return false;
}
/**
 * Reconfigures the VM with a batch of device changes, each given as a
 * (device, operation, file-operation) triple.
 *
 * @param devices device change triples; must not be null
 * @return true when the reconfigure task succeeded
 * @throws Exception on vSphere API errors
 */
public boolean configureVm(Ternary<VirtualDevice, VirtualDeviceConfigSpecOperation, VirtualDeviceConfigSpecFileOperation>[] devices) throws Exception {
    assert (devices != null);

    VirtualMachineConfigSpec configSpec = new VirtualMachineConfigSpec();
    for (Ternary<VirtualDevice, VirtualDeviceConfigSpecOperation, VirtualDeviceConfigSpecFileOperation> change : devices) {
        VirtualDeviceConfigSpec deviceSpec = new VirtualDeviceConfigSpec();
        deviceSpec.setDevice(change.first());
        deviceSpec.setOperation(change.second());
        deviceSpec.setFileOperation(change.third());
        configSpec.getDeviceChange().add(deviceSpec);
    }

    ManagedObjectReference morTask = _context.getService().reconfigVMTask(_mor, configSpec);
    if (_context.getVimClient().waitForTask(morTask)) {
        _context.waitForTaskProgressDone(morTask);
        return true;
    }
    s_logger.error("VMware reconfigVM_Task failed due to " + TaskMO.getTaskFailureInfo(_context, morTask));
    return false;
}
/**
 * Returns the running host's IP together with the VM's configured VNC port.
 *
 * @param hostNetworkName host network used to resolve the host IP
 * @return (host IP, port); port is 0 when no "RemoteDisplay.vnc.port"
 *         extra-config option is present
 * @throws Exception on vSphere API errors
 */
public Pair<String, Integer> getVncPort(String hostNetworkName) throws Exception {
    HostMO hostMo = getRunningHost();
    VmwareHypervisorHostNetworkSummary summary = hostMo.getHyperHostNetworkSummary(hostNetworkName);

    List<OptionValue> options = getConfigInfo().getExtraConfig();
    if (options != null) {
        for (OptionValue option : options) {
            if (!option.getKey().equals("RemoteDisplay.vnc.port")) {
                continue;
            }
            String port = (String)option.getValue();
            if (port != null) {
                return new Pair<String, Integer>(summary.getHostIp(), Integer.parseInt(port));
            }
        }
    }
    return new Pair<String, Integer>(summary.getHostIp(), 0);
}
// vmdkDatastorePath: [datastore name] vmdkFilePath
/**
 * Creates a new thin-provisioned, persistent-mode virtual disk and attaches
 * it to this VM. Convenience overload of the full createDisk().
 *
 * @param vmdkDatastorePath datastore path of the new vmdk ("[ds] path")
 * @param sizeInMb          disk size in megabytes
 * @param morDs             datastore the disk is created on
 * @param controllerKey     controller to attach to; a negative value makes
 *                          the full overload fall back to the IDE controller
 * @throws Exception if the reconfigure task fails
 */
public void createDisk(String vmdkDatastorePath, long sizeInMb, ManagedObjectReference morDs, int controllerKey) throws Exception {
createDisk(vmdkDatastorePath, VirtualDiskType.THIN, VirtualDiskMode.PERSISTENT, null, sizeInMb, morDs, controllerKey);
}
// vmdkDatastorePath: [datastore name] vmdkFilePath
/**
 * Creates a new virtual disk and attaches it to this VM via a reconfigure
 * task. Supports flat backings (THIN / PREALLOCATED / EAGER_ZEROED_THICK)
 * and raw device mappings (RDM virtual mode / RDMP physical mode).
 *
 * @param vmdkDatastorePath datastore path of the new vmdk ("[ds] path")
 * @param diskType          backing type to create
 * @param diskMode          disk mode (NOTE(review): flat backings below are
 *                          hard-wired to PERSISTENT regardless of this
 *                          parameter — confirm that is intended)
 * @param rdmDeviceName     raw device name; used only for RDM/RDMP
 * @param sizeInMb          disk capacity in megabytes
 * @param morDs             datastore the disk is created on
 * @param controllerKey     controller to attach to; negative falls back to IDE
 * @throws Exception if the reconfigure task fails
 */
public void createDisk(String vmdkDatastorePath, VirtualDiskType diskType, VirtualDiskMode diskMode, String rdmDeviceName, long sizeInMb,
ManagedObjectReference morDs, int controllerKey) throws Exception {
if (s_logger.isTraceEnabled())
s_logger.trace("vCenter API trace - createDisk(). target MOR: " + _mor.getValue() + ", vmdkDatastorePath: " + vmdkDatastorePath + ", sizeInMb: " + sizeInMb +
", diskType: " + diskType + ", diskMode: " + diskMode + ", rdmDeviceName: " + rdmDeviceName + ", datastore: " + morDs.getValue() + ", controllerKey: " +
controllerKey);
assert (vmdkDatastorePath != null);
assert (morDs != null);
// Negative controller key means "caller does not care" -> use the IDE controller.
int ideControllerKey = getIDEDeviceControllerKey();
if (controllerKey < 0) {
controllerKey = ideControllerKey;
}
VirtualDisk newDisk = new VirtualDisk();
if (diskType == VirtualDiskType.THIN || diskType == VirtualDiskType.PREALLOCATED || diskType == VirtualDiskType.EAGER_ZEROED_THICK) {
// Flat file-backed disk: thin vs thick and eager-zeroing are flags on the backing.
VirtualDiskFlatVer2BackingInfo backingInfo = new VirtualDiskFlatVer2BackingInfo();
backingInfo.setDiskMode(VirtualDiskMode.PERSISTENT.value());
if (diskType == VirtualDiskType.THIN) {
backingInfo.setThinProvisioned(true);
} else {
backingInfo.setThinProvisioned(false);
}
if (diskType == VirtualDiskType.EAGER_ZEROED_THICK) {
backingInfo.setEagerlyScrub(true);
} else {
backingInfo.setEagerlyScrub(false);
}
backingInfo.setDatastore(morDs);
backingInfo.setFileName(vmdkDatastorePath);
newDisk.setBacking(backingInfo);
} else if (diskType == VirtualDiskType.RDM || diskType == VirtualDiskType.RDMP) {
// Raw device mapping: RDM = virtual compatibility, RDMP = physical.
VirtualDiskRawDiskMappingVer1BackingInfo backingInfo = new VirtualDiskRawDiskMappingVer1BackingInfo();
if (diskType == VirtualDiskType.RDM) {
backingInfo.setCompatibilityMode("virtualMode");
} else {
backingInfo.setCompatibilityMode("physicalMode");
}
backingInfo.setDeviceName(rdmDeviceName);
if (diskType == VirtualDiskType.RDM) {
backingInfo.setDiskMode(VirtualDiskMode.PERSISTENT.value());
}
backingInfo.setDatastore(morDs);
backingInfo.setFileName(vmdkDatastorePath);
newDisk.setBacking(backingInfo);
}
int deviceNumber = getNextDeviceNumber(controllerKey);
newDisk.setControllerKey(controllerKey);
// Negative key = temporary key for a device being added in this spec.
newDisk.setKey(-deviceNumber);
newDisk.setUnitNumber(deviceNumber);
newDisk.setCapacityInKB(sizeInMb * 1024);
VirtualMachineConfigSpec reConfigSpec = new VirtualMachineConfigSpec();
VirtualDeviceConfigSpec deviceConfigSpec = new VirtualDeviceConfigSpec();
deviceConfigSpec.setDevice(newDisk);
deviceConfigSpec.setFileOperation(VirtualDeviceConfigSpecFileOperation.CREATE);
deviceConfigSpec.setOperation(VirtualDeviceConfigSpecOperation.ADD);
reConfigSpec.getDeviceChange().add(deviceConfigSpec);
ManagedObjectReference morTask = _context.getService().reconfigVMTask(_mor, reConfigSpec);
boolean result = _context.getVimClient().waitForTask(morTask);
if (!result) {
if (s_logger.isTraceEnabled())
s_logger.trace("vCenter API trace - createDisk() done(failed)");
throw new Exception("Unable to create disk " + vmdkDatastorePath + " due to " + TaskMO.getTaskFailureInfo(_context, morTask));
}
_context.waitForTaskProgressDone(morTask);
if (s_logger.isTraceEnabled())
s_logger.trace("vCenter API trace - createDisk() done(successfully)");
}
/**
 * Rewrites the adapter-type field in a VMDK descriptor file so that it
 * matches the disk controller the disk is being attached to. No-op for
 * VMFS sparse files and when the adapter type already matches.
 *
 * @param vmdkFileName   datastore path of the vmdk descriptor
 * @param diskController controller name to derive the adapter type from
 * @throws Exception when the controller maps to no valid vmdk adapter type,
 *                   or on download/upload errors
 */
public void updateVmdkAdapter(String vmdkFileName, String diskController) throws Exception {
DiskControllerType diskControllerType = DiskControllerType.getType(diskController);
VmdkAdapterType vmdkAdapterType = VmdkAdapterType.getAdapterType(diskControllerType);
if (vmdkAdapterType == VmdkAdapterType.none) {
String message = "Failed to attach disk due to invalid vmdk adapter type for vmdk file [" +
vmdkFileName + "] with controller : " + diskControllerType;
s_logger.debug(message);
throw new Exception(message);
}
String newAdapterType = vmdkAdapterType.toString();
Pair<VmdkFileDescriptor, byte[]> vmdkInfo = getVmdkFileInfo(vmdkFileName);
VmdkFileDescriptor vmdkFileDescriptor = vmdkInfo.first();
boolean isVmfsSparseFile = vmdkFileDescriptor.isVmfsSparseFile();
if (!isVmfsSparseFile) {
String currentAdapterType = vmdkFileDescriptor.getAdapterType();
if (!currentAdapterType.equalsIgnoreCase(newAdapterType)) {
// Patch the descriptor bytes and upload them back over the original file.
s_logger.info("Updating adapter type to " + newAdapterType + " for VMDK file " + vmdkFileName);
Pair<DatacenterMO, String> dcInfo = getOwnerDatacenter();
byte[] newVmdkContent = vmdkFileDescriptor.changeVmdkAdapterType(vmdkInfo.second(), newAdapterType);
String vmdkUploadUrl = getContext().composeDatastoreBrowseUrl(dcInfo.first().getName(), vmdkFileName);
getContext().uploadResourceContent(vmdkUploadUrl, newVmdkContent);
s_logger.info("Updated VMDK file " + vmdkFileName);
}
}
}
/**
 * Validates the adapter type recorded in a VMDK descriptor and rewrites it
 * to "lsilogic" when it is one of the types VMware rejects for attachment
 * (lsisas1068 / pvscsi). VMFS sparse files are left untouched.
 *
 * @param vmdkFileName datastore path of the vmdk descriptor
 * @throws Exception on download/upload or vSphere API errors
 */
public void updateAdapterTypeIfRequired(String vmdkFileName) throws Exception {
// Validate existing adapter type of VMDK file. Update it with a supported adapter type if validation fails.
Pair<VmdkFileDescriptor, byte[]> vmdkInfo = getVmdkFileInfo(vmdkFileName);
VmdkFileDescriptor vmdkFileDescriptor = vmdkInfo.first();
boolean isVmfsSparseFile = vmdkFileDescriptor.isVmfsSparseFile();
if (!isVmfsSparseFile) {
String currentAdapterTypeStr = vmdkFileDescriptor.getAdapterType();
if (s_logger.isTraceEnabled()) {
s_logger.trace("Detected adapter type " + currentAdapterTypeStr + " for VMDK file " + vmdkFileName);
}
VmdkAdapterType currentAdapterType = VmdkAdapterType.getType(currentAdapterTypeStr);
if (currentAdapterType == VmdkAdapterType.none) {
// Value of currentAdapterType can be VmdkAdapterType.none only if adapter type of vmdk is set to either
// lsisas1068 (SAS controller) or pvscsi (Vmware Paravirtual) only. Valid adapter type for those controllers is lsilogic.
// Hence use adapter type lsilogic. Other adapter types ide, lsilogic, buslogic are valid and does not need to be modified.
VmdkAdapterType newAdapterType = VmdkAdapterType.lsilogic;
s_logger.debug("Updating adapter type to " + newAdapterType + " from " + currentAdapterTypeStr + " for VMDK file " + vmdkFileName);
Pair<DatacenterMO, String> dcInfo = getOwnerDatacenter();
byte[] newVmdkContent = vmdkFileDescriptor.changeVmdkAdapterType(vmdkInfo.second(), newAdapterType.toString());
String vmdkUploadUrl = getContext().composeDatastoreBrowseUrl(dcInfo.first().getName(), vmdkFileName);
getContext().uploadResourceContent(vmdkUploadUrl, newVmdkContent);
s_logger.debug("Updated VMDK file " + vmdkFileName);
}
}
}
/**
 * Attaches an existing disk chain to this VM, letting the full overload
 * pick a SCSI controller (diskController == null).
 *
 * @param vmdkDatastorePathChain vmdk chain, top file first ("[ds] path" form)
 * @param morDs                  datastore holding the disk files
 * @throws Exception if the reconfigure task fails
 */
public void attachDisk(String[] vmdkDatastorePathChain, ManagedObjectReference morDs) throws Exception {
attachDisk(vmdkDatastorePathChain, morDs, null);
}
/**
 * Attaches an existing disk (chain of vmdk files) to this VM on the
 * requested controller type. IDE attachment is only possible while the VM
 * is powered off and is bounded by the IDE device limit; for SCSI the unit
 * number is left to vSphere (-1).
 *
 * @param vmdkDatastorePathChain vmdk chain, top file first ("[ds] path" form)
 * @param morDs                  datastore holding the disk files
 * @param diskController         controller name; null/blank selects the
 *                               default SCSI controller
 * @throws Exception when attaching over IDE on a running VM, the IDE device
 *                   limit is reached, or the reconfigure task fails
 */
public void attachDisk(String[] vmdkDatastorePathChain, ManagedObjectReference morDs, String diskController) throws Exception {
if(s_logger.isTraceEnabled())
s_logger.trace("vCenter API trace - attachDisk(). target MOR: " + _mor.getValue() + ", vmdkDatastorePath: "
+ new Gson().toJson(vmdkDatastorePathChain) + ", datastore: " + morDs.getValue());
int controllerKey = 0;
int unitNumber = 0;
if (DiskControllerType.getType(diskController) == DiskControllerType.ide) {
// IDE virtual disk cannot be added if VM is running
if (getPowerState() == VirtualMachinePowerState.POWERED_ON) {
throw new Exception("Adding a virtual disk over IDE controller is not supported while VM is running in VMware hypervisor. Please re-try when VM is not running.");
}
// Get next available unit number and controller key
int ideDeviceCount = getNumberOfIDEDevices();
if (ideDeviceCount >= VmwareHelper.MAX_IDE_CONTROLLER_COUNT * VmwareHelper.MAX_ALLOWED_DEVICES_IDE_CONTROLLER) {
throw new Exception("Maximum limit of devices supported on IDE controllers [" + VmwareHelper.MAX_IDE_CONTROLLER_COUNT
* VmwareHelper.MAX_ALLOWED_DEVICES_IDE_CONTROLLER + "] is reached.");
}
controllerKey = getIDEControllerKey(ideDeviceCount);
unitNumber = getFreeUnitNumberOnIDEController(controllerKey);
} else {
if (StringUtils.isNotBlank(diskController)) {
controllerKey = getScsiDiskControllerKey(diskController);
} else {
controllerKey = getScsiDeviceControllerKey();
}
// -1 lets vSphere choose the unit number on the SCSI controller
unitNumber = -1;
}
// NOTE(review): synchronizing on the interned MOR value string creates a
// JVM-wide lock keyed by the VM id — presumably to serialize concurrent
// reconfigures of the same VM in this process; fragile pattern, confirm.
synchronized (_mor.getValue().intern()) {
VirtualDevice newDisk = VmwareHelper.prepareDiskDevice(this, null, controllerKey, vmdkDatastorePathChain, morDs, unitNumber, 1);
if (StringUtils.isNotBlank(diskController)) {
// Keep the vmdk descriptor's adapter type in sync with the chosen controller.
String vmdkFileName = vmdkDatastorePathChain[0];
updateVmdkAdapter(vmdkFileName, diskController);
}
VirtualMachineConfigSpec reConfigSpec = new VirtualMachineConfigSpec();
VirtualDeviceConfigSpec deviceConfigSpec = new VirtualDeviceConfigSpec();
deviceConfigSpec.setDevice(newDisk);
deviceConfigSpec.setOperation(VirtualDeviceConfigSpecOperation.ADD);
reConfigSpec.getDeviceChange().add(deviceConfigSpec);
ManagedObjectReference morTask = _context.getService().reconfigVMTask(_mor, reConfigSpec);
boolean result = _context.getVimClient().waitForTask(morTask);
if (!result) {
if (s_logger.isTraceEnabled())
s_logger.trace("vCenter API trace - attachDisk() done(failed)");
throw new Exception("Failed to attach disk due to " + TaskMO.getTaskFailureInfo(_context, morTask));
}
_context.waitForTaskProgressDone(morTask);
}
if(s_logger.isTraceEnabled())
s_logger.trace("vCenter API trace - attachDisk() done(successfully)");
}
/**
 * Resolves the bus number of the virtual controller that has the given
 * device key by scanning the VM's hardware device list.
 *
 * @param controllerKey device key of the controller to look up
 * @return the controller's bus number
 * @throws Exception when no controller with that key exists on the VM
 */
private int getControllerBusNumber(int controllerKey) throws Exception {
    List<VirtualDevice> devices = (List<VirtualDevice>)_context.getVimClient().
        getDynamicProperty(_mor, "config.hardware.device");
    if (devices != null) {
        for (VirtualDevice device : devices) {
            if (device instanceof VirtualController && device.getKey() == controllerKey) {
                return ((VirtualController)device).getBusNumber();
            }
        }
    }
    throw new Exception("SCSI Controller with key " + controllerKey + " is Not Found");
}
// vmdkDatastorePath: [datastore name] vmdkFilePath
/**
 * Detaches the disk backed by the given vmdk path from this VM, optionally
 * destroying the backing files, and scrubs references to the detached disk
 * chain out of the VM's snapshot descriptor (.vmsd), which vSphere does not
 * update on its own.
 *
 * @param vmdkDatastorePath datastore path of the disk to detach; matched by
 *                          prefix because snapshots rename backing files
 * @param deleteBackingFile when true the backing files are destroyed
 * @return the (file path, datastore MOR) chain of the detached disk
 * @throws Exception when the disk is not found, an IDE disk is detached
 *                   while the VM runs, or the reconfigure task fails
 */
public List<Pair<String, ManagedObjectReference>> detachDisk(String vmdkDatastorePath, boolean deleteBackingFile) throws Exception {
if (s_logger.isTraceEnabled())
s_logger.trace("vCenter API trace - detachDisk(). target MOR: " + _mor.getValue() + ", vmdkDatastorePath: " + vmdkDatastorePath + ", deleteBacking: " +
deleteBackingFile);
// Note: if VM has been taken snapshot, original backing file will be renamed, therefore, when we try to find the matching
// VirtualDisk, we only perform prefix matching
Pair<VirtualDisk, String> deviceInfo = getDiskDevice(vmdkDatastorePath);
if (deviceInfo == null) {
s_logger.warn("vCenter API trace - detachDisk() done (failed)");
throw new Exception("No such disk device: " + vmdkDatastorePath);
}
// IDE virtual disk cannot be detached if VM is running
if (deviceInfo.second() != null && deviceInfo.second().contains("ide")) {
if (getPowerState() == VirtualMachinePowerState.POWERED_ON) {
throw new Exception("Removing a virtual disk over IDE controller is not supported while VM is running in VMware hypervisor. " +
"Please re-try when VM is not running.");
}
}
// Capture the full backing chain before the device is removed.
List<Pair<String, ManagedObjectReference>> chain = getDiskDatastorePathChain(deviceInfo.first(), true);
VirtualMachineConfigSpec reConfigSpec = new VirtualMachineConfigSpec();
VirtualDeviceConfigSpec deviceConfigSpec = new VirtualDeviceConfigSpec();
deviceConfigSpec.setDevice(deviceInfo.first());
if (deleteBackingFile) {
deviceConfigSpec.setFileOperation(VirtualDeviceConfigSpecFileOperation.DESTROY);
}
deviceConfigSpec.setOperation(VirtualDeviceConfigSpecOperation.REMOVE);
reConfigSpec.getDeviceChange().add(deviceConfigSpec);
ManagedObjectReference morTask = _context.getService().reconfigVMTask(_mor, reConfigSpec);
boolean result = _context.getVimClient().waitForTask(morTask);
if (!result) {
if (s_logger.isTraceEnabled())
s_logger.trace("vCenter API trace - detachDisk() done (failed)");
throw new Exception("Failed to detach disk due to " + TaskMO.getTaskFailureInfo(_context, morTask));
}
_context.waitForTaskProgressDone(morTask);
// VMware does not update snapshot references to the detached disk, we have to work around it
SnapshotDescriptor snapshotDescriptor = null;
try {
snapshotDescriptor = getSnapshotDescriptor();
} catch (Exception e) {
// best effort: a missing .vmsd just means there is nothing to scrub
s_logger.info("Unable to retrieve snapshot descriptor, will skip updating snapshot reference");
}
if (snapshotDescriptor != null) {
for (Pair<String, ManagedObjectReference> pair : chain) {
DatastoreFile dsFile = new DatastoreFile(pair.first());
snapshotDescriptor.removeDiskReferenceFromSnapshot(dsFile.getFileName());
}
// Upload the scrubbed descriptor back over the original .vmsd file.
Pair<DatacenterMO, String> dcPair = getOwnerDatacenter();
String dsPath = getSnapshotDescriptorDatastorePath();
assert (dsPath != null);
String url = getContext().composeDatastoreBrowseUrl(dcPair.second(), dsPath);
getContext().uploadResourceContent(url, snapshotDescriptor.getVmsdContent());
}
if (s_logger.isTraceEnabled())
s_logger.trace("vCenter API trace - detachDisk() done (successfully)");
return chain;
}
/**
 * Detaches every virtual disk from this VM in one reconfigure task. Backing
 * files are kept (no file operation is set on the removals).
 *
 * @throws Exception if the reconfigure task fails
 */
public void detachAllDisks() throws Exception {
    if (s_logger.isTraceEnabled())
        s_logger.trace("vCenter API trace - detachAllDisk(). target MOR: " + _mor.getValue());

    VirtualDisk[] disks = getAllDiskDevice();
    if (disks.length > 0) {
        VirtualMachineConfigSpec reConfigSpec = new VirtualMachineConfigSpec();
        for (VirtualDisk disk : disks) {
            VirtualDeviceConfigSpec removal = new VirtualDeviceConfigSpec();
            removal.setDevice(disk);
            removal.setOperation(VirtualDeviceConfigSpecOperation.REMOVE);
            reConfigSpec.getDeviceChange().add(removal);
        }

        ManagedObjectReference morTask = _context.getService().reconfigVMTask(_mor, reConfigSpec);
        if (!_context.getVimClient().waitForTask(morTask)) {
            if (s_logger.isTraceEnabled())
                s_logger.trace("vCenter API trace - detachAllDisk() done(failed)");
            throw new Exception("Failed to detach disk due to " + TaskMO.getTaskFailureInfo(_context, morTask));
        }
        _context.waitForTaskProgressDone(morTask);
    }

    if (s_logger.isTraceEnabled())
        s_logger.trace("vCenter API trace - detachAllDisk() done(successfully)");
}
// isoDatastorePath: [datastore name] isoFilePath
/**
 * Attaches an ISO to this VM's CD-ROM drive, letting the full overload pick
 * (or create) the device (key == null).
 *
 * @param isoDatastorePath datastore path of the ISO ("[ds] path")
 * @param morDs            datastore holding the ISO
 * @param connect          connect the device immediately
 * @param connectAtBoot    connect the device at power-on
 * @throws Exception if the reconfigure task fails
 */
public void attachIso(String isoDatastorePath, ManagedObjectReference morDs, boolean connect, boolean connectAtBoot) throws Exception {
attachIso(isoDatastorePath, morDs, connect, connectAtBoot, null);
}
// isoDatastorePath: [datastore name] isoFilePath
/**
 * Attaches an ISO file to a CD-ROM device of this VM. When no matching
 * CD-ROM exists (by device key, or any CD-ROM when key is null) a new one is
 * created on the IDE controller; otherwise the existing device is edited in
 * place with the new ISO backing and connect flags.
 *
 * @param isoDatastorePath datastore path of the ISO ("[ds] path")
 * @param morDs            datastore holding the ISO
 * @param connect          connect the device immediately
 * @param connectAtBoot    connect the device at power-on
 * @param key              device key of the CD-ROM to use; null picks/creates one
 * @throws Exception if the reconfigure task fails
 */
public void attachIso(String isoDatastorePath, ManagedObjectReference morDs,
boolean connect, boolean connectAtBoot, Integer key) throws Exception {
if (s_logger.isTraceEnabled())
s_logger.trace("vCenter API trace - attachIso(). target MOR: " + _mor.getValue() + ", isoDatastorePath: " + isoDatastorePath + ", datastore: " +
morDs.getValue() + ", connect: " + connect + ", connectAtBoot: " + connectAtBoot);
assert (isoDatastorePath != null);
assert (morDs != null);
boolean newCdRom = false;
VirtualCdrom cdRom;
if (key == null) {
cdRom = (VirtualCdrom) getIsoDevice();
} else {
cdRom = (VirtualCdrom) getIsoDevice(key);
}
if (cdRom == null) {
// No existing CD-ROM: create one on the IDE controller with a negative
// temporary key, as required for devices added within the same spec.
newCdRom = true;
cdRom = new VirtualCdrom();
cdRom.setControllerKey(getIDEDeviceControllerKey());
int deviceNumber = getNextIDEDeviceNumber();
cdRom.setUnitNumber(deviceNumber);
cdRom.setKey(-deviceNumber);
}
VirtualDeviceConnectInfo cInfo = new VirtualDeviceConnectInfo();
cInfo.setConnected(connect);
cInfo.setStartConnected(connectAtBoot);
cdRom.setConnectable(cInfo);
VirtualCdromIsoBackingInfo backingInfo = new VirtualCdromIsoBackingInfo();
backingInfo.setFileName(isoDatastorePath);
backingInfo.setDatastore(morDs);
cdRom.setBacking(backingInfo);
VirtualMachineConfigSpec reConfigSpec = new VirtualMachineConfigSpec();
//VirtualDeviceConfigSpec[] deviceConfigSpecArray = new VirtualDeviceConfigSpec[1];
VirtualDeviceConfigSpec deviceConfigSpec = new VirtualDeviceConfigSpec();
deviceConfigSpec.setDevice(cdRom);
if (newCdRom) {
deviceConfigSpec.setOperation(VirtualDeviceConfigSpecOperation.ADD);
} else {
deviceConfigSpec.setOperation(VirtualDeviceConfigSpecOperation.EDIT);
}
//deviceConfigSpecArray[0] = deviceConfigSpec;
reConfigSpec.getDeviceChange().add(deviceConfigSpec);
ManagedObjectReference morTask = _context.getService().reconfigVMTask(_mor, reConfigSpec);
boolean result = _context.getVimClient().waitForTask(morTask);
if (!result) {
if (s_logger.isTraceEnabled())
s_logger.trace("vCenter API trace - attachIso() done(failed)");
throw new Exception("Failed to attach ISO due to " + TaskMO.getTaskFailureInfo(_context, morTask));
}
_context.waitForTaskProgressDone(morTask);
if (s_logger.isTraceEnabled())
s_logger.trace("vCenter API trace - attachIso() done(successfully)");
}
/**
 * Detaches the CDROM device backed by the given ISO, declining (not forcing)
 * any "cdrom disconnect" question the VM raises during the operation.
 *
 * @param isoDatastorePath ISO path in "[datastore name] isoFilePath" form
 * @return the device key of the detached CDROM device
 */
public int detachIso(String isoDatastorePath) throws Exception {
    return detachIso(isoDatastorePath, false);
}
/**
 * Detaches the CDROM device that is backed by the given ISO file.
 *
 * While the reconfigure task is in flight, a background monitor polls for the
 * "msg.cdromdisconnect.locked" VM question (raised when the guest has the
 * CDROM locked) and answers it programmatically so the task cannot hang.
 *
 * @param isoDatastorePath ISO path in "[datastore name] isoFilePath" form
 * @param force            true to answer "yes" (force the disconnect) to the VM
 *                         question, false to decline it
 * @return the device key of the detached CDROM device
 * @throws Exception if no CDROM device is found or the reconfigure task fails
 */
public int detachIso(String isoDatastorePath, final boolean force) throws Exception {
    if (s_logger.isTraceEnabled())
        s_logger.trace("vCenter API trace - detachIso(). target MOR: " + _mor.getValue() + ", isoDatastorePath: " + isoDatastorePath);
    VirtualDevice device = getIsoDevice(isoDatastorePath);
    if (device == null) {
        if (s_logger.isTraceEnabled())
            s_logger.trace("vCenter API trace - detachIso() done(failed)");
        throw new Exception("Unable to find a CDROM device");
    }
    // Re-point the device at an empty remote-passthrough backing, which
    // effectively ejects the ISO without removing the CDROM device itself.
    VirtualCdromRemotePassthroughBackingInfo backingInfo = new VirtualCdromRemotePassthroughBackingInfo();
    backingInfo.setDeviceName("");
    device.setBacking(backingInfo);
    VirtualMachineConfigSpec reConfigSpec = new VirtualMachineConfigSpec();
    VirtualDeviceConfigSpec deviceConfigSpec = new VirtualDeviceConfigSpec();
    deviceConfigSpec.setDevice(device);
    deviceConfigSpec.setOperation(VirtualDeviceConfigSpecOperation.EDIT);
    reConfigSpec.getDeviceChange().add(deviceConfigSpec);
    ManagedObjectReference morTask = _context.getService().reconfigVMTask(_mor, reConfigSpec);
    // Monitor VM questions raised while the reconfigure task runs. flags[0]
    // is the stop signal set in the finally block below.
    final Boolean[] flags = {false};
    final VirtualMachineMO vmMo = this;
    Future<?> future = MonitorServiceExecutor.submit(new Runnable() {
        @Override
        public void run() {
            s_logger.info("VM Question monitor started...");
            while (!flags[0]) {
                try {
                    VirtualMachineRuntimeInfo runtimeInfo = vmMo.getRuntimeInfo();
                    VirtualMachineQuestionInfo question = runtimeInfo.getQuestion();
                    if (question != null) {
                        if (s_logger.isTraceEnabled()) {
                            s_logger.trace("Question id: " + question.getId());
                            s_logger.trace("Question text: " + question.getText());
                        }
                        if (question.getMessage() != null) {
                            for (VirtualMachineMessage msg : question.getMessage()) {
                                if (s_logger.isTraceEnabled()) {
                                    s_logger.trace("msg id: " + msg.getId());
                                    s_logger.trace("msg text: " + msg.getText());
                                }
                                if ("msg.cdromdisconnect.locked".equalsIgnoreCase(msg.getId())) {
                                    s_logger.info("Found that VM has a pending question that we need to answer programmatically, question id: " + msg.getId() +
                                            ", for safe operation we will automatically decline it");
                                    vmMo.answerVM(question.getId(), force ? ANSWER_YES : ANSWER_NO);
                                    break;
                                }
                            }
                        } else if (question.getText() != null) {
                            // Fall back to parsing the raw question text, which is
                            // expected to look like "<msgId>:<msgText>".
                            String text = question.getText();
                            String msgId;
                            String msgText;
                            if (s_logger.isDebugEnabled()) {
                                s_logger.debug("question text : " + text);
                            }
                            String[] tokens = text.split(":");
                            msgId = tokens[0];
                            // FIX: guard against question text without a ':' separator,
                            // which previously threw ArrayIndexOutOfBoundsException here.
                            msgText = tokens.length > 1 ? tokens[1] : "";
                            if ("msg.cdromdisconnect.locked".equalsIgnoreCase(msgId)) {
                                s_logger.info("Found that VM has a pending question that we need to answer programmatically, question id: " + question.getId() +
                                        ". Message id : " + msgId + ". Message text : " + msgText + ", for safe operation we will automatically decline it.");
                                vmMo.answerVM(question.getId(), force ? ANSWER_YES : ANSWER_NO);
                            }
                        }
                        ChoiceOption choice = question.getChoice();
                        if (choice != null) {
                            for (ElementDescription info : choice.getChoiceInfo()) {
                                if (s_logger.isTraceEnabled()) {
                                    s_logger.trace("Choice option key: " + info.getKey());
                                    s_logger.trace("Choice option label: " + info.getLabel());
                                }
                            }
                        }
                    }
                } catch (Throwable e) {
                    s_logger.error("Unexpected exception: ", e);
                }
                try {
                    Thread.sleep(1000);
                } catch (InterruptedException e) {
                    s_logger.debug("[ignored] interupted while handling vm question about iso detach.");
                }
            }
            s_logger.info("VM Question monitor stopped");
        }
    });
    try {
        boolean result = _context.getVimClient().waitForTask(morTask);
        if (!result) {
            // FIX: guard/log level now agree (was isDebugEnabled() guarding trace()).
            if (s_logger.isTraceEnabled())
                s_logger.trace("vCenter API trace - detachIso() done(failed)");
            throw new Exception("Failed to detachIso due to " + TaskMO.getTaskFailureInfo(_context, morTask));
        }
        _context.waitForTaskProgressDone(morTask);
        if (s_logger.isTraceEnabled())
            s_logger.trace("vCenter API trace - detachIso() done(successfully)");
    } finally {
        // Always stop the question monitor, even when the task failed.
        flags[0] = true;
        future.cancel(true);
    }
    return device.getKey();
}
/**
 * Downloads and parses the VMDK descriptor file at the given datastore path.
 *
 * @param vmdkDatastorePath datastore path of the VMDK descriptor file
 * @return a pair of the parsed descriptor and the raw file content
 */
public Pair<VmdkFileDescriptor, byte[]> getVmdkFileInfo(String vmdkDatastorePath) throws Exception {
    if (s_logger.isTraceEnabled())
        s_logger.trace("vCenter API trace - getVmdkFileInfo(). target MOR: " + _mor.getValue() + ", vmdkDatastorePath: " + vmdkDatastorePath);
    Pair<DatacenterMO, String> ownerDc = getOwnerDatacenter();
    String browseUrl = getContext().composeDatastoreBrowseUrl(ownerDc.second(), vmdkDatastorePath);
    byte[] rawContent = getContext().getResourceContent(browseUrl);
    VmdkFileDescriptor vmdkDescriptor = new VmdkFileDescriptor();
    vmdkDescriptor.parse(rawContent);
    Pair<VmdkFileDescriptor, byte[]> info = new Pair<VmdkFileDescriptor, byte[]>(vmdkDescriptor, rawContent);
    if (s_logger.isTraceEnabled()) {
        s_logger.trace("vCenter API trace - getVmdkFileInfo() done");
        s_logger.trace("VMDK file descriptor: " + new Gson().toJson(info.first()));
    }
    return info;
}
/**
 * Exports this (powered-off) VM to a local directory as OVF, optionally
 * packaging the result into a single OVA archive.
 *
 * Uses an HttpNfcLease to download each disk, reporting download progress on
 * the lease, then writes the OVF descriptor and (optionally) tars everything
 * into "<exportName>.ova".
 *
 * @param exportDir        local directory to export into
 * @param exportName       base name for the exported files (no extension)
 * @param packToOva        true to tar the OVF + disks into an OVA archive
 * @param leaveOvaFileOnly true to delete the individual OVF/VMDK files afterwards
 * @throws Exception if the VM is not powered off or any export step fails
 */
public void exportVm(String exportDir, String exportName, boolean packToOva, boolean leaveOvaFileOnly) throws Exception {
    ManagedObjectReference morOvf = _context.getServiceContent().getOvfManager();
    VirtualMachineRuntimeInfo runtimeInfo = getRuntimeInfo();
    HostMO hostMo = new HostMO(_context, runtimeInfo.getHost());
    String hostName = hostMo.getHostName();
    String vmName = getVmName();
    DatacenterMO dcMo = new DatacenterMO(_context, hostMo.getHyperHostDatacenter());
    if (runtimeInfo.getPowerState() != VirtualMachinePowerState.POWERED_OFF) {
        String msg = "Unable to export VM because it is not at powerdOff state. vmName: " + vmName + ", host: " + hostName;
        s_logger.error(msg);
        throw new Exception(msg);
    }
    ManagedObjectReference morLease = _context.getService().exportVm(getMor());
    if (morLease == null) {
        s_logger.error("exportVm() failed");
        throw new Exception("exportVm() failed");
    }
    HttpNfcLeaseMO leaseMo = new HttpNfcLeaseMO(_context, morLease);
    HttpNfcLeaseState state = leaseMo.waitState(new HttpNfcLeaseState[] {HttpNfcLeaseState.READY, HttpNfcLeaseState.ERROR});
    try {
        if (state == HttpNfcLeaseState.READY) {
            final HttpNfcLeaseMO.ProgressReporter progressReporter = leaseMo.createProgressReporter();
            boolean success = false;
            List<String> fileNames = new ArrayList<String>();
            try {
                HttpNfcLeaseInfo leaseInfo = leaseMo.getLeaseInfo();
                final long totalBytes = leaseInfo.getTotalDiskCapacityInKB() * 1024;
                long totalBytesDownloaded = 0;
                List<HttpNfcLeaseDeviceUrl> deviceUrls = leaseInfo.getDeviceUrl();
                s_logger.info("volss: copy vmdk and ovf file starts " + System.currentTimeMillis());
                if (deviceUrls != null) {
                    OvfFile[] ovfFiles = new OvfFile[deviceUrls.size()];
                    for (int i = 0; i < deviceUrls.size(); i++) {
                        String deviceId = deviceUrls.get(i).getKey();
                        String deviceUrlStr = deviceUrls.get(i).getUrl();
                        String orgDiskFileName = deviceUrlStr.substring(deviceUrlStr.lastIndexOf("/") + 1);
                        String diskFileName = String.format("%s-disk%d%s", exportName, i, VmwareHelper.getFileExtension(orgDiskFileName, ".vmdk"));
                        // Device URLs contain a '*' placeholder for the host name.
                        String diskUrlStr = deviceUrlStr.replace("*", hostName);
                        diskUrlStr = HypervisorHostHelper.resolveHostNameInUrl(dcMo, diskUrlStr);
                        String diskLocalPath = exportDir + File.separator + diskFileName;
                        fileNames.add(diskLocalPath);
                        if (s_logger.isInfoEnabled()) {
                            s_logger.info("Download VMDK file for export. url: " + deviceUrlStr);
                        }
                        long lengthOfDiskFile = _context.downloadVmdkFile(diskUrlStr, diskLocalPath, totalBytesDownloaded, new ActionDelegate<Long>() {
                            @Override
                            public void action(Long param) {
                                if (s_logger.isTraceEnabled()) {
                                    s_logger.trace("Download progress " + param + "/" + toHumanReadableSize(totalBytes));
                                }
                                // Keep the lease alive by reporting percentage progress.
                                progressReporter.reportProgress((int)(param * 100 / totalBytes));
                            }
                        });
                        totalBytesDownloaded += lengthOfDiskFile;
                        OvfFile ovfFile = new OvfFile();
                        ovfFile.setPath(diskFileName);
                        ovfFile.setDeviceId(deviceId);
                        ovfFile.setSize(lengthOfDiskFile);
                        ovfFiles[i] = ovfFile;
                    }
                    // write OVF descriptor file
                    OvfCreateDescriptorParams ovfDescParams = new OvfCreateDescriptorParams();
                    ovfDescParams.getOvfFiles().addAll(Arrays.asList(ovfFiles));
                    OvfCreateDescriptorResult ovfCreateDescriptorResult = _context.getService().createDescriptor(morOvf, getMor(), ovfDescParams);
                    String ovfPath = exportDir + File.separator + exportName + ".ovf";
                    fileNames.add(ovfPath);
                    // FIX: try-with-resources so the writer is closed even if write() throws.
                    try (OutputStreamWriter out = new OutputStreamWriter(new FileOutputStream(ovfPath), "UTF-8")) {
                        out.write(ovfCreateDescriptorResult.getOvfDescriptor());
                    }
                    // tar files into OVA
                    if (packToOva) {
                        // Important! we need to sync file system before we can safely use tar to work around a linux kernal bug(or feature)
                        s_logger.info("Sync file system before we package OVA...");
                        Script commandSync = new Script(true, "sync", 0, s_logger);
                        commandSync.execute();
                        Script command = new Script(false, "tar", 0, s_logger);
                        command.setWorkDir(exportDir);
                        command.add("-cf", exportName + ".ova");
                        command.add(exportName + ".ovf"); // OVF file should be the first file in OVA archive
                        for (String name : fileNames) {
                            command.add((new File(name).getName()));
                        }
                        s_logger.info("Package OVA with commmand: " + command.toString());
                        command.execute();
                        // to be safe, physically test existence of the target OVA file
                        if ((new File(exportDir + File.separator + exportName + ".ova")).exists()) {
                            success = true;
                        } else {
                            s_logger.error(exportDir + File.separator + exportName + ".ova is not created as expected");
                        }
                    } else {
                        success = true;
                    }
                }
                s_logger.info("volss: copy vmdk and ovf file finishes " + System.currentTimeMillis());
            } catch (Throwable e) {
                s_logger.error("Unexpected exception ", e);
            } finally {
                progressReporter.close();
                if (leaveOvaFileOnly) {
                    for (String name : fileNames) {
                        new File(name).delete();
                    }
                }
                if (!success)
                    throw new Exception("Unable to finish the whole process to package as a OVA file");
            }
        }
    } finally {
        // Complete the NFC lease regardless of outcome so vCenter releases it.
        leaseMo.updateLeaseProgress(100);
        leaseMo.completeLease();
    }
}
// snapshot directory in format of: /vmfs/volumes/<datastore name>/<path>
/**
 * Rewrites the VM's .vmx file so that its "workingDir" (snapshot directory)
 * points at the given path, replacing an existing entry or appending a new one.
 *
 * @param snapshotDir snapshot directory in "/vmfs/volumes/&lt;datastore name&gt;/&lt;path&gt;" form
 * @deprecated relies on directly editing the VMX file rather than a supported API
 */
@Deprecated
public void setSnapshotDirectory(String snapshotDir) throws Exception {
    VirtualMachineFileInfo fileInfo = getFileInfo();
    Pair<DatacenterMO, String> dcInfo = getOwnerDatacenter();
    String vmxUrl = _context.composeDatastoreBrowseUrl(dcInfo.second(), fileInfo.getVmPathName());
    byte[] vmxContent = _context.getResourceContent(vmxUrl);
    BufferedReader in = null;
    BufferedWriter out = null;
    ByteArrayOutputStream bos = new ByteArrayOutputStream();
    // Tracks whether an existing "workingDir" line was replaced.
    boolean replaced = false;
    try {
        in = new BufferedReader(new InputStreamReader(new ByteArrayInputStream(vmxContent),"UTF-8"));
        out = new BufferedWriter(new OutputStreamWriter(bos,"UTF-8"));
        String line;
        while ((line = in.readLine()) != null) {
            if (line.startsWith("workingDir")) {
                replaced = true;
                out.write(String.format("workingDir=\"%s\"", snapshotDir));
                out.newLine();
            } else {
                out.write(line);
                out.newLine();
            }
        }
        // No existing entry: append one at the end of the file.
        if (!replaced) {
            out.newLine();
            out.write(String.format("workingDir=\"%s\"", snapshotDir));
            out.newLine();
        }
    } finally {
        if (in != null) {
            in.close();
        }
        if (out != null) {
            // Closing the writer flushes the buffered content into bos before upload.
            out.close();
        }
    }
    _context.uploadResourceContent(vmxUrl, bos.toByteArray());
    // It seems that I don't need to do re-registration. VMware has bug in writing the correct snapshot's VMDK path to
    // its disk backing info anyway.
    // redoRegistration();
}
// destName does not contain extension name
/**
 * Copies the current snapshot's disk chain to a destination datastore,
 * rewriting each VMDK descriptor so the chain links by the new file names,
 * and finally uploads a ".vdisk" metadata file describing the chain.
 *
 * Chain files are numbered from the base upward: index (length - i - 1) for
 * chain entry i, so the newest snapshot gets the highest number.
 *
 * @param deviceName      device to back up, or null for all devices in the chain
 * @param morDestDs       destination datastore
 * @param destDsDirectory destination directory on the datastore
 * @param destName        base name for the copied files (no extension)
 * @param includeBase     true to also copy the base (oldest) disk in the chain
 * @throws Exception if the VM has no snapshot or any copy/upload step fails
 */
public void backupCurrentSnapshot(String deviceName, ManagedObjectReference morDestDs, String destDsDirectory, String destName, boolean includeBase) throws Exception {
    SnapshotDescriptor descriptor = getSnapshotDescriptor();
    SnapshotInfo[] snapshotInfo = descriptor.getCurrentDiskChain();
    if (snapshotInfo.length == 0) {
        String msg = "No snapshot found in this VM";
        throw new Exception(msg);
    }
    HostMO hostMo = getRunningHost();
    DatacenterMO dcMo = getOwnerDatacenter().first();
    List<Pair<ManagedObjectReference, String>> mounts = hostMo.getDatastoreMountsOnHost();
    VirtualMachineFileInfo vmFileInfo = getFileInfo();
    List<Ternary<String, String, String>> backupInfo = new ArrayList<Ternary<String, String, String>>();
    for (int i = 0; i < snapshotInfo.length; i++) {
        // The last chain entry is the base disk; skip it unless requested.
        if (!includeBase && i == snapshotInfo.length - 1) {
            break;
        }
        SnapshotDescriptor.DiskInfo[] disks = snapshotInfo[i].getDisks();
        if (disks != null) {
            String destBaseFileName;
            String destFileName;
            String destParentFileName;
            for (SnapshotDescriptor.DiskInfo disk : disks) {
                if (deviceName == null || deviceName.equals(disk.getDeviceName())) {
                    String srcVmdkFullDsPath = getSnapshotDiskFileDatastorePath(vmFileInfo, mounts, disk.getDiskFileName());
                    Pair<DatastoreMO, String> srcDsInfo = getOwnerDatastore(srcVmdkFullDsPath);
                    Pair<VmdkFileDescriptor, byte[]> vmdkInfo = getVmdkFileInfo(srcVmdkFullDsPath);
                    String srcVmdkBaseFilePath = DatastoreFile.getCompanionDatastorePath(srcVmdkFullDsPath, vmdkInfo.first().getBaseFileName());
                    destFileName = destName + (snapshotInfo.length - i - 1) + ".vmdk";
                    // A parent file means this is a delta disk; otherwise it is a flat base disk.
                    if (vmdkInfo.first().getParentFileName() != null) {
                        destBaseFileName = destName + (snapshotInfo.length - i - 1) + "-delta.vmdk";
                        destParentFileName = destName + (snapshotInfo.length - i - 2) + ".vmdk";
                    } else {
                        destBaseFileName = destName + (snapshotInfo.length - i - 1) + "-flat.vmdk";
                        destParentFileName = null;
                    }
                    s_logger.info("Copy VMDK base file " + srcVmdkBaseFilePath + " to " + destDsDirectory + "/" + destBaseFileName);
                    srcDsInfo.first().copyDatastoreFile(srcVmdkBaseFilePath, dcMo.getMor(), morDestDs, destDsDirectory + "/" + destBaseFileName, dcMo.getMor(), true);
                    // Rewrite the descriptor so it references the renamed base/parent files.
                    byte[] newVmdkContent = VmdkFileDescriptor.changeVmdkContentBaseInfo(vmdkInfo.second(), destBaseFileName, destParentFileName);
                    String vmdkUploadUrl = getContext().composeDatastoreBrowseUrl(dcMo.getName(), destDsDirectory + "/" + destFileName);
                    s_logger.info("Upload VMDK content file to " + destDsDirectory + "/" + destFileName);
                    getContext().uploadResourceContent(vmdkUploadUrl, newVmdkContent);
                    backupInfo.add(new Ternary<String, String, String>(destFileName, destBaseFileName, destParentFileName));
                }
            }
        }
    }
    // Upload a ".vdisk" metadata file describing the copied chain.
    byte[] vdiskInfo = VmwareHelper.composeDiskInfo(backupInfo, snapshotInfo.length, includeBase);
    String vdiskUploadUrl = getContext().composeDatastoreBrowseUrl(dcMo.getName(), destDsDirectory + "/" + destName + ".vdisk");
    getContext().uploadResourceContent(vdiskUploadUrl, vdiskInfo);
}
/**
 * Resolves the datastore paths of every VMDK in the current snapshot's disk
 * chain, optionally filtered to a single device.
 *
 * @param diskDevice device name to filter on, or null to include every device
 * @return datastore paths of the chain's VMDK files, newest first
 */
public String[] getCurrentSnapshotDiskChainDatastorePaths(String diskDevice) throws Exception {
    HostMO runningHost = getRunningHost();
    List<Pair<ManagedObjectReference, String>> dsMounts = runningHost.getDatastoreMountsOnHost();
    VirtualMachineFileInfo fileInfo = getFileInfo();
    SnapshotDescriptor snapshotDescriptor = getSnapshotDescriptor();
    List<String> chainPaths = new ArrayList<String>();
    for (SnapshotInfo chainEntry : snapshotDescriptor.getCurrentDiskChain()) {
        SnapshotDescriptor.DiskInfo[] entryDisks = chainEntry.getDisks();
        if (entryDisks == null) {
            continue;
        }
        for (SnapshotDescriptor.DiskInfo entryDisk : entryDisks) {
            // null filter means "all devices".
            if (diskDevice == null || diskDevice.equalsIgnoreCase(entryDisk.getDeviceName())) {
                chainPaths.add(getSnapshotDiskFileDatastorePath(fileInfo, dsMounts, entryDisk.getDiskFileName()));
            }
        }
    }
    return chainPaths.toArray(new String[0]);
}
// return the disk chain (VMDK datastore paths) for cloned snapshot
/**
 * Clones a new VM from this VM's current snapshot disk chain.
 *
 * @param clonedVmName           name for the cloned VM
 * @param cpuSpeedMHz            CPU speed for the clone
 * @param memoryMb               memory size for the clone in MB
 * @param diskDevice             device name to clone, or null for all devices
 * @param morDs                  datastore for the clone (must not be null)
 * @param virtualHardwareVersion virtual hardware version for the clone
 * @return the cloned VM plus the VMDK datastore paths of the cloned disk chain
 */
public Pair<VirtualMachineMO, String[]> cloneFromCurrentSnapshot(String clonedVmName, int cpuSpeedMHz, int memoryMb, String diskDevice, ManagedObjectReference morDs, String virtualHardwareVersion)
        throws Exception {
    assert (morDs != null);
    String[] disks = getCurrentSnapshotDiskChainDatastorePaths(diskDevice);
    VirtualMachineMO clonedVm = cloneFromDiskChain(clonedVmName, cpuSpeedMHz, memoryMb, disks, morDs, virtualHardwareVersion);
    return new Pair<VirtualMachineMO, String[]>(clonedVm, disks);
}
/**
 * Creates a blank worker VM and attaches the given VMDK disk chain to it.
 * If attaching the disk fails, the worker VM is detached and destroyed so no
 * orphan VM is left behind.
 *
 * @param clonedVmName         name for the worker VM
 * @param cpuSpeedMHz          CPU speed for the worker VM (not applied here; see createWorkerVM)
 * @param memoryMb             memory size for the worker VM (not applied here; see createWorkerVM)
 * @param disks                VMDK datastore paths of the chain, length &gt;= 1
 * @param morDs                datastore to create the worker VM on
 * @param cloneHardwareVersion virtual hardware version for the worker VM
 * @return the configured worker VM
 * @throws Exception if the worker VM cannot be created or configured
 */
public VirtualMachineMO cloneFromDiskChain(String clonedVmName, int cpuSpeedMHz, int memoryMb, String[] disks, ManagedObjectReference morDs, String cloneHardwareVersion) throws Exception {
    assert (disks != null);
    assert (disks.length >= 1);
    HostMO hostMo = getRunningHost();
    VirtualMachineMO clonedVmMo = HypervisorHostHelper.createWorkerVM(hostMo, new DatastoreMO(hostMo.getContext(), morDs), clonedVmName, cloneHardwareVersion);
    if (clonedVmMo == null)
        throw new Exception("Unable to find just-created blank VM");
    boolean bSuccess = false;
    try {
        VirtualMachineConfigSpec vmConfigSpec = new VirtualMachineConfigSpec();
        VirtualDeviceConfigSpec deviceConfigSpec = new VirtualDeviceConfigSpec();
        VirtualDevice device = VmwareHelper.prepareDiskDevice(clonedVmMo, null, -1, disks, morDs, -1, 1);
        deviceConfigSpec.setDevice(device);
        deviceConfigSpec.setOperation(VirtualDeviceConfigSpecOperation.ADD);
        vmConfigSpec.getDeviceChange().add(deviceConfigSpec);
        clonedVmMo.configureVm(vmConfigSpec);
        bSuccess = true;
        return clonedVmMo;
    } finally {
        // Clean up the worker VM if disk attachment did not complete.
        if (!bSuccess) {
            clonedVmMo.detachAllDisks();
            clonedVmMo.destroy();
        }
    }
}
/**
 * Looks up the guest OS descriptor for the given guest OS id from the VM's
 * environment browser.
 *
 * @param guestOsId guest OS id to look up, or null to use this VM's own guest id
 * @return the matching descriptor, or null if none matches
 */
public GuestOsDescriptor getGuestOsDescriptor(String guestOsId) throws Exception {
    String effectiveGuestId = (guestOsId != null) ? guestOsId : getGuestId();
    ManagedObjectReference envBrowser = _context.getVimClient().getMoRefProp(_mor, "environmentBrowser");
    VirtualMachineConfigOption configOption = _context.getService().queryConfigOption(envBrowser, null, null);
    for (GuestOsDescriptor candidate : configOption.getGuestOSDescriptor()) {
        if (effectiveGuestId != null && effectiveGuestId.equalsIgnoreCase(candidate.getId())) {
            return candidate;
        }
    }
    return null;
}
/**
 * Hot-adds a single virtual device to this VM via a reconfigure task.
 *
 * @param device the device to add
 * @throws Exception if the reconfigure task does not succeed
 */
public void plugDevice(VirtualDevice device) throws Exception {
    VirtualDeviceConfigSpec addSpec = new VirtualDeviceConfigSpec();
    addSpec.setDevice(device);
    addSpec.setOperation(VirtualDeviceConfigSpecOperation.ADD);
    VirtualMachineConfigSpec configSpec = new VirtualMachineConfigSpec();
    configSpec.getDeviceChange().add(addSpec);
    if (!configureVm(configSpec)) {
        throw new Exception("Failed to add devices");
    }
}
/**
 * Removes a single virtual device from this VM via a reconfigure task.
 *
 * @param device the device to remove
 * @throws Exception if the reconfigure task does not succeed
 */
public void tearDownDevice(VirtualDevice device) throws Exception {
    VirtualDeviceConfigSpec removeSpec = new VirtualDeviceConfigSpec();
    removeSpec.setDevice(device);
    removeSpec.setOperation(VirtualDeviceConfigSpecOperation.REMOVE);
    VirtualMachineConfigSpec configSpec = new VirtualMachineConfigSpec();
    configSpec.getDeviceChange().add(removeSpec);
    if (!configureVm(configSpec)) {
        throw new Exception("Failed to detach devices");
    }
}
/**
 * Removes every device of the given classes from this VM in one reconfigure
 * task. Does nothing when no matching device exists.
 *
 * @param deviceClasses device classes to match and remove
 * @throws Exception if the reconfigure task does not succeed
 */
public void tearDownDevices(Class<?>[] deviceClasses) throws Exception {
    VirtualDevice[] matchedDevices = getMatchedDevices(deviceClasses);
    if (matchedDevices.length == 0) {
        return;
    }
    VirtualMachineConfigSpec configSpec = new VirtualMachineConfigSpec();
    for (VirtualDevice matched : matchedDevices) {
        VirtualDeviceConfigSpec removeSpec = new VirtualDeviceConfigSpec();
        removeSpec.setDevice(matched);
        removeSpec.setOperation(VirtualDeviceConfigSpecOperation.REMOVE);
        configSpec.getDeviceChange().add(removeSpec);
    }
    if (!configureVm(configSpec)) {
        throw new Exception("Failed to detach devices");
    }
}
/**
 * Copies every disk file of this VM (and, if requested, each file in the
 * disks' backing chains) to a directory on the destination datastore. For
 * each descriptor VMDK, its companion base/extent file is copied as well.
 *
 * @param destDsMo        destination datastore
 * @param destDsDir       destination directory on the datastore
 * @param followDiskChain true to copy the full backing chain of each disk
 */
public void copyAllVmDiskFiles(DatastoreMO destDsMo, String destDsDir, boolean followDiskChain) throws Exception {
    VirtualDevice[] disks = getAllDiskDevice();
    DatacenterMO dcMo = getOwnerDatacenter().first();
    if (disks != null) {
        for (VirtualDevice disk : disks) {
            List<Pair<String, ManagedObjectReference>> vmdkFiles = getDiskDatastorePathChain((VirtualDisk)disk, followDiskChain);
            for (Pair<String, ManagedObjectReference> fileItem : vmdkFiles) {
                DatastoreMO srcDsMo = new DatastoreMO(_context, fileItem.second());
                DatastoreFile srcFile = new DatastoreFile(fileItem.first());
                DatastoreFile destFile = new DatastoreFile(destDsMo.getName(), destDsDir, srcFile.getFileName());
                Pair<VmdkFileDescriptor, byte[]> vmdkDescriptor = null;
                vmdkDescriptor = getVmdkFileInfo(fileItem.first());
                s_logger.info("Copy VM disk file " + srcFile.getPath() + " to " + destFile.getPath());
                srcDsMo.copyDatastoreFile(fileItem.first(), dcMo.getMor(), destDsMo.getMor(), destFile.getPath(), dcMo.getMor(), true);
                // A VMDK descriptor references a companion base file
                // (flat/delta extent); copy that alongside the descriptor.
                if (vmdkDescriptor != null) {
                    String vmdkBaseFileName = vmdkDescriptor.first().getBaseFileName();
                    String baseFilePath = srcFile.getCompanionPath(vmdkBaseFileName);
                    destFile = new DatastoreFile(destDsMo.getName(), destDsDir, vmdkBaseFileName);
                    s_logger.info("Copy VM disk file " + baseFilePath + " to " + destFile.getPath());
                    srcDsMo.copyDatastoreFile(baseFilePath, dcMo.getMor(), destDsMo.getMor(), destFile.getPath(), dcMo.getMor(), true);
                }
            }
        }
    }
}
/**
 * Collects the base file names of all virtual-disk VMDKs attached to this VM.
 *
 * @return base names of each attached disk's backing file
 */
public List<String> getVmdkFileBaseNames() throws Exception {
    List<String> baseNames = new ArrayList<String>();
    for (VirtualDevice candidate : getAllDiskDevice()) {
        if (candidate instanceof VirtualDisk) {
            baseNames.add(getVmdkFileBaseName((VirtualDisk)candidate));
        }
    }
    return baseNames;
}
/**
 * Returns the base file name of a disk's backing VMDK, or null when the disk
 * does not use flat (FlatVer2) backing.
 *
 * @param disk the virtual disk to inspect
 * @return the backing file's base name, or null
 */
public String getVmdkFileBaseName(VirtualDisk disk) throws Exception {
    VirtualDeviceBackingInfo backing = disk.getBacking();
    if (!(backing instanceof VirtualDiskFlatVer2BackingInfo)) {
        return null;
    }
    DatastoreFile backingFile = new DatastoreFile(((VirtualDiskFlatVer2BackingInfo)backing).getFileName());
    return backingFile.getFileBaseName();
}
// this method relies on un-offical VMware API
/**
 * Moves every disk file of this VM (and, if requested, each file in the
 * disks' backing chains) to a directory on the destination datastore. For
 * each descriptor VMDK, its companion base/extent file is moved as well.
 *
 * @param destDsMo        destination datastore
 * @param destDsDir       destination directory on the datastore
 * @param followDiskChain true to move the full backing chain of each disk
 * @deprecated relies on an unofficial VMware API
 */
@Deprecated
public void moveAllVmDiskFiles(DatastoreMO destDsMo, String destDsDir, boolean followDiskChain) throws Exception {
    VirtualDevice[] disks = getAllDiskDevice();
    DatacenterMO dcMo = getOwnerDatacenter().first();
    if (disks != null) {
        for (VirtualDevice disk : disks) {
            List<Pair<String, ManagedObjectReference>> vmdkFiles = getDiskDatastorePathChain((VirtualDisk)disk, followDiskChain);
            for (Pair<String, ManagedObjectReference> fileItem : vmdkFiles) {
                DatastoreMO srcDsMo = new DatastoreMO(_context, fileItem.second());
                DatastoreFile srcFile = new DatastoreFile(fileItem.first());
                DatastoreFile destFile = new DatastoreFile(destDsMo.getName(), destDsDir, srcFile.getFileName());
                Pair<VmdkFileDescriptor, byte[]> vmdkDescriptor = null;
                vmdkDescriptor = getVmdkFileInfo(fileItem.first());
                s_logger.info("Move VM disk file " + srcFile.getPath() + " to " + destFile.getPath());
                srcDsMo.moveDatastoreFile(fileItem.first(), dcMo.getMor(), destDsMo.getMor(), destFile.getPath(), dcMo.getMor(), true);
                // A VMDK descriptor references a companion base file
                // (flat/delta extent); move that alongside the descriptor.
                if (vmdkDescriptor != null) {
                    String vmdkBaseFileName = vmdkDescriptor.first().getBaseFileName();
                    String baseFilePath = srcFile.getCompanionPath(vmdkBaseFileName);
                    destFile = new DatastoreFile(destDsMo.getName(), destDsDir, vmdkBaseFileName);
                    s_logger.info("Move VM disk file " + baseFilePath + " to " + destFile.getPath());
                    srcDsMo.moveDatastoreFile(baseFilePath, dcMo.getMor(), destDsMo.getMor(), destFile.getPath(), dcMo.getMor(), true);
                }
            }
        }
    }
}
/**
 * Finds the device key of the first paravirtual SCSI controller on this VM.
 *
 * @return the controller's device key, or -1 when none is present
 */
public int getPvScsiDeviceControllerKeyNoException() throws Exception {
    List<VirtualDevice> hardwareDevices = (List<VirtualDevice>)_context.getVimClient().
            getDynamicProperty(_mor, "config.hardware.device");
    if (hardwareDevices == null) {
        return -1;
    }
    for (VirtualDevice hardwareDevice : hardwareDevices) {
        if (hardwareDevice instanceof ParaVirtualSCSIController) {
            return hardwareDevice.getKey();
        }
    }
    return -1;
}
/**
 * Finds the device key of the first paravirtual SCSI controller on this VM,
 * failing when none is present.
 *
 * @return the controller's device key
 * @throws Exception when the VM has no paravirtual SCSI controller
 */
public int getPvScsiDeviceControllerKey() throws Exception {
    int controllerKey = getPvScsiDeviceControllerKeyNoException();
    if (controllerKey >= 0) {
        return controllerKey;
    }
    assert (false);
    throw new Exception("VMware Paravirtual SCSI Controller Not Found");
}
/**
 * Adds paravirtual SCSI controllers to this VM until the required count is
 * reached, starting at the given bus number.
 *
 * @param requiredNumScsiControllers total number of SCSI controllers required
 * @param availableBusNum            first free SCSI bus number to use
 * @throws Exception if the reconfigure task fails
 */
public void ensurePvScsiDeviceController(int requiredNumScsiControllers, int availableBusNum) throws Exception {
    VirtualMachineConfigSpec vmConfig = new VirtualMachineConfigSpec();
    int busNum = availableBusNum;
    while (busNum < requiredNumScsiControllers) {
        ParaVirtualSCSIController scsiController = new ParaVirtualSCSIController();
        scsiController.setSharedBus(VirtualSCSISharing.NO_SHARING);
        scsiController.setBusNumber(busNum);
        // Negative temporary key for a new device; vSphere assigns the real key.
        scsiController.setKey(busNum - VmwareHelper.MAX_SCSI_CONTROLLER_COUNT);
        VirtualDeviceConfigSpec scsiControllerSpec = new VirtualDeviceConfigSpec();
        scsiControllerSpec.setDevice(scsiController);
        scsiControllerSpec.setOperation(VirtualDeviceConfigSpecOperation.ADD);
        vmConfig.getDeviceChange().add(scsiControllerSpec);
        busNum++;
    }
    // FIX: configureVm() returns true on success (see plugDevice()/tearDownDevice()
    // in this class); the previous check was inverted and threw on success.
    if (!configureVm(vmConfig)) {
        throw new Exception("Unable to add Scsi controllers to the VM " + getName());
    } else {
        s_logger.info("Successfully added " + requiredNumScsiControllers + " SCSI controllers.");
    }
}
/**
 * Returns the disk controller type recommended for the given guest OS,
 * derived from the guest OS descriptor.
 *
 * @param guestOsId guest OS id, or null to use this VM's own guest id
 * @return the recommended disk controller name
 */
public String getRecommendedDiskController(String guestOsId) throws Exception {
    GuestOsDescriptor descriptor = getGuestOsDescriptor(guestOsId);
    return VmwareHelper.getRecommendedDiskControllerFromDescriptor(descriptor);
}
/**
 * Checks whether this VM's virtual hardware version (7 or later) supports the
 * paravirtual SCSI controller type.
 *
 * @return true when hardware version is 7+, false otherwise (with an error log)
 */
public boolean isPvScsiSupported() throws Exception {
    int hardwareVersion = getVirtualHardwareVersion();
    // PVSCSI requires virtual hardware version 7 or later.
    if (hardwareVersion >= 7) {
        return true;
    }
    s_logger.error("The virtual hardware version of the VM is " + hardwareVersion
            + ", which doesn't support PV SCSI controller type for virtual harddisks. Please upgrade this VM's virtual hardware version to 7 or later.");
    return false;
}
// Would be useful if multiple sub types of SCSI controllers per VM are supported in CloudStack.
/**
 * Finds the device key of the first SCSI controller matching the requested
 * controller type. The generic type "scsi" matches any of the four supported
 * sub-types (lsilogic, lsisas1068, pvscsi, buslogic).
 *
 * @param diskController requested controller type name
 * @return the matching controller's device key
 * @throws IllegalStateException when no matching, valid controller is configured
 */
public int getScsiDiskControllerKey(String diskController) throws Exception {
    List<VirtualDevice> devices = (List<VirtualDevice>)_context.getVimClient().getDynamicProperty(_mor, "config.hardware.device");
    if (CollectionUtils.isNotEmpty(devices)) {
        DiskControllerType diskControllerType = DiskControllerType.getType(diskController);
        for (VirtualDevice device : devices) {
            if ((diskControllerType == DiskControllerType.lsilogic || diskControllerType == DiskControllerType.scsi)
                    && device instanceof VirtualLsiLogicController && isValidScsiDiskController((VirtualLsiLogicController)device)) {
                return ((VirtualLsiLogicController)device).getKey();
            } else if ((diskControllerType == DiskControllerType.lsisas1068 || diskControllerType == DiskControllerType.scsi)
                    && device instanceof VirtualLsiLogicSASController && isValidScsiDiskController((VirtualLsiLogicSASController)device)) {
                return ((VirtualLsiLogicSASController)device).getKey();
            } else if ((diskControllerType == DiskControllerType.pvscsi || diskControllerType == DiskControllerType.scsi)
                    && device instanceof ParaVirtualSCSIController && isValidScsiDiskController((ParaVirtualSCSIController)device)) {
                return ((ParaVirtualSCSIController)device).getKey();
            } else if ((diskControllerType == DiskControllerType.buslogic || diskControllerType == DiskControllerType.scsi)
                    && device instanceof VirtualBusLogicController && isValidScsiDiskController((VirtualBusLogicController)device)) {
                return ((VirtualBusLogicController)device).getKey();
            }
        }
    }
    assert (false);
    throw new IllegalStateException("Scsi disk controller of type " + diskController + " not found among configured devices.");
}
/**
 * Finds the device key of the N-th SCSI controller of the requested type,
 * where N is derived from the SCSI unit number (each controller hosts up to
 * MAX_ALLOWED_DEVICES_SCSI_CONTROLLER units). Returns -1 instead of throwing
 * when no suitable controller is found.
 *
 * @param diskController requested controller type name ("scsi" matches any sub-type)
 * @param scsiUnitNumber SCSI unit number the disk will use; selects which controller
 * @return the matching controller's device key, or -1 when not found
 */
public int getScsiDiskControllerKeyNoException(String diskController, int scsiUnitNumber) throws Exception {
    List<VirtualDevice> devices = (List<VirtualDevice>)_context.getVimClient().getDynamicProperty(_mor, "config.hardware.device");
    if (CollectionUtils.isNotEmpty(devices) && scsiUnitNumber >= 0) {
        // Which controller (by order of appearance) should host this unit number.
        int requiredScsiController = scsiUnitNumber / VmwareHelper.MAX_ALLOWED_DEVICES_SCSI_CONTROLLER;
        int scsiControllerDeviceCount = 0;
        DiskControllerType diskControllerType = DiskControllerType.getType(diskController);
        for (VirtualDevice device : devices) {
            if ((diskControllerType == DiskControllerType.lsilogic || diskControllerType == DiskControllerType.scsi) && device instanceof VirtualLsiLogicController) {
                if (scsiControllerDeviceCount == requiredScsiController) {
                    if (isValidScsiDiskController((VirtualLsiLogicController)device)) {
                        return ((VirtualLsiLogicController)device).getKey();
                    }
                    break;
                }
                scsiControllerDeviceCount++;
            } else if ((diskControllerType == DiskControllerType.lsisas1068 || diskControllerType == DiskControllerType.scsi) && device instanceof VirtualLsiLogicSASController) {
                if (scsiControllerDeviceCount == requiredScsiController) {
                    if (isValidScsiDiskController((VirtualLsiLogicSASController)device)) {
                        return ((VirtualLsiLogicSASController)device).getKey();
                    }
                    break;
                }
                scsiControllerDeviceCount++;
            } else if ((diskControllerType == DiskControllerType.pvscsi || diskControllerType == DiskControllerType.scsi) && device instanceof ParaVirtualSCSIController) {
                if (scsiControllerDeviceCount == requiredScsiController) {
                    if (isValidScsiDiskController((ParaVirtualSCSIController)device)) {
                        return ((ParaVirtualSCSIController)device).getKey();
                    }
                    break;
                }
                scsiControllerDeviceCount++;
            } else if ((diskControllerType == DiskControllerType.buslogic || diskControllerType == DiskControllerType.scsi) && device instanceof VirtualBusLogicController) {
                if (scsiControllerDeviceCount == requiredScsiController) {
                    if (isValidScsiDiskController((VirtualBusLogicController)device)) {
                        return ((VirtualBusLogicController)device).getKey();
                    }
                    break;
                }
                scsiControllerDeviceCount++;
            }
        }
    }
    return -1;
}
/**
 * Returns the next free device (unit) number on the VM's first valid SCSI
 * controller.
 *
 * @return the next available SCSI device number
 */
public int getNextScsiDiskDeviceNumber() throws Exception {
    return getNextDeviceNumber(getScsiDeviceControllerKey());
}
/**
 * Finds the device key of the first valid SCSI controller on this VM,
 * failing when none is present.
 *
 * @return the controller's device key
 * @throws Exception when no valid SCSI controller is configured
 */
public int getScsiDeviceControllerKey() throws Exception {
    List<VirtualDevice> hardwareDevices = _context.getVimClient().getDynamicProperty(_mor, "config.hardware.device");
    if (hardwareDevices != null) {
        for (VirtualDevice hardwareDevice : hardwareDevices) {
            if (hardwareDevice instanceof VirtualSCSIController && isValidScsiDiskController((VirtualSCSIController)hardwareDevice)) {
                return hardwareDevice.getKey();
            }
        }
    }
    assert (false);
    throw new Exception("SCSI Controller Not Found");
}
/**
 * Finds the device key of the first valid SCSI controller on this VM.
 *
 * @return the controller's device key, or -1 when none is present
 */
public int getScsiDeviceControllerKeyNoException() throws Exception {
    List<VirtualDevice> hardwareDevices = _context.getVimClient().getDynamicProperty(_mor, "config.hardware.device");
    if (hardwareDevices == null) {
        return -1;
    }
    for (VirtualDevice hardwareDevice : hardwareDevices) {
        if (hardwareDevice instanceof VirtualSCSIController && isValidScsiDiskController((VirtualSCSIController)hardwareDevice)) {
            return hardwareDevice.getKey();
        }
    }
    return -1;
}
/**
 * Adds LsiLogic parallel SCSI controllers until the required count is
 * reached, starting at the given bus number. No-op when an LsiLogic
 * controller already exists.
 *
 * @param count           total number of LsiLogic controllers required
 * @param availableBusNum first free SCSI bus number to use
 * @throws Exception if the reconfigure task fails
 */
public void ensureLsiLogicDeviceControllers(int count, int availableBusNum) throws Exception {
    int scsiControllerKey = getLsiLogicDeviceControllerKeyNoException();
    if (scsiControllerKey < 0) {
        VirtualMachineConfigSpec vmConfig = new VirtualMachineConfigSpec();
        int busNum = availableBusNum;
        while (busNum < count) {
            VirtualLsiLogicController scsiController = new VirtualLsiLogicController();
            scsiController.setSharedBus(VirtualSCSISharing.NO_SHARING);
            scsiController.setBusNumber(busNum);
            // Negative temporary key for a new device; vSphere assigns the real key.
            scsiController.setKey(busNum - VmwareHelper.MAX_SCSI_CONTROLLER_COUNT);
            VirtualDeviceConfigSpec scsiControllerSpec = new VirtualDeviceConfigSpec();
            scsiControllerSpec.setDevice(scsiController);
            scsiControllerSpec.setOperation(VirtualDeviceConfigSpecOperation.ADD);
            vmConfig.getDeviceChange().add(scsiControllerSpec);
            busNum++;
        }
        // FIX: configureVm() returns true on success (see plugDevice()/tearDownDevice()
        // in this class); the previous check was inverted and threw on success.
        if (!configureVm(vmConfig)) {
            throw new Exception("Unable to add Lsi Logic controllers to the VM " + getName());
        } else {
            s_logger.info("Successfully added " + count + " LsiLogic Parallel SCSI controllers.");
        }
    }
}
/**
 * Returns the device key of the first LsiLogic controller on the VM, or -1 when
 * none is attached.
 */
private int getLsiLogicDeviceControllerKeyNoException() throws Exception {
    List<VirtualDevice> hardware = _context.getVimClient().getDynamicProperty(_mor, "config.hardware.device");
    if (hardware == null) {
        return -1;
    }
    for (VirtualDevice candidate : hardware) {
        if (candidate instanceof VirtualLsiLogicController) {
            return candidate.getKey();
        }
    }
    return -1;
}
/**
 * Ensures the VM has at least one SCSI controller usable for disks, adding a single
 * LsiLogic controller on bus 0 when none exists.
 *
 * @throws Exception if the reconfigure task fails
 */
public void ensureScsiDeviceController() throws Exception {
    int scsiControllerKey = getScsiDeviceControllerKeyNoException();
    if (scsiControllerKey < 0) {
        VirtualMachineConfigSpec vmConfig = new VirtualMachineConfigSpec();
        // Scsi controller
        VirtualLsiLogicController scsiController = new VirtualLsiLogicController();
        scsiController.setSharedBus(VirtualSCSISharing.NO_SHARING);
        scsiController.setBusNumber(0);
        scsiController.setKey(1);
        VirtualDeviceConfigSpec scsiControllerSpec = new VirtualDeviceConfigSpec();
        scsiControllerSpec.setDevice(scsiController);
        scsiControllerSpec.setOperation(VirtualDeviceConfigSpecOperation.ADD);
        vmConfig.getDeviceChange().add(scsiControllerSpec);
        // BUG FIX: configureVm() returns true on success; the previous condition
        // threw exactly when the controller WAS added successfully.
        if (!configureVm(vmConfig)) {
            throw new Exception("Unable to add Scsi controller");
        }
    }
}
/**
 * Adds LsiLogic SCSI controllers to the VM until {@code count} controllers exist,
 * starting at bus number {@code availableBusNum}. No-op when a usable SCSI
 * controller already exists.
 *
 * @param count           total number of controllers desired
 * @param availableBusNum first free SCSI bus number to start adding at
 * @throws Exception if the reconfigure task fails
 */
public void ensureScsiDeviceControllers(int count, int availableBusNum) throws Exception {
    int scsiControllerKey = getScsiDeviceControllerKeyNoException();
    if (scsiControllerKey < 0) {
        VirtualMachineConfigSpec vmConfig = new VirtualMachineConfigSpec();
        int busNum = availableBusNum;
        while (busNum < count) {
            VirtualLsiLogicController scsiController = new VirtualLsiLogicController();
            scsiController.setSharedBus(VirtualSCSISharing.NO_SHARING);
            scsiController.setBusNumber(busNum);
            // Temporary negative key; vCenter assigns the real key during reconfigure.
            scsiController.setKey(busNum - VmwareHelper.MAX_SCSI_CONTROLLER_COUNT);
            VirtualDeviceConfigSpec scsiControllerSpec = new VirtualDeviceConfigSpec();
            scsiControllerSpec.setDevice(scsiController);
            scsiControllerSpec.setOperation(VirtualDeviceConfigSpecOperation.ADD);
            vmConfig.getDeviceChange().add(scsiControllerSpec);
            busNum++;
        }
        // BUG FIX: configureVm() returns true on success. The previous condition was
        // inverted — it threw on success and logged success on failure.
        if (!configureVm(vmConfig)) {
            throw new Exception("Unable to add Scsi controllers to the VM " + getName());
        } else {
            s_logger.info("Successfully added " + count + " SCSI controllers.");
        }
    }
}
/**
 * Decides whether the given SCSI controller can accept another disk: it must be
 * non-null, have a free device slot, and sit on a bus number within the supported
 * controller count.
 */
private boolean isValidScsiDiskController(VirtualSCSIController scsiDiskController) {
    if (scsiDiskController == null) {
        return false;
    }
    List<Integer> attachedDisks = scsiDiskController.getDevice();
    boolean hasFreeSlot = attachedDisks != null
            && attachedDisks.size() < VmwareHelper.MAX_SUPPORTED_DEVICES_SCSI_CONTROLLER;
    boolean busInRange = scsiDiskController.getBusNumber() < VmwareHelper.MAX_SCSI_CONTROLLER_COUNT;
    return hasFreeSlot && busInRange;
}
// return pair of VirtualDisk and disk device bus name(ide0:0, etc)
/**
 * Looks up the disk device whose flat-ver2 backing chain matches the given datastore path.
 * An exact (case-insensitive) base-name match anywhere in a disk's backing chain wins
 * immediately; otherwise devices whose backing contains the snapshot-delta-trimmed base
 * name are collected, and a single partial match is accepted. Returns null on no match
 * or when multiple partial matches exist (ambiguous).
 */
public Pair<VirtualDisk, String> getDiskDevice(String vmdkDatastorePath) throws Exception {
    final String zeroLengthString = "";
    List<VirtualDevice> devices = _context.getVimClient().getDynamicProperty(_mor, "config.hardware.device");
    ArrayList<Pair<VirtualDisk, String>> partialMatchingDiskDevices = new ArrayList<>();
    DatastoreFile dsSrcFile = new DatastoreFile(vmdkDatastorePath);
    String srcBaseName = dsSrcFile.getFileBaseName();
    // Trim the snapshot-delta postfix so delta files still match their base volume.
    String trimmedSrcBaseName = VmwareHelper.trimSnapshotDeltaPostfix(srcBaseName);
    String srcDatastoreName = dsSrcFile.getDatastoreName() != null ? dsSrcFile.getDatastoreName() : zeroLengthString;
    s_logger.info("Look for disk device info for volume : " + vmdkDatastorePath + " with base name: " + srcBaseName);
    if (devices != null && devices.size() > 0) {
        for (VirtualDevice device : devices) {
            if (device instanceof VirtualDisk) {
                s_logger.info("Test against disk device, controller key: " + device.getControllerKey() + ", unit number: " + device.getUnitNumber());
                VirtualDeviceBackingInfo backingInfo = device.getBacking();
                if (backingInfo instanceof VirtualDiskFlatVer2BackingInfo) {
                    VirtualDiskFlatVer2BackingInfo diskBackingInfo = (VirtualDiskFlatVer2BackingInfo)backingInfo;
                    // Walk the entire parent chain: a snapshot's top backing may not be
                    // the file we were asked about.
                    do {
                        s_logger.info("Test against disk backing : " + diskBackingInfo.getFileName());
                        DatastoreFile dsBackingFile = new DatastoreFile(diskBackingInfo.getFileName());
                        String backingDatastoreName = dsBackingFile.getDatastoreName() != null ? dsBackingFile.getDatastoreName() : zeroLengthString;
                        // When the caller gave no datastore, ignore the backing's datastore too.
                        if (srcDatastoreName.equals(zeroLengthString)) {
                            backingDatastoreName = zeroLengthString;
                        }
                        if (srcDatastoreName.equalsIgnoreCase(backingDatastoreName)) {
                            String backingBaseName = dsBackingFile.getFileBaseName();
                            if (backingBaseName.equalsIgnoreCase(srcBaseName)) {
                                String deviceNumbering = getDeviceBusName(devices, device);
                                s_logger.info("Disk backing : " + diskBackingInfo.getFileName() + " matches ==> " + deviceNumbering);
                                registerVirtualDisk((VirtualDisk) device, dsBackingFile);
                                return new Pair<>((VirtualDisk)device, deviceNumbering);
                            }
                            // Remember near-misses; used only if no exact match exists.
                            if (backingBaseName.contains(trimmedSrcBaseName)) {
                                String deviceNumbering = getDeviceBusName(devices, device);
                                partialMatchingDiskDevices.add(new Pair<>((VirtualDisk)device, deviceNumbering));
                            }
                        }
                        diskBackingInfo = diskBackingInfo.getParent();
                    } while (diskBackingInfo != null);
                }
            }
        }
    }
    // No disk device was found with an exact match for the volume path, hence look for disk device that matches the trimmed name.
    s_logger.info("No disk device with an exact match found for volume : " + vmdkDatastorePath + ". Look for disk device info against trimmed base name: " + srcBaseName);
    if (partialMatchingDiskDevices != null) {
        if (partialMatchingDiskDevices.size() == 1) {
            VirtualDiskFlatVer2BackingInfo matchingDiskBackingInfo = (VirtualDiskFlatVer2BackingInfo)partialMatchingDiskDevices.get(0).first().getBacking();
            s_logger.info("Disk backing : " + matchingDiskBackingInfo.getFileName() + " matches ==> " + partialMatchingDiskDevices.get(0).second());
            return partialMatchingDiskDevices.get(0);
        } else if (partialMatchingDiskDevices.size() > 1) {
            // Ambiguous: refusing to guess between multiple partially-matching disks.
            s_logger.warn("Disk device info lookup for volume: " + vmdkDatastorePath + " failed as multiple disk devices were found to match" +
                " volume's trimmed base name: " + trimmedSrcBaseName);
            return null;
        }
    }
    s_logger.warn("Disk device info lookup for volume: " + vmdkDatastorePath + " failed as no matching disk device found");
    return null;
}
// return pair of VirtualDisk and disk device bus name(ide0:0, etc)
/**
 * Looks up a disk device for the given datastore path, either by exact
 * (case-insensitive) base-name match or by containment of the snapshot-delta-trimmed
 * base name, depending on {@code matchExactly}. Unlike the single-argument overload,
 * this variant never compares datastore names and returns the first match found.
 *
 * @return the matching disk paired with its bus name (e.g. "scsi0:1"), or null
 */
public Pair<VirtualDisk, String> getDiskDevice(String vmdkDatastorePath, boolean matchExactly) throws Exception {
    List<VirtualDevice> devices = _context.getVimClient().getDynamicProperty(_mor, "config.hardware.device");
    DatastoreFile dsSrcFile = new DatastoreFile(vmdkDatastorePath);
    String srcBaseName = dsSrcFile.getFileBaseName();
    // Trim the snapshot-delta postfix so delta files still match their base volume.
    String trimmedSrcBaseName = VmwareHelper.trimSnapshotDeltaPostfix(srcBaseName);
    if (matchExactly) {
        s_logger.info("Look for disk device info from volume : " + vmdkDatastorePath + " with base name: " + srcBaseName);
    } else {
        s_logger.info("Look for disk device info from volume : " + vmdkDatastorePath + " with trimmed base name: " + trimmedSrcBaseName);
    }
    if (devices != null && devices.size() > 0) {
        for (VirtualDevice device : devices) {
            if (device instanceof VirtualDisk) {
                s_logger.info("Test against disk device, controller key: " + device.getControllerKey() + ", unit number: " + device.getUnitNumber());
                VirtualDeviceBackingInfo backingInfo = ((VirtualDisk)device).getBacking();
                if (backingInfo instanceof VirtualDiskFlatVer2BackingInfo) {
                    VirtualDiskFlatVer2BackingInfo diskBackingInfo = (VirtualDiskFlatVer2BackingInfo)backingInfo;
                    // Walk the whole parent chain: any link may carry the wanted file.
                    do {
                        s_logger.info("Test against disk backing : " + diskBackingInfo.getFileName());
                        DatastoreFile dsBackingFile = new DatastoreFile(diskBackingInfo.getFileName());
                        String backingBaseName = dsBackingFile.getFileBaseName();
                        if (matchExactly) {
                            if (backingBaseName.equalsIgnoreCase(srcBaseName)) {
                                String deviceNumbering = getDeviceBusName(devices, device);
                                s_logger.info("Disk backing : " + diskBackingInfo.getFileName() + " matches ==> " + deviceNumbering);
                                registerVirtualDisk((VirtualDisk) device, dsBackingFile);
                                return new Pair<VirtualDisk, String>((VirtualDisk)device, deviceNumbering);
                            }
                        } else {
                            if (backingBaseName.contains(trimmedSrcBaseName)) {
                                String deviceNumbering = getDeviceBusName(devices, device);
                                s_logger.info("Disk backing : " + diskBackingInfo.getFileName() + " matches ==> " + deviceNumbering);
                                registerVirtualDisk((VirtualDisk) device, dsBackingFile);
                                return new Pair<VirtualDisk, String>((VirtualDisk)device, deviceNumbering);
                            }
                        }
                        diskBackingInfo = diskBackingInfo.getParent();
                    } while (diskBackingInfo != null);
                }
            }
        }
    }
    return null;
}
/**
 * Registers the disk's backing file as a first-class disk (FCD) when the disk has
 * no vDisk id yet, and stores the resulting id on the device. Registration failures
 * are logged and swallowed deliberately: the caller's main operation must continue.
 *
 * @param device        the disk to register; skipped when it already has a vDisk id
 * @param dsBackingFile the datastore file backing the disk
 */
public void registerVirtualDisk(VirtualDisk device, DatastoreFile dsBackingFile) {
    if (((VirtualDisk) device).getVDiskId() == null) {
        try {
            s_logger.debug("vDiskid does not exist for volume " + dsBackingFile.getFileName() + " registering the disk now");
            VirtualStorageObjectManagerMO vStorageObjectManagerMO = new VirtualStorageObjectManagerMO(getOwnerDatacenter().first().getContext());
            VStorageObject vStorageObject = vStorageObjectManagerMO.registerVirtualDisk(dsBackingFile, null, getOwnerDatacenter().first().getName());
            VStorageObjectConfigInfo diskConfigInfo = vStorageObject.getConfig();
            ((VirtualDisk) device).setVDiskId(diskConfigInfo.getId());
        } catch (Exception e) {
            // Best-effort: the FCD id is only an identifier; never fail the caller for it.
            s_logger.warn("Exception while trying to register a disk as first class disk to get the unique identifier, main operation still continues: " + e.getMessage());
        }
    }
}
/**
 * Returns the file name of the current top backing of the disk sitting at the given
 * bus position (e.g. "scsi0:1"), or null when no flat-ver2 disk occupies that slot.
 */
public String getDiskCurrentTopBackingFileInChain(String deviceBusName) throws Exception {
    List<VirtualDevice> hardware = _context.getVimClient().getDynamicProperty(_mor, "config.hardware.device");
    if (hardware == null) {
        return null;
    }
    for (VirtualDevice dev : hardware) {
        if (!(dev instanceof VirtualDisk)) {
            continue;
        }
        s_logger.info("Test against disk device, controller key: " + dev.getControllerKey() + ", unit number: " + dev.getUnitNumber());
        VirtualDeviceBackingInfo backing = ((VirtualDisk)dev).getBacking();
        if (backing instanceof VirtualDiskFlatVer2BackingInfo
                && getDeviceBusName(hardware, dev).equals(deviceBusName)) {
            return ((VirtualDiskFlatVer2BackingInfo)backing).getFileName();
        }
    }
    return null;
}
/**
 * Returns the disk attached at the given bus position (e.g. "ide0:0"), or null
 * when no disk occupies that slot.
 */
public VirtualDisk getDiskDeviceByDeviceBusName(String deviceBusName) throws Exception {
    List<VirtualDevice> hardware = _context.getVimClient().getDynamicProperty(_mor, "config.hardware.device");
    if (hardware == null) {
        return null;
    }
    for (VirtualDevice dev : hardware) {
        if (dev instanceof VirtualDisk && getDeviceBusName(hardware, dev).equals(deviceBusName)) {
            return (VirtualDisk)dev;
        }
    }
    return null;
}
/**
 * Builds a disk-info map of every flat-ver2 disk on the VM: each disk contributes
 * its bus name plus every file in its backing chain (top to base).
 *
 * @return a builder populated with bus-name/backing-file entries for all disks
 */
public VirtualMachineDiskInfoBuilder getDiskInfoBuilder() throws Exception {
    VirtualMachineDiskInfoBuilder builder = new VirtualMachineDiskInfoBuilder();
    List<VirtualDevice> devices = _context.getVimClient().getDynamicProperty(_mor, "config.hardware.device");
    if (devices != null && devices.size() > 0) {
        for (VirtualDevice device : devices) {
            if (device instanceof VirtualDisk) {
                VirtualDeviceBackingInfo backingInfo = ((VirtualDisk)device).getBacking();
                if (backingInfo instanceof VirtualDiskFlatVer2BackingInfo) {
                    VirtualDiskFlatVer2BackingInfo diskBackingInfo = (VirtualDiskFlatVer2BackingInfo)backingInfo;
                    // BUG FIX: register the first-class disk from the top-of-chain backing
                    // BEFORE walking the parent chain. The old code dereferenced
                    // diskBackingInfo after the while loop, where it is always null (NPE).
                    DatastoreFile dsBackingFile = new DatastoreFile(diskBackingInfo.getFileName());
                    registerVirtualDisk((VirtualDisk) device, dsBackingFile);
                    while (diskBackingInfo != null) {
                        String deviceBusName = getDeviceBusName(devices, device);
                        builder.addDisk(deviceBusName, diskBackingInfo.getFileName());
                        diskBackingInfo = diskBackingInfo.getParent();
                    }
                }
            }
        }
    }
    return builder;
}
/**
 * Returns, for every flat-ver2 disk on the VM, a pair of the device key and the
 * managed object reference of the datastore backing it. Disks with other backing
 * types are skipped.
 */
public List<Pair<Integer, ManagedObjectReference>> getAllDiskDatastores() throws Exception {
    List<Pair<Integer, ManagedObjectReference>> disks = new ArrayList<>();
    List<VirtualDevice> devices = _context.getVimClient().getDynamicProperty(_mor, "config.hardware.device");
    if (devices != null && devices.size() > 0) {
        for (VirtualDevice device : devices) {
            if (device instanceof VirtualDisk) {
                VirtualDeviceBackingInfo backingInfo = ((VirtualDisk)device).getBacking();
                if (backingInfo instanceof VirtualDiskFlatVer2BackingInfo) {
                    VirtualDiskFlatVer2BackingInfo diskBackingInfo = (VirtualDiskFlatVer2BackingInfo)backingInfo;
                    DatastoreFile dsBackingFile = new DatastoreFile(diskBackingInfo.getFileName());
                    registerVirtualDisk((VirtualDisk) device, dsBackingFile);
                    // Autoboxing instead of the deprecated new Integer(...) constructor.
                    disks.add(new Pair<>(device.getKey(), diskBackingInfo.getDatastore()));
                }
            }
        }
    }
    return disks;
}
@Deprecated
/**
 * Resolves the full backing-file chain of a disk as (datastore path, datastore MOR)
 * pairs, from the current top down to the base. When the SDK-reported chain ends, the
 * chain is continued by downloading each VMDK descriptor and following its
 * parentFileName entry, because vSphere may not expose the full chain after snapshot
 * operations.
 *
 * @param disk        the disk whose chain to resolve; must use flat-ver2 backing
 * @param followChain when false, only the top backing is returned
 * @throws Exception when the disk does not use flat-ver2 backing
 */
public List<Pair<String, ManagedObjectReference>> getDiskDatastorePathChain(VirtualDisk disk, boolean followChain) throws Exception {
    VirtualDeviceBackingInfo backingInfo = disk.getBacking();
    if (!(backingInfo instanceof VirtualDiskFlatVer2BackingInfo)) {
        throw new Exception("Unsupported VirtualDeviceBackingInfo");
    }
    List<Pair<String, ManagedObjectReference>> pathList = new ArrayList<Pair<String, ManagedObjectReference>>();
    VirtualDiskFlatVer2BackingInfo diskBackingInfo = (VirtualDiskFlatVer2BackingInfo)backingInfo;
    if (!followChain) {
        pathList.add(new Pair<String, ManagedObjectReference>(diskBackingInfo.getFileName(), diskBackingInfo.getDatastore()));
        return pathList;
    }
    Pair<DatacenterMO, String> dcPair = getOwnerDatacenter();
    VirtualMachineFileInfo vmFilesInfo = getFileInfo();
    DatastoreFile snapshotDirFile = new DatastoreFile(vmFilesInfo.getSnapshotDirectory());
    DatastoreFile vmxDirFile = new DatastoreFile(vmFilesInfo.getVmPathName());
    do {
        if (diskBackingInfo.getParent() != null) {
            // SDK still reports a parent: record and descend.
            pathList.add(new Pair<String, ManagedObjectReference>(diskBackingInfo.getFileName(), diskBackingInfo.getDatastore()));
            diskBackingInfo = diskBackingInfo.getParent();
        } else {
            // try getting parent info from VMDK file itself
            byte[] content = null;
            try {
                String url = getContext().composeDatastoreBrowseUrl(dcPair.second(), diskBackingInfo.getFileName());
                content = getContext().getResourceContent(url);
                if (content == null || content.length == 0) {
                    break;
                }
                pathList.add(new Pair<String, ManagedObjectReference>(diskBackingInfo.getFileName(), diskBackingInfo.getDatastore()));
            } catch (Exception e) {
                // if snapshot directory has been changed to place other than default. VMware has a bug
                // that its corresponding disk backing info is not updated correctly. therefore, we will try search
                // in snapshot directory one more time
                DatastoreFile currentFile = new DatastoreFile(diskBackingInfo.getFileName());
                String vmdkFullDsPath = snapshotDirFile.getCompanionPath(currentFile.getFileName());
                String url = getContext().composeDatastoreBrowseUrl(dcPair.second(), vmdkFullDsPath);
                content = getContext().getResourceContent(url);
                if (content == null || content.length == 0) {
                    break;
                }
                pathList.add(new Pair<String, ManagedObjectReference>(vmdkFullDsPath, diskBackingInfo.getDatastore()));
            }
            VmdkFileDescriptor descriptor = new VmdkFileDescriptor();
            descriptor.parse(content);
            if (descriptor.getParentFileName() != null && !descriptor.getParentFileName().isEmpty()) {
                // create a fake one
                VirtualDiskFlatVer2BackingInfo parentDiskBackingInfo = new VirtualDiskFlatVer2BackingInfo();
                parentDiskBackingInfo.setDatastore(diskBackingInfo.getDatastore());
                String parentFileName = descriptor.getParentFileName();
                if (parentFileName.startsWith("/")) {
                    // Absolute descriptor path: only the file name is usable; resolve it
                    // relative to the VM's vmx directory.
                    int fileNameStartPos = parentFileName.lastIndexOf("/");
                    parentFileName = parentFileName.substring(fileNameStartPos + 1);
                    parentDiskBackingInfo.setFileName(vmxDirFile.getCompanionPath(parentFileName));
                } else {
                    parentDiskBackingInfo.setFileName(snapshotDirFile.getCompanionPath(parentFileName));
                }
                diskBackingInfo = parentDiskBackingInfo;
            } else {
                // Base of the chain reached.
                break;
            }
        }
    } while (diskBackingInfo != null);
    return pathList;
}
/**
 * Formats the bus position of a device as "ide&lt;bus&gt;:&lt;unit&gt;" or
 * "scsi&lt;bus&gt;:&lt;unit&gt;" by locating its controller in the device list.
 *
 * @throws Exception when the controller is missing or of an unsupported type
 */
public String getDeviceBusName(List<VirtualDevice> allDevices, VirtualDevice theDevice) throws Exception {
    int controllerKey = theDevice.getControllerKey().intValue();
    for (VirtualDevice candidate : allDevices) {
        if (candidate.getKey() != controllerKey) {
            continue;
        }
        if (candidate instanceof VirtualIDEController) {
            return String.format("ide%d:%d", ((VirtualIDEController)candidate).getBusNumber(), theDevice.getUnitNumber());
        }
        if (candidate instanceof VirtualSCSIController) {
            return String.format("scsi%d:%d", ((VirtualSCSIController)candidate).getBusNumber(), theDevice.getUnitNumber());
        }
        throw new Exception("Device controller is not supported yet");
    }
    throw new Exception("Unable to find device controller");
}
/**
 * Returns all virtual disks attached to the VM, registering each flat-ver2-backed
 * disk as a first-class disk along the way.
 */
public List<VirtualDisk> getVirtualDisks() throws Exception {
    List<VirtualDisk> virtualDisks = new ArrayList<VirtualDisk>();
    List<VirtualDevice> devices = _context.getVimClient().getDynamicProperty(_mor, "config.hardware.device");
    for (VirtualDevice device : devices) {
        if (device instanceof VirtualDisk) {
            VirtualDeviceBackingInfo backingInfo = device.getBacking();
            // BUG FIX: guard the cast — disks with sparse/raw backing previously
            // threw ClassCastException here. Register only flat-ver2 disks (matching
            // the sibling methods), but still return every disk.
            if (backingInfo instanceof VirtualDiskFlatVer2BackingInfo) {
                VirtualDiskFlatVer2BackingInfo diskBackingInfo = (VirtualDiskFlatVer2BackingInfo)backingInfo;
                DatastoreFile dsBackingFile = new DatastoreFile(diskBackingInfo.getFileName());
                registerVirtualDisk((VirtualDisk) device, dsBackingFile);
            }
            virtualDisks.add((VirtualDisk)device);
        }
    }
    return virtualDisks;
}
/**
 * Detaches every disk from the VM except the one whose backing base name equals
 * {@code vmdkBaseName} or whose bus position equals {@code deviceBusName}, then
 * reconfigures the VM in a single task.
 *
 * @param vmdkBaseName  base name of the disk file to keep (case-insensitive)
 * @param deviceBusName bus name of the disk to keep (e.g. "scsi0:0"); may be null
 * @return the backing file names of all detached disks
 * @throws Exception if the reconfigure task fails
 */
public List<String> detachAllDisksExcept(String vmdkBaseName, String deviceBusName) throws Exception {
    List<VirtualDevice> devices = _context.getVimClient().getDynamicProperty(_mor, "config.hardware.device");
    VirtualMachineConfigSpec reConfigSpec = new VirtualMachineConfigSpec();
    List<String> detachedDiskFiles = new ArrayList<String>();
    for (VirtualDevice device : devices) {
        if (device instanceof VirtualDisk) {
            VirtualDeviceConfigSpec deviceConfigSpec = new VirtualDeviceConfigSpec();
            // NOTE(review): assumes every attached disk uses VirtualDiskFlatVer2BackingInfo;
            // any other backing type would throw ClassCastException here — confirm.
            VirtualDiskFlatVer2BackingInfo diskBackingInfo = (VirtualDiskFlatVer2BackingInfo)device.getBacking();
            DatastoreFile dsBackingFile = new DatastoreFile(diskBackingInfo.getFileName());
            String backingBaseName = dsBackingFile.getFileBaseName();
            String deviceNumbering = getDeviceBusName(devices, device);
            if (backingBaseName.equalsIgnoreCase(vmdkBaseName) || (deviceBusName != null && deviceBusName.equals(deviceNumbering))) {
                // This is the disk to keep — skip it (also skips FCD registration below).
                continue;
            } else {
                s_logger.info("Detach " + diskBackingInfo.getFileName() + " from " + getName());
                detachedDiskFiles.add(diskBackingInfo.getFileName());
                deviceConfigSpec.setDevice(device);
                deviceConfigSpec.setOperation(VirtualDeviceConfigSpecOperation.REMOVE);
                reConfigSpec.getDeviceChange().add(deviceConfigSpec);
            }
            registerVirtualDisk((VirtualDisk) device, dsBackingFile);
        }
    }
    if (detachedDiskFiles.size() > 0) {
        ManagedObjectReference morTask = _context.getService().reconfigVMTask(_mor, reConfigSpec);
        boolean result = _context.getVimClient().waitForTask(morTask);
        if (result) {
            _context.waitForTaskProgressDone(morTask);
        } else {
            s_logger.warn("Unable to reconfigure the VM to detach disks");
            throw new Exception("Unable to reconfigure the VM to detach disks");
        }
    }
    return detachedDiskFiles;
}
/** Returns the VM's full virtual hardware device list ("config.hardware.device"). */
public List<VirtualDevice> getAllDeviceList() throws Exception {
    return _context.getVimClient().getDynamicProperty(_mor, "config.hardware.device");
}
/**
 * Returns all disks attached to the VM, registering each flat-ver2-backed disk that
 * lacks a vDisk id as a first-class disk first.
 */
public VirtualDisk[] getAllDiskDevice() throws Exception {
    List<VirtualDisk> deviceList = new ArrayList<VirtualDisk>();
    List<VirtualDevice> devices = _context.getVimClient().getDynamicProperty(_mor, "config.hardware.device");
    if (devices != null && devices.size() > 0) {
        for (VirtualDevice device : devices) {
            if (device instanceof VirtualDisk) {
                // Consistency: reuse registerVirtualDisk() instead of duplicating its
                // FCD-registration logic inline (it already skips disks with a vDisk id
                // and swallows failures so the main operation continues).
                VirtualDeviceBackingInfo backingInfo = device.getBacking();
                if (backingInfo instanceof VirtualDiskFlatVer2BackingInfo) {
                    DatastoreFile dsBackingFile = new DatastoreFile(((VirtualDiskFlatVer2BackingInfo) backingInfo).getFileName());
                    registerVirtualDisk((VirtualDisk) device, dsBackingFile);
                }
                deviceList.add((VirtualDisk)device);
            }
        }
    }
    return deviceList.toArray(new VirtualDisk[0]);
}
/**
 * Returns the disk from {@code allDevices} whose bus name matches {@code busName}
 * (case-insensitive), or null when none does.
 */
public VirtualDisk getDiskDeviceByBusName(List<VirtualDevice> allDevices, String busName) throws Exception {
    for (VirtualDevice candidate : allDevices) {
        if (candidate instanceof VirtualDisk
                && busName.equalsIgnoreCase(getDeviceBusName(allDevices, candidate))) {
            return (VirtualDisk)candidate;
        }
    }
    return null;
}
/**
 * Returns every disk whose disk mode contains "independent" (persistent or
 * nonpersistent), i.e. disks excluded from snapshots. Backing types without a
 * disk mode are treated as non-independent.
 */
public VirtualDisk[] getAllIndependentDiskDevice() throws Exception {
    List<VirtualDisk> independentDisks = new ArrayList<VirtualDisk>();
    for (VirtualDisk disk : getAllDiskDevice()) {
        String diskMode = "";
        VirtualDeviceBackingInfo backing = disk.getBacking();
        if (backing instanceof VirtualDiskFlatVer1BackingInfo) {
            diskMode = ((VirtualDiskFlatVer1BackingInfo)backing).getDiskMode();
        } else if (backing instanceof VirtualDiskFlatVer2BackingInfo) {
            diskMode = ((VirtualDiskFlatVer2BackingInfo)backing).getDiskMode();
        } else if (backing instanceof VirtualDiskRawDiskMappingVer1BackingInfo) {
            diskMode = ((VirtualDiskRawDiskMappingVer1BackingInfo)backing).getDiskMode();
        } else if (backing instanceof VirtualDiskSparseVer1BackingInfo) {
            diskMode = ((VirtualDiskSparseVer1BackingInfo)backing).getDiskMode();
        } else if (backing instanceof VirtualDiskSparseVer2BackingInfo) {
            diskMode = ((VirtualDiskSparseVer2BackingInfo)backing).getDiskMode();
        }
        if (diskMode.contains("independent")) {
            independentDisks.add(disk);
        }
    }
    return independentDisks.toArray(new VirtualDisk[0]);
}
/** Returns the device key of the first IDE controller, or -1 when none exists. */
public int tryGetIDEDeviceControllerKey() throws Exception {
    List<VirtualDevice> hardware = _context.getVimClient().getDynamicProperty(_mor, "config.hardware.device");
    if (hardware == null) {
        return -1;
    }
    for (VirtualDevice candidate : hardware) {
        if (candidate instanceof VirtualIDEController) {
            return candidate.getKey();
        }
    }
    return -1;
}
/**
 * Returns the device key of the first IDE controller.
 *
 * @throws Exception when the VM has no IDE controller
 */
public int getIDEDeviceControllerKey() throws Exception {
    List<VirtualDevice> hardware = _context.getVimClient().getDynamicProperty(_mor, "config.hardware.device");
    if (hardware != null) {
        for (VirtualDevice candidate : hardware) {
            if (candidate instanceof VirtualIDEController) {
                return candidate.getKey();
            }
        }
    }
    assert (false);
    throw new Exception("IDE Controller Not Found");
}
/**
 * Returns the key of the IDE controller responsible for the given unit number
 * (each controller carries MAX_IDE_CONTROLLER_COUNT units).
 *
 * @throws Exception when the required controller does not exist
 */
public int getIDEControllerKey(int ideUnitNumber) throws Exception {
    List<VirtualDevice> hardware = _context.getVimClient().getDynamicProperty(_mor, "config.hardware.device");
    // Map the unit number onto the ordinal of its owning controller.
    int wantedControllerIndex = ideUnitNumber / VmwareHelper.MAX_IDE_CONTROLLER_COUNT;
    int seenControllers = 0;
    if (hardware != null) {
        for (VirtualDevice candidate : hardware) {
            if (candidate instanceof VirtualIDEController) {
                if (seenControllers == wantedControllerIndex) {
                    return candidate.getKey();
                }
                seenControllers++;
            }
        }
    }
    assert (false);
    throw new Exception("IDE Controller Not Found");
}
/** Returns the total number of devices attached across all IDE controllers. */
public int getNumberOfIDEDevices() throws Exception {
    int total = 0;
    List<VirtualDevice> hardware = _context.getVimClient().getDynamicProperty(_mor, "config.hardware.device");
    if (hardware != null) {
        for (VirtualDevice candidate : hardware) {
            if (candidate instanceof VirtualIDEController) {
                total += ((VirtualIDEController)candidate).getDevice().size();
            }
        }
    }
    return total;
}
/**
 * Returns a free unit number (0 or 1) on the given IDE controller.
 *
 * @throws Exception when both IDE units on the controller are already occupied
 */
public int getFreeUnitNumberOnIDEController(int controllerKey) throws Exception {
    List<VirtualDevice> hardware = _context.getVimClient().getDynamicProperty(_mor, "config.hardware.device");
    int attachedDisks = 0;
    int lastSeenUnitNumber = -1;
    if (hardware != null) {
        for (VirtualDevice candidate : hardware) {
            if (candidate instanceof VirtualDisk && (controllerKey == candidate.getControllerKey())) {
                attachedDisks++;
                lastSeenUnitNumber = candidate.getUnitNumber();
            }
        }
    }
    if (attachedDisks == 2) {
        throw new Exception("IDE controller with key [" + controllerKey + "] already has 2 device attached. Cannot attach more than the limit of 2.");
    }
    // Exactly one disk at unit 0 means unit 1 is the free slot; otherwise unit 0.
    return (attachedDisks == 1 && lastSeenUnitNumber == 0) ? 1 : 0;
}
/**
 * Returns the next free unit number on the VM's first IDE controller.
 *
 * @throws Exception when the VM has no IDE controller
 */
public int getNextIDEDeviceNumber() throws Exception {
    return getNextDeviceNumber(getIDEDeviceControllerKey());
}
/** Returns the first CD-ROM device attached to the VM, or null when none exists. */
public VirtualDevice getIsoDevice() throws Exception {
    List<VirtualDevice> hardware = _context.getVimClient().getDynamicProperty(_mor, "config.hardware.device");
    if (hardware == null) {
        return null;
    }
    for (VirtualDevice candidate : hardware) {
        if (candidate instanceof VirtualCdrom) {
            return candidate;
        }
    }
    return null;
}
/** Returns the CD-ROM device with the given device key, or null when not found. */
public VirtualDevice getIsoDevice(int key) throws Exception {
    List<VirtualDevice> hardware = _context.getVimClient().getDynamicProperty(_mor, "config.hardware.device");
    if (hardware == null) {
        return null;
    }
    for (VirtualDevice candidate : hardware) {
        if (candidate instanceof VirtualCdrom && candidate.getKey() == key) {
            return candidate;
        }
    }
    return null;
}
/**
 * Returns the ISO-backed CD-ROM device whose backing file equals {@code filename}.
 * As a fallback, when exactly one ISO-backed CD-ROM exists on the VM, it is returned
 * even if its file name differs (a warning is logged), since it can only be the
 * device the caller means.
 *
 * @return the matching device, or null when none qualifies
 */
public VirtualDevice getIsoDevice(String filename) throws Exception {
    List<VirtualDevice> devices = (List<VirtualDevice>)_context.getVimClient().
        getDynamicProperty(_mor, "config.hardware.device");
    if(devices != null && devices.size() > 0) {
        // Count ISO-backed CD-ROMs up front to enable the single-device fallback.
        long isoDevices = devices.stream()
                .filter(x -> x instanceof VirtualCdrom && x.getBacking() instanceof VirtualCdromIsoBackingInfo)
                .count();
        for(VirtualDevice device : devices) {
            if(device instanceof VirtualCdrom && device.getBacking() instanceof VirtualCdromIsoBackingInfo) {
                if (((VirtualCdromIsoBackingInfo)device.getBacking()).getFileName().equals(filename)) {
                    return device;
                } else if (isoDevices == 1L){
                    s_logger.warn(String.format("VM ISO filename %s differs from the expected filename %s",
                            ((VirtualCdromIsoBackingInfo)device.getBacking()).getFileName(), filename));
                    return device;
                }
            }
        }
    }
    return null;
}
/**
 * Returns the lowest unit number on the given controller that is neither in use
 * nor (for the SCSI disk controller) reserved by VMware (e.g. unit 7).
 *
 * @param controllerKey device key of the controller to scan
 */
public int getNextDeviceNumber(int controllerKey) throws Exception {
    List<VirtualDevice> devices = _context.getVimClient().getDynamicProperty(_mor, "config.hardware.device");
    List<Integer> existingUnitNumbers = new ArrayList<Integer>();
    int deviceNumber = 0;
    // Needed to know whether the reserved-SCSI-unit rule applies to this controller.
    int scsiControllerKey = getScsiDeviceControllerKeyNoException();
    if (devices != null && devices.size() > 0) {
        for (VirtualDevice device : devices) {
            if (device.getControllerKey() != null && device.getControllerKey().intValue() == controllerKey) {
                existingUnitNumbers.add(device.getUnitNumber());
            }
        }
    }
    while (true) {
        // Next device number should be the lowest device number on the key that is not in use and is not reserved.
        if (!existingUnitNumbers.contains(Integer.valueOf(deviceNumber))) {
            if (controllerKey != scsiControllerKey || !VmwareHelper.isReservedScsiDeviceNumber(deviceNumber))
                break;
        }
        ++deviceNumber;
    }
    return deviceNumber;
}
/**
 * Returns all virtual NICs (VirtualEthernetCard) attached to the VM, optionally
 * sorted by unit number ascending; NICs without a unit number sort first (-1).
 *
 * @param sorted whether to sort the result by unit number
 */
private List<VirtualDevice> getNicDevices(boolean sorted) throws Exception {
    List<VirtualDevice> devices = _context.getVimClient().getDynamicProperty(_mor, "config.hardware.device");
    List<VirtualDevice> nics = new ArrayList<VirtualDevice>();
    if (devices != null) {
        for (VirtualDevice device : devices) {
            if (device instanceof VirtualEthernetCard) {
                nics.add(device);
            }
        }
    }
    if (sorted) {
        // Idiom: Comparator.comparingInt replaces the verbose anonymous Comparator;
        // ordering is identical (null unit numbers map to -1).
        nics.sort(Comparator.comparingInt(
                device -> device.getUnitNumber() != null ? device.getUnitNumber().intValue() : -1));
    }
    return nics;
}
/** Returns all virtual NICs sorted by unit number ascending (unknown units first). */
public VirtualDevice[] getSortedNicDevices() throws Exception {
    return getNicDevices(true).toArray(new VirtualDevice[0]);
}
/** Returns all virtual NICs in device-list order (unsorted). */
public VirtualDevice[] getNicDevices() throws Exception {
    return getNicDevices(false).toArray(new VirtualDevice[0]);
}
/**
 * Returns the NIC at the given position in unit-number order, or null when the
 * index is out of range.
 */
public VirtualDevice getNicDeviceByIndex(int index) throws Exception {
    List<VirtualDevice> sortedNics = getNicDevices(true);
    if (index < 0 || index >= sortedNics.size()) {
        return null; // out of range -> not found
    }
    return sortedNics.get(index);
}
/**
 * Finds the NIC attached to a network whose name starts with the given prefix,
 * searching in unit-number order. For NICs backed by a distributed virtual switch,
 * the dvPortGroup name is resolved and compared instead.
 *
 * @return pair of (index in sorted NIC list, NIC device), or (-1, null) when not found
 */
public Pair<Integer, VirtualDevice> getNicDeviceIndex(String networkNamePrefix) throws Exception {
    List<VirtualDevice> nics = getNicDevices(true);
    int index = 0;
    String attachedNetworkSummary;
    String dvPortGroupName;
    for (VirtualDevice nic : nics) {
        attachedNetworkSummary = ((VirtualEthernetCard)nic).getDeviceInfo().getSummary();
        if (attachedNetworkSummary.startsWith(networkNamePrefix)) {
            return new Pair<Integer, VirtualDevice>(new Integer(index), nic);
        } else if (attachedNetworkSummary.endsWith("DistributedVirtualPortBackingInfo.summary") || attachedNetworkSummary.startsWith("DVSwitch")) {
            // DVS-backed NIC: the summary does not carry the network name, so look up
            // the dvPortGroup name explicitly.
            dvPortGroupName = getDvPortGroupName((VirtualEthernetCard)nic);
            if (dvPortGroupName != null && dvPortGroupName.startsWith(networkNamePrefix)) {
                s_logger.debug("Found a dvPortGroup already associated with public NIC.");
                return new Pair<Integer, VirtualDevice>(new Integer(index), nic);
            }
        }
        index++;
    }
    return new Pair<Integer, VirtualDevice>(new Integer(-1), null);
}
/**
 * Resolves the name of the distributed virtual port group backing the given NIC.
 * The NIC must use DVS port backing.
 */
public String getDvPortGroupName(VirtualEthernetCard nic) throws Exception {
    VirtualEthernetCardDistributedVirtualPortBackingInfo dvsBacking =
            (VirtualEthernetCardDistributedVirtualPortBackingInfo)nic.getBacking();
    String portGroupKey = dvsBacking.getPort().getPortgroupKey();
    // The portgroup key doubles as the MOR value of the DistributedVirtualPortgroup.
    ManagedObjectReference portGroupMor = new ManagedObjectReference();
    portGroupMor.setType("DistributedVirtualPortgroup");
    portGroupMor.setValue(portGroupKey);
    return (String)_context.getVimClient().getDynamicProperty(portGroupMor, "name");
}
/**
 * Returns every virtual device that is an instance of at least one of the given
 * classes, preserving device-list order.
 */
public VirtualDevice[] getMatchedDevices(Class<?>[] deviceClasses) throws Exception {
    assert (deviceClasses != null);
    List<VirtualDevice> matched = new ArrayList<VirtualDevice>();
    List<VirtualDevice> hardware = _context.getVimClient().getDynamicProperty(_mor, "config.hardware.device");
    if (hardware != null) {
        for (VirtualDevice candidate : hardware) {
            for (Class<?> wanted : deviceClasses) {
                if (wanted.isInstance(candidate)) {
                    matched.add(candidate);
                    break; // each device is added at most once
                }
            }
        }
    }
    return matched.toArray(new VirtualDevice[0]);
}
/** Mounts the VMware Tools installer ISO into the VM's CD-ROM via vCenter. */
public void mountToolsInstaller() throws Exception {
    _context.getService().mountToolsInstaller(_mor);
}
/**
 * Unmounts the VMware Tools installer ISO from this VM.
 *
 * While the unmount call is in flight, a background task polls the VM for pending
 * questions (for example "msg.cdromdisconnect.locked") and automatically answers "no",
 * so the unmount cannot hang waiting for operator input.
 *
 * @return true if the ISO was detached cleanly; false if the guest kept the cdrom
 *         locked and a VM question had to be declined
 */
public boolean unmountToolsInstaller() throws Exception {
    // Monitor VM questions
    final Boolean[] flags = {false};
    final VirtualMachineMO vmMo = this;
    // Records whether any VM question was observed while unmounting; shared with the
    // monitor thread, so it is a single-element array rather than a local boolean.
    final boolean[] encounterQuestion = new boolean[1];
    encounterQuestion[0] = false;
    Future<?> future = MonitorServiceExecutor.submit(new Runnable() {
        @Override
        public void run() {
            s_logger.info("VM Question monitor started...");
            while (!flags[0]) {
                try {
                    VirtualMachineRuntimeInfo runtimeInfo = vmMo.getRuntimeInfo();
                    VirtualMachineQuestionInfo question = runtimeInfo.getQuestion();
                    if (question != null) {
                        encounterQuestion[0] = true;
                        if (s_logger.isTraceEnabled()) {
                            s_logger.trace("Question id: " + question.getId());
                            s_logger.trace("Question text: " + question.getText());
                        }
                        if (question.getMessage() != null) {
                            for (VirtualMachineMessage msg : question.getMessage()) {
                                if (s_logger.isTraceEnabled()) {
                                    s_logger.trace("msg id: " + msg.getId());
                                    s_logger.trace("msg text: " + msg.getText());
                                }
                                if ("msg.cdromdisconnect.locked".equalsIgnoreCase(msg.getId())) {
                                    s_logger.info("Found that VM has a pending question that we need to answer programmatically, question id: " + msg.getId() +
                                            ", for safe operation we will automatically decline it");
                                    vmMo.answerVM(question.getId(), ANSWER_NO);
                                    break;
                                }
                            }
                        } else if (question.getText() != null) {
                            String text = question.getText();
                            String msgId;
                            String msgText;
                            if (s_logger.isDebugEnabled()) {
                                s_logger.debug("question text : " + text);
                            }
                            String[] tokens = text.split(":");
                            msgId = tokens[0];
                            // Guard against question text without a ":" separator; previously
                            // tokens[1] threw ArrayIndexOutOfBoundsException inside the monitor
                            // thread, silently killing question handling for that iteration.
                            msgText = tokens.length > 1 ? tokens[1] : "";
                            if ("msg.cdromdisconnect.locked".equalsIgnoreCase(msgId)) {
                                s_logger.info("Found that VM has a pending question that we need to answer programmatically, question id: " + question.getId() +
                                        ". Message id : " + msgId + ". Message text : " + msgText + ", for safe operation we will automatically decline it.");
                                vmMo.answerVM(question.getId(), ANSWER_NO);
                            }
                        }
                        ChoiceOption choice = question.getChoice();
                        if (choice != null) {
                            for (ElementDescription info : choice.getChoiceInfo()) {
                                if (s_logger.isTraceEnabled()) {
                                    s_logger.trace("Choice option key: " + info.getKey());
                                    s_logger.trace("Choice option label: " + info.getLabel());
                                }
                            }
                        }
                    }
                } catch (Throwable e) {
                    s_logger.error("Unexpected exception: ", e);
                }
                try {
                    Thread.sleep(1000);
                } catch (InterruptedException e) {
                    s_logger.debug("[ignored] interupted while handling vm question about umount tools install.");
                }
            }
            s_logger.info("VM Question monitor stopped");
        }
    });
    try {
        _context.getService().unmountToolsInstaller(_mor);
    } finally {
        // Stop the monitor loop and cancel the task even if the unmount threw.
        flags[0] = true;
        future.cancel(true);
    }
    if (encounterQuestion[0]) {
        s_logger.warn("cdrom is locked by VM. Failed to detach the ISO.");
        return false;
    } else {
        s_logger.info("Successfully unmounted tools installer from VM.");
        return true;
    }
}
/**
 * Unregisters this VM from vCenter and immediately re-registers it from its existing
 * VMX file, optionally on a different host.
 *
 * @param morHost target host for the re-registration, or null to reuse the host the VM
 *                is currently running on
 * @throws Exception if the register task fails; the failure info from the task is
 *                   included in the exception message
 */
public void redoRegistration(ManagedObjectReference morHost) throws Exception {
// Capture everything needed for re-registration before the VM is unregistered.
String vmName = getVmName();
VirtualMachineFileInfo vmFileInfo = getFileInfo();
boolean isTemplate = isTemplate();
HostMO hostMo;
if (morHost != null)
hostMo = new HostMO(getContext(), morHost);
else
hostMo = getRunningHost();
ManagedObjectReference morFolder = getParentMor();
ManagedObjectReference morPool = hostMo.getHyperHostOwnerResourcePool();
_context.getService().unregisterVM(_mor);
ManagedObjectReference morTask = _context.getService().registerVMTask(morFolder, vmFileInfo.getVmPathName(), vmName, false, morPool, hostMo.getMor());
boolean result = _context.getVimClient().waitForTask(morTask);
if (!result) {
throw new Exception("Unable to register template due to " + TaskMO.getTaskFailureInfo(_context, morTask));
} else {
_context.waitForTaskProgressDone(morTask);
// Registration clears the template flag, so restore it when the VM was a template.
if (isTemplate) {
VirtualMachineMO vmNewRegistration = hostMo.findVmOnHyperHost(vmName);
assert (vmNewRegistration != null);
vmNewRegistration.markAsTemplate();
}
}
}
/**
 * @return the memory hot-plug increment size in MB, read from the VM's
 *         "config.hotPlugMemoryIncrementSize" property
 */
public long getHotAddMemoryIncrementSizeInMb() throws Exception {
return (Long)_context.getVimClient().getDynamicProperty(_mor, "config.hotPlugMemoryIncrementSize");
}
/**
 * @return the memory hot-plug limit in MB, read from the VM's
 *         "config.hotPlugMemoryLimit" property
 */
public long getHotAddMemoryLimitInMb() throws Exception {
return (Long)_context.getVimClient().getDynamicProperty(_mor, "config.hotPlugMemoryLimit");
}
/**
 * @return the configured guest OS identifier ("config.guestId") of this VM
 */
public String getGuestId() throws Exception {
return (String)_context.getVimClient().getDynamicProperty(_mor, "config.guestId");
}
/**
 * Returns the configured number of cores per virtual socket, defaulting to 1 when the
 * vCenter API is older than 5.0 (property unsupported) or the property is unset.
 */
public int getCoresPerSocket() throws Exception {
// number of cores per socket is 1 in case of ESXi. It's not defined explicitly and the property is support since vSphere API 5.0.
String apiVersion = HypervisorHostHelper.getVcenterApiVersion(_context);
if (apiVersion.compareTo("5.0") < 0) {
return 1;
}
Integer coresPerSocket = (Integer)_context.getVimClient().getDynamicProperty(_mor, "config.hardware.numCoresPerSocket");
return coresPerSocket != null ? coresPerSocket : 1;
}
/**
 * @return the virtual hardware version supported by this VM's environment
 *         (e.g. 7, 8, ...), taken from the hardware option descriptor
 */
public int getVirtualHardwareVersion() throws Exception {
VirtualHardwareOption vhOption = getVirtualHardwareOption();
return vhOption.getHwVersion();
}
/**
 * Queries this VM's environment browser for its hardware option descriptor.
 *
 * @return the hardware options of the VM's config option set
 */
public VirtualHardwareOption getVirtualHardwareOption() throws Exception {
VirtualMachineConfigOption vmConfigOption = _context.getService().queryConfigOption(getEnvironmentBrowser(), null, null);
return vmConfigOption.getHardwareOptions();
}
/**
 * Lazily resolves and caches the VM's environment browser managed object reference.
 * Not thread-safe: concurrent first calls may each fetch the property, but the result
 * is the same reference, so the race is benign.
 */
private ManagedObjectReference getEnvironmentBrowser() throws Exception {
if (_vmEnvironmentBrowser == null) {
_vmEnvironmentBrowser = _context.getVimClient().getMoRefProp(_mor, "environmentBrowser");
}
return _vmEnvironmentBrowser;
}
/**
 * Determines whether CPU hot-add is available for this VM.
 *
 * Both conditions must hold: the guest OS descriptor reports CPU hot-add support, and
 * the virtual hardware version allows it (version 8+, or version 7 with exactly one
 * core per socket — hot adding multi-core vCPUs is not allowed on hardware version 7).
 *
 * @param guestOsId guest OS identifier used to look up the guest OS descriptor
 * @return true only if both the guest OS and the virtual hardware support CPU hot-add
 */
public boolean isCpuHotAddSupported(String guestOsId) throws Exception {
    boolean guestOsSupportsCpuHotAdd = false;
    boolean virtualHardwareSupportsCpuHotAdd = false;
    GuestOsDescriptor guestOsDescriptor;
    int virtualHardwareVersion;
    int numCoresPerSocket;

    guestOsDescriptor = getGuestOsDescriptor(guestOsId);
    virtualHardwareVersion = getVirtualHardwareVersion();

    // Check if guest operating system supports cpu hotadd.
    // Null-guard added for parity with isMemoryHotAddSupported(): getGuestOsDescriptor()
    // can return null for an unrecognized guest OS id, which previously caused an NPE here.
    if (guestOsDescriptor != null && guestOsDescriptor.isSupportsCpuHotAdd()) {
        guestOsSupportsCpuHotAdd = true;
    }

    // Check if virtual machine is using hardware version 8 or later.
    // If hardware version is 7, then only 1 core per socket is supported. Hot adding multi-core vcpus is not allowed if hardware version is 7.
    if (virtualHardwareVersion >= 8) {
        virtualHardwareSupportsCpuHotAdd = true;
    } else if (virtualHardwareVersion == 7) {
        // Check if virtual machine has only 1 core per socket.
        numCoresPerSocket = getCoresPerSocket();
        if (numCoresPerSocket == 1) {
            virtualHardwareSupportsCpuHotAdd = true;
        }
    }
    return guestOsSupportsCpuHotAdd && virtualHardwareSupportsCpuHotAdd;
}
/**
 * Determines whether memory hot-add is available for this VM: the guest OS descriptor
 * must report memory hot-add support and the virtual hardware version must be 7 or later.
 *
 * @param guestOsId guest OS identifier used to look up the guest OS descriptor
 * @return true only if both the guest OS and the virtual hardware allow memory hot-add
 */
public boolean isMemoryHotAddSupported(String guestOsId) throws Exception {
    GuestOsDescriptor descriptor = getGuestOsDescriptor(guestOsId);
    int hardwareVersion = getVirtualHardwareVersion();

    // Guest OS must advertise memory hot-add; a missing descriptor counts as unsupported.
    boolean guestOsOk = descriptor != null && descriptor.isSupportsMemoryHotAdd();

    // Virtual hardware version 7 or later is required for memory hot-add.
    boolean hardwareOk = hardwareVersion >= 7;

    return guestOsOk && hardwareOk;
}
/**
 * Ensures the VM has LsiLogic SAS SCSI controllers. If no LsiLogic SAS controller exists
 * yet, adds controllers for bus numbers [availableBusNum, count).
 *
 * @param count           exclusive upper bound on the bus numbers to create
 * @param availableBusNum first free bus number to start adding controllers at
 * @throws Exception if reconfiguring the VM fails
 */
public void ensureLsiLogicSasDeviceControllers(int count, int availableBusNum) throws Exception {
int scsiControllerKey = getLsiLogicSasDeviceControllerKeyNoException();
if (scsiControllerKey < 0) {
VirtualMachineConfigSpec vmConfig = new VirtualMachineConfigSpec();
int busNum = availableBusNum;
while (busNum < count) {
VirtualLsiLogicSASController scsiController = new VirtualLsiLogicSASController();
scsiController.setSharedBus(VirtualSCSISharing.NO_SHARING);
scsiController.setBusNumber(busNum);
// Negative placeholder key; vCenter assigns the real device key on reconfigure.
scsiController.setKey(busNum - VmwareHelper.MAX_SCSI_CONTROLLER_COUNT);
VirtualDeviceConfigSpec scsiControllerSpec = new VirtualDeviceConfigSpec();
scsiControllerSpec.setDevice(scsiController);
scsiControllerSpec.setOperation(VirtualDeviceConfigSpecOperation.ADD);
vmConfig.getDeviceChange().add(scsiControllerSpec);
busNum++;
}
// NOTE(review): throwing when configureVm() returns true looks inverted relative to the
// usual "true == success" convention — confirm configureVm()'s return semantics. The
// sibling ensureBusLogicDeviceControllers() uses the same polarity, so if it is a bug it
// exists in both places.
if (configureVm(vmConfig)) {
throw new Exception("Unable to add Scsi controller of type LsiLogic SAS.");
}
}
}
/**
 * Finds the device key of the first LsiLogic SAS controller attached to this VM.
 *
 * @return the controller's device key, or -1 when no LsiLogic SAS controller exists
 */
private int getLsiLogicSasDeviceControllerKeyNoException() throws Exception {
    List<VirtualDevice> devices = (List<VirtualDevice>)_context.getVimClient().
            getDynamicProperty(_mor, "config.hardware.device");

    // No devices reported at all — nothing to search.
    if (devices == null || devices.isEmpty()) {
        return -1;
    }

    for (VirtualDevice device : devices) {
        if (device instanceof VirtualLsiLogicSASController) {
            return device.getKey();
        }
    }
    return -1;
}
/**
 * Ensures the VM has BusLogic SCSI controllers. If no BusLogic controller exists yet,
 * adds controllers for bus numbers [availableBusNum, count).
 *
 * @param count           exclusive upper bound on the bus numbers to create
 * @param availableBusNum first free bus number to start adding controllers at
 * @throws Exception if reconfiguring the VM fails
 */
public void ensureBusLogicDeviceControllers(int count, int availableBusNum) throws Exception {
int scsiControllerKey = getBusLogicDeviceControllerKeyNoException();
if (scsiControllerKey < 0) {
VirtualMachineConfigSpec vmConfig = new VirtualMachineConfigSpec();
int busNum = availableBusNum;
while (busNum < count) {
VirtualBusLogicController scsiController = new VirtualBusLogicController();
scsiController.setSharedBus(VirtualSCSISharing.NO_SHARING);
scsiController.setBusNumber(busNum);
// Negative placeholder key; vCenter assigns the real device key on reconfigure.
scsiController.setKey(busNum - VmwareHelper.MAX_SCSI_CONTROLLER_COUNT);
VirtualDeviceConfigSpec scsiControllerSpec = new VirtualDeviceConfigSpec();
scsiControllerSpec.setDevice(scsiController);
scsiControllerSpec.setOperation(VirtualDeviceConfigSpecOperation.ADD);
vmConfig.getDeviceChange().add(scsiControllerSpec);
busNum++;
}
// NOTE(review): this branch treats a true return from configureVm() as failure and a
// false return as success — confirm configureVm()'s return semantics; the polarity
// matches ensureLsiLogicSasDeviceControllers() but looks inverted.
if (configureVm(vmConfig)) {
throw new Exception("Unable to add Scsi BusLogic controllers to the VM " + getName());
} else {
s_logger.info("Successfully added " + count + " SCSI BusLogic controllers.");
}
}
}
/**
 * Finds the device key of the first BusLogic controller attached to this VM.
 *
 * @return the controller's device key, or -1 when no BusLogic controller exists
 */
private int getBusLogicDeviceControllerKeyNoException() throws Exception {
    List<VirtualDevice> devices = (List<VirtualDevice>)_context.getVimClient().
            getDynamicProperty(_mor, "config.hardware.device");

    // No devices reported at all — nothing to search.
    if (devices == null || devices.isEmpty()) {
        return -1;
    }

    for (VirtualDevice device : devices) {
        if (device instanceof VirtualBusLogicController) {
            return device.getKey();
        }
    }
    return -1;
}
/**
 * Summarizes the VM's SCSI controller configuration.
 *
 * @return a Ternary of (number of SCSI controllers, highest bus number seen or -1 when
 *         none, controller type of the last SCSI controller encountered — defaults to
 *         lsilogic when no controller exists)
 */
public Ternary<Integer, Integer, DiskControllerType> getScsiControllerInfo() throws Exception {
    List<VirtualDevice> devices = (List<VirtualDevice>)_context.getVimClient().
            getDynamicProperty(_mor, "config.hardware.device");

    int controllerCount = 0;
    int highestBusNumber = -1;
    DiskControllerType controllerType = DiskControllerType.lsilogic;

    if (devices != null) {
        for (VirtualDevice device : devices) {
            if (!(device instanceof VirtualSCSIController)) {
                continue;
            }
            controllerCount++;
            // Track the largest bus number across all SCSI controllers.
            int deviceBus = ((VirtualSCSIController)device).getBusNumber();
            if (deviceBus > highestBusNumber) {
                highestBusNumber = deviceBus;
            }
            // The reported type reflects the most specific subclass of this controller;
            // with mixed controllers, the last one iterated wins (same as before).
            if (device instanceof VirtualLsiLogicController) {
                controllerType = DiskControllerType.lsilogic;
            } else if (device instanceof VirtualLsiLogicSASController) {
                controllerType = DiskControllerType.lsisas1068;
            } else if (device instanceof VirtualBusLogicController) {
                controllerType = DiskControllerType.buslogic;
            } else if (device instanceof ParaVirtualSCSIController) {
                controllerType = DiskControllerType.pvscsi;
            }
        }
    }
    return new Ternary<Integer, Integer, DiskControllerType>(controllerCount, highestBusNumber, controllerType);
}
/**
 * Counts the virtual disk devices currently attached to this VM.
 *
 * @return the number of VirtualDisk devices, 0 when none are reported
 */
public int getNumberOfVirtualDisks() throws Exception {
    List<VirtualDevice> devices = (List<VirtualDevice>)_context.getVimClient().getDynamicProperty(_mor, "config.hardware.device");

    s_logger.info("Counting disk devices attached to VM " + getVmName());

    if (devices == null) {
        return 0;
    }

    int diskCount = 0;
    for (VirtualDevice device : devices) {
        if (device instanceof VirtualDisk) {
            diskCount++;
        }
    }
    return diskCount;
}
/**
 * Runs the vSphere ConsolidateVMDisks task on this VM and waits for completion.
 *
 * @return true on success; false when the task fails (the failure info is logged,
 *         not thrown)
 */
public boolean consolidateVmDisks() throws Exception {
ManagedObjectReference morTask = _context.getService().consolidateVMDisksTask(_mor);
boolean result = _context.getVimClient().waitForTask(morTask);
if (result) {
_context.waitForTaskProgressDone(morTask);
return true;
} else {
s_logger.error("VMware ConsolidateVMDisks_Task failed due to " + TaskMO.getTaskFailureInfo(_context, morTask));
}
return false;
}
}
|
Fix NPE on one of the register disk usecases
|
vmware-base/src/main/java/com/cloud/hypervisor/vmware/mo/VirtualMachineMO.java
|
Fix NPE on one of the register disk usecases
|
|
Java
|
apache-2.0
|
670d197ab5f3640f37ba6607291d0b0b20b44f92
| 0
|
openbaton/plugin-sdk
|
package org.openbaton.plugin;
import com.google.gson.*;
import com.rabbitmq.client.AMQP.BasicProperties;
import com.rabbitmq.client.Channel;
import com.rabbitmq.client.Connection;
import com.rabbitmq.client.ConnectionFactory;
import com.rabbitmq.client.QueueingConsumer;
import org.apache.commons.codec.binary.Base64;
import org.openbaton.catalogue.nfvo.PluginAnswer;
import org.openbaton.exceptions.NotFoundException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.io.Serializable;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.Type;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
import java.util.concurrent.TimeoutException;
/**
* Created by lto on 25/11/15.
*/
/**
 * RabbitMQ RPC listener that receives JSON-encoded method invocations, dispatches them
 * to a plugin instance via reflection, and publishes the JSON-encoded answer back on
 * the caller's reply queue.
 *
 * <p>Created by lto on 25/11/15.
 */
public class PluginListener implements Runnable {

  private static final String exchange = "plugin-exchange";

  private String pluginId;
  private Object pluginInstance;
  private Logger log;
  private QueueingConsumer consumer;
  private Channel channel;
  // byte[] payloads travel as Base64-encoded strings inside the JSON envelope.
  private Gson gson =
      new GsonBuilder()
          .registerTypeHierarchyAdapter(byte[].class, new ByteArrayToBase64TypeAdapter())
          .setPrettyPrinting()
          .create();
  private boolean exit = false;
  private String brokerIp;
  private int brokerPort;
  private String username;
  private String password;
  private Connection connection;

  public String getPluginId() {
    return pluginId;
  }

  public void setPluginId(String pluginId) {
    this.pluginId = pluginId;
  }

  public Object getPluginInstance() {
    return pluginInstance;
  }

  /** Sets the dispatch target and derives the logger from its concrete class. */
  public void setPluginInstance(Object pluginInstance) {
    this.pluginInstance = pluginInstance;
    log = LoggerFactory.getLogger(pluginInstance.getClass().getName());
  }

  public boolean isExit() {
    return exit;
  }

  public void setExit(boolean exit) {
    this.exit = exit;
  }

  /** Gson adapter that (de)serializes byte[] as Base64 strings. */
  private static class ByteArrayToBase64TypeAdapter
      implements JsonSerializer<byte[]>, JsonDeserializer<byte[]> {
    public byte[] deserialize(JsonElement json, Type typeOfT, JsonDeserializationContext context)
        throws JsonParseException {
      return Base64.decodeBase64(json.getAsString());
    }

    public JsonElement serialize(byte[] src, Type typeOfSrc, JsonSerializationContext context) {
      return new JsonPrimitive(Base64.encodeBase64String(src));
    }
  }

  @Override
  public void run() {
    try {
      initRabbitMQ();
    } catch (IOException | TimeoutException e) {
      e.printStackTrace();
      setExit(true);
      // Fix: nothing was initialized, so bail out instead of falling through to
      // channel.close()/connection.close() on null references (previously an NPE).
      return;
    }
    try {
      while (!exit) {
        QueueingConsumer.Delivery delivery;
        BasicProperties props;
        BasicProperties replyProps;
        try {
          delivery = consumer.nextDelivery();
          props = delivery.getProperties();
          replyProps =
              new BasicProperties.Builder()
                  .correlationId(props.getCorrelationId())
                  //                            .contentType("plain/text")
                  .build();
        } catch (Exception e) {
          e.printStackTrace();
          exit = true;
          continue;
        }
        log.info("\nWaiting for RPC requests");
        String message = new String(delivery.getBody());
        log.debug("Received message");
        log.trace("Message content received: " + message);

        PluginAnswer answer = new PluginAnswer();
        try {
          answer.setAnswer(executeMethod(message));
        } catch (InvocationTargetException e) {
          // Report the plugin's own failure, not the reflection wrapper.
          answer.setException(e.getTargetException());
        } catch (Exception e) {
          e.printStackTrace();
          answer.setException(e);
        }

        String response;
        try {
          response = gson.toJson(answer);
          log.debug("Answer is: " + response);
          log.debug("Reply queue is: " + props.getReplyTo());
          channel.basicPublish(exchange, props.getReplyTo(), replyProps, response.getBytes());
          channel.basicAck(delivery.getEnvelope().getDeliveryTag(), false);
        } catch (Exception e) {
          // Serialization of the answer failed: publish the failure itself, then stop.
          e.printStackTrace();
          answer.setException(e);
          log.debug("Answer is: " + answer);
          log.debug("Reply queue is: " + props.getReplyTo());
          channel.basicPublish(
              exchange, props.getReplyTo(), replyProps, gson.toJson(answer).getBytes());
          channel.basicAck(delivery.getEnvelope().getDeliveryTag(), false);
          setExit(true);
        }
      }
    } catch (IOException e) {
      e.printStackTrace();
    }
    try {
      // Fix: guard both closes so a partially-torn-down listener cannot NPE here.
      if (channel != null) {
        channel.close();
      }
      if (connection != null) {
        connection.close();
      }
    } catch (IOException | TimeoutException e) {
      e.printStackTrace();
    }
  }

  /**
   * Decodes the JSON RPC envelope, finds a matching method on the plugin instance by
   * name and arity, converts the JSON parameters to the method's declared types, and
   * invokes it.
   *
   * @param pluginMessageString JSON envelope with "methodName" and "parameters" fields
   * @return the method's result, or null for void-returning methods
   * @throws NotFoundException when no method matches name and parameter count
   */
  private Serializable executeMethod(String pluginMessageString)
      throws InvocationTargetException, IllegalAccessException, NotFoundException {
    JsonObject pluginMessageObject = gson.fromJson(pluginMessageString, JsonObject.class);

    // Collect non-null parameters; nulls are dropped so arity matching sees only
    // the arguments that were actually supplied.
    List<Object> params = new ArrayList<Object>();
    for (JsonElement param : pluginMessageObject.get("parameters").getAsJsonArray()) {
      Object p = gson.fromJson(param, Object.class);
      if (p != null) {
        params.add(p);
      }
    }
    Class pluginClass = pluginInstance.getClass();
    log.debug("There are " + params.size() + " parameters");
    String methodName = pluginMessageObject.get("methodName").getAsString();
    log.debug("Looking for method: " + methodName);
    for (Method m : pluginClass.getMethods()) {
      log.trace(
          "Method checking is: "
              + m.getName()
              + " with "
              + m.getParameterTypes().length
              + " parameters");
      Class<?>[] parameterTypes = m.getParameterTypes();
      // Overloads whose last parameter is byte[] are skipped (handled elsewhere).
      // Fix: only inspect the last parameter when there is one — the previous
      // code indexed [length - 1] unconditionally, which threw
      // ArrayIndexOutOfBoundsException for zero-argument methods.
      boolean lastParamIsByteArray =
          parameterTypes.length > 0
              && parameterTypes[parameterTypes.length - 1]
                  .getCanonicalName()
                  .equals(byte[].class.getCanonicalName());
      if (m.getName().equals(methodName)
          && parameterTypes.length == params.size()
          && !lastParamIsByteArray) {
        if (!m.getReturnType().equals(Void.class)) {
          if (params.size() != 0) {
            params =
                getParameters(
                    pluginMessageObject.get("parameters").getAsJsonArray(), m.getParameterTypes());
            for (Object p : params) {
              log.trace("param class is: " + p.getClass());
            }
            return (Serializable) m.invoke(pluginInstance, params.toArray());
          } else {
            return (Serializable) m.invoke(pluginInstance);
          }
        } else {
          if (params.size() != 0) {
            params =
                getParameters(
                    pluginMessageObject.get("parameters").getAsJsonArray(), m.getParameterTypes());
            for (Object p : params) {
              log.trace("param class is: " + p.getClass());
            }
            m.invoke(pluginInstance, params.toArray());
          } else {
            m.invoke(pluginInstance);
          }
          return null;
        }
      }
    }
    throw new NotFoundException("method not found");
  }

  /** Converts each JSON parameter to the corresponding declared parameter type. */
  private List<Object> getParameters(JsonArray parameters, Class<?>[] parameterTypes) {
    List<Object> res = new LinkedList<Object>();
    for (int i = 0; i < parameters.size(); i++) {
      res.add(gson.fromJson(parameters.get(i), parameterTypes[i]));
    }
    return res;
  }

  /**
   * Opens the broker connection and binds this listener's queue (named after the plugin
   * id) to the plugin exchange with a prefetch of 1.
   */
  private void initRabbitMQ() throws IOException, TimeoutException {
    ConnectionFactory factory = new ConnectionFactory();
    factory.setHost(brokerIp);
    factory.setPort(brokerPort);
    factory.setPassword(password);
    factory.setUsername(username);

    connection = factory.newConnection();
    channel = connection.createChannel();
    channel.exchangeDeclare(exchange, "topic");
    channel.queueDeclare(pluginId, false, false, true, null);
    channel.queueBind(pluginId, exchange, pluginId);
    channel.basicQos(1);
    consumer = new QueueingConsumer(channel);
    channel.basicConsume(pluginId, false, consumer);
  }

  public String getBrokerIp() {
    return brokerIp;
  }

  public void setBrokerIp(String brokerIp) {
    this.brokerIp = brokerIp;
  }

  public int getBrokerPort() {
    return brokerPort;
  }

  public void setBrokerPort(int brokerPort) {
    this.brokerPort = brokerPort;
  }

  public String getUsername() {
    return username;
  }

  public void setUsername(String username) {
    this.username = username;
  }

  public String getPassword() {
    return password;
  }

  public void setPassword(String password) {
    this.password = password;
  }
}
|
src/main/java/org/openbaton/plugin/PluginListener.java
|
package org.openbaton.plugin;
import com.google.gson.*;
import com.rabbitmq.client.AMQP.BasicProperties;
import com.rabbitmq.client.Channel;
import com.rabbitmq.client.Connection;
import com.rabbitmq.client.ConnectionFactory;
import com.rabbitmq.client.QueueingConsumer;
import org.apache.commons.codec.binary.Base64;
import org.openbaton.catalogue.nfvo.PluginAnswer;
import org.openbaton.exceptions.NotFoundException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.io.Serializable;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.Type;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
import java.util.concurrent.TimeoutException;
/**
* Created by lto on 25/11/15.
*/
public class PluginListener implements Runnable {
private static final String exchange = "plugin-exchange";
private String pluginId;
private Object pluginInstance;
private Logger log;
private QueueingConsumer consumer;
private Channel channel;
private Gson gson =
new GsonBuilder()
.registerTypeHierarchyAdapter(byte[].class, new ByteArrayToBase64TypeAdapter())
.setPrettyPrinting()
.create();
private boolean exit = false;
private String brokerIp;
private int brokerPort;
private String username;
private String password;
private Connection connection;
public String getPluginId() {
return pluginId;
}
public void setPluginId(String pluginId) {
this.pluginId = pluginId;
}
public Object getPluginInstance() {
return pluginInstance;
}
public void setPluginInstance(Object pluginInstance) {
this.pluginInstance = pluginInstance;
log = LoggerFactory.getLogger(pluginInstance.getClass().getName());
}
public boolean isExit() {
return exit;
}
public void setExit(boolean exit) {
this.exit = exit;
}
private static class ByteArrayToBase64TypeAdapter
implements JsonSerializer<byte[]>, JsonDeserializer<byte[]> {
public byte[] deserialize(JsonElement json, Type typeOfT, JsonDeserializationContext context)
throws JsonParseException {
return Base64.decodeBase64(json.getAsString());
}
public JsonElement serialize(byte[] src, Type typeOfSrc, JsonSerializationContext context) {
return new JsonPrimitive(Base64.encodeBase64String(src));
}
}
@Override
public void run() {
try {
initRabbitMQ();
} catch (IOException | TimeoutException e) {
e.printStackTrace();
setExit(true);
}
try {
while (!exit) {
QueueingConsumer.Delivery delivery;
BasicProperties props;
BasicProperties replyProps;
try {
delivery = consumer.nextDelivery();
props = delivery.getProperties();
replyProps =
new BasicProperties.Builder()
.correlationId(props.getCorrelationId())
// .contentType("plain/text")
.build();
} catch (Exception e) {
e.printStackTrace();
exit = true;
continue;
}
log.info("\nWaiting for RPC requests");
String message = new String(delivery.getBody());
log.debug("Received message");
log.trace("Message content received: " + message);
PluginAnswer answer = new PluginAnswer();
try {
answer.setAnswer(executeMethod(message));
} catch (InvocationTargetException e) {
answer.setException(e.getTargetException());
} catch (Exception e) {
e.printStackTrace();
answer.setException(e);
}
String response;
try {
response = gson.toJson(answer);
log.debug("Answer is: " + response);
log.debug("Reply queue is: " + props.getReplyTo());
channel.basicPublish(exchange, props.getReplyTo(), replyProps, response.getBytes());
channel.basicAck(delivery.getEnvelope().getDeliveryTag(), false);
} catch (Exception e) {
e.printStackTrace();
answer.setException(e);
log.debug("Answer is: " + answer);
log.debug("Reply queue is: " + props.getReplyTo());
channel.basicPublish(
exchange, props.getReplyTo(), replyProps, gson.toJson(answer).getBytes());
channel.basicAck(delivery.getEnvelope().getDeliveryTag(), false);
setExit(true);
}
}
} catch (IOException e) {
e.printStackTrace();
}
try {
channel.close();
connection.close();
} catch (IOException | TimeoutException e) {
e.printStackTrace();
}
}
private Serializable executeMethod(String pluginMessageString)
throws InvocationTargetException, IllegalAccessException, NotFoundException {
JsonObject pluginMessageObject = gson.fromJson(pluginMessageString, JsonObject.class);
List<Object> params = new ArrayList<Object>();
for (JsonElement param : pluginMessageObject.get("parameters").getAsJsonArray()) {
Object p = gson.fromJson(param, Object.class);
if (p != null) {
params.add(p);
}
}
Class pluginClass = pluginInstance.getClass();
log.debug("There are " + params.size() + " parameters");
for (Method m : pluginClass.getMethods()) {
log.debug(
"Method checking is: "
+ m.getName()
+ " with "
+ m.getParameterTypes().length
+ " parameters");
if (m.getName().equals(pluginMessageObject.get("methodName").getAsString())
&& m.getParameterTypes().length == params.size()) {
if (!m.getReturnType().equals(Void.class)) {
if (params.size() != 0) {
params =
getParameters(
pluginMessageObject.get("parameters").getAsJsonArray(), m.getParameterTypes());
return (Serializable) m.invoke(pluginInstance, params.toArray());
} else {
return (Serializable) m.invoke(pluginInstance);
}
} else {
if (params.size() != 0) {
params =
getParameters(
pluginMessageObject.get("parameters").getAsJsonArray(), m.getParameterTypes());
for (Object p : params) {
log.debug("param class is: " + p.getClass());
}
m.invoke(pluginInstance, params.toArray());
} else {
m.invoke(pluginInstance);
}
return null;
}
}
}
throw new NotFoundException("method not found");
}
private List<Object> getParameters(JsonArray parameters, Class<?>[] parameterTypes) {
List<Object> res = new LinkedList<Object>();
for (int i = 0; i < parameters.size(); i++) {
res.add(gson.fromJson(parameters.get(i), parameterTypes[i]));
}
return res;
}
private void initRabbitMQ() throws IOException, TimeoutException {
ConnectionFactory factory = new ConnectionFactory();
factory.setHost(brokerIp);
factory.setPort(brokerPort);
factory.setPassword(password);
factory.setUsername(username);
connection = factory.newConnection();
channel = connection.createChannel();
channel.exchangeDeclare(exchange, "topic");
channel.queueDeclare(pluginId, false, false, true, null);
channel.queueBind(pluginId, exchange, pluginId);
channel.basicQos(1);
consumer = new QueueingConsumer(channel);
channel.basicConsume(pluginId, false, consumer);
}
public String getBrokerIp() {
return brokerIp;
}
public void setBrokerIp(String brokerIp) {
this.brokerIp = brokerIp;
}
public int getBrokerPort() {
return brokerPort;
}
public void setBrokerPort(int brokerPort) {
this.brokerPort = brokerPort;
}
public String getUsername() {
return username;
}
public void setUsername(String username) {
this.username = username;
}
public String getPassword() {
return password;
}
public void setPassword(String password) {
this.password = password;
}
}
|
Fix: addImage choice is now only using the imageLink
|
src/main/java/org/openbaton/plugin/PluginListener.java
|
Fix: addImage choice is now only using the imageLink
|
|
Java
|
apache-2.0
|
955c574b3a10cfe4376a33c6b1e4927e6f74a75c
| 0
|
treejames/wechat4j-1,Wingo7239/wechat4j,delonzhou/wechat4j,8522/wechat4j,financeX/wechat4j,liwanwei/wechat4j,leonardo-eggs/wechat4j,zouchangzhen/wechat4j,chengn/wechat4j,birdtsai/wechat4j,subaochen/wechat4j,b2b2244424/wechat4j,yudedipan/wechat4j,sword-org/wechat4j
|
/**
*
*/
package org.sword.wechat4j.response;
import javax.xml.bind.annotation.XmlElement;
/**
* 视频消息
* @author ChengNing
* @date 2014年12月7日
*/
public class VideoResponse {
private String MediaId; // media id obtained by uploading a multimedia file
private String Title; // title of the video message
private String Description; // description of the video message
private String ThumbMediaId; // media id of the video thumbnail, obtained the same way

// JAXB maps each property to the WeChat XML element name via @XmlElement on the getter.
@XmlElement(name="MediaId")
public String getMediaId() {
return MediaId;
}
public void setMediaId(String mediaId) {
MediaId = mediaId;
}
@XmlElement(name="Title")
public String getTitle() {
return Title;
}
public void setTitle(String title) {
Title = title;
}
@XmlElement(name="Description")
public String getDescription() {
return Description;
}
public void setDescription(String description) {
Description = description;
}
@XmlElement(name="ThumbMediaId")
public String getThumbMediaId() {
return ThumbMediaId;
}
public void setThumbMediaId(String thumbMediaId) {
ThumbMediaId = thumbMediaId;
}
}
|
src/org/sword/wechat4j/response/VideoResponse.java
|
/**
*
*/
package org.sword.wechat4j.response;
import javax.xml.bind.annotation.XmlElement;
/**
* 视频消息
* @author ChengNing
* @date 2014年12月7日
*/
public class VideoResponse {
private String MediaId; // media id obtained by uploading a multimedia file
private String Title; // title of the video message
private String Description; // description of the video message

// JAXB maps each property to the WeChat XML element name via @XmlElement on the getter.
@XmlElement(name="MediaId")
public String getMediaId() {
return MediaId;
}
public void setMediaId(String mediaId) {
MediaId = mediaId;
}
@XmlElement(name="Title")
public String getTitle() {
return Title;
}
public void setTitle(String title) {
Title = title;
}
@XmlElement(name="Description")
public String getDescription() {
return Description;
}
public void setDescription(String description) {
Description = description;
}
}
|
调整消息接口
|
src/org/sword/wechat4j/response/VideoResponse.java
|
调整消息接口
|
|
Java
|
apache-2.0
|
d32ba0977f6f499faf5373273513429798051a14
| 0
|
HubSpot/hbase,HubSpot/hbase,HubSpot/hbase,HubSpot/hbase,HubSpot/hbase,HubSpot/hbase,HubSpot/hbase,HubSpot/hbase,HubSpot/hbase,HubSpot/hbase
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.util;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.security.AccessController;
import java.security.PrivilegedAction;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
@InterfaceAudience.Private
public class UnsafeAvailChecker {

// Reflection target; referenced by name so this class loads even on JVMs without it.
private static final String CLASS_NAME = "sun.misc.Unsafe";
private static final Log LOG = LogFactory.getLog(UnsafeAvailChecker.class);
// Both flags are computed once in the static initializer and never change afterwards.
private static boolean avail = false;
private static boolean unaligned = false;
static {
// Probe sun.misc.Unsafe inside a privileged block so a security manager does not
// block the reflective field access.
avail = AccessController.doPrivileged(new PrivilegedAction<Boolean>() {
@Override
public Boolean run() {
try {
Class<?> clazz = Class.forName(CLASS_NAME);
Field f = clazz.getDeclaredField("theUnsafe");
f.setAccessible(true);
return f.get(null) != null;
} catch (Throwable e) {
LOG.warn("sun.misc.Unsafe is not available/accessible", e);
}
return false;
}
});
// When Unsafe itself is not available/accessible consider unaligned as false.
if (avail) {
String arch = System.getProperty("os.arch");
if ("ppc64".equals(arch) || "ppc64le".equals(arch) || "aarch64".equals(arch)) {
// java.nio.Bits.unaligned() wrongly returns false on ppc (JDK-8165231),
unaligned = true;
} else {
try {
// Using java.nio.Bits#unaligned() to check for unaligned-access capability
Class<?> clazz = Class.forName("java.nio.Bits");
Method m = clazz.getDeclaredMethod("unaligned");
m.setAccessible(true);
unaligned = (Boolean) m.invoke(null);
} catch (Exception e) {
LOG.warn("java.nio.Bits#unaligned() check failed."
+ "Unsafe based read/write of primitive types won't be used", e);
}
}
}
}
/**
* @return true when running JVM is having sun's Unsafe package available in it and it is
* accessible.
*/
public static boolean isAvailable() {
return avail;
}
/**
* @return true when running JVM is having sun's Unsafe package available in it and underlying
* system having unaligned-access capability.
*/
public static boolean unaligned() {
return unaligned;
}
private UnsafeAvailChecker() {
// private constructor to avoid instantiation
}
}
|
hbase-common/src/main/java/org/apache/hadoop/hbase/util/UnsafeAvailChecker.java
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.util;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.security.AccessController;
import java.security.PrivilegedAction;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
@InterfaceAudience.Private
public class UnsafeAvailChecker {

  private static final String CLASS_NAME = "sun.misc.Unsafe";
  private static final Log LOG = LogFactory.getLog(UnsafeAvailChecker.class);
  // Both flags are computed exactly once, in the static initializer below.
  private static boolean avail = false;
  private static boolean unaligned = false;

  static {
    // Probe for sun.misc.Unsafe reflectively, inside a privileged action so the
    // check also succeeds when a SecurityManager is installed.
    avail = AccessController.doPrivileged(new PrivilegedAction<Boolean>() {
      @Override
      public Boolean run() {
        try {
          Class<?> clazz = Class.forName(CLASS_NAME);
          Field f = clazz.getDeclaredField("theUnsafe");
          f.setAccessible(true);
          return f.get(null) != null;
        } catch (Throwable e) {
          LOG.warn("sun.misc.Unsafe is not available/accessible", e);
        }
        return false;
      }
    });
    // When Unsafe itself is not available/accessible consider unaligned as false.
    if (avail) {
      String arch = System.getProperty("os.arch");
      if ("ppc64".equals(arch) || "ppc64le".equals(arch) || "aarch64".equals(arch)) {
        // java.nio.Bits.unaligned() wrongly returns false on ppc (JDK-8165231)
        // and similarly misreports on aarch64, so hard-code unaligned support
        // on those architectures instead of asking the JDK.
        unaligned = true;
      } else {
        try {
          // Using java.nio.Bits#unaligned() to check for unaligned-access capability
          Class<?> clazz = Class.forName("java.nio.Bits");
          Method m = clazz.getDeclaredMethod("unaligned");
          m.setAccessible(true);
          unaligned = (Boolean) m.invoke(null);
        } catch (Exception e) {
          LOG.warn("java.nio.Bits#unaligned() check failed. "
              + "Unsafe based read/write of primitive types won't be used", e);
        }
      }
    }
  }

  /**
   * @return true when sun's Unsafe package is available on this JVM and it is
   *         accessible.
   */
  public static boolean isAvailable() {
    return avail;
  }

  /**
   * @return true when sun's Unsafe package is available and the underlying
   *         system has unaligned-access capability.
   */
  public static boolean unaligned() {
    return unaligned;
  }

  private UnsafeAvailChecker() {
    // private constructor to avoid instantiation
  }
}
|
HBASE-19386 Enable Arm64 unaligned support for HBase
On Arm64, java.nio.Bits.unaligned() wrongly returns false due to a JDK bug.
This causes HBase's UnsafeAvailChecker to wrongly return false on Arm64,
which also causes a FuzzyRowFilter unit test failure.
Fix it by hard-coding Arm64 unaligned support to enabled.
Jira: HBASE-19386
Change-Id: I3ab821dacbe42b18cd515080da1fa3dc1f1e1d28
Signed-off-by: Yuqi Gu <345a4e3af047c06b177f7fe1915a45f6033d9349@arm.com>
Signed-off-by: tedyu <22f3e725b314afeca4d45e12683c560674029d21@gmail.com>
|
hbase-common/src/main/java/org/apache/hadoop/hbase/util/UnsafeAvailChecker.java
|
HBASE-19386 Enable Arm64 unaligned support for HBase
|
|
Java
|
apache-2.0
|
18217d59c60442e195d970c1bedb362c76354250
| 0
|
allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community
|
// Copyright 2000-2019 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package org.jetbrains.idea.maven.indices;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.ReadAction;
import com.intellij.openapi.progress.ProgressIndicatorProvider;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.util.Consumer;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.ui.update.MergingUpdateQueue;
import com.intellij.util.ui.update.Update;
import gnu.trove.THashSet;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.idea.maven.model.MavenId;
import org.jetbrains.idea.maven.model.MavenRemoteRepository;
import org.jetbrains.idea.maven.onlinecompletion.DependencyCompletionProvider;
import org.jetbrains.idea.maven.onlinecompletion.DependencySearchService;
import org.jetbrains.idea.maven.onlinecompletion.IndexBasedCompletionProvider;
import org.jetbrains.idea.maven.onlinecompletion.OfflineSearchService;
import org.jetbrains.idea.maven.onlinecompletion.ProjectModulesCompletionProvider;
import org.jetbrains.idea.maven.onlinecompletion.model.MavenDependencyCompletionItem;
import org.jetbrains.idea.maven.onlinecompletion.model.SearchParameters;
import org.jetbrains.idea.maven.project.MavenProject;
import org.jetbrains.idea.maven.project.MavenProjectChanges;
import org.jetbrains.idea.maven.project.MavenProjectsManager;
import org.jetbrains.idea.maven.project.MavenProjectsTree;
import org.jetbrains.idea.maven.server.NativeMavenProjectHolder;
import org.jetbrains.idea.maven.utils.MavenMergingUpdateQueue;
import org.jetbrains.idea.maven.utils.MavenSimpleProjectComponent;
import java.io.File;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
/**
 * Project-level manager of Maven repository indices and of the dependency-search
 * services built on top of them. The index list is rebuilt asynchronously on a
 * merging update queue whenever the Maven project model changes.
 */
public final class MavenProjectIndicesManager extends MavenSimpleProjectComponent {
// Replaced wholesale by the update task; volatile so readers always observe a
// fully-constructed list/service, never a half-built one.
private volatile List<MavenIndex> myProjectIndices = new ArrayList<>();
// True once at least one remote repository (id, url) pair was collected.
private volatile boolean offlineIndexes = false;
private volatile OfflineSearchService myOfflineSearchService;
private volatile DependencySearchService myDependencySearchService;
// Coalesces bursts of update requests into a single rebuild.
private final MergingUpdateQueue myUpdateQueue;
public boolean hasOfflineIndexes() {
return offlineIndexes;
}
public static MavenProjectIndicesManager getInstance(Project p) {
return p.getComponent(MavenProjectIndicesManager.class);
}
public MavenProjectIndicesManager(Project project) {
super(project);
// 1000 is presumably the merge quiet-time in ms — see MavenMergingUpdateQueue.
myUpdateQueue = new MavenMergingUpdateQueue(getClass().getSimpleName(), 1000, true, project);
// Start with empty services so the synchronized getters never return null
// before the first rebuild completes.
myOfflineSearchService = new OfflineSearchService(project, Collections.emptyList());
myDependencySearchService = new DependencySearchService(project, myOfflineSearchService);
if (!isNormalProject()) return;
doInit();
}
// Subscribes to Maven project events so the index list is rebuilt whenever the
// project model changes. In unit-test mode one update is scheduled eagerly —
// presumably because activated() is not fired in tests; TODO confirm.
public void doInit() {
if (ApplicationManager.getApplication().isUnitTestMode()) {
scheduleUpdateIndicesList();
}
getMavenProjectManager().addManagerListener(new MavenProjectsManager.Listener() {
@Override
public void activated() {
scheduleUpdateIndicesList();
}
});
getMavenProjectManager().addProjectsTreeListener(new MavenProjectsTree.Listener() {
@Override
public void projectsUpdated(@NotNull List<Pair<MavenProject, MavenProjectChanges>> updated, @NotNull List<MavenProject> deleted) {
scheduleUpdateIndicesList();
}
@Override
public void projectResolved(@NotNull Pair<MavenProject, MavenProjectChanges> projectWithChanges,
NativeMavenProjectHolder nativeMavenProject) {
scheduleUpdateIndicesList();
}
});
}
private void scheduleUpdateIndicesList() {
scheduleUpdateIndicesList(null);
}
/**
 * Queues a rebuild of the project index list and of both search services.
 *
 * @param consumer optional callback invoked with the freshly built index list
 */
public void scheduleUpdateIndicesList(@Nullable final Consumer<? super List<MavenIndex>> consumer) {
myUpdateQueue.queue(new Update(this) {
@Override
public void run() {
Set<Pair<String, String>> remoteRepositoriesIdsAndUrls;
File localRepository;
MavenIndicesManager indicesManager = MavenIndicesManager.getInstance();
// Read project state under read actions; both lambdas yield null (and we
// bail out) if the project was disposed meanwhile.
remoteRepositoriesIdsAndUrls = ReadAction.compute(() -> myProject.isDisposed() ? null : collectRemoteRepositoriesIdsAndUrls());
localRepository = ReadAction.compute(() -> myProject.isDisposed() ? null : getLocalRepository());
if (remoteRepositoriesIdsAndUrls == null || localRepository == null) return;
Set<DependencyCompletionProvider> providers = new HashSet<>();
// May return null (guarded below and again when building newIndices).
MavenIndex localIndex = indicesManager.createIndexForLocalRepo(myProject, localRepository);
if (localIndex != null) {
providers.add(new IndexBasedCompletionProvider(localIndex));
}
providers.add(new ProjectModulesCompletionProvider(myProject));
List<MavenIndex> offlineIndices =
MavenIndicesManager.getInstance().ensureIndicesExist(myProject, remoteRepositoriesIdsAndUrls);
for (MavenSearchIndex index : offlineIndices) {
if (index instanceof MavenIndex) {
providers.add(new IndexBasedCompletionProvider((MavenIndex)index));
}
}
List<MavenIndex> newIndices = new ArrayList<>(offlineIndices);
if (localIndex != null) {
newIndices.add(localIndex);
}
// NOTE(review): this synchronizes on the Update instance, while the service
// getters synchronize on the manager instance — different locks. The fields
// are volatile, so publication is still safe; confirm the lock choice is
// intentional.
synchronized (this) {
offlineIndexes = !remoteRepositoriesIdsAndUrls.isEmpty();
myProjectIndices = newIndices;
myOfflineSearchService = new OfflineSearchService(myProject, new ArrayList<>(providers));
myDependencySearchService = new DependencySearchService(myProject, myOfflineSearchService);
}
if (consumer != null) {
consumer.consume(myProjectIndices);
}
}
});
}
private File getLocalRepository() {
return MavenProjectsManager.getInstance(myProject).getLocalRepository();
}
// Collects (id, url) pairs for all remote repositories known to the project,
// including ones contributed via the MavenRepositoryProvider extension point.
private Set<Pair<String, String>> collectRemoteRepositoriesIdsAndUrls() {
Set<Pair<String, String>> result = new THashSet<>();
Set<MavenRemoteRepository> remoteRepositories = new HashSet<>(getMavenProjectManager().getRemoteRepositories());
for (MavenRepositoryProvider repositoryProvider : MavenRepositoryProvider.EP_NAME.getExtensions()) {
ContainerUtil.addAll(remoteRepositories, repositoryProvider.getRemoteRepositories(myProject));
}
for (MavenRemoteRepository each : remoteRepositories) {
String id = each.getId();
String url = each.getUrl();
result.add(Pair.create(id, url));
}
return result;
}
/**
 * Returns a defensive copy of the current index list.
 *
 * @deprecated use {@link #getOfflineSearchService()}
 */
@Deprecated
public List<MavenIndex> getIndices() {
return new ArrayList<>(myProjectIndices);
}
public void scheduleUpdateAll() {
MavenIndicesManager.getInstance().scheduleUpdate(myProject, myProjectIndices);
}
public void scheduleUpdate(List<MavenIndex> indices) {
MavenIndicesManager.getInstance().scheduleUpdate(myProject, indices);
}
public MavenIndicesManager.IndexUpdatingState getUpdatingState(MavenSearchIndex index) {
return MavenIndicesManager.getInstance().getUpdatingState(index);
}
private MavenProjectsManager getMavenProjectManager() {
return MavenProjectsManager.getInstance(myProject);
}
public synchronized OfflineSearchService getOfflineSearchService() {
return myOfflineSearchService;
}
public synchronized DependencySearchService getDependencySearchService() {
return myDependencySearchService;
}
/**
 * @deprecated use {@link OfflineSearchService#findGroupCandidates} or {@link OfflineSearchService#findByTemplate} instead
 **/
@Deprecated
public Set<String> getGroupIds() {
return getGroupIds("");
}
/**
 * @deprecated use {@link OfflineSearchService#findGroupCandidates} or {@link OfflineSearchService#findByTemplate} instead
 **/
@Deprecated
public Set<String> getGroupIds(String pattern) {
pattern = pattern == null ? "" : pattern;
// TODO(review): maps group candidates through getArtifactId(); it looks like
// this should be getGroupId() — confirm before relying on the result.
return getOfflineSearchService().findGroupCandidates(new MavenDependencyCompletionItem(pattern))
.stream().map(d -> d.getArtifactId())
.collect(
Collectors.toSet());
}
/**
 * @deprecated use {@link OfflineSearchService#findArtifactCandidates} or {@link OfflineSearchService#findByTemplate} instead
 **/
@Deprecated
public Set<String> getArtifactIds(String groupId) {
ProgressIndicatorProvider.checkCanceled();
return getOfflineSearchService().findArtifactCandidates(new MavenDependencyCompletionItem(groupId)).stream().map(d -> d.getArtifactId())
.collect(
Collectors.toSet());
}
/**
 * @deprecated use {@link OfflineSearchService#findAllVersions} or {@link OfflineSearchService#findByTemplate} instead
 **/
@Deprecated
public Set<String> getVersions(String groupId, String artifactId) {
ProgressIndicatorProvider.checkCanceled();
return getOfflineSearchService().findAllVersions(new MavenDependencyCompletionItem(groupId, artifactId, null)).stream()
.map(d -> d.getVersion()).collect(
Collectors.toSet());
}
@Deprecated
public boolean hasGroupId(String groupId) {
ProgressIndicatorProvider.checkCanceled();
// Check ids declared by the project's own modules first, then the indices.
if (hasProjectGroupId(groupId)) return true;
return getOfflineSearchService().findGroupCandidates(new MavenDependencyCompletionItem(groupId)).stream()
.anyMatch(p -> StringUtil.equals(groupId, p.getGroupId()));
}
// Checks whether the given coordinates resolve to an existing directory/POM
// under the local repository on disk.
private boolean checkLocalRepository(String groupId, String artifactId, String version) {
if (StringUtil.isEmpty(groupId)) return false;
String relPath = groupId.replace('.', '/');
if (artifactId != null) {
relPath += "/" + artifactId;
if (version != null) {
relPath += "/" + version + "/" + artifactId + "-" + version + ".pom";
}
}
File file = new File(getLocalRepository(), relPath);
return file.exists();
}
@Deprecated
public boolean hasArtifactId(String groupId, String artifactId) {
if (hasProjectArtifactId(groupId, artifactId)) return true;
return !getOfflineSearchService().findAllVersions(new MavenDependencyCompletionItem(groupId, artifactId, null),
SearchParameters.DEFAULT).isEmpty();
}
@Deprecated
public boolean hasVersion(String groupId, String artifactId, String version) {
if (hasProjectVersion(groupId, artifactId, version)) return true;
return getOfflineSearchService().findAllVersions(new MavenDependencyCompletionItem(groupId, artifactId, null)).stream().anyMatch(
s -> version.equals(s.getVersion())
);
}
// Group ids declared by the project's own modules.
private Set<String> getProjectGroupIds() {
Set<String> result = new THashSet<>();
for (MavenId each : getProjectsIds()) {
result.add(each.getGroupId());
}
return result;
}
private Set<String> getProjectArtifactIds(String groupId) {
Set<String> result = new THashSet<>();
for (MavenId each : getProjectsIds()) {
if (groupId.equals(each.getGroupId())) {
result.add(each.getArtifactId());
}
}
return result;
}
private Set<String> getProjectVersions(String groupId, String artifactId) {
Set<String> result = new THashSet<>();
for (MavenId each : getProjectsIds()) {
if (groupId.equals(each.getGroupId()) && artifactId.equals(each.getArtifactId())) {
result.add(each.getVersion());
}
}
return result;
}
private boolean hasProjectGroupId(String groupId) {
return getProjectGroupIds().contains(groupId);
}
private boolean hasProjectArtifactId(String groupId, String artifactId) {
return getProjectArtifactIds(groupId).contains(artifactId);
}
private boolean hasProjectVersion(String groupId, String artifactId, String version) {
return getProjectVersions(groupId, artifactId).contains(version);
}
// MavenIds of all Maven modules currently managed in this IDE project.
private Set<MavenId> getProjectsIds() {
Set<MavenId> result = new THashSet<>();
for (MavenProject each : MavenProjectsManager.getInstance(myProject).getProjects()) {
result.add(each.getMavenId());
}
return result;
}
}
|
plugins/maven/src/main/java/org/jetbrains/idea/maven/indices/MavenProjectIndicesManager.java
|
// Copyright 2000-2019 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package org.jetbrains.idea.maven.indices;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.ReadAction;
import com.intellij.openapi.progress.ProgressIndicatorProvider;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.util.Consumer;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.ui.update.MergingUpdateQueue;
import com.intellij.util.ui.update.Update;
import gnu.trove.THashSet;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.idea.maven.model.MavenId;
import org.jetbrains.idea.maven.model.MavenRemoteRepository;
import org.jetbrains.idea.maven.onlinecompletion.DependencyCompletionProvider;
import org.jetbrains.idea.maven.onlinecompletion.DependencySearchService;
import org.jetbrains.idea.maven.onlinecompletion.IndexBasedCompletionProvider;
import org.jetbrains.idea.maven.onlinecompletion.OfflineSearchService;
import org.jetbrains.idea.maven.onlinecompletion.ProjectModulesCompletionProvider;
import org.jetbrains.idea.maven.onlinecompletion.model.MavenDependencyCompletionItem;
import org.jetbrains.idea.maven.onlinecompletion.model.SearchParameters;
import org.jetbrains.idea.maven.project.MavenProject;
import org.jetbrains.idea.maven.project.MavenProjectChanges;
import org.jetbrains.idea.maven.project.MavenProjectsManager;
import org.jetbrains.idea.maven.project.MavenProjectsTree;
import org.jetbrains.idea.maven.server.NativeMavenProjectHolder;
import org.jetbrains.idea.maven.utils.MavenMergingUpdateQueue;
import org.jetbrains.idea.maven.utils.MavenSimpleProjectComponent;
import java.io.File;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
/**
 * Project-level manager of Maven repository indices and of the dependency-search
 * services built on top of them. The index list is rebuilt asynchronously on a
 * merging update queue whenever the Maven project model changes.
 */
public final class MavenProjectIndicesManager extends MavenSimpleProjectComponent {
  // Replaced wholesale by the update task; volatile for safe publication.
  private volatile List<MavenIndex> myProjectIndices = new ArrayList<>();
  private volatile boolean offlineIndexes = false;
  private volatile OfflineSearchService myOfflineSearchService;
  private volatile DependencySearchService myDependencySearchService;
  // Coalesces bursts of update requests into a single rebuild.
  private final MergingUpdateQueue myUpdateQueue;

  public boolean hasOfflineIndexes() {
    return offlineIndexes;
  }

  public static MavenProjectIndicesManager getInstance(Project p) {
    return p.getComponent(MavenProjectIndicesManager.class);
  }

  public MavenProjectIndicesManager(Project project) {
    super(project);
    myUpdateQueue = new MavenMergingUpdateQueue(getClass().getSimpleName(), 1000, true, project);
    // Start with empty services so the getters never return null before the first rebuild.
    myOfflineSearchService = new OfflineSearchService(project, Collections.emptyList());
    myDependencySearchService = new DependencySearchService(project, myOfflineSearchService);
    if (!isNormalProject()) return;
    doInit();
  }

  // Subscribes to Maven project events so the index list is rebuilt whenever the
  // project model changes; schedules one eager update in unit-test mode.
  public void doInit() {
    if (ApplicationManager.getApplication().isUnitTestMode()) {
      scheduleUpdateIndicesList();
    }
    getMavenProjectManager().addManagerListener(new MavenProjectsManager.Listener() {
      @Override
      public void activated() {
        scheduleUpdateIndicesList();
      }
    });
    getMavenProjectManager().addProjectsTreeListener(new MavenProjectsTree.Listener() {
      @Override
      public void projectsUpdated(@NotNull List<Pair<MavenProject, MavenProjectChanges>> updated, @NotNull List<MavenProject> deleted) {
        scheduleUpdateIndicesList();
      }

      @Override
      public void projectResolved(@NotNull Pair<MavenProject, MavenProjectChanges> projectWithChanges,
                                  NativeMavenProjectHolder nativeMavenProject) {
        scheduleUpdateIndicesList();
      }
    });
  }

  private void scheduleUpdateIndicesList() {
    scheduleUpdateIndicesList(null);
  }

  /**
   * Queues a rebuild of the project index list and of both search services.
   *
   * @param consumer optional callback invoked with the freshly built index list
   */
  public void scheduleUpdateIndicesList(@Nullable final Consumer<? super List<MavenIndex>> consumer) {
    myUpdateQueue.queue(new Update(this) {
      @Override
      public void run() {
        Set<Pair<String, String>> remoteRepositoriesIdsAndUrls;
        File localRepository;
        MavenIndicesManager indicesManager = MavenIndicesManager.getInstance();
        // Read project state under read actions; bail out if the project was disposed.
        remoteRepositoriesIdsAndUrls = ReadAction.compute(() -> myProject.isDisposed() ? null : collectRemoteRepositoriesIdsAndUrls());
        localRepository = ReadAction.compute(() -> myProject.isDisposed() ? null : getLocalRepository());
        if (remoteRepositoriesIdsAndUrls == null || localRepository == null) return;
        Set<DependencyCompletionProvider> providers = new HashSet<>();
        // May return null, e.g. when no index can be created for the local repo.
        MavenIndex localIndex = indicesManager.createIndexForLocalRepo(myProject, localRepository);
        if (localIndex != null) {
          providers.add(new IndexBasedCompletionProvider(localIndex));
        }
        providers.add(new ProjectModulesCompletionProvider(myProject));
        List<MavenIndex> offlineIndices =
          MavenIndicesManager.getInstance().ensureIndicesExist(myProject, remoteRepositoriesIdsAndUrls);
        for (MavenSearchIndex index : offlineIndices) {
          if (index instanceof MavenIndex) {
            providers.add(new IndexBasedCompletionProvider((MavenIndex)index));
          }
        }
        List<MavenIndex> newIndices = new ArrayList<>(offlineIndices);
        // FIX: createIndexForLocalRepo may return null (see the guard above);
        // unconditionally adding it put a null element into the index list and
        // caused NPEs in consumers of getIndices()/scheduleUpdateAll().
        if (localIndex != null) {
          newIndices.add(localIndex);
        }
        synchronized (this) {
          offlineIndexes = !remoteRepositoriesIdsAndUrls.isEmpty();
          myProjectIndices = newIndices;
          myOfflineSearchService = new OfflineSearchService(myProject, new ArrayList<>(providers));
          myDependencySearchService = new DependencySearchService(myProject, myOfflineSearchService);
        }
        if (consumer != null) {
          consumer.consume(myProjectIndices);
        }
      }
    });
  }

  private File getLocalRepository() {
    return MavenProjectsManager.getInstance(myProject).getLocalRepository();
  }

  // Collects (id, url) pairs for all remote repositories known to the project,
  // including ones contributed via the MavenRepositoryProvider extension point.
  private Set<Pair<String, String>> collectRemoteRepositoriesIdsAndUrls() {
    Set<Pair<String, String>> result = new THashSet<>();
    Set<MavenRemoteRepository> remoteRepositories = new HashSet<>(getMavenProjectManager().getRemoteRepositories());
    for (MavenRepositoryProvider repositoryProvider : MavenRepositoryProvider.EP_NAME.getExtensions()) {
      ContainerUtil.addAll(remoteRepositories, repositoryProvider.getRemoteRepositories(myProject));
    }
    for (MavenRemoteRepository each : remoteRepositories) {
      String id = each.getId();
      String url = each.getUrl();
      result.add(Pair.create(id, url));
    }
    return result;
  }

  /**
   * Returns a defensive copy of the current index list.
   *
   * @deprecated use {@link #getOfflineSearchService()}
   */
  @Deprecated
  public List<MavenIndex> getIndices() {
    return new ArrayList<>(myProjectIndices);
  }

  public void scheduleUpdateAll() {
    MavenIndicesManager.getInstance().scheduleUpdate(myProject, myProjectIndices);
  }

  public void scheduleUpdate(List<MavenIndex> indices) {
    MavenIndicesManager.getInstance().scheduleUpdate(myProject, indices);
  }

  public MavenIndicesManager.IndexUpdatingState getUpdatingState(MavenSearchIndex index) {
    return MavenIndicesManager.getInstance().getUpdatingState(index);
  }

  private MavenProjectsManager getMavenProjectManager() {
    return MavenProjectsManager.getInstance(myProject);
  }

  public synchronized OfflineSearchService getOfflineSearchService() {
    return myOfflineSearchService;
  }

  public synchronized DependencySearchService getDependencySearchService() {
    return myDependencySearchService;
  }

  /**
   * @deprecated use {@link OfflineSearchService#findGroupCandidates} or {@link OfflineSearchService#findByTemplate} instead
   **/
  @Deprecated
  public Set<String> getGroupIds() {
    return getGroupIds("");
  }

  /**
   * @deprecated use {@link OfflineSearchService#findGroupCandidates} or {@link OfflineSearchService#findByTemplate} instead
   **/
  @Deprecated
  public Set<String> getGroupIds(String pattern) {
    pattern = pattern == null ? "" : pattern;
    // TODO(review): maps group candidates through getArtifactId(); looks like it
    // should be getGroupId() — confirm before relying on the result.
    return getOfflineSearchService().findGroupCandidates(new MavenDependencyCompletionItem(pattern))
      .stream().map(d -> d.getArtifactId())
      .collect(Collectors.toSet());
  }

  /**
   * @deprecated use {@link OfflineSearchService#findArtifactCandidates} or {@link OfflineSearchService#findByTemplate} instead
   **/
  @Deprecated
  public Set<String> getArtifactIds(String groupId) {
    ProgressIndicatorProvider.checkCanceled();
    return getOfflineSearchService().findArtifactCandidates(new MavenDependencyCompletionItem(groupId)).stream().map(d -> d.getArtifactId())
      .collect(Collectors.toSet());
  }

  /**
   * @deprecated use {@link OfflineSearchService#findAllVersions} or {@link OfflineSearchService#findByTemplate} instead
   **/
  @Deprecated
  public Set<String> getVersions(String groupId, String artifactId) {
    ProgressIndicatorProvider.checkCanceled();
    return getOfflineSearchService().findAllVersions(new MavenDependencyCompletionItem(groupId, artifactId, null)).stream()
      .map(d -> d.getVersion()).collect(Collectors.toSet());
  }

  @Deprecated
  public boolean hasGroupId(String groupId) {
    ProgressIndicatorProvider.checkCanceled();
    // Check ids declared by the project's own modules first, then the indices.
    if (hasProjectGroupId(groupId)) return true;
    return getOfflineSearchService().findGroupCandidates(new MavenDependencyCompletionItem(groupId)).stream()
      .anyMatch(p -> StringUtil.equals(groupId, p.getGroupId()));
  }

  // Checks whether the given coordinates resolve to an existing directory/POM
  // under the local repository on disk.
  private boolean checkLocalRepository(String groupId, String artifactId, String version) {
    if (StringUtil.isEmpty(groupId)) return false;
    String relPath = groupId.replace('.', '/');
    if (artifactId != null) {
      relPath += "/" + artifactId;
      if (version != null) {
        relPath += "/" + version + "/" + artifactId + "-" + version + ".pom";
      }
    }
    File file = new File(getLocalRepository(), relPath);
    return file.exists();
  }

  @Deprecated
  public boolean hasArtifactId(String groupId, String artifactId) {
    if (hasProjectArtifactId(groupId, artifactId)) return true;
    return !getOfflineSearchService().findAllVersions(new MavenDependencyCompletionItem(groupId, artifactId, null),
                                                      SearchParameters.DEFAULT).isEmpty();
  }

  @Deprecated
  public boolean hasVersion(String groupId, String artifactId, String version) {
    if (hasProjectVersion(groupId, artifactId, version)) return true;
    return getOfflineSearchService().findAllVersions(new MavenDependencyCompletionItem(groupId, artifactId, null)).stream().anyMatch(
      s -> version.equals(s.getVersion())
    );
  }

  // Group ids declared by the project's own modules.
  private Set<String> getProjectGroupIds() {
    Set<String> result = new THashSet<>();
    for (MavenId each : getProjectsIds()) {
      result.add(each.getGroupId());
    }
    return result;
  }

  private Set<String> getProjectArtifactIds(String groupId) {
    Set<String> result = new THashSet<>();
    for (MavenId each : getProjectsIds()) {
      if (groupId.equals(each.getGroupId())) {
        result.add(each.getArtifactId());
      }
    }
    return result;
  }

  private Set<String> getProjectVersions(String groupId, String artifactId) {
    Set<String> result = new THashSet<>();
    for (MavenId each : getProjectsIds()) {
      if (groupId.equals(each.getGroupId()) && artifactId.equals(each.getArtifactId())) {
        result.add(each.getVersion());
      }
    }
    return result;
  }

  private boolean hasProjectGroupId(String groupId) {
    return getProjectGroupIds().contains(groupId);
  }

  private boolean hasProjectArtifactId(String groupId, String artifactId) {
    return getProjectArtifactIds(groupId).contains(artifactId);
  }

  private boolean hasProjectVersion(String groupId, String artifactId, String version) {
    return getProjectVersions(groupId, artifactId).contains(version);
  }

  // MavenIds of all Maven modules currently managed in this IDE project.
  private Set<MavenId> getProjectsIds() {
    Set<MavenId> result = new THashSet<>();
    for (MavenProject each : MavenProjectsManager.getInstance(myProject).getProjects()) {
      result.add(each.getMavenId());
    }
    return result;
  }
}
|
fix NPE
GitOrigin-RevId: ff75bda270e3d98a7ba4a1eb9d1dbd90e27e488e
|
plugins/maven/src/main/java/org/jetbrains/idea/maven/indices/MavenProjectIndicesManager.java
|
fix NPE
|
|
Java
|
apache-2.0
|
13bb393f97dd603b623656f9fda78f63f498fc6e
| 0
|
jboss-switchyard/switchyard,igarashitm/switchyard,igarashitm/switchyard,bfitzpat/switchyard,bfitzpat/switchyard,igarashitm/switchyard,tadayosi/switchyard,tadayosi/switchyard,cunningt/switchyard,cunningt/switchyard,bfitzpat/switchyard,igarashitm/switchyard,cunningt/switchyard,jboss-switchyard/switchyard,jboss-switchyard/switchyard,tadayosi/switchyard
|
/*
* JBoss, Home of Professional Open Source
* Copyright 2011 Red Hat Inc. and/or its affiliates and other contributors
* as indicated by the @author tags. All rights reserved.
* See the copyright.txt in the distribution for a
* full listing of individual contributors.
*
* This copyrighted material is made available to anyone wishing to use,
* modify, copy, or redistribute it subject to the terms and conditions
* of the GNU Lesser General Public License, v. 2.1.
* This program is distributed in the hope that it will be useful, but WITHOUT A
* WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
* PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details.
* You should have received a copy of the GNU Lesser General Public License,
* v.2.1 along with this distribution; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
* MA 02110-1301, USA.
*/
package org.switchyard.component.bpel.deploy;
import java.util.HashMap;
import java.util.Map;
import javax.xml.namespace.QName;
import org.apache.log4j.Logger;
import org.riftsaw.engine.BPELEngine;
import org.riftsaw.engine.BPELEngineFactory;
import org.riftsaw.engine.internal.BPELEngineImpl;
import org.switchyard.component.bpel.config.model.BPELComponentImplementationModel;
import org.switchyard.component.bpel.exchange.BPELExchangeHandler;
import org.switchyard.component.bpel.exchange.BPELExchangeHandlerFactory;
import org.switchyard.component.bpel.riftsaw.RiftsawServiceLocator;
import org.switchyard.config.Configuration;
import org.switchyard.config.model.composite.ComponentModel;
import org.switchyard.config.model.composite.ComponentReferenceModel;
import org.switchyard.config.model.composite.ComponentServiceModel;
import org.switchyard.deploy.BaseActivator;
import org.switchyard.deploy.ServiceHandler;
import org.switchyard.exception.SwitchYardException;
/**
 * Activator for the BPEL component. Lazily initializes the shared BPEL engine on
 * first service activation and closes it when the last service is deactivated.
 */
public class BPELActivator extends BaseActivator {

    private static final Logger LOG = Logger.getLogger(BPELActivator.class);

    // Handlers for all currently activated BPEL services, keyed by service name.
    // Shared static state: all mutation happens under the BPELActivator.class lock
    // so that deactivateService's size() check cannot race a concurrent put.
    private static Map<QName, BPELExchangeHandler> _handlers = new HashMap<QName , BPELExchangeHandler>();

    private static BPELEngine _engine=null;
    private static Configuration _configuration=null;
    private static RiftsawServiceLocator locator = new RiftsawServiceLocator();

    /**
     * Constructs a new Activator of type "bpel".
     */
    public BPELActivator() {
        super("bpel");
    }

    @Override
    public ServiceHandler activateService(QName serviceName, ComponentModel config) {
        init();

        BPELExchangeHandler handler = BPELExchangeHandlerFactory.instance().newBPELExchangeHandler(getServiceDomain());
        BPELComponentImplementationModel bciModel = (BPELComponentImplementationModel)config.getImplementation();

        ComponentServiceModel service = null;
        for (ComponentServiceModel csm : config.getServices()) {
            if (csm.getQName().equals(serviceName)) {
                service = csm;
                break;
            }
        }
        // Fail with a clear error instead of a NullPointerException when the
        // requested service name matches no component service.
        if (service == null) {
            throw new SwitchYardException("No component service found for '"
                    + serviceName + "' in BPEL implementation");
        }
        if (service.getInterface() == null) {
            throw new SwitchYardException("Interface not defined for component with BPEL implementation");
        }

        // take care of references
        for (ComponentReferenceModel crm : config.getReferences()) {
            locator.addServiceDomain(crm.getQName(), getServiceDomain());
            ((RiftsawServiceLocator)_engine.getServiceLocator()).initialiseReference(crm);
        }

        handler.init(serviceName, bciModel, service.getInterface().getInterface(), _engine);
        // _handlers is static shared state; use the same lock as deactivateService.
        synchronized (BPELActivator.class) {
            _handlers.put(serviceName, handler);
        }
        return handler;
    }

    @Override
    public void deactivateService(QName name, ServiceHandler handler) {
        // Remove and check-for-last under the same lock so the engine is closed
        // exactly once, after the final handler is gone.
        synchronized (BPELActivator.class) {
            _handlers.remove(name);
            if (_handlers.size() == 0 && _engine != null) {
                try {
                    _engine.close();
                    _engine = null;
                } catch (Exception e) {
                    LOG.error("Failed to close BPEL engine", e);
                }
            }
        }
    }

    /**
     * Associate the configuration with the activator.
     *
     * @param config The configuration
     */
    protected void setConfiguration(Configuration config) {
        if (LOG.isDebugEnabled()) {
            LOG.debug("Setting configuration to: "+config);
        }
        _configuration = config;
    }

    /**
     * Lazily creates and initializes the shared BPEL engine, loading default
     * properties from the classpath and overriding them from the configuration.
     */
    protected void init() {
        // _engine is a static member, so this synchronization needs to be on the class
        synchronized (BPELActivator.class) {
            if (_engine == null) {
                _engine = BPELEngineFactory.getEngine();

                try {
                    java.util.Properties props=new java.util.Properties();

                    // Load default properties
                    java.io.InputStream is=BPELEngineImpl.class.getClassLoader().getResourceAsStream("bpel.properties");
                    if (is == null) {
                        // getResourceAsStream returns null when the resource is
                        // missing; props.load(null) would fail with an unhelpful NPE.
                        throw new SwitchYardException("Failed to load default properties: bpel.properties not found on classpath");
                    }
                    try {
                        props.load(is);
                    } catch (Exception e) {
                        throw new SwitchYardException("Failed to load default properties: "+ e, e);
                    } finally {
                        // Always release the classpath resource stream.
                        try {
                            is.close();
                        } catch (java.io.IOException ioe) {
                            LOG.warn("Failed to close bpel.properties stream", ioe);
                        }
                    }

                    if (_configuration != null) {
                        // Overwrite default properties with values from configuration
                        for (Configuration child : _configuration.getChildren()) {
                            if (LOG.isDebugEnabled()) {
                                if (props.containsKey(child.getName())) {
                                    LOG.debug("Overriding BPEL property: "+child.getName()
                                            +" = "+child.getValue());
                                } else {
                                    LOG.debug("Setting BPEL property: "+child.getName()
                                            +" = "+child.getValue());
                                }
                            }
                            props.put(child.getName(), child.getValue());
                        }
                    }

                    _engine.init(locator, props);
                } catch (Exception e) {
                    throw new SwitchYardException("Failed to initialize the engine: "+ e, e);
                }
            }
        }
    }
}
|
bpel/src/main/java/org/switchyard/component/bpel/deploy/BPELActivator.java
|
/*
* JBoss, Home of Professional Open Source
* Copyright 2011 Red Hat Inc. and/or its affiliates and other contributors
* as indicated by the @author tags. All rights reserved.
* See the copyright.txt in the distribution for a
* full listing of individual contributors.
*
* This copyrighted material is made available to anyone wishing to use,
* modify, copy, or redistribute it subject to the terms and conditions
* of the GNU Lesser General Public License, v. 2.1.
* This program is distributed in the hope that it will be useful, but WITHOUT A
* WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
* PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details.
* You should have received a copy of the GNU Lesser General Public License,
* v.2.1 along with this distribution; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
* MA 02110-1301, USA.
*/
package org.switchyard.component.bpel.deploy;
import java.util.HashMap;
import java.util.Map;
import javax.xml.namespace.QName;
import org.apache.log4j.Logger;
import org.riftsaw.engine.BPELEngine;
import org.riftsaw.engine.BPELEngineFactory;
import org.riftsaw.engine.internal.BPELEngineImpl;
import org.switchyard.component.bpel.config.model.BPELComponentImplementationModel;
import org.switchyard.component.bpel.exchange.BPELExchangeHandler;
import org.switchyard.component.bpel.exchange.BPELExchangeHandlerFactory;
import org.switchyard.component.bpel.riftsaw.RiftsawServiceLocator;
import org.switchyard.config.Configuration;
import org.switchyard.config.model.composite.ComponentModel;
import org.switchyard.config.model.composite.ComponentReferenceModel;
import org.switchyard.config.model.composite.ComponentServiceModel;
import org.switchyard.deploy.BaseActivator;
import org.switchyard.deploy.ServiceHandler;
import org.switchyard.exception.SwitchYardException;
/**
* Activator for the BPEL component.
*
*/
/**
 * Activator for the BPEL component.
 *
 * <p>Manages a single shared {@link BPELEngine} instance for all BPEL
 * services: the engine is lazily created on first activation and closed
 * again once the last service handler has been deactivated.</p>
 */
public class BPELActivator extends BaseActivator {

    private static final Logger LOG = Logger.getLogger(BPELActivator.class);

    /** Active exchange handlers keyed by the service name they serve. */
    private Map<QName, BPELExchangeHandler> _handlers = new HashMap<QName, BPELExchangeHandler>();

    /** Shared engine; lazily created by init(), guarded by BPELActivator.class. */
    private static BPELEngine _engine = null;

    /** Optional configuration used to override the default engine properties. */
    private static Configuration _configuration = null;

    /** Locator the engine uses to resolve SwitchYard service references. */
    private static RiftsawServiceLocator locator = new RiftsawServiceLocator();

    /**
     * Constructs a new Activator of type "bpel".
     */
    public BPELActivator() {
        super("bpel");
    }

    /**
     * Activates a BPEL service: ensures the shared engine exists, wires up
     * any component references, and registers an exchange handler.
     *
     * @param serviceName the name of the component service to activate
     * @param config the component model containing the BPEL implementation
     * @return the initialized service handler
     * @throws SwitchYardException if the service is unknown, has no
     *         interface, or the engine cannot be initialized
     */
    @Override
    public ServiceHandler activateService(QName serviceName, ComponentModel config) {
        init();

        BPELExchangeHandler handler = BPELExchangeHandlerFactory.instance().newBPELExchangeHandler(getServiceDomain());
        BPELComponentImplementationModel bciModel = (BPELComponentImplementationModel)config.getImplementation();

        ComponentServiceModel service = null;
        for (ComponentServiceModel csm : config.getServices()) {
            if (csm.getQName().equals(serviceName)) {
                service = csm;
                break;
            }
        }

        // FIX: previously a non-matching service name fell through to a
        // NullPointerException on the next line; fail with a clear message.
        if (service == null) {
            throw new SwitchYardException("No component service found for '" + serviceName + "'");
        }
        if (service.getInterface() == null) {
            throw new SwitchYardException("Interface not defined for component with BPEL implementation");
        }

        // take care of references
        for (ComponentReferenceModel crm : config.getReferences()) {
            locator.addServiceDomain(crm.getQName(), getServiceDomain());
            ((RiftsawServiceLocator)_engine.getServiceLocator()).initialiseReference(crm);
        }

        handler.init(serviceName, bciModel, service.getInterface().getInterface(), _engine);
        _handlers.put(serviceName, handler);
        return handler;
    }

    /**
     * Deactivates a service and, if it was the last one, closes the shared
     * engine so it can be recreated on the next activation.
     *
     * @param name the service name to deactivate
     * @param handler the handler previously returned by activateService
     */
    @Override
    public void deactivateService(QName name, ServiceHandler handler) {
        _handlers.remove(name);
        // Check if engine should be removed
        synchronized (BPELActivator.class) {
            if (_handlers.size() == 0 && _engine != null) {
                try {
                    _engine.close();
                    _engine = null;
                } catch (Exception e) {
                    LOG.error("Failed to close BPEL engine", e);
                }
            }
        }
    }

    /**
     * Associate the configuration with the activator.
     *
     * @param config The configuration
     */
    protected void setConfiguration(Configuration config) {
        if (LOG.isDebugEnabled()) {
            LOG.debug("Setting configuration to: " + config);
        }
        _configuration = config;
    }

    /**
     * Lazily creates and initializes the shared BPEL engine. Default
     * properties are loaded from "bpel.properties" on the classpath and may
     * be overridden by entries from the activator configuration.
     *
     * @throws SwitchYardException if the defaults cannot be loaded or the
     *         engine fails to initialize
     */
    protected void init() {
        // _engine is a static member, so this synchronization needs to be on the class
        synchronized (BPELActivator.class) {
            if (_engine == null) {
                _engine = BPELEngineFactory.getEngine();
                try {
                    java.util.Properties props = new java.util.Properties();
                    // Load default properties
                    java.io.InputStream is = BPELEngineImpl.class.getClassLoader().getResourceAsStream("bpel.properties");
                    // FIX: getResourceAsStream returns null when the resource is
                    // missing; fail clearly instead of NPE-ing inside load().
                    if (is == null) {
                        throw new SwitchYardException("Failed to locate 'bpel.properties' on the classpath");
                    }
                    try {
                        props.load(is);
                    } catch (Exception e) {
                        throw new SwitchYardException("Failed to load default properties: " + e, e);
                    } finally {
                        // FIX: the stream was previously leaked.
                        try {
                            is.close();
                        } catch (Exception e) {
                            LOG.warn("Failed to close 'bpel.properties' stream", e);
                        }
                    }
                    if (_configuration != null) {
                        // Overwrite default properties with values from configuration
                        for (Configuration child : _configuration.getChildren()) {
                            if (LOG.isDebugEnabled()) {
                                if (props.containsKey(child.getName())) {
                                    LOG.debug("Overriding BPEL property: " + child.getName()
                                            + " = " + child.getValue());
                                } else {
                                    LOG.debug("Setting BPEL property: " + child.getName()
                                            + " = " + child.getValue());
                                }
                            }
                            props.put(child.getName(), child.getValue());
                        }
                    }
                    _engine.init(locator, props);
                } catch (Exception e) {
                    throw new SwitchYardException("Failed to initialize the engine: " + e, e);
                }
            }
        }
    }
}
|
[SWITCHYARD-709] - Fix BPEL Engine close event procedure
|
bpel/src/main/java/org/switchyard/component/bpel/deploy/BPELActivator.java
|
[SWITCHYARD-709] - Fix BPEL Engine close event procedure
|
|
Java
|
apache-2.0
|
d10d49e39a222b30e387dd000ca8e322acb5cfef
| 0
|
SirPython/Cardshifter,June92/Cardshifter,June92/Cardshifter,Cardshifter/Cardshifter,Cardshifter/Cardshifter,Cardshifter/Cardshifter,SirPython/Cardshifter,June92/Cardshifter,SirPython/Cardshifter
|
package com.cardshifter.server.model;
import java.util.List;
import java.util.Optional;
import java.util.stream.Stream;
import net.zomis.cardshifter.ecs.actions.ActionComponent;
import net.zomis.cardshifter.ecs.actions.ActionPerformEvent;
import net.zomis.cardshifter.ecs.actions.ECSAction;
import net.zomis.cardshifter.ecs.actions.TargetSet;
import net.zomis.cardshifter.ecs.ai.AIComponent;
import net.zomis.cardshifter.ecs.ai.AISystem;
import net.zomis.cardshifter.ecs.base.ComponentRetriever;
import net.zomis.cardshifter.ecs.base.ECSGame;
import net.zomis.cardshifter.ecs.base.Entity;
import net.zomis.cardshifter.ecs.base.EntityRemoveEvent;
import net.zomis.cardshifter.ecs.cards.CardComponent;
import net.zomis.cardshifter.ecs.cards.ZoneChangeEvent;
import net.zomis.cardshifter.ecs.cards.ZoneComponent;
import net.zomis.cardshifter.ecs.components.PlayerComponent;
import net.zomis.cardshifter.ecs.phase.PhaseController;
import net.zomis.cardshifter.ecs.resources.ResourceValueChange;
import net.zomis.cardshifter.ecs.resources.Resources;
import net.zomis.cardshifter.ecs.usage.PhrancisGame;
import org.apache.log4j.LogManager;
import org.apache.log4j.Logger;
import com.cardshifter.ai.CardshifterAI;
import com.cardshifter.ai.CompleteIdiot;
import com.cardshifter.api.incoming.RequestTargetsMessage;
import com.cardshifter.api.incoming.UseAbilityMessage;
import com.cardshifter.api.outgoing.AvailableTargetsMessage;
import com.cardshifter.api.outgoing.CardInfoMessage;
import com.cardshifter.api.outgoing.EntityRemoveMessage;
import com.cardshifter.api.outgoing.PlayerMessage;
import com.cardshifter.api.outgoing.ResetAvailableActionsMessage;
import com.cardshifter.api.outgoing.UpdateMessage;
import com.cardshifter.api.outgoing.UseableActionMessage;
import com.cardshifter.api.outgoing.ZoneChangeMessage;
import com.cardshifter.api.outgoing.ZoneMessage;
import com.cardshifter.server.clients.ClientIO;
import com.cardshifter.server.main.FakeAIClientTCG;
/**
 * Server-side wrapper around an ECS-based trading card game.
 *
 * <p>Bridges events from the underlying {@code ECSGame} (zone changes,
 * resource changes, entity removals) to the connected clients, and lets
 * an {@code AISystem} drive moves for fake AI clients.</p>
 */
public class TCGGame extends ServerGame {
private static final Logger logger = LogManager.getLogger(TCGGame.class);
// The underlying entity-component-system game instance.
private final ECSGame game;
// Retriever for looking up an entity's CardComponent.
private final ComponentRetriever<CardComponent> card = ComponentRetriever.retreiverFor(CardComponent.class);
@Deprecated
private final PhaseController phases; // this is not necessary anymore as Actions require a 'player' method to perform.
// Fallback AI strategy; also used when configuring AI players.
private final CardshifterAI ai = new CompleteIdiot();
private ComponentRetriever<PlayerComponent> playerData = ComponentRetriever.retreiverFor(PlayerComponent.class);
/**
 * Creates the game, registers client-notification handlers for the ECS
 * events this class broadcasts, and installs the AI system.
 *
 * @param server the owning server (supplies the AI scheduler)
 * @param id the game id
 */
public TCGGame(Server server, int id) {
super(server, id);
game = PhrancisGame.createGame();
game.getEvents().registerHandlerAfter(this, ResourceValueChange.class, this::broadcast);
game.getEvents().registerHandlerAfter(this, ZoneChangeEvent.class, this::zoneChange);
game.getEvents().registerHandlerAfter(this, EntityRemoveEvent.class, this::remove);
AISystem.setup(game, server.getScheduler());
// Re-send available actions to all players after every performed action.
game.addSystem(game -> game.getEvents().registerHandlerAfter(this, ActionPerformEvent.class, event -> this.sendAvailableActions()));
phases = ComponentRetriever.singleton(game, PhaseController.class);
}
/**
 * Notifies all players of a card moving between zones; players who could
 * not see the card before but can now also receive its full card info.
 */
private void zoneChange(ZoneChangeEvent event) {
Entity cardEntity = event.getCard();
for (ClientIO io : this.getPlayers()) {
Entity player = playerFor(io);
io.sendToClient(new ZoneChangeMessage(event.getCard().getId(), event.getSource().getZoneId(), event.getDestination().getZoneId()));
if (event.getDestination().isKnownTo(player) && !event.getSource().isKnownTo(player)) {
io.sendToClient(new CardInfoMessage(event.getDestination().getZoneId(), cardEntity.getId(), Resources.map(cardEntity)));
}
}
}
// Broadcasts entity removal to all players.
private void remove(EntityRemoveEvent event) {
this.send(new EntityRemoveMessage(event.getEntity().getId()));
}
/**
 * Broadcasts a resource-value change. Card updates go only to players who
 * can see the card's current zone; all other updates go to everyone.
 */
private void broadcast(ResourceValueChange event) {
if (getState() == GameState.NOT_STARTED) {
// let the most information be sent when actually starting the game
return;
}
Entity entity = event.getEntity();
UpdateMessage updateEvent = new UpdateMessage(entity.getId(), event.getResource().toString(), event.getNewValue());
if (card.has(entity)) {
CardComponent cardData = card.get(entity);
for (ClientIO io : this.getPlayers()) {
Entity player = playerFor(io);
if (cardData.getCurrentZone().isKnownTo(player)) {
io.sendToClient(updateEvent);
}
}
}
else {
// Player, Zone, or Game
this.send(updateEvent);
}
}
/**
 * Sends the client the possible targets for the requested action.
 * NOTE(review): only the first target set is considered - confirm
 * multi-target-set actions are not expected here.
 */
public void informAboutTargets(RequestTargetsMessage message, ClientIO client) {
ECSAction action = findAction(message.getId(), message.getAction());
TargetSet targetAction = action.getTargetSets().get(0);
List<Entity> targets = targetAction.findPossibleTargets();
int[] targetIds = targets.stream().mapToInt(e -> e.getId()).toArray();
client.sendToClient(new AvailableTargetsMessage(message.getId(), message.getAction(), targetIds, targetAction.getMin(), targetAction.getMax()));
}
// Finds the entity with the given id, or null if none exists.
public Entity findTargetable(int entityId) {
Optional<Entity> entity = game.findEntities(e -> e.getId() == entityId).stream().findFirst();
return entity.orElse(null);
}
/**
 * Looks up a named action on the entity with the given id.
 *
 * @throws IllegalArgumentException if the entity, its action component,
 *         or the named action does not exist
 */
public ECSAction findAction(int entityId, String actionId) {
Optional<Entity> entity = game.findEntities(e -> e.getId() == entityId).stream().findFirst();
if (!entity.isPresent()) {
throw new IllegalArgumentException("No such entity found");
}
Entity e = entity.get();
if (e.hasComponent(ActionComponent.class)) {
ActionComponent comp = e.getComponent(ActionComponent.class);
if (comp.getActions().contains(actionId)) {
return comp.getAction(actionId);
}
throw new IllegalArgumentException("No such action was found.");
}
throw new IllegalArgumentException(e + " does not have an action component");
}
/**
 * Performs a client-requested action, applying its target (if any) first.
 *
 * @throws IllegalArgumentException if the client is not in this game or
 *         it is not that player's turn
 */
public void handleMove(UseAbilityMessage message, ClientIO client) {
if (!this.getPlayers().contains(client)) {
throw new IllegalArgumentException("Client is not in this game: " + client);
}
if (phases.getCurrentEntity() != playerFor(client)) {
throw new IllegalArgumentException("It's not that players turn: " + client);
}
ECSAction action = findAction(message.getId(), message.getAction());
if (!action.getTargetSets().isEmpty()) {
TargetSet targetAction = action.getTargetSets().get(0);
targetAction.clearTargets();
targetAction.addTarget(findTargetable(message.getTarget()));
}
action.perform(playerFor(client));
// TODO: Add listener to game for ZoneMoves, inform players about card movements, and send CardInfoMessage when a card becomes known
sendAvailableActions();
}
/**
 * Performs at most one AI move for the AI client whose turn it is.
 * No-op unless the game is running.
 */
private void aiPerform() {
if (this.getState() != GameState.RUNNING) {
return;
}
for (ClientIO io : this.getPlayers()) {
if (io instanceof FakeAIClientTCG) {
Entity player = playerFor(io);
if (phases.getCurrentEntity() != player) {
continue;
}
ECSAction action = ai.getAction(player);
if (action != null) {
logger.info("AI Performs action: " + action);
action.perform(player);
sendAvailableActions();
return;
}
}
}
}
// Legacy move entry point; moves are driven through handleMove instead.
@Override
protected boolean makeMove(Command command, int player) {
throw new UnsupportedOperationException();
}
// No periodic status updates needed for this game type.
@Override
protected void updateStatus() {
}
/**
 * Maps a connected client to its player entity.
 *
 * @throws IllegalArgumentException if the client is not in this game
 */
public Entity playerFor(ClientIO io) {
int index = this.getPlayers().indexOf(io);
if (index < 0) {
throw new IllegalArgumentException(io + " is not a valid player in this game");
}
return getPlayer(index);
}
// Finds the player entity with the given player index.
private Entity getPlayer(int index) {
return game.findEntities(entity -> entity.hasComponent(PlayerComponent.class) && entity.getComponent(PlayerComponent.class).getIndex() == index).get(0);
}
/**
 * Starts the ECS game and sends each client the initial player info,
 * zone contents, and available actions.
 */
@Override
protected void onStart() {
this.setupAIPlayers();
game.startGame();
this.getPlayers().stream().forEach(pl -> {
Entity playerEntity = playerFor(pl);
PlayerComponent plData = playerEntity.get(playerData);
this.send(new PlayerMessage(playerEntity.getId(), plData.getIndex(), plData.getName(), Resources.map(playerEntity)));
});
this.game.findEntities(e -> true).stream().flatMap(e -> e.getSuperComponents(ZoneComponent.class).stream()).forEach(this::sendZone);
this.sendAvailableActions();
}
// Attaches an AIComponent to the player entity of each fake AI client.
private void setupAIPlayers() {
for (ClientIO io : this.getPlayers()) {
if (io instanceof FakeAIClientTCG) {
Entity player = playerFor(io);
player.addComponent(new AIComponent(new CompleteIdiot()));
logger.info("AI is configured for " + player);
}
}
}
/**
 * Resets each client's action list, then sends the current player the
 * actions they are allowed to perform.
 */
private void sendAvailableActions() {
for (ClientIO io : this.getPlayers()) {
Entity player = playerFor(io);
io.sendToClient(new ResetAvailableActionsMessage());
if (phases.getCurrentEntity() == player) {
getAllActions(game).filter(action -> action.isAllowed(player))
.forEach(action -> io.sendToClient(new UseableActionMessage(action.getOwner().getId(), action.getName(), !action.getTargetSets().isEmpty())));
}
}
}
// Streams every action of every entity that has an ActionComponent.
private static Stream<ECSAction> getAllActions(ECSGame game) {
return game.getEntitiesWithComponent(ActionComponent.class)
.stream()
.flatMap(entity -> entity.getComponent(ActionComponent.class)
.getECSActions().stream());
}
/**
 * Sends a zone summary to every player; players who can see the zone also
 * receive the card info of each card in it.
 */
private void sendZone(ZoneComponent zone) {
for (ClientIO io : this.getPlayers()) {
Entity player = playerFor(io);
io.sendToClient(constructZoneMessage(zone, player));
if (zone.isKnownTo(player)) {
zone.forEach(card -> this.sendCard(io, card));
}
}
}
// Builds the per-player view of a zone (visibility depends on the player).
private ZoneMessage constructZoneMessage(ZoneComponent zone, Entity player) {
return new ZoneMessage(zone.getZoneId(), zone.getName(),
zone.getOwner().getId(), zone.size(), zone.isKnownTo(player), zone.stream().mapToInt(e -> e.getId()).toArray());
}
// Sends full card info (zone, id, resources) to one client.
private void sendCard(ClientIO io, Entity card) {
CardComponent cardData = card.getComponent(CardComponent.class);
io.sendToClient(new CardInfoMessage(cardData.getCurrentZone().getZoneId(), card.getId(), Resources.map(card)));
}
}
|
cardshifter-server/src/main/java/com/cardshifter/server/model/TCGGame.java
|
package com.cardshifter.server.model;
import java.util.List;
import java.util.Optional;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.stream.Stream;
import net.zomis.cardshifter.ecs.actions.ActionComponent;
import net.zomis.cardshifter.ecs.actions.ECSAction;
import net.zomis.cardshifter.ecs.actions.TargetSet;
import net.zomis.cardshifter.ecs.base.ComponentRetriever;
import net.zomis.cardshifter.ecs.base.ECSGame;
import net.zomis.cardshifter.ecs.base.Entity;
import net.zomis.cardshifter.ecs.base.EntityRemoveEvent;
import net.zomis.cardshifter.ecs.cards.CardComponent;
import net.zomis.cardshifter.ecs.cards.ZoneChangeEvent;
import net.zomis.cardshifter.ecs.cards.ZoneComponent;
import net.zomis.cardshifter.ecs.components.PlayerComponent;
import net.zomis.cardshifter.ecs.phase.PhaseController;
import net.zomis.cardshifter.ecs.resources.ResourceValueChange;
import net.zomis.cardshifter.ecs.resources.Resources;
import net.zomis.cardshifter.ecs.usage.PhrancisGame;
import org.apache.log4j.LogManager;
import org.apache.log4j.Logger;
import com.cardshifter.ai.CardshifterAI;
import com.cardshifter.ai.CompleteIdiot;
import com.cardshifter.api.incoming.RequestTargetsMessage;
import com.cardshifter.api.incoming.UseAbilityMessage;
import com.cardshifter.api.outgoing.AvailableTargetsMessage;
import com.cardshifter.api.outgoing.CardInfoMessage;
import com.cardshifter.api.outgoing.EntityRemoveMessage;
import com.cardshifter.api.outgoing.PlayerMessage;
import com.cardshifter.api.outgoing.ResetAvailableActionsMessage;
import com.cardshifter.api.outgoing.UpdateMessage;
import com.cardshifter.api.outgoing.UseableActionMessage;
import com.cardshifter.api.outgoing.ZoneChangeMessage;
import com.cardshifter.api.outgoing.ZoneMessage;
import com.cardshifter.server.clients.ClientIO;
import com.cardshifter.server.main.FakeAIClientTCG;
/**
 * Server-side wrapper around an ECS-based trading card game.
 *
 * <p>Bridges ECS game events to connected clients and runs AI moves on a
 * fixed-delay background schedule.</p>
 */
public class TCGGame extends ServerGame {
private static final Logger logger = LogManager.getLogger(TCGGame.class);
// Delay between AI move attempts.
private static final long AI_DELAY_SECONDS = 5;
// The underlying entity-component-system game instance.
private final ECSGame game;
// Single-threaded scheduler driving aiPerform().
// NOTE(review): this executor is never shut down - confirm the game
// lifecycle stops it, otherwise its thread leaks when the game ends.
private final ScheduledExecutorService aiPerform = Executors.newScheduledThreadPool(1);
// Retriever for looking up an entity's CardComponent.
private final ComponentRetriever<CardComponent> card = ComponentRetriever.retreiverFor(CardComponent.class);
@Deprecated
private final PhaseController phases; // this is not necessary anymore as Actions require a 'player' method to perform.
// AI strategy used for fake AI clients.
private final CardshifterAI ai = new CompleteIdiot();
private ComponentRetriever<PlayerComponent> playerData = ComponentRetriever.retreiverFor(PlayerComponent.class);
/**
 * Creates the game, registers client-notification handlers for ECS
 * events, and schedules the recurring AI move task.
 *
 * @param server the owning server
 * @param id the game id
 */
public TCGGame(Server server, int id) {
super(server, id);
game = PhrancisGame.createGame();
game.getEvents().registerHandlerAfter(this, ResourceValueChange.class, this::broadcast);
game.getEvents().registerHandlerAfter(this, ZoneChangeEvent.class, this::zoneChange);
game.getEvents().registerHandlerAfter(this, EntityRemoveEvent.class, this::remove);
aiPerform.scheduleWithFixedDelay(this::aiPerform, 0, AI_DELAY_SECONDS, TimeUnit.SECONDS);
phases = ComponentRetriever.singleton(game, PhaseController.class);
}
/**
 * Notifies all players of a card moving between zones; players who could
 * not see the card before but can now also receive its full card info.
 */
private void zoneChange(ZoneChangeEvent event) {
Entity cardEntity = event.getCard();
for (ClientIO io : this.getPlayers()) {
Entity player = playerFor(io);
io.sendToClient(new ZoneChangeMessage(event.getCard().getId(), event.getSource().getZoneId(), event.getDestination().getZoneId()));
if (event.getDestination().isKnownTo(player) && !event.getSource().isKnownTo(player)) {
io.sendToClient(new CardInfoMessage(event.getDestination().getZoneId(), cardEntity.getId(), Resources.map(cardEntity)));
}
}
}
// Broadcasts entity removal to all players.
private void remove(EntityRemoveEvent event) {
this.send(new EntityRemoveMessage(event.getEntity().getId()));
}
/**
 * Broadcasts a resource-value change. Card updates go only to players who
 * can see the card's current zone; all other updates go to everyone.
 */
private void broadcast(ResourceValueChange event) {
if (getState() == GameState.NOT_STARTED) {
// let the most information be sent when actually starting the game
return;
}
Entity entity = event.getEntity();
UpdateMessage updateEvent = new UpdateMessage(entity.getId(), event.getResource().toString(), event.getNewValue());
if (card.has(entity)) {
CardComponent cardData = card.get(entity);
for (ClientIO io : this.getPlayers()) {
Entity player = playerFor(io);
if (cardData.getCurrentZone().isKnownTo(player)) {
io.sendToClient(updateEvent);
}
}
}
else {
// Player, Zone, or Game
this.send(updateEvent);
}
}
/**
 * Sends the client the possible targets for the requested action.
 * NOTE(review): only the first target set is considered - confirm
 * multi-target-set actions are not expected here.
 */
public void informAboutTargets(RequestTargetsMessage message, ClientIO client) {
ECSAction action = findAction(message.getId(), message.getAction());
TargetSet targetAction = action.getTargetSets().get(0);
List<Entity> targets = targetAction.findPossibleTargets();
int[] targetIds = targets.stream().mapToInt(e -> e.getId()).toArray();
client.sendToClient(new AvailableTargetsMessage(message.getId(), message.getAction(), targetIds, targetAction.getMin(), targetAction.getMax()));
}
// Finds the entity with the given id, or null if none exists.
public Entity findTargetable(int entityId) {
Optional<Entity> entity = game.findEntities(e -> e.getId() == entityId).stream().findFirst();
return entity.orElse(null);
}
/**
 * Looks up a named action on the entity with the given id.
 *
 * @throws IllegalArgumentException if the entity, its action component,
 *         or the named action does not exist
 */
public ECSAction findAction(int entityId, String actionId) {
Optional<Entity> entity = game.findEntities(e -> e.getId() == entityId).stream().findFirst();
if (!entity.isPresent()) {
throw new IllegalArgumentException("No such entity found");
}
Entity e = entity.get();
if (e.hasComponent(ActionComponent.class)) {
ActionComponent comp = e.getComponent(ActionComponent.class);
if (comp.getActions().contains(actionId)) {
return comp.getAction(actionId);
}
throw new IllegalArgumentException("No such action was found.");
}
throw new IllegalArgumentException(e + " does not have an action component");
}
/**
 * Performs a client-requested action, applying its target (if any) first.
 *
 * @throws IllegalArgumentException if the client is not in this game or
 *         it is not that player's turn
 */
public void handleMove(UseAbilityMessage message, ClientIO client) {
if (!this.getPlayers().contains(client)) {
throw new IllegalArgumentException("Client is not in this game: " + client);
}
if (phases.getCurrentEntity() != playerFor(client)) {
throw new IllegalArgumentException("It's not that players turn: " + client);
}
ECSAction action = findAction(message.getId(), message.getAction());
if (!action.getTargetSets().isEmpty()) {
TargetSet targetAction = action.getTargetSets().get(0);
targetAction.clearTargets();
targetAction.addTarget(findTargetable(message.getTarget()));
}
action.perform(playerFor(client));
// TODO: Add listener to game for ZoneMoves, inform players about card movements, and send CardInfoMessage when a card becomes known
sendAvailableActions();
}
/**
 * Scheduled task: performs at most one AI move for the AI client whose
 * turn it is. No-op unless the game is running.
 */
private void aiPerform() {
if (this.getState() != GameState.RUNNING) {
return;
}
for (ClientIO io : this.getPlayers()) {
if (io instanceof FakeAIClientTCG) {
Entity player = playerFor(io);
if (phases.getCurrentEntity() != player) {
continue;
}
ECSAction action = ai.getAction(player);
if (action != null) {
logger.info("AI Performs action: " + action);
action.perform(player);
sendAvailableActions();
return;
}
}
}
}
// Legacy move entry point; moves are driven through handleMove instead.
@Override
protected boolean makeMove(Command command, int player) {
throw new UnsupportedOperationException();
}
// No periodic status updates needed for this game type.
@Override
protected void updateStatus() {
}
/**
 * Maps a connected client to its player entity.
 *
 * @throws IllegalArgumentException if the client is not in this game
 */
public Entity playerFor(ClientIO io) {
int index = this.getPlayers().indexOf(io);
if (index < 0) {
throw new IllegalArgumentException(io + " is not a valid player in this game");
}
return getPlayer(index);
}
// Finds the player entity with the given player index.
private Entity getPlayer(int index) {
return game.findEntities(entity -> entity.hasComponent(PlayerComponent.class) && entity.getComponent(PlayerComponent.class).getIndex() == index).get(0);
}
/**
 * Starts the ECS game and sends each client the initial player info,
 * zone contents, and available actions.
 */
@Override
protected void onStart() {
// this.setupAIPlayers();
game.startGame();
this.getPlayers().stream().forEach(pl -> {
Entity playerEntity = playerFor(pl);
PlayerComponent plData = playerEntity.get(playerData);
this.send(new PlayerMessage(playerEntity.getId(), plData.getIndex(), plData.getName(), Resources.map(playerEntity)));
});
this.game.findEntities(e -> true).stream().flatMap(e -> e.getSuperComponents(ZoneComponent.class).stream()).forEach(this::sendZone);
this.sendAvailableActions();
}
// Disabled: AIComponent wiring superseded by the scheduled aiPerform task.
// private void setupAIPlayers() {
// for (ClientIO io : this.getPlayers()) {
// if (io instanceof FakeAIClientTCG) {
// Entity player = playerFor(io);
// player.addComponent(new AIComponent(new CompleteIdiot()));
// logger.info("AI is configured for " + player);
// }
// }
// }
/**
 * Resets each client's action list, then sends the current player the
 * actions they are allowed to perform.
 */
private void sendAvailableActions() {
for (ClientIO io : this.getPlayers()) {
Entity player = playerFor(io);
io.sendToClient(new ResetAvailableActionsMessage());
if (phases.getCurrentEntity() == player) {
getAllActions(game).filter(action -> action.isAllowed(player))
.forEach(action -> io.sendToClient(new UseableActionMessage(action.getOwner().getId(), action.getName(), !action.getTargetSets().isEmpty())));
}
}
}
// Streams every action of every entity that has an ActionComponent.
private static Stream<ECSAction> getAllActions(ECSGame game) {
return game.getEntitiesWithComponent(ActionComponent.class)
.stream()
.flatMap(entity -> entity.getComponent(ActionComponent.class)
.getECSActions().stream());
}
/**
 * Sends a zone summary to every player; players who can see the zone also
 * receive the card info of each card in it.
 */
private void sendZone(ZoneComponent zone) {
for (ClientIO io : this.getPlayers()) {
Entity player = playerFor(io);
io.sendToClient(constructZoneMessage(zone, player));
if (zone.isKnownTo(player)) {
zone.forEach(card -> this.sendCard(io, card));
}
}
}
// Builds the per-player view of a zone (visibility depends on the player).
private ZoneMessage constructZoneMessage(ZoneComponent zone, Entity player) {
return new ZoneMessage(zone.getZoneId(), zone.getName(),
zone.getOwner().getId(), zone.size(), zone.isKnownTo(player), zone.stream().mapToInt(e -> e.getId()).toArray());
}
// Sends full card info (zone, id, resources) to one client.
private void sendCard(ClientIO io, Entity card) {
CardComponent cardData = card.getComponent(CardComponent.class);
io.sendToClient(new CardInfoMessage(cardData.getCurrentZone().getZoneId(), card.getId(), Resources.map(card)));
}
}
|
Server now uses AISystem to perform the AI in a separate thread
|
cardshifter-server/src/main/java/com/cardshifter/server/model/TCGGame.java
|
Server now uses AISystem to perform the AI in a separate thread
|
|
Java
|
apache-2.0
|
7b668b4506ac4f8ac7421afbb2f673cb4e40db9e
| 0
|
GlenRSmith/elasticsearch,GlenRSmith/elasticsearch,GlenRSmith/elasticsearch,GlenRSmith/elasticsearch,robin13/elasticsearch,robin13/elasticsearch,robin13/elasticsearch,robin13/elasticsearch,robin13/elasticsearch,GlenRSmith/elasticsearch
|
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
package org.elasticsearch.upgrades;
import org.apache.http.util.EntityUtils;
import org.elasticsearch.Version;
import org.elasticsearch.client.Request;
import org.elasticsearch.client.Response;
import org.elasticsearch.cluster.metadata.DataStream;
import org.elasticsearch.common.Booleans;
import org.hamcrest.Matchers;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.List;
import static org.elasticsearch.upgrades.IndexingIT.assertCount;
/**
 * Rolling-upgrade integration tests verifying that data streams created on
 * the old cluster keep working while nodes are upgraded.
 */
public class DataStreamsUpgradeIT extends AbstractUpgradeTestCase {
/**
 * Old cluster: creates a data-stream template and indexes 1000 docs into
 * "logs-foobar". Each mixed round: rolls the stream over, indexes one
 * more doc, and checks it landed in an expected backing index. Finally
 * asserts the cumulative document count for the current cluster phase.
 */
public void testDataStreams() throws IOException {
assumeTrue("no data streams in versions before " + Version.V_7_9_0, UPGRADE_FROM_VERSION.onOrAfter(Version.V_7_9_0));
if (CLUSTER_TYPE == ClusterType.OLD) {
String requestBody = "{\n" +
" \"index_patterns\":[\"logs-*\"],\n" +
" \"template\": {\n" +
" \"mappings\": {\n" +
" \"properties\": {\n" +
" \"@timestamp\": {\n" +
" \"type\": \"date\"\n" +
" }\n" +
" }\n" +
" }\n" +
" },\n" +
" \"data_stream\":{\n" +
" }\n" +
" }";
Request request = new Request("PUT", "/_index_template/1");
request.setJsonEntity(requestBody);
useIgnoreMultipleMatchingTemplatesWarningsHandler(request);
client().performRequest(request);
// Bulk-index 1000 documents into the data stream.
StringBuilder b = new StringBuilder();
for (int i = 0; i < 1000; i++) {
b.append("{\"create\":{\"_index\":\"").append("logs-foobar").append("\"}}\n");
b.append("{\"@timestamp\":\"2020-12-12\",\"test\":\"value").append(i).append("\"}\n");
}
Request bulk = new Request("POST", "/_bulk");
bulk.addParameter("refresh", "true");
bulk.addParameter("filter_path", "errors");
bulk.setJsonEntity(b.toString());
Response response = client().performRequest(bulk);
assertEquals("{\"errors\":false}", EntityUtils.toString(response.getEntity(), StandardCharsets.UTF_8));
} else if (CLUSTER_TYPE == ClusterType.MIXED) {
long nowMillis = System.currentTimeMillis();
Request rolloverRequest = new Request("POST", "/logs-foobar/_rollover");
client().performRequest(rolloverRequest);
Request index = new Request("POST", "/logs-foobar/_doc");
index.addParameter("refresh", "true");
index.addParameter("filter_path", "_index");
if (Booleans.parseBoolean(System.getProperty("tests.first_round"))) {
// include legacy name and date-named indices with today +/-1 in case of clock skew
var expectedIndices = List.of(
"{\"_index\":\"" + DataStream.getLegacyDefaultBackingIndexName("logs-foobar", 2) + "\"}",
"{\"_index\":\"" + DataStream.getDefaultBackingIndexName("logs-foobar", 2, nowMillis) + "\"}",
"{\"_index\":\"" + DataStream.getDefaultBackingIndexName("logs-foobar", 2, nowMillis + 86400000) + "\"}",
"{\"_index\":\"" + DataStream.getDefaultBackingIndexName("logs-foobar", 2, nowMillis - 86400000) + "\"}"
);
index.setJsonEntity("{\"@timestamp\":\"2020-12-12\",\"test\":\"value1000\"}");
Response response = client().performRequest(index);
assertThat(expectedIndices, Matchers.hasItem(EntityUtils.toString(response.getEntity(), StandardCharsets.UTF_8)));
} else {
// include legacy name and date-named indices with today +/-1 in case of clock skew
var expectedIndices = List.of(
"{\"_index\":\"" + DataStream.getLegacyDefaultBackingIndexName("logs-foobar", 3) + "\"}",
"{\"_index\":\"" + DataStream.getDefaultBackingIndexName("logs-foobar", 3, nowMillis) + "\"}",
"{\"_index\":\"" + DataStream.getDefaultBackingIndexName("logs-foobar", 3, nowMillis + 86400000) + "\"}",
"{\"_index\":\"" + DataStream.getDefaultBackingIndexName("logs-foobar", 3, nowMillis - 86400000) + "\"}"
);
index.setJsonEntity("{\"@timestamp\":\"2020-12-12\",\"test\":\"value1001\"}");
Response response = client().performRequest(index);
assertThat(expectedIndices, Matchers.hasItem(EntityUtils.toString(response.getEntity(), StandardCharsets.UTF_8)));
}
}
// Document count grows by one per mixed round: 1000 -> 1001 -> 1002.
final int expectedCount;
if (CLUSTER_TYPE.equals(ClusterType.OLD)) {
expectedCount = 1000;
} else if (CLUSTER_TYPE.equals(ClusterType.MIXED)) {
if (Booleans.parseBoolean(System.getProperty("tests.first_round"))) {
expectedCount = 1001;
} else {
expectedCount = 1002;
}
} else if (CLUSTER_TYPE.equals(ClusterType.UPGRADED)) {
expectedCount = 1002;
} else {
throw new AssertionError("unexpected cluster type");
}
assertCount("logs-foobar", expectedCount);
}
/**
 * Regression test: indexes into both a data stream ("logs-barbaz") and a
 * plain index whose name matches the stream pattern
 * ("logs-barbaz-2021.01.13") on the old cluster, then verifies both
 * survive the upgrade (data-stream validation must not break it).
 */
public void testDataStreamValidationDoesNotBreakUpgrade() throws Exception {
assumeTrue("Bug started to occur from version: " + Version.V_7_10_2, UPGRADE_FROM_VERSION.onOrAfter(Version.V_7_10_2));
if (CLUSTER_TYPE == ClusterType.OLD) {
String requestBody = "{\n" +
" \"index_patterns\":[\"logs-*\"],\n" +
" \"template\": {\n" +
" \"mappings\": {\n" +
" \"properties\": {\n" +
" \"@timestamp\": {\n" +
" \"type\": \"date\"\n" +
" }\n" +
" }\n" +
" }\n" +
" },\n" +
" \"data_stream\":{\n" +
" }\n" +
" }";
Request request = new Request("PUT", "/_index_template/1");
request.setJsonEntity(requestBody);
useIgnoreMultipleMatchingTemplatesWarningsHandler(request);
client().performRequest(request);
StringBuilder b = new StringBuilder();
b.append("{\"create\":{\"_index\":\"").append("logs-barbaz").append("\"}}\n");
b.append("{\"@timestamp\":\"2020-12-12\",\"test\":\"value").append(0).append("\"}\n");
b.append("{\"create\":{\"_index\":\"").append("logs-barbaz-2021.01.13").append("\"}}\n");
b.append("{\"@timestamp\":\"2020-12-12\",\"test\":\"value").append(0).append("\"}\n");
Request bulk = new Request("POST", "/_bulk");
bulk.addParameter("refresh", "true");
bulk.addParameter("filter_path", "errors");
bulk.setJsonEntity(b.toString());
Response response = client().performRequest(bulk);
assertEquals("{\"errors\":false}", EntityUtils.toString(response.getEntity(), StandardCharsets.UTF_8));
Request rolloverRequest = new Request("POST", "/logs-barbaz-2021.01.13/_rollover");
client().performRequest(rolloverRequest);
} else {
assertCount("logs-barbaz", 1);
assertCount("logs-barbaz-2021.01.13", 1);
}
}
}
|
x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/DataStreamsUpgradeIT.java
|
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
package org.elasticsearch.upgrades;
import org.apache.http.util.EntityUtils;
import org.elasticsearch.Version;
import org.elasticsearch.client.Request;
import org.elasticsearch.client.Response;
import org.elasticsearch.cluster.metadata.DataStream;
import org.elasticsearch.common.Booleans;
import org.hamcrest.Matchers;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.List;
import static org.elasticsearch.upgrades.IndexingIT.assertCount;
/**
 * Rolling-upgrade integration test verifying that data streams created on
 * the old cluster keep working while nodes are upgraded.
 */
public class DataStreamsUpgradeIT extends AbstractUpgradeTestCase {
/**
 * Old cluster: creates a data-stream template, indexes 1000 docs into
 * "logs-foobar" plus one doc into a plain index matching the pattern,
 * and rolls the plain index over. Each mixed round: rolls the stream
 * over, indexes one more doc, and checks it landed in an expected
 * backing index. Finally asserts the cumulative document count.
 */
public void testDataStreams() throws IOException {
assumeTrue("no data streams in versions before " + Version.V_7_9_0, UPGRADE_FROM_VERSION.onOrAfter(Version.V_7_9_0))
;
if (CLUSTER_TYPE == ClusterType.OLD) {
String requestBody = "{\n" +
" \"index_patterns\":[\"logs-*\"],\n" +
" \"template\": {\n" +
" \"mappings\": {\n" +
" \"properties\": {\n" +
" \"@timestamp\": {\n" +
" \"type\": \"date\"\n" +
" }\n" +
" }\n" +
" }\n" +
" },\n" +
" \"data_stream\":{\n" +
" }\n" +
" }";
Request request = new Request("PUT", "/_index_template/1");
request.setJsonEntity(requestBody);
useIgnoreMultipleMatchingTemplatesWarningsHandler(request);
client().performRequest(request);
// Bulk-index 1000 documents into the stream and one into the plain index.
StringBuilder b = new StringBuilder();
for (int i = 0; i < 1000; i++) {
b.append("{\"create\":{\"_index\":\"").append("logs-foobar").append("\"}}\n");
b.append("{\"@timestamp\":\"2020-12-12\",\"test\":\"value").append(i).append("\"}\n");
}
b.append("{\"create\":{\"_index\":\"").append("logs-foobar-2021.01.13").append("\"}}\n");
b.append("{\"@timestamp\":\"2020-12-12\",\"test\":\"value").append(0).append("\"}\n");
Request bulk = new Request("POST", "/_bulk");
bulk.addParameter("refresh", "true");
bulk.addParameter("filter_path", "errors");
bulk.setJsonEntity(b.toString());
Response response = client().performRequest(bulk);
assertEquals("{\"errors\":false}", EntityUtils.toString(response.getEntity(), StandardCharsets.UTF_8));
Request rolloverRequest = new Request("POST", "/logs-foobar-2021.01.13/_rollover");
client().performRequest(rolloverRequest);
} else if (CLUSTER_TYPE == ClusterType.MIXED) {
long nowMillis = System.currentTimeMillis();
Request rolloverRequest = new Request("POST", "/logs-foobar/_rollover");
client().performRequest(rolloverRequest);
Request index = new Request("POST", "/logs-foobar/_doc");
index.addParameter("refresh", "true");
index.addParameter("filter_path", "_index");
if (Booleans.parseBoolean(System.getProperty("tests.first_round"))) {
// include legacy name and date-named indices with today +/-1 in case of clock skew
var expectedIndices = List.of(
"{\"_index\":\"" + DataStream.getLegacyDefaultBackingIndexName("logs-foobar", 2) + "\"}",
"{\"_index\":\"" + DataStream.getDefaultBackingIndexName("logs-foobar", 2, nowMillis) + "\"}",
"{\"_index\":\"" + DataStream.getDefaultBackingIndexName("logs-foobar", 2, nowMillis + 86400000) + "\"}",
"{\"_index\":\"" + DataStream.getDefaultBackingIndexName("logs-foobar", 2, nowMillis - 86400000) + "\"}"
);
index.setJsonEntity("{\"@timestamp\":\"2020-12-12\",\"test\":\"value1000\"}");
Response response = client().performRequest(index);
assertThat(expectedIndices, Matchers.hasItem(EntityUtils.toString(response.getEntity(), StandardCharsets.UTF_8)));
} else {
// include legacy name and date-named indices with today +/-1 in case of clock skew
var expectedIndices = List.of(
"{\"_index\":\"" + DataStream.getLegacyDefaultBackingIndexName("logs-foobar", 3) + "\"}",
"{\"_index\":\"" + DataStream.getDefaultBackingIndexName("logs-foobar", 3, nowMillis) + "\"}",
"{\"_index\":\"" + DataStream.getDefaultBackingIndexName("logs-foobar", 3, nowMillis + 86400000) + "\"}",
"{\"_index\":\"" + DataStream.getDefaultBackingIndexName("logs-foobar", 3, nowMillis - 86400000) + "\"}"
);
index.setJsonEntity("{\"@timestamp\":\"2020-12-12\",\"test\":\"value1001\"}");
Response response = client().performRequest(index);
assertThat(expectedIndices, Matchers.hasItem(EntityUtils.toString(response.getEntity(), StandardCharsets.UTF_8)));
}
}
// Document count grows by one per mixed round: 1000 -> 1001 -> 1002.
final int expectedCount;
if (CLUSTER_TYPE.equals(ClusterType.OLD)) {
expectedCount = 1000;
} else if (CLUSTER_TYPE.equals(ClusterType.MIXED)) {
if (Booleans.parseBoolean(System.getProperty("tests.first_round"))) {
expectedCount = 1001;
} else {
expectedCount = 1002;
}
} else if (CLUSTER_TYPE.equals(ClusterType.UPGRADED)) {
expectedCount = 1002;
} else {
throw new AssertionError("unexpected cluster type");
}
assertCount("logs-foobar", expectedCount);
}
}
|
Move test for upgrade bug (#69625) to a separate test. (#69791)
|
x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/DataStreamsUpgradeIT.java
|
Move test for upgrade bug (#69625) to a separate test. (#69791)
|
|
Java
|
apache-2.0
|
bea4b1acab87caffa35ff91cf3c12fe9eb82b406
| 0
|
google/error-prone,cushon/error-prone,cushon/error-prone,cushon/error-prone,cushon/error-prone,google/error-prone
|
/*
* Copyright 2013 The Error Prone Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.errorprone.bugpatterns;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.collect.Iterables.getLast;
import static com.google.errorprone.BugPattern.Category.JUNIT;
import static com.google.errorprone.BugPattern.SeverityLevel.ERROR;
import static com.google.errorprone.bugpatterns.TryFailThrowable.CaughtType.JAVA_LANG_ERROR;
import static com.google.errorprone.bugpatterns.TryFailThrowable.CaughtType.JAVA_LANG_THROWABLE;
import static com.google.errorprone.bugpatterns.TryFailThrowable.CaughtType.SOME_ASSERTION_FAILURE;
import static com.google.errorprone.bugpatterns.TryFailThrowable.MatchResult.doesNotMatch;
import static com.google.errorprone.bugpatterns.TryFailThrowable.MatchResult.matches;
import static com.google.errorprone.fixes.SuggestedFix.replace;
import static com.google.errorprone.matchers.Description.NO_MATCH;
import static com.google.errorprone.matchers.Matchers.anyOf;
import static com.google.errorprone.matchers.Matchers.isSameType;
import static com.google.errorprone.util.ASTHelpers.getSymbol;
import static com.sun.source.tree.Tree.Kind.BLOCK;
import static com.sun.source.tree.Tree.Kind.EMPTY_STATEMENT;
import static com.sun.source.tree.Tree.Kind.METHOD;
import static com.sun.source.tree.Tree.Kind.METHOD_INVOCATION;
import static java.lang.String.format;
import com.google.errorprone.BugPattern;
import com.google.errorprone.BugPattern.ProvidesFix;
import com.google.errorprone.VisitorState;
import com.google.errorprone.bugpatterns.BugChecker.TryTreeMatcher;
import com.google.errorprone.fixes.Fix;
import com.google.errorprone.fixes.SuggestedFix;
import com.google.errorprone.matchers.Description;
import com.google.errorprone.matchers.Matcher;
import com.google.errorprone.matchers.Matchers;
import com.sun.source.tree.BlockTree;
import com.sun.source.tree.CatchTree;
import com.sun.source.tree.ExpressionStatementTree;
import com.sun.source.tree.ExpressionTree;
import com.sun.source.tree.MethodInvocationTree;
import com.sun.source.tree.StatementTree;
import com.sun.source.tree.Tree;
import com.sun.source.tree.TryTree;
import com.sun.source.tree.VariableTree;
import com.sun.tools.javac.code.Symbol;
import com.sun.tools.javac.code.Symbol.MethodSymbol;
import com.sun.tools.javac.code.Symbol.VarSymbol;
import com.sun.tools.javac.code.Types;
import java.util.List;
/**
 * A bug checker for the following code pattern:
 *
 * <pre>
 * try {
 *   // do something
 *   Assert.fail(); // or any Assert.assert*
 *   // maybe do something more
 * } catch (Throwable t) {
 *   // empty or only comments
 * }
 * </pre>
 *
 * <p>Matches all static methods named "fail" or starting with "assert" from the following classes:
 *
 * <ul>
 *   <li>{@code org.junit.Assert},
 *   <li>{@code junit.framework.Assert},
 *   <li>{@code junit.framework.TestCase} - which overrides the methods from Assert in order to
 *       deprecate them and
 *   <li>every class whose name ends with "MoreAsserts".
 * </ul>
 *
 * Possible improvements/generalizations of this matcher:
 *
 * <ul>
 *   <li>support multiple catch() blocks
 * </ul>
 *
 * @author adamwos@google.com (Adam Wos)
 */
@BugPattern(
    name = "TryFailThrowable",
    summary = "Catching Throwable/Error masks failures from fail() or assert*() in the try block",
    category = JUNIT,
    severity = ERROR,
    providesFix = ProvidesFix.REQUIRES_HUMAN_ATTENTION)
public class TryFailThrowable extends BugChecker implements TryTreeMatcher {

  // Matchers for the caught exception type; each maps to one CaughtType constant below.
  private static final Matcher<VariableTree> javaLangThrowable = isSameType("java.lang.Throwable");
  private static final Matcher<VariableTree> javaLangError = isSameType("java.lang.Error");
  private static final Matcher<VariableTree> someAssertionFailure =
      anyOf(
          isSameType("java.lang.AssertionError"),
          isSameType("junit.framework.AssertionFailedError"));

  // Matches a static call to fail(...)/assert*(...) on a known assertion class.
  private static final Matcher<ExpressionTree> failOrAssert =
      new Matcher<ExpressionTree>() {
        @Override
        public boolean matches(ExpressionTree item, VisitorState state) {
          if (item.getKind() != METHOD_INVOCATION) {
            return false;
          }
          Symbol sym = getSymbol(item);
          if (!(sym instanceof MethodSymbol)) {
            throw new IllegalArgumentException("not a method call");
          }
          if (!sym.isStatic()) {
            return false;
          }
          String methodName = sym.getQualifiedName().toString();
          String className = sym.owner.getQualifiedName().toString();
          // TODO(cpovirk): Look for literal "throw new AssertionError()," etc.
          return (methodName.startsWith("assert") || methodName.startsWith("fail"))
              && (className.equals("org.junit.Assert")
                  || className.equals("junit.framework.Assert")
                  || className.equals("junit.framework.TestCase")
                  || className.endsWith("MoreAsserts"));
        }
      };

  @Override
  public Description matchTry(TryTree tree, VisitorState state) {
    MatchResult matchResult = tryTreeMatches(tree, state);
    if (!matchResult.matched()) {
      return NO_MATCH;
    }
    // Offer fixes in order of preference; which ones apply depends on the caught type.
    Description.Builder builder = buildDescription(tree.getCatches().get(0).getParameter());
    if (matchResult.caughtType == JAVA_LANG_THROWABLE) {
      builder.addFix(fixByCatchingException(tree));
    }
    if (matchResult.caughtType == SOME_ASSERTION_FAILURE) {
      builder.addFix(fixByThrowingJavaLangError(matchResult.failStatement, state));
    }
    builder.addFix(fixWithReturnOrBoolean(tree, matchResult.failStatement, state));
    return builder.build();
  }

  /** Narrows the catch parameter from Throwable to Exception so assertion errors escape. */
  private static Fix fixByCatchingException(TryTree tryTree) {
    VariableTree catchParameter = getOnlyCatch(tryTree).getParameter();
    return replace(catchParameter, "Exception " + catchParameter.getName());
  }

  /** Replaces the fail/assert statement with {@code throw new Error(...)} so the catch can't swallow it. */
  private static Fix fixByThrowingJavaLangError(StatementTree failStatement, VisitorState state) {
    String messageSnippet = getMessageSnippet(failStatement, state, HasOtherParameters.FALSE);
    return replace(failStatement, format("throw new Error(%s);", messageSnippet));
  }

  /**
   * Chooses between the "early return" rewrite (only possible when the try is the last
   * statement of the enclosing method) and the "boolean flag" rewrite.
   */
  private static Fix fixWithReturnOrBoolean(
      TryTree tryTree, StatementTree failStatement, VisitorState state) {
    Tree parent = state.getPath().getParentPath().getLeaf();
    Tree grandparent = state.getPath().getParentPath().getParentPath().getLeaf();
    if (parent.getKind() == BLOCK
        && grandparent.getKind() == METHOD
        && tryTree == getLastStatement((BlockTree) parent)) {
      return fixWithReturn(tryTree, failStatement, state);
    } else {
      return fixWithBoolean(tryTree, failStatement, state);
    }
  }

  /** Rewrites to: catch block returns; a fail(...) is appended after the try. */
  private static Fix fixWithReturn(
      TryTree tryTree, StatementTree failStatement, VisitorState state) {
    SuggestedFix.Builder builder = SuggestedFix.builder();
    builder.delete(failStatement);
    builder.replace(getOnlyCatch(tryTree).getBlock(), "{ return; }");
    // TODO(cpovirk): Use the file's preferred assertion API.
    String messageSnippet = getMessageSnippet(failStatement, state, HasOtherParameters.FALSE);
    builder.postfixWith(tryTree, format("fail(%s);", messageSnippet));
    return builder.build();
  }

  /** Rewrites to: a {@code threw} flag set in the catch and asserted after the try. */
  private static Fix fixWithBoolean(
      TryTree tryTree, StatementTree failStatement, VisitorState state) {
    SuggestedFix.Builder builder = SuggestedFix.builder();
    builder.delete(failStatement);
    builder.prefixWith(tryTree, "boolean threw = false;");
    builder.replace(getOnlyCatch(tryTree).getBlock(), "{ threw = true; }");
    // TODO(cpovirk): Use the file's preferred assertion API.
    String messageSnippet = getMessageSnippet(failStatement, state, HasOtherParameters.TRUE);
    builder.postfixWith(tryTree, format("assertTrue(%sthrew);", messageSnippet));
    return builder.build();
  }

  /**
   * Extracts the original assertion's message argument (if its first parameter is a String)
   * so the rewritten call can keep it; returns "" otherwise.
   */
  private static String getMessageSnippet(
      StatementTree failStatement, VisitorState state, HasOtherParameters hasOtherParameters) {
    ExpressionTree expression = ((ExpressionStatementTree) failStatement).getExpression();
    MethodSymbol sym = (MethodSymbol) getSymbol(expression);
    String tail = hasOtherParameters == HasOtherParameters.TRUE ? ", " : "";
    // The above casts were checked earlier by failOrAssert.
    return hasInitialStringParameter(sym, state)
        ? state.getSourceForNode(((MethodInvocationTree) expression).getArguments().get(0)) + tail
        : "";
  }

  /**
   * Whether the assertion method we're inserting a call to has extra parameters besides its message
   * (like {@code assertTrue}) or not (like {@code fail}).
   */
  enum HasOtherParameters {
    TRUE,
    FALSE;
  }

  private static boolean hasInitialStringParameter(MethodSymbol sym, VisitorState state) {
    Types types = state.getTypes();
    List<VarSymbol> parameters = sym.getParameters();
    return !parameters.isEmpty()
        && types.isSameType(parameters.get(0).type, state.getSymtab().stringType);
  }

  /**
   * Decides whether the try tree exhibits the buggy pattern: a fail/assert statement in the
   * try block and a single, effectively-empty catch of Throwable/Error/an assertion failure.
   */
  private static MatchResult tryTreeMatches(TryTree tryTree, VisitorState state) {
    BlockTree tryBlock = tryTree.getBlock();
    List<? extends StatementTree> statements = tryBlock.getStatements();
    if (statements.isEmpty()) {
      return doesNotMatch();
    }
    // Check if any of the statements is a fail or assert* method (i.e. any
    // method that can throw an AssertionFailedError)
    StatementTree failStatement = null;
    for (StatementTree statement : statements) {
      if (!(statement instanceof ExpressionStatementTree)) {
        continue;
      }
      if (failOrAssert.matches(((ExpressionStatementTree) statement).getExpression(), state)) {
        failStatement = statement;
        break;
      }
    }
    if (failStatement == null) {
      return doesNotMatch();
    }
    // Verify that the only catch clause catches Throwable
    List<? extends CatchTree> catches = tryTree.getCatches();
    if (catches.size() != 1) {
      // TODO(adamwos): this could be supported - only the last catch would need
      // to be checked - it would either be Throwable or Error.
      return doesNotMatch();
    }
    CatchTree catchTree = catches.get(0);
    VariableTree catchType = catchTree.getParameter();
    boolean catchesThrowable = javaLangThrowable.matches(catchType, state);
    boolean catchesError = javaLangError.matches(catchType, state);
    boolean catchesOtherError = someAssertionFailure.matches(catchType, state);
    if (!catchesThrowable && !catchesError && !catchesOtherError) {
      return doesNotMatch();
    }
    // Verify that the catch block is empty or contains only comments.
    List<? extends StatementTree> catchStatements = catchTree.getBlock().getStatements();
    for (StatementTree catchStatement : catchStatements) {
      // Comments are not a part of the AST. Therefore, we should either get
      // an empty list of statements (regardless of the number of comments),
      // or a list of empty statements.
      if (!Matchers.<Tree>kindIs(EMPTY_STATEMENT).matches(catchStatement, state)) {
        return doesNotMatch();
      }
    }
    return matches(
        failStatement,
        catchesThrowable
            ? JAVA_LANG_THROWABLE
            : catchesError ? JAVA_LANG_ERROR : SOME_ASSERTION_FAILURE);
  }

  /** Result of {@link #tryTreeMatches}: the offending statement plus what the catch caught. */
  static final class MatchResult {
    static final MatchResult DOES_NOT_MATCH = new MatchResult(null, null);

    static MatchResult matches(StatementTree failStatement, CaughtType caughtType) {
      return new MatchResult(checkNotNull(failStatement), checkNotNull(caughtType));
    }

    static MatchResult doesNotMatch() {
      return DOES_NOT_MATCH;
    }

    final StatementTree failStatement;
    final CaughtType caughtType;

    MatchResult(StatementTree failStatement, CaughtType caughtType) {
      this.failStatement = failStatement;
      this.caughtType = caughtType;
    }

    boolean matched() {
      // DOES_NOT_MATCH is the only instance with a null caughtType.
      return caughtType != null;
    }
  }

  enum CaughtType {
    JAVA_LANG_ERROR,
    JAVA_LANG_THROWABLE,
    SOME_ASSERTION_FAILURE,
    ;
  }

  private static StatementTree getLastStatement(BlockTree blockTree) {
    return getLast(blockTree.getStatements());
  }

  private static CatchTree getOnlyCatch(TryTree tryTree) {
    return tryTree.getCatches().get(0);
  }
}
|
core/src/main/java/com/google/errorprone/bugpatterns/TryFailThrowable.java
|
/*
* Copyright 2013 The Error Prone Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.errorprone.bugpatterns;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.collect.Iterables.getLast;
import static com.google.errorprone.BugPattern.Category.JUNIT;
import static com.google.errorprone.BugPattern.SeverityLevel.ERROR;
import static com.google.errorprone.bugpatterns.TryFailThrowable.CaughtType.JAVA_LANG_ERROR;
import static com.google.errorprone.bugpatterns.TryFailThrowable.CaughtType.JAVA_LANG_THROWABLE;
import static com.google.errorprone.bugpatterns.TryFailThrowable.CaughtType.SOME_ASSERTION_FAILURE;
import static com.google.errorprone.bugpatterns.TryFailThrowable.MatchResult.doesNotMatch;
import static com.google.errorprone.bugpatterns.TryFailThrowable.MatchResult.matches;
import static com.google.errorprone.fixes.SuggestedFix.replace;
import static com.google.errorprone.matchers.Description.NO_MATCH;
import static com.google.errorprone.matchers.Matchers.anyOf;
import static com.google.errorprone.matchers.Matchers.isSameType;
import static com.google.errorprone.util.ASTHelpers.getSymbol;
import static com.sun.source.tree.Tree.Kind.BLOCK;
import static com.sun.source.tree.Tree.Kind.EMPTY_STATEMENT;
import static com.sun.source.tree.Tree.Kind.METHOD;
import static com.sun.source.tree.Tree.Kind.METHOD_INVOCATION;
import static java.lang.String.format;
import com.google.errorprone.BugPattern;
import com.google.errorprone.BugPattern.ProvidesFix;
import com.google.errorprone.VisitorState;
import com.google.errorprone.bugpatterns.BugChecker.TryTreeMatcher;
import com.google.errorprone.fixes.Fix;
import com.google.errorprone.fixes.SuggestedFix;
import com.google.errorprone.matchers.Description;
import com.google.errorprone.matchers.Matcher;
import com.google.errorprone.matchers.Matchers;
import com.sun.source.tree.BlockTree;
import com.sun.source.tree.CatchTree;
import com.sun.source.tree.ExpressionStatementTree;
import com.sun.source.tree.ExpressionTree;
import com.sun.source.tree.MethodInvocationTree;
import com.sun.source.tree.StatementTree;
import com.sun.source.tree.Tree;
import com.sun.source.tree.TryTree;
import com.sun.source.tree.VariableTree;
import com.sun.tools.javac.code.Symbol;
import com.sun.tools.javac.code.Symbol.MethodSymbol;
import com.sun.tools.javac.code.Symbol.VarSymbol;
import com.sun.tools.javac.code.Types;
import java.util.List;
/**
 * A bug checker for the following code pattern:
 *
 * <pre>
 * try {
 *   // do something
 *   Assert.fail(); // or any Assert.assert*
 *   // maybe do something more
 * } catch (Throwable t) {
 *   // empty or only comments
 * }
 * </pre>
 *
 * <p>Matches all static methods named "fail" or starting with "assert" from the following classes:
 *
 * <ul>
 *   <li>{@code org.junit.Assert},
 *   <li>{@code junit.framework.Assert},
 *   <li>{@code junit.framework.TestCase} - which overrides the methods from Assert in order to
 *       deprecate them,
 *   <li>{@code com.google.testing.util.MoreAsserts} and
 *   <li>every class whose name ends with "MoreAsserts".
 * </ul>
 *
 * Possible improvements/generalizations of this matcher:
 *
 * <ul>
 *   <li>support multiple catch() blocks
 *   <li>support MoreAsserts
 * </ul>
 *
 * @author adamwos@google.com (Adam Wos)
 */
@BugPattern(
    name = "TryFailThrowable",
    summary = "Catching Throwable/Error masks failures from fail() or assert*() in the try block",
    category = JUNIT,
    severity = ERROR,
    providesFix = ProvidesFix.REQUIRES_HUMAN_ATTENTION)
public class TryFailThrowable extends BugChecker implements TryTreeMatcher {

  // Matchers for the caught exception type; each maps to one CaughtType constant below.
  private static final Matcher<VariableTree> javaLangThrowable = isSameType("java.lang.Throwable");
  private static final Matcher<VariableTree> javaLangError = isSameType("java.lang.Error");
  private static final Matcher<VariableTree> someAssertionFailure =
      anyOf(
          isSameType("java.lang.AssertionError"),
          isSameType("junit.framework.AssertionFailedError"));

  // Matches a static call to fail(...)/assert*(...) on a known assertion class.
  private static final Matcher<ExpressionTree> failOrAssert =
      new Matcher<ExpressionTree>() {
        @Override
        public boolean matches(ExpressionTree item, VisitorState state) {
          if (item.getKind() != METHOD_INVOCATION) {
            return false;
          }
          Symbol sym = getSymbol(item);
          if (!(sym instanceof MethodSymbol)) {
            throw new IllegalArgumentException("not a method call");
          }
          if (!sym.isStatic()) {
            return false;
          }
          String methodName = sym.getQualifiedName().toString();
          String className = sym.owner.getQualifiedName().toString();
          // TODO(cpovirk): Look for literal "throw new AssertionError()," etc.
          return (methodName.startsWith("assert") || methodName.startsWith("fail"))
              && (className.equals("org.junit.Assert")
                  || className.equals("junit.framework.Assert")
                  || className.equals("junit.framework.TestCase")
                  || className.endsWith("MoreAsserts"));
        }
      };

  @Override
  public Description matchTry(TryTree tree, VisitorState state) {
    MatchResult matchResult = tryTreeMatches(tree, state);
    if (!matchResult.matched()) {
      return NO_MATCH;
    }
    // Offer fixes in order of preference; which ones apply depends on the caught type.
    Description.Builder builder = buildDescription(tree.getCatches().get(0).getParameter());
    if (matchResult.caughtType == JAVA_LANG_THROWABLE) {
      builder.addFix(fixByCatchingException(tree));
    }
    if (matchResult.caughtType == SOME_ASSERTION_FAILURE) {
      builder.addFix(fixByThrowingJavaLangError(matchResult.failStatement, state));
    }
    builder.addFix(fixWithReturnOrBoolean(tree, matchResult.failStatement, state));
    return builder.build();
  }

  /** Narrows the catch parameter from Throwable to Exception so assertion errors escape. */
  private static Fix fixByCatchingException(TryTree tryTree) {
    VariableTree catchParameter = getOnlyCatch(tryTree).getParameter();
    return replace(catchParameter, "Exception " + catchParameter.getName());
  }

  /** Replaces the fail/assert statement with {@code throw new Error(...)} so the catch can't swallow it. */
  private static Fix fixByThrowingJavaLangError(StatementTree failStatement, VisitorState state) {
    String messageSnippet = getMessageSnippet(failStatement, state, HasOtherParameters.FALSE);
    return replace(failStatement, format("throw new Error(%s);", messageSnippet));
  }

  /**
   * Chooses between the "early return" rewrite (only possible when the try is the last
   * statement of the enclosing method) and the "boolean flag" rewrite.
   */
  private static Fix fixWithReturnOrBoolean(
      TryTree tryTree, StatementTree failStatement, VisitorState state) {
    Tree parent = state.getPath().getParentPath().getLeaf();
    Tree grandparent = state.getPath().getParentPath().getParentPath().getLeaf();
    if (parent.getKind() == BLOCK
        && grandparent.getKind() == METHOD
        && tryTree == getLastStatement((BlockTree) parent)) {
      return fixWithReturn(tryTree, failStatement, state);
    } else {
      return fixWithBoolean(tryTree, failStatement, state);
    }
  }

  /** Rewrites to: catch block returns; a fail(...) is appended after the try. */
  private static Fix fixWithReturn(
      TryTree tryTree, StatementTree failStatement, VisitorState state) {
    SuggestedFix.Builder builder = SuggestedFix.builder();
    builder.delete(failStatement);
    builder.replace(getOnlyCatch(tryTree).getBlock(), "{ return; }");
    // TODO(cpovirk): Use the file's preferred assertion API.
    String messageSnippet = getMessageSnippet(failStatement, state, HasOtherParameters.FALSE);
    builder.postfixWith(tryTree, format("fail(%s);", messageSnippet));
    return builder.build();
  }

  /** Rewrites to: a {@code threw} flag set in the catch and asserted after the try. */
  private static Fix fixWithBoolean(
      TryTree tryTree, StatementTree failStatement, VisitorState state) {
    SuggestedFix.Builder builder = SuggestedFix.builder();
    builder.delete(failStatement);
    builder.prefixWith(tryTree, "boolean threw = false;");
    builder.replace(getOnlyCatch(tryTree).getBlock(), "{ threw = true; }");
    // TODO(cpovirk): Use the file's preferred assertion API.
    String messageSnippet = getMessageSnippet(failStatement, state, HasOtherParameters.TRUE);
    builder.postfixWith(tryTree, format("assertTrue(%sthrew);", messageSnippet));
    return builder.build();
  }

  /**
   * Extracts the original assertion's message argument (if its first parameter is a String)
   * so the rewritten call can keep it; returns "" otherwise.
   */
  private static String getMessageSnippet(
      StatementTree failStatement, VisitorState state, HasOtherParameters hasOtherParameters) {
    ExpressionTree expression = ((ExpressionStatementTree) failStatement).getExpression();
    MethodSymbol sym = (MethodSymbol) getSymbol(expression);
    String tail = hasOtherParameters == HasOtherParameters.TRUE ? ", " : "";
    // The above casts were checked earlier by failOrAssert.
    return hasInitialStringParameter(sym, state)
        ? state.getSourceForNode(((MethodInvocationTree) expression).getArguments().get(0)) + tail
        : "";
  }

  /**
   * Whether the assertion method we're inserting a call to has extra parameters besides its message
   * (like {@code assertTrue}) or not (like {@code fail}).
   */
  enum HasOtherParameters {
    TRUE,
    FALSE;
  }

  private static boolean hasInitialStringParameter(MethodSymbol sym, VisitorState state) {
    Types types = state.getTypes();
    List<VarSymbol> parameters = sym.getParameters();
    return !parameters.isEmpty()
        && types.isSameType(parameters.get(0).type, state.getSymtab().stringType);
  }

  /**
   * Decides whether the try tree exhibits the buggy pattern: a fail/assert statement in the
   * try block and a single, effectively-empty catch of Throwable/Error/an assertion failure.
   */
  private static MatchResult tryTreeMatches(TryTree tryTree, VisitorState state) {
    BlockTree tryBlock = tryTree.getBlock();
    List<? extends StatementTree> statements = tryBlock.getStatements();
    if (statements.isEmpty()) {
      return doesNotMatch();
    }
    // Check if any of the statements is a fail or assert* method (i.e. any
    // method that can throw an AssertionFailedError)
    StatementTree failStatement = null;
    for (StatementTree statement : statements) {
      if (!(statement instanceof ExpressionStatementTree)) {
        continue;
      }
      if (failOrAssert.matches(((ExpressionStatementTree) statement).getExpression(), state)) {
        failStatement = statement;
        break;
      }
    }
    if (failStatement == null) {
      return doesNotMatch();
    }
    // Verify that the only catch clause catches Throwable
    List<? extends CatchTree> catches = tryTree.getCatches();
    if (catches.size() != 1) {
      // TODO(adamwos): this could be supported - only the last catch would need
      // to be checked - it would either be Throwable or Error.
      return doesNotMatch();
    }
    CatchTree catchTree = catches.get(0);
    VariableTree catchType = catchTree.getParameter();
    boolean catchesThrowable = javaLangThrowable.matches(catchType, state);
    boolean catchesError = javaLangError.matches(catchType, state);
    boolean catchesOtherError = someAssertionFailure.matches(catchType, state);
    if (!catchesThrowable && !catchesError && !catchesOtherError) {
      return doesNotMatch();
    }
    // Verify that the catch block is empty or contains only comments.
    List<? extends StatementTree> catchStatements = catchTree.getBlock().getStatements();
    for (StatementTree catchStatement : catchStatements) {
      // Comments are not a part of the AST. Therefore, we should either get
      // an empty list of statements (regardless of the number of comments),
      // or a list of empty statements.
      if (!Matchers.<Tree>kindIs(EMPTY_STATEMENT).matches(catchStatement, state)) {
        return doesNotMatch();
      }
    }
    return matches(
        failStatement,
        catchesThrowable
            ? JAVA_LANG_THROWABLE
            : catchesError ? JAVA_LANG_ERROR : SOME_ASSERTION_FAILURE);
  }

  /** Result of {@link #tryTreeMatches}: the offending statement plus what the catch caught. */
  static final class MatchResult {
    static final MatchResult DOES_NOT_MATCH = new MatchResult(null, null);

    static MatchResult matches(StatementTree failStatement, CaughtType caughtType) {
      return new MatchResult(checkNotNull(failStatement), checkNotNull(caughtType));
    }

    static MatchResult doesNotMatch() {
      return DOES_NOT_MATCH;
    }

    final StatementTree failStatement;
    final CaughtType caughtType;

    MatchResult(StatementTree failStatement, CaughtType caughtType) {
      this.failStatement = failStatement;
      this.caughtType = caughtType;
    }

    boolean matched() {
      // DOES_NOT_MATCH is the only instance with a null caughtType.
      return caughtType != null;
    }
  }

  enum CaughtType {
    JAVA_LANG_ERROR,
    JAVA_LANG_THROWABLE,
    SOME_ASSERTION_FAILURE,
    ;
  }

  private static StatementTree getLastStatement(BlockTree blockTree) {
    return getLast(blockTree.getStatements());
  }

  private static CatchTree getOnlyCatch(TryTree tryTree) {
    return tryTree.getCatches().get(0);
  }
}
|
MoreAsserts is dead!
RELNOTES: remove MoreAsserts from TryFailThrowable
-------------
Created by MOE: https://github.com/google/moe
MOE_MIGRATED_REVID=192503239
|
core/src/main/java/com/google/errorprone/bugpatterns/TryFailThrowable.java
|
MoreAsserts is dead!
|
|
Java
|
apache-2.0
|
136887bc1d95b9255f9155397698ea15ac6db8b2
| 0
|
OpenConext/OpenConext-dashboard,cybera/OpenConext-dashboard,cybera/OpenConext-dashboard,cybera/OpenConext-dashboard,OpenConext/OpenConext-dashboard,cybera/OpenConext-dashboard,OpenConext/OpenConext-dashboard,OpenConext/OpenConext-dashboard
|
/*
* Copyright 2012 SURFnet bv, The Netherlands
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package selfservice.service.impl;
import static java.util.stream.Collectors.toList;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
import selfservice.domain.Action;
import selfservice.domain.Change;
import selfservice.domain.IdentityProvider;
import selfservice.domain.ServiceProvider;
import selfservice.manage.EntityType;
import selfservice.service.ActionsService;
import selfservice.service.EmailService;
import selfservice.manage.Manage;
/**
 * {@link ActionsService} implementation backed by JIRA: connection requests live as JIRA
 * issues, are enriched with IdP/SP display names from Manage, and are optionally mirrored
 * to an administration mailbox.
 */
@Service
public class ActionsServiceImpl implements ActionsService {

  // The applicant's name/email are embedded as plain-text lines in the issue body at
  // creation time (see sendAdministrationEmail); these patterns recover them per line.
  private static final Pattern namePattern = Pattern.compile("^Applicant name: (.*)$", Pattern.MULTILINE);
  private static final Pattern emailPattern = Pattern.compile("^Applicant email: (.*)$", Pattern.MULTILINE);

  @Autowired
  private JiraClient jiraClient;

  @Autowired
  private EmailService emailService;

  @Autowired
  private Manage manage;

  // Toggle for the notification mail sent on create(); bound from configuration.
  @Value("${administration.email.enabled}")
  private boolean sendAdministrationEmail;

  /**
   * Returns a page of JIRA tasks for the given identity provider, with each action enriched
   * with IdP/SP display names and the applicant's name/email parsed from the issue body.
   */
  @Override
  @SuppressWarnings("unchecked") // jiraClient.getTasks stores the actions under "issues" — TODO confirm against JiraClient
  public Map<String, Object> getActions(String identityProvider, int startAt, int maxResults) {
    Map<String, Object> result = jiraClient.getTasks(identityProvider, startAt, maxResults);
    List<Action> issues = (List<Action>) result.get("issues");
    List<Action> enrichedActions = issues.stream().map(this::addNames).map(this::addUser).collect(toList());
    // Copy instead of mutating the map returned by the client.
    Map<String, Object> copyResult = new HashMap<>(result);
    copyResult.put("issues", enrichedActions);
    return copyResult;
  }

  /** Copies the action with userName/userEmail parsed from its body ("unknown" when absent). */
  private Action addUser(Action action) {
    String body = action.getBody();
    Optional<String> userEmail = findUserEmail(body);
    Optional<String> userName = findUserName(body);
    return action.unbuild()
        .userEmail(userEmail.orElse("unknown"))
        .userName(userName.orElse("unknown")).build();
  }

  private Optional<String> findUserEmail(String body) {
    return matchingGroup(emailPattern, body);
  }

  private Optional<String> findUserName(String body) {
    return matchingGroup(namePattern, body);
  }

  /** Returns the first capture group of the first match of {@code pattern} in {@code input}. */
  private Optional<String> matchingGroup(Pattern pattern, String input) {
    Matcher matcher = pattern.matcher(input);
    if (matcher.find()) {
      return Optional.ofNullable(matcher.group(1));
    }
    return Optional.empty();
  }

  /**
   * Creates a JIRA issue for the action (with its audit-log changes), stores the resulting
   * JIRA key on the action, and sends the administration notification mail if enabled.
   */
  @Override
  public Action create(Action action, List<Change> changes) {
    String jiraKey = jiraClient.create(action, changes);
    Action savedAction = addNames(action).unbuild().jiraKey(jiraKey).build();
    sendAdministrationEmail(savedAction);
    return savedAction;
  }

  /** Copies the action with IdP/SP display names looked up in Manage (fallback text when unknown). */
  private Action addNames(Action action) {
    Optional<ServiceProvider> serviceProvider = manage.getServiceProvider(action.getSpId(), EntityType.saml20_sp, true);
    Optional<IdentityProvider> identityProvider = manage.getIdentityProvider(action.getIdpId(), true);
    return action.unbuild()
        .idpName(identityProvider.map(IdentityProvider::getName).orElse("Information unavailable"))
        .spName(serviceProvider.map(ServiceProvider::getName).orElse("Information unavailable")).build();
  }

  /**
   * Sends the plain-text administration notification for a newly created action. No-op when
   * disabled via {@code administration.email.enabled}.
   */
  private void sendAdministrationEmail(Action action) {
    if (!sendAdministrationEmail) {
      return;
    }
    String subject = String.format(
        "[Services (%s) request] %s connection from IdP '%s' to SP '%s' (Issue : %s)",
        getHost(), action.getType().name(), action.getIdpId(), action.getSpId(), action.getJiraKey().orElse("???"));
    StringBuilder body = new StringBuilder();
    body.append("SP EntityID: " + action.getSpId() + "\n");
    body.append("SP Name: " + action.getSpName() + "\n");
    body.append("IdP EntityID: " + action.getIdpId() + "\n");
    body.append("IdP Name: " + action.getIdpName() + "\n");
    body.append("Request: " + action.getType().name() + "\n");
    // These two lines are later parsed back out by namePattern/emailPattern in addUser().
    body.append("Applicant name: " + action.getUserName() + "\n");
    body.append("Applicant email: " + action.getUserEmail() + " \n");
    body.append("Mail applicant: mailto:" + action.getUserEmail() + "?CC=surfconext-beheer@surfnet.nl&SUBJECT=[" + action.getJiraKey().orElse("???") + "]%20" + action.getType().name() + "%20to%20" + action.getSpName() + "&BODY=Beste%20" + action.getUserName() + " \n");
    // BUG FIX: the pattern was "HH:MM" — in SimpleDateFormat 'MM' is month-of-year, so the
    // timestamp rendered hour:month. 'mm' is minute-of-hour.
    SimpleDateFormat sdf = new SimpleDateFormat("dd-MM-yyyy HH:mm");
    body.append("Time: " + sdf.format(new Date()) + "\n");
    body.append("Remark from User:\n");
    body.append(action.getBody());
    emailService.sendMail(action.getUserEmail(), subject, body.toString());
  }

  /** Best-effort local host name for the mail subject; never throws. */
  private String getHost() {
    try {
      return InetAddress.getLocalHost().toString();
    } catch (UnknownHostException e) {
      return "UNKNOWN";
    }
  }
}
|
selfservice/src/main/java/selfservice/service/impl/ActionsServiceImpl.java
|
/*
* Copyright 2012 SURFnet bv, The Netherlands
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package selfservice.service.impl;
import static java.util.stream.Collectors.toList;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
import selfservice.domain.Action;
import selfservice.domain.Change;
import selfservice.domain.IdentityProvider;
import selfservice.domain.ServiceProvider;
import selfservice.manage.EntityType;
import selfservice.service.ActionsService;
import selfservice.service.EmailService;
import selfservice.manage.Manage;
@Service
public class ActionsServiceImpl implements ActionsService {

    // Match "Applicant name: <value>" / "Applicant email: <value>" lines embedded
    // in the free-text body of a JIRA issue; group 1 captures the value.
    private static final Pattern namePattern = Pattern.compile("^Applicant name: (.*)$", Pattern.MULTILINE);
    private static final Pattern emailPattern = Pattern.compile("^Applicant email: (.*)$", Pattern.MULTILINE);

    @Autowired
    private JiraClient jiraClient;

    @Autowired
    private EmailService emailService;

    @Autowired
    private Manage manage;

    // When false, sendAdministrationEmail(...) is a no-op.
    @Value("${administration.email.enabled}")
    private boolean sendAdministrationEmail;

    /**
     * Retrieves a page of JIRA tasks for the given identity provider and enriches
     * each returned {@code Action} with SP/IdP display names and applicant details
     * parsed from the issue body.
     */
    @Override
    public Map<String, Object> getActions(String identityProvider, int startAt, int maxResults) {
        Map<String, Object> result = jiraClient.getTasks(identityProvider, startAt, maxResults);
        @SuppressWarnings("unchecked") // jiraClient stores the issues under this key as List<Action>
        List<Action> issues = (List<Action>) result.get("issues");
        List<Action> enrichedActions = issues.stream().map(this::addNames).map(this::addUser).collect(toList());
        Map<String, Object> copyResult = new HashMap<>(result);
        copyResult.put("issues", enrichedActions);
        return copyResult;
    }

    /**
     * Copies the applicant name/email parsed from the issue body onto the action.
     * Falls back to "unknown" when the body does not contain the expected lines.
     */
    private Action addUser(Action action) {
        String body = action.getBody();
        Optional<String> userEmail = findUserEmail(body);
        Optional<String> userName = findUserName(body);
        return action.unbuild()
            .userEmail(userEmail.orElse("unknown"))
            .userName(userName.orElse("unknown")).build();
    }

    private Optional<String> findUserEmail(String body) {
        return matchingGroup(emailPattern, body);
    }

    private Optional<String> findUserName(String body) {
        return matchingGroup(namePattern, body);
    }

    /** Returns the first capture group of the first match of {@code pattern}, if any. */
    private Optional<String> matchingGroup(Pattern pattern, String input) {
        Matcher matcher = pattern.matcher(input);
        if (matcher.find()) {
            return Optional.ofNullable(matcher.group(1));
        }
        return Optional.empty();
    }

    /**
     * Creates a JIRA issue for the action, enriches the action with the resulting
     * JIRA key and display names, and (optionally) notifies the administration.
     */
    @Override
    public Action create(Action action, List<Change> changes) {
        String jiraKey = jiraClient.create(action, changes);
        Action savedAction = addNames(action).unbuild().jiraKey(jiraKey).build();
        sendAdministrationEmail(savedAction);
        return savedAction;
    }

    /** Resolves SP/IdP display names via Manage, with readable fallbacks. */
    private Action addNames(Action action) {
        Optional<ServiceProvider> serviceProvider = manage.getServiceProvider(action.getSpId(), EntityType.saml20_sp, true);
        Optional<IdentityProvider> identityProvider = manage.getIdentityProvider(action.getIdpId(), true);
        return action.unbuild()
            .idpName(identityProvider.map(IdentityProvider::getName).orElse("Unknown idp"))
            .spName(serviceProvider.map(ServiceProvider::getName).orElse("Unknown sp")).build();
    }

    /** Sends an administration notification for the action, if enabled via configuration. */
    private void sendAdministrationEmail(Action action) {
        if (!sendAdministrationEmail) {
            return;
        }
        String subject = String.format(
            "[Services (%s) request] %s connection from IdP '%s' to SP '%s' (Issue : %s)",
            getHost(), action.getType().name(), action.getIdpId(), action.getSpId(), action.getJiraKey().orElse("???"));
        StringBuilder body = new StringBuilder();
        body.append("SP EntityID: " + action.getSpId() + "\n");
        body.append("SP Name: " + action.getSpName() + "\n");
        body.append("IdP EntityID: " + action.getIdpId() + "\n");
        body.append("IdP Name: " + action.getIdpName() + "\n");
        body.append("Request: " + action.getType().name() + "\n");
        body.append("Applicant name: " + action.getUserName() + "\n");
        body.append("Applicant email: " + action.getUserEmail() + " \n");
        body.append("Mail applicant: mailto:" + action.getUserEmail() + "?CC=surfconext-beheer@surfnet.nl&SUBJECT=[" + action.getJiraKey().orElse("???") + "]%20" + action.getType().name() + "%20to%20" + action.getSpName() + "&BODY=Beste%20" + action.getUserName() + " \n");
        // BUG FIX: the pattern previously used "HH:MM" — in SimpleDateFormat 'M' is
        // month-in-year, so the minutes position printed the month. 'mm' is minutes.
        SimpleDateFormat sdf = new SimpleDateFormat("dd-MM-yyyy HH:mm");
        body.append("Time: " + sdf.format(new Date()) + "\n");
        body.append("Remark from User:\n");
        body.append(action.getBody());
        // subject is already a String; the former subject.toString() was redundant.
        emailService.sendMail(action.getUserEmail(), subject, body.toString());
    }

    /**
     * Resolves the local host, for inclusion in notification subjects.
     *
     * @return string form of the local host, or "UNKNOWN" when resolution fails
     */
    private String getHost() {
        try {
            return InetAddress.getLocalHost().toString();
        } catch (UnknownHostException e) {
            return "UNKNOWN";
        }
    }
}
|
Better default message for unknown IDP / SP
|
selfservice/src/main/java/selfservice/service/impl/ActionsServiceImpl.java
|
Better default message for unknown IDP / SP
|
|
Java
|
apache-2.0
|
9015edf518ab8083748c4b7448111de68f2d1f6f
| 0
|
creswick/StreamingQR,creswick/StreamingQR
|
package com.galois.qrstream.qrpipe;
import java.util.BitSet;
/**
 * Used to mark the progress of decoding a stream of QR codes.
 */
public class DecodeState {

    // Bit i is true once chunk (i + 1) has been received and decoded; stays null
    // until setInitialCapacity fixes the size of the transmission.
    private BitSet data = null;
    // Expected number of QR codes in the transmission; 0 until initialized.
    private int capacity = 0;

    /**
     * Initialize DecodeState with given capacity. After initialization, requests
     * to change capacity are ignored. This allows external applications to query
     * status of transmission and QR decoding, in the case that {@code Receive}
     * has not yet received and decoded its first QR code. It needs at least
     * one QR code to know how many QR codes it should expect to receive.
     *
     * @param capacity expected number of QR codes; must not be negative
     * @throws IllegalArgumentException if {@code capacity} is negative
     */
    public void setInitialCapacity(int capacity) {
        if (this.data == null) {
            if (capacity < 0) {
                // BUG FIX: the message used to claim "capacity > 0" even though a
                // capacity of exactly 0 is accepted by the check above.
                throw new IllegalArgumentException("DecodeState must have capacity >= 0");
            }
            this.capacity = capacity;
            this.data = new BitSet(capacity);
        }
    }

    /**
     * Return the state that a transmission is in. If no QR codes have been
     * received and decoded, this returns the {@code Initial} state. When at
     * least one QR code has been decoded, return {@code Intermediate} state.
     * Finally, when all QR codes have been received and decoded, the state
     * transitions to {@code Final}.
     */
    public State getState () {
        if (data == null || data.isEmpty()) {
            return State.Initial;
        } else if (data.cardinality() == this.capacity) {
            return State.Final;
        } else {
            return State.Intermediate;
        }
    }

    /**
     * Mark progress of data transmission by setting the bit corresponding
     * to {@code chunkId} of the underlying bitset to true.
     *
     * <p>Silently does nothing when called before {@code setInitialCapacity}.
     *
     * @throws IndexOutOfBoundsException if {@code chunkId} is not within bounds
     *     of capacity of underlying bitset.
     */
    public void markDataReceived (int chunkId) throws IndexOutOfBoundsException {
        if (data != null) {
            if (chunkId < 1 || chunkId > capacity) {
                throw new IndexOutOfBoundsException("Cannot mark bit, chunkId: " + chunkId +
                    ", is out of bounds");
            }
            data.set(chunkId - 1);
        }
    }

    /**
     * Get a deep copy of the underlying bitset.
     *
     * @return the bitset underlying the decode state; an empty bitset when no
     *     QR code has been received yet (robustness fix: previously threw
     *     NullPointerException before initialization).
     */
    public BitSet getData() {
        if (data == null) {
            return new BitSet();
        }
        return (BitSet) data.clone();
    }
}
|
development/qrlib/src/main/java/com/galois/qrstream/qrpipe/DecodeState.java
|
package com.galois.qrstream.qrpipe;
import java.util.BitSet;
/**
* Used to mark the progress of decoding stream of QR codes.
*/
public class DecodeState {
// Bit i is true once chunk (i + 1) has been received and decoded; stays null
// until setInitialCapacity fixes the size of the transmission.
private BitSet data = null;
// Expected number of QR codes in the transmission; 0 until initialized.
private int capacity = 0;
/**
* Initialize DecodeState with given capacity. After initialization, requests
* to change capacity are ignored. This allows external applications to query
* status of transmission and QR decoding, in the case that {@code Receive}
* has not yet received and decoded its first QR code. It needs at least
* one QR code to know how many QR codes it should expect to receive.
*
* @param capacity expected number of QR codes; must not be negative
* @throws IllegalArgumentException if {@code capacity} is negative
*/
public void setInitialCapacity(int capacity) {
if (this.data == null) {
if (capacity < 0) {
throw new IllegalArgumentException("DecodeState must have capacity > 0");
}
this.capacity = capacity;
this.data = new BitSet(capacity);
}
}
/**
* Return the state that a transmission is in. If no QR codes have been
* received and decoded, this returns the {@code Initial} state. When at
* least one QR code has been decoded, return {@code Intermediate} state.
* Finally, when all QR codes have been received and decoded, the state
* transitions to {@code Final}.
*/
public State getState () {
if (data == null || data.isEmpty()) {
return State.Initial;
} else if (data.cardinality() == this.capacity) {
return State.Final;
} else {
return State.Intermediate;
}
}
/**
* Mark progress of data transmission by setting the bit corresponding
* to {@code chunkId} of the underlying bitset to true.
*
* <p>Silently does nothing when called before {@code setInitialCapacity}.
*
* @throws IndexOutOfBoundsException if {@code chunkId} is not within bounds
* of capacity of underlying bitset.
*/
public void markDataReceived (int chunkId) throws IndexOutOfBoundsException {
if (data != null) {
if (chunkId < 1 || chunkId > capacity) {
throw new IndexOutOfBoundsException("Cannot mark bit, chunkId: " + chunkId +
", is out of bounds");
}
data.set(chunkId - 1);
}
}
// NOTE(review): kept from master, but unlike markDataReceived this performs no
// bounds checking, takes a 0-based position, and throws NullPointerException
// when called before setInitialCapacity. Prefer markDataReceived; confirm
// whether any caller still needs this before removing it.
public void set(int position) {
data.set(position);
}
/**
* Get a deep copy of the underlying bitset.
*
* <p>NOTE(review): throws NullPointerException if called before
* {@code setInitialCapacity} has allocated the bitset.
*
* @return The bitset underlying the decode state.
*/
public BitSet getData() {
return (BitSet) data.clone();
}
}
|
qrlib: prefer 'markDataReceived' over 'set' since it performs more error checking
|
development/qrlib/src/main/java/com/galois/qrstream/qrpipe/DecodeState.java
|
qrlib: prefer 'markDataReceived' over 'set' since it performs more error checking
|
|
Java
|
apache-2.0
|
43a6b19590a280ceee8f307e9fa24e92fc1d194a
| 0
|
xmpace/jetty-read,xmpace/jetty-read,xmpace/jetty-read,xmpace/jetty-read
|
package org.eclipse.jetty.servlet;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import java.io.IOException;
import javax.servlet.AsyncContext;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import junit.framework.Assert;
import org.eclipse.jetty.continuation.Continuation;
import org.eclipse.jetty.continuation.ContinuationSupport;
import org.eclipse.jetty.server.AsyncContinuation;
import org.eclipse.jetty.server.Connector;
import org.eclipse.jetty.server.Handler;
import org.eclipse.jetty.server.LocalConnector;
import org.eclipse.jetty.server.Server;
import org.eclipse.jetty.server.handler.DefaultHandler;
import org.eclipse.jetty.server.handler.HandlerList;
import org.eclipse.jetty.util.log.Log;
import org.junit.After;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;
/**
 * This tests the correct functioning of the AsyncContext: an embedded Jetty
 * server with a local (in-VM) connector serves a single test servlet, and one
 * raw HTTP request verifies the output written synchronously, after
 * startAsync(), and from the async-dispatched runnable.
 *
 * @author tbecker
 */
public class AsyncContextTest
{
    private Server _server = new Server();
    private ServletContextHandler _contextHandler = new ServletContextHandler(ServletContextHandler.NO_SESSIONS);
    private LocalConnector _connector = new LocalConnector();

    /** Starts the embedded server with the servlet under test mapped at /servletPath. */
    @Before
    public void setUp() throws Exception
    {
        _connector.setMaxIdleTime(30000);
        _server.setConnectors(new Connector[]
        { _connector });
        _contextHandler.setContextPath("/");
        _contextHandler.addServlet(new ServletHolder(new TestServlet()),"/servletPath");
        HandlerList handlers = new HandlerList();
        handlers.setHandlers(new Handler[]
        { _contextHandler, new DefaultHandler() });
        _server.setHandler(handlers);
        _server.start();
    }

    /**
     * Issues one request and checks the marker lines written by the servlet and
     * the async runnable, including the ASYNC_* request attributes visible to
     * the dispatched runnable.
     */
    @Test
    public void testSimpleAsyncContext() throws Exception
    {
        String request = "GET /servletPath HTTP/1.1\r\n" + "Host: localhost\r\n" + "Content-Type: application/x-www-form-urlencoded\r\n"
            + "Connection: close\r\n" + "\r\n";
        String responseString = _connector.getResponses(request);
        System.out.println(responseString);
        Assert.assertTrue("check in servlet doGet", responseString.contains("doGet:getServletPath:/servletPath"));
        Assert.assertTrue("check in servlet doGet via async", responseString.contains("doGet:async:getServletPath:/servletPath"));
        Assert.assertTrue("check in async runnable", responseString.contains("async:run:/servletPath"));
        Assert.assertTrue("async attr check: servlet path", responseString.contains("async:run:attr:servletPath:/servletPath"));
        // should validate these are indeed correct
        Assert.assertTrue("async attr check: path info", responseString.contains("async:run:attr:pathInfo:null"));
        Assert.assertTrue("async attr check: request uri", responseString.contains("async:run:attr:requestURI:/servletPath"));
        Assert.assertTrue("async attr check: query string", responseString.contains("async:run:attr:queryString:null"));
        Assert.assertTrue("async attr check: context path", responseString.contains("async:run:attr:contextPath:"));
    }

    @After
    public void tearDown() throws Exception
    {
        _server.stop();
        _server.join();
    }

    // static nested: the servlet does not touch the enclosing test instance, so
    // it should not hold an implicit reference to it.
    private static class TestServlet extends HttpServlet
    {
        private static final long serialVersionUID = 1L;

        protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException
        {
            response.getOutputStream().print("doGet:getServletPath:" + request.getServletPath() + "\n");
            AsyncContext asyncContext = request.startAsync(request, response);
            response.getOutputStream().print("doGet:async:getServletPath:" + ((HttpServletRequest)asyncContext.getRequest()).getServletPath() + "\n");
            // Let the container run the continuation instead of hand-rolling a Thread.
            asyncContext.start(new AsyncRunnable(asyncContext));
        }
    }

    private static class AsyncRunnable implements Runnable
    {
        private final AsyncContext _context;

        public AsyncRunnable(AsyncContext context)
        {
            _context = context;
        }

        @Override
        public void run()
        {
            HttpServletRequest req = (HttpServletRequest)_context.getRequest();
            assert (req.getServletPath().equals("/servletPath"));
            System.out.println(req.getServletPath());
            try
            {
                _context.getResponse().getOutputStream().print("async:run:" + req.getServletPath() + "\n");
                _context.getResponse().getOutputStream().print("async:run:attr:servletPath:" + req.getAttribute(AsyncContext.ASYNC_SERVLET_PATH) + "\n");
                _context.getResponse().getOutputStream().print("async:run:attr:pathInfo:" + req.getAttribute(AsyncContext.ASYNC_PATH_INFO) + "\n");
                _context.getResponse().getOutputStream().print("async:run:attr:requestURI:" + req.getAttribute(AsyncContext.ASYNC_REQUEST_URI) + "\n");
                _context.getResponse().getOutputStream().print("async:run:attr:queryString:" + req.getAttribute(AsyncContext.ASYNC_QUERY_STRING) + "\n");
                _context.getResponse().getOutputStream().print("async:run:attr:contextPath:" + req.getAttribute(AsyncContext.ASYNC_CONTEXT_PATH) + "\n");
            }
            catch (IOException e)
            {
                e.printStackTrace();
            }
            // Signal that async processing is finished so the response can be flushed.
            _context.complete();
        }
    }
}
|
jetty-servlet/src/test/java/org/eclipse/jetty/servlet/AsyncContextTest.java
|
package org.eclipse.jetty.servlet;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import java.io.IOException;
import javax.servlet.AsyncContext;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import junit.framework.Assert;
import org.eclipse.jetty.continuation.Continuation;
import org.eclipse.jetty.continuation.ContinuationSupport;
import org.eclipse.jetty.server.AsyncContinuation;
import org.eclipse.jetty.server.Connector;
import org.eclipse.jetty.server.Handler;
import org.eclipse.jetty.server.LocalConnector;
import org.eclipse.jetty.server.Server;
import org.eclipse.jetty.server.handler.DefaultHandler;
import org.eclipse.jetty.server.handler.HandlerList;
import org.eclipse.jetty.util.log.Log;
import org.junit.After;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;
/**
* This tests the correct functioning of the AsyncContext
*
* @author tbecker
*
*/
public class AsyncContextTest
{
// Embedded Jetty server plus a local (in-VM) connector, so no real socket is
// needed to issue the test request.
private Server _server = new Server();
private ServletContextHandler _contextHandler = new ServletContextHandler(ServletContextHandler.NO_SESSIONS);
private LocalConnector _connector = new LocalConnector();
// Starts the embedded server with the servlet under test mapped at /servletPath.
@Before
public void setUp() throws Exception
{
_connector.setMaxIdleTime(30000);
_server.setConnectors(new Connector[]
{ _connector });
_contextHandler.setContextPath("/");
_contextHandler.addServlet(new ServletHolder(new TestServlet()),"/servletPath");
HandlerList handlers = new HandlerList();
handlers.setHandlers(new Handler[]
{ _contextHandler, new DefaultHandler() });
_server.setHandler(handlers);
_server.start();
}
@Test
@Ignore ("failing test illustrating potential issue")
public void testSimpleAsyncContext() throws Exception
{
// Raw HTTP request routed through the local connector.
String request = "GET /servletPath HTTP/1.1\r\n" + "Host: localhost\r\n" + "Content-Type: application/x-www-form-urlencoded\r\n"
+ "Connection: close\r\n" + "\r\n";
String responseString = _connector.getResponses(request);
System.out.println(responseString);
// One marker line is written synchronously, one right after startAsync(),
// and one from the background runnable; all three must appear.
Assert.assertTrue("check in servlet doGet", responseString.contains("doGet:getServletPath:/servletPath"));
Assert.assertTrue("check in servlet doGet via async", responseString.contains("doGet:async:getServletPath:/servletPath"));
Assert.assertTrue("check in async runnable", responseString.contains("async:run:/servletPath"));
}
@After
public void tearDown() throws Exception
{
_server.stop();
_server.join();
}
private class TestServlet extends HttpServlet
{
private static final long serialVersionUID = 1L;
protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException
{
response.getOutputStream().print("doGet:getServletPath:" + request.getServletPath() + "\n");
AsyncContext asyncContext = request.startAsync();
response.getOutputStream().print("doGet:async:getServletPath:" + ((HttpServletRequest)asyncContext.getRequest()).getServletPath() + "\n");
// Completes the request from a separate, manually started thread.
Runnable runable = new AsyncRunnable(asyncContext);
new Thread(runable).start();
}
}
private class AsyncRunnable implements Runnable
{
private AsyncContext _context;
public AsyncRunnable(AsyncContext context)
{
_context = context;
}
@Override
public void run()
{
HttpServletRequest req = (HttpServletRequest)_context.getRequest();
assert (req.getServletPath().equals("/servletPath"));
System.out.println(req.getServletPath());
try
{
_context.getResponse().getOutputStream().print("async:run:" + req.getServletPath() + "\n");
}
catch (IOException e)
{
e.printStackTrace();
}
// Signal that async processing is finished so the response can be flushed.
_context.complete();
}
}
}
|
expand the test a bit more
|
jetty-servlet/src/test/java/org/eclipse/jetty/servlet/AsyncContextTest.java
|
expand the test a bit more
|
|
Java
|
apache-2.0
|
e82e16ca9c7912725ca838d6a0b83913e2063c2b
| 0
|
Crespo911/encog-java-core,ThiagoGarciaAlves/encog-java-core,SpenceSouth/encog-java-core,krzysztof-magosa/encog-java-core,ThiagoGarciaAlves/encog-java-core,spradnyesh/encog-java-core,Crespo911/encog-java-core,krzysztof-magosa/encog-java-core,spradnyesh/encog-java-core,SpenceSouth/encog-java-core,danilodesousacubas/encog-java-core,danilodesousacubas/encog-java-core
|
package org.encog.neural.activation;
import junit.framework.TestCase;
import org.encog.EncogError;
import org.encog.persist.persistors.ActivationBiPolarPersistor;
import org.encog.persist.persistors.ActivationLinearPersistor;
import org.junit.Assert;
import org.junit.Test;
/** Unit test for the linear activation function: identity behavior, cloning,
 * persistence wiring and (ignored) metadata. */
public class TestActivationLinear extends TestCase {

    @Test
    public void testLinear() throws Throwable {
        final ActivationLinear linear = new ActivationLinear();

        // The linear activation reports that it has a derivative.
        Assert.assertTrue(linear.hasDerivative());

        // Cloning must produce a non-null copy.
        final ActivationLinear copy = (ActivationLinear) linear.clone();
        Assert.assertNotNull(copy);

        // f(x) = x: activation leaves its input untouched.
        final double[] values = { 1, 2, 3 };
        linear.activationFunction(values);
        Assert.assertEquals(1.0, values[0], 0.1);
        Assert.assertEquals(2.0, values[1], 0.1);
        Assert.assertEquals(3.0, values[2], 0.1);

        // Creating the persistor throws if the wiring is broken.
        final ActivationLinearPersistor persistor =
            (ActivationLinearPersistor) linear.createPersistor();

        // The derivative is defined for the linear activation; must not throw.
        linear.derivativeFunction(values);

        // Names and descriptions are not stored for activation functions.
        linear.setName("name");
        linear.setDescription("name");
        Assert.assertEquals(null, linear.getName());
        Assert.assertEquals(null, linear.getDescription());
    }
}
|
test/org/encog/neural/activation/TestActivationLinear.java
|
package org.encog.neural.activation;
import junit.framework.TestCase;
import org.encog.EncogError;
import org.encog.persist.persistors.ActivationBiPolarPersistor;
import org.encog.persist.persistors.ActivationLinearPersistor;
import org.junit.Assert;
import org.junit.Test;
/** Unit test for the linear activation function: identity behavior, cloning,
 * persistence wiring, the unsupported derivative and (ignored) metadata. */
public class TestActivationLinear extends TestCase {

    @Test
    public void testLinear() throws Throwable {
        final ActivationLinear linear = new ActivationLinear();

        // The linear activation reports that it has a derivative.
        Assert.assertTrue(linear.hasDerivative());

        // Cloning must produce a non-null copy.
        final ActivationLinear copy = (ActivationLinear) linear.clone();
        Assert.assertNotNull(copy);

        // f(x) = x: activation leaves its input untouched.
        final double[] values = { 1, 2, 3 };
        linear.activationFunction(values);
        Assert.assertEquals(1.0, values[0], 0.1);
        Assert.assertEquals(2.0, values[1], 0.1);
        Assert.assertEquals(3.0, values[2], 0.1);

        // Creating the persistor throws if the wiring is broken.
        final ActivationLinearPersistor persistor =
            (ActivationLinearPersistor) linear.createPersistor();

        // Requesting the derivative is expected to raise an EncogError.
        try {
            linear.derivativeFunction(values);
            Assert.assertTrue(false); // reaching this line is a failure
        } catch (EncogError e) {
            // good, this should happen
        }

        // Names and descriptions are not stored for activation functions.
        linear.setName("name");
        linear.setDescription("name");
        Assert.assertEquals(null, linear.getName());
        Assert.assertEquals(null, linear.getDescription());
    }
}
|
fix core build
git-svn-id: f699b99be54c313e643266d5dd560bc939300b59@1189 f90f6e9a-ac51-0410-b353-d1b83c6f6923
|
test/org/encog/neural/activation/TestActivationLinear.java
|
fix core build
|
|
Java
|
apache-2.0
|
00c104a56f6b19c9d75feb3663e5bedecff80f0c
| 0
|
PhilippSalvisberg/aqdemo
|
package com.trivadis.aqdemo;
import java.sql.SQLException;
import javax.jms.ConnectionFactory;
import javax.jms.JMSException;
import javax.jms.Session;
import javax.jms.TopicConnectionFactory;
import javax.sql.DataSource;
import org.apache.log4j.Logger;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.jdbc.datasource.DataSourceTransactionManager;
import org.springframework.jms.listener.DefaultMessageListenerContainer;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.annotation.EnableTransactionManagement;
import oracle.jms.AQjmsFactory;
import oracle.ucp.jdbc.PoolDataSource;
import oracle.ucp.jdbc.PoolDataSourceFactory;
@EnableTransactionManagement
@Configuration
public class AppConfig {

    private final Logger logger = Logger.getLogger(AppConfig.class);

    // Each property below may be overridden by a bean carrying the matching
    // qualifier; the field initializer is the default when no such bean exists.
    @Autowired(required = false)
    @Qualifier("aqUrl")
    private String aqUrl = "jdbc:oracle:thin:@localhost:1521:odb";

    @Autowired(required = false)
    @Qualifier("aqUserName")
    private String aqUserName = "aqdemo";

    @Autowired(required = false)
    @Qualifier("aqPassword")
    private String aqPassword = "aqdemo";

    @Autowired(required = false)
    @Qualifier("requestQueueName")
    private String requestQueueName = "aqdemo.requests_aq";

    @Autowired(required = false)
    @Qualifier("responseQueueName")
    private String responseQueueName = "aqdemo.responses_aq";

    @Autowired(required = false)
    @Qualifier("appName")
    private String appName = "Java";

    @Autowired(required = false)
    @Qualifier("concurrency")
    private String concurrency = "1-4";

    /** JMS topic connection factory backed by the Oracle AQ data source. */
    @Bean
    public ConnectionFactory connectionFactory() {
        logger.info("connectionFactory() called.");
        TopicConnectionFactory connectionFactory;
        try {
            connectionFactory = AQjmsFactory.getTopicConnectionFactory(aqDataSource());
        } catch (JMSException e) {
            // BUG FIX: keep the JMSException as the cause instead of discarding it.
            throw new RuntimeException("cannot get connection factory.", e);
        }
        return connectionFactory;
    }

    /** Listener invoked for every message delivered by the containers below. */
    @Bean
    public TextMessageListener messageListener() {
        logger.info("messageListener() called.");
        return new TextMessageListener();
    }

    /** Durable subscriber for high-priority (JMSPriority 1-2) requests of this app. */
    @Bean
    public DefaultMessageListenerContainer highPriorityJmsContainer() {
        logger.info("highPriorityJmsContainer() called.");
        DefaultMessageListenerContainer cont = new DefaultMessageListenerContainer();
        cont.setMessageListener(messageListener());
        cont.setConnectionFactory(connectionFactory());
        cont.setDestinationName(requestQueueName);
        cont.setPubSubDomain(true);
        cont.setSubscriptionName(appName + "_High_Priority");
        cont.setSubscriptionDurable(true); // allow enqueue when service is down
        cont.setMessageSelector("(JMSPriority IN (1,2) and appName = '" + appName + "')");
        cont.setSessionAcknowledgeMode(Session.SESSION_TRANSACTED);
        cont.setSessionTransacted(true);
        cont.setConcurrency(concurrency);
        cont.setMaxMessagesPerTask(1);
        return cont;
    }

    /** Durable subscriber for low-priority (JMSPriority > 2) requests of this app. */
    @Bean
    public DefaultMessageListenerContainer lowPriorityJmsContainer() {
        logger.info("lowPriorityJmsContainer() called.");
        DefaultMessageListenerContainer cont = new DefaultMessageListenerContainer();
        cont.setMessageListener(messageListener());
        cont.setConnectionFactory(connectionFactory());
        cont.setDestinationName(requestQueueName);
        cont.setPubSubDomain(true);
        cont.setSubscriptionName(appName + "_Low_Priority");
        cont.setSubscriptionDurable(true); // allow enqueue when service is down
        cont.setMessageSelector("(JMSPriority > 2 and appName = '" + appName + "')");
        cont.setSessionAcknowledgeMode(Session.SESSION_TRANSACTED);
        cont.setSessionTransacted(true);
        cont.setConcurrency(concurrency);
        cont.setMaxMessagesPerTask(1);
        return cont;
    }

    /** UCP pooled data source used by both JMS and transaction management. */
    @Bean
    public DataSource aqDataSource() {
        logger.info("aqDataSource() called.");
        PoolDataSource pds = PoolDataSourceFactory.getPoolDataSource();
        try {
            pds.setConnectionFactoryClassName("oracle.jdbc.OracleDriver");
            // see https://docs.oracle.com/database/122/JJUAR/oracle/ucp/jdbc/PoolDataSource.html
            pds.setURL(aqUrl);
            pds.setUser(aqUserName);
            pds.setPassword(aqPassword);
            // close inactive connections within the pool after 60 seconds
            pds.setInactiveConnectionTimeout(60);
            // return inactive connections to the pool after 60 seconds, e.g. to recover from network failure
            pds.setAbandonedConnectionTimeout(60);
            // allow a borrowed connection to be used infinitely, required for long running transactions
            pds.setTimeToLiveConnectionTimeout(0);
            // check all timeout settings every 30 seconds
            pds.setTimeoutCheckInterval(30);
        } catch (SQLException e) {
            // BUG FIX: keep the SQLException as the cause instead of discarding it.
            throw new RuntimeException("driver not found", e);
        }
        return pds;
    }

    /** Plain JDBC transaction manager over the AQ data source. */
    @Bean
    public PlatformTransactionManager txManager() {
        return new DataSourceTransactionManager(aqDataSource());
    }
}
|
aqdemo/src/main/java/com/trivadis/aqdemo/AppConfig.java
|
package com.trivadis.aqdemo;
import java.sql.SQLException;
import javax.jms.ConnectionFactory;
import javax.jms.JMSException;
import javax.jms.Session;
import javax.jms.TopicConnectionFactory;
import javax.sql.DataSource;
import org.apache.log4j.Logger;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.jdbc.datasource.DataSourceTransactionManager;
import org.springframework.jms.listener.DefaultMessageListenerContainer;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.annotation.EnableTransactionManagement;
import oracle.jms.AQjmsFactory;
import oracle.ucp.jdbc.PoolDataSource;
import oracle.ucp.jdbc.PoolDataSourceFactory;
@EnableTransactionManagement
@Configuration
public class AppConfig {

    private final Logger logger = Logger.getLogger(AppConfig.class);

    // Each property below may be overridden by a bean carrying the matching
    // qualifier; the field initializer is the default when no such bean exists.
    @Autowired(required = false)
    @Qualifier("aqUrl")
    private String aqUrl = "jdbc:oracle:thin:@localhost:1521:odb";

    @Autowired(required = false)
    @Qualifier("aqUserName")
    private String aqUserName = "aqdemo";

    @Autowired(required = false)
    @Qualifier("aqPassword")
    private String aqPassword = "aqdemo";

    @Autowired(required = false)
    @Qualifier("requestQueueName")
    private String requestQueueName = "aqdemo.requests_aq";

    @Autowired(required = false)
    @Qualifier("responseQueueName")
    private String responseQueueName = "aqdemo.responses_aq";

    @Autowired(required = false)
    @Qualifier("appName")
    private String appName = "Java";

    @Autowired(required = false)
    @Qualifier("concurrency")
    private String concurrency = "1-4";

    /** JMS topic connection factory backed by the Oracle AQ data source. */
    @Bean
    public ConnectionFactory connectionFactory() {
        logger.info("connectionFactory() called.");
        TopicConnectionFactory connectionFactory;
        try {
            connectionFactory = AQjmsFactory.getTopicConnectionFactory(aqDataSource());
        } catch (JMSException e) {
            // BUG FIX: keep the JMSException as the cause instead of discarding it.
            throw new RuntimeException("cannot get connection factory.", e);
        }
        return connectionFactory;
    }

    /** Listener invoked for every message delivered by the containers below. */
    @Bean
    public TextMessageListener messageListener() {
        logger.info("messageListener() called.");
        return new TextMessageListener();
    }

    /** Durable subscriber for high-priority (JMSPriority 1-2) requests of this app. */
    @Bean
    public DefaultMessageListenerContainer highPriorityJmsContainer() {
        logger.info("highPriorityJmsContainer() called.");
        DefaultMessageListenerContainer cont = new DefaultMessageListenerContainer();
        cont.setMessageListener(messageListener());
        cont.setConnectionFactory(connectionFactory());
        cont.setDestinationName(requestQueueName);
        cont.setPubSubDomain(true);
        cont.setSubscriptionName(appName + "_High_Priority");
        cont.setSubscriptionDurable(true); // allow enqueue when service is down
        cont.setMessageSelector("(JMSPriority IN (1,2) and appName = '" + appName + "')");
        cont.setSessionAcknowledgeMode(Session.SESSION_TRANSACTED);
        cont.setSessionTransacted(true);
        cont.setConcurrency(concurrency);
        cont.setMaxMessagesPerTask(1);
        return cont;
    }

    /** Durable subscriber for low-priority (JMSPriority > 2) requests of this app. */
    @Bean
    public DefaultMessageListenerContainer lowPriorityJmsContainer() {
        logger.info("lowPriorityJmsContainer() called.");
        DefaultMessageListenerContainer cont = new DefaultMessageListenerContainer();
        cont.setMessageListener(messageListener());
        cont.setConnectionFactory(connectionFactory());
        cont.setDestinationName(requestQueueName);
        cont.setPubSubDomain(true);
        cont.setSubscriptionName(appName + "_Low_Priority");
        cont.setSubscriptionDurable(true); // allow enqueue when service is down
        cont.setMessageSelector("(JMSPriority > 2 and appName = '" + appName + "')");
        cont.setSessionAcknowledgeMode(Session.SESSION_TRANSACTED);
        cont.setSessionTransacted(true);
        cont.setConcurrency(concurrency);
        cont.setMaxMessagesPerTask(1);
        return cont;
    }

    /** UCP pooled data source used by both JMS and transaction management. */
    @Bean
    public DataSource aqDataSource() {
        logger.info("aqDataSource() called.");
        PoolDataSource pds = PoolDataSourceFactory.getPoolDataSource();
        try {
            pds.setConnectionFactoryClassName("oracle.jdbc.OracleDriver");
            pds.setURL(aqUrl);
            pds.setUser(aqUserName);
            pds.setPassword(aqPassword);
            // close inactive connections within the pool after 60 seconds
            pds.setInactiveConnectionTimeout(60);
        } catch (SQLException e) {
            // BUG FIX: keep the SQLException as the cause instead of discarding it.
            throw new RuntimeException("driver not found", e);
        }
        return pds;
    }

    /** Plain JDBC transaction manager over the AQ data source. */
    @Bean
    public PlatformTransactionManager txManager() {
        return new DataSourceTransactionManager(aqDataSource());
    }
}
|
Make JMS connection more robust, recover from network failure
automatically. Use setAbandonedConnectionTimeout(60) in UCP.
|
aqdemo/src/main/java/com/trivadis/aqdemo/AppConfig.java
|
Make JMS connection more robust, recover from network failure automatically. Use setAbandonedConnectionTimeout(60) in UCP.
|
|
Java
|
apache-2.0
|
1f7417e66a87a48971b2dc48190cf5b187a8902f
| 0
|
larsgeorge/hbase-book,larsgeorge/hbase-book,larsgeorge/hbase-book
|
package security;
import java.io.IOException;
import java.security.PrivilegedAction;
import java.security.PrivilegedExceptionAction;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.client.security.SecurityCapability;
import org.apache.hadoop.hbase.security.access.AccessControlClient;
import org.apache.hadoop.hbase.security.access.Permission;
import org.apache.hadoop.hbase.security.access.UserPermission;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.security.UserGroupInformation;
import util.HBaseHelper;
import javax.security.auth.Subject;
import javax.security.auth.login.AppConfigurationEntry;
import javax.security.auth.login.LoginContext;
// cc AccessControlExample Example using the API to handle ACLs
public class AccessControlExample {
private static TableName tableName;
// vv AccessControlExample
static class AuthenticatedUser implements AutoCloseable {
private UserGroupInformation ugi;
private Configuration conf;
private Connection connection;
public AuthenticatedUser(String user, String path)
throws IOException, InterruptedException {
ugi = loginUserWithKeyTab(user, path); // co AccessControlExample-01-LoginKeytab Login the user with a given keytab.
ugi.doAs(new PrivilegedExceptionAction<Void>() {
@Override
public Void run() throws Exception {
conf = HBaseConfiguration.create();
connection = ConnectionFactory.createConnection(conf); // co AccessControlExample-02-CreateConn Create the connection in the context of the authorized user.
return null;
}
});
}
private UserGroupInformation loginUserWithKeyTab(String user, String path)
throws IOException {
return UserGroupInformation.loginUserFromKeytabAndReturnUGI(user, path);
}
public Connection getConnection() {
return connection;
}
/*...*/
// ^^ AccessControlExample
public Configuration getConfiguration() {
return conf;
}
public UserGroupInformation getUgi() {
return ugi;
}
public String getShortUserName() {
return ugi.getShortUserName();
}
// vv AccessControlExample
public <T> T doAs(PrivilegedAction<T> action) {
return ugi.doAs(action);
}
public <T> T doAs(PrivilegedExceptionAction<T> action)
throws IOException, InterruptedException {
return ugi.doAs(action);
}
@Override
public void close() throws Exception {
if (connection != null)
connection.close();
connection = null;
}
/*...*/
// ^^ AccessControlExample
public void grant(final String user, final Permission.Action... action)
throws Exception {
doAs(new PrivilegedExceptionAction<Void>() {
@Override
public Void run() throws Exception {
try {
AccessControlClient.grant(connection, user, action); // co AccessControlExample-03-GrantHelper Call the access control client method in the context of the authenticated user.
} catch (Throwable throwable) {
throw new RuntimeException(throwable);
}
return null;
}
});
}
// vv AccessControlExample
public void grant(final TableName tableName, final String user,
final String family, final String qualifier,
final Permission.Action... action)
throws Exception {
doAs(new PrivilegedExceptionAction<Void>() {
@Override
public Void run() throws Exception {
try {
AccessControlClient.grant(connection, tableName, user,
family != null ? Bytes.toBytes(family) : null,
qualifier != null ? Bytes.toBytes(qualifier): null,
action);
} catch (Throwable throwable) {
throw new RuntimeException(throwable);
}
return null;
}
});
}
public void grant(final TableName tableName, final String user,
final Scan scan, final Permission.Action... action)
throws Exception {
doAs(new PrivilegedExceptionAction<Void>() {
@Override
public Void run() throws Exception {
Table table = connection.getTable(tableName);
ResultScanner scanner = table.getScanner(scan);
Map<String, Permission> perms = new HashMap<>();
perms.put(user, new Permission(action));
int rows = 0, cells = 0;
for (Result row : scanner) {
for (Cell cell : row.listCells()) {
Put put = new Put(cell.getRowArray(), cell.getRowOffset(),
cell.getRowLength());
put.add(cell);
put.setACL(perms);
table.put(put); // put per cell to avoid possible OOMEs
System.out.println("Put: " + put);
cells++;
}
rows++;
}
System.out.println("Processed " + rows + " rows and " +
cells + " cells.");
return null;
}
});
}
/*...*/
// ^^ AccessControlExample
public void grant(final String namespace, final String user,
final String family, final String qualifier,
final Permission.Action... action)
throws Exception {
doAs(new PrivilegedExceptionAction<Void>() {
@Override
public Void run() throws Exception {
try {
AccessControlClient.grant(connection, namespace, user, action);
} catch (Throwable throwable) {
throw new RuntimeException(throwable);
}
return null;
}
});
}
public void revoke(final String user, final Permission.Action... action)
throws Exception {
doAs(new PrivilegedExceptionAction<Void>() {
@Override
public Void run() throws Exception {
try {
AccessControlClient.revoke(connection, user, action);
} catch (Throwable throwable) {
throw new RuntimeException(throwable);
}
return null;
}
});
}
// vv AccessControlExample
public void revoke(final TableName tableName, final String user,
final String family, final String qualifier,
final Permission.Action... action)
throws Exception {
doAs(new PrivilegedExceptionAction<Void>() {
@Override
public Void run() throws Exception {
try {
AccessControlClient.revoke(connection, tableName, user,
family != null ? Bytes.toBytes(family) : null,
qualifier != null ? Bytes.toBytes(qualifier): null,
action);
} catch (Throwable throwable) {
throw new RuntimeException(throwable);
}
return null;
}
});
}
/*...*/
// ^^ AccessControlExample
public void revoke(final String namespace, final String user,
final String family, final String qualifier,
final Permission.Action... action)
throws Exception {
doAs(new PrivilegedExceptionAction<Void>() {
@Override
public Void run() throws Exception {
try {
AccessControlClient.revoke(connection, namespace, user, action);
} catch (Throwable throwable) {
throw new RuntimeException(throwable);
}
return null;
}
});
}
// vv AccessControlExample
public List<UserPermission> getUserPermissions(final String tableRegex)
throws Throwable {
return doAs(new PrivilegedExceptionAction<List<UserPermission>>() {
@Override
public List<UserPermission> run() throws Exception {
try {
return AccessControlClient.getUserPermissions(connection, tableRegex);
} catch (Throwable throwable) {
throw new RuntimeException(throwable);
}
}
});
}
/*...*/
// ^^ AccessControlExample
public void printUserPermissions(final String tableRegex)
throws Exception {
doAs(new PrivilegedExceptionAction<Void>() {
@Override
public Void run() throws Exception {
try {
List<UserPermission> ups = ups = AccessControlClient.
getUserPermissions(connection, tableRegex);
System.out.println("User permissions (" +
(tableRegex != null ? tableRegex : "hbase:acl") + ":");
int count = 0;
for (UserPermission perm : ups) {
System.out.println(" " + perm);
count++;
}
System.out.println("Found " + count + " permissions.");
} catch (Throwable throwable) {
throw new RuntimeException(throwable);
}
return null;
}
});
}
public void scan(final TableName tableName, final Scan scan) {
doAs(new PrivilegedAction<Void>() {
@Override
public Void run() {
try {
Table table = connection.getTable(tableName);
ResultScanner resultScanner = table.getScanner(scan);
System.out.println("Starting scan...");
int rows = 0;
for (Result result: resultScanner) {
System.out.println(" " + result);
rows++;
}
System.out.println("Found " + rows + " rows.");
resultScanner.close();
table.close();
} catch (Exception e) {
System.out.println("Scan failed with: " + e.getMessage());
}
return null;
}
});
}
public void put(final TableName tableName, final Put put)
throws Exception {
doAs(new PrivilegedExceptionAction<Void>() {
@Override
public Void run() throws Exception {
Table table = connection.getTable(tableName);
try {
table.put(put);
} catch(Exception e) {
System.out.println("Put failed with: " + e);
}
return null;
}
});
}
public Result get(final TableName tableName, final Get get)
throws Exception {
return doAs(new PrivilegedExceptionAction<Result>() {
@Override
public Result run() throws Exception {
Table table = connection.getTable(tableName);
try {
Result result = table.get(get);
System.out.println("Get result: " + result);
return result;
} catch(Exception e) {
System.out.println("Get failed with: " + e);
}
return null;
}
});
}
// vv AccessControlExample
}
// ^^ AccessControlExample
static Subject getSubject() throws Exception {
LoginContext context = new LoginContext("", new Subject(), null,
new javax.security.auth.login.Configuration() {
@Override
public AppConfigurationEntry[] getAppConfigurationEntry(String name) {
Map<String, String> options = new HashMap<String, String>();
options.put("useKeyTab", "false");
options.put("storeKey", "false");
options.put("doNotPrompt", "true");
options.put("useTicketCache", "true");
options.put("renewTGT", "true");
options.put("refreshKrb5Config", "true");
options.put("isInitiator", "true");
String ticketCache = System.getenv("KRB5CCNAME");
if (ticketCache != null) {
options.put("ticketCache", ticketCache);
}
options.put("debug", "true");
return new AppConfigurationEntry[]{
new AppConfigurationEntry("com.sun.security.auth.module.Krb5LoginModule",
AppConfigurationEntry.LoginModuleControlFlag.REQUIRED,
options)};
}
});
context.login();
return context.getSubject();
}
// vv AccessControlExample
public static void main(String[] args) throws Throwable {
final AuthenticatedUser superuser = new AuthenticatedUser( // co AccessControlExample-04-LoginUsers Login the three user roles: superuser, global admin, and application user.
"hbase/master-1.hbase.book@HBASE.BOOK", "/tmp/hbase.keytab");
AuthenticatedUser admin = new AuthenticatedUser(
"hbasebook@HBASE.BOOK", "/tmp/hbasebook.keytab");
AuthenticatedUser app1 = new AuthenticatedUser(
"app1user1@HBASE.BOOK", "/tmp/app1user1.keytab");
tableName = TableName.valueOf("testtable");
// ^^ AccessControlExample
System.out.println("Superuser: Preparing table and data...");
superuser.doAs(new PrivilegedExceptionAction<Void>() {
@Override
public Void run() throws Exception {
Configuration conf = HBaseConfiguration.create();
HBaseHelper helper = HBaseHelper.getHelper(conf);
helper.dropTable("testtable");
helper.createTable("testtable", "colfam1", "colfam2");
System.out.println("Adding rows to table...");
helper.fillTable("testtable", 1, 100, 100, "colfam1", "colfam2");
helper.close();
return null;
}
});
System.out.println("Superuser: Checking cluster settings...");
// vv AccessControlExample
superuser.doAs(new PrivilegedExceptionAction<Void>() { // co AccessControlExample-05-DoAsSuperuser Run the next commands as the superuser.
@Override
public Void run() throws Exception {
Connection connection = superuser.getConnection(); // co AccessControlExample-06-GetConn Get dedicated connection for authenticated user.
Admin admin = connection.getAdmin();
Table table = connection.getTable(tableName);
List<SecurityCapability> sc = admin.getSecurityCapabilities(); // co AccessControlExample-07-ListCaps List the security capabilities as reported from the Master.
for (SecurityCapability cap : sc) {
System.out.println(cap);
}
System.out.println("Report AccessController features...");
System.out.println("Access Controller Running: " +
AccessControlClient.isAccessControllerRunning(connection)); // co AccessControlExample-08-PrintAccCtlOpts Report the features enabled regarding access control.
System.out.println("Authorization Enabled: " +
AccessControlClient.isAuthorizationEnabled(connection));
System.out.println("Cell Authorization Enabled: " +
AccessControlClient.isCellAuthorizationEnabled(connection));
List<UserPermission> ups = null;
try {
ups = AccessControlClient.getUserPermissions(connection, ".*"); // co AccessControlExample-09-PrintPerms Print the current permissions for all tables.
System.out.println("User permissions:");
for (UserPermission perm : ups) {
System.out.println(" " + perm);
}
} catch (Throwable throwable) {
throw new RuntimeException(throwable);
}
table.close();
return null;
}
});
// ^^ AccessControlExample
System.out.println("Superuser: Grant global admin to hbasebook...");
// vv AccessControlExample
superuser.grant(admin.getShortUserName(), Permission.Action.values());
// ^^ AccessControlExample
System.out.println("Admin & App1: Print permissions...");
// vv AccessControlExample
admin.printUserPermissions(null);
admin.printUserPermissions(".*");
app1.printUserPermissions(tableName.toString());
// ^^ AccessControlExample
System.out.println("Application: Attempting to scan table, should fail...");
// vv AccessControlExample
app1.scan(tableName, new Scan()); // co AccessControlExample-10-ScanFail The scan will fail with an access denied message because the application user has no access permissions granted.
// ^^ AccessControlExample
System.out.println("Admin: Grant table read access to application...");
// vv AccessControlExample
admin.grant(tableName, app1.getShortUserName(), "colfam1", "col-1",
Permission.Action.READ);
app1.printUserPermissions(tableName.toString());
// ^^ AccessControlExample
System.out.println("Application: Attempting to scan table again...");
// vv AccessControlExample
app1.scan(tableName, new Scan()); // co AccessControlExample-11-ScanSuccessThe second scan will work and only return one column from the otherwise unrestricted scan.
// ^^ AccessControlExample
System.out.println("Admin: Grant table write access to application...");
// vv AccessControlExample
admin.grant(tableName, app1.getShortUserName(), "colfam1", "col-acl", // co AccessControlExample-12-ColQual Grant write access to the application for a single new column (which does not exist yet).
Permission.Action.WRITE);
// ^^ AccessControlExample
System.out.println("Application: Write into table...");
// vv AccessControlExample
Put put = new Put(Bytes.toBytes("row-1"));
put.addColumn(Bytes.toBytes("colfam1"), Bytes.toBytes("col-acl"),
Bytes.toBytes("val-acl"));
app1.put(tableName, put); // co AccessControlExample-13-WriteColumn Insert a value into the granted column.
// ^^ AccessControlExample
System.out.println("Application: Scanning table, value not visible...");
// vv AccessControlExample
app1.scan(tableName, new Scan(Bytes.toBytes("row-1"), // co AccessControlExample-14-ScanColumn Scanning the table does not show the write-only column, and a direct read of the column fails with an access denied error.
Bytes.toBytes("row-10")));
// ^^ AccessControlExample
System.out.println("Application: Attempting to directly access column, will fail...");
// vv AccessControlExample
Get get = new Get(Bytes.toBytes("row-1"));
get.addColumn(Bytes.toBytes("colfam1"), Bytes.toBytes("col-acl"));
app1.get(tableName, get);
// ^^ AccessControlExample
System.out.println("Admin: Grant read to application for new column...");
// vv AccessControlExample
Scan scan = new Scan(Bytes.toBytes("row-1"),
Bytes.toBytes("row-10"));
scan.addColumn(Bytes.toBytes("colfam1"), Bytes.toBytes("col-acl"));
admin.grant(tableName, app1.getShortUserName(), scan,
Permission.Action.READ);
// ^^ AccessControlExample
System.out.println("Application: Read new column...");
// vv AccessControlExample
app1.scan(tableName, new Scan(Bytes.toBytes("row-1"),
Bytes.toBytes("row-10")));
// ^^ AccessControlExample
System.out.println("Admin: Revoking all access for application...");
// vv AccessControlExample
admin.revoke(tableName, app1.getShortUserName(), "colfam1", "col-1",
Permission.Action.values());
admin.revoke(tableName, app1.getShortUserName(), "colfam1", "col-acl",
Permission.Action.values());
// ^^ AccessControlExample
System.out.println("Application: Attempting to scan, should fail...");
// vv AccessControlExample
app1.scan(tableName, new Scan());
}
}
|
ch13/src/main/java/security/AccessControlExample.java
|
package security;
import java.io.IOException;
import java.security.PrivilegedAction;
import java.security.PrivilegedExceptionAction;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.client.security.SecurityCapability;
import org.apache.hadoop.hbase.security.access.AccessControlClient;
import org.apache.hadoop.hbase.security.access.Permission;
import org.apache.hadoop.hbase.security.access.UserPermission;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.security.UserGroupInformation;
import util.HBaseHelper;
import javax.security.auth.Subject;
import javax.security.auth.login.AppConfigurationEntry;
import javax.security.auth.login.LoginContext;
// cc AccessControlExample Example using the API to handle ACLs
public class AccessControlExample {
private static TableName tableName;
// vv AccessControlExample
static class AuthenticatedUser implements AutoCloseable {
private UserGroupInformation ugi;
private Configuration conf;
private Connection connection;
public AuthenticatedUser(String user, String path)
throws IOException, InterruptedException {
ugi = loginUserWithKeyTab(user, path); // co AccessControlExample-01-LoginKeytab Login the user with a given keytab.
ugi.doAs(new PrivilegedExceptionAction<Void>() {
@Override
public Void run() throws Exception {
conf = HBaseConfiguration.create();
connection = ConnectionFactory.createConnection(conf); // co AccessControlExample-02-CreateConn Create the connection in the context of the authorized user.
return null;
}
});
}
private UserGroupInformation loginUserWithKeyTab(String user, String path)
throws IOException {
return UserGroupInformation.loginUserFromKeytabAndReturnUGI(user, path);
}
public Connection getConnection() {
return connection;
}
/*...*/
// ^^ AccessControlExample
public Configuration getConfiguration() {
return conf;
}
public UserGroupInformation getUgi() {
return ugi;
}
public String getShortUserName() {
return ugi.getShortUserName();
}
// vv AccessControlExample
public <T> T doAs(PrivilegedAction<T> action) {
return ugi.doAs(action);
}
public <T> T doAs(PrivilegedExceptionAction<T> action)
throws IOException, InterruptedException {
return ugi.doAs(action);
}
@Override
public void close() throws Exception {
if (connection != null)
connection.close();
connection = null;
}
/*...*/
// ^^ AccessControlExample
public void grant(final String user, final Permission.Action... action)
throws Exception {
doAs(new PrivilegedExceptionAction<Void>() {
@Override
public Void run() throws Exception {
try {
AccessControlClient.grant(connection, user, action); // co AccessControlExample-03-GrantHelper Call the access control client method in the context of the authenticated user.
} catch (Throwable throwable) {
throw new RuntimeException(throwable);
}
return null;
}
});
}
// vv AccessControlExample
public void grant(final TableName tableName, final String user,
final String family, final String qualifier,
final Permission.Action... action)
throws Exception {
doAs(new PrivilegedExceptionAction<Void>() {
@Override
public Void run() throws Exception {
try {
AccessControlClient.grant(connection, tableName, user,
family != null ? Bytes.toBytes(family) : null,
qualifier != null ? Bytes.toBytes(qualifier): null,
action);
} catch (Throwable throwable) {
throw new RuntimeException(throwable);
}
return null;
}
});
}
/*...*/
// ^^ AccessControlExample
public void grant(final String namespace, final String user,
final String family, final String qualifier,
final Permission.Action... action)
throws Exception {
doAs(new PrivilegedExceptionAction<Void>() {
@Override
public Void run() throws Exception {
try {
AccessControlClient.grant(connection, namespace, user, action);
} catch (Throwable throwable) {
throw new RuntimeException(throwable);
}
return null;
}
});
}
public void revoke(final String user, final Permission.Action... action)
throws Exception {
doAs(new PrivilegedExceptionAction<Void>() {
@Override
public Void run() throws Exception {
try {
AccessControlClient.revoke(connection, user, action);
} catch (Throwable throwable) {
throw new RuntimeException(throwable);
}
return null;
}
});
}
// vv AccessControlExample
public void revoke(final TableName tableName, final String user,
final String family, final String qualifier,
final Permission.Action... action)
throws Exception {
doAs(new PrivilegedExceptionAction<Void>() {
@Override
public Void run() throws Exception {
try {
AccessControlClient.revoke(connection, tableName, user,
family != null ? Bytes.toBytes(family) : null,
qualifier != null ? Bytes.toBytes(qualifier): null,
action);
} catch (Throwable throwable) {
throw new RuntimeException(throwable);
}
return null;
}
});
}
/*...*/
// ^^ AccessControlExample
public void revoke(final String namespace, final String user,
final String family, final String qualifier,
final Permission.Action... action)
throws Exception {
doAs(new PrivilegedExceptionAction<Void>() {
@Override
public Void run() throws Exception {
try {
AccessControlClient.revoke(connection, namespace, user, action);
} catch (Throwable throwable) {
throw new RuntimeException(throwable);
}
return null;
}
});
}
// vv AccessControlExample
public List<UserPermission> getUserPermissions(final String tableRegex)
throws Throwable {
return doAs(new PrivilegedExceptionAction<List<UserPermission>>() {
@Override
public List<UserPermission> run() throws Exception {
try {
return AccessControlClient.getUserPermissions(connection, tableRegex);
} catch (Throwable throwable) {
throw new RuntimeException(throwable);
}
}
});
}
/*...*/
// ^^ AccessControlExample
public void printUserPermissions(final String tableRegex)
throws Exception {
doAs(new PrivilegedExceptionAction<Void>() {
@Override
public Void run() throws Exception {
try {
List<UserPermission> ups = ups = AccessControlClient.
getUserPermissions(connection, tableRegex);
System.out.println("User permissions:");
for (UserPermission perm : ups) {
System.out.println(" " + perm);
}
} catch (Throwable throwable) {
throw new RuntimeException(throwable);
}
return null;
}
});
}
public void scan(final TableName tableName, final Scan scan) {
doAs(new PrivilegedAction<Void>() {
@Override
public Void run() {
try {
Table table = connection.getTable(tableName);
ResultScanner resultScanner = table.getScanner(scan);
System.out.println("Starting scan...");
int rows = 0;
for (Result result: resultScanner) {
System.out.println(result);
rows++;
}
System.out.println("Found " + rows + " rows.");
} catch (Exception e) {
System.out.println("Scan failed with: " + e);
}
return null;
}
});
}
// vv AccessControlExample
}
// ^^ AccessControlExample
static Subject getSubject() throws Exception {
LoginContext context = new LoginContext("", new Subject(), null,
new javax.security.auth.login.Configuration() {
@Override
public AppConfigurationEntry[] getAppConfigurationEntry(String name) {
Map<String, String> options = new HashMap<String, String>();
options.put("useKeyTab", "false");
options.put("storeKey", "false");
options.put("doNotPrompt", "true");
options.put("useTicketCache", "true");
options.put("renewTGT", "true");
options.put("refreshKrb5Config", "true");
options.put("isInitiator", "true");
String ticketCache = System.getenv("KRB5CCNAME");
if (ticketCache != null) {
options.put("ticketCache", ticketCache);
}
options.put("debug", "true");
return new AppConfigurationEntry[]{
new AppConfigurationEntry("com.sun.security.auth.module.Krb5LoginModule",
AppConfigurationEntry.LoginModuleControlFlag.REQUIRED,
options)};
}
});
context.login();
return context.getSubject();
}
// vv AccessControlExample
public static void main(String[] args) throws Throwable {
final AuthenticatedUser superuser = new AuthenticatedUser( // co AccessControlExample-04-LoginUsers Login the three user roles: superuser, global admin, and application user.
"hbase/master-1.hbase.book@HBASE.BOOK", "/tmp/hbase.keytab");
AuthenticatedUser admin = new AuthenticatedUser(
"hbasebook@HBASE.BOOK", "/tmp/hbasebook.keytab");
AuthenticatedUser app1 = new AuthenticatedUser(
"app1user1@HBASE.BOOK", "/tmp/app1user1.keytab");
tableName = TableName.valueOf("testtable");
// ^^ AccessControlExample
System.out.println("Superuser: Preparing table and data...");
superuser.doAs(new PrivilegedExceptionAction<Void>() {
@Override
public Void run() throws Exception {
Configuration conf = HBaseConfiguration.create();
HBaseHelper helper = HBaseHelper.getHelper(conf);
helper.dropTable("testtable");
helper.createTable("testtable", "colfam1", "colfam2");
System.out.println("Adding rows to table...");
helper.fillTable("testtable", 1, 100, 100, "colfam1", "colfam2");
helper.close();
return null;
}
});
System.out.println("Superuser: Checking cluster settings...");
// vv AccessControlExample
superuser.doAs(new PrivilegedExceptionAction<Void>() { // co AccessControlExample-05-DoAsSuperuser Run the next commands as the superuser.
@Override
public Void run() throws Exception {
Connection connection = superuser.getConnection(); // co AccessControlExample-06-GetConn Get dedicated connection for authenticated user.
Admin admin = connection.getAdmin();
Table table = connection.getTable(tableName);
List<SecurityCapability> sc = admin.getSecurityCapabilities(); // co AccessControlExample-07-ListCaps List the security capabilities as reported from the Master.
for (SecurityCapability cap : sc) {
System.out.println(cap);
}
System.out.println("Report AccessController features...");
System.out.println("Access Controller Running: " +
AccessControlClient.isAccessControllerRunning(connection)); // co AccessControlExample-08-PrintAccCtlOpts Report the features enabled regarding access control.
System.out.println("Authorization Enabled: " +
AccessControlClient.isAuthorizationEnabled(connection));
System.out.println("Cell Authorization Enabled: " +
AccessControlClient.isCellAuthorizationEnabled(connection));
List<UserPermission> ups = null;
try {
ups = AccessControlClient.getUserPermissions(connection, ".*"); // co AccessControlExample-09-PrintPerms Print the current permissions.
System.out.println("User permissions:");
for (UserPermission perm : ups) {
System.out.println(" " + perm);
}
} catch (Throwable throwable) {
throw new RuntimeException(throwable);
}
table.close();
return null;
}
});
// ^^ AccessControlExample
System.out.println("Superuser: Grant global admin to hbasebook...");
// vv AccessControlExample
superuser.grant(admin.getShortUserName(), Permission.Action.ADMIN);
// ^^ AccessControlExample
System.out.println("Admin & App1: Print permissions...");
// vv AccessControlExample
admin.printUserPermissions(null);
app1.printUserPermissions(tableName.toString());
// ^^ AccessControlExample
System.out.println("Application: Attempting to scan table...");
// vv AccessControlExample
app1.scan(tableName, new Scan());
// ^^ AccessControlExample
System.out.println("Admin: Grant table access to application...");
// vv AccessControlExample
admin.grant(tableName, app1.getShortUserName(), "colfam1", "col-1",
Permission.Action.READ);
app1.printUserPermissions(tableName.toString());
// ^^ AccessControlExample
System.out.println("Application: Attempting to scan table again...");
// vv AccessControlExample
app1.scan(tableName, new Scan());
}
}
|
Updated ch13 ACL example
|
ch13/src/main/java/security/AccessControlExample.java
|
Updated ch13 ACL example
|
|
Java
|
bsd-2-clause
|
10d8dee566d0d882f97b8b89950702c1237a1b55
| 0
|
MattDevo/edk2,MattDevo/edk2,MattDevo/edk2,MattDevo/edk2,MattDevo/edk2,MattDevo/edk2,MattDevo/edk2,MattDevo/edk2
|
/** @file
CollectPCDAction class.
This action class is to collect PCD information from MSA, SPD, FPD xml file.
This class will be used for wizard and build tools, So it can *not* inherit
from buildAction or wizardAction.
Copyright (c) 2006, Intel Corporation
All rights reserved. This program and the accompanying materials
are licensed and made available under the terms and conditions of the BSD License
which accompanies this distribution. The full text of the license may be found at
http://opensource.org/licenses/bsd-license.php
THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
**/
package org.tianocore.build.pcd.action;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.math.BigInteger;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.UUID;
import org.apache.xmlbeans.XmlException;
import org.apache.xmlbeans.XmlObject;
import org.tianocore.DynamicPcdBuildDefinitionsDocument.DynamicPcdBuildDefinitions;
import org.tianocore.FrameworkModulesDocument;
import org.tianocore.FrameworkPlatformDescriptionDocument;
import org.tianocore.ModuleSADocument;
import org.tianocore.PcdBuildDefinitionDocument.PcdBuildDefinition;
import org.tianocore.build.global.GlobalData;
import org.tianocore.build.global.SurfaceAreaQuery;
import org.tianocore.build.pcd.action.ActionMessage;
import org.tianocore.build.pcd.entity.DynamicTokenValue;
import org.tianocore.build.pcd.entity.MemoryDatabaseManager;
import org.tianocore.build.pcd.entity.SkuInstance;
import org.tianocore.build.pcd.entity.Token;
import org.tianocore.build.pcd.entity.UsageInstance;
import org.tianocore.build.pcd.exception.EntityException;
import org.tianocore.ModuleTypeDef;
class CStructTypeDeclaration {
String key;
int alignmentSize;
String cCode;
boolean initTable;
public CStructTypeDeclaration (String key, int alignmentSize, String cCode, boolean initTable) {
this.key = key;
this.alignmentSize = alignmentSize;
this.cCode = cCode;
this.initTable = initTable;
}
}
class StringTable {
private ArrayList<String> al;
private ArrayList<String> alComments;
private String phase;
int len;
public StringTable (String phase) {
this.phase = phase;
al = new ArrayList<String>();
alComments = new ArrayList<String>();
len = 0;
}
public String getSizeMacro () {
return String.format(PcdDatabase.StringTableSizeMacro, phase, getSize());
}
private int getSize () {
//
// We have at least one Unicode Character in the table.
//
return len == 0 ? 1 : len;
}
public int getTableLen () {
return al.size() == 0 ? 1 : al.size();
}
public String getExistanceMacro () {
return String.format(PcdDatabase.StringTableExistenceMacro, phase, (al.size() == 0)? "TRUE":"FALSE");
}
public void genCodeNew (ArrayList<CStructTypeDeclaration> declaList, HashMap<String, String> instTable) {
final String stringTable = "StringTable";
final String tab = "\t";
final String newLine = "\r\n";
final String commaNewLine = ",\r\n";
CStructTypeDeclaration decl;
String cDeclCode = "";
String cInstCode = "";
//
// If we have a empty StringTable
//
if (al.size() == 0) {
cDeclCode += tab + String.format("UINT16 %s[1]; /* StringTable is Empty */", stringTable) + newLine;
decl = new CStructTypeDeclaration (
stringTable,
2,
cDeclCode,
true
);
declaList.add(decl);
cInstCode = " { 0 } " + String.format("/* %s */", stringTable);
instTable.put(stringTable, cInstCode);
} else {
//
// If there is any String in the StringTable
//
for (int i = 0; i < al.size(); i++) {
String str = al.get(i);
String stringTableName;
if (i == 0) {
//
// StringTable is a well-known name in the PCD DXE driver
//
stringTableName = stringTable;
} else {
stringTableName = String.format("%s_%d", stringTable, i);
cDeclCode += tab;
}
cDeclCode += String.format("UINT16 %s[%d]; /* %s */", stringTableName, str.length() + 1, alComments.get(i)) + newLine;
if (i == 0) {
cInstCode = "/* StringTable */" + newLine;
}
cInstCode += tab + String.format("L\"%s\" /* %s */", al.get(i), alComments.get(i));
if (i != al.size() - 1) {
cInstCode += commaNewLine;
}
}
decl = new CStructTypeDeclaration (
stringTable,
2,
cDeclCode,
true
);
declaList.add(decl);
instTable.put(stringTable, cInstCode);
}
}
    //
    // Build the C member declarations for the string table (legacy code
    // path): one UINT16 array per string, or a one-element placeholder
    // when the table is empty.
    //
    public String getTypeDeclaration () {
        String output;
        final String stringTable = "StringTable";
        final String tab = "\t";
        final String newLine = ";\r\n";
        output = "/* StringTable */\r\n";
        if (al.size() == 0) {
            output += tab + String.format("UINT16 %s[1] /* StringTable is Empty */", stringTable) + newLine;
        }
        for (int i = 0; i < al.size(); i++) {
            String str = al.get(i);
            if (i == 0) {
                //
                // StringTable is a well-known name in the PCD DXE driver
                //
                output += tab + String.format("UINT16 %s[%d] /* %s */", stringTable, str.length() + 1, alComments.get(i)) + newLine;
            } else {
                output += tab + String.format("UINT16 %s_%d[%d] /* %s */", stringTable, i, str.length() + 1, alComments.get(i)) + newLine;
            }
        }
        return output;
    }
public ArrayList<String> getInstantiation () {
ArrayList<String> output = new ArrayList<String>();
output.add("/* StringTable */");
if (al.size() == 0) {
output.add("{ 0 }");
} else {
String str;
for (int i = 0; i < al.size(); i++) {
str = String.format("L\"%s\" /* %s */", al.get(i), alComments.get(i));
if (i != al.size() - 1) {
str += ",";
}
output.add(str);
}
}
return output;
}
public int add (String inputStr, Token token) {
int i;
int pos;
String str = inputStr;
//
// The input can be two types:
// "L\"Bootmode\"" or "Bootmode".
// We drop the L\" and \" for the first type.
if (str.startsWith("L\"") && str.endsWith("\"")) {
str = str.substring(2, str.length() - 1);
}
//
// Check if StringTable has this String already.
// If so, return the current pos.
//
for (i = 0, pos = 0; i < al.size(); i++) {
String s = al.get(i);;
if (str.equals(s)) {
return pos;
}
pos = s.length() + 1;
}
i = len;
//
// Include the NULL character at the end of String
//
len += str.length() + 1;
al.add(str);
alComments.add(token.getPrimaryKeyString());
return i;
}
}
//
// Records the datum size (in bytes) of every dynamic PCD entry, in token
// order, and emits the SizeTable declaration/initializer for one phase of
// the generated PCD database.
//
class SizeTable {
    private ArrayList<Integer> al;          // datum sizes, one per token, in add() order
    private ArrayList<String> alComments;   // token primary-key strings used as C comments
    private String phase;                   // phase prefix used in the generated macros/templates
    private int len;                        // number of entries added so far
    public SizeTable (String phase) {
        this.phase = phase;
        al = new ArrayList<Integer>();
        alComments = new ArrayList<String>();
        len = 0;
    }
    //
    // Append the SizeTable declaration (UINT16 array, alignment 2) and its
    // joined initializer text to the collections used by the new generator.
    //
    public void genCodeNew (ArrayList<CStructTypeDeclaration> declaList, HashMap<String, String> instTable, String phase) {
        final String name = "SizeTable";
        CStructTypeDeclaration decl;
        String cCode;
        cCode = String.format(PcdDatabase.SizeTableDeclaration, phase);
        decl = new CStructTypeDeclaration (
            name,
            2,
            cCode,
            true
        );
        declaList.add(decl);
        cCode = PcdDatabase.genInstantiationStr(getInstantiation());
        instTable.put(name, cCode);
    }
    // C member declaration for the legacy code path.
    public String getTypeDeclaration () {
        return String.format(PcdDatabase.SizeTableDeclaration, phase);
    }
    //
    // Initializer lines: "{", one size per token (comma-separated, with the
    // token name as a comment), "}"; a bare 0 entry when the table is empty.
    //
    public ArrayList<String> getInstantiation () {
        ArrayList<String> Output = new ArrayList<String>();
        Output.add("/* SizeTable */");
        Output.add("{");
        if (al.size() == 0) {
            Output.add("0");
        } else {
            for (int index = 0; index < al.size(); index++) {
                Integer n = al.get(index);
                String str = "\t" + n.toString();
                if (index != (al.size() - 1)) {
                    str += ",";
                }
                str += " /* " + alComments.get(index) + " */";
                Output.add(str);
            }
        }
        Output.add("}");
        return Output;
    }
    // Record the token's datum size; returns the entry's index in the table.
    public int add (Token token) {
        int index = len;
        len++;
        al.add(token.datumSize);
        alComments.add(token.getPrimaryKeyString());
        return index;
    }
    // Reported length is never zero: an empty table counts as one entry.
    public int getTableLen () {
        return al.size() == 0 ? 1 : al.size();
    }
}
//
// Deduplicated table of token-space GUIDs referenced by DynamicEx PCD
// entries; emits the EFI_GUID array declaration/initializer for one phase
// of the generated PCD database.
//
class GuidTable {
    private ArrayList<UUID> al;             // unique GUIDs in first-seen order
    private ArrayList<String> alComments;   // name associated with each GUID, for C comments
    private String phase;                   // phase prefix used in the generated macros
    private int len;                        // count of GUIDs added (tracks al.size())
    private int bodyLineNum;                // rows emitted by getInstantiation()
    public GuidTable (String phase) {
        this.phase = phase;
        al = new ArrayList<UUID>();
        alComments = new ArrayList<String>();
        len = 0;
        bodyLineNum = 0;
    }
    public String getSizeMacro () {
        return String.format(PcdDatabase.GuidTableSizeMacro, phase, getSize());
    }
    // Minimum reported size is 1: an empty table still emits one zero GUID.
    private int getSize () {
        return (al.size() == 0)? 1 : al.size();
    }
    public String getExistanceMacro () {
        return String.format(PcdDatabase.GuidTableExistenceMacro, phase, (al.size() == 0)? "TRUE":"FALSE");
    }
    //
    // Append the GuidTable declaration (alignment 8, matching EFI_GUID's
    // UINT64-sized leading field layout in the sorted struct) and its
    // joined initializer text to the new-generator collections.
    //
    public void genCodeNew (ArrayList<CStructTypeDeclaration> declaList, HashMap<String, String> instTable, String phase) {
        final String name = "GuidTable";
        CStructTypeDeclaration decl;
        String cCode = "";
        cCode += String.format(PcdDatabase.GuidTableDeclaration, phase);
        decl = new CStructTypeDeclaration (
            name,
            8,
            cCode,
            true
        );
        declaList.add(decl);
        cCode = PcdDatabase.genInstantiationStr(getInstantiation());
        instTable.put(name, cCode);
    }
    public String getTypeDeclaration () {
        return String.format(PcdDatabase.GuidTableDeclaration, phase);
    }
    //
    // Render a UUID as a C EFI_GUID initializer. UUID.toString() yields
    // 8-4-4-4-12 hex groups; group [3] supplies the first two Data4 bytes
    // and group [4] the remaining six, split into two-hex-digit bytes.
    //
    private String getUuidCString (UUID uuid) {
        String[] guidStrArray;
        guidStrArray =(uuid.toString()).split("-");
        return String.format("{ 0x%s, 0x%s, 0x%s, { 0x%s, 0x%s, 0x%s, 0x%s, 0x%s, 0x%s, 0x%s, 0x%s } }",
            guidStrArray[0],
            guidStrArray[1],
            guidStrArray[2],
            (guidStrArray[3].substring(0, 2)),
            (guidStrArray[3].substring(2, 4)),
            (guidStrArray[4].substring(0, 2)),
            (guidStrArray[4].substring(2, 4)),
            (guidStrArray[4].substring(4, 6)),
            (guidStrArray[4].substring(6, 8)),
            (guidStrArray[4].substring(8, 10)),
            (guidStrArray[4].substring(10, 12))
        );
    }
    public ArrayList<String> getInstantiation () {
        ArrayList<String> Output = new ArrayList<String>();
        Output.add("/* GuidTable */");
        Output.add("{");
        // An empty table is padded with the zero GUID so the array is non-empty.
        if (al.size() == 0) {
            Output.add(getUuidCString(new UUID(0, 0)));
        }
        for (int i = 0; i < al.size(); i++) {
            String str = "\t" + getUuidCString(al.get(i));
            str += "/* " + alComments.get(i) + " */";
            if (i != (al.size() - 1)) {
                str += ",";
            }
            Output.add(str);
            bodyLineNum++;
        }
        Output.add("}");
        return Output;
    }
    //
    // Add a GUID if not already present; returns its index in the table
    // (existing index for duplicates).
    //
    public int add (UUID uuid, String name) {
        //
        // Check if GuidTable has this entry already.
        // If so, return the GuidTable index.
        //
        for (int i = 0; i < al.size(); i++) {
            if (al.get(i).equals(uuid)) {
                return i;
            }
        }
        len++;
        al.add(uuid);
        alComments.add(name);
        //
        // Return the previous Table Index
        //
        return len - 1;
    }
    // NOTE(review): unlike the other tables (which report 1 when empty),
    // this returns 0 for an empty table — confirm callers expect that.
    public int getTableLen () {
        return al.size() == 0 ? 0 : al.size();
    }
}
//
// Deduplicated table of SKU-ID arrays. Each entry is stored as
// [count, id0, id1, ...]; add() returns the entry's flattened byte offset
// into the generated UINT8 SkuIdTable.
//
class SkuIdTable {
    private ArrayList<Integer[]> al;    // entries: element 0 is the SKU count, rest are SKU ids
    private ArrayList<String> alComment;// token primary-key string per entry, for C comments
    private String phase;               // phase prefix used in the generated macros
    private int len;                    // total flattened element count across all entries
    public SkuIdTable (String phase) {
        this.phase = phase;
        al = new ArrayList<Integer[]>();
        alComment = new ArrayList<String>();
        len = 0;
    }
    public String getSizeMacro () {
        return String.format(PcdDatabase.SkuIdTableSizeMacro, phase, getSize());
    }
    // Minimum reported size is 1 (placeholder element for an empty table).
    private int getSize () {
        return (len == 0)? 1 : len;
    }
    public String getExistanceMacro () {
        return String.format(PcdDatabase.SkuTableExistenceMacro, phase, (al.size() == 0)? "TRUE":"FALSE");
    }
    //
    // Append the SkuIdTable declaration (UINT8 array, alignment 1) and its
    // initializer; for the PEI phase also emit the SystemSkuId member,
    // initialized to 0.
    //
    public void genCodeNew (ArrayList<CStructTypeDeclaration> declaList, HashMap<String, String> instTable, String phase) {
        final String name = "SkuIdTable";
        CStructTypeDeclaration decl;
        String cCode = "";
        cCode += String.format(PcdDatabase.SkuIdTableDeclaration, phase);
        decl = new CStructTypeDeclaration (
            name,
            1,
            cCode,
            true
        );
        declaList.add(decl);
        cCode = PcdDatabase.genInstantiationStr(getInstantiation());
        instTable.put(name, cCode);
        //
        // SystemSkuId is in PEI phase PCD Database
        //
        if (phase.equalsIgnoreCase("PEI")) {
            decl = new CStructTypeDeclaration (
                "SystemSkuId",
                1,
                "SKU_ID SystemSkuId;\r\n",
                true
            );
            declaList.add(decl);
            instTable.put("SystemSkuId", "0");
        }
    }
    public String getTypeDeclaration () {
        return String.format(PcdDatabase.SkuIdTableDeclaration, phase);
    }
    //
    // Initializer lines: each entry rendered as "count, id0, id1, ...",
    // with a trailing comma on all but the final element overall.
    //
    public ArrayList<String> getInstantiation () {
        ArrayList<String> Output = new ArrayList<String> ();
        Output.add("/* SkuIdTable */");
        Output.add("{");
        if (al.size() == 0) {
            Output.add("\t0");
        }
        for (int index = 0; index < al.size(); index++) {
            String str;
            str = "/* " + alComment.get(index) + "*/ ";
            str += "/* MaxSku */ ";
            Integer[] ia = al.get(index);
            str += "\t" + ia[0].toString() + ", ";
            for (int index2 = 1; index2 < ia.length; index2++) {
                str += ia[index2].toString();
                if (!((index2 == ia.length - 1) && (index == al.size() - 1))) {
                    str += ", ";
                }
            }
            Output.add(str);
        }
        Output.add("}");
        return Output;
    }
    //
    // Register the token's SKU-ID array; identical arrays are shared.
    // Returns the flattened offset of the entry within the table.
    //
    public int add (Token token) {
        int index;
        int pos;
        //
        // Check if this SKU_ID Array is already in the table
        //
        pos = 0;
        for (Object o: al) {
            Integer [] s = (Integer[]) o;
            boolean different = false;
            // NOTE(review): s[0]/s[index] comparisons rely on auto-unboxing
            // against primitive values; if getSkuIdCount()/id were boxed,
            // == and != would compare identity — confirm their types.
            if (s[0] == token.getSkuIdCount()) {
                for (index = 1; index < s.length; index++) {
                    if (s[index] != token.skuData.get(index-1).id) {
                        different = true;
                        break;
                    }
                }
            } else {
                different = true;
            }
            if (different) {
                // Skip past this entry: count element plus its SKU ids.
                pos += s[0] + 1;
            } else {
                return pos;
            }
        }
        // Not found: build a new [count, id...] entry and append it.
        Integer [] skuIds = new Integer[token.skuData.size() + 1];
        skuIds[0] = new Integer(token.skuData.size());
        for (index = 1; index < skuIds.length; index++) {
            skuIds[index] = new Integer(token.skuData.get(index - 1).id);
        }
        index = len;
        len += skuIds.length;
        al.add(skuIds);
        alComment.add(token.getPrimaryKeyString());
        return index;
    }
    // Reported length is never zero: an empty table counts as one entry.
    public int getTableLen () {
        return al.size() == 0 ? 1 : al.size();
    }
}
//
// Table of local-token descriptors: each entry is a C "offsetof(...)"
// expression OR-ed with PCD_TYPE_* attribute flags. Emitted as the UINT32
// LocalTokenNumberTable array of the generated PCD database.
//
class LocalTokenNumberTable {
    private ArrayList<String> al;       // one C expression string per token
    private ArrayList<String> alComment;// token primary-key string per entry
    private String phase;               // phase prefix used in the generated macros
    private int len;                    // number of entries added
    public LocalTokenNumberTable (String phase) {
        this.phase = phase;
        al = new ArrayList<String>();
        alComment = new ArrayList<String>();
        len = 0;
    }
    public String getSizeMacro () {
        return String.format(PcdDatabase.LocalTokenNumberTableSizeMacro, phase, getSize())
            + String.format(PcdDatabase.LocalTokenNumberSizeMacro, phase, al.size());
    }
    // Minimum reported size is 1 (placeholder entry for an empty table).
    public int getSize () {
        return (al.size() == 0)? 1 : al.size();
    }
    public String getExistanceMacro () {
        return String.format(PcdDatabase.DatabaseExistenceMacro, phase, (al.size() == 0)? "TRUE":"FALSE");
    }
    //
    // Append the table declaration (UINT32 array, alignment 4) and its
    // joined initializer text to the new-generator collections.
    //
    public void genCodeNew (ArrayList<CStructTypeDeclaration> declaList, HashMap<String, String> instTable, String phase) {
        final String name = "LocalTokenNumberTable";
        CStructTypeDeclaration decl;
        String cCode = "";
        cCode += String.format(PcdDatabase.LocalTokenNumberTableDeclaration, phase);
        decl = new CStructTypeDeclaration (
            name,
            4,
            cCode,
            true
        );
        declaList.add(decl);
        cCode = PcdDatabase.genInstantiationStr(getInstantiation());
        instTable.put(name, cCode);
    }
    public String getTypeDeclaration () {
        return String.format(PcdDatabase.LocalTokenNumberTableDeclaration, phase);
    }
    public ArrayList<String> getInstantiation () {
        ArrayList<String> output = new ArrayList<String>();
        output.add("/* LocalTokenNumberTable */");
        output.add("{");
        if (al.size() == 0) {
            output.add("0");
        }
        for (int index = 0; index < al.size(); index++) {
            String str;
            str = "\t" + (String)al.get(index);
            str += " /* " + alComment.get(index) + " */ ";
            if (index != (al.size() - 1)) {
                str += ",";
            }
            output.add(str);
        }
        output.add("}");
        return output;
    }
    //
    // Append the token's descriptor: offsetof into the Init/Uninit struct
    // (depending on whether a default exists) OR-ed with the type flags
    // implied by the token's storage kind. Returns the entry index.
    //
    public int add (Token token) {
        int index = len;
        String str;
        len++;
        str = String.format(PcdDatabase.offsetOfStrTemplate, phase, token.hasDefaultValue() ? "Init" : "Uninit", token.getPrimaryKeyString());
        if (token.isUnicodeStringType()) {
            str += " | PCD_TYPE_STRING";
        }
        if (token.isSkuEnable()) {
            str += " | PCD_TYPE_SKU_ENABLED";
        }
        if (token.getDefaultSku().type == DynamicTokenValue.VALUE_TYPE.HII_TYPE) {
            str += " | PCD_TYPE_HII";
        }
        if (token.getDefaultSku().type == DynamicTokenValue.VALUE_TYPE.VPD_TYPE) {
            str += " | PCD_TYPE_VPD";
        }
        al.add(str);
        alComment.add(token.getPrimaryKeyString());
        return index;
    }
}
//
// Maps DynamicEx PCD entries (token-space GUID index + DynamicEx token
// number) to their local token numbers; emits the ExMapTable array of the
// generated PCD database, sorted for sequential lookup by the PCD driver.
//
class ExMapTable {
    //
    // One ExMapTable row: GUID-table index, DynamicEx token number and the
    // local token number it maps to.
    //
    class ExTriplet {
        public Integer guidTableIdx;
        public Long exTokenNumber;
        public Long localTokenIdx;
        public ExTriplet (int guidTableIdx, long exTokenNumber, long localTokenIdx) {
            this.guidTableIdx = new Integer(guidTableIdx);
            this.exTokenNumber = new Long(exTokenNumber);
            this.localTokenIdx = new Long(localTokenIdx);
        }
    }
    private ArrayList<ExTriplet> al;    // rows in add() order; sorted before emission
    private ArrayList<String> alComment;// row names, for C comments
    private String phase;               // phase prefix used in the generated macros
    private int len;                    // number of rows added
    private int bodyLineNum;            // rows emitted by getInstantiation()
    public ExMapTable (String phase) {
        this.phase = phase;
        al = new ArrayList<ExTriplet>();
        alComment = new ArrayList<String>();
        bodyLineNum = 0;
        len = 0;
    }
    public String getSizeMacro () {
        return String.format(PcdDatabase.ExMapTableSizeMacro, phase, getTableLen())
            + String.format(PcdDatabase.ExTokenNumber, phase, al.size());
    }
    public String getExistanceMacro () {
        return String.format(PcdDatabase.ExMapTableExistenceMacro, phase, (al.size() == 0)? "TRUE":"FALSE");
    }
    //
    // Append the ExMapTable declaration (alignment 4) and its joined
    // initializer text. The table is sorted first — see the comment on
    // ExTripletComp below.
    //
    public void genCodeNew (ArrayList<CStructTypeDeclaration> declaList, HashMap<String, String> instTable, String phase) {
        final String exMapTableName = "ExMapTable";
        sortTable();
        CStructTypeDeclaration decl;
        String cCode = "";
        cCode += String.format(PcdDatabase.ExMapTableDeclaration, phase);
        decl = new CStructTypeDeclaration (
            exMapTableName,
            4,
            cCode,
            true
        );
        declaList.add(decl);
        cCode = PcdDatabase.genInstantiationStr(getInstantiation());
        instTable.put(exMapTableName, cCode);
    }
    public String getTypeDeclaration () {
        return String.format(PcdDatabase.ExMapTableDeclaration, phase);
    }
    //
    // Initializer lines: "{ 0xTOKEN, localIdx, guidIdx }" per row; a dummy
    // all-zero row when the table is empty.
    //
    public ArrayList<String> getInstantiation () {
        ArrayList<String> Output = new ArrayList<String>();
        Output.add("/* ExMapTable */");
        Output.add("{");
        if (al.size() == 0) {
            Output.add("\t{0, 0, 0}");
        }
        int index;
        for (index = 0; index < al.size(); index++) {
            String str;
            ExTriplet e = (ExTriplet)al.get(index);
            str = "\t" + "{ " + String.format("0x%08X", e.exTokenNumber) + ", ";
            str += e.localTokenIdx.toString() + ", ";
            str += e.guidTableIdx.toString();
            str += "}" + " /* " + alComment.get(index) + " */" ;
            if (index != al.size() - 1) {
                str += ",";
            }
            Output.add(str);
            bodyLineNum++;
        }
        Output.add("}");
        return Output;
    }
    // Append a mapping row; returns its (pre-sort) index.
    public int add (int localTokenIdx, long exTokenNum, int guidTableIdx, String name) {
        int index = len;
        len++;
        al.add(new ExTriplet(guidTableIdx, exTokenNum, localTokenIdx));
        alComment.add(name);
        return index;
    }
    // Reported length is never zero: an empty table counts as one entry.
    public int getTableLen () {
        return al.size() == 0 ? 1 : al.size();
    }
    //
    // To simplify the algorithm for GetNextToken and GetNextTokenSpace in
    // PCD PEIM/Driver, we need to sort the ExMapTable according to the
    // following order:
    // 1) ExGuid
    // 2) ExTokenNumber
    //
    class ExTripletComp implements Comparator<ExTriplet> {
        public int compare (ExTriplet a, ExTriplet b) {
            //
            // BUGFIX: the original compared the boxed guidTableIdx values
            // with "==" (object identity, only reliable in the Integer
            // cache range) and duplicated the "greater than" branch, so
            // a.exTokenNumber < b.exTokenNumber was reported as equal and
            // the token-number ordering was never established.
            //
            if (a.guidTableIdx.intValue() == b.guidTableIdx.intValue()) {
                if (a.exTokenNumber.longValue() > b.exTokenNumber.longValue()) {
                    return 1;
                } else if (a.exTokenNumber.longValue() < b.exTokenNumber.longValue()) {
                    return -1;
                } else {
                    return 0;
                }
            }
            // Indexes are small array positions, so subtraction cannot overflow.
            return a.guidTableIdx.intValue() - b.guidTableIdx.intValue();
        }
    }
    private void sortTable () {
        java.util.Comparator<ExTriplet> comparator = new ExTripletComp();
        java.util.Collections.sort(al, comparator);
    }
}
//
// Generates the C header (hString) and source (cString) fragments for one
// phase of the PCD database from the collected dynamic PCD tokens.
//
class PcdDatabase {
    // Alignment used for the SKU_HEAD member inserted before SKU-enabled entries.
    private final static int SkuHeadAlignmentSize = 4;
    private final String newLine = "\r\n";
    private final String commaNewLine = ",\r\n";
    private final String tab = "\t";
    //
    // C declaration templates for the fixed tables; %s is the phase prefix.
    //
    public final static String ExMapTableDeclaration = "DYNAMICEX_MAPPING ExMapTable[%s_EXMAPPING_TABLE_SIZE];\r\n";
    public final static String GuidTableDeclaration = "EFI_GUID GuidTable[%s_GUID_TABLE_SIZE];\r\n";
    public final static String LocalTokenNumberTableDeclaration = "UINT32 LocalTokenNumberTable[%s_LOCAL_TOKEN_NUMBER_TABLE_SIZE];\r\n";
    public final static String StringTableDeclaration = "UINT16 StringTable[%s_STRING_TABLE_SIZE];\r\n";
    public final static String SizeTableDeclaration = "UINT16 SizeTable[%s_LOCAL_TOKEN_NUMBER_TABLE_SIZE];\r\n";
    public final static String SkuIdTableDeclaration = "UINT8 SkuIdTable[%s_SKUID_TABLE_SIZE];\r\n";
    //
    // #define templates for table sizes; %s is the phase prefix, %d the size.
    //
    public final static String ExMapTableSizeMacro = "#define %s_EXMAPPING_TABLE_SIZE %d\r\n";
    public final static String ExTokenNumber = "#define %s_EX_TOKEN_NUMBER %d\r\n";
    public final static String GuidTableSizeMacro = "#define %s_GUID_TABLE_SIZE %d\r\n";
    public final static String LocalTokenNumberTableSizeMacro = "#define %s_LOCAL_TOKEN_NUMBER_TABLE_SIZE %d\r\n";
    public final static String LocalTokenNumberSizeMacro = "#define %s_LOCAL_TOKEN_NUMBER %d\r\n";
    public final static String StringTableSizeMacro = "#define %s_STRING_TABLE_SIZE %d\r\n";
    public final static String SkuIdTableSizeMacro = "#define %s_SKUID_TABLE_SIZE %d\r\n";
    //
    // #define templates for table emptiness flags; second %s is TRUE/FALSE.
    //
    public final static String ExMapTableExistenceMacro = "#define %s_EXMAP_TABLE_EMPTY %s\r\n";
    public final static String GuidTableExistenceMacro = "#define %s_GUID_TABLE_EMPTY %s\r\n";
    public final static String DatabaseExistenceMacro = "#define %s_DATABASE_EMPTY %s\r\n";
    public final static String StringTableExistenceMacro = "#define %s_STRING_TABLE_EMPTY %s\r\n";
    public final static String SkuTableExistenceMacro = "#define %s_SKUID_TABLE_EMPTY %s\r\n";
    //
    // C offsetof() expression templates used in the local token number table.
    //
    public final static String offsetOfSkuHeadStrTemplate = "offsetof(%s_PCD_DATABASE, %s.%s_SkuDataTable)";
    public final static String offsetOfVariableEnabledDefault = "offsetof(%s_PCD_DATABASE, %s.%s_VariableDefault_%d)";
    public final static String offsetOfStrTemplate = "offsetof(%s_PCD_DATABASE, %s.%s)";
    private final static String skuDataTableTemplate = "SkuDataTable";
    //
    // Helper tables, one of each per phase.
    //
    private StringTable stringTable;
    private GuidTable guidTable;
    private LocalTokenNumberTable localTokenNumberTable;
    private SkuIdTable skuIdTable;
    private SizeTable sizeTable;
    private ExMapTable exMapTable;
    private ArrayList<Token> alTokens;  // all dynamic tokens for this phase
    private String phase;               // phase name, e.g. "PEI"
    private int assignedTokenNumber;    // next token number to hand out (1-based)
    //
    // Use two class global variable to store
    // temperary
    //
    private String privateGlobalName;
    private String privateGlobalCCode;
    //
    // After Major changes done to the PCD
    // database generation class PcdDatabase
    // Please increment the version and please
    // also update the version number in PCD
    // service PEIM and DXE driver accordingly.
    //
    private final int version = 2;
    private String hString;             // generated header text (filled by genCode/genCodeNew)
    private String cString;             // generated source text (filled by genCode/genCodeNew)
    //
    // Orders tokens by descending alignment size so larger-aligned members
    // come first in the generated struct. The subtraction cannot overflow:
    // getAlignmentSize returns small values (1..8).
    //
    class AlignmentSizeComp implements Comparator<Token> {
        public int compare (Token a, Token b) {
            return getAlignmentSize(b)
                - getAlignmentSize(a);
        }
    }
public PcdDatabase (ArrayList<Token> alTokens, String exePhase, int startLen) {
phase = exePhase;
stringTable = new StringTable(phase);
guidTable = new GuidTable(phase);
localTokenNumberTable = new LocalTokenNumberTable(phase);
skuIdTable = new SkuIdTable(phase);
sizeTable = new SizeTable(phase);
exMapTable = new ExMapTable(phase);
assignedTokenNumber = startLen + 1;
this.alTokens = alTokens;
}
private void getNonExAndExTokens (ArrayList<Token> alTokens, List<Token> nexTokens, List<Token> exTokens) {
for (int i = 0; i < alTokens.size(); i++) {
Token t = (Token)alTokens.get(i);
if (t.isDynamicEx()) {
exTokens.add(t);
} else {
nexTokens.add(t);
}
}
return;
}
private void getTwoGroupsOfTokens (ArrayList<Token> alTokens, List<Token> initTokens, List<Token> uninitTokens) {
for (int i = 0; i < alTokens.size(); i++) {
Token t = (Token)alTokens.get(i);
if (t.hasDefaultValue()) {
initTokens.add(t);
} else {
uninitTokens.add(t);
}
}
return;
}
private int getDataTypeAlignmentSize (Token token) {
switch (token.datumType) {
case UINT8:
return 1;
case UINT16:
return 2;
case UINT32:
return 4;
case UINT64:
return 8;
case POINTER:
return 1;
case BOOLEAN:
return 1;
default:
return 1;
}
}
private int getAlignmentSize (Token token) {
if (token.getDefaultSku().type == DynamicTokenValue.VALUE_TYPE.HII_TYPE) {
return 2;
}
if (token.getDefaultSku().type == DynamicTokenValue.VALUE_TYPE.VPD_TYPE) {
return 4;
}
if (token.isUnicodeStringType()) {
return 2;
}
return getDataTypeAlignmentSize(token);
}
    // Returns the generated C source fragment (filled in by genCode/genCodeNew).
    public String getCString () {
        return cString;
    }
    // Returns the generated C header fragment (filled in by genCode/genCodeNew).
    public String getHString () {
        return hString;
    }
    //
    // Emit the declaration and (when a default exists) the instantiation
    // for one token into the new-generator collections, inserting a
    // SKU_HEAD member first if the token is SKU-enabled.
    //
    private void genCodeWorker(Token t,
            ArrayList<CStructTypeDeclaration> declaList,
            HashMap<String, String> instTable, String phase)
            throws EntityException {
        CStructTypeDeclaration decl;
        //
        // Insert SKU_HEAD if isSkuEnable is true
        //
        if (t.isSkuEnable()) {
            int tableIdx;
            tableIdx = skuIdTable.add(t);
            decl = new CStructTypeDeclaration(t.getPrimaryKeyString(),
                SkuHeadAlignmentSize, getSkuEnabledTypeDeclaration(t), true);
            declaList.add(decl);
            instTable.put(t.getPrimaryKeyString(),
                getSkuEnabledTypeInstantiaion(t, tableIdx));
        }
        //
        // Insert PCD_ENTRY declaration and instantiation
        //
        // getCDeclarationString fills privateGlobalName/privateGlobalCCode
        // as a side effect; they are consumed immediately below.
        getCDeclarationString(t);
        decl = new CStructTypeDeclaration(privateGlobalName,
            getAlignmentSize(t), privateGlobalCCode, t.hasDefaultValue());
        declaList.add(decl);
        if (t.hasDefaultValue()) {
            instTable.put(privateGlobalName,
                getTypeInstantiation(t, declaList, instTable, phase)
            );
        }
    }
    //
    // Process a list of tokens for the new generator: emit each token's
    // C declaration/instantiation, record its size and local token number,
    // assign the next sequential token number, and register DynamicEx
    // entries in the ExMapTable/GuidTable.
    //
    private void ProcessTokensNew (List<Token> tokens,
            ArrayList<CStructTypeDeclaration> cStructDeclList,
            HashMap<String, String> cStructInstTable,
            String phase
            )
            throws EntityException {
        for (int idx = 0; idx < tokens.size(); idx++) {
            Token t = tokens.get(idx);
            genCodeWorker (t, cStructDeclList, cStructInstTable, phase);
            sizeTable.add(t);
            localTokenNumberTable.add(t);
            t.tokenNumber = assignedTokenNumber++;
            //
            // Add a mapping if this dynamic PCD entry is a EX type
            //
            if (t.isDynamicEx()) {
                exMapTable.add(t.tokenNumber,
                    t.dynamicExTokenNumber,
                    guidTable.add(t.tokenSpaceName, t.getPrimaryKeyString()),
                    t.getPrimaryKeyString()
                );
            }
        }
    }
    //
    // Top-level driver of the new code generator: processes all tokens,
    // emits every fixed table, and assembles the final header (hString)
    // and source (cString) fragments.
    //
    public void genCodeNew () throws EntityException {
        ArrayList<CStructTypeDeclaration> cStructDeclList = new ArrayList<CStructTypeDeclaration>();
        HashMap<String, String> cStructInstTable = new HashMap<String, String>();
        List<Token> nexTokens = new ArrayList<Token> ();
        List<Token> exTokens = new ArrayList<Token> ();
        getNonExAndExTokens (alTokens, nexTokens, exTokens);
        //
        // We have to process Non-Ex type PCD entry first. The reason is
        // that our optimization assumes that the Token Number of Non-Ex
        // PCD entry start from 1 (for PEI phase) and grows continously upwards.
        //
        // EX type token number starts from the last Non-EX PCD entry and
        // grows continously upwards.
        //
        ProcessTokensNew (nexTokens, cStructDeclList, cStructInstTable, phase);
        ProcessTokensNew (exTokens, cStructDeclList, cStructInstTable, phase);
        // Emit the fixed tables after all tokens have populated them.
        stringTable.genCodeNew(cStructDeclList, cStructInstTable);
        skuIdTable.genCodeNew(cStructDeclList, cStructInstTable, phase);
        exMapTable.genCodeNew(cStructDeclList, cStructInstTable, phase);
        localTokenNumberTable.genCodeNew(cStructDeclList, cStructInstTable, phase);
        sizeTable.genCodeNew(cStructDeclList, cStructInstTable, phase);
        guidTable.genCodeNew(cStructDeclList, cStructInstTable, phase);
        hString = genCMacroCode ();
        HashMap <String, String> result;
        result = genCStructCode(cStructDeclList,
            cStructInstTable,
            phase
        );
        hString += result.get("initDeclStr");
        hString += result.get("uninitDeclStr");
        hString += String.format("#define PCD_%s_SERVICE_DRIVER_VERSION %d", phase, version);
        cString = newLine + newLine + result.get("initInstStr");
    }
private String genCMacroCode () {
String macroStr = "";
//
// Generate size info Macro for all Tables
//
macroStr += guidTable.getSizeMacro();
macroStr += stringTable.getSizeMacro();
macroStr += skuIdTable.getSizeMacro();
macroStr += localTokenNumberTable.getSizeMacro();
macroStr += exMapTable.getSizeMacro();
//
// Generate existance info Macro for all Tables
//
macroStr += guidTable.getExistanceMacro();
macroStr += stringTable.getExistanceMacro();
macroStr += skuIdTable.getExistanceMacro();
macroStr += localTokenNumberTable.getExistanceMacro();
macroStr += exMapTable.getExistanceMacro();
macroStr += newLine;
return macroStr;
}
    //
    // Assemble the INIT/UNINIT struct typedefs and the INIT initializer,
    // grouping members by descending alignment (8,4,2,1) so the generated
    // C structs pack without padding. Returns a map with keys
    // "initDeclStr", "initInstStr" and "uninitDeclStr".
    //
    private HashMap <String, String> genCStructCode(
            ArrayList<CStructTypeDeclaration> declaList,
            HashMap<String, String> instTable,
            String phase
            ) {
        int i;
        HashMap <String, String> result = new HashMap<String, String>();
        HashMap <Integer, ArrayList<String>> alignmentInitDecl = new HashMap<Integer, ArrayList<String>>();
        HashMap <Integer, ArrayList<String>> alignmentUninitDecl = new HashMap<Integer, ArrayList<String>>();
        HashMap <Integer, ArrayList<String>> alignmentInitInst = new HashMap<Integer, ArrayList<String>>();
        //
        // Initialize the storage for each alignment
        //
        for (i = 8; i > 0; i>>=1) {
            alignmentInitDecl.put(new Integer(i), new ArrayList<String>());
            alignmentInitInst.put(new Integer(i), new ArrayList<String>());
            alignmentUninitDecl.put(new Integer(i), new ArrayList<String>());
        }
        String initDeclStr = "typedef struct {" + newLine;
        String initInstStr = String.format("%s_PCD_DATABASE_INIT g%sPcdDbInit = { ", phase.toUpperCase(), phase.toUpperCase()) + newLine;
        String uninitDeclStr = "typedef struct {" + newLine;
        //
        // Sort all C declaration and instantiation base on Alignment Size
        //
        for (Object d : declaList) {
            CStructTypeDeclaration decl = (CStructTypeDeclaration) d;
            if (decl.initTable) {
                alignmentInitDecl.get(new Integer(decl.alignmentSize)).add(decl.cCode);
                alignmentInitInst.get(new Integer(decl.alignmentSize)).add(instTable.get(decl.key));
            } else {
                alignmentUninitDecl.get(new Integer(decl.alignmentSize)).add(decl.cCode);
            }
        }
        //
        // Generate code for every alignment size
        //
        boolean uinitDatabaseEmpty = true;
        for (int align = 8; align > 0; align >>= 1) {
            ArrayList<String> declaListBasedOnAlignment = alignmentInitDecl.get(new Integer(align));
            ArrayList<String> instListBasedOnAlignment = alignmentInitInst.get(new Integer(align));
            for (i = 0; i < declaListBasedOnAlignment.size(); i++) {
                initDeclStr += tab + declaListBasedOnAlignment.get(i);
                initInstStr += tab + instListBasedOnAlignment.get(i);
                //
                // We made a assumption that both PEI_PCD_DATABASE and DXE_PCD_DATABASE
                // has a least one data memember with alignment size of 1. So we can
                // remove the last "," in the C structure instantiation string.
                //
                if ((align == 1) && (i == declaListBasedOnAlignment.size() - 1)) {
                    initInstStr += newLine;
                } else {
                    initInstStr += commaNewLine;
                }
            }
            declaListBasedOnAlignment = alignmentUninitDecl.get(new Integer(align));
            if (declaListBasedOnAlignment.size() != 0) {
                uinitDatabaseEmpty = false;
            }
            for (Object d : declaListBasedOnAlignment) {
                String s = (String)d;
                uninitDeclStr += tab + s;
            }
        }
        // An empty UNINIT struct still needs one member to be legal C.
        if (uinitDatabaseEmpty) {
            uninitDeclStr += tab + " UINT8 dummy; /* PCD_DATABASE_UNINIT is emptry */\r\n";
        }
        initDeclStr += String.format("} %s_PCD_DATABASE_INIT;", phase) + newLine + newLine;
        initInstStr += "};" + newLine;
        uninitDeclStr += String.format("} %s_PCD_DATABASE_UNINIT;", phase) + newLine + newLine;
        result.put("initDeclStr", initDeclStr);
        result.put("initInstStr", initInstStr);
        result.put("uninitDeclStr", uninitDeclStr);
        return result;
    }
    //
    // Legacy code-generation driver: sorts tokens by alignment, emits the
    // INIT/UNINIT struct typedefs, the table size/emptiness macros and the
    // INIT initializer, and stores the results in hString/cString.
    //
    public void genCode ()
            throws EntityException {
        final String newLine = "\r\n";
        final String declNewLine = ";\r\n";
        final String tab = "\t";
        final String commaNewLine = ", \r\n";
        int i;
        ArrayList<String> decla;
        ArrayList<String> inst;
        String macroStr = "";
        String initDeclStr = "";
        String initInstStr = "";
        String uninitDeclStr = "";
        List<Token> initTokens = new ArrayList<Token> ();
        List<Token> uninitTokens = new ArrayList<Token> ();
        HashMap <String, ArrayList<String>> initCode = new HashMap<String, ArrayList<String>> ();
        HashMap <String, ArrayList<String>> uninitCode = new HashMap<String, ArrayList<String>> ();
        getTwoGroupsOfTokens (alTokens, initTokens, uninitTokens);
        //
        // Generate Structure Declaration for PcdTokens without Default Value
        // PEI_PCD_DATABASE_INIT
        //
        java.util.Comparator<Token> comparator = new AlignmentSizeComp();
        java.util.Collections.sort(initTokens, comparator);
        initCode = processTokens(initTokens);
        //
        // Generate Structure Declaration for PcdTokens without Default Value
        // PEI_PCD_DATABASE_UNINIT
        //
        java.util.Collections.sort(uninitTokens, comparator);
        uninitCode = processTokens(uninitTokens);
        //
        // Generate size info Macro for all Tables
        //
        macroStr += guidTable.getSizeMacro();
        macroStr += stringTable.getSizeMacro();
        macroStr += skuIdTable.getSizeMacro();
        macroStr += localTokenNumberTable.getSizeMacro();
        macroStr += exMapTable.getSizeMacro();
        //
        // Generate existance info Macro for all Tables
        //
        macroStr += guidTable.getExistanceMacro();
        macroStr += stringTable.getExistanceMacro();
        macroStr += skuIdTable.getExistanceMacro();
        macroStr += localTokenNumberTable.getExistanceMacro();
        macroStr += exMapTable.getExistanceMacro();
        //
        // Generate Structure Declaration for PcdTokens with Default Value
        // for example PEI_PCD_DATABASE_INIT
        //
        initDeclStr += "typedef struct {" + newLine;
        {
            initDeclStr += tab + exMapTable.getTypeDeclaration();
            initDeclStr += tab + guidTable.getTypeDeclaration();
            initDeclStr += tab + localTokenNumberTable.getTypeDeclaration();
            initDeclStr += tab + stringTable.getTypeDeclaration();
            initDeclStr += tab + sizeTable.getTypeDeclaration();
            initDeclStr += tab + skuIdTable.getTypeDeclaration();
            if (phase.equalsIgnoreCase("PEI")) {
                initDeclStr += tab + "SKU_ID SystemSkuId;" + newLine;
            }
            decla = initCode.get(new String("Declaration"));
            for (i = 0; i < decla.size(); i++) {
                initDeclStr += tab + decla.get(i) + declNewLine;
            }
            //
            // Generate Structure Declaration for PcdToken with SkuEnabled
            //
            decla = initCode.get("DeclarationForSku");
            for (i = 0; i < decla.size(); i++) {
                initDeclStr += tab + decla.get(i) + declNewLine;
            }
        }
        initDeclStr += String.format("} %s_PCD_DATABASE_INIT;\r\n\r\n", phase);
        //
        // Generate MACRO for structure intialization of PCDTokens with Default Value
        // The sequence must match the sequence of declaration of the memembers in the structure
        String tmp = String.format("%s_PCD_DATABASE_INIT g%sPcdDbInit = { ", phase.toUpperCase(), phase.toUpperCase());
        initInstStr += tmp + newLine;
        initInstStr += tab + genInstantiationStr(exMapTable.getInstantiation()) + commaNewLine;
        initInstStr += tab + genInstantiationStr(guidTable.getInstantiation()) + commaNewLine;
        initInstStr += tab + genInstantiationStr(localTokenNumberTable.getInstantiation()) + commaNewLine;
        initInstStr += tab + genInstantiationStr(stringTable.getInstantiation()) + commaNewLine;
        initInstStr += tab + genInstantiationStr(sizeTable.getInstantiation()) + commaNewLine;
        initInstStr += tab + genInstantiationStr(skuIdTable.getInstantiation()) + commaNewLine;
        //
        // For SystemSkuId
        //
        if (phase.equalsIgnoreCase("PEI")) {
            initInstStr += tab + "0" + tab + "/* SystemSkuId */" + commaNewLine;
        }
        inst = initCode.get("Instantiation");
        for (i = 0; i < inst.size(); i++) {
            initInstStr += tab + inst.get(i) + commaNewLine;
        }
        inst = initCode.get("InstantiationForSku");
        for (i = 0; i < inst.size(); i++) {
            initInstStr += tab + inst.get(i);
            if (i != inst.size() - 1) {
                initInstStr += commaNewLine;
            }
        }
        initInstStr += "};";
        uninitDeclStr += "typedef struct {" + newLine;
        {
            decla = uninitCode.get("Declaration");
            // An empty UNINIT struct still needs one member to be legal C.
            if (decla.size() == 0) {
                uninitDeclStr += "UINT8 dummy /* The UINT struct is empty */" + declNewLine;
            } else {
                for (i = 0; i < decla.size(); i++) {
                    uninitDeclStr += tab + decla.get(i) + declNewLine;
                }
                decla = uninitCode.get("DeclarationForSku");
                for (i = 0; i < decla.size(); i++) {
                    uninitDeclStr += tab + decla.get(i) + declNewLine;
                }
            }
        }
        uninitDeclStr += String.format("} %s_PCD_DATABASE_UNINIT;\r\n\r\n", phase);
        cString = initInstStr + newLine;
        hString = macroStr + newLine
            + initDeclStr + newLine
            + uninitDeclStr + newLine
            + newLine;
        hString += String.format("#define PCD_%s_SERVICE_DRIVER_VERSION %d", phase, version);
    }
public static String genInstantiationStr (ArrayList<String> alStr) {
String str = "";
for (int i = 0; i< alStr.size(); i++) {
if (i != 0) {
str += "\t";
}
str += alStr.get(i);
if (i != alStr.size() - 1) {
str += "\r\n";
}
}
return str;
}
    //
    // Legacy per-token processing: build the declaration/instantiation
    // strings for each token (dispatching on SKU-enabled, HII, VPD, string
    // and plain data kinds), record sizes and local token numbers, and
    // assign sequential token numbers. Returns a map with keys
    // "Declaration", "DeclarationForSku", "Instantiation" and
    // "InstantiationForSku".
    //
    private HashMap<String, ArrayList<String>> processTokens (List<Token> alToken)
            throws EntityException {
        HashMap <String, ArrayList<String>> map = new HashMap<String, ArrayList<String>>();
        ArrayList<String> decl = new ArrayList<String>();
        ArrayList<String> declForSkuEnableType = new ArrayList<String>();
        ArrayList<String> inst = new ArrayList<String>();
        ArrayList<String> instForSkuEnableType = new ArrayList<String>();
        for (int index = 0; index < alToken.size(); index++) {
            Token token = alToken.get(index);
            if (token.isSkuEnable()) {
                //
                // BugBug: Schema only support Data type now
                //
                int tableIdx;
                tableIdx = skuIdTable.add(token);
                decl.add(getSkuEnabledTypeDeclaration(token));
                if (token.hasDefaultValue()) {
                    inst.add(getSkuEnabledTypeInstantiaion(token, tableIdx));
                }
                declForSkuEnableType.add(getDataTypeDeclarationForSkuEnabled(token));
                if (token.hasDefaultValue()) {
                    instForSkuEnableType.add(getDataTypeInstantiationForSkuEnabled(token));
                }
            } else {
                if (token.getDefaultSku().type == DynamicTokenValue.VALUE_TYPE.HII_TYPE) {
                    decl.add(getVariableEnableTypeDeclaration(token));
                    inst.add(getVariableEnableInstantiation(token));
                } else if (token.getDefaultSku().type == DynamicTokenValue.VALUE_TYPE.VPD_TYPE) {
                    decl.add(getVpdEnableTypeDeclaration(token));
                    inst.add(getVpdEnableTypeInstantiation(token));
                } else if (token.isUnicodeStringType()) {
                    decl.add(getStringTypeDeclaration(token));
                    inst.add(getStringTypeInstantiation(stringTable.add(token.getStringTypeString(), token), token));
                }
                else {
                    decl.add(getDataTypeDeclaration(token));
                    if (token.hasDefaultValue()) {
                        inst.add(getDataTypeInstantiation(token));
                    }
                }
            }
            // Every token, of any kind, is recorded in the shared tables.
            sizeTable.add(token);
            localTokenNumberTable.add(token);
            token.tokenNumber = assignedTokenNumber++;
        }
        map.put("Declaration", decl);
        map.put("DeclarationForSku", declForSkuEnableType);
        map.put("Instantiation", inst);
        map.put("InstantiationForSku", instForSkuEnableType);
        return map;
    }
/**
 * Builds the C declaration line for a SKU-enabled PCD entry.
 *
 * @param token the PCD token being declared
 * @return a {@code SKU_HEAD <key>;} declaration terminated with CRLF
 */
private String getSkuEnabledTypeDeclaration (Token token) {
    return "SKU_HEAD " + token.getPrimaryKeyString() + ";\r\n";
}
/**
 * Builds the SKU_HEAD initializer for a SKU-enabled PCD entry: the offset of
 * its data region inside the PCD database plus its SKU table index.
 *
 * @param token       the PCD token being instantiated
 * @param SkuTableIdx index of this token's entry in the SKU-id table
 * @return a {@code { offsetof(...), idx }} initializer with a trailing comment
 */
private String getSkuEnabledTypeInstantiaion (Token token, int SkuTableIdx) {
    // Defaulted tokens live in the "Init" region, the rest in "Uninit".
    String region = token.hasDefaultValue() ? "Init" : "Uninit";
    String key = token.getPrimaryKeyString();
    String offsetof = String.format(PcdDatabase.offsetOfSkuHeadStrTemplate, phase, region, key);
    return String.format("{ %s, %d } /* SKU_ENABLED: %s */", offsetof, SkuTableIdx, key);
}
/**
 * Builds the C declaration of the per-SKU data table backing a SKU-enabled
 * PCD: one array slot per SKU for scalar types, or a flat byte array of
 * {@code datumSize * skuCount} bytes for POINTER-typed tokens.
 *
 * @param token the SKU-enabled PCD token
 * @return the CRLF-terminated C declaration line
 */
private String getDataTypeDeclarationForSkuEnabled (Token token) {
    String key = token.getPrimaryKeyString();
    int skuCount = token.skuData.size();

    if (token.datumType == Token.DATUM_TYPE.POINTER) {
        // POINTER data is stored as raw bytes: datumSize bytes per SKU.
        return String.format("UINT8 %s_%s[%d];\r\n", key, "SkuDataTable", token.datumSize * skuCount);
    }

    String typeStr = "";
    switch (token.datumType) {
    case UINT8:
        typeStr = "UINT8 %s_%s[%d];\r\n";
        break;
    case UINT16:
        typeStr = "UINT16 %s_%s[%d];\r\n";
        break;
    case UINT32:
        typeStr = "UINT32 %s_%s[%d];\r\n";
        break;
    case UINT64:
        typeStr = "UINT64 %s_%s[%d];\r\n";
        break;
    case BOOLEAN:
        typeStr = "BOOLEAN %s_%s[%d];\r\n";
        break;
    default:
        // Mirrors the original's silent fall-through for unknown types.
        break;
    }

    return String.format(typeStr, key, "SkuDataTable", skuCount);
}
/**
 * Builds the initializer for the per-SKU data table of a SKU-enabled PCD:
 * a brace-enclosed, comma-separated list of each SKU's value. POINTER-typed
 * tokens instead reproduce the array declaration text (original behavior,
 * preserved).
 *
 * @param token the SKU-enabled PCD token
 * @return the C initializer (or declaration text for POINTER tokens)
 */
private String getDataTypeInstantiationForSkuEnabled (Token token) {
    if (token.datumType == Token.DATUM_TYPE.POINTER) {
        return String.format("UINT8 %s_%s[%d]", token.getPrimaryKeyString(), "SkuDataTable", token.datumSize * token.skuData.size());
    }

    StringBuilder initializer = new StringBuilder("{ ");
    int skuCount = token.skuData.size();
    for (int idx = 0; idx < skuCount; idx++) {
        initializer.append(token.skuData.get(idx).toString());
        if (idx != skuCount - 1) {
            initializer.append(", ");
        }
    }
    initializer.append("}");
    return initializer.toString();
}
/**
 * Builds the initializer for the default value of an HII (variable) dynamic
 * PCD for one SKU: the HII default value followed by a comment naming the
 * generated C member.
 *
 * @param token the HII-enabled PCD token
 * @param cName name of the generated default-value member
 * @param skuId index into the token's SKU data list
 * @return the C initializer text
 */
private String getDataTypeInstantiationForVariableDefault_new (Token token, String cName, int skuId) {
    String hiiDefault = token.skuData.get(skuId).value.hiiDefaultValue;
    return hiiDefault + " /* " + cName + " */";
}
/**
 * Builds the initializer for a plain data-typed PCD: its default-SKU value
 * followed by a comment naming the token.
 *
 * Note: the original code branched on {@code datumType == POINTER}, but both
 * branches produced exactly the same string, so the dead branching has been
 * removed; behavior is unchanged for every input.
 *
 * @param token the PCD token being instantiated
 * @return the C initializer text, e.g. {@code 0x10 /* key *&#47;}
 */
private String getDataTypeInstantiation (Token token) {
    return String.format("%s /* %s */", token.getDefaultSku().value, token.getPrimaryKeyString());
}
/**
 * Maps a token to the C type name used for it inside the PCD database.
 * HII, VPD and Unicode-string tokens use dedicated header structs; all
 * other tokens map to the primitive matching their datum type (POINTER
 * data is stored as UINT8 bytes).
 *
 * @param t the token whose storage type is needed
 * @return the C type name
 * @throws EntityException if the datum type is not recognized
 */
private String getCType (Token t)
    throws EntityException {
    // Special storage kinds take precedence over the raw datum type.
    if (t.isHiiEnable()) {
        return "VARIABLE_HEAD";
    }
    if (t.isVpdEnable()) {
        return "VPD_HEAD";
    }
    if (t.isUnicodeStringType()) {
        return "STRING_HEAD";
    }

    if (t.datumType == Token.DATUM_TYPE.UINT64) {
        return "UINT64";
    }
    if (t.datumType == Token.DATUM_TYPE.UINT32) {
        return "UINT32";
    }
    if (t.datumType == Token.DATUM_TYPE.UINT16) {
        return "UINT16";
    }
    if (t.datumType == Token.DATUM_TYPE.UINT8) {
        return "UINT8";
    }
    if (t.datumType == Token.DATUM_TYPE.BOOLEAN) {
        return "BOOLEAN";
    }
    if (t.datumType == Token.DATUM_TYPE.POINTER) {
        return "UINT8";
    }
    throw new EntityException("Unknown type in getDataTypeCDeclaration");
}
/**
 * Computes the private global name and C declaration for a token and stores
 * them in the {@code privateGlobalName} / {@code privateGlobalCCode} fields
 * (this method communicates its results through those fields, not a return
 * value).
 *
 * @param t the token to declare
 * @throws EntityException if the token's C type cannot be determined
 */
private void getCDeclarationString(Token t)
    throws EntityException {
    // SKU-enabled tokens get a "_<SkuDataTable>" suffix on their global name.
    privateGlobalName = t.isSkuEnable()
        ? String.format("%s_%s", t.getPrimaryKeyString(), skuDataTableTemplate)
        : t.getPrimaryKeyString();

    if (t.isUnicodeStringType()) {
        privateGlobalCCode = String.format("STRING_HEAD %s[%d];\r\n", t.getPrimaryKeyString(), t.getSkuIdCount());
        return;
    }

    String type = getCType(t);
    if (t.datumType == Token.DATUM_TYPE.POINTER) {
        // POINTER data: one datumSize-wide row per SKU.
        privateGlobalCCode = String.format("%s %s[%d][%d];\r\n", type, privateGlobalName, t.getSkuIdCount(), t.datumSize);
    } else {
        privateGlobalCCode = String.format("%s %s[%d];\r\n", type, privateGlobalName, t.getSkuIdCount());
    }
}
/**
 * Builds the C declaration of the default-value member backing an HII
 * (variable) dynamic PCD.
 *
 * @param token the HII-enabled PCD token
 * @param cName name to give the generated C member
 * @param skuId SKU index — currently unused here; presumably kept for
 *              symmetry with the instantiation counterpart (TODO confirm)
 * @return the C declaration text
 */
private String getDataTypeDeclarationForVariableDefault_new (Token token, String cName, int skuId) {
    String typeStr = "";

    if (token.datumType == Token.DATUM_TYPE.UINT8) {
        typeStr = "UINT8";
    } else if (token.datumType == Token.DATUM_TYPE.UINT16) {
        typeStr = "UINT16";
    } else if (token.datumType == Token.DATUM_TYPE.UINT32) {
        typeStr = "UINT32";
    } else if (token.datumType == Token.DATUM_TYPE.UINT64) {
        typeStr = "UINT64";
    } else if (token.datumType == Token.DATUM_TYPE.BOOLEAN) {
        typeStr = "BOOLEAN";
    } else if (token.datumType == Token.DATUM_TYPE.POINTER) {
        // NOTE(review): this return has no trailing ";\r\n" while the scalar
        // path below appends one — confirm the caller compensates, otherwise
        // this looks like a latent formatting bug.
        return String.format("UINT8 %s[%d]", cName, token.datumSize);
    } else {
        // NOTE(review): unknown datum types fall through silently, leaving
        // typeStr empty and producing a malformed declaration — confirm
        // whether this should raise instead.
    }

    return String.format("%s %s;\r\n", typeStr, cName);
}
/**
 * Builds the C declaration for a plain data-typed PCD: the primitive type
 * matching the datum type followed by the token's primary-key name, or a
 * fixed-size byte array for POINTER tokens.
 *
 * @param token the PCD token to declare
 * @return the C declaration text (no trailing semicolon for scalars)
 */
private String getDataTypeDeclaration (Token token) {
    String key = token.getPrimaryKeyString();
    String typeName;

    switch (token.datumType) {
    case UINT8:
        typeName = "UINT8";
        break;
    case UINT16:
        typeName = "UINT16";
        break;
    case UINT32:
        typeName = "UINT32";
        break;
    case UINT64:
        typeName = "UINT64";
        break;
    case BOOLEAN:
        typeName = "BOOLEAN";
        break;
    case POINTER:
        // POINTER data is declared as a raw byte array of datumSize bytes.
        return String.format("UINT8 %s[%d]", key, token.datumSize);
    default:
        // Mirrors the original's silent fall-through for unknown types.
        typeName = "";
        break;
    }

    return typeName + " " + key;
}
/**
 * Builds the C declaration for a VPD-backed PCD entry.
 *
 * @param token the VPD-enabled PCD token
 * @return a {@code VPD_HEAD <key>} declaration
 */
private String getVpdEnableTypeDeclaration (Token token) {
    return "VPD_HEAD " + token.getPrimaryKeyString();
}
/**
 * Builds the brace-enclosed, per-SKU initializer block for one token, and —
 * for HII tokens — additionally registers the declaration and instantiation
 * of each SKU's default-value member into {@code declaList} / {@code instTable}.
 *
 * Side effects: may add entries to stringTable, guidTable, declaList and
 * instTable; the order of those add() calls determines table indices, so the
 * statement order here is significant.
 *
 * @param t         the token to instantiate
 * @param declaList receives declarations for HII default-value members
 * @param instTable receives initializers for HII default-value members,
 *                  keyed by member name
 * @param phase     "PEI" or "DXE"; used to form offsetof() expressions
 * @return the multi-line C initializer for this token
 * @throws EntityException if a table insertion fails
 */
private String getTypeInstantiation (Token t, ArrayList<CStructTypeDeclaration> declaList, HashMap<String, String> instTable, String phase) throws EntityException {

    int     i;
    String  s;
    s = String.format("/* %s */", t.getPrimaryKeyString()) + newLine;
    s += tab + "{" + newLine;

    // One initializer entry per SKU.
    for (i = 0; i < t.skuData.size(); i++) {
        if (t.isUnicodeStringType() && !t.isHiiEnable()) {
            // Plain Unicode string: store its string-table index.
            s += tab + tab + String.format("{ %d }", stringTable.add(t.skuData.get(i).value.value, t));
        } else if (t.isHiiEnable()) {
            /* VARIABLE_HEAD definition
               typedef struct {
                 UINT16  GuidTableIndex;   // Offset in Guid Table in units of GUID.
                 UINT16  StringIndex;      // Offset in String Table in units of UINT16.
                 UINT16  Offset;           // Offset in Variable
               } VARIABLE_HEAD  ;
             */
            String variableDefaultName = String.format("%s_VariableDefault_%d", t.getPrimaryKeyString(), i);

            s += tab + tab + String.format("{ %d, %d, %s, %s }", guidTable.add(t.skuData.get(i).value.variableGuid, t.getPrimaryKeyString()),
                                                                 stringTable.add(t.skuData.get(i).value.getStringOfVariableName(), t),
                                                                 t.skuData.get(i).value.variableOffset,
                                                                 String.format("offsetof(%s_PCD_DATABASE, Init.%s)", phase, variableDefaultName)
                                                                 );
            //
            // We need to support the default value, so we add the declaration and
            // the instantiation for the default value.
            //
            CStructTypeDeclaration decl = new CStructTypeDeclaration (variableDefaultName,
                                                                      getDataTypeAlignmentSize(t),
                                                                      getDataTypeDeclarationForVariableDefault_new(t, variableDefaultName, i),
                                                                      true
                                                                      );
            declaList.add(decl);

            instTable.put(variableDefaultName, getDataTypeInstantiationForVariableDefault_new (t, variableDefaultName, i));
        } else if (t.isVpdEnable()) {
            /* typedef  struct {
                 UINT32  Offset;
               } VPD_HEAD;
            */
            s += tab + tab + String.format("{ %s }", t.skuData.get(i).value.vpdOffset);
        } else {
            // Plain data: embed the SKU's literal value.
            s += tab + tab + String.format("{ %s }", t.skuData.get(i).value.value);
        }

        if (i != t.skuData.size() - 1) {
            s += commaNewLine;
        } else {
            s += newLine;
        }

    }

    s += tab + "}";

    return s;
}
/**
 * Builds the VPD_HEAD initializer for a VPD-backed PCD: its VPD offset
 * followed by a comment naming the token.
 *
 * @param token the VPD-enabled PCD token
 * @return a {@code { offset } /* key *&#47;} initializer
 */
private String getVpdEnableTypeInstantiation (Token token) {
    String vpdOffset = token.getDefaultSku().vpdOffset;
    return "{ " + vpdOffset + " } /* " + token.getPrimaryKeyString() + " */";
}
/**
 * Builds the C declaration for a Unicode-string PCD: a UINT16 holding the
 * token's string-table index.
 *
 * @param token the string-typed PCD token
 * @return a {@code UINT16 <key>} declaration
 */
private String getStringTypeDeclaration (Token token) {
    return "UINT16 " + token.getPrimaryKeyString();
}
/**
 * Builds the initializer for a Unicode-string PCD: the token's string-table
 * index followed by a comment naming the token.
 *
 * @param StringTableIdx index of the token's string in the string table
 * @param token          the string-typed PCD token
 * @return the C initializer text
 */
private String getStringTypeInstantiation (int StringTableIdx, Token token) {
    return StringTableIdx + " /* " + token.getPrimaryKeyString() + " */";
}
/**
 * Builds the C declaration for an HII (variable) PCD entry.
 *
 * @param token the HII-enabled PCD token
 * @return a {@code VARIABLE_HEAD <key>} declaration
 */
private String getVariableEnableTypeDeclaration (Token token) {
    return "VARIABLE_HEAD " + token.getPrimaryKeyString();
}
/**
 * Builds the VARIABLE_HEAD initializer for an HII (variable) PCD: the GUID
 * table index, string table index and variable offset of its default SKU.
 *
 * Side effects: registers the variable GUID in guidTable and the variable
 * name in stringTable; the call order (GUID first, then string) is preserved
 * from the original because it determines table indices.
 *
 * @param token the HII-enabled PCD token
 * @return a {@code { guidIdx, stringIdx, offset }} initializer
 * @throws EntityException if a table insertion fails
 */
private String getVariableEnableInstantiation (Token token)
    throws EntityException {
    //
    // Need scott fix  (note carried over from the original code)
    //
    int guidIdx = guidTable.add(token.getDefaultSku().variableGuid, token.getPrimaryKeyString());
    int stringIdx = stringTable.add(token.getDefaultSku().getStringOfVariableName(), token);
    return String.format("{ %d, %d, %s } /* %s */", guidIdx,
                                                    stringIdx,
                                                    token.getDefaultSku().variableOffset,
                                                    token.getPrimaryKeyString());
}
/**
 * Returns the total number of tokens registered in this database; the size
 * table gains one entry per processed token, so its length is the count.
 *
 * @return the number of tokens in this PCD database
 */
public int getTotalTokenNumber () {
    return sizeTable.getTableLen();
}
/**
 * Reads the common PCD database definitions from the workspace sample file
 * {@code Tools/Conf/Pcd/PcdDatabaseCommonDefinitions.sample} and returns its
 * contents with each line prefixed by CRLF (preserving the original output
 * format exactly).
 *
 * Fixes over the original: the reader is now always closed (it was leaked),
 * and line accumulation uses a StringBuilder instead of O(n^2) string
 * concatenation.
 *
 * @return the sample file contents, CRLF-joined with a leading CRLF
 * @throws EntityException if the file cannot be found or read
 */
public static String getPcdDatabaseCommonDefinitions ()
    throws EntityException {
    StringBuilder retStr = new StringBuilder();
    BufferedReader in = null;
    try {
        File file = new File(GlobalData.getWorkspacePath() + File.separator +
                             "Tools" + File.separator +
                             "Conf" + File.separator +
                             "Pcd" + File.separator +
                             "PcdDatabaseCommonDefinitions.sample");
        in = new BufferedReader(new FileReader(file));
        String str;
        while ((str = in.readLine()) != null) {
            retStr.append("\r\n").append(str);
        }
    } catch (Exception ex) {
        throw new EntityException("Fatal error when generating PcdDatabase Common Definitions");
    } finally {
        // Always release the file handle; the original leaked it.
        if (in != null) {
            try {
                in.close();
            } catch (IOException ignored) {
                // Best-effort close; the content has already been read.
            }
        }
    }

    return retStr.toString();
}
/**
 * Reads the DXE-phase PCD database definitions from the workspace sample
 * file {@code Tools/Conf/Pcd/PcdDatabaseDxeDefinitions.sample} and returns
 * its contents with each line prefixed by CRLF.
 *
 * Fixes over the original: the reader is now always closed (it was leaked),
 * and line accumulation uses a StringBuilder instead of O(n^2) string
 * concatenation.
 *
 * @return the sample file contents, CRLF-joined with a leading CRLF
 * @throws EntityException if the file cannot be found or read
 */
public static String getPcdDxeDatabaseDefinitions ()
    throws EntityException {
    StringBuilder retStr = new StringBuilder();
    BufferedReader in = null;
    try {
        File file = new File(GlobalData.getWorkspacePath() + File.separator +
                             "Tools" + File.separator +
                             "Conf" + File.separator +
                             "Pcd" + File.separator +
                             "PcdDatabaseDxeDefinitions.sample");
        in = new BufferedReader(new FileReader(file));
        String str;
        while ((str = in.readLine()) != null) {
            retStr.append("\r\n").append(str);
        }
    } catch (Exception ex) {
        throw new EntityException("Fatal error when generating PcdDatabase Dxe Definitions");
    } finally {
        // Always release the file handle; the original leaked it.
        if (in != null) {
            try {
                in.close();
            } catch (IOException ignored) {
                // Best-effort close; the content has already been read.
            }
        }
    }

    return retStr.toString();
}
/**
 * Reads the PEI-phase PCD database definitions from the workspace sample
 * file {@code Tools/Conf/Pcd/PcdDatabasePeiDefinitions.sample} and returns
 * its contents with each line prefixed by CRLF.
 *
 * Fixes over the original: the reader is now always closed (it was leaked),
 * and line accumulation uses a StringBuilder instead of O(n^2) string
 * concatenation.
 *
 * @return the sample file contents, CRLF-joined with a leading CRLF
 * @throws EntityException if the file cannot be found or read
 */
public static String getPcdPeiDatabaseDefinitions ()
    throws EntityException {
    StringBuilder retStr = new StringBuilder();
    BufferedReader in = null;
    try {
        File file = new File(GlobalData.getWorkspacePath() + File.separator +
                             "Tools" + File.separator +
                             "Conf" + File.separator +
                             "Pcd" + File.separator +
                             "PcdDatabasePeiDefinitions.sample");
        in = new BufferedReader(new FileReader(file));
        String str;
        while ((str = in.readLine()) != null) {
            retStr.append("\r\n").append(str);
        }
    } catch (Exception ex) {
        throw new EntityException("Fatal error when generating PcdDatabase Pei Definitions");
    } finally {
        // Always release the file handle; the original leaked it.
        if (in != null) {
            try {
                in.close();
            } catch (IOException ignored) {
                // Best-effort close; the content has already been read.
            }
        }
    }

    return retStr.toString();
}
}
/**
 * Simple value holder pairing a module's {@code <ModuleSA>} element from the
 * FPD file with the module's type. Fields are public because callers access
 * them directly (e.g. {@code modules.get(i).module}).
 */
class ModuleInfo {
    /// The <ModuleSA> element describing this module in the FPD file.
    public ModuleSADocument.ModuleSA module;
    /// The module's type as declared in its surface area description.
    public ModuleTypeDef.Enum        type;

    /**
     * @param module the <ModuleSA> element for this module
     * @param type   the module type parsed from the module's surface area
     */
    public ModuleInfo (ModuleSADocument.ModuleSA module, ModuleTypeDef.Enum type) {
        this.module = module;
        this.type   = type;
    }
}
/** This action class collects PCD information from MSA, SPD and FPD XML files.
    Because it is used by both the wizard and the build tools, it can *not* inherit
    from buildAction or UIAction.
**/
public class CollectPCDAction {
/// Memory database holding all PCD information collected from SPD, MSA and FPD files.
private MemoryDatabaseManager dbManager;

/// Path of the workspace for the current build or analysis.
private String workspacePath;

/// Path of the FPD file, which is the root file of the build.
private String fpdFilePath;

/// Message level saved on entry so it can be restored when this action exits.
private int originalMessageLevel;

/// Cached FPD document instance, parsed lazily and reused internally.
private FrameworkPlatformDescriptionDocument fpdDocInstance;
/**
   Set the workspace path parameter for this action class.

   @param workspacePath path of the workspace for the current build or analysis
**/
public void setWorkspacePath(String workspacePath) {
    this.workspacePath = workspacePath;
}
/**
   Set the action message level for the CollectPcdAction tool.

   The previous level is saved into originalMessageLevel and is expected to
   be restored when this action exits (see perform()).

   @param actionMessageLevel the message level to use while this action runs
**/
public void setActionMessageLevel(int actionMessageLevel) {
    originalMessageLevel         = ActionMessage.messageLevel;
    ActionMessage.messageLevel   = actionMessageLevel;
}
/**
   Set the FPD file path parameter for this action class.

   @param fpdFilePath path of the FPD file (the root file of the build)
**/
public void setFPDFilePath(String fpdFilePath) {
    this.fpdFilePath = fpdFilePath;
}
/**
  Common entry point for external callers: configures the action, runs it,
  and restores the previous message level afterwards.

  @param workspacePath The path of the workspace of the current build or analysis.
  @param fpdFilePath   The FPD file path of the current build or analysis.
  @param messageLevel  The message level for this action.

  @throws  Exception The exception of this function. Because it can *not* be
           predicted where this action class will be used, only Exception can
           be thrown.

**/
public void perform(String workspacePath, String fpdFilePath,
                    int messageLevel) throws Exception {
    setWorkspacePath(workspacePath);
    setFPDFilePath(fpdFilePath);
    setActionMessageLevel(messageLevel);
    checkParameter();
    execute();
    // Restore the message level saved by setActionMessageLevel().
    ActionMessage.messageLevel = originalMessageLevel;
}
/**
  Core execution function for this action class.

  The workflow is:
  1) Collect and preprocess PCD information from the FPD file; all PCD
     information is stored into the memory database.
  2) Generate 3 strings for
    a) All modules using Dynamic(Ex) PCD entries (token number).
    b) PEI PCD database (C structure) for the PCD Service PEIM.
    c) DXE PCD database (C structure) for the PCD Service DXE driver.

  @throws  EntityException Exception indicating that this action failed.

**/
private void execute() throws EntityException {
    //
    // Get the memoryDatabaseManager instance from GlobalData.
    // The memoryDatabaseManager should be initialized for whatever build
    // tools or wizard tools use this action.
    //
    if((dbManager = GlobalData.getPCDMemoryDBManager()) == null) {
        throw new EntityException("The instance of PCD memory database manager is null");
    }

    //
    // Collect all PCD information defined in the FPD file.
    // Every token defined in the FPD will be created as a token in the
    // memory database.
    //
    createTokenInDBFromFPD();

    //
    // Generate the PEI and DXE PCD DATABASE definitions and initialization.
    //
    genPcdDatabaseSourceCode ();
}
/**
  Generates the source code for the PEI and DXE PCD databases and publishes
  the header/source strings through MemoryDatabaseManager's static fields.

  Note: the DXE token numbering continues after the PEI tokens (the DXE
  database is constructed with a start index of alPei.size()), and the DXE
  header string includes the PEI header string as a prefix.

  @throws EntityException If a token does *not* exist in the memory database.

**/
private void genPcdDatabaseSourceCode()
    throws EntityException {
    String PcdCommonHeaderString = PcdDatabase.getPcdDatabaseCommonDefinitions ();

    ArrayList<Token> alPei = new ArrayList<Token> ();
    ArrayList<Token> alDxe = new ArrayList<Token> ();

    // Split the dynamic tokens into PEI-phase and DXE-phase lists.
    dbManager.getTwoPhaseDynamicRecordArray(alPei, alDxe);
    PcdDatabase pcdPeiDatabase = new PcdDatabase (alPei, "PEI", 0);
    pcdPeiDatabase.genCodeNew();
    MemoryDatabaseManager.PcdPeimHString        = PcdCommonHeaderString + pcdPeiDatabase.getHString()
                                            + PcdDatabase.getPcdPeiDatabaseDefinitions();
    MemoryDatabaseManager.PcdPeimCString        = pcdPeiDatabase.getCString();

    PcdDatabase pcdDxeDatabase = new PcdDatabase (alDxe,
                                                  "DXE",
                                                  alPei.size()
                                                  );
    pcdDxeDatabase.genCodeNew();
    MemoryDatabaseManager.PcdDxeHString   = MemoryDatabaseManager.PcdPeimHString + pcdDxeDatabase.getHString()
                                      + PcdDatabase.getPcdDxeDatabaseDefinitions();
    MemoryDatabaseManager.PcdDxeCString   = pcdDxeDatabase.getCString();
}
/**
  Get the component (module) array from the FPD file.

  Lazily parses and caches the FPD document on first use, then queries the
  module surface areas and pairs each one with its module type.

  Fix over the original: three locals that were declared but never used
  (current, components, fModules) have been removed.

  @return List<ModuleInfo> the component array.
  @throws EntityException if the FPD file cannot be read or parsed.
*/
private List<ModuleInfo> getComponentsFromFPD()
    throws EntityException {
    List<ModuleInfo>            allModules = new ArrayList<ModuleInfo>();
    ModuleSADocument.ModuleSA[] modules    = null;
    HashMap<String, XmlObject>  map        = new HashMap<String, XmlObject>();

    // Parse and cache the FPD document if it has not been opened yet.
    if (fpdDocInstance == null) {
        try {
            fpdDocInstance = (FrameworkPlatformDescriptionDocument)XmlObject.Factory.parse(new File(fpdFilePath));
        } catch(IOException ioE) {
            throw new EntityException("File IO error for xml file:" + fpdFilePath + "\n" + ioE.getMessage());
        } catch(XmlException xmlE) {
            throw new EntityException("Can't parse the FPD xml fle:" + fpdFilePath + "\n" + xmlE.getMessage());
        }
    }

    map.put("FrameworkPlatformDescription", fpdDocInstance);
    SurfaceAreaQuery.setDoc(map);
    modules = SurfaceAreaQuery.getFpdModuleSAs();
    for (int index = 0; index < modules.length; index ++) {
        // Switch the query context to each module's own surface area to
        // read its module type.
        SurfaceAreaQuery.setDoc(GlobalData.getDoc(modules[index].getModuleName()));
        allModules.add(new ModuleInfo(modules[index],
                                      ModuleTypeDef.Enum.forString(SurfaceAreaQuery.getModuleType())));
    }

    return allModules;
}
/**
  Create token instance objects in the memory database; the token
  information comes from the FPD file. Normally the FPD file contains all
  platform token information.

  For every <ModuleSA> in the FPD (duplicates by primary key are skipped),
  each of its <PcdData> entries is validated and either merged into an
  existing token or registered as a new one, and a usage instance recording
  the module's use of the token is attached.

  @throws EntityException Failed to parse the FPD xml file or a validation
          check failed.

**/
private void createTokenInDBFromFPD()
    throws EntityException {
    int                                 index             = 0;
    int                                 index2            = 0;
    int                                 pcdIndex          = 0;
    List<PcdBuildDefinition.PcdData>    pcdBuildDataArray = new ArrayList<PcdBuildDefinition.PcdData>();
    PcdBuildDefinition.PcdData          pcdBuildData      = null;
    Token                               token             = null;
    SkuInstance                         skuInstance       = null;
    int                                 skuIndex          = 0;
    List<ModuleInfo>                    modules           = null;
    String                              primaryKey        = null;
    String                              exceptionString   = null;
    UsageInstance                       usageInstance     = null;
    String                              primaryKey1       = null;
    String                              primaryKey2       = null;
    boolean                             isDuplicate       = false;
    Token.PCD_TYPE                      pcdType           = Token.PCD_TYPE.UNKNOWN;
    Token.DATUM_TYPE                    datumType         = Token.DATUM_TYPE.UNKNOWN;
    int                                 tokenNumber       = 0;
    String                              moduleName        = null;
    String                              datum             = null;
    int                                 maxDatumSize      = 0;

    //
    // ----------------------------------------------
    // 1), Get all <ModuleSA> from the FPD file.
    // ----------------------------------------------
    //
    modules = getComponentsFromFPD();

    if (modules == null) {
        throw new EntityException("[FPD file error] No modules in FPD file, Please check whether there are elements in <FrameworkModules> in FPD file!");
    }

    //
    // -------------------------------------------------------------------
    // 2), Loop all modules to process <PcdBuildDeclarations> for each module.
    // -------------------------------------------------------------------
    //
    for (index = 0; index < modules.size(); index ++) {
        // Skip modules whose primary key duplicates an earlier module's.
        isDuplicate =  false;
        for (index2 = 0; index2 < index; index2 ++) {
            //
            // BUGBUG: For the transition schema, we can *not* get a module's
            // version from <ModuleSAs>; this is work-around code.
            //
            primaryKey1 = UsageInstance.getPrimaryKey(modules.get(index).module.getModuleName(),
                                                      null,
                                                      null,
                                                      null,
                                                      modules.get(index).module.getArch().toString(),
                                                      null);
            primaryKey2 = UsageInstance.getPrimaryKey(modules.get(index2).module.getModuleName(),
                                                      null,
                                                      null,
                                                      null,
                                                      modules.get(index2).module.getArch().toString(),
                                                      null);
            if (primaryKey1.equalsIgnoreCase(primaryKey2)) {
                isDuplicate = true;
                break;
            }
        }

        if (isDuplicate) {
            continue;
        }

        //
        // It is legal for a module to not contain ANY pcd build definitions.
        //
        if (modules.get(index).module.getPcdBuildDefinition() == null) {
            continue;
        }

        pcdBuildDataArray = modules.get(index).module.getPcdBuildDefinition().getPcdDataList();

        moduleName = modules.get(index).module.getModuleName();

        //
        // ----------------------------------------------------------------------
        // 2.1), Loop all Pcd entries for a module and add them into the memory database.
        // ----------------------------------------------------------------------
        //
        for (pcdIndex = 0; pcdIndex < pcdBuildDataArray.size(); pcdIndex ++) {
            pcdBuildData = pcdBuildDataArray.get(pcdIndex);
            primaryKey   = Token.getPrimaryKeyString(pcdBuildData.getCName(),
                                                     translateSchemaStringToUUID(pcdBuildData.getTokenSpaceGuid()));
            pcdType      = Token.getpcdTypeFromString(pcdBuildData.getItemType().toString());
            datumType    = Token.getdatumTypeFromString(pcdBuildData.getDatumType().toString());
            tokenNumber  = Integer.decode(pcdBuildData.getToken().toString());
            if (pcdBuildData.getValue() != null) {
                datum = pcdBuildData.getValue().toString();
            } else {
                datum = null;
            }
            maxDatumSize = pcdBuildData.getMaxDatumSize();

            // FEATURE_FLAG PCDs must have a BOOLEAN datum type.
            if ((pcdType    == Token.PCD_TYPE.FEATURE_FLAG) &&
                (datumType  != Token.DATUM_TYPE.BOOLEAN)){
                exceptionString = String.format("[FPD file error] For PCD %s in module %s, the PCD type is FEATRUE_FLAG but "+
                                                "datum type of this PCD entry is not BOOLEAN!",
                                                pcdBuildData.getCName(),
                                                moduleName);
                throw new EntityException(exceptionString);
            }

            //
            // Check whether <TokenSpaceGuid> exists. In the future, schema
            // verification tools will check this and the following code could
            // be removed.
            //
            if (pcdBuildData.getTokenSpaceGuid() == null) {
                exceptionString = String.format("[FPD file error] There is no <TokenSpaceGuid> for PCD %s in module %s! This is required!",
                                                pcdBuildData.getCName(),
                                                moduleName);
                throw new EntityException(exceptionString);
            }

            //
            // -------------------------------------------------------------------------------------------
            // 2.1.1), Do some necessary checking work for FixedAtBuild, FeatureFlag and PatchableInModule
            // -------------------------------------------------------------------------------------------
            //
            if (!Token.isDynamic(pcdType)) {
                 //
                 // Value is required for non-dynamic PCDs.
                 //
                 if (datum == null) {
                     exceptionString = String.format("[FPD file error] There is no value for PCD entry %s in module %s!",
                                                     pcdBuildData.getCName(),
                                                     moduleName);
                     throw new EntityException(exceptionString);
                 }

                 //
                 // Check whether the datum size matches the datum type.
                 //
                 if ((exceptionString = verifyDatum(pcdBuildData.getCName(),
                                                    moduleName,
                                                    datum,
                                                    datumType,
                                                    maxDatumSize)) != null) {
                     throw new EntityException(exceptionString);
                 }
            }

            //
            // ---------------------------------------------------------------------------------
            // 2.1.2), Create a token or update token information for the currently analyzed PCD data.
            // ---------------------------------------------------------------------------------
            //
            if (dbManager.isTokenInDatabase(primaryKey)) {
                //
                // The token already exists in the database: do some necessary
                // consistency checks and add a usage instance to it.
                //
                token = dbManager.getTokenByKey(primaryKey);

                //
                // The DatumType must be identical for one PCD used in
                // different modules.
                //
                if (token.datumType != datumType) {
                    exceptionString = String.format("[FPD file error] The datum type of PCD entry %s is %s, which is different with %s defined in before!",
                                                    pcdBuildData.getCName(),
                                                    pcdBuildData.getDatumType().toString(),
                                                    Token.getStringOfdatumType(token.datumType));
                    throw new EntityException(exceptionString);
                }

                //
                // Check that the token number is consistent across modules.
                //
                if (tokenNumber != token.tokenNumber) {
                    exceptionString = String.format("[FPD file error] The token number of PCD entry %s in module %s is different with same PCD entry in other modules!",
                                                    pcdBuildData.getCName(),
                                                    moduleName);
                    throw new EntityException(exceptionString);
                }

                //
                // For the same PCD used in different modules, the PCD type
                // must be consistently dynamic or consistently non-dynamic.
                //
                if (token.isDynamicPCD != Token.isDynamic(pcdType)) {
                    exceptionString = String.format("[FPD file error] For PCD entry %s in module %s, you define dynamic or non-dynamic PCD type which"+
                                                    "is different with others module's",
                                                    token.cName,
                                                    moduleName);
                    throw new EntityException(exceptionString);
                }

                if (token.isDynamicPCD) {
                    //
                    // Check the datum equals the datum in the dynamic information.
                    // For a dynamic PCD, <Value> may be omitted in each separate
                    // <PcdBuildDefinition> of different <ModuleSA> elements,
                    // but if written, the <Value> must match the value in
                    // <DynamicPcdBuildDefinitions>.
                    //
                    if (!token.isSkuEnable() &&
                        (token.getDefaultSku().type == DynamicTokenValue.VALUE_TYPE.DEFAULT_TYPE) &&
                        (datum != null)) {
                        if (!datum.equalsIgnoreCase(token.getDefaultSku().value)) {
                            exceptionString = String.format("[FPD file error] For dynamic PCD %s in module %s, the datum in <ModuleSA> is "+
                                                            "not equal to the datum in <DynamicPcdBuildDefinitions>, it is "+
                                                            "illega! You could no set <Value> in <ModuleSA> for a dynamic PCD!",
                                                            token.cName,
                                                            moduleName);
                            throw new EntityException(exceptionString);
                        }
                    }

                    if ((maxDatumSize != 0) &&
                        (maxDatumSize != token.datumSize)){
                        exceptionString = String.format("[FPD file error] For dynamic PCD %s in module %s, the max datum size is %d which "+
                                                        "is different with <MaxDatumSize> %d defined in <DynamicPcdBuildDefinitions>!",
                                                        token.cName,
                                                        moduleName,
                                                        maxDatumSize,
                                                        token.datumSize);
                        throw new EntityException(exceptionString);
                    }
                }

            } else {
                //
                // The token is not yet in the database: create a new token
                // instance and add a usage instance to it.
                //
                token = new Token(pcdBuildData.getCName(),
                                  translateSchemaStringToUUID(pcdBuildData.getTokenSpaceGuid()));

                token.datumType     = datumType;
                token.tokenNumber   = tokenNumber;
                token.isDynamicPCD  = Token.isDynamic(pcdType);
                token.datumSize     = maxDatumSize;

                if (token.isDynamicPCD) {
                    //
                    // For Dynamic and DynamicEx types, find the dynamic
                    // information in the <DynamicPcdBuildDefinition> section
                    // of the FPD file.
                    //
                    updateDynamicInformation(moduleName,
                                             token,
                                             datum,
                                             maxDatumSize);
                }

                dbManager.addTokenToDatabase(primaryKey, token);
            }

            //
            // -----------------------------------------------------------------------------------
            // 2.1.3), Add this module's PcdType to the token's supported PCD types.
            // -----------------------------------------------------------------------------------
            //
            token.updateSupportPcdType(pcdType);

            //
            // ------------------------------------------------
            // 2.1.4), Create a usage instance for this token.
            // ------------------------------------------------
            //
            usageInstance = new UsageInstance(token,
                                              moduleName,
                                              null,
                                              null,
                                              null,
                                              modules.get(index).type,
                                              pcdType,
                                              modules.get(index).module.getArch().toString(),
                                              null,
                                              datum,
                                              maxDatumSize);
            token.addUsageInstance(usageInstance);
        }
    }
}
/**
  Verify a datum value against its datum size and datum type. This function
  may be moved into FPD verification tools in the future.

  For VOID* (POINTER) datums, three literal forms are accepted: a Unicode
  string (L"..."), an ASCII string ("..."), or a byte array ({0x.., ..}).

  @param cName        the PCD's C name (used in error messages)
  @param moduleName   the module using the PCD, or null when the datum comes
                      from the <DynamicPcdBuildDefinitions> section
  @param datum        the literal value to validate, or null to skip value checks
  @param datumType    the declared datum type
  @param maxDatumSize the declared maximum datum size in bytes

  @return String      an error message describing the first problem found,
                      or null when the datum is valid
*/
public String verifyDatum(String            cName,
                          String            moduleName,
                          String            datum,
                          Token.DATUM_TYPE  datumType,
                          int               maxDatumSize) {
    String      exceptionString = null;
    int         value;
    BigInteger  value64;
    String      subStr;
    int         index;

    // Normalize the location label used in error messages.
    if (moduleName == null) {
        moduleName = "section <DynamicPcdBuildDefinitions>";
    } else {
        moduleName = "module " + moduleName;
    }

    if (maxDatumSize == 0) {
        exceptionString = String.format("[FPD file error] You maybe miss <MaxDatumSize> for PCD %s in %s",
                                        cName,
                                        moduleName);
        return exceptionString;
    }

    switch (datumType) {
    case UINT8:
        if (maxDatumSize != 1) {
            exceptionString = String.format("[FPD file error] The datum type of PCD data %s in %s "+
                                            "is UINT8, but datum size is %d, they are not matched!",
                                             cName,
                                             moduleName,
                                             maxDatumSize);
            return exceptionString;
        }

        if (datum != null) {
            try {
                value = Integer.decode(datum);
            } catch (NumberFormatException nfeExp) {
                exceptionString = String.format("[FPD file error] The datum for PCD %s in %s is not valid "+
                                                "digital format of UINT8",
                                                cName,
                                                moduleName);
                return exceptionString;
            }
            if (value > 0xFF) {
                exceptionString = String.format("[FPD file error] The datum for PCD %s in %s is %s exceed"+
                                                " the max size of UINT8 - 0xFF",
                                                cName,
                                                moduleName,
                                                datum);
                return exceptionString;
            }
        }
        break;
    case UINT16:
        if (maxDatumSize != 2) {
            exceptionString = String.format("[FPD file error] The datum type of PCD data %s in %s "+
                                            "is UINT16, but datum size is %d, they are not matched!",
                                             cName,
                                             moduleName,
                                             maxDatumSize);
            return exceptionString;
        }
        if (datum != null) {
            try {
                value = Integer.decode(datum);
            } catch (NumberFormatException nfeExp) {
                exceptionString = String.format("[FPD file error] The datum for PCD %s in %s is "+
                                                "not valid digital of UINT16",
                                                cName,
                                                moduleName);
                return exceptionString;
            }
            if (value > 0xFFFF) {
                exceptionString = String.format("[FPD file error] The datum for PCD %s in %s is %s "+
                                                "which exceed the range of UINT16 - 0xFFFF",
                                                cName,
                                                moduleName,
                                                datum);
                return exceptionString;
            }
        }
        break;
    case UINT32:
        if (maxDatumSize != 4) {
            exceptionString = String.format("[FPD file error] The datum type of PCD data %s in %s "+
                                            "is UINT32, but datum size is %d, they are not matched!",
                                             cName,
                                             moduleName,
                                             maxDatumSize);
            return exceptionString;
        }

        if (datum != null) {
            try {
                // BigInteger is used so hexadecimal values up to the full
                // 32-bit range parse without overflow.
                if (datum.length() > 2) {
                    if ((datum.charAt(0) == '0')        &&
                        ((datum.charAt(1) == 'x') || (datum.charAt(1) == 'X'))){
                        subStr = datum.substring(2, datum.length());
                        value64 = new BigInteger(subStr, 16);
                    } else {
                        value64 = new BigInteger(datum);
                    }
                } else {
                    value64 = new BigInteger(datum);
                }
            } catch (NumberFormatException nfeExp) {
                exceptionString = String.format("[FPD file error] The datum for PCD %s in %s is not "+
                                                "valid digital of UINT32",
                                                cName,
                                                moduleName);
                return exceptionString;
            }

            if (value64.bitLength() > 32) {
                exceptionString = String.format("[FPD file error] The datum for PCD %s in %s is %s which "+
                                                "exceed the range of UINT32 - 0xFFFFFFFF",
                                                cName,
                                                moduleName,
                                                datum);
                return exceptionString;
            }
        }
        break;
    case UINT64:
        if (maxDatumSize != 8) {
            exceptionString = String.format("[FPD file error] The datum type of PCD data %s in %s "+
                                            "is UINT64, but datum size is %d, they are not matched!",
                                             cName,
                                             moduleName,
                                             maxDatumSize);
            return exceptionString;
        }

        if (datum != null) {
            try {
                if (datum.length() > 2) {
                    if ((datum.charAt(0) == '0')        &&
                        ((datum.charAt(1) == 'x') || (datum.charAt(1) == 'X'))){
                        subStr = datum.substring(2, datum.length());
                        value64 = new BigInteger(subStr, 16);
                    } else {
                        value64 = new BigInteger(datum);
                    }
                } else {
                    value64 = new BigInteger(datum);
                }
            } catch (NumberFormatException nfeExp) {
                exceptionString = String.format("[FPD file error] The datum for PCD %s in %s is not valid"+
                                                " digital of UINT64",
                                                cName,
                                                moduleName);
                return exceptionString;
            }

            if (value64.bitLength() > 64) {
                exceptionString = String.format("[FPD file error] The datum for PCD %s in %s is %s "+
                                                "exceed the range of UINT64 - 0xFFFFFFFFFFFFFFFF",
                                                cName,
                                                moduleName,
                                                datum);
                return exceptionString;
            }
        }
        break;
    case BOOLEAN:
        if (maxDatumSize != 1) {
            exceptionString = String.format("[FPD file error] The datum type of PCD data %s in %s "+
                                            "is BOOLEAN, but datum size is %d, they are not matched!",
                                             cName,
                                             moduleName,
                                             maxDatumSize);
            return exceptionString;
        }

        if (datum != null) {
            if (!(datum.equalsIgnoreCase("TRUE") ||
                 datum.equalsIgnoreCase("FALSE"))) {
                exceptionString = String.format("[FPD file error] The datum type of PCD data %s in %s "+
                                                "is BOOELAN, but value is not 'true'/'TRUE' or 'FALSE'/'false'",
                                                cName,
                                                moduleName);
                return exceptionString;
            }

        }
        break;
    case POINTER:
        if (datum == null) {
            break;
        }

        char    ch     = datum.charAt(0);
        int     start, end;
        String  strValue;
        //
        // For a void* type PCD, only three datum forms are supported:
        // 1) Unicode: string starting with "L"
        // 2) Ansci: string starting with "
        // 3) byte array: string starting with "{"
        //
        if (ch == 'L') {
            start       = datum.indexOf('\"');
            end         = datum.lastIndexOf('\"');
            if ((start > end)           ||
                (end   > datum.length())||
                ((start == end) && (datum.length() > 0))) {
                exceptionString = String.format("[FPD file error] The datum type of PCD %s in %s is VOID* and datum is "+
                                                "a UNICODE string because start with L\", but format maybe"+
                                                "is not right, correct UNICODE string is L\"...\"!",
                                                cName,
                                                moduleName);
                return exceptionString;
            }

            strValue    = datum.substring(start + 1, end);
            // Each Unicode character occupies two bytes of storage.
            if ((strValue.length() * 2) > maxDatumSize) {
                exceptionString = String.format("[FPD file error] The datum type of PCD %s in %s is VOID*, and datum is "+
                                                "a UNICODE string, but the datum size is %d exceed to <MaxDatumSize> : %d",
                                                cName,
                                                moduleName,
                                                strValue.length() * 2,
                                                maxDatumSize);
                return exceptionString;
            }
        } else if (ch == '\"'){
            start       = datum.indexOf('\"');
            end         = datum.lastIndexOf('\"');
            if ((start > end)           ||
                (end   > datum.length())||
                ((start == end) && (datum.length() > 0))) {
                exceptionString = String.format("[FPD file error] The datum type of PCD %s in %s is VOID* and datum is "+
                                                "a ANSCII string because start with \", but format maybe"+
                                                "is not right, correct ANSIC string is \"...\"!",
                                                cName,
                                                moduleName);
                return exceptionString;
            }
            strValue    = datum.substring(start + 1, end);
            if ((strValue.length()) > maxDatumSize) {
                exceptionString = String.format("[FPD file error] The datum type of PCD %s in %s is VOID*, and datum is "+
                                                "a ANSCI string, but the datum size is %d which exceed to <MaxDatumSize> : %d",
                                                cName,
                                                moduleName,
                                                strValue.length(),
                                                maxDatumSize);
                return exceptionString;
            }
        } else if (ch =='{') {
            String[]  strValueArray;

            start           = datum.indexOf('{');
            end             = datum.lastIndexOf('}');
            strValue        = datum.substring(start + 1, end);
            strValue        = strValue.trim();
            if (strValue.length() == 0) {
                // An empty byte array {} is accepted as-is.
                break;
            }
            strValueArray   = strValue.split(",");
            for (index = 0; index < strValueArray.length; index ++) {
                try{
                    value = Integer.decode(strValueArray[index].trim());
                } catch (NumberFormatException nfeEx) {
                    exceptionString = String.format("[FPD file error] The datum type of PCD %s in %s is VOID*, and "+
                                                     "it is byte array in fact. For every byte in array should be a valid"+
                                                     "byte digital, but element %s is not a valid byte digital!",
                                                     cName,
                                                     moduleName,
                                                     strValueArray[index]);
                    return exceptionString;
                }
                if (value > 0xFF) {
                    exceptionString = String.format("[FPD file error] The datum type of PCD %s in %s is VOID*, "+
                                                    "it is byte array in fact. But the element of %s exceed the byte range",
                                                    cName,
                                                    moduleName,
                                                    strValueArray[index]);
                    return exceptionString;
                }
            }

            if (strValueArray.length > maxDatumSize) {
                exceptionString = String.format("[FPD file error] The datum type of PCD %s in %s is VOID*, and datum is byte"+
                                                "array, but the number of bytes is %d which exceed to <MaxDatumSzie> : %d!",
                                                cName,
                                                moduleName,
                                                strValueArray.length,
                                                maxDatumSize);
                return exceptionString;
            }
        } else {
            exceptionString = String.format("[FPD file error] The datum type of PCD %s in %s is VOID*. For VOID* type, you have three format choise:\n "+
                                            "1) UNICODE string: like L\"xxxx\";\r\n"+
                                            "2) ANSIC string: like \"xxx\";\r\n"+
                                            "3) Byte array: like {0x2, 0x45, 0x23}\r\n"+
                                            "But the datum in seems does not following above format!",
                                            cName,
                                            moduleName);
            return exceptionString;
        }
        break;
    default:
        exceptionString = String.format("[FPD file error] For PCD entry %s in %s, datum type is unknown, it should be one of "+
                                        "UINT8, UINT16, UINT32, UINT64, VOID*, BOOLEAN",
                                        cName,
                                        moduleName);
        return exceptionString;
    }
    return null;
}
/**
Get dynamic information for a dynamic PCD from <DynamicPcdBuildDefinition> seciton in FPD file.
This function should be implemented in GlobalData in future.
@param token The token instance which has hold module's PCD information
@param moduleName The name of module who will use this Dynamic PCD.
@return DynamicPcdBuildDefinitions.PcdBuildData
*/
/***/
private DynamicPcdBuildDefinitions.PcdBuildData getDynamicInfoFromFPD(Token token,
                                                                      String moduleName)
    throws EntityException {
    //
    // Lazily open and parse the FPD document on first access; the parsed
    // document is cached in fpdDocInstance for subsequent lookups.
    //
    if (fpdDocInstance == null) {
        try {
            fpdDocInstance = (FrameworkPlatformDescriptionDocument)XmlObject.Factory.parse(new File(fpdFilePath));
        } catch(IOException ioE) {
            throw new EntityException("File IO error for xml file:" + fpdFilePath + "\n" + ioE.getMessage());
        } catch(XmlException xmlE) {
            throw new EntityException("Can't parse the FPD xml fle:" + fpdFilePath + "\n" + xmlE.getMessage());
        }
    }

    DynamicPcdBuildDefinitions dynamicPcdBuildDefinitions =
        fpdDocInstance.getFrameworkPlatformDescription().getDynamicPcdBuildDefinitions();
    if (dynamicPcdBuildDefinitions == null) {
        String exceptionString = String.format("[FPD file error] There are no <PcdDynamicBuildDescriptions> in FPD file but contains Dynamic type "+
                                               "PCD entry %s in module %s!",
                                               token.cName,
                                               moduleName);
        throw new EntityException(exceptionString);
    }

    //
    // Scan every <PcdBuildData> entry for a primary key (cName plus token
    // space GUID) matching the requested token; comparison is case-insensitive.
    //
    for (DynamicPcdBuildDefinitions.PcdBuildData pcdBuildData : dynamicPcdBuildDefinitions.getPcdBuildDataList()) {
        //
        // <TokenSpaceGuid> is mandatory for each entry. Once schema
        // verification tooling enforces this, the check can be removed.
        //
        if (pcdBuildData.getTokenSpaceGuid() == null) {
            String exceptionString = String.format("[FPD file error] There is no <TokenSpaceGuid> for PCD %s in <DynamicPcdBuildDefinitions>! This is required!",
                                                   pcdBuildData.getCName());
            throw new EntityException(exceptionString);
        }

        String dynamicPrimaryKey = Token.getPrimaryKeyString(pcdBuildData.getCName(),
                                                             translateSchemaStringToUUID(pcdBuildData.getTokenSpaceGuid()));
        if (dynamicPrimaryKey.equalsIgnoreCase(token.getPrimaryKeyString())) {
            return pcdBuildData;
        }
    }

    //
    // No matching entry; the caller decides whether that is an error.
    //
    return null;
}
/**
Update dynamic information for PCD entry.
Dynamic information is retrieved from <DynamicPcdBuildDefinitions> in
FPD file.
@param moduleName The name of the module who use this PCD
@param token The token instance
@param datum The <datum> in module's PCD information
@param maxDatumSize The <maxDatumSize> in module's PCD information
@return Token
*/
private Token updateDynamicInformation(String moduleName,
                                       Token token,
                                       String datum,
                                       int maxDatumSize)
    throws EntityException {
    int index = 0;
    int offset;
    String exceptionString = null;
    DynamicTokenValue dynamicValue;
    SkuInstance skuInstance = null;
    String temp;
    boolean hasSkuId0 = false;
    Token.PCD_TYPE pcdType = Token.PCD_TYPE.UNKNOWN;
    int tokenNumber = 0;
    String hiiDefaultValue = null;
    String[] variableGuidString = null;

    List<DynamicPcdBuildDefinitions.PcdBuildData.SkuInfo> skuInfoList = null;
    DynamicPcdBuildDefinitions.PcdBuildData dynamicInfo = null;

    //
    // A dynamic PCD used by a module must have a matching entry in the FPD
    // file's <DynamicPcdBuildDefinitions> section.
    //
    dynamicInfo = getDynamicInfoFromFPD(token, moduleName);
    if (dynamicInfo == null) {
        exceptionString = String.format("[FPD file error] For Dynamic PCD %s used by module %s, "+
                                        "there is no dynamic information in <DynamicPcdBuildDefinitions> "+
                                        "in FPD file, but it is required!",
                                        token.cName,
                                        moduleName);
        throw new EntityException(exceptionString);
    }

    //
    // The authoritative datum size comes from the FPD's dynamic section;
    // validate it against the token's datum type.
    //
    token.datumSize = dynamicInfo.getMaxDatumSize();

    exceptionString = verifyDatum(token.cName,
                                  moduleName,
                                  null,
                                  token.datumType,
                                  token.datumSize);
    if (exceptionString != null) {
        throw new EntityException(exceptionString);
    }

    //
    // If the module also declared a max datum size it must agree with the
    // one from <DynamicPcdBuildDefinitions> (token.datumSize was just set
    // from dynamicInfo.getMaxDatumSize() above, so the two %d values in the
    // message refer to the same quantity).
    //
    if ((maxDatumSize != 0) &&
        (maxDatumSize != token.datumSize)) {
        exceptionString = String.format("FPD file error] For dynamic PCD %s, the datum size in module %s is %d, but "+
                                        "the datum size in <DynamicPcdBuildDefinitions> is %d, they are not match!",
                                        token.cName,
                                        moduleName,
                                        maxDatumSize,
                                        dynamicInfo.getMaxDatumSize());
        throw new EntityException(exceptionString);
    }

    //
    // The token number declared by the module must match the one in the
    // dynamic section.
    //
    tokenNumber = Integer.decode(dynamicInfo.getToken().toString());
    if (tokenNumber != token.tokenNumber) {
        exceptionString = String.format("[FPD file error] For dynamic PCD %s, the token number in module %s is 0x%x, but"+
                                        "in <DynamicPcdBuildDefinictions>, the token number is 0x%x, they are not match!",
                                        token.cName,
                                        moduleName,
                                        token.tokenNumber,
                                        tokenNumber);
        throw new EntityException(exceptionString);
    }

    //
    // DynamicEx PCDs additionally record the token number as the
    // dynamic-ex token number.
    //
    pcdType = Token.getpcdTypeFromString(dynamicInfo.getItemType().toString());
    if (pcdType == Token.PCD_TYPE.DYNAMIC_EX) {
        token.dynamicExTokenNumber = tokenNumber;
    }

    skuInfoList = dynamicInfo.getSkuInfoList();

    //
    // Loop all sku data. Each SKU entry is classified as exactly one of:
    // DefaultGroup (<Value>), HII group (<VariableName>...), or VPD group
    // (<VpdOffset>); anything else is an error.
    //
    for (index = 0; index < skuInfoList.size(); index ++) {
        skuInstance = new SkuInstance();
        //
        // Although SkuId in schema is BigInteger, but in fact, sku id is 32 bit value.
        //
        temp = skuInfoList.get(index).getSkuId().toString();
        skuInstance.id = Integer.decode(temp);
        if (skuInstance.id == 0) {
            hasSkuId0 = true;
        }
        //
        // Judge whether this is the DefaultGroup case first, because the
        // DefaultGroup is the most common case.
        //
        if (skuInfoList.get(index).getValue() != null) {
            skuInstance.value.setValue(skuInfoList.get(index).getValue().toString());
            if ((exceptionString = verifyDatum(token.cName,
                                               null,
                                               skuInfoList.get(index).getValue().toString(),
                                               token.datumType,
                                               token.datumSize)) != null) {
                throw new EntityException(exceptionString);
            }

            token.skuData.add(skuInstance);

            //
            // Judge whether the datum from the module's information and the
            // sku-0 datum from the dynamic information are the same.
            //
            if (datum != null) {
                if ((skuInstance.id == 0) &&
                    !datum.toString().equalsIgnoreCase(skuInfoList.get(index).getValue().toString())) {
                    exceptionString = "[FPD file error] For dynamic PCD " + token.cName + ", the value in module " + moduleName + " is " + datum.toString() + " but the "+
                                      "value of sku 0 data in <DynamicPcdBuildDefinition> is " + skuInstance.value.value + ". They are must be same!"+
                                      " or you could not define value for a dynamic PCD in every <ModuleSA>!";
                    throw new EntityException(exceptionString);
                }
            }
            continue;
        }

        //
        // Judge whether this is the HII group case: <VariableName> implies
        // <VariableGuid>, <VariableOffset> and <HiiDefaultValue> must all be
        // present.
        //
        if (skuInfoList.get(index).getVariableName() != null) {
            exceptionString = null;
            if (skuInfoList.get(index).getVariableGuid() == null) {
                exceptionString = String.format("[FPD file error] For dynamic PCD %s in <DynamicPcdBuildDefinitions> section in FPD "+
                                                "file, who use HII, but there is no <VariableGuid> defined for Sku %d data!",
                                                token.cName,
                                                index);
                // The null check is redundant here (exceptionString was just
                // assigned), kept as-is.
                if (exceptionString != null) {
                    throw new EntityException(exceptionString);
                }
            }

            if (skuInfoList.get(index).getVariableOffset() == null) {
                exceptionString = String.format("[FPD file error] For dynamic PCD %s in <DynamicPcdBuildDefinitions> section in FPD "+
                                                "file, who use HII, but there is no <VariableOffset> defined for Sku %d data!",
                                                token.cName,
                                                index);
                if (exceptionString != null) {
                    throw new EntityException(exceptionString);
                }
            }

            if (skuInfoList.get(index).getHiiDefaultValue() == null) {
                exceptionString = String.format("[FPD file error] For dynamic PCD %s in <DynamicPcdBuildDefinitions> section in FPD "+
                                                "file, who use HII, but there is no <HiiDefaultValue> defined for Sku %d data!",
                                                token.cName,
                                                index);
                if (exceptionString != null) {
                    throw new EntityException(exceptionString);
                }
            }

            // By this point getHiiDefaultValue() is non-null (the null case
            // threw above), so the else branch is effectively dead; kept as-is.
            if (skuInfoList.get(index).getHiiDefaultValue() != null) {
                hiiDefaultValue = skuInfoList.get(index).getHiiDefaultValue().toString();
            } else {
                hiiDefaultValue = null;
            }

            if ((exceptionString = verifyDatum(token.cName,
                                               null,
                                               hiiDefaultValue,
                                               token.datumType,
                                               token.datumSize)) != null) {
                throw new EntityException(exceptionString);
            }

            // Variable offsets are limited to 16 bits (64K).
            offset = Integer.decode(skuInfoList.get(index).getVariableOffset());
            if (offset > 0xFFFF) {
                throw new EntityException(String.format("[FPD file error] For dynamic PCD %s , the variable offset defined in sku %d data "+
                                                        "exceed 64K, it is not allowed!",
                                                        token.cName,
                                                        index));
            }

            //
            // Get variable guid string according to the name of guid which will be mapped into a GUID in SPD file.
            //
            variableGuidString = GlobalData.getGuidInfoGuid(skuInfoList.get(index).getVariableGuid().toString());
            if (variableGuidString == null) {
                // NOTE(review): message likely means "can NOT be found in all
                // SPD files" -- wording left unchanged here.
                throw new EntityException(String.format("[GUID Error] For dynamic PCD %s, the variable guid %s can be found in all SPD file!",
                                                        token.cName,
                                                        skuInfoList.get(index).getVariableGuid().toString()));
            }

            // variableGuidString[1] is assumed to hold the GUID value string
            // as returned by GlobalData.getGuidInfoGuid -- confirm against
            // that helper's contract.
            skuInstance.value.setHiiData(skuInfoList.get(index).getVariableName(),
                                         translateSchemaStringToUUID(variableGuidString[1]),
                                         skuInfoList.get(index).getVariableOffset(),
                                         skuInfoList.get(index).getHiiDefaultValue().toString());
            token.skuData.add(skuInstance);
            continue;
        }

        //
        // VPD group case: only the VPD offset is recorded.
        //
        if (skuInfoList.get(index).getVpdOffset() != null) {
            skuInstance.value.setVpdData(skuInfoList.get(index).getVpdOffset());
            token.skuData.add(skuInstance);
            continue;
        }

        exceptionString = String.format("[FPD file error] For dynamic PCD %s, the dynamic info must "+
                                        "be one of 'DefaultGroup', 'HIIGroup', 'VpdGroup'.",
                                        token.cName);
        throw new EntityException(exceptionString);
    }

    //
    // Every dynamic PCD must supply data for the default SKU (id 0).
    //
    if (!hasSkuId0) {
        exceptionString = String.format("[FPD file error] For dynamic PCD %s in <DynamicPcdBuildDefinitions>, there are "+
                                        "no sku id = 0 data, which is required for every dynamic PCD",
                                        token.cName);
        throw new EntityException(exceptionString);
    }

    return token;
}
/**
Translate the schema string to UUID instance.
In schema, the string of UUID is defined as following two types string:
1) GuidArrayType: pattern = 0x[a-fA-F0-9]{1,8},( )*0x[a-fA-F0-9]{1,4},(
)*0x[a-fA-F0-9]{1,4}(,( )*\{)?(,?( )*0x[a-fA-F0-9]{1,2}){8}( )*(\})?
2) GuidNamingConvention: pattern =
[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}
This function will convert string and create uuid instance.
@param uuidString UUID string in XML file
@return UUID UUID instance
**/
private UUID translateSchemaStringToUUID(String uuidString)
    throws EntityException {
    //
    // Missing or empty input yields no UUID at all.
    //
    if (uuidString == null) {
        return null;
    }
    if (uuidString.length() == 0) {
        return null;
    }

    //
    // Plain zero shorthand maps to the nil UUID.
    //
    if (uuidString.equals("0") ||
        uuidString.equalsIgnoreCase("0x0")) {
        return new UUID(0, 0);
    }

    //
    // Strip the optional braces used by the GuidArrayType form.
    //
    uuidString = uuidString.replaceAll("\\{", "");
    uuidString = uuidString.replaceAll("\\}", "");

    //
    // A leading "0x"/"0X" marks the GuidArrayType form, which must first be
    // converted into the GuidNamingConvention form before UUID.fromString
    // can consume it.
    //
    if ((uuidString.charAt(0) == '0') && ((uuidString.charAt(1) == 'x') || (uuidString.charAt(1) == 'X'))) {
        String[] parts = uuidString.split(",");
        if (parts.length != 11) {
            throw new EntityException ("[FPD file error] Wrong format for UUID string: " + uuidString);
        }

        //
        // Trim surrounding blanks and drop the "0x" prefix from each part.
        //
        for (int i = 0; i < 11; i++) {
            parts[i] = parts[i].trim().substring(2);
        }

        //
        // Zero-pad the eight byte-sized parts to two hex digits each.
        //
        for (int i = 3; i < 11; i++) {
            while (parts[i].length() < 2) {
                parts[i] = "0" + parts[i];
            }
        }

        //
        // Assemble the 8-4-4-4-12 GuidNamingConvention string.
        //
        uuidString = String.format("%s-%s-%s-%s%s-%s%s%s%s%s%s",
                                   parts[0],
                                   parts[1],
                                   parts[2],
                                   parts[3],
                                   parts[4],
                                   parts[5],
                                   parts[6],
                                   parts[7],
                                   parts[8],
                                   parts[9],
                                   parts[10]);
    }

    return UUID.fromString(uuidString);
}
/**
check parameter for this action.
@throws EntityException Bad parameter.
**/
/**
  Validate this action's parameters before execution.

  Both workspacePath and fpdFilePath must be set to non-empty values and
  must refer to paths that exist on disk.

  @throws EntityException if either parameter is null/empty or the path
          does not exist
**/
private void checkParameter() throws EntityException {
    File file = null;

    //
    // Both parameters are mandatory. (The previous messages read "should be
    // blank", inverting the intended meaning, and misspelled the action
    // name; both are fixed here.)
    //
    if ((fpdFilePath == null) || (workspacePath == null)) {
        throw new EntityException("WorkspacePath and FPDFileName should not be blank for CollectPCDAction!");
    }

    if (fpdFilePath.length() == 0 || workspacePath.length() == 0) {
        throw new EntityException("WorkspacePath and FPDFileName should not be blank for CollectPCDAction!");
    }

    file = new File(workspacePath);
    if (!file.exists()) {
        throw new EntityException("WorkspacePath " + workspacePath + " does not exist!");
    }

    file = new File(fpdFilePath);
    if (!file.exists()) {
        throw new EntityException("FPD File " + fpdFilePath + " does not exist!");
    }
}
/**
Test case function
@param argv parameter from command line
**/
public static void main(String argv[]) throws EntityException {
    //
    // Hard-coded smoke-test drive of the action against a local workspace.
    //
    CollectPCDAction collectAction = new CollectPCDAction();

    collectAction.setWorkspacePath("m:/tianocore/edk2");
    collectAction.setFPDFilePath("m:/tianocore/edk2/EdkNt32Pkg/Nt32.fpd");
    collectAction.setActionMessageLevel(ActionMessage.MAX_MESSAGE_LEVEL);

    //
    // The framework database must be initialized before the action runs.
    //
    GlobalData.initInfo("Tools" + File.separator + "Conf" + File.separator + "FrameworkDatabase.db",
                        "m:/tianocore/edk2");

    collectAction.execute();
}
}
|
Tools/Source/GenBuild/org/tianocore/build/pcd/action/CollectPCDAction.java
|
/** @file
CollectPCDAction class.
This action class is to collect PCD information from MSA, SPD, FPD xml file.
This class will be used for wizard and build tools, So it can *not* inherit
from buildAction or wizardAction.
Copyright (c) 2006, Intel Corporation
All rights reserved. This program and the accompanying materials
are licensed and made available under the terms and conditions of the BSD License
which accompanies this distribution. The full text of the license may be found at
http://opensource.org/licenses/bsd-license.php
THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
**/
package org.tianocore.build.pcd.action;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.math.BigInteger;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.UUID;
import org.apache.xmlbeans.XmlException;
import org.apache.xmlbeans.XmlObject;
import org.tianocore.DynamicPcdBuildDefinitionsDocument.DynamicPcdBuildDefinitions;
import org.tianocore.FrameworkModulesDocument;
import org.tianocore.FrameworkPlatformDescriptionDocument;
import org.tianocore.ModuleSADocument;
import org.tianocore.PcdBuildDefinitionDocument.PcdBuildDefinition;
import org.tianocore.build.global.GlobalData;
import org.tianocore.build.global.SurfaceAreaQuery;
import org.tianocore.build.pcd.action.ActionMessage;
import org.tianocore.build.pcd.entity.DynamicTokenValue;
import org.tianocore.build.pcd.entity.MemoryDatabaseManager;
import org.tianocore.build.pcd.entity.SkuInstance;
import org.tianocore.build.pcd.entity.Token;
import org.tianocore.build.pcd.entity.UsageInstance;
import org.tianocore.build.pcd.exception.EntityException;
import org.tianocore.ModuleTypeDef;
/**
  Describes one member of the generated C PCD-database structure: a lookup
  key, the member's required alignment in bytes, the C declaration text, and
  whether the member carries an initialization value.
**/
class CStructTypeDeclaration {
    String key;
    int alignmentSize;
    String cCode;
    boolean initTable;

    public CStructTypeDeclaration (String key, int alignmentSize, String cCode, boolean initTable) {
        this.key           = key;
        this.alignmentSize = alignmentSize;
        this.cCode         = cCode;
        this.initTable     = initTable;
    }
}
/**
  Accumulates the Unicode strings referenced by string-type PCDs and renders
  them as the StringTable member of the generated C PCD database.
**/
class StringTable {
    // Registered strings in table order (L"..." quoting already stripped).
    private ArrayList<String> al;
    // Primary-key comment for each string, parallel to 'al'.
    private ArrayList<String> alComments;
    // PCD database phase name used in generated macros ("PEI" or "DXE").
    private String phase;
    // Total UINT16 slots consumed so far (each string plus its NUL); also
    // the offset at which the next string will start.
    int len;

    public StringTable (String phase) {
        this.phase = phase;
        al = new ArrayList<String>();
        alComments = new ArrayList<String>();
        len = 0;
    }

    /** Returns the C macro defining this phase's string table size. */
    public String getSizeMacro () {
        return String.format(PcdDatabase.StringTableSizeMacro, phase, getSize());
    }

    private int getSize () {
        //
        // We have at least one Unicode Character in the table even when it
        // is empty, so the generated C array is never zero-length.
        //
        return len == 0 ? 1 : len;
    }

    /** Number of strings; at least 1 so an empty table still has one slot. */
    public int getTableLen () {
        return al.size() == 0 ? 1 : al.size();
    }

    /** Returns the C macro telling whether this phase's string table is empty. */
    public String getExistanceMacro () {
        return String.format(PcdDatabase.StringTableExistenceMacro, phase, (al.size() == 0)? "TRUE":"FALSE");
    }

    /**
      Emits the StringTable C declaration into declaList and its initializer
      into instTable. An empty table produces a single zeroed UINT16 slot.
    **/
    public void genCodeNew (ArrayList<CStructTypeDeclaration> declaList, HashMap<String, String> instTable) {
        final String stringTable = "StringTable";
        final String tab = "\t";
        final String newLine = "\r\n";
        final String commaNewLine = ",\r\n";

        CStructTypeDeclaration decl;
        String cDeclCode = "";
        String cInstCode = "";

        //
        // If we have an empty StringTable
        //
        if (al.size() == 0) {
            cDeclCode += tab + String.format("UINT16 %s[1]; /* StringTable is Empty */", stringTable) + newLine;
            decl = new CStructTypeDeclaration (
                                                stringTable,
                                                2,
                                                cDeclCode,
                                                true
                                              );
            declaList.add(decl);

            cInstCode = tab + " { 0 } " + String.format("/* %s */", stringTable) + commaNewLine;
            instTable.put(stringTable, cInstCode);
        } else {
            //
            // If there is any String in the StringTable: the first member is
            // named "StringTable" (a well-known name in the PCD DXE driver),
            // subsequent members are "StringTable_<i>".
            //
            for (int i = 0; i < al.size(); i++) {
                String str = al.get(i);
                String stringTableName;

                if (i == 0) {
                    stringTableName = stringTable;
                } else {
                    stringTableName = String.format("%s_%d", stringTable, i);
                    cDeclCode += tab;
                }
                cDeclCode += String.format("UINT16 %s[%d]; /* %s */", stringTableName, str.length() + 1, alComments.get(i)) + newLine;

                if (i == 0) {
                    cInstCode = "/* StringTable */" + newLine;
                }
                cInstCode += tab + String.format("L\"%s\" /* %s */", al.get(i), alComments.get(i));
                if (i != al.size() - 1) {
                    cInstCode += commaNewLine;
                }
            }

            decl = new CStructTypeDeclaration (
                                                stringTable,
                                                2,
                                                cDeclCode,
                                                true
                                              );
            declaList.add(decl);

            instTable.put(stringTable, cInstCode);
        }
    }

    /** Builds the C type declaration text for all string table members. */
    public String getTypeDeclaration () {
        String output;

        final String stringTable = "StringTable";
        final String tab = "\t";
        final String newLine = ";\r\n";

        output = "/* StringTable */\r\n";

        if (al.size() == 0) {
            output += tab + String.format("UINT16 %s[1] /* StringTable is Empty */", stringTable) + newLine;
        }

        for (int i = 0; i < al.size(); i++) {
            String str = al.get(i);

            if (i == 0) {
                //
                // StringTable is a well-known name in the PCD DXE driver
                //
                output += tab + String.format("UINT16 %s[%d] /* %s */", stringTable, str.length() + 1, alComments.get(i)) + newLine;
            } else {
                output += tab + String.format("UINT16 %s_%d[%d] /* %s */", stringTable, i, str.length() + 1, alComments.get(i)) + newLine;
            }
        }

        return output;
    }

    /** Builds the C initializer lines for the string table. */
    public ArrayList<String> getInstantiation () {
        ArrayList<String> output = new ArrayList<String>();

        output.add("/* StringTable */");

        if (al.size() == 0) {
            output.add("{ 0 }");
        } else {
            String str;

            for (int i = 0; i < al.size(); i++) {
                str = String.format("L\"%s\" /* %s */", al.get(i), alComments.get(i));
                if (i != al.size() - 1) {
                    str += ",";
                }
                output.add(str);
            }
        }

        return output;
    }

    /**
      Registers a string and returns its UINT16 offset in the table,
      de-duplicating against strings already present.

      @param inputStr the string, either bare ("Bootmode") or L-quoted
                      (L"Bootmode"); the L"..." wrapper is stripped
      @param token    the owning token, used only for the comment list
      @return the offset of the string within the table
    **/
    public int add (String inputStr, Token token) {
        int i;
        int pos;

        String str = inputStr;

        //
        // The input can be two types:
        // "L\"Bootmode\"" or "Bootmode".
        // We drop the L\" and \" for the first type.
        //
        if (str.startsWith("L\"") && str.endsWith("\"")) {
            str = str.substring(2, str.length() - 1);
        }

        //
        // Check if StringTable has this String already.
        // If so, return the offset of the existing copy.
        //
        for (i = 0, pos = 0; i < al.size(); i++) {
            String s = al.get(i);

            if (str.equals(s)) {
                return pos;
            }
            //
            // BUG FIX: accumulate the running offset. The previous code used
            // "pos = s.length() + 1", which only counted the last entry seen
            // and so returned a wrong offset for any match after the second
            // entry.
            //
            pos += s.length() + 1;
        }

        i = len;
        //
        // Include the NULL character at the end of String
        //
        len += str.length() + 1;
        al.add(str);
        alComments.add(token.getPrimaryKeyString());

        return i;
    }
}
/**
  Records the datum size of each registered token and renders the SizeTable
  member of the generated C PCD database.
**/
class SizeTable {
    // Datum sizes, one entry per token, in registration order.
    private ArrayList<Integer> sizes;
    // Primary-key comment per entry, parallel to 'sizes'.
    private ArrayList<String> sizeComments;
    // PCD database phase name used in generated macros ("PEI" or "DXE").
    private String phase;
    // Count of registered tokens.
    private int count;

    public SizeTable (String phase) {
        this.phase = phase;
        sizes = new ArrayList<Integer>();
        sizeComments = new ArrayList<String>();
        count = 0;
    }

    /**
      Emits the SizeTable C declaration into declaList and its initializer
      into instTable.
    **/
    public void genCodeNew (ArrayList<CStructTypeDeclaration> declaList, HashMap<String, String> instTable, String phase) {
        final String name = "SizeTable";

        String declCode = String.format(PcdDatabase.SizeTableDeclaration, phase);
        declaList.add(new CStructTypeDeclaration(name, 2, declCode, true));

        instTable.put(name, PcdDatabase.genInstantiationStr(getInstantiation()));
    }

    public String getTypeDeclaration () {
        return String.format(PcdDatabase.SizeTableDeclaration, phase);
    }

    /** Builds the C initializer lines; an empty table emits a single 0. */
    public ArrayList<String> getInstantiation () {
        ArrayList<String> output = new ArrayList<String>();

        output.add("/* SizeTable */");
        output.add("{");

        if (sizes.isEmpty()) {
            output.add("0");
        } else {
            for (int i = 0; i < sizes.size(); i++) {
                String line = "\t" + sizes.get(i).toString();
                if (i != sizes.size() - 1) {
                    line += ",";
                }
                line += " /* " + sizeComments.get(i) + " */";
                output.add(line);
            }
        }

        output.add("}");
        return output;
    }

    /** Registers a token's datum size; returns its index in the table. */
    public int add (Token token) {
        int index = count;

        count++;
        sizes.add(token.datumSize);
        sizeComments.add(token.getPrimaryKeyString());

        return index;
    }

    /** Number of entries; at least 1 so an empty table still has one slot. */
    public int getTableLen () {
        return sizes.isEmpty() ? 1 : sizes.size();
    }
}
/**
  Collects the GUIDs referenced by PCD tokens (de-duplicated) and renders the
  GuidTable member of the generated C PCD database.
**/
class GuidTable {
    // Registered GUIDs in table order.
    private ArrayList<UUID> guids;
    // Name comment per GUID, parallel to 'guids'.
    private ArrayList<String> guidComments;
    // PCD database phase name used in generated macros ("PEI" or "DXE").
    private String phase;
    // Count of registered GUIDs.
    private int count;
    // Number of body lines emitted by getInstantiation.
    private int bodyLineNum;

    public GuidTable (String phase) {
        this.phase = phase;
        guids = new ArrayList<UUID>();
        guidComments = new ArrayList<String>();
        count = 0;
        bodyLineNum = 0;
    }

    /** Returns the C macro defining this phase's GUID table size. */
    public String getSizeMacro () {
        return String.format(PcdDatabase.GuidTableSizeMacro, phase, getSize());
    }

    // Size is at least 1 so an empty table still declares one element.
    private int getSize () {
        return guids.isEmpty() ? 1 : guids.size();
    }

    /** Returns the C macro telling whether this phase's GUID table is empty. */
    public String getExistanceMacro () {
        return String.format(PcdDatabase.GuidTableExistenceMacro, phase, guids.isEmpty() ? "TRUE" : "FALSE");
    }

    /**
      Emits the GuidTable C declaration into declaList and its initializer
      into instTable.
    **/
    public void genCodeNew (ArrayList<CStructTypeDeclaration> declaList, HashMap<String, String> instTable, String phase) {
        final String name = "GuidTable";

        String declCode = String.format(PcdDatabase.GuidTableDeclaration, phase);
        declaList.add(new CStructTypeDeclaration(name, 8, declCode, true));

        instTable.put(name, PcdDatabase.genInstantiationStr(getInstantiation()));
    }

    public String getTypeDeclaration () {
        return String.format(PcdDatabase.GuidTableDeclaration, phase);
    }

    // Renders one UUID as a C EFI_GUID initializer.
    private String getUuidCString (UUID uuid) {
        String[] part = uuid.toString().split("-");

        return String.format("{ 0x%s, 0x%s, 0x%s, { 0x%s, 0x%s, 0x%s, 0x%s, 0x%s, 0x%s, 0x%s, 0x%s } }",
                             part[0],
                             part[1],
                             part[2],
                             part[3].substring(0, 2),
                             part[3].substring(2, 4),
                             part[4].substring(0, 2),
                             part[4].substring(2, 4),
                             part[4].substring(4, 6),
                             part[4].substring(6, 8),
                             part[4].substring(8, 10),
                             part[4].substring(10, 12)
                             );
    }

    /** Builds the C initializer lines; an empty table emits one nil GUID. */
    public ArrayList<String> getInstantiation () {
        ArrayList<String> output = new ArrayList<String>();

        output.add("/* GuidTable */");
        output.add("{");

        if (guids.isEmpty()) {
            output.add(getUuidCString(new UUID(0, 0)));
        }
        for (int i = 0; i < guids.size(); i++) {
            String line = "\t" + getUuidCString(guids.get(i));
            line += "/* " + guidComments.get(i) + " */";
            if (i != guids.size() - 1) {
                line += ",";
            }
            output.add(line);
            bodyLineNum++;
        }

        output.add("}");
        return output;
    }

    /**
      Registers a GUID, reusing an existing entry when the same GUID was
      added before. Returns the GUID's table index.
    **/
    public int add (UUID uuid, String name) {
        for (int i = 0; i < guids.size(); i++) {
            if (guids.get(i).equals(uuid)) {
                return i;
            }
        }

        count++;
        guids.add(uuid);
        guidComments.add(name);

        //
        // Index of the entry just appended.
        //
        return count - 1;
    }

    // NOTE(review): unlike the sibling tables, which report 1 for an empty
    // table (matching the dummy entry emitted by getInstantiation), this
    // returns 0 when empty -- behavior preserved; confirm callers expect it.
    public int getTableLen () {
        return guids.isEmpty() ? 0 : guids.size();
    }
}
/**
  Collects, per token, the array of SKU ids it supports (de-duplicated) and
  renders the SkuIdTable member of the generated C PCD database.
**/
class SkuIdTable {
    // Each element is an Integer[]: slot 0 holds the SKU count for one token,
    // the remaining slots hold that token's SKU ids.
    private ArrayList<Integer[]> al;
    // Primary-key comment for each entry, parallel to 'al'.
    private ArrayList<String> alComment;
    // PCD database phase name used in generated macros ("PEI" or "DXE").
    private String phase;
    // Total number of slots consumed so far; also the offset at which the
    // next entry will start.
    private int len;

    public SkuIdTable (String phase) {
        this.phase = phase;
        al = new ArrayList<Integer[]>();
        alComment = new ArrayList<String>();
        len = 0;
    }

    /** Returns the C macro defining this phase's SkuIdTable size. */
    public String getSizeMacro () {
        return String.format(PcdDatabase.SkuIdTableSizeMacro, phase, getSize());
    }

    // Size is at least 1 so an empty table still declares one element.
    private int getSize () {
        return (len == 0)? 1 : len;
    }

    /** Returns the C macro telling whether this phase's SkuIdTable is empty. */
    public String getExistanceMacro () {
        return String.format(PcdDatabase.SkuTableExistenceMacro, phase, (al.size() == 0)? "TRUE":"FALSE");
    }

    /**
      Emits the SkuIdTable C declaration into declaList and its initializer
      into instTable. For the PEI phase it additionally emits the
      SystemSkuId member, which lives in the PEI PCD database.
    **/
    public void genCodeNew (ArrayList<CStructTypeDeclaration> declaList, HashMap<String, String> instTable, String phase) {
        final String name = "SkuIdTable";

        CStructTypeDeclaration decl;
        String cCode = "";

        cCode += String.format(PcdDatabase.SkuIdTableDeclaration, phase);
        decl = new CStructTypeDeclaration (
                                            name,
                                            1,
                                            cCode,
                                            true
                                          );
        declaList.add(decl);

        cCode = PcdDatabase.genInstantiationStr(getInstantiation());
        instTable.put(name, cCode);

        //
        // SystemSkuId is in PEI phase PCD Database
        //
        if (phase.equalsIgnoreCase("PEI")) {
            decl = new CStructTypeDeclaration (
                                                "SystemSkuId",
                                                1,
                                                "SKU_ID SystemSkuId;\r\n",
                                                true
                                              );
            declaList.add(decl);

            instTable.put("SystemSkuId", "0");
        }
    }

    public String getTypeDeclaration () {
        return String.format(PcdDatabase.SkuIdTableDeclaration, phase);
    }

    /** Builds the C initializer lines; an empty table emits a single 0. */
    public ArrayList<String> getInstantiation () {
        ArrayList<String> Output = new ArrayList<String> ();

        Output.add("/* SkuIdTable */");
        Output.add("{");

        if (al.size() == 0) {
            Output.add("\t0");
        }

        for (int index = 0; index < al.size(); index++) {
            String str;

            str = "/* " + alComment.get(index) + "*/ ";
            str += "/* MaxSku */ ";

            // Slot 0 is the SKU count; the remaining slots are the SKU ids.
            Integer[] ia = al.get(index);
            str += "\t" + ia[0].toString() + ", ";
            for (int index2 = 1; index2 < ia.length; index2++) {
                str += ia[index2].toString();
                // Omit the trailing comma only after the very last id of the
                // very last entry.
                if (!((index2 == ia.length - 1) && (index == al.size() - 1))) {
                    str += ", ";
                }
            }

            Output.add(str);
        }

        Output.add("}");
        return Output;
    }

    /**
      Registers a token's SKU-id array, de-duplicating against arrays already
      present; returns the offset of the entry within the table.
    **/
    public int add (Token token) {
        int index;
        int pos;

        //
        // Check if this SKU_ID Array is already in the table
        //
        pos = 0;
        for (Object o: al) {
            Integer [] s = (Integer[]) o;
            boolean different = false;
            // NOTE(review): 's[0]' is a boxed Integer; this comparison relies
            // on auto-unboxing against getSkuIdCount() -- if that getter
            // returned Integer rather than int, '==' would be a reference
            // comparison. Confirm the getter returns a primitive.
            if (s[0] == token.getSkuIdCount()) {
                for (index = 1; index < s.length; index++) {
                    // NOTE(review): same unboxing assumption for 's[index]'
                    // versus SkuInstance.id -- confirm 'id' is a primitive.
                    if (s[index] != token.skuData.get(index-1).id) {
                        different = true;
                        break;
                    }
                }
            } else {
                different = true;
            }
            if (different) {
                // Skip past this entry: its count slot plus its id slots.
                pos += s[0] + 1;
            } else {
                return pos;
            }
        }

        // Build a new entry: [count, id0, id1, ...].
        Integer [] skuIds = new Integer[token.skuData.size() + 1];
        skuIds[0] = new Integer(token.skuData.size());
        for (index = 1; index < skuIds.length; index++) {
            skuIds[index] = new Integer(token.skuData.get(index - 1).id);
        }

        index = len;
        len += skuIds.length;
        al.add(skuIds);
        alComment.add(token.getPrimaryKeyString());

        return index;
    }

    /** Number of entries; at least 1 so an empty table still has one slot. */
    public int getTableLen () {
        return al.size() == 0 ? 1 : al.size();
    }
}
/**
  Collects, per token, the offsetof()-based entry describing where the
  token's data lives in the C PCD database, plus its type flags, and renders
  the LocalTokenNumberTable member.
**/
class LocalTokenNumberTable {
    // offsetof()-expression entry (with type flags) per registered token.
    private ArrayList<String> entries;
    // Primary-key comment per entry, parallel to 'entries'.
    private ArrayList<String> entryComments;
    // PCD database phase name used in generated macros ("PEI" or "DXE").
    private String phase;
    // Count of registered tokens.
    private int count;

    public LocalTokenNumberTable (String phase) {
        this.phase = phase;
        entries = new ArrayList<String>();
        entryComments = new ArrayList<String>();
        count = 0;
    }

    /** Returns the size macros for this phase's local token number table. */
    public String getSizeMacro () {
        return String.format(PcdDatabase.LocalTokenNumberTableSizeMacro, phase, getSize())
             + String.format(PcdDatabase.LocalTokenNumberSizeMacro, phase, entries.size());
    }

    /** Table size; an empty table still occupies one slot. */
    public int getSize () {
        return entries.isEmpty() ? 1 : entries.size();
    }

    /** Returns the database-empty macro for this phase. */
    public String getExistanceMacro () {
        return String.format(PcdDatabase.DatabaseExistenceMacro, phase, entries.isEmpty() ? "TRUE" : "FALSE");
    }

    /**
      Emits the table's C declaration into declaList and its initializer
      into instTable.
    **/
    public void genCodeNew (ArrayList<CStructTypeDeclaration> declaList, HashMap<String, String> instTable, String phase) {
        final String name = "LocalTokenNumberTable";

        String declCode = String.format(PcdDatabase.LocalTokenNumberTableDeclaration, phase);
        declaList.add(new CStructTypeDeclaration(name, 4, declCode, true));

        instTable.put(name, PcdDatabase.genInstantiationStr(getInstantiation()));
    }

    public String getTypeDeclaration () {
        return String.format(PcdDatabase.LocalTokenNumberTableDeclaration, phase);
    }

    /** Builds the C initializer lines; an empty table emits a single 0. */
    public ArrayList<String> getInstantiation () {
        ArrayList<String> output = new ArrayList<String>();

        output.add("/* LocalTokenNumberTable */");
        output.add("{");

        if (entries.isEmpty()) {
            output.add("0");
        }
        for (int i = 0; i < entries.size(); i++) {
            String line = "\t" + entries.get(i) + " /* " + entryComments.get(i) + " */ ";
            if (i != entries.size() - 1) {
                line += ",";
            }
            output.add(line);
        }

        output.add("}");
        return output;
    }

    /**
      Registers a token. The generated entry is the offsetof() expression of
      the token's data member, OR'ed with flags describing string, SKU, HII
      and VPD characteristics. Returns the token's index in the table.
    **/
    public int add (Token token) {
        int index = count;
        count++;

        String entry = String.format(PcdDatabase.offsetOfStrTemplate, phase, token.hasDefaultValue() ? "Init" : "Uninit", token.getPrimaryKeyString());

        if (token.isUnicodeStringType()) {
            entry += " | PCD_TYPE_STRING";
        }
        if (token.isSkuEnable()) {
            entry += " | PCD_TYPE_SKU_ENABLED";
        }
        if (token.getDefaultSku().type == DynamicTokenValue.VALUE_TYPE.HII_TYPE) {
            entry += " | PCD_TYPE_HII";
        }
        if (token.getDefaultSku().type == DynamicTokenValue.VALUE_TYPE.VPD_TYPE) {
            entry += " | PCD_TYPE_VPD";
        }

        entries.add(entry);
        entryComments.add(token.getPrimaryKeyString());

        return index;
    }
}
/**
  Collects the DynamicEx mapping triplets (GUID table index, dynamic-ex token
  number, local token number index) and renders the ExMapTable member of the
  generated C PCD database. Entries are sorted by (GUID index, token number)
  before emission.
**/
class ExMapTable {

    /**
      One row of the DynamicEx mapping table.
    **/
    class ExTriplet {
        public Integer guidTableIdx;
        public Long exTokenNumber;
        public Long localTokenIdx;

        public ExTriplet (int guidTableIdx, long exTokenNumber, long localTokenIdx) {
            this.guidTableIdx = new Integer(guidTableIdx);
            this.exTokenNumber = new Long(exTokenNumber);
            this.localTokenIdx = new Long(localTokenIdx);
        }
    }

    // Triplets in registration order (sorted in genCodeNew before emission).
    private ArrayList<ExTriplet> al;
    // Primary-key comment per triplet, parallel to 'al'.
    private ArrayList<String> alComment;
    // PCD database phase name used in generated macros ("PEI" or "DXE").
    private String phase;
    // Count of registered triplets.
    private int len;
    // Number of body lines emitted by getInstantiation.
    private int bodyLineNum;

    public ExMapTable (String phase) {
        this.phase = phase;
        al = new ArrayList<ExTriplet>();
        alComment = new ArrayList<String>();
        bodyLineNum = 0;
        len = 0;
    }

    /** Returns the size macros for this phase's ExMapTable. */
    public String getSizeMacro () {
        return String.format(PcdDatabase.ExMapTableSizeMacro, phase, getTableLen())
             + String.format(PcdDatabase.ExTokenNumber, phase, al.size());
    }

    /** Returns the table-empty macro for this phase. */
    public String getExistanceMacro () {
        return String.format(PcdDatabase.ExMapTableExistenceMacro, phase, (al.size() == 0)? "TRUE":"FALSE");
    }

    /**
      Sorts the table, then emits its C declaration into declaList and its
      initializer into instTable.
    **/
    public void genCodeNew (ArrayList<CStructTypeDeclaration> declaList, HashMap<String, String> instTable, String phase) {
        final String exMapTableName = "ExMapTable";

        //
        // Sort before emission so the PCD PEIM/driver can walk the table in
        // (guid, exTokenNumber) order.
        //
        sortTable();

        CStructTypeDeclaration decl;
        String cCode = "";

        cCode += String.format(PcdDatabase.ExMapTableDeclaration, phase);
        decl = new CStructTypeDeclaration (
                                            exMapTableName,
                                            4,
                                            cCode,
                                            true
                                          );
        declaList.add(decl);

        cCode = PcdDatabase.genInstantiationStr(getInstantiation());
        instTable.put(exMapTableName, cCode);
    }

    public String getTypeDeclaration () {
        return String.format(PcdDatabase.ExMapTableDeclaration, phase);
    }

    /** Builds the C initializer lines; an empty table emits one zero row. */
    public ArrayList<String> getInstantiation () {
        ArrayList<String> Output = new ArrayList<String>();

        Output.add("/* ExMapTable */");
        Output.add("{");
        if (al.size() == 0) {
            Output.add("\t{0, 0, 0}");
        }

        int index;
        for (index = 0; index < al.size(); index++) {
            String str;
            ExTriplet e = (ExTriplet)al.get(index);

            str = "\t" + "{ " + String.format("0x%08X", e.exTokenNumber) + ", ";
            str += e.localTokenIdx.toString() + ", ";
            str += e.guidTableIdx.toString();

            str += "}" + " /* " + alComment.get(index) + " */" ;

            if (index != al.size() - 1) {
                str += ",";
            }

            Output.add(str);
            bodyLineNum++;
        }

        Output.add("}");
        return Output;
    }

    /** Appends a triplet; returns its (pre-sort) index in the table. */
    public int add (int localTokenIdx, long exTokenNum, int guidTableIdx, String name) {
        int index = len;

        len++;
        al.add(new ExTriplet(guidTableIdx, exTokenNum, localTokenIdx));
        alComment.add(name);

        return index;
    }

    /** Number of entries; at least 1 so an empty table still has one slot. */
    public int getTableLen () {
        return al.size() == 0 ? 1 : al.size();
    }

    //
    // To simplify the algorithm for GetNextToken and GetNextTokenSpace in
    // PCD PEIM/Driver, we need to sort the ExMapTable according to the
    // following order:
    // 1) ExGuid
    // 2) ExTokenNumber
    //
    class ExTripletComp implements Comparator<ExTriplet> {
        public int compare (ExTriplet a, ExTriplet b) {
            //
            // BUG FIX: the previous code compared the boxed guidTableIdx
            // values with '==' (reference identity -- always false for
            // values built with 'new Integer'), and repeated the '>' test in
            // both branches of the token-number comparison, so it could
            // never return a negative result for entries with the same GUID.
            // Compare both keys by value instead: primary key is the GUID
            // table index, secondary key is the dynamic-ex token number.
            //
            int guidCmp = a.guidTableIdx.compareTo(b.guidTableIdx);
            if (guidCmp != 0) {
                return guidCmp;
            }
            return a.exTokenNumber.compareTo(b.exTokenNumber);
        }
    }

    private void sortTable () {
        java.util.Comparator<ExTriplet> comparator = new ExTripletComp();
        java.util.Collections.sort(al, comparator);
    }
}
class PcdDatabase {
// Alignment (in bytes) of the SKU_HEAD member in the generated structures.
private final static int SkuHeadAlignmentSize = 4;
// Formatting constants for the emitted C source (CRLF line endings).
private final String newLine = "\r\n";
private final String commaNewLine = ",\r\n";
private final String tab = "\t";
//
// C declaration templates; %s is the phase prefix (PEI/DXE) and the array
// bounds reference the *_SIZE macros generated below.
//
public final static String ExMapTableDeclaration = "DYNAMICEX_MAPPING ExMapTable[%s_EXMAPPING_TABLE_SIZE];\r\n";
public final static String GuidTableDeclaration = "EFI_GUID GuidTable[%s_GUID_TABLE_SIZE];\r\n";
public final static String LocalTokenNumberTableDeclaration = "UINT32 LocalTokenNumberTable[%s_LOCAL_TOKEN_NUMBER_TABLE_SIZE];\r\n";
public final static String StringTableDeclaration = "UINT16 StringTable[%s_STRING_TABLE_SIZE];\r\n";
public final static String SizeTableDeclaration = "UINT16 SizeTable[%s_LOCAL_TOKEN_NUMBER_TABLE_SIZE];\r\n";
public final static String SkuIdTableDeclaration = "UINT8 SkuIdTable[%s_SKUID_TABLE_SIZE];\r\n";
//
// #define templates for table sizes; %s is the phase prefix, %d the size.
//
public final static String ExMapTableSizeMacro = "#define %s_EXMAPPING_TABLE_SIZE %d\r\n";
public final static String ExTokenNumber = "#define %s_EX_TOKEN_NUMBER %d\r\n";
public final static String GuidTableSizeMacro = "#define %s_GUID_TABLE_SIZE %d\r\n";
public final static String LocalTokenNumberTableSizeMacro = "#define %s_LOCAL_TOKEN_NUMBER_TABLE_SIZE %d\r\n";
public final static String LocalTokenNumberSizeMacro = "#define %s_LOCAL_TOKEN_NUMBER %d\r\n";
public final static String StringTableSizeMacro = "#define %s_STRING_TABLE_SIZE %d\r\n";
public final static String SkuIdTableSizeMacro = "#define %s_SKUID_TABLE_SIZE %d\r\n";
//
// #define templates flagging whether each table is empty (TRUE/FALSE).
//
public final static String ExMapTableExistenceMacro = "#define %s_EXMAP_TABLE_EMPTY %s\r\n";
public final static String GuidTableExistenceMacro = "#define %s_GUID_TABLE_EMPTY %s\r\n";
public final static String DatabaseExistenceMacro = "#define %s_DATABASE_EMPTY %s\r\n";
public final static String StringTableExistenceMacro = "#define %s_STRING_TABLE_EMPTY %s\r\n";
public final static String SkuTableExistenceMacro = "#define %s_SKUID_TABLE_EMPTY %s\r\n";
//
// offsetof() expression templates used in generated initializers.
//
public final static String offsetOfSkuHeadStrTemplate = "offsetof(%s_PCD_DATABASE, %s.%s_SkuDataTable)";
public final static String offsetOfVariableEnabledDefault = "offsetof(%s_PCD_DATABASE, %s.%s_VariableDefault_%d)";
public final static String offsetOfStrTemplate = "offsetof(%s_PCD_DATABASE, %s.%s)";
// Suffix appended to the symbol of a SKU-enabled token's data table.
private final static String skuDataTableTemplate = "SkuDataTable";
//
// Sub-tables that make up the PCD database for this phase.
//
private StringTable stringTable;
private GuidTable guidTable;
private LocalTokenNumberTable localTokenNumberTable;
private SkuIdTable skuIdTable;
private SizeTable sizeTable;
private ExMapTable exMapTable;
// All dynamic tokens belonging to this phase.
private ArrayList<Token> alTokens;
// "PEI" or "DXE"; prefixes every generated symbol and macro.
private String phase;
// Next token number to hand out; continues across both phases.
private int assignedTokenNumber;
//
// Use two class global variable to store
// temperary
//
// Scratch outputs of getCDeclarationString(): the private C symbol name
// and its declaration text for the token most recently processed.
private String privateGlobalName;
private String privateGlobalCCode;
//
// After Major changes done to the PCD
// database generation class PcdDatabase
// Please increment the version and please
// also update the version number in PCD
// service PEIM and DXE driver accordingly.
//
private final int version = 2;
// Generated header (.h) and source (.c) fragments; see getHString/getCString.
private String hString;
private String cString;
/**
 * Orders tokens by descending alignment size so that larger-aligned
 * members are laid out before smaller ones in the generated C structure.
 */
class AlignmentSizeComp implements Comparator<Token> {
    public int compare (Token first, Token second) {
        int secondAlign = getAlignmentSize(second);
        int firstAlign = getAlignmentSize(first);
        // Descending order: bigger alignment sorts earlier.
        return secondAlign - firstAlign;
    }
}
/**
 * Builds an empty PCD database for one phase.
 *
 * @param alTokens dynamic tokens to be placed into this database
 * @param exePhase "PEI" or "DXE"; used to prefix generated symbols
 * @param startLen number of token slots already used by the other phase;
 *                 token numbering continues from startLen + 1
 */
public PcdDatabase (ArrayList<Token> alTokens, String exePhase, int startLen) {
    this.phase = exePhase;
    this.alTokens = alTokens;
    this.assignedTokenNumber = startLen + 1;
    // Each sub-table carries the phase prefix for its generated symbols.
    this.stringTable = new StringTable(phase);
    this.guidTable = new GuidTable(phase);
    this.localTokenNumberTable = new LocalTokenNumberTable(phase);
    this.skuIdTable = new SkuIdTable(phase);
    this.sizeTable = new SizeTable(phase);
    this.exMapTable = new ExMapTable(phase);
}
/**
 * Splits the token list by PCD type: DynamicEx entries are appended to
 * exTokens, plain Dynamic entries to nexTokens. Output lists are not cleared.
 */
private void getNonExAndExTokens (ArrayList<Token> alTokens, List<Token> nexTokens, List<Token> exTokens) {
    for (Token candidate : alTokens) {
        if (candidate.isDynamicEx()) {
            exTokens.add(candidate);
        } else {
            nexTokens.add(candidate);
        }
    }
}
/**
 * Splits the token list by presence of a default value: tokens with a
 * default go to initTokens, the rest to uninitTokens. Output lists are
 * appended to, not cleared.
 */
private void getTwoGroupsOfTokens (ArrayList<Token> alTokens, List<Token> initTokens, List<Token> uninitTokens) {
    for (Token candidate : alTokens) {
        if (candidate.hasDefaultValue()) {
            initTokens.add(candidate);
        } else {
            uninitTokens.add(candidate);
        }
    }
}
/**
 * Returns the natural C alignment (bytes) of the token's datum type.
 * POINTER data is treated as a byte array and aligns on 1.
 */
private int getDataTypeAlignmentSize (Token token) {
    if (token.datumType == Token.DATUM_TYPE.UINT64) {
        return 8;
    }
    if (token.datumType == Token.DATUM_TYPE.UINT32) {
        return 4;
    }
    if (token.datumType == Token.DATUM_TYPE.UINT16) {
        return 2;
    }
    // UINT8, BOOLEAN, POINTER and anything unrecognized align on one byte.
    return 1;
}
/**
 * Returns the alignment (bytes) of the database member generated for the
 * token. HII entries are stored as VARIABLE_HEAD (UINT16 members, align 2),
 * VPD entries as VPD_HEAD (UINT32 offset, align 4), Unicode strings as a
 * UINT16 string-table index; everything else uses its datum type alignment.
 */
private int getAlignmentSize (Token token) {
    DynamicTokenValue.VALUE_TYPE valueType = token.getDefaultSku().type;
    if (valueType == DynamicTokenValue.VALUE_TYPE.HII_TYPE) {
        return 2;
    }
    if (valueType == DynamicTokenValue.VALUE_TYPE.VPD_TYPE) {
        return 4;
    }
    if (token.isUnicodeStringType()) {
        return 2;
    }
    return getDataTypeAlignmentSize(token);
}
/**
 * Returns the generated C source fragment (structure instantiations).
 * Valid only after genCode()/genCodeNew() has run; null before that.
 */
public String getCString () {
    return cString;
}
/**
 * Returns the generated C header fragment (macros and type declarations).
 * Valid only after genCode()/genCodeNew() has run; null before that.
 */
public String getHString () {
    return hString;
}
/**
 * Emits the C declaration and (when a default value exists) the
 * instantiation for one token, appending to declaList and instTable.
 * Side effects: may add an entry to skuIdTable, and sets the scratch
 * fields privateGlobalName/privateGlobalCCode via getCDeclarationString().
 */
private void genCodeWorker(Token t,
    ArrayList<CStructTypeDeclaration> declaList,
    HashMap<String, String> instTable, String phase)
    throws EntityException {
    CStructTypeDeclaration decl;
    //
    // Insert SKU_HEAD if isSkuEnable is true
    //
    if (t.isSkuEnable()) {
        int tableIdx;
        tableIdx = skuIdTable.add(t);
        decl = new CStructTypeDeclaration(t.getPrimaryKeyString(),
            SkuHeadAlignmentSize, getSkuEnabledTypeDeclaration(t), true);
        declaList.add(decl);
        instTable.put(t.getPrimaryKeyString(),
            getSkuEnabledTypeInstantiaion(t, tableIdx));
    }
    //
    // Insert PCD_ENTRY declaration and instantiation
    //
    getCDeclarationString(t);
    decl = new CStructTypeDeclaration(privateGlobalName,
        getAlignmentSize(t), privateGlobalCCode, t.hasDefaultValue());
    declaList.add(decl);
    if (t.hasDefaultValue()) {
        // Only tokens with defaults belong to the _INIT structure and thus
        // need an initializer string.
        instTable.put(privateGlobalName,
            getTypeInstantiation(t, declaList, instTable, phase)
        );
    }
}
/**
 * Processes each token in order: generates its C declaration/instantiation,
 * registers it in the size and local-token-number tables, and assigns it the
 * next sequential token number. DynamicEx tokens additionally get an entry
 * in the Ex mapping table (and their token space GUID in the GUID table).
 */
private void ProcessTokensNew (List<Token> tokens,
    ArrayList<CStructTypeDeclaration> cStructDeclList,
    HashMap<String, String> cStructInstTable,
    String phase
    )
    throws EntityException {
    for (int idx = 0; idx < tokens.size(); idx++) {
        Token t = tokens.get(idx);
        genCodeWorker (t, cStructDeclList, cStructInstTable, phase);
        sizeTable.add(t);
        localTokenNumberTable.add(t);
        // Token numbers are sequential and continue across phases.
        t.tokenNumber = assignedTokenNumber++;
        //
        // Add a mapping if this dynamic PCD entry is a EX type
        //
        if (t.isDynamicEx()) {
            exMapTable.add(t.tokenNumber,
                t.dynamicExTokenNumber,
                guidTable.add(t.tokenSpaceName, t.getPrimaryKeyString()),
                t.getPrimaryKeyString()
                );
        }
    }
}
/**
 * Generates the C header (hString) and source (cString) for this phase's
 * PCD database. Non-Ex tokens MUST be processed before Ex tokens (see the
 * numbering comment below), then each sub-table contributes its own
 * declarations/instantiations before the final strings are assembled.
 */
public void genCodeNew () throws EntityException {
    ArrayList<CStructTypeDeclaration> cStructDeclList = new ArrayList<CStructTypeDeclaration>();
    HashMap<String, String> cStructInstTable = new HashMap<String, String>();
    List<Token> nexTokens = new ArrayList<Token> ();
    List<Token> exTokens = new ArrayList<Token> ();
    getNonExAndExTokens (alTokens, nexTokens, exTokens);
    //
    // We have to process Non-Ex type PCD entry first. The reason is
    // that our optimization assumes that the Token Number of Non-Ex
    // PCD entry start from 1 (for PEI phase) and grows continously upwards.
    //
    // EX type token number starts from the last Non-EX PCD entry and
    // grows continously upwards.
    //
    ProcessTokensNew (nexTokens, cStructDeclList, cStructInstTable, phase);
    ProcessTokensNew (exTokens, cStructDeclList, cStructInstTable, phase);
    stringTable.genCodeNew(cStructDeclList, cStructInstTable);
    skuIdTable.genCodeNew(cStructDeclList, cStructInstTable, phase);
    exMapTable.genCodeNew(cStructDeclList, cStructInstTable, phase);
    localTokenNumberTable.genCodeNew(cStructDeclList, cStructInstTable, phase);
    sizeTable.genCodeNew(cStructDeclList, cStructInstTable, phase);
    guidTable.genCodeNew(cStructDeclList, cStructInstTable, phase);
    hString = genCMacroCode ();
    HashMap <String, String> result;
    result = genCStructCode(cStructDeclList,
        cStructInstTable,
        phase
        );
    hString += result.get("initDeclStr");
    hString += result.get("uninitDeclStr");
    hString += String.format("#define PCD_%s_SERVICE_DRIVER_VERSION %d", phase, version);
    cString = newLine + newLine + result.get("initInstStr");
}
/**
 * Assembles the size and emptiness #define macros contributed by every
 * sub-table, in a fixed order, followed by one blank line.
 */
private String genCMacroCode () {
    StringBuilder macros = new StringBuilder();
    //
    // Generate size info Macro for all Tables
    //
    macros.append(guidTable.getSizeMacro());
    macros.append(stringTable.getSizeMacro());
    macros.append(skuIdTable.getSizeMacro());
    macros.append(localTokenNumberTable.getSizeMacro());
    macros.append(exMapTable.getSizeMacro());
    //
    // Generate existance info Macro for all Tables
    //
    macros.append(guidTable.getExistanceMacro());
    macros.append(stringTable.getExistanceMacro());
    macros.append(skuIdTable.getExistanceMacro());
    macros.append(localTokenNumberTable.getExistanceMacro());
    macros.append(exMapTable.getExistanceMacro());
    macros.append(newLine);
    return macros.toString();
}
/**
 * Buckets the collected declarations/instantiations by alignment size
 * (8, 4, 2, 1) and renders three strings: the _INIT struct declaration,
 * its instantiation, and the _UNINIT struct declaration. Returned map keys:
 * "initDeclStr", "initInstStr", "uninitDeclStr".
 */
private HashMap <String, String> genCStructCode(
    ArrayList<CStructTypeDeclaration> declaList,
    HashMap<String, String> instTable,
    String phase
    ) {
    int i;
    HashMap <String, String> result = new HashMap<String, String>();
    HashMap <Integer, ArrayList<String>> alignmentInitDecl = new HashMap<Integer, ArrayList<String>>();
    HashMap <Integer, ArrayList<String>> alignmentUninitDecl = new HashMap<Integer, ArrayList<String>>();
    HashMap <Integer, ArrayList<String>> alignmentInitInst = new HashMap<Integer, ArrayList<String>>();
    //
    // Initialize the storage for each alignment
    //
    for (i = 8; i > 0; i>>=1) {
        alignmentInitDecl.put(new Integer(i), new ArrayList<String>());
        alignmentInitInst.put(new Integer(i), new ArrayList<String>());
        alignmentUninitDecl.put(new Integer(i), new ArrayList<String>());
    }
    String initDeclStr = "typedef struct {" + newLine;
    String initInstStr = String.format("%s_PCD_DATABASE_INIT g%sPcdDbInit = { ", phase.toUpperCase(), phase.toUpperCase()) + newLine;
    String uninitDeclStr = "typedef struct {" + newLine;
    //
    // Sort all C declaration and instantiation base on Alignment Size
    //
    for (Object d : declaList) {
        CStructTypeDeclaration decl = (CStructTypeDeclaration) d;
        if (decl.initTable) {
            alignmentInitDecl.get(new Integer(decl.alignmentSize)).add(decl.cCode);
            alignmentInitInst.get(new Integer(decl.alignmentSize)).add(instTable.get(decl.key));
        } else {
            // Uninitialized members need declarations only.
            alignmentUninitDecl.get(new Integer(decl.alignmentSize)).add(decl.cCode);
        }
    }
    //
    // Generate code for every alignment size
    //
    for (int align = 8; align > 0; align >>= 1) {
        ArrayList<String> declaListBasedOnAlignment = alignmentInitDecl.get(new Integer(align));
        ArrayList<String> instListBasedOnAlignment = alignmentInitInst.get(new Integer(align));
        for (i = 0; i < declaListBasedOnAlignment.size(); i++) {
            initDeclStr += tab + declaListBasedOnAlignment.get(i);
            initInstStr += tab + instListBasedOnAlignment.get(i);
            //
            // We made a assumption that both PEI_PCD_DATABASE and DXE_PCD_DATABASE
            // has a least one data memember with alignment size of 1. So we can
            // remove the last "," in the C structure instantiation string.
            //
            if ((align == 1) && (i == declaListBasedOnAlignment.size() - 1)) {
                initInstStr += newLine;
            } else {
                initInstStr += commaNewLine;
            }
        }
        declaListBasedOnAlignment = alignmentUninitDecl.get(new Integer(align));
        for (Object d : declaListBasedOnAlignment) {
            String s = (String)d;
            uninitDeclStr += tab + s;
        }
    }
    initDeclStr += String.format("} %s_PCD_DATABASE_INIT;", phase) + newLine + newLine;
    initInstStr += "};";
    uninitDeclStr += String.format("} %s_PCD_DATABASE_UNINIT;", phase) + newLine + newLine;
    result.put("initDeclStr", initDeclStr);
    result.put("initInstStr", initInstStr);
    result.put("uninitDeclStr", uninitDeclStr);
    return result;
}
/**
 * Legacy generator for the phase's PCD database header/source strings
 * (predecessor of genCodeNew). Splits tokens into those with and without
 * default values, sorts each group by descending alignment, and renders:
 * size/existence macros, the _INIT struct declaration + instantiation, and
 * the _UNINIT struct declaration. Results land in hString and cString.
 */
public void genCode ()
    throws EntityException {
    final String newLine = "\r\n";
    final String declNewLine = ";\r\n";
    final String tab = "\t";
    final String commaNewLine = ", \r\n";
    int i;
    ArrayList<String> decla;
    ArrayList<String> inst;
    String macroStr = "";
    String initDeclStr = "";
    String initInstStr = "";
    String uninitDeclStr = "";
    List<Token> initTokens = new ArrayList<Token> ();
    List<Token> uninitTokens = new ArrayList<Token> ();
    HashMap <String, ArrayList<String>> initCode = new HashMap<String, ArrayList<String>> ();
    HashMap <String, ArrayList<String>> uninitCode = new HashMap<String, ArrayList<String>> ();
    getTwoGroupsOfTokens (alTokens, initTokens, uninitTokens);
    //
    // Generate Structure Declaration for PcdTokens without Default Value
    // PEI_PCD_DATABASE_INIT
    //
    java.util.Comparator<Token> comparator = new AlignmentSizeComp();
    java.util.Collections.sort(initTokens, comparator);
    initCode = processTokens(initTokens);
    //
    // Generate Structure Declaration for PcdTokens without Default Value
    // PEI_PCD_DATABASE_UNINIT
    //
    java.util.Collections.sort(uninitTokens, comparator);
    uninitCode = processTokens(uninitTokens);
    //
    // Generate size info Macro for all Tables
    //
    macroStr += guidTable.getSizeMacro();
    macroStr += stringTable.getSizeMacro();
    macroStr += skuIdTable.getSizeMacro();
    macroStr += localTokenNumberTable.getSizeMacro();
    macroStr += exMapTable.getSizeMacro();
    //
    // Generate existance info Macro for all Tables
    //
    macroStr += guidTable.getExistanceMacro();
    macroStr += stringTable.getExistanceMacro();
    macroStr += skuIdTable.getExistanceMacro();
    macroStr += localTokenNumberTable.getExistanceMacro();
    macroStr += exMapTable.getExistanceMacro();
    //
    // Generate Structure Declaration for PcdTokens with Default Value
    // for example PEI_PCD_DATABASE_INIT
    //
    initDeclStr += "typedef struct {" + newLine;
    {
        initDeclStr += tab + exMapTable.getTypeDeclaration();
        initDeclStr += tab + guidTable.getTypeDeclaration();
        initDeclStr += tab + localTokenNumberTable.getTypeDeclaration();
        initDeclStr += tab + stringTable.getTypeDeclaration();
        initDeclStr += tab + sizeTable.getTypeDeclaration();
        initDeclStr += tab + skuIdTable.getTypeDeclaration();
        // Only the PEI database carries the active SKU id.
        if (phase.equalsIgnoreCase("PEI")) {
            initDeclStr += tab + "SKU_ID SystemSkuId;" + newLine;
        }
        decla = initCode.get(new String("Declaration"));
        for (i = 0; i < decla.size(); i++) {
            initDeclStr += tab + decla.get(i) + declNewLine;
        }
        //
        // Generate Structure Declaration for PcdToken with SkuEnabled
        //
        decla = initCode.get("DeclarationForSku");
        for (i = 0; i < decla.size(); i++) {
            initDeclStr += tab + decla.get(i) + declNewLine;
        }
    }
    initDeclStr += String.format("} %s_PCD_DATABASE_INIT;\r\n\r\n", phase);
    //
    // Generate MACRO for structure intialization of PCDTokens with Default Value
    // The sequence must match the sequence of declaration of the memembers in the structure
    String tmp = String.format("%s_PCD_DATABASE_INIT g%sPcdDbInit = { ", phase.toUpperCase(), phase.toUpperCase());
    initInstStr += tmp + newLine;
    initInstStr += tab + genInstantiationStr(exMapTable.getInstantiation()) + commaNewLine;
    initInstStr += tab + genInstantiationStr(guidTable.getInstantiation()) + commaNewLine;
    initInstStr += tab + genInstantiationStr(localTokenNumberTable.getInstantiation()) + commaNewLine;
    initInstStr += tab + genInstantiationStr(stringTable.getInstantiation()) + commaNewLine;
    initInstStr += tab + genInstantiationStr(sizeTable.getInstantiation()) + commaNewLine;
    initInstStr += tab + genInstantiationStr(skuIdTable.getInstantiation()) + commaNewLine;
    //
    // For SystemSkuId
    //
    if (phase.equalsIgnoreCase("PEI")) {
        initInstStr += tab + "0" + tab + "/* SystemSkuId */" + commaNewLine;
    }
    inst = initCode.get("Instantiation");
    for (i = 0; i < inst.size(); i++) {
        initInstStr += tab + inst.get(i) + commaNewLine;
    }
    inst = initCode.get("InstantiationForSku");
    for (i = 0; i < inst.size(); i++) {
        initInstStr += tab + inst.get(i);
        // No trailing comma after the last initializer.
        if (i != inst.size() - 1) {
            initInstStr += commaNewLine;
        }
    }
    initInstStr += "};";
    uninitDeclStr += "typedef struct {" + newLine;
    {
        decla = uninitCode.get("Declaration");
        if (decla.size() == 0) {
            // C forbids empty structs; emit a dummy member instead.
            uninitDeclStr += "UINT8 dummy /* The UINT struct is empty */" + declNewLine;
        } else {
            for (i = 0; i < decla.size(); i++) {
                uninitDeclStr += tab + decla.get(i) + declNewLine;
            }
            decla = uninitCode.get("DeclarationForSku");
            for (i = 0; i < decla.size(); i++) {
                uninitDeclStr += tab + decla.get(i) + declNewLine;
            }
        }
    }
    uninitDeclStr += String.format("} %s_PCD_DATABASE_UNINIT;\r\n\r\n", phase);
    cString = initInstStr + newLine;
    hString = macroStr + newLine
        + initDeclStr + newLine
        + uninitDeclStr + newLine
        + newLine;
    hString += String.format("#define PCD_%s_SERVICE_DRIVER_VERSION %d", phase, version);
}
/**
 * Joins pre-rendered C initializer lines into one string: every line after
 * the first is indented with a tab, and lines are separated by CRLF.
 *
 * Improvement: uses StringBuilder instead of repeated String += in the
 * loop, avoiding O(n^2) copying; output is byte-identical.
 *
 * @param alStr rendered lines; may be empty
 * @return the joined string ("" for an empty list)
 */
public static String genInstantiationStr (ArrayList<String> alStr) {
    StringBuilder str = new StringBuilder();
    for (int i = 0; i < alStr.size(); i++) {
        if (i != 0) {
            str.append("\t");
        }
        str.append(alStr.get(i));
        if (i != alStr.size() - 1) {
            str.append("\r\n");
        }
    }
    return str.toString();
}
/**
 * Legacy per-token generator (used by genCode). For each token, produces
 * the C declaration and — when a default exists — instantiation strings,
 * registers it in the size and local-token-number tables, and assigns the
 * next token number. Returned map keys: "Declaration", "DeclarationForSku",
 * "Instantiation", "InstantiationForSku".
 */
private HashMap<String, ArrayList<String>> processTokens (List<Token> alToken)
    throws EntityException {
    HashMap <String, ArrayList<String>> map = new HashMap<String, ArrayList<String>>();
    ArrayList<String> decl = new ArrayList<String>();
    ArrayList<String> declForSkuEnableType = new ArrayList<String>();
    ArrayList<String> inst = new ArrayList<String>();
    ArrayList<String> instForSkuEnableType = new ArrayList<String>();
    for (int index = 0; index < alToken.size(); index++) {
        Token token = alToken.get(index);
        if (token.isSkuEnable()) {
            //
            // BugBug: Schema only support Data type now
            //
            int tableIdx;
            tableIdx = skuIdTable.add(token);
            decl.add(getSkuEnabledTypeDeclaration(token));
            if (token.hasDefaultValue()) {
                inst.add(getSkuEnabledTypeInstantiaion(token, tableIdx));
            }
            declForSkuEnableType.add(getDataTypeDeclarationForSkuEnabled(token));
            if (token.hasDefaultValue()) {
                instForSkuEnableType.add(getDataTypeInstantiationForSkuEnabled(token));
            }
        } else {
            // Non-SKU tokens: pick the representation by value type, in
            // priority order HII > VPD > Unicode string > plain datum.
            if (token.getDefaultSku().type == DynamicTokenValue.VALUE_TYPE.HII_TYPE) {
                decl.add(getVariableEnableTypeDeclaration(token));
                inst.add(getVariableEnableInstantiation(token));
            } else if (token.getDefaultSku().type == DynamicTokenValue.VALUE_TYPE.VPD_TYPE) {
                decl.add(getVpdEnableTypeDeclaration(token));
                inst.add(getVpdEnableTypeInstantiation(token));
            } else if (token.isUnicodeStringType()) {
                decl.add(getStringTypeDeclaration(token));
                inst.add(getStringTypeInstantiation(stringTable.add(token.getStringTypeString(), token), token));
            }
            else {
                decl.add(getDataTypeDeclaration(token));
                if (token.hasDefaultValue()) {
                    inst.add(getDataTypeInstantiation(token));
                }
            }
        }
        sizeTable.add(token);
        localTokenNumberTable.add(token);
        token.tokenNumber = assignedTokenNumber++;
    }
    map.put("Declaration", decl);
    map.put("DeclarationForSku", declForSkuEnableType);
    map.put("Instantiation", inst);
    map.put("InstantiationForSku", instForSkuEnableType);
    return map;
}
/**
 * Renders the SKU_HEAD member declaration for a SKU-enabled token.
 */
private String getSkuEnabledTypeDeclaration (Token token) {
    return "SKU_HEAD " + token.getPrimaryKeyString() + ";\r\n";
}
/**
 * Renders the SKU_HEAD initializer: an offsetof() expression pointing at
 * the token's SKU data table (in the Init or Uninit struct, depending on
 * whether a default value exists) plus the SKU-id table index.
 */
private String getSkuEnabledTypeInstantiaion (Token token, int SkuTableIdx) {
    String section;
    if (token.hasDefaultValue()) {
        section = "Init";
    } else {
        section = "Uninit";
    }
    String key = token.getPrimaryKeyString();
    String offsetExpr = String.format(PcdDatabase.offsetOfSkuHeadStrTemplate, phase, section, key);
    return String.format("{ %s, %d } /* SKU_ENABLED: %s */", offsetExpr, SkuTableIdx, key);
}
/**
 * Renders the per-SKU data array declaration for a SKU-enabled token.
 * POINTER data is flattened into one byte array holding every SKU's copy.
 */
private String getDataTypeDeclarationForSkuEnabled (Token token) {
    String key = token.getPrimaryKeyString();
    int skuCount = token.skuData.size();
    if (token.datumType == Token.DATUM_TYPE.POINTER) {
        return String.format("UINT8 %s_%s[%d];\r\n", key, "SkuDataTable", token.datumSize * skuCount);
    }
    String typeStr;
    switch (token.datumType) {
    case UINT8:
        typeStr = "UINT8 %s_%s[%d];\r\n";
        break;
    case UINT16:
        typeStr = "UINT16 %s_%s[%d];\r\n";
        break;
    case UINT32:
        typeStr = "UINT32 %s_%s[%d];\r\n";
        break;
    case UINT64:
        typeStr = "UINT64 %s_%s[%d];\r\n";
        break;
    case BOOLEAN:
        typeStr = "BOOLEAN %s_%s[%d];\r\n";
        break;
    default:
        // Unrecognized datum types yield an empty declaration (historic
        // behavior preserved).
        typeStr = "";
        break;
    }
    return String.format(typeStr, key, "SkuDataTable", skuCount);
}
/**
 * Renders the per-SKU data array initializer ("{ v0, v1, ... }").
 * NOTE(review): the POINTER branch emits a declaration-shaped string, not
 * an initializer — historic behavior kept, confirm intent.
 */
private String getDataTypeInstantiationForSkuEnabled (Token token) {
    if (token.datumType == Token.DATUM_TYPE.POINTER) {
        return String.format("UINT8 %s_%s[%d]", token.getPrimaryKeyString(), "SkuDataTable", token.datumSize * token.skuData.size());
    }
    StringBuilder body = new StringBuilder("{ ");
    for (int idx = 0; idx < token.skuData.size(); idx++) {
        if (idx != 0) {
            body.append(", ");
        }
        body.append(token.skuData.get(idx).toString());
    }
    body.append("}");
    return body.toString();
}
/**
 * Renders the initializer for one SKU's HII variable default value,
 * annotated with the generated member name.
 */
private String getDataTypeInstantiationForVariableDefault_new (Token token, String cName, int skuId) {
    return String.format("%s /* %s */", token.skuData.get(skuId).value.hiiDefaultValue, cName);
}
/**
 * Renders the C initializer for a token's default-SKU value.
 *
 * Defect fixed: the original if/else had two byte-identical branches for
 * POINTER and non-POINTER datum types; collapsed into a single return with
 * no behavior change.
 */
private String getDataTypeInstantiation (Token token) {
    return String.format("%s /* %s */", token.getDefaultSku().value, token.getPrimaryKeyString());
}
/**
 * Maps a token to the C type name of its database member. HII, VPD and
 * Unicode-string tokens map to their head structures (checked in that
 * priority order); otherwise the scalar datum type is used, with POINTER
 * data stored as UINT8.
 *
 * @throws EntityException for an unrecognized datum type
 */
private String getCType (Token t)
    throws EntityException {
    if (t.isHiiEnable()) {
        return "VARIABLE_HEAD";
    }
    if (t.isVpdEnable()) {
        return "VPD_HEAD";
    }
    if (t.isUnicodeStringType()) {
        return "STRING_HEAD";
    }
    if (t.datumType == Token.DATUM_TYPE.UINT64) {
        return "UINT64";
    }
    if (t.datumType == Token.DATUM_TYPE.UINT32) {
        return "UINT32";
    }
    if (t.datumType == Token.DATUM_TYPE.UINT16) {
        return "UINT16";
    }
    if (t.datumType == Token.DATUM_TYPE.UINT8 || t.datumType == Token.DATUM_TYPE.POINTER) {
        return "UINT8";
    }
    if (t.datumType == Token.DATUM_TYPE.BOOLEAN) {
        return "BOOLEAN";
    }
    throw new EntityException("Unknown type in getDataTypeCDeclaration");
}
/**
 * Computes the token's private C symbol name and declaration text into the
 * scratch fields privateGlobalName / privateGlobalCCode. SKU-enabled
 * tokens get a "_SkuDataTable"-suffixed symbol; every member is an array
 * with one slot per SKU (POINTER data adds a second dimension of datumSize).
 */
private void getCDeclarationString(Token t)
    throws EntityException {
    privateGlobalName = t.isSkuEnable()
        ? String.format("%s_%s", t.getPrimaryKeyString(), skuDataTableTemplate)
        : t.getPrimaryKeyString();
    if (t.isUnicodeStringType()) {
        privateGlobalCCode = String.format("STRING_HEAD %s[%d];\r\n", t.getPrimaryKeyString(), t.getSkuIdCount());
        return;
    }
    String type = getCType(t);
    if (t.datumType == Token.DATUM_TYPE.POINTER) {
        privateGlobalCCode = String.format("%s %s[%d][%d];\r\n", type, privateGlobalName, t.getSkuIdCount(), t.datumSize);
    } else {
        privateGlobalCCode = String.format("%s %s[%d];\r\n", type, privateGlobalName, t.getSkuIdCount());
    }
}
/**
 * Renders the declaration for one SKU's HII variable default member.
 * POINTER defaults become a UINT8 array sized by datumSize (no trailing
 * ";\r\n", matching historic output); scalars become "<TYPE> <name>;\r\n".
 */
private String getDataTypeDeclarationForVariableDefault_new (Token token, String cName, int skuId) {
    if (token.datumType == Token.DATUM_TYPE.POINTER) {
        return String.format("UINT8 %s[%d]", cName, token.datumSize);
    }
    return String.format("%s %s;\r\n", scalarTypeName(token), cName);
}
/**
 * Renders the bare declaration for a plain-datum token (no terminator;
 * the caller appends ";\r\n").
 */
private String getDataTypeDeclaration (Token token) {
    if (token.datumType == Token.DATUM_TYPE.POINTER) {
        return String.format("UINT8 %s[%d]", token.getPrimaryKeyString(), token.datumSize);
    }
    return String.format("%s %s", scalarTypeName(token), token.getPrimaryKeyString());
}
/**
 * Maps a scalar datum type to its EDK C type name; returns "" for
 * unrecognized types (historic behavior of the original if-chains).
 */
private String scalarTypeName (Token token) {
    if (token.datumType == Token.DATUM_TYPE.UINT8) {
        return "UINT8";
    }
    if (token.datumType == Token.DATUM_TYPE.UINT16) {
        return "UINT16";
    }
    if (token.datumType == Token.DATUM_TYPE.UINT32) {
        return "UINT32";
    }
    if (token.datumType == Token.DATUM_TYPE.UINT64) {
        return "UINT64";
    }
    if (token.datumType == Token.DATUM_TYPE.BOOLEAN) {
        return "BOOLEAN";
    }
    return "";
}
/**
 * Renders the VPD_HEAD member declaration for a VPD-backed token.
 */
private String getVpdEnableTypeDeclaration (Token token) {
    return "VPD_HEAD " + token.getPrimaryKeyString();
}
/**
 * Renders the multi-SKU C initializer for one token. Per SKU this emits a
 * string-table index (Unicode strings), a VARIABLE_HEAD initializer (HII —
 * also registering the default-value member in declaList/instTable), a
 * VPD_HEAD initializer (VPD offset), or the raw datum value.
 */
private String getTypeInstantiation (Token t, ArrayList<CStructTypeDeclaration> declaList, HashMap<String, String> instTable, String phase) throws EntityException {
    int i;
    String s;
    s = String.format("/* %s */", t.getPrimaryKeyString()) + newLine;
    s += tab + "{" + newLine;
    for (i = 0; i < t.skuData.size(); i++) {
        if (t.isUnicodeStringType() && !t.isHiiEnable()) {
            s += tab + tab + String.format("{ %d }", stringTable.add(t.skuData.get(i).value.value, t));
        } else if (t.isHiiEnable()) {
            /* VARIABLE_HEAD definition
            typedef struct {
            UINT16 GuidTableIndex; // Offset in Guid Table in units of GUID.
            UINT16 StringIndex; // Offset in String Table in units of UINT16.
            UINT16 Offset; // Offset in Variable
            } VARIABLE_HEAD ;
            */
            String variableDefaultName = String.format("%s_VariableDefault_%d", t.getPrimaryKeyString(), i);
            s += tab + tab + String.format("{ %d, %d, %s, %s }", guidTable.add(t.skuData.get(i).value.variableGuid, t.getPrimaryKeyString()),
                stringTable.add(t.skuData.get(i).value.getStringOfVariableName(), t),
                t.skuData.get(i).value.variableOffset,
                String.format("offsetof(%s_PCD_DATABASE, Init.%s)", phase, variableDefaultName)
                );
            //
            // We need to support the default value, so we add the declaration and
            // the instantiation for the default value.
            //
            CStructTypeDeclaration decl = new CStructTypeDeclaration (variableDefaultName,
                getDataTypeAlignmentSize(t),
                getDataTypeDeclarationForVariableDefault_new(t, variableDefaultName, i),
                true
                );
            declaList.add(decl);
            instTable.put(variableDefaultName, getDataTypeInstantiationForVariableDefault_new (t, variableDefaultName, i));
        } else if (t.isVpdEnable()) {
            /* typedef struct {
            UINT32 Offset;
            } VPD_HEAD;
            */
            s += tab + tab + String.format("{ %s }", t.skuData.get(i).value.vpdOffset);
        } else {
            s += tab + tab + String.format("{ %s }", t.skuData.get(i).value.value);
        }
        // Comma-separate SKU entries; the last one ends the line without one.
        if (i != t.skuData.size() - 1) {
            s += commaNewLine;
        } else {
            s += newLine;
        }
    }
    s += tab + "}";
    return s;
}
/**
 * Renders the VPD_HEAD initializer (the VPD offset) for the default SKU.
 */
private String getVpdEnableTypeInstantiation (Token token) {
    return String.format("{ %s } /* %s */", token.getDefaultSku().vpdOffset, token.getPrimaryKeyString());
}
/**
 * Renders the UINT16 string-table-index member declaration for a
 * Unicode-string token.
 */
private String getStringTypeDeclaration (Token token) {
    return "UINT16 " + token.getPrimaryKeyString();
}
/**
 * Renders the string-table-index initializer for a Unicode-string token.
 */
private String getStringTypeInstantiation (int StringTableIdx, Token token) {
    return StringTableIdx + " /* " + token.getPrimaryKeyString() + " */";
}
/**
 * Renders the VARIABLE_HEAD member declaration for an HII-backed token.
 */
private String getVariableEnableTypeDeclaration (Token token) {
    return "VARIABLE_HEAD " + token.getPrimaryKeyString();
}
/**
 * Renders the VARIABLE_HEAD initializer for an HII-backed token's default
 * SKU. Side effects: registers the variable GUID in guidTable and the
 * variable name in stringTable.
 */
private String getVariableEnableInstantiation (Token token)
    throws EntityException {
    //
    // Need scott fix
    //
    return String.format("{ %d, %d, %s } /* %s */", guidTable.add(token.getDefaultSku().variableGuid, token.getPrimaryKeyString()),
        stringTable.add(token.getDefaultSku().getStringOfVariableName(), token),
        token.getDefaultSku().variableOffset,
        token.getPrimaryKeyString());
}
/**
 * Returns the total number of token entries in this database (at least 1,
 * since an empty size table still reserves a dummy slot).
 */
public int getTotalTokenNumber () {
    return sizeTable.getTableLen();
}
/**
 * Reads Tools/Conf/Pcd/PcdDatabaseCommonDefinitions.sample from the
 * workspace and returns its content with every line prefixed by CRLF.
 *
 * Defects fixed in this group of three readers: the FileReader /
 * BufferedReader was never closed (file-handle leak on both the success
 * and failure paths), and line accumulation used O(n^2) String
 * concatenation. The three near-identical methods now share one helper.
 *
 * @throws EntityException if the sample file cannot be read
 */
public static String getPcdDatabaseCommonDefinitions ()
    throws EntityException {
    return readPcdDefinitionSample("PcdDatabaseCommonDefinitions.sample",
        "Fatal error when generating PcdDatabase Common Definitions");
}
/**
 * Reads Tools/Conf/Pcd/PcdDatabaseDxeDefinitions.sample from the workspace.
 *
 * @throws EntityException if the sample file cannot be read
 */
public static String getPcdDxeDatabaseDefinitions ()
    throws EntityException {
    return readPcdDefinitionSample("PcdDatabaseDxeDefinitions.sample",
        "Fatal error when generating PcdDatabase Dxe Definitions");
}
/**
 * Reads Tools/Conf/Pcd/PcdDatabasePeiDefinitions.sample from the workspace.
 *
 * @throws EntityException if the sample file cannot be read
 */
public static String getPcdPeiDatabaseDefinitions ()
    throws EntityException {
    return readPcdDefinitionSample("PcdDatabasePeiDefinitions.sample",
        "Fatal error when generating PcdDatabase Pei Definitions");
}
/**
 * Shared worker: reads one sample file from Tools/Conf/Pcd under the
 * workspace, prefixing each line with CRLF (preserves the historic output
 * format, including the leading CRLF).
 *
 * @param sampleName   file name under Tools/Conf/Pcd
 * @param errorMessage message for the EntityException on any failure
 * @throws EntityException if the file cannot be opened or read
 */
private static String readPcdDefinitionSample (String sampleName, String errorMessage)
    throws EntityException {
    BufferedReader in = null;
    try {
        File file = new File(GlobalData.getWorkspacePath() + File.separator +
            "Tools" + File.separator +
            "Conf" + File.separator +
            "Pcd" + File.separator +
            sampleName);
        in = new BufferedReader(new FileReader(file));
        StringBuilder retStr = new StringBuilder();
        String str;
        while ((str = in.readLine()) != null) {
            retStr.append("\r\n").append(str);
        }
        return retStr.toString();
    } catch (Exception ex) {
        throw new EntityException(errorMessage);
    } finally {
        // Always release the file handle; the original never closed it.
        if (in != null) {
            try {
                in.close();
            } catch (IOException closeEx) {
                // Best-effort close; content was already read or the
                // primary exception is already being thrown.
            }
        }
    }
}
}
/**
 * Simple value holder pairing a ModuleSA entry from the FPD with its
 * resolved module type. Fields are public by historic design.
 */
class ModuleInfo {
    // The module's surface-area element from the FPD document.
    public ModuleSADocument.ModuleSA module;
    // The module type resolved from the module's own surface area.
    public ModuleTypeDef.Enum type;
    public ModuleInfo (ModuleSADocument.ModuleSA module, ModuleTypeDef.Enum type) {
        this.module = module;
        this.type = type;
    }
}
/** This action class is to collect PCD information from MSA, SPD, FPD xml file.
This class will be used for wizard and build tools, So it can *not* inherit
from buildAction or UIAction.
**/
public class CollectPCDAction {
/// memoryDatabase hold all PCD information collected from SPD, MSA, FPD.
private MemoryDatabaseManager dbManager;
/// Workspacepath hold the workspace information.
private String workspacePath;
/// FPD file is the root file.
private String fpdFilePath;
/// Message level for CollectPCDAction; saved so it can be restored on exit.
private int originalMessageLevel;
/// Cache the fpd docment instance for private usage.
private FrameworkPlatformDescriptionDocument fpdDocInstance;
/**
 Set WorkspacePath parameter for this action class.
 @param workspacePath parameter for this action
 **/
public void setWorkspacePath(String workspacePath) {
    this.workspacePath = workspacePath;
}
/**
 Set action message level for CollectPcdAction tool.
 The previous global level is saved in originalMessageLevel and must be
 restored when this action exits (see perform()).
 @param actionMessageLevel parameter for this action
 **/
public void setActionMessageLevel(int actionMessageLevel) {
    originalMessageLevel = ActionMessage.messageLevel;
    ActionMessage.messageLevel = actionMessageLevel;
}
/**
 Set FPDFileName parameter for this action class.
 @param fpdFilePath fpd file path
 **/
public void setFPDFilePath(String fpdFilePath) {
    this.fpdFilePath = fpdFilePath;
}
/**
 Common function interface for outer callers: configures the action,
 validates parameters, runs the collection, then restores the global
 message level.
 @param workspacePath The path of workspace of current build or analysis.
 @param fpdFilePath The fpd file path of current build or analysis.
 @param messageLevel The message level for this Action.
 @throws Exception The exception of this function. Because it can *not* be predict
 where the action class will be used. So only Exception can be throw.
 **/
public void perform(String workspacePath, String fpdFilePath,
    int messageLevel) throws Exception {
    setWorkspacePath(workspacePath);
    setFPDFilePath(fpdFilePath);
    setActionMessageLevel(messageLevel);
    checkParameter();
    execute();
    // Restore the global level changed by setActionMessageLevel().
    ActionMessage.messageLevel = originalMessageLevel;
}
/**
 Core execution function for this action class.
 This function work flows will be:
 1) Collect and prepocess PCD information from FPD file, all PCD
 information will be stored into memory database.
 2) Generate 3 strings for
 a) All modules using Dynamic(Ex) PCD entry.(Token Number)
 b) PEI PCDDatabase (C Structure) for PCD Service PEIM.
 c) DXE PCD Database (C structure) for PCD Service DXE.
 @throws EntityException Exception indicate failed to execute this action.
 **/
private void execute() throws EntityException {
    //
    // Get memoryDatabaseManager instance from GlobalData.
    // The memoryDatabaseManager should be initialized for whatever build
    // tools or wizard tools
    //
    if((dbManager = GlobalData.getPCDMemoryDBManager()) == null) {
        throw new EntityException("The instance of PCD memory database manager is null");
    }
    //
    // Collect all PCD information defined in FPD file.
    // Evenry token defind in FPD will be created as an token into
    // memory database.
    //
    createTokenInDBFromFPD();
    //
    // Call Private function genPcdDatabaseSourceCode (void); ComponentTypeBsDriver
    // 1) Generate for PEI, DXE PCD DATABASE's definition and initialization.
    //
    genPcdDatabaseSourceCode ();
}
/**
 This function generates source code for PCD Database.
 PEI tokens are numbered first; DXE numbering continues after them
 (alPei.size() is passed as the DXE start offset). Results are published
 into the static MemoryDatabaseManager string fields.
 @param void
 @throws EntityException If the token does *not* exist in memory database.
 **/
private void genPcdDatabaseSourceCode()
    throws EntityException {
    String PcdCommonHeaderString = PcdDatabase.getPcdDatabaseCommonDefinitions ();
    ArrayList<Token> alPei = new ArrayList<Token> ();
    ArrayList<Token> alDxe = new ArrayList<Token> ();
    dbManager.getTwoPhaseDynamicRecordArray(alPei, alDxe);
    PcdDatabase pcdPeiDatabase = new PcdDatabase (alPei, "PEI", 0);
    pcdPeiDatabase.genCodeNew();
    MemoryDatabaseManager.PcdPeimHString = PcdCommonHeaderString + pcdPeiDatabase.getHString()
        + PcdDatabase.getPcdPeiDatabaseDefinitions();
    MemoryDatabaseManager.PcdPeimCString = pcdPeiDatabase.getCString();
    PcdDatabase pcdDxeDatabase = new PcdDatabase (alDxe,
        "DXE",
        alPei.size()
        );
    pcdDxeDatabase.genCodeNew();
    // The DXE header intentionally includes the PEIM header content.
    MemoryDatabaseManager.PcdDxeHString = MemoryDatabaseManager.PcdPeimHString + pcdDxeDatabase.getHString()
        + PcdDatabase.getPcdDxeDatabaseDefinitions();
    MemoryDatabaseManager.PcdDxeCString = pcdDxeDatabase.getCString();
}
/**
 Get component array from FPD.
 This function maybe provided by some Global class.

 Defect fixed: removed the unused locals "current", "components" and
 "fModules", which were declared but never read or written.

 @return List&lt;ModuleInfo&gt; the component array.
 @throws EntityException Failed to parse the FPD xml file.
 */
private List<ModuleInfo> getComponentsFromFPD()
    throws EntityException {
    List<ModuleInfo> allModules = new ArrayList<ModuleInfo>();
    ModuleSADocument.ModuleSA[] modules = null;
    HashMap<String, XmlObject> map = new HashMap<String, XmlObject>();
    //
    // Parse and cache the FPD document on first use.
    //
    if (fpdDocInstance == null) {
        try {
            fpdDocInstance = (FrameworkPlatformDescriptionDocument)XmlObject.Factory.parse(new File(fpdFilePath));
        } catch(IOException ioE) {
            throw new EntityException("File IO error for xml file:" + fpdFilePath + "\n" + ioE.getMessage());
        } catch(XmlException xmlE) {
            throw new EntityException("Can't parse the FPD xml fle:" + fpdFilePath + "\n" + xmlE.getMessage());
        }
    }
    map.put("FrameworkPlatformDescription", fpdDocInstance);
    SurfaceAreaQuery.setDoc(map);
    modules = SurfaceAreaQuery.getFpdModuleSAs();
    for (int index = 0; index < modules.length; index ++) {
        // Point the query at each module's own surface area to resolve
        // its module type.
        SurfaceAreaQuery.setDoc(GlobalData.getDoc(modules[index].getModuleName()));
        allModules.add(new ModuleInfo(modules[index],
            ModuleTypeDef.Enum.forString(SurfaceAreaQuery.getModuleType())));
    }
    return allModules;
}
/**
   Create token instance objects in the memory database; the token
   information comes from the FPD file. Normally, the FPD file contains
   all platform-level PCD information.

   @throws EntityException if the FPD file cannot be parsed, or the PCD
                           data in the FPD file is missing or inconsistent
                           (wrong datum type/size, mismatched token numbers,
                           conflicting dynamic/non-dynamic usage, ...).
**/
private void createTokenInDBFromFPD()
    throws EntityException {
    int index = 0;
    int index2 = 0;
    int pcdIndex = 0;
    List<PcdBuildDefinition.PcdData> pcdBuildDataArray = new ArrayList<PcdBuildDefinition.PcdData>();
    PcdBuildDefinition.PcdData pcdBuildData = null;
    Token token = null;
    // NOTE(review): skuInstance and skuIndex appear unused in this method;
    // SKU handling lives in updateDynamicInformation().
    SkuInstance skuInstance = null;
    int skuIndex = 0;
    List<ModuleInfo> modules = null;
    String primaryKey = null;
    String exceptionString = null;
    UsageInstance usageInstance = null;
    String primaryKey1 = null;
    String primaryKey2 = null;
    boolean isDuplicate = false;
    Token.PCD_TYPE pcdType = Token.PCD_TYPE.UNKNOWN;
    Token.DATUM_TYPE datumType = Token.DATUM_TYPE.UNKNOWN;
    int tokenNumber = 0;
    String moduleName = null;
    String datum = null;
    int maxDatumSize = 0;

    //
    // ----------------------------------------------
    // 1), Get all <ModuleSA> from FPD file.
    // ----------------------------------------------
    //
    modules = getComponentsFromFPD();

    if (modules == null) {
        throw new EntityException("[FPD file error] No modules in FPD file, Please check whether there are elements in <FrameworkModules> in FPD file!");
    }

    //
    // -------------------------------------------------------------------
    // 2), Loop all modules to process <PcdBuildDeclarations> for each module.
    // -------------------------------------------------------------------
    //
    for (index = 0; index < modules.size(); index ++) {
        //
        // Skip modules already processed: compare against every earlier
        // module by (name, arch) primary key.
        //
        isDuplicate = false;
        for (index2 = 0; index2 < index; index2 ++) {
            //
            // BUGBUG: For transition schema, we can *not* get module's version from
            // <ModuleSAs>, It is work around code.
            //
            primaryKey1 = UsageInstance.getPrimaryKey(modules.get(index).module.getModuleName(),
                                                      null,
                                                      null,
                                                      null,
                                                      modules.get(index).module.getArch().toString(),
                                                      null);
            primaryKey2 = UsageInstance.getPrimaryKey(modules.get(index2).module.getModuleName(),
                                                      null,
                                                      null,
                                                      null,
                                                      modules.get(index2).module.getArch().toString(),
                                                      null);
            if (primaryKey1.equalsIgnoreCase(primaryKey2)) {
                isDuplicate = true;
                break;
            }
        }

        if (isDuplicate) {
            continue;
        }

        //
        // It is legal for a module does not contains ANY pcd build definitions.
        //
        if (modules.get(index).module.getPcdBuildDefinition() == null) {
            continue;
        }

        pcdBuildDataArray = modules.get(index).module.getPcdBuildDefinition().getPcdDataList();
        moduleName = modules.get(index).module.getModuleName();

        //
        // ----------------------------------------------------------------------
        // 2.1), Loop all Pcd entry for a module and add it into memory database.
        // ----------------------------------------------------------------------
        //
        for (pcdIndex = 0; pcdIndex < pcdBuildDataArray.size(); pcdIndex ++) {
            pcdBuildData = pcdBuildDataArray.get(pcdIndex);
            primaryKey = Token.getPrimaryKeyString(pcdBuildData.getCName(),
                                                   translateSchemaStringToUUID(pcdBuildData.getTokenSpaceGuid()));
            pcdType = Token.getpcdTypeFromString(pcdBuildData.getItemType().toString());
            datumType = Token.getdatumTypeFromString(pcdBuildData.getDatumType().toString());
            tokenNumber = Integer.decode(pcdBuildData.getToken().toString());
            if (pcdBuildData.getValue() != null) {
                datum = pcdBuildData.getValue().toString();
            } else {
                datum = null;
            }
            maxDatumSize = pcdBuildData.getMaxDatumSize();

            //
            // FEATURE_FLAG PCDs must be BOOLEAN.
            //
            if ((pcdType == Token.PCD_TYPE.FEATURE_FLAG) &&
                (datumType != Token.DATUM_TYPE.BOOLEAN)){
                exceptionString = String.format("[FPD file error] For PCD %s in module %s, the PCD type is FEATRUE_FLAG but "+
                                                "datum type of this PCD entry is not BOOLEAN!",
                                                pcdBuildData.getCName(),
                                                moduleName);
                throw new EntityException(exceptionString);
            }

            //
            // Check <TokenSpaceGuid> is exist? In future, because all schema verification will tools
            // will check that, following checking code could be removed.
            //
            if (pcdBuildData.getTokenSpaceGuid() == null) {
                exceptionString = String.format("[FPD file error] There is no <TokenSpaceGuid> for PCD %s in module %s! This is required!",
                                                pcdBuildData.getCName(),
                                                moduleName);
                throw new EntityException(exceptionString);
            }

            //
            // -------------------------------------------------------------------------------------------
            // 2.1.1), Do some necessary checking work for FixedAtBuild, FeatureFlag and PatchableInModule
            // -------------------------------------------------------------------------------------------
            //
            if (!Token.isDynamic(pcdType)) {
                //
                // Value is required.
                //
                if (datum == null) {
                    exceptionString = String.format("[FPD file error] There is no value for PCD entry %s in module %s!",
                                                    pcdBuildData.getCName(),
                                                    moduleName);
                    throw new EntityException(exceptionString);
                }

                //
                // Check whether the datum size is matched datum type.
                //
                if ((exceptionString = verifyDatum(pcdBuildData.getCName(),
                                                   moduleName,
                                                   datum,
                                                   datumType,
                                                   maxDatumSize)) != null) {
                    throw new EntityException(exceptionString);
                }
            }

            //
            // ---------------------------------------------------------------------------------
            // 2.1.2), Create token or update token information for current analyzed PCD data.
            // ---------------------------------------------------------------------------------
            //
            if (dbManager.isTokenInDatabase(primaryKey)) {
                //
                // If the token is already exist in database, do some necessary checking
                // and add a usage instance into this token in database
                //
                token = dbManager.getTokenByKey(primaryKey);

                //
                // checking for DatumType, DatumType should be unique for one PCD used in different
                // modules.
                //
                if (token.datumType != datumType) {
                    exceptionString = String.format("[FPD file error] The datum type of PCD entry %s is %s, which is different with %s defined in before!",
                                                    pcdBuildData.getCName(),
                                                    pcdBuildData.getDatumType().toString(),
                                                    Token.getStringOfdatumType(token.datumType));
                    throw new EntityException(exceptionString);
                }

                //
                // Check token number is valid
                //
                if (tokenNumber != token.tokenNumber) {
                    exceptionString = String.format("[FPD file error] The token number of PCD entry %s in module %s is different with same PCD entry in other modules!",
                                                    pcdBuildData.getCName(),
                                                    moduleName);
                    throw new EntityException(exceptionString);
                }

                //
                // For same PCD used in different modules, the PCD type should all be dynamic or non-dynamic.
                //
                if (token.isDynamicPCD != Token.isDynamic(pcdType)) {
                    exceptionString = String.format("[FPD file error] For PCD entry %s in module %s, you define dynamic or non-dynamic PCD type which"+
                                                    "is different with others module's",
                                                    token.cName,
                                                    moduleName);
                    throw new EntityException(exceptionString);
                }

                if (token.isDynamicPCD) {
                    //
                    // Check datum is equal the datum in dynamic information.
                    // For dynamic PCD, you can do not write <Value> in sperated every <PcdBuildDefinition> in different <ModuleSA>,
                    // But if you write, the <Value> must be same as the value in <DynamicPcdBuildDefinitions>.
                    //
                    if (!token.isSkuEnable() &&
                        (token.getDefaultSku().type == DynamicTokenValue.VALUE_TYPE.DEFAULT_TYPE) &&
                        (datum != null)) {
                        if (!datum.equalsIgnoreCase(token.getDefaultSku().value)) {
                            exceptionString = String.format("[FPD file error] For dynamic PCD %s in module %s, the datum in <ModuleSA> is "+
                                                            "not equal to the datum in <DynamicPcdBuildDefinitions>, it is "+
                                                            "illega! You could no set <Value> in <ModuleSA> for a dynamic PCD!",
                                                            token.cName,
                                                            moduleName);
                            throw new EntityException(exceptionString);
                        }
                    }

                    if ((maxDatumSize != 0) &&
                        (maxDatumSize != token.datumSize)){
                        exceptionString = String.format("[FPD file error] For dynamic PCD %s in module %s, the max datum size is %d which "+
                                                        "is different with <MaxDatumSize> %d defined in <DynamicPcdBuildDefinitions>!",
                                                        token.cName,
                                                        moduleName,
                                                        maxDatumSize,
                                                        token.datumSize);
                        throw new EntityException(exceptionString);
                    }
                }
            } else {
                //
                // If the token is not in database, create a new token instance and add
                // a usage instance into this token in database.
                //
                token = new Token(pcdBuildData.getCName(),
                                  translateSchemaStringToUUID(pcdBuildData.getTokenSpaceGuid()));

                token.datumType = datumType;
                token.tokenNumber = tokenNumber;
                token.isDynamicPCD = Token.isDynamic(pcdType);
                token.datumSize = maxDatumSize;

                if (token.isDynamicPCD) {
                    //
                    // For Dynamic and Dynamic Ex type, need find the dynamic information
                    // in <DynamicPcdBuildDefinition> section in FPD file.
                    //
                    updateDynamicInformation(moduleName,
                                             token,
                                             datum,
                                             maxDatumSize);
                }

                dbManager.addTokenToDatabase(primaryKey, token);
            }

            //
            // -----------------------------------------------------------------------------------
            // 2.1.3), Add the PcdType in current module into this Pcd token's supported PCD type.
            // -----------------------------------------------------------------------------------
            //
            token.updateSupportPcdType(pcdType);

            //
            // ------------------------------------------------
            // 2.1.4), Create an usage instance for this token.
            // ------------------------------------------------
            //
            usageInstance = new UsageInstance(token,
                                              moduleName,
                                              null,
                                              null,
                                              null,
                                              modules.get(index).type,
                                              pcdType,
                                              modules.get(index).module.getArch().toString(),
                                              null,
                                              datum,
                                              maxDatumSize);
            token.addUsageInstance(usageInstance);
        }
    }
}
/**
   Verify that a PCD datum string is consistent with its declared datum
   type and maximum datum size. This check may move into FPD verification
   tools in the future.

   @param cName        the C name of the PCD entry being verified
   @param moduleName   the module using this PCD, or null when the datum
                       comes from the &lt;DynamicPcdBuildDefinitions&gt; section
   @param datum        the datum string to verify; may be null, in which
                       case only the size/type consistency is checked
   @param datumType    the declared datum type of the PCD
   @param maxDatumSize the declared maximum datum size in bytes

   @return an error-message string describing the first problem found,
           or null when the datum is valid
*/
public String verifyDatum(String cName,
                          String moduleName,
                          String datum,
                          Token.DATUM_TYPE datumType,
                          int maxDatumSize) {
    String exceptionString = null;
    int value;
    BigInteger value64;
    int index;

    //
    // A null module name means the datum came from the platform-level
    // <DynamicPcdBuildDefinitions> section rather than from a module.
    //
    if (moduleName == null) {
        moduleName = "section <DynamicPcdBuildDefinitions>";
    } else {
        moduleName = "module " + moduleName;
    }

    if (maxDatumSize == 0) {
        exceptionString = String.format("[FPD file error] You maybe miss <MaxDatumSize> for PCD %s in %s",
                                        cName,
                                        moduleName);
        return exceptionString;
    }

    switch (datumType) {
    case UINT8:
        if (maxDatumSize != 1) {
            exceptionString = String.format("[FPD file error] The datum type of PCD data %s in %s "+
                                            "is UINT8, but datum size is %d, they are not matched!",
                                            cName,
                                            moduleName,
                                            maxDatumSize);
            return exceptionString;
        }

        if (datum != null) {
            try {
                value = Integer.decode(datum);
            } catch (NumberFormatException nfeExp) {
                exceptionString = String.format("[FPD file error] The datum for PCD %s in %s is not valid "+
                                                "digital format of UINT8",
                                                cName,
                                                moduleName);
                return exceptionString;
            }
            if (value > 0xFF) {
                exceptionString = String.format("[FPD file error] The datum for PCD %s in %s is %s exceed"+
                                                " the max size of UINT8 - 0xFF",
                                                cName,
                                                moduleName,
                                                datum);
                return exceptionString;
            }
        }
        break;
    case UINT16:
        if (maxDatumSize != 2) {
            exceptionString = String.format("[FPD file error] The datum type of PCD data %s in %s "+
                                            "is UINT16, but datum size is %d, they are not matched!",
                                            cName,
                                            moduleName,
                                            maxDatumSize);
            return exceptionString;
        }
        if (datum != null) {
            try {
                value = Integer.decode(datum);
            } catch (NumberFormatException nfeExp) {
                exceptionString = String.format("[FPD file error] The datum for PCD %s in %s is "+
                                                "not valid digital of UINT16",
                                                cName,
                                                moduleName);
                return exceptionString;
            }
            if (value > 0xFFFF) {
                exceptionString = String.format("[FPD file error] The datum for PCD %s in %s is %s "+
                                                "which exceed the range of UINT16 - 0xFFFF",
                                                cName,
                                                moduleName,
                                                datum);
                return exceptionString;
            }
        }
        break;
    case UINT32:
        if (maxDatumSize != 4) {
            exceptionString = String.format("[FPD file error] The datum type of PCD data %s in %s "+
                                            "is UINT32, but datum size is %d, they are not matched!",
                                            cName,
                                            moduleName,
                                            maxDatumSize);
            return exceptionString;
        }

        if (datum != null) {
            try {
                value64 = parseIntegerDatum(datum);
            } catch (NumberFormatException nfeExp) {
                exceptionString = String.format("[FPD file error] The datum for PCD %s in %s is not "+
                                                "valid digital of UINT32",
                                                cName,
                                                moduleName);
                return exceptionString;
            }

            if (value64.bitLength() > 32) {
                exceptionString = String.format("[FPD file error] The datum for PCD %s in %s is %s which "+
                                                "exceed the range of UINT32 - 0xFFFFFFFF",
                                                cName,
                                                moduleName,
                                                datum);
                return exceptionString;
            }
        }
        break;
    case UINT64:
        if (maxDatumSize != 8) {
            exceptionString = String.format("[FPD file error] The datum type of PCD data %s in %s "+
                                            "is UINT64, but datum size is %d, they are not matched!",
                                            cName,
                                            moduleName,
                                            maxDatumSize);
            return exceptionString;
        }

        if (datum != null) {
            try {
                value64 = parseIntegerDatum(datum);
            } catch (NumberFormatException nfeExp) {
                exceptionString = String.format("[FPD file error] The datum for PCD %s in %s is not valid"+
                                                " digital of UINT64",
                                                cName,
                                                moduleName);
                return exceptionString;
            }
            if (value64.bitLength() > 64) {
                exceptionString = String.format("[FPD file error] The datum for PCD %s in %s is %s "+
                                                "exceed the range of UINT64 - 0xFFFFFFFFFFFFFFFF",
                                                cName,
                                                moduleName,
                                                datum);
                return exceptionString;
            }
        }
        break;
    case BOOLEAN:
        if (maxDatumSize != 1) {
            exceptionString = String.format("[FPD file error] The datum type of PCD data %s in %s "+
                                            "is BOOLEAN, but datum size is %d, they are not matched!",
                                            cName,
                                            moduleName,
                                            maxDatumSize);
            return exceptionString;
        }

        if (datum != null) {
            if (!(datum.equalsIgnoreCase("TRUE") ||
                  datum.equalsIgnoreCase("FALSE"))) {
                exceptionString = String.format("[FPD file error] The datum type of PCD data %s in %s "+
                                                "is BOOLEAN, but value is not 'true'/'TRUE' or 'FALSE'/'false'",
                                                cName,
                                                moduleName);
                return exceptionString;
            }
        }
        break;
    case POINTER:
        if (datum == null) {
            break;
        }

        char ch = datum.charAt(0);
        int start, end;
        String strValue;
        //
        // For void* type PCD, only three datum is support:
        // 1) Unicode: string with start char is "L"
        // 2) Ansci: String start char is ""
        // 3) byte array: String start char "{"
        //
        if (ch == 'L') {
            start = datum.indexOf('\"');
            end = datum.lastIndexOf('\"');
            if ((start > end) ||
                (end > datum.length())||
                ((start == end) && (datum.length() > 0))) {
                exceptionString = String.format("[FPD file error] The datum type of PCD %s in %s is VOID* and datum is "+
                                                "a UNICODE string because start with L\", but format maybe"+
                                                "is not right, correct UNICODE string is L\"...\"!",
                                                cName,
                                                moduleName);
                return exceptionString;
            }

            strValue = datum.substring(start + 1, end);
            //
            // Each UNICODE character occupies two bytes.
            //
            if ((strValue.length() * 2) > maxDatumSize) {
                exceptionString = String.format("[FPD file error] The datum type of PCD %s in %s is VOID*, and datum is "+
                                                "a UNICODE string, but the datum size is %d exceed to <MaxDatumSize> : %d",
                                                cName,
                                                moduleName,
                                                strValue.length() * 2,
                                                maxDatumSize);
                return exceptionString;
            }
        } else if (ch == '\"'){
            start = datum.indexOf('\"');
            end = datum.lastIndexOf('\"');
            if ((start > end) ||
                (end > datum.length())||
                ((start == end) && (datum.length() > 0))) {
                exceptionString = String.format("[FPD file error] The datum type of PCD %s in %s is VOID* and datum is "+
                                                "a ANSCII string because start with \", but format maybe"+
                                                "is not right, correct ANSIC string is \"...\"!",
                                                cName,
                                                moduleName);
                return exceptionString;
            }
            strValue = datum.substring(start + 1, end);
            if ((strValue.length()) > maxDatumSize) {
                exceptionString = String.format("[FPD file error] The datum type of PCD %s in %s is VOID*, and datum is "+
                                                "a ANSCI string, but the datum size is %d which exceed to <MaxDatumSize> : %d",
                                                cName,
                                                moduleName,
                                                strValue.length(),
                                                maxDatumSize);
                return exceptionString;
            }
        } else if (ch =='{') {
            //
            // Byte-array form: every comma-separated element must be a
            // valid byte value in [0, 0xFF].
            //
            String[] strValueArray;

            start = datum.indexOf('{');
            end = datum.lastIndexOf('}');
            strValue = datum.substring(start + 1, end);
            strValue = strValue.trim();
            if (strValue.length() == 0) {
                break;
            }
            strValueArray = strValue.split(",");
            for (index = 0; index < strValueArray.length; index ++) {
                try{
                    value = Integer.decode(strValueArray[index].trim());
                } catch (NumberFormatException nfeEx) {
                    exceptionString = String.format("[FPD file error] The datum type of PCD %s in %s is VOID*, and "+
                                                    "it is byte array in fact. For every byte in array should be a valid"+
                                                    "byte digital, but element %s is not a valid byte digital!",
                                                    cName,
                                                    moduleName,
                                                    strValueArray[index]);
                    return exceptionString;
                }
                if (value > 0xFF) {
                    exceptionString = String.format("[FPD file error] The datum type of PCD %s in %s is VOID*, "+
                                                    "it is byte array in fact. But the element of %s exceed the byte range",
                                                    cName,
                                                    moduleName,
                                                    strValueArray[index]);
                    return exceptionString;
                }
            }

            if (strValueArray.length > maxDatumSize) {
                exceptionString = String.format("[FPD file error] The datum type of PCD %s in %s is VOID*, and datum is byte"+
                                                "array, but the number of bytes is %d which exceed to <MaxDatumSzie> : %d!",
                                                cName,
                                                moduleName,
                                                strValueArray.length,
                                                maxDatumSize);
                return exceptionString;
            }
        } else {
            exceptionString = String.format("[FPD file error] The datum type of PCD %s in %s is VOID*. For VOID* type, you have three format choise:\n "+
                                            "1) UNICODE string: like L\"xxxx\";\r\n"+
                                            "2) ANSIC string: like \"xxx\";\r\n"+
                                            "3) Byte array: like {0x2, 0x45, 0x23}\r\n"+
                                            "But the datum in seems does not following above format!",
                                            cName,
                                            moduleName);
            return exceptionString;
        }
        break;
    default:
        exceptionString = String.format("[FPD file error] For PCD entry %s in %s, datum type is unknown, it should be one of "+
                                        "UINT8, UINT16, UINT32, UINT64, VOID*, BOOLEAN",
                                        cName,
                                        moduleName);
        return exceptionString;
    }
    return null;
}

/**
   Parse an integer datum string that may be written either in hexadecimal
   with a "0x"/"0X" prefix or in plain decimal.

   @param datum the datum string; must be non-null
   @return the parsed value
   @throws NumberFormatException if the string is not a valid number
*/
private static BigInteger parseIntegerDatum(String datum) {
    if (datum.length() > 2) {
        if ((datum.charAt(0) == '0') &&
            ((datum.charAt(1) == 'x') || (datum.charAt(1) == 'X'))){
            return new BigInteger(datum.substring(2, datum.length()), 16);
        }
    }
    return new BigInteger(datum);
}
/**
   Look up the dynamic build information for a dynamic PCD in the
   &lt;DynamicPcdBuildDefinitions&gt; section of the FPD file.
   This function should be implemented in GlobalData in future.

   @param token      the token instance holding the module's PCD information
   @param moduleName the name of the module that uses this dynamic PCD

   @return the matching PcdBuildData entry, or null when no entry matches

   @throws EntityException if the FPD file cannot be parsed, the
                           &lt;DynamicPcdBuildDefinitions&gt; section is
                           missing, or an entry lacks &lt;TokenSpaceGuid&gt;
*/
private DynamicPcdBuildDefinitions.PcdBuildData getDynamicInfoFromFPD(Token token,
                                                                      String moduleName)
    throws EntityException {
    //
    // Lazily open and parse the FPD document on first use.
    //
    if (fpdDocInstance == null) {
        try {
            fpdDocInstance = (FrameworkPlatformDescriptionDocument)XmlObject.Factory.parse(new File(fpdFilePath));
        } catch(IOException ioE) {
            throw new EntityException("File IO error for xml file:" + fpdFilePath + "\n" + ioE.getMessage());
        } catch(XmlException xmlE) {
            throw new EntityException("Can't parse the FPD xml fle:" + fpdFilePath + "\n" + xmlE.getMessage());
        }
    }

    DynamicPcdBuildDefinitions dynamicSection =
        fpdDocInstance.getFrameworkPlatformDescription().getDynamicPcdBuildDefinitions();
    if (dynamicSection == null) {
        throw new EntityException(String.format("[FPD file error] There are no <PcdDynamicBuildDescriptions> in FPD file but contains Dynamic type "+
                                                "PCD entry %s in module %s!",
                                                token.cName,
                                                moduleName));
    }

    //
    // Scan every dynamic entry for one whose (cName, token-space GUID)
    // primary key matches the requested token.
    //
    for (DynamicPcdBuildDefinitions.PcdBuildData candidate : dynamicSection.getPcdBuildDataList()) {
        //
        // Check <TokenSpaceGuid> is exist? In future, because all schema verification will tools
        // will check that, following checking code could be removed.
        //
        if (candidate.getTokenSpaceGuid() == null) {
            throw new EntityException(String.format("[FPD file error] There is no <TokenSpaceGuid> for PCD %s in <DynamicPcdBuildDefinitions>! This is required!",
                                                    candidate.getCName()));
        }

        String candidateKey = Token.getPrimaryKeyString(candidate.getCName(),
                                                        translateSchemaStringToUUID(candidate.getTokenSpaceGuid()));
        if (candidateKey.equalsIgnoreCase(token.getPrimaryKeyString())) {
            return candidate;
        }
    }

    return null;
}
/**
   Update dynamic information for a PCD entry.

   The dynamic information is retrieved from the
   &lt;DynamicPcdBuildDefinitions&gt; section in the FPD file and cross-checked
   against the module-level information (datum, max datum size, token
   number). Every SKU entry is classified as default-value, HII, or VPD
   and appended to the token's SKU data.

   @param moduleName   the name of the module that uses this PCD
   @param token        the token instance to update
   @param datum        the &lt;datum&gt; from the module's PCD information; may be null
   @param maxDatumSize the &lt;maxDatumSize&gt; from the module's PCD information

   @return the updated token (same instance as the token parameter)

   @throws EntityException if the dynamic information is missing or
                           inconsistent with the module-level information
*/
private Token updateDynamicInformation(String moduleName,
                                       Token token,
                                       String datum,
                                       int maxDatumSize)
    throws EntityException {
    int index = 0;
    int offset;
    String exceptionString = null;
    // NOTE(review): dynamicValue appears unused in this method.
    DynamicTokenValue dynamicValue;
    SkuInstance skuInstance = null;
    String temp;
    boolean hasSkuId0 = false;
    Token.PCD_TYPE pcdType = Token.PCD_TYPE.UNKNOWN;
    int tokenNumber = 0;
    String hiiDefaultValue = null;
    String[] variableGuidString = null;

    List<DynamicPcdBuildDefinitions.PcdBuildData.SkuInfo> skuInfoList = null;
    DynamicPcdBuildDefinitions.PcdBuildData dynamicInfo = null;

    dynamicInfo = getDynamicInfoFromFPD(token, moduleName);
    if (dynamicInfo == null) {
        exceptionString = String.format("[FPD file error] For Dynamic PCD %s used by module %s, "+
                                        "there is no dynamic information in <DynamicPcdBuildDefinitions> "+
                                        "in FPD file, but it is required!",
                                        token.cName,
                                        moduleName);
        throw new EntityException(exceptionString);
    }

    //
    // The platform-level <MaxDatumSize> is authoritative; verify it is
    // consistent with the token's datum type.
    //
    token.datumSize = dynamicInfo.getMaxDatumSize();

    exceptionString = verifyDatum(token.cName,
                                  moduleName,
                                  null,
                                  token.datumType,
                                  token.datumSize);
    if (exceptionString != null) {
        throw new EntityException(exceptionString);
    }

    if ((maxDatumSize != 0) &&
        (maxDatumSize != token.datumSize)) {
        exceptionString = String.format("FPD file error] For dynamic PCD %s, the datum size in module %s is %d, but "+
                                        "the datum size in <DynamicPcdBuildDefinitions> is %d, they are not match!",
                                        token.cName,
                                        moduleName,
                                        maxDatumSize,
                                        dynamicInfo.getMaxDatumSize());
        throw new EntityException(exceptionString);
    }
    tokenNumber = Integer.decode(dynamicInfo.getToken().toString());
    if (tokenNumber != token.tokenNumber) {
        // NOTE(review): the message prints token.tokenNumber for the module
        // side and tokenNumber for the platform side — confirm the intended
        // ordering matches the wording.
        exceptionString = String.format("[FPD file error] For dynamic PCD %s, the token number in module %s is 0x%x, but"+
                                        "in <DynamicPcdBuildDefinictions>, the token number is 0x%x, they are not match!",
                                        token.cName,
                                        moduleName,
                                        token.tokenNumber,
                                        tokenNumber);
        throw new EntityException(exceptionString);
    }

    pcdType = Token.getpcdTypeFromString(dynamicInfo.getItemType().toString());
    if (pcdType == Token.PCD_TYPE.DYNAMIC_EX) {
        token.dynamicExTokenNumber = tokenNumber;
    }

    skuInfoList = dynamicInfo.getSkuInfoList();

    //
    // Loop all sku data
    //
    for (index = 0; index < skuInfoList.size(); index ++) {
        skuInstance = new SkuInstance();
        //
        // Although SkuId in schema is BigInteger, but in fact, sku id is 32 bit value.
        //
        temp = skuInfoList.get(index).getSkuId().toString();
        skuInstance.id = Integer.decode(temp);
        if (skuInstance.id == 0) {
            hasSkuId0 = true;
        }
        //
        // Judge whether is DefaultGroup at first, because most case is DefautlGroup.
        //
        if (skuInfoList.get(index).getValue() != null) {
            skuInstance.value.setValue(skuInfoList.get(index).getValue().toString());
            if ((exceptionString = verifyDatum(token.cName,
                                               null,
                                               skuInfoList.get(index).getValue().toString(),
                                               token.datumType,
                                               token.datumSize)) != null) {
                throw new EntityException(exceptionString);
            }

            token.skuData.add(skuInstance);

            //
            // Judege wether is same of datum between module's information
            // and dynamic information.
            //
            if (datum != null) {
                if ((skuInstance.id == 0) &&
                    !datum.toString().equalsIgnoreCase(skuInfoList.get(index).getValue().toString())) {
                    exceptionString = "[FPD file error] For dynamic PCD " + token.cName + ", the value in module " + moduleName + " is " + datum.toString() + " but the "+
                                      "value of sku 0 data in <DynamicPcdBuildDefinition> is " + skuInstance.value.value + ". They are must be same!"+
                                      " or you could not define value for a dynamic PCD in every <ModuleSA>!";
                    throw new EntityException(exceptionString);
                }
            }
            continue;
        }

        //
        // Judge whether is HII group case.
        //
        if (skuInfoList.get(index).getVariableName() != null) {
            exceptionString = null;
            if (skuInfoList.get(index).getVariableGuid() == null) {
                exceptionString = String.format("[FPD file error] For dynamic PCD %s in <DynamicPcdBuildDefinitions> section in FPD "+
                                                "file, who use HII, but there is no <VariableGuid> defined for Sku %d data!",
                                                token.cName,
                                                index);
                if (exceptionString != null) {
                    throw new EntityException(exceptionString);
                }
            }

            if (skuInfoList.get(index).getVariableOffset() == null) {
                exceptionString = String.format("[FPD file error] For dynamic PCD %s in <DynamicPcdBuildDefinitions> section in FPD "+
                                                "file, who use HII, but there is no <VariableOffset> defined for Sku %d data!",
                                                token.cName,
                                                index);
                if (exceptionString != null) {
                    throw new EntityException(exceptionString);
                }
            }

            if (skuInfoList.get(index).getHiiDefaultValue() == null) {
                exceptionString = String.format("[FPD file error] For dynamic PCD %s in <DynamicPcdBuildDefinitions> section in FPD "+
                                                "file, who use HII, but there is no <HiiDefaultValue> defined for Sku %d data!",
                                                token.cName,
                                                index);
                if (exceptionString != null) {
                    throw new EntityException(exceptionString);
                }
            }

            if (skuInfoList.get(index).getHiiDefaultValue() != null) {
                hiiDefaultValue = skuInfoList.get(index).getHiiDefaultValue().toString();
            } else {
                hiiDefaultValue = null;
            }

            if ((exceptionString = verifyDatum(token.cName,
                                               null,
                                               hiiDefaultValue,
                                               token.datumType,
                                               token.datumSize)) != null) {
                throw new EntityException(exceptionString);
            }

            //
            // The HII variable offset is limited to 64K.
            //
            offset = Integer.decode(skuInfoList.get(index).getVariableOffset());
            if (offset > 0xFFFF) {
                throw new EntityException(String.format("[FPD file error] For dynamic PCD %s , the variable offset defined in sku %d data "+
                                                        "exceed 64K, it is not allowed!",
                                                        token.cName,
                                                        index));
            }

            //
            // Get variable guid string according to the name of guid which will be mapped into a GUID in SPD file.
            //
            variableGuidString = GlobalData.getGuidInfoGuid(skuInfoList.get(index).getVariableGuid().toString());
            if (variableGuidString == null) {
                throw new EntityException(String.format("[GUID Error] For dynamic PCD %s, the variable guid %s can be found in all SPD file!",
                                                        token.cName,
                                                        skuInfoList.get(index).getVariableGuid().toString()));
            }

            skuInstance.value.setHiiData(skuInfoList.get(index).getVariableName(),
                                         translateSchemaStringToUUID(variableGuidString[1]),
                                         skuInfoList.get(index).getVariableOffset(),
                                         skuInfoList.get(index).getHiiDefaultValue().toString());
            token.skuData.add(skuInstance);
            continue;
        }

        //
        // VPD group case: only a VPD offset is provided.
        //
        if (skuInfoList.get(index).getVpdOffset() != null) {
            skuInstance.value.setVpdData(skuInfoList.get(index).getVpdOffset());
            token.skuData.add(skuInstance);
            continue;
        }

        exceptionString = String.format("[FPD file error] For dynamic PCD %s, the dynamic info must "+
                                        "be one of 'DefaultGroup', 'HIIGroup', 'VpdGroup'.",
                                        token.cName);
        throw new EntityException(exceptionString);
    }

    //
    // Every dynamic PCD must provide data for SKU id 0.
    //
    if (!hasSkuId0) {
        exceptionString = String.format("[FPD file error] For dynamic PCD %s in <DynamicPcdBuildDefinitions>, there are "+
                                        "no sku id = 0 data, which is required for every dynamic PCD",
                                        token.cName);
        throw new EntityException(exceptionString);
    }

    return token;
}
/**
   Translate a schema UUID string into a UUID instance.

   In the schema, a UUID string may appear in either of two forms:

   1) GuidArrayType: pattern = 0x[a-fA-F0-9]{1,8},( )*0x[a-fA-F0-9]{1,4},(
   )*0x[a-fA-F0-9]{1,4}(,( )*\{)?(,?( )*0x[a-fA-F0-9]{1,2}){8}( )*(\})?

   2) GuidNamingConvention: pattern =
   [a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}

   A GuidArrayType string is first converted into the naming-convention
   form, after which UUID.fromString does the actual parsing.

   @param uuidString UUID string from the XML file; may be null or empty

   @return the UUID instance, or null when the input is null or empty

   @throws EntityException if a GuidArrayType string does not split into
                           exactly eleven comma-separated fields
**/
private UUID translateSchemaStringToUUID(String uuidString)
    throws EntityException {
    if (uuidString == null) {
        return null;
    }

    if (uuidString.length() == 0) {
        return null;
    }

    //
    // "0" and "0x0" are shorthand for the nil UUID.
    //
    if (uuidString.equals("0") ||
        uuidString.equalsIgnoreCase("0x0")) {
        return new UUID(0, 0);
    }

    String normalized = uuidString.replaceAll("\\{", "").replaceAll("\\}", "");

    //
    // If the UUID schema string is GuidArrayType type then need translate
    // to GuidNamingConvention type at first.
    //
    if ((normalized.charAt(0) == '0') && ((normalized.charAt(1) == 'x') || (normalized.charAt(1) == 'X'))) {
        String[] parts = normalized.split("," );
        if (parts.length != 11) {
            throw new EntityException ("[FPD file error] Wrong format for UUID string: " + normalized);
        }

        //
        // Remove blank space from these string and remove header string "0x"
        //
        for (int i = 0; i < 11; i ++) {
            parts[i] = parts[i].trim().substring(2);
        }

        //
        // Pad the eight byte fields with leading zeros so each is exactly
        // two hex digits.
        //
        for (int i = 3; i < 11; i ++) {
            while (parts[i].length() < 2) {
                parts[i] = "0" + parts[i];
            }
        }

        //
        // construct the final GuidNamingConvention string
        //
        normalized = String.format("%s-%s-%s-%s%s-%s%s%s%s%s%s",
                                   parts[0],
                                   parts[1],
                                   parts[2],
                                   parts[3],
                                   parts[4],
                                   parts[5],
                                   parts[6],
                                   parts[7],
                                   parts[8],
                                   parts[9],
                                   parts[10]);
    }

    return UUID.fromString(normalized);
}
/**
   Check that the mandatory parameters for this action are set and valid.

   @throws EntityException if the workspace path or FPD file name is null,
                           empty, or does not exist on disk
**/
private void checkParameter() throws EntityException {
    File file = null;

    //
    // Both paths are mandatory. (The original messages said the values
    // "should be blank", which inverted the intended meaning.)
    //
    if((fpdFilePath == null) ||(workspacePath == null)) {
        throw new EntityException("WorkspacePath and FPDFileName must not be null for CollectPCDAction!");
    }

    if(fpdFilePath.length() == 0 || workspacePath.length() == 0) {
        throw new EntityException("WorkspacePath and FPDFileName must not be empty for CollectPCDAction!");
    }

    file = new File(workspacePath);
    if(!file.exists()) {
        throw new EntityException("WorkspacePath " + workspacePath + " does not exist!");
    }

    file = new File(fpdFilePath);
    if(!file.exists()) {
        throw new EntityException("FPD File " + fpdFilePath + " does not exist!");
    }
}
/**
   Test case function: manual smoke test for the collect-PCD action.

   NOTE(review): uses hard-coded developer workspace paths ("m:/tianocore/edk2"),
   so this entry point only works on the original author's machine; it is a
   manual test harness, not production code.

   @param argv parameter from command line (unused)

   @throws EntityException propagated from parameter checking or execution
**/
public static void main(String argv[]) throws EntityException {
    CollectPCDAction ca = new CollectPCDAction();
    ca.setWorkspacePath("m:/tianocore/edk2");
    ca.setFPDFilePath("m:/tianocore/edk2/EdkNt32Pkg/Nt32.fpd");
    ca.setActionMessageLevel(ActionMessage.MAX_MESSAGE_LEVEL);
    // Initialize the global framework database before executing the action.
    GlobalData.initInfo("Tools" + File.separator + "Conf" + File.separator + "FrameworkDatabase.db",
                        "m:/tianocore/edk2");
    ca.execute();
}
}
|
1) make sure a empty database will be generated if the Uinitialized database portion is empty.
2) add a newline at the end of Autogen.c for PCD driver/PEIM.
git-svn-id: 5648d1bec6962b0a6d1d1b40eba8cf5cdb62da3d@606 6f19259b-4bc3-4df7-8a09-765794883524
|
Tools/Source/GenBuild/org/tianocore/build/pcd/action/CollectPCDAction.java
|
1) make sure a empty database will be generated if the Uinitialized database portion is empty.
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.